| field | dtype | value lengths / classes |
|---|---|---|
| commit | string | 40 |
| old_file | string | 4–118 |
| new_file | string | 4–118 |
| old_contents | string | 0–2.94k |
| new_contents | string | 1–4.43k |
| subject | string | 15–444 |
| message | string | 16–3.45k |
| lang | string | 1 class |
| license | string | 13 classes |
| repos | string | 5–43.2k |
| prompt | string | 17–4.58k |
| response | string | 1–4.43k |
| prompt_tagged | string | 58–4.62k |
| response_tagged | string | 1–4.43k |
| text | string | 132–7.29k |
| text_tagged | string | 173–7.33k |
36acb0222c3b5366de148460eb858c84eec4f5a4
|
play.py
|
play.py
|
#! /usr/bin/env python
# title           :play.py
# description     :Decides between 2 versions of reversi
# author          :andresthor
# date            :07-02-2017
# usage           :python play.py
# python_version  :3.5.2
# =============================================================================
# A very crude way of choosing which version of the game to play. Just checks
# for the existence of pygame and what version of python the user has

import sys

print('\nREVERSI')

has_pygame = True
python_version = sys.version_info[0]
if python_version >= 3:
    try:
        import pygame
    except ImportError:
        print('pygame not found - can only run cmd-line version.')
        has_pygame = False
else:
    print('Not running python3. GUI version will likely not run on python2.')

choice = 0
while choice not in [1, 2]:
    try:
        choice = int(input('\n1. Command-line version\n2. GUI version\n'))
    except ValueError:
        pass

if choice == 1:
    exec(open('cmd_line.py').read())
elif choice == 2:
    if not has_pygame:
        print('You need to install pygame to play the GUI version!')
    else:
        exec(open('gui.py').read())

print('Exiting program!')
|
Add crude way of selecting game interface/version
|
Add crude way of selecting game interface/version
|
Python
|
mit
|
andresthor/reversi
|
5d42b027d5f438bb66de70c23b8d1631cac4ddd5
|
array/bubble-sort.py
|
array/bubble-sort.py
|
# Bubble sort python implementation
def bubble_sort(arr):
    length = len(arr)
    for i in range(length):
        for j in range(0, length - i - 1):
            if arr[j] > arr[j + 1]:
                # Swap the adjacent elements that are out of order
                arr[j], arr[j + 1] = arr[j + 1], arr[j]
    return arr
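A quick sanity check of the sort (the sample values are illustrative, not part of the commit):

    if __name__ == '__main__':
        assert bubble_sort([5, 1, 4, 2, 8]) == [1, 2, 4, 5, 8]
        assert bubble_sort([]) == []
        print(bubble_sort([3, 0, -2, 7]))  # [-2, 0, 3, 7]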
|
Add bubble sort method in python
|
Add bubble sort method in python
|
Python
|
mit
|
derekmpham/interview-prep,derekmpham/interview-prep
|
139936eb92ea295cae620011c93bb4a1e41a32f3
|
wsgi.py
|
wsgi.py
|
# Copyright (c) 2014 Matthias Klumpp <mak@debian.org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from app import app
from debile.master.utils import init_master

# needs to be called unconditionally
init_master()

if __name__ == "__main__":
    app.run()
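One hypothetical way to serve this module with uWSGI; the port and process count are placeholder values, and `app` is the application object imported above:

    uwsgi --http :8080 --wsgi-file wsgi.py --callable app --processes 4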
|
Add simple uWSGI helper script
|
Add simple uWSGI helper script
|
Python
|
mit
|
opencollab/debile-web,opencollab/debile-web,opencollab/debile-web
|
89f00372e1cf86c3ee48d292982cfca376010780
|
scripts/jenkins_plugins.py
|
scripts/jenkins_plugins.py
|
#!/usr/bin/env python3
"""Fetch and format currently installed Jenkins plugins as puppet configuration.
"""

import json
import sys
from argparse import ArgumentParser
from base64 import b64encode
from datetime import datetime
from urllib.request import Request, urlopen

resource_template = """  ::jenkins::plugin {{ '{name}':
    version => '{version}',
    require => {dependency_list}
  }}
"""


def main():
    parser = ArgumentParser(description="Generate a puppet class for currently installed Jenkins plugins.")
    parser.add_argument("--username")
    parser.add_argument("--password")
    parser.add_argument("--jenkins")
    args = parser.parse_args(sys.argv[1:])
    generate_class(args)


def generate_class(args):
    print("# This module was automatically generated on {0:%Y-%m-%d %H:%M:%S}".format(datetime.now()))
    print("# Instead of editing it, update plugins via the Jenkins web UI and rerun the generator.")
    print("# Otherwise your changes will be overwritten the next time it is run.")
    print("class profile::jenkins::rosplugins {")
    ciurl = "http://{}/pluginManager/api/json?depth=5".format(args.jenkins)
    plugin_request = Request(ciurl)
    plugin_request.add_header("Authorization", authorization_header_for(args.username, args.password))
    response = urlopen(plugin_request)
    parsed = json.loads(response.read().decode())
    for plugin in sorted(parsed['plugins'], key=lambda p: p['shortName']):
        dependencies = ", ".join(["Jenkins::Plugin['{}']".format(dep['shortName'])
                                  for dep in sorted(plugin['dependencies'], key=lambda d: d['shortName'])])
        print(resource_template.format(name=plugin['shortName'], version=plugin['version'],
                                       dependency_list="[ " + dependencies + " ]"))
    print("}")


def authorization_header_for(username, password):
    encoded = b64encode((username + ":" + password).encode('ascii'))
    return "Basic {}".format(encoded.decode('ascii'))


if __name__ == '__main__':
    main()
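A hypothetical invocation, with placeholder credentials and host; the script writes the generated class to stdout, so it can be redirected into a manifest:

    python3 scripts/jenkins_plugins.py --username admin --password s3cret \
        --jenkins ci.example.com > rosplugins.pp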
|
Use current Jenkins plugin API to generate puppet class for plugins.
|
Use current Jenkins plugin API to generate puppet class for plugins.
This script can connect to a live Jenkins instance and fetch plugin
data, then output a puppet class with the plugin versions and
dependencies specified.
This does not handle the job of configuring any plugins, that still
needs to be done manually.
There's lots of cleanup to be done with this script but I'm satisfied
enough for a Saturday afternoon.
|
Python
|
apache-2.0
|
clearpathrobotics/buildfarm_deployment,clearpathrobotics/buildfarm_deployment,ros-infrastructure/buildfarm_deployment,clearpathrobotics/buildfarm_deployment,ros-infrastructure/buildfarm_deployment,ros-infrastructure/buildfarm_deployment,ros-infrastructure/buildfarm_deployment
|
8591c4eac8f90612143fe64db9a36d18f08819ad
|
contrib/performance/event_move.py
|
contrib/performance/event_move.py
|
from itertools import count, cycle
from urllib2 import HTTPDigestAuthHandler

from twisted.internet import reactor
from twisted.internet.defer import inlineCallbacks, returnValue
from twisted.web.client import Agent
from twisted.web.http_headers import Headers

from httpauth import AuthHandlerAgent
from httpclient import StringProducer
from benchlib import initialize, sample
from event import makeEvent


@inlineCallbacks
def measure(host, port, dtrace, attendeeCount, samples):
    user = password = "user01"
    root = "/"
    principal = "/"

    # Two calendars between which to move the event.
    fooCalendar = "event-move-foo-benchmark"
    barCalendar = "event-move-bar-benchmark"

    authinfo = HTTPDigestAuthHandler()
    authinfo.add_password(
        realm="Test Realm",
        uri="http://%s:%d/" % (host, port),
        user=user,
        passwd=password)
    agent = AuthHandlerAgent(Agent(reactor), authinfo)

    # Set up the calendars first
    for calendar in [fooCalendar, barCalendar]:
        yield initialize(
            agent, host, port, user, password, root, principal, calendar)

    fooURI = 'http://%s:%d/calendars/__uids__/%s/%s/some-event.ics' % (
        host, port, user, fooCalendar)
    barURI = 'http://%s:%d/calendars/__uids__/%s/%s/some-event.ics' % (
        host, port, user, barCalendar)

    # Create the event that will move around
    headers = Headers({"content-type": ["text/calendar"]})
    yield agent.request(
        'PUT', fooURI, headers, StringProducer(makeEvent(attendeeCount, 1)))

    # Move it back and forth between the two calendars repeatedly
    source = cycle([fooURI, barURI])
    dest = cycle([barURI, fooURI])
    params = (
        ('MOVE', source.next(),
         Headers({"destination": [dest.next()], "overwrite": ["F"]}))
        for i in count(1))
    samples = yield sample(dtrace, samples, agent, params.next)
    returnValue(samples)
|
Add a benchmark for moving events between calendars
|
Add a benchmark for moving events between calendars
git-svn-id: 81e381228600e5752b80483efd2b45b26c451ea2@6294 e27351fd-9f3e-4f54-a53b-843176b1656c
|
Python
|
apache-2.0
|
trevor/calendarserver,trevor/calendarserver,trevor/calendarserver
|
24e21dd5e11844e333278ba580da3814ad513c1a
|
src/excel_sheet_column_number.py
|
src/excel_sheet_column_number.py
|
"""
Source : https://oj.leetcode.com/problems/excel-sheet-column-number/
Author : Changxi Wu
Date : 2015-01-21
Given a column title as appear in an Excel sheet, return its corresponding column number.
For example:
A -> 1
B -> 2
C -> 3
...
Z -> 26
AA -> 27
AB -> 28
"""
def titleToNumber(s):
# @praram s, a string
# @return an integer
result = 0
length = len(s)
for i, c in enumerate(s):
if length == 1:
result += (ord(c)-ord('A')+1)
else:
result += (ord(c)-ord('A')+1)*(26**(length-i-1))
return result
if __name__ == '__main__':
test = {'A':1, 'B':2, 'C':3, 'D':4, 'AA':27, 'AB':28, 'ABC':731}
for s in test.keys():
result = titleToNumber(s)
if result != test[s]:
print 'Input: ', s
print 'Output:', result
print 'Expected:', test[s]
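The computation is bijective base-26 arithmetic: 'ABC', for example, gives 1*26**2 + 2*26 + 3 = 676 + 52 + 3 = 731, which matches the expected value in the test table above.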
|
Add solution for excel sheet column number
|
Add solution for excel sheet column number
|
Python
|
mit
|
chancyWu/leetcode
|
b91efc73046fe1df8e7678e50464daf6a36ddab1
|
char_map.py
|
char_map.py
|
char_map = {
    'a': 3,
    'b': 6,
    'c': 9,
    'd': 12,
    'e': 15,
    'f': 18,
    'g': 21,
    'h': 24,
    'i': 27,
    'j': 30,
    'k': 33,
    'l': 36,
    'm': 39,
    'n': 42,
    'o': 45,
    'p': 48,
    'q': 51,
    'r': 54,
    's': 57,
    't': 60,
    'u': 63,
    'v': 66,
    'w': 69,
    'x': 72,
    'y': 75,
    'z': 78,
    ' ': 81,
    '.': 84,
    '!': 87,
    '?': 90,
    'EOC': 93,  # End of Character value
    'EOF': 96   # End of String value
}
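Each symbol maps to a distinct multiple of 3, so neighbouring values never collide. A minimal sketch of the inverse lookup on the receiving side (this helper is hypothetical and not part of the commit; it assumes readings arrive as the exact mapped values):

    value_map = {value: char for char, value in char_map.items()}

    def decode(values):
        # Translate a sequence of percentage readings back into text,
        # skipping character separators and stopping at end-of-string.
        chars = []
        for v in values:
            token = value_map.get(v)
            if token is None or token == 'EOF':
                break
            if token != 'EOC':
                chars.append(token)
        return ''.join(chars)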
|
Create mapping of characters to percentage values
|
Create mapping of characters to percentage values
|
Python
|
mit
|
eddiezane/hackpack-cloudbit,eddiezane/hackpack-cloudbit
|
e291a2e117444c431d7912beaaa359b695f7ec1f
|
src/rosrepo/__main__.py
|
src/rosrepo/__main__.py
|
# coding=utf-8
#
# ROSREPO
# Manage ROS workspaces with multiple Gitlab repositories
#
# Author: Timo Röhling
#
# Copyright 2016 Fraunhofer FKIE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
from .main import main
main()
|
Make module executable with `python -m rosrepo`
|
Make module executable with `python -m rosrepo`
|
Python
|
apache-2.0
|
fkie/rosrepo,fkie/rosrepo
|
8faf9798d2e9a2b60b70c327ddfe916a49658816
|
src/Route.py
|
src/Route.py
|
from .station_map import station_map


class Route:
    def __init__(self, trip_attrs, legs):
        self.origin = trip_attrs['origin']
        self.destination = trip_attrs['destination']
        self.fare = trip_attrs['fare']
        self.departs = trip_attrs['origTimeMin']
        self.arrives = trip_attrs['destTimeMin']
        self.legs = legs

    def has_transfer(self):
        return len(self.legs) > 1

    def num_transfers(self):
        return len(self.legs) - 1

    def short_description(self):
        return [
            'Departs: ' + self.departs,
            'Arrives: ' + self.arrives,
            'Transfers: {0}'.format(self.num_transfers())
        ]

    def long_description(self):
        s = []
        for leg in self.legs:
            attrs = leg.attrib
            s.append('{departs}: {origin} to {dest} \n'.format(
                departs=attrs['origTimeMin'],
                origin=station_map[attrs['origin'].lower()],
                dest=station_map[attrs['destination'].lower()]
            ))
        s.append('{arrives}: Arrive at {destination}'.format(
            destination=station_map[self.destination.lower()],
            arrives=self.arrives
        ))
        return s
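A hypothetical construction from BART trip data, assuming legs are ElementTree elements whose attributes carry the fields read above (the station codes and times here are made up, and `station_map` must contain the lowercased codes for `long_description` to work):

    import xml.etree.ElementTree as ET

    leg = ET.Element('leg', {'origin': 'EMBR', 'destination': 'MCAR',
                             'origTimeMin': '8:05 AM'})
    trip_attrs = {'origin': 'EMBR', 'destination': 'MCAR', 'fare': '3.60',
                  'origTimeMin': '8:05 AM', 'destTimeMin': '8:21 AM'}
    route = Route(trip_attrs, [leg])
    print(route.short_description())  # departure, arrival, transfer count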
|
Add class for handling routes
|
Add class for handling routes
|
Python
|
mit
|
ganemone/SublimeBart,ganemone/SublimeBart,ganemone/SublimeBart,ganemone/SublimeBart
|
89349fbf73b3377c73bcd5c6c44e24c3a4f62809
|
show_usbcamera_undistort.py
|
show_usbcamera_undistort.py
|
#! /usr/bin/env python
# -*- coding:utf-8 -*-
#
# Show the images from a USB camera
#

# External dependencies
import pickle
import cv2

# Load calibration file
with open( 'calibration.pkl', 'rb' ) as calibration_file :
    calibration = pickle.load( calibration_file )

# Get the camera
camera = cv2.VideoCapture( 0 )

# Acquisition loop
while True :

    # Capture image-by-image
    _, image = camera.read()

    # Undistort the image
    image = cv2.remap( image, calibration['undistort_map'][0], calibration['undistort_map'][1], cv2.INTER_LINEAR )

    # Draw the ROI on the image
    cv2.rectangle( image, calibration['roi'][:2], calibration['roi'][2:], (0,0,255), 2 )

    # Display the resulting image
    cv2.imshow( 'USB Camera', image )

    # Keyboard interruption
    key = cv2.waitKey( 1 ) & 0xFF

    # Escape : quit the application
    if key == 27 : break

# Release the camera
camera.release()

# Close OpenCV windows
cv2.destroyAllWindows()
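A sketch of how a matching calibration.pkl could be produced; the camera matrix and distortion coefficients below are placeholder values standing in for real cv2.calibrateCamera results, and the key names mirror what the script above reads:

    import pickle
    import numpy as np
    import cv2

    w, h = 640, 480
    camera_matrix = np.array([[600.0, 0, w / 2], [0, 600.0, h / 2], [0, 0, 1]])  # placeholder
    dist_coefs = np.zeros(5)  # placeholder
    new_matrix, roi = cv2.getOptimalNewCameraMatrix(camera_matrix, dist_coefs, (w, h), 1, (w, h))
    map1, map2 = cv2.initUndistortRectifyMap(camera_matrix, dist_coefs, None,
                                             new_matrix, (w, h), cv2.CV_16SC2)
    with open('calibration.pkl', 'wb') as f:
        pickle.dump({'undistort_map': (map1, map2), 'roi': roi}, f)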
|
Add a script to show undistorted images from the USB camera.
|
Add a script to show undistorted images from the USB camera.
|
Python
|
mit
|
microy/RobotVision,microy/RobotVision
|
eb7dc7690ecd9f6fad5928057b1ec078a799dda4
|
icekit_events/migrations/0025_auto_20170519_1327.py
|
icekit_events/migrations/0025_auto_20170519_1327.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('icekit_events', '0024_auto_20170320_1824'),
    ]

    operations = [
        migrations.AddField(
            model_name='eventbase',
            name='admin_notes',
            field=models.TextField(help_text=b"Administrator's notes about this content", blank=True),
        ),
        migrations.AddField(
            model_name='eventbase',
            name='brief',
            field=models.TextField(help_text=b'A document brief describing the purpose of this content', blank=True),
        ),
    ]
|
Update DB migrations following upstream change in ICEkit
|
Update DB migrations following upstream change in ICEkit
The `WorkflowStateMixin` model in django-icekit -- which
is used as a basis for the `EventBase` model -- was updated
with two new fields: `brief`, and `admin_notes`.
This change updates the model in this project to comply
with the upstream changes.
And will hopefully make ICEkit unit tests pass again in Travis.
|
Python
|
mit
|
ic-labs/icekit-events,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/icekit-events,ic-labs/django-icekit,ic-labs/icekit-events,ic-labs/django-icekit
|
Update DB migrations following upstream change in ICEkit
The `WorkflowStateMixin` model in django-icekit -- which
is used as a basis for the `EventBase` model -- was updated
with two new fields: `brief`, and `admin_notes`.
This change updates the model in this project to comply
with the upstream changes.
And will hopefully make ICEkit unit tests pass again in Travis.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('icekit_events', '0024_auto_20170320_1824'),
]
operations = [
migrations.AddField(
model_name='eventbase',
name='admin_notes',
field=models.TextField(help_text=b"Administrator's notes about this content", blank=True),
),
migrations.AddField(
model_name='eventbase',
name='brief',
field=models.TextField(help_text=b'A document brief describing the purpose of this content', blank=True),
),
]
|
<commit_before><commit_msg>Update DB migrations following upstream change in ICEkit
The `WorkflowStateMixin` model in django-icekit -- which
is used as a basis for the `EventBase` model -- was updated
with two new fields: `brief`, and `admin_notes`.
This change updates the model in this project to comply
with the upstream changes.
And will hopefully make ICEkit unit tests pass again in Travis.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('icekit_events', '0024_auto_20170320_1824'),
]
operations = [
migrations.AddField(
model_name='eventbase',
name='admin_notes',
field=models.TextField(help_text=b"Administrator's notes about this content", blank=True),
),
migrations.AddField(
model_name='eventbase',
name='brief',
field=models.TextField(help_text=b'A document brief describing the purpose of this content', blank=True),
),
]
|
Update DB migrations following upstream change in ICEkit
The `WorkflowStateMixin` model in django-icekit -- which
is used as a basis for the `EventBase` model -- was updated
with two new fields: `brief`, and `admin_notes`.
This change updates the model in this project to comply
with the upstream changes.
And will hopefully make ICEkit unit tests pass again in Travis.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('icekit_events', '0024_auto_20170320_1824'),
]
operations = [
migrations.AddField(
model_name='eventbase',
name='admin_notes',
field=models.TextField(help_text=b"Administrator's notes about this content", blank=True),
),
migrations.AddField(
model_name='eventbase',
name='brief',
field=models.TextField(help_text=b'A document brief describing the purpose of this content', blank=True),
),
]
|
<commit_before><commit_msg>Update DB migrations following upstream change in ICEkit
The `WorkflowStateMixin` model in django-icekit -- which
is used as a basis for the `EventBase` model -- was updated
with two new fields: `brief`, and `admin_notes`.
This change updates the model in this project to comply
with the upstream changes.
And will hopefully make ICEkit unit tests pass again in Travis.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('icekit_events', '0024_auto_20170320_1824'),
]
operations = [
migrations.AddField(
model_name='eventbase',
name='admin_notes',
field=models.TextField(help_text=b"Administrator's notes about this content", blank=True),
),
migrations.AddField(
model_name='eventbase',
name='brief',
field=models.TextField(help_text=b'A document brief describing the purpose of this content', blank=True),
),
]
|
|
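A rough sketch of the model-side counterpart to the migration above, assuming the upstream WorkflowStateMixin simply declares these two text fields; the mixin body is illustrative, not ICEkit's actual source.
from django.db import models
class WorkflowStateMixin(models.Model):
    # Field definitions mirror the AddField operations in the migration.
    brief = models.TextField(
        blank=True,
        help_text='A document brief describing the purpose of this content')
    admin_notes = models.TextField(
        blank=True,
        help_text="Administrator's notes about this content")
    class Meta:
        abstract = True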
19280f953b3eac8231466b4cceae7160737ae0e1
|
tests/test_text.py
|
tests/test_text.py
|
"""Unit tests for straight text output."""
import unittest
import utils
class TestText(utils.TestCase):
def test_short_string(self):
self.assertEqual(self.render('{% template %}short'), 'short')
self.assertEqual(self.render('{% template %}short\n2'), 'short\n2')
self.assertEqual(self.render('{% template %}longer but not multiline and quick brown foxes jump over lazy dogs'),
'longer but not multiline and quick brown foxes jump over lazy dogs')
def test_long_string(self):
content = r"""This is a longer string
which definitely should
be broken, and it should
should work fine, as should\nescapes."""
self.assertEqual(self.render('{% template %}' + content), content)
content = content.replace(' it ', ' """ ')
self.assertEqual(self.render('{% template %}' + content), content)
content = '"""' + content + '"""'
self.assertEqual(self.render('{% template %}' + content), content)
def test_before_after_output(self):
self.assertEqual(self.render('{% template %}a{{"b"}}c'), 'abc')
if __name__ == '__main__':
unittest.main()
|
Add unit tests for straight text output
|
Add unit tests for straight text output
|
Python
|
bsd-3-clause
|
benhoyt/symplate
|
Add unit tests for straight text output
|
"""Unit tests for straight text output."""
import unittest
import utils
class TestText(utils.TestCase):
def test_short_string(self):
self.assertEqual(self.render('{% template %}short'), 'short')
self.assertEqual(self.render('{% template %}short\n2'), 'short\n2')
self.assertEqual(self.render('{% template %}longer but not multiline and quick brown foxes jump over lazy dogs'),
'longer but not multiline and quick brown foxes jump over lazy dogs')
def test_long_string(self):
content = r"""This is a longer string
which definitely should
be broken, and it should
should work fine, as should\nescapes."""
self.assertEqual(self.render('{% template %}' + content), content)
content = content.replace(' it ', ' """ ')
self.assertEqual(self.render('{% template %}' + content), content)
content = '"""' + content + '"""'
self.assertEqual(self.render('{% template %}' + content), content)
def test_before_after_output(self):
self.assertEqual(self.render('{% template %}a{{"b"}}c'), 'abc')
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add unit tests for straight text output<commit_after>
|
"""Unit tests for straight text output."""
import unittest
import utils
class TestText(utils.TestCase):
def test_short_string(self):
self.assertEqual(self.render('{% template %}short'), 'short')
self.assertEqual(self.render('{% template %}short\n2'), 'short\n2')
self.assertEqual(self.render('{% template %}longer but not multiline and quick brown foxes jump over lazy dogs'),
'longer but not multiline and quick brown foxes jump over lazy dogs')
def test_long_string(self):
content = r"""This is a longer string
which definitely should
be broken, and it should
should work fine, as should\nescapes."""
self.assertEqual(self.render('{% template %}' + content), content)
content = content.replace(' it ', ' """ ')
self.assertEqual(self.render('{% template %}' + content), content)
content = '"""' + content + '"""'
self.assertEqual(self.render('{% template %}' + content), content)
def test_before_after_output(self):
self.assertEqual(self.render('{% template %}a{{"b"}}c'), 'abc')
if __name__ == '__main__':
unittest.main()
|
Add unit tests for straight text output"""Unit tests for straight text output."""
import unittest
import utils
class TestText(utils.TestCase):
def test_short_string(self):
self.assertEqual(self.render('{% template %}short'), 'short')
self.assertEqual(self.render('{% template %}short\n2'), 'short\n2')
self.assertEqual(self.render('{% template %}longer but not multiline and quick brown foxes jump over lazy dogs'),
'longer but not multiline and quick brown foxes jump over lazy dogs')
def test_long_string(self):
content = r"""This is a longer string
which definitely should
be broken, and it should
should work fine, as should\nescapes."""
self.assertEqual(self.render('{% template %}' + content), content)
content = content.replace(' it ', ' """ ')
self.assertEqual(self.render('{% template %}' + content), content)
content = '"""' + content + '"""'
self.assertEqual(self.render('{% template %}' + content), content)
def test_before_after_output(self):
self.assertEqual(self.render('{% template %}a{{"b"}}c'), 'abc')
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add unit tests for straight text output<commit_after>"""Unit tests for straight text output."""
import unittest
import utils
class TestText(utils.TestCase):
def test_short_string(self):
self.assertEqual(self.render('{% template %}short'), 'short')
self.assertEqual(self.render('{% template %}short\n2'), 'short\n2')
self.assertEqual(self.render('{% template %}longer but not multiline and quick brown foxes jump over lazy dogs'),
'longer but not multiline and quick brown foxes jump over lazy dogs')
def test_long_string(self):
content = r"""This is a longer string
which definitely should
be broken, and it should
should work fine, as should\nescapes."""
self.assertEqual(self.render('{% template %}' + content), content)
content = content.replace(' it ', ' """ ')
self.assertEqual(self.render('{% template %}' + content), content)
content = '"""' + content + '"""'
self.assertEqual(self.render('{% template %}' + content), content)
def test_before_after_output(self):
self.assertEqual(self.render('{% template %}a{{"b"}}c'), 'abc')
if __name__ == '__main__':
unittest.main()
|
|
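The tests above lean on a utils.TestCase.render() helper that is not shown. A plausible sketch follows, assuming symplate's Renderer(template_dir).render(name) API; the temp-directory plumbing, and whether render() takes the name with or without the .symp extension, are assumptions here.
import os
import tempfile
import unittest
import symplate
class TestCase(unittest.TestCase):
    def render(self, template_source, *args, **kwargs):
        # Write the template string to a temp dir so Renderer can load it.
        template_dir = tempfile.mkdtemp()
        with open(os.path.join(template_dir, 'under_test.symp'), 'w') as f:
            f.write(template_source)
        renderer = symplate.Renderer(template_dir)
        return renderer.render('under_test', *args, **kwargs)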
1f142cf73a14e50e11f26d6be2b8ac101504f1b9
|
tests/test_extended_functionality.py
|
tests/test_extended_functionality.py
|
"""Tests for non-core functionality in sandman2."""
from pytest_flask.fixtures import client
exclude_tables = ('Invoice',)  # trailing comma: one-element tuple, not a bare string
def test_pagination(client):
"""Do we return paginated results when a 'page' parameter is provided?"""
response = client.get('/artist?page=2')
assert response.status_code == 200
assert len(response.json['resources']) == 20
assert response.json['resources'][0]['ArtistId'] == 21
|
Clean up tests; add tests for user-defined models and pagination
|
Clean up tests; add tests for user-defined models and pagination
|
Python
|
apache-2.0
|
jeffknupp/sandman2,jeffknupp/sandman2,jeffknupp/sandman2
|
Clean up tests; add tests for user-defined models and pagination
|
"""Tests for non-core functionality in sandman2."""
from pytest_flask.fixtures import client
exclude_tables = ('Invoice',)  # trailing comma: one-element tuple, not a bare string
def test_pagination(client):
"""Do we return paginated results when a 'page' parameter is provided?"""
response = client.get('/artist?page=2')
assert response.status_code == 200
assert len(response.json['resources']) == 20
assert response.json['resources'][0]['ArtistId'] == 21
|
<commit_before><commit_msg>Clean up tests; add tests for user-defined models and pagination<commit_after>
|
"""Tests for non-core functionality in sandman2."""
from pytest_flask.fixtures import client
exclude_tables = ('Invoice',)  # trailing comma: one-element tuple, not a bare string
def test_pagination(client):
"""Do we return paginated results when a 'page' parameter is provided?"""
response = client.get('/artist?page=2')
assert response.status_code == 200
assert len(response.json['resources']) == 20
assert response.json['resources'][0]['ArtistId'] == 21
|
Clean up tests; add tests for user-defined models and pagination"""Tests for non-core functionality in sandman2."""
from pytest_flask.fixtures import client
exclude_tables = ('Invoice',)  # trailing comma: one-element tuple, not a bare string
def test_pagination(client):
"""Do we return paginated results when a 'page' parameter is provided?"""
response = client.get('/artist?page=2')
assert response.status_code == 200
assert len(response.json['resources']) == 20
assert response.json['resources'][0]['ArtistId'] == 21
|
<commit_before><commit_msg>Clean up tests; add tests for user-defined models and pagination<commit_after>"""Tests for non-core functionality in sandman2."""
from pytest_flask.fixtures import client
exclude_tables = ('Invoice',)  # trailing comma: one-element tuple, not a bare string
def test_pagination(client):
"""Do we return paginated results when a 'page' parameter is provided?"""
response = client.get('/artist?page=2')
assert response.status_code == 200
assert len(response.json['resources']) == 20
assert response.json['resources'][0]['ArtistId'] == 21
|
|
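A minimal conftest.py sketch for wiring pytest-flask's client fixture to a sandman2 app, assuming the sandman2.get_app() entry point; the SQLite path is a placeholder for the suite's bundled test database.
import pytest
from sandman2 import get_app
@pytest.fixture
def app():
    # Placeholder path; the real suite would point at its test database.
    application = get_app('sqlite+pysqlite:///tests/data/db.sqlite3')
    application.config['TESTING'] = True
    return application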
361f62bbdbcf475a65c3e6e6b04b4c896c58b9bf
|
traffic-monitor.py
|
traffic-monitor.py
|
#!/usr/bin/env python3
import json
import requests
# Configuration
# To create an authentication key, see
# https://msdn.microsoft.com/en-ca/library/ff701720.aspx
bing_maps_auth_key = ""
# Coordinates of the bounding box where traffic incidents are to be monitored
# See https://msdn.microsoft.com/en-us/library/ff701726.aspx
coordinate_southwest = "45.219, -122.325"
coordinate_northeast = "46.610, -122.107"
# For the possible values and their explanations, see
# https://msdn.microsoft.com/en-ca/library/hh441726.aspx
severity = "1, 2, 3, 4"
type = "1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11"
def get_traffic_data():
map_area = ",".join([coordinate_southwest, coordinate_northeast])
url = "http://dev.virtualearth.net/REST/v1/Traffic/Incidents/" + map_area
req_params = {}
req_params["severity"] = severity
req_params["type"] = type
req_params["key"] = bing_maps_auth_key
return requests.get(url, params=req_params)
if __name__ == "__main__":
response = get_traffic_data()
response_body = response.json()
print(json.dumps(response_body, sort_keys=True, indent=4))
|
Add script with basic traffic data retrieval
|
Add script with basic traffic data retrieval
|
Python
|
mit
|
jleung51/scripts,jleung51/scripts,jleung51/scripts
|
Add script with basic traffic data retrieval
|
#!/usr/bin/env python3
import json
import requests
# Configuration
# To create an authentication key, see
# https://msdn.microsoft.com/en-ca/library/ff701720.aspx
bing_maps_auth_key = ""
# Coordinates of the bounding box where traffic incidents are to be monitored
# See https://msdn.microsoft.com/en-us/library/ff701726.aspx
coordinate_southwest = "45.219, -122.325"
coordinate_northeast = "46.610, -122.107"
# For the possible values and their explanations, see
# https://msdn.microsoft.com/en-ca/library/hh441726.aspx
severity = "1, 2, 3, 4"
type = "1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11"
def get_traffic_data():
map_area = ",".join([coordinate_southwest, coordinate_northeast])
url = "http://dev.virtualearth.net/REST/v1/Traffic/Incidents/" + map_area
req_params = {}
req_params["severity"] = severity
req_params["type"] = type
req_params["key"] = bing_maps_auth_key
return requests.get(url, params=req_params)
if __name__ == "__main__":
response = get_traffic_data()
response_body = response.json()
print(json.dumps(response_body, sort_keys=True, indent=4))
|
<commit_before><commit_msg>Add script with basic traffic data retrieval<commit_after>
|
#!/usr/bin/env python3
import json
import requests
# Configuration
# To create an authentication key, see
# https://msdn.microsoft.com/en-ca/library/ff701720.aspx
bing_maps_auth_key = ""
# Coordinates of the bounding box where traffic incidents are to be monitored
# See https://msdn.microsoft.com/en-us/library/ff701726.aspx
coordinate_southwest = "45.219, -122.325"
coordinate_northeast = "46.610, -122.107"
# For the possible values and their explanations, see
# https://msdn.microsoft.com/en-ca/library/hh441726.aspx
severity = "1, 2, 3, 4"
type = "1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11"
def get_traffic_data():
map_area = ",".join([coordinate_southwest, coordinate_northeast])
url = "http://dev.virtualearth.net/REST/v1/Traffic/Incidents/" + map_area
req_params = {}
req_params["severity"] = severity
req_params["type"] = type
req_params["key"] = bing_maps_auth_key
return requests.get(url, params=req_params)
if __name__ == "__main__":
response = get_traffic_data()
response_body = response.json()
print(json.dumps(response_body, sort_keys=True, indent=4))
|
Add script with basic traffic data retrieval#!/usr/bin/env python3
import json
import requests
# Configuration
# To create an authentication key, see
# https://msdn.microsoft.com/en-ca/library/ff701720.aspx
bing_maps_auth_key = ""
# Coordinates of the bounding box where traffic incidents are to be monitored
# See https://msdn.microsoft.com/en-us/library/ff701726.aspx
coordinate_southwest = "45.219, -122.325"
coordinate_northeast = "46.610, -122.107"
# For the possible values and their explanations, see
# https://msdn.microsoft.com/en-ca/library/hh441726.aspx
severity = "1, 2, 3, 4"
type = "1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11"
def get_traffic_data():
map_area = ",".join([coordinate_southwest, coordinate_northeast])
url = "http://dev.virtualearth.net/REST/v1/Traffic/Incidents/" + map_area
req_params = {}
req_params["severity"] = severity
req_params["type"] = type
req_params["key"] = bing_maps_auth_key
return requests.get(url, params=req_params)
if __name__ == "__main__":
response = get_traffic_data()
response_body = response.json()
print(json.dumps(response_body, sort_keys=True, indent=4))
|
<commit_before><commit_msg>Add script with basic traffic data retrieval<commit_after>#!/usr/bin/env python3
import json
import requests
# Configuration
# To create an authentication key, see
# https://msdn.microsoft.com/en-ca/library/ff701720.aspx
bing_maps_auth_key = ""
# Coordinates of the bounding box where traffic incidents are to be monitored
# See https://msdn.microsoft.com/en-us/library/ff701726.aspx
coordinate_southwest = "45.219, -122.325"
coordinate_northeast = "46.610, -122.107"
# For the possible values and their explanations, see
# https://msdn.microsoft.com/en-ca/library/hh441726.aspx
severity = "1, 2, 3, 4"
type = "1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11"
def get_traffic_data():
map_area = ",".join([coordinate_southwest, coordinate_northeast])
url = "http://dev.virtualearth.net/REST/v1/Traffic/Incidents/" + map_area
req_params = {}
req_params["severity"] = severity
req_params["type"] = type
req_params["key"] = bing_maps_auth_key
return requests.get(url, params=req_params)
if __name__ == "__main__":
response = get_traffic_data()
response_body = response.json()
print(json.dumps(response_body, sort_keys=True, indent=4))
|
|
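A small follow-on sketch for pulling the incident list out of the response printed above. The 'resourceSets'/'resources' path follows the documented shape of Bing Maps REST responses, but treat the exact field names as an assumption here.
def extract_incidents(response_body):
    # Bing wraps results in a list of resource sets, each with its resources.
    incidents = []
    for resource_set in response_body.get('resourceSets', []):
        incidents.extend(resource_set.get('resources', []))
    return incidents
# Example: incidents = extract_incidents(response.json())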
a7b9860538c50e58a06f751b5f9eecde575fae2a
|
example/quickstart/show_time_2.py
|
example/quickstart/show_time_2.py
|
#!/usr/bin/env python
import asyncio
import datetime
import random
import websockets
CONNECTIONS = set()
async def register(websocket):
CONNECTIONS.add(websocket)
try:
await websocket.wait_closed()
finally:
CONNECTIONS.remove(websocket)
async def show_time():
while True:
message = datetime.datetime.utcnow().isoformat() + "Z"
websockets.broadcast(CONNECTIONS, message)
await asyncio.sleep(random.random() * 2 + 1)
async def main():
async with websockets.serve(register, "localhost", 5678):
await show_time()
if __name__ == "__main__":
asyncio.run(main())
|
Add file forgotten in 731ad8c.
|
Add file forgotten in 731ad8c.
|
Python
|
bsd-3-clause
|
aaugustin/websockets,aaugustin/websockets,aaugustin/websockets,aaugustin/websockets
|
Add file forgotten in 731ad8c.
|
#!/usr/bin/env python
import asyncio
import datetime
import random
import websockets
CONNECTIONS = set()
async def register(websocket):
CONNECTIONS.add(websocket)
try:
await websocket.wait_closed()
finally:
CONNECTIONS.remove(websocket)
async def show_time():
while True:
message = datetime.datetime.utcnow().isoformat() + "Z"
websockets.broadcast(CONNECTIONS, message)
await asyncio.sleep(random.random() * 2 + 1)
async def main():
async with websockets.serve(register, "localhost", 5678):
await show_time()
if __name__ == "__main__":
asyncio.run(main())
|
<commit_before><commit_msg>Add file forgotten in 731ad8c.<commit_after>
|
#!/usr/bin/env python
import asyncio
import datetime
import random
import websockets
CONNECTIONS = set()
async def register(websocket):
CONNECTIONS.add(websocket)
try:
await websocket.wait_closed()
finally:
CONNECTIONS.remove(websocket)
async def show_time():
while True:
message = datetime.datetime.utcnow().isoformat() + "Z"
websockets.broadcast(CONNECTIONS, message)
await asyncio.sleep(random.random() * 2 + 1)
async def main():
async with websockets.serve(register, "localhost", 5678):
await show_time()
if __name__ == "__main__":
asyncio.run(main())
|
Add file forgotten in 731ad8c.#!/usr/bin/env python
import asyncio
import datetime
import random
import websockets
CONNECTIONS = set()
async def register(websocket):
CONNECTIONS.add(websocket)
try:
await websocket.wait_closed()
finally:
CONNECTIONS.remove(websocket)
async def show_time():
while True:
message = datetime.datetime.utcnow().isoformat() + "Z"
websockets.broadcast(CONNECTIONS, message)
await asyncio.sleep(random.random() * 2 + 1)
async def main():
async with websockets.serve(register, "localhost", 5678):
await show_time()
if __name__ == "__main__":
asyncio.run(main())
|
<commit_before><commit_msg>Add file forgotten in 731ad8c.<commit_after>#!/usr/bin/env python
import asyncio
import datetime
import random
import websockets
CONNECTIONS = set()
async def register(websocket):
CONNECTIONS.add(websocket)
try:
await websocket.wait_closed()
finally:
CONNECTIONS.remove(websocket)
async def show_time():
while True:
message = datetime.datetime.utcnow().isoformat() + "Z"
websockets.broadcast(CONNECTIONS, message)
await asyncio.sleep(random.random() * 2 + 1)
async def main():
async with websockets.serve(register, "localhost", 5678):
await show_time()
if __name__ == "__main__":
asyncio.run(main())
|
|
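A matching client sketch for the broadcast server above, assuming the same localhost:5678 endpoint; it is handy for eyeballing the timestamps the server pushes.
import asyncio
import websockets
async def watch_time():
    async with websockets.connect('ws://localhost:5678') as websocket:
        async for message in websocket:  # one timestamp per broadcast
            print(message)
if __name__ == '__main__':
    asyncio.run(watch_time())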
0d31f071b5a5ba76f484ffa49e32f34381b44281
|
examples/continuous_recordings.py
|
examples/continuous_recordings.py
|
#!/usr/bin/env python3
# One common issue is that Saleae records traces into memory, which means that
# it can't handle very long captures. This example shows how to use scripting to
# do long recordings over time. There will be brief gaps every time Saleae saves
# the old recording and starts a new one.
import os
import time
import saleae
folder = time.strftime('%Y-%m-%d--%H-%M-%S')
os.mkdir(folder)
s = saleae.Saleae()
# Note: This is a short number of samples. You'll probably want more.
s.set_num_samples(1e6)
for i in range(5):
path = os.path.abspath(os.path.join(folder, str(i)))
s.capture_to_file(path)
|
Add example for repeated / continuous recordings
|
Add example for repeated / continuous recordings
|
Python
|
apache-2.0
|
ppannuto/python-saleae
|
Add example for repeated / continuous recordings
|
#!/usr/bin/env python3
# One common issue is that Saleae records traces into memory, which means that
# it can't handle very long captures. This example shows how to use scripting to
# do long recordings over time. There will be brief gaps every time Saleae saves
# the old recording and starts a new one.
import os
import time
import saleae
folder = time.strftime('%Y-%m-%d--%H-%M-%S')
os.mkdir(folder)
s = saleae.Saleae()
# Note: This is a short number of samples. You'll probably want more.
s.set_num_samples(1e6)
for i in range(5):
path = os.path.abspath(os.path.join(folder, str(i)))
s.capture_to_file(path)
|
<commit_before><commit_msg>Add example for repeated / continuous recordings<commit_after>
|
#!/usr/bin/env python3
# One common issue is that Saleae records traces into memory, which means that
# it can't handle very long captures. This example shows how to use scripting to
# do long recordings over time. There will be brief gaps every time Saleae saves
# the old recording and starts a new one.
import os
import time
import saleae
folder = time.strftime('%Y-%m-%d--%H-%M-%S')
os.mkdir(folder)
s = saleae.Saleae()
# Note: This is a short number of samples. You'll probably want more.
s.set_num_samples(1e6)
for i in range(5):
path = os.path.abspath(os.path.join(folder, str(i)))
s.capture_to_file(path)
|
Add example for repeated / continuous recordings#!/usr/bin/env python3
# One common issue is that Saleae records traces into memory, which means that
# it can't handle very long captures. This example shows how to use scripting to
# do long recordings over time. There will be brief gaps every time Saleae saves
# the old recording and starts a new one.
import os
import time
import saleae
folder = time.strftime('%Y-%m-%d--%H-%M-%S')
os.mkdir(folder)
s = saleae.Saleae()
# Note: This is a short number of samples. You'll probably want more.
s.set_num_samples(1e6)
for i in range(5):
path = os.path.abspath(os.path.join(folder, str(i)))
s.capture_to_file(path)
|
<commit_before><commit_msg>Add example for repeated / continuous recordings<commit_after>#!/usr/bin/env python3
# One common issue is that Saleae records traces into memory, which means that
# it can't handle very long captures. This example shows how to use scripting to
# do long recordings over time. There will be brief gaps every time Saleae saves
# the old recording and starts a new one.
import os
import time
import saleae
folder = time.strftime('%Y-%m-%d--%H-%M-%S')
os.mkdir(folder)
s = saleae.Saleae()
# Note: This is a short number of samples. You'll probably want more.
s.set_num_samples(1e6)
for i in range(5):
path = os.path.abspath(os.path.join(folder, str(i)))
s.capture_to_file(path)
|
|
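A variation on the loop above that timestamps each capture instead of numbering it, which makes the gap between back-to-back recordings easier to spot later; it uses only the saleae calls already shown.
import os
import time
import saleae
s = saleae.Saleae()
s.set_num_samples(1e6)  # still a short capture; raise this for real use
for _ in range(5):
    # Name each file by its start time instead of a running index.
    name = time.strftime('%Y-%m-%d--%H-%M-%S')
    s.capture_to_file(os.path.abspath(name))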
0c91b4302ca4019ab1ea7c023b592c177dddc4fe
|
stdnum/fi/veronumero.py
|
stdnum/fi/veronumero.py
|
# veronumero.py - functions for handling Finnish individual tax numbers
# coding: utf-8
#
# Copyright (C) 2017 Holvi Payment Services Oy
# Copyright (C) 2017 Arthur de Jong
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""
Veronumero (Finnish individual tax number).
The Veronumero is an individual tax number that is assigned to workers in the
construction industry in Finland. The number is separate from the HETU and is
a 12 digit number without any embedded information such as birth dates.
More information:
* https://www.vero.fi/en/detailed-guidance/guidance/48791/individual_tax_numbers__instructions_fo/
* https://prosentti.vero.fi/Veronumerorekisteri/Tarkistus/VeronumeronTarkistus.aspx
>>> validate('123456789123')
'123456789123'
>>> validate('12345678912A')
Traceback (most recent call last):
...
InvalidFormat: ...
>>> validate('123456789')
Traceback (most recent call last):
...
InvalidLength: ...
"""
from stdnum.exceptions import *
from stdnum.util import clean
def compact(number):
"""Convert the Veronumero to the minimal representation. This strips
surrounding whitespace and removes separators."""
return clean(number, ' ').strip()
def validate(number):
"""Checks to see if the number provided is a valid tax number. This
checks the length and formatting."""
number = compact(number)
if not number.isdigit():
raise InvalidFormat()
if len(number) != 12:
raise InvalidLength()
# there is no known check digit validation
return number
def is_valid(number):
"""Checks to see if the number provided is a valid tax number. This
checks the length and formatting."""
try:
return bool(validate(number))
except ValidationError:
return False
|
Implement Finnish individual tax number validation
|
Implement Finnish individual tax number validation
|
Python
|
lgpl-2.1
|
arthurdejong/python-stdnum,holvi/python-stdnum,arthurdejong/python-stdnum,holvi/python-stdnum,holvi/python-stdnum,arthurdejong/python-stdnum
|
Implement Finnish individual tax number validation
|
# veronumero.py - functions for handling Finnish individual tax numbers
# coding: utf-8
#
# Copyright (C) 2017 Holvi Payment Services Oy
# Copyright (C) 2017 Arthur de Jong
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""
Veronumero (Finnish individual tax number).
The Veronumero is an individual tax number that is assigned to workers in the
construction industry in Finland. The number is separate from the HETU and is
a 12 digit number without any embedded information such as birth dates.
More information:
* https://www.vero.fi/en/detailed-guidance/guidance/48791/individual_tax_numbers__instructions_fo/
* https://prosentti.vero.fi/Veronumerorekisteri/Tarkistus/VeronumeronTarkistus.aspx
>>> validate('123456789123')
'123456789123'
>>> validate('12345678912A')
Traceback (most recent call last):
...
InvalidFormat: ...
>>> validate('123456789')
Traceback (most recent call last):
...
InvalidLength: ...
"""
from stdnum.exceptions import *
from stdnum.util import clean
def compact(number):
"""Convert the Veronumero to the minimal representation. This strips
surrounding whitespace and removes separators."""
return clean(number, ' ').strip()
def validate(number):
"""Checks to see if the number provided is a valid tax number. This
checks the length and formatting."""
number = compact(number)
if not number.isdigit():
raise InvalidFormat()
if len(number) != 12:
raise InvalidLength()
# there is no known check digit validation
return number
def is_valid(number):
"""Checks to see if the number provided is a valid tax number. This
checks the length and formatting."""
try:
return bool(validate(number))
except ValidationError:
return False
|
<commit_before><commit_msg>Implement Finnish individual tax number validation<commit_after>
|
# veronumero.py - functions for handling Finnish individual tax numbers
# coding: utf-8
#
# Copyright (C) 2017 Holvi Payment Services Oy
# Copyright (C) 2017 Arthur de Jong
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""
Veronumero (Finnish individual tax number).
The Veronumero is an individual tax number that is assigned to workers in the
construction industry in Finland. The number is separate from the HETU and is
a 12 digit number without any embedded information such as birth dates.
More information:
* https://www.vero.fi/en/detailed-guidance/guidance/48791/individual_tax_numbers__instructions_fo/
* https://prosentti.vero.fi/Veronumerorekisteri/Tarkistus/VeronumeronTarkistus.aspx
>>> validate('123456789123')
'123456789123'
>>> validate('12345678912A')
Traceback (most recent call last):
...
InvalidFormat: ...
>>> validate('123456789')
Traceback (most recent call last):
...
InvalidLength: ...
"""
from stdnum.exceptions import *
from stdnum.util import clean
def compact(number):
"""Convert the Veronumero to the minimal representation. This strips
surrounding whitespace and removes separators."""
return clean(number, ' ').strip()
def validate(number):
"""Checks to see if the number provided is a valid tax number. This
checks the length and formatting."""
number = compact(number)
if not number.isdigit():
raise InvalidFormat()
if len(number) != 12:
raise InvalidLength()
# there is no known check digit validation
return number
def is_valid(number):
"""Checks to see if the number provided is a valid tax number. This
checks the length and formatting."""
try:
return bool(validate(number))
except ValidationError:
return False
|
Implement Finnish individual tax number validation# veronumero.py - functions for handling Finnish individual tax numbers
# coding: utf-8
#
# Copyright (C) 2017 Holvi Payment Services Oy
# Copyright (C) 2017 Arthur de Jong
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""
Veronumero (Finnish individual tax number).
The Veronumero is an individual tax number that is assigned to workers in the
construction industry in Finland. The number is separate from the HETU and is
a 12 digit number without any embedded information such as birth dates.
More information:
* https://www.vero.fi/en/detailed-guidance/guidance/48791/individual_tax_numbers__instructions_fo/
* https://prosentti.vero.fi/Veronumerorekisteri/Tarkistus/VeronumeronTarkistus.aspx
>>> validate('123456789123')
'123456789123'
>>> validate('12345678912A')
Traceback (most recent call last):
...
InvalidFormat: ...
>>> validate('123456789')
Traceback (most recent call last):
...
InvalidLength: ...
"""
from stdnum.exceptions import *
from stdnum.util import clean
def compact(number):
"""Convert the Veronumero to the minimal representation. This strips
surrounding whitespace and removes separators."""
return clean(number, ' ').strip()
def validate(number):
"""Checks to see if the number provided is a valid tax number. This
checks the length and formatting."""
number = compact(number)
if not number.isdigit():
raise InvalidFormat()
if len(number) != 12:
raise InvalidLength()
# there is no known check digit validation
return number
def is_valid(number):
"""Checks to see if the number provided is a valid tax number. This
checks the length and formatting."""
try:
return bool(validate(number))
except ValidationError:
return False
|
<commit_before><commit_msg>Implement Finnish individual tax number validation<commit_after># veronumero.py - functions for handling Finnish individual tax numbers
# coding: utf-8
#
# Copyright (C) 2017 Holvi Payment Services Oy
# Copyright (C) 2017 Arthur de Jong
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""
Veronumero (Finnish individual tax number).
The Veronumero is an individual tax number that is assigned to workers in the
construction industry in Finland. The number is separate from the HETU and is
a 12 digit number without any embedded information such as birth dates.
More information:
* https://www.vero.fi/en/detailed-guidance/guidance/48791/individual_tax_numbers__instructions_fo/
* https://prosentti.vero.fi/Veronumerorekisteri/Tarkistus/VeronumeronTarkistus.aspx
>>> validate('123456789123')
'123456789123'
>>> validate('12345678912A')
Traceback (most recent call last):
...
InvalidFormat: ...
>>> validate('123456789')
Traceback (most recent call last):
...
InvalidLength: ...
"""
from stdnum.exceptions import *
from stdnum.util import clean
def compact(number):
"""Convert the Veronumero to the minimal representation. This strips
surrounding whitespace and removes separators."""
return clean(number, ' ').strip()
def validate(number):
"""Checks to see if the number provided is a valid tax number. This
checks the length and formatting."""
number = compact(number)
if not number.isdigit():
raise InvalidFormat()
if len(number) != 12:
raise InvalidLength()
# there is no known check digit validation
return number
def is_valid(number):
"""Checks to see if the number provided is a valid tax number. This
checks the length and formatting."""
try:
return bool(validate(number))
except ValidationError:
return False
|
|
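Quick usage sketch for the module above; the second number is deliberately malformed (it contains a letter) to show the validate()/is_valid() split, and compact() strips the spaces.
from stdnum.fi import veronumero
print(veronumero.is_valid('123456789123'))     # True
print(veronumero.is_valid('12345678912A'))     # False
print(veronumero.compact(' 1234 5678 9123 '))  # '123456789123'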
58a60d2380bd0a9768b1ca4eaee713c31ea8790e
|
tests/test_base.py
|
tests/test_base.py
|
import pytest
from celery.schedules import crontab
from scrapi import _Registry
from scrapi.base import BaseHarvester
from scrapi.base import HarvesterMeta
@pytest.fixture
def mock_registry(monkeypatch):
registry = _Registry()
monkeypatch.setattr('scrapi.base.registry', registry)
return registry
@pytest.fixture
def test_harvester():
class TestHarvester(BaseHarvester):
short_name = 'test'
long_name = 'test'
file_format = 'test'
harvest = lambda x: x
normalize = lambda x: x
return TestHarvester
class TestHarvesterMeta(object):
def test_meta_records(self, mock_registry):
class TestClass(object):
__metaclass__ = HarvesterMeta
short_name = 'test'
assert isinstance(mock_registry['test'], TestClass)
def test_beat_schedule(self, mock_registry):
assert mock_registry.beat_schedule == {}
def test_beat_schedule_adds(self, mock_registry):
class TestClass(object):
__metaclass__ = HarvesterMeta
short_name = 'test'
run_at = {
'hour': 1,
'minute': 1,
'day_of_week': 'mon',
}
assert mock_registry.beat_schedule == {
'run_test': {
'args': ['test'],
'task': 'scrapi.tasks.run_harvester',
'schedule': crontab(**TestClass.run_at),
}
}
class TestHarvesterBase(object):
    def test_requires_short_name(self, monkeypatch, test_harvester):
        # Remove the attribute so instantiation should fail with a clear error.
        monkeypatch.delattr(test_harvester, 'short_name')
        with pytest.raises(TypeError) as e:
            test_harvester()
        assert 'short_name' in e.value.message
    def test_requires_long_name(self):
        # Same idea for long_name: define a harvester missing only that field.
        class TestHarvester(BaseHarvester):
            short_name = 'test'
            file_format = 'test'
            harvest = lambda x: x
            normalize = lambda x: x
        with pytest.raises(TypeError) as e:
            TestHarvester()
        assert 'long_name' in e.value.message
|
Add some tests for base.py
|
Add some tests for base.py
|
Python
|
apache-2.0
|
fabianvf/scrapi,ostwald/scrapi,CenterForOpenScience/scrapi,mehanig/scrapi,erinspace/scrapi,alexgarciac/scrapi,felliott/scrapi,erinspace/scrapi,CenterForOpenScience/scrapi,jeffreyliu3230/scrapi,mehanig/scrapi,icereval/scrapi,felliott/scrapi,fabianvf/scrapi
|
Add some tests for base.py
|
import pytest
from celery.schedules import crontab
from scrapi import _Registry
from scrapi.base import BaseHarvester
from scrapi.base import HarvesterMeta
@pytest.fixture
def mock_registry(monkeypatch):
registry = _Registry()
monkeypatch.setattr('scrapi.base.registry', registry)
return registry
@pytest.fixture
def test_harvester():
class TestHarvester(BaseHarvester):
short_name = 'test'
long_name = 'test'
file_format = 'test'
harvest = lambda x: x
normalize = lambda x: x
return TestHarvester
class TestHarvesterMeta(object):
def test_meta_records(self, mock_registry):
class TestClass(object):
__metaclass__ = HarvesterMeta
short_name = 'test'
assert isinstance(mock_registry['test'], TestClass)
def test_beat_schedule(self, mock_registry):
assert mock_registry.beat_schedule == {}
def test_beat_schedule_adds(self, mock_registry):
class TestClass(object):
__metaclass__ = HarvesterMeta
short_name = 'test'
run_at = {
'hour': 1,
'minute': 1,
'day_of_week': 'mon',
}
assert mock_registry.beat_schedule == {
'run_test': {
'args': ['test'],
'task': 'scrapi.tasks.run_harvester',
'schedule': crontab(**TestClass.run_at),
}
}
class TestHarvesterBase(object):
    def test_requires_short_name(self, monkeypatch, test_harvester):
        # Remove the attribute so instantiation should fail with a clear error.
        monkeypatch.delattr(test_harvester, 'short_name')
        with pytest.raises(TypeError) as e:
            test_harvester()
        assert 'short_name' in e.value.message
    def test_requires_long_name(self):
        # Same idea for long_name: define a harvester missing only that field.
        class TestHarvester(BaseHarvester):
            short_name = 'test'
            file_format = 'test'
            harvest = lambda x: x
            normalize = lambda x: x
        with pytest.raises(TypeError) as e:
            TestHarvester()
        assert 'long_name' in e.value.message
|
<commit_before><commit_msg>Add some tests for base.py<commit_after>
|
import pytest
from celery.schedules import crontab
from scrapi import _Registry
from scrapi.base import BaseHarvester
from scrapi.base import HarvesterMeta
@pytest.fixture
def mock_registry(monkeypatch):
registry = _Registry()
monkeypatch.setattr('scrapi.base.registry', registry)
return registry
@pytest.fixture
def test_harvester():
class TestHarvester(BaseHarvester):
short_name = 'test'
long_name = 'test'
file_format = 'test'
harvest = lambda x: x
normalize = lambda x: x
return TestHarvester
class TestHarvesterMeta(object):
def test_meta_records(self, mock_registry):
class TestClass(object):
__metaclass__ = HarvesterMeta
short_name = 'test'
assert isinstance(mock_registry['test'], TestClass)
def test_beat_schedule(self, mock_registry):
assert mock_registry.beat_schedule == {}
def test_beat_schedule_adds(self, mock_registry):
class TestClass(object):
__metaclass__ = HarvesterMeta
short_name = 'test'
run_at = {
'hour': 1,
'minute': 1,
'day_of_week': 'mon',
}
assert mock_registry.beat_schedule == {
'run_test': {
'args': ['test'],
'task': 'scrapi.tasks.run_harvester',
'schedule': crontab(**TestClass.run_at),
}
}
class TestHarvesterBase(object):
    def test_requires_short_name(self, monkeypatch, test_harvester):
        # Remove the attribute so instantiation should fail with a clear error.
        monkeypatch.delattr(test_harvester, 'short_name')
        with pytest.raises(TypeError) as e:
            test_harvester()
        assert 'short_name' in e.value.message
    def test_requires_long_name(self):
        # Same idea for long_name: define a harvester missing only that field.
        class TestHarvester(BaseHarvester):
            short_name = 'test'
            file_format = 'test'
            harvest = lambda x: x
            normalize = lambda x: x
        with pytest.raises(TypeError) as e:
            TestHarvester()
        assert 'long_name' in e.value.message
|
Add some tests for base.pyimport pytest
from celery.schedules import crontab
from scrapi import _Registry
from scrapi.base import BaseHarvester
from scrapi.base import HarvesterMeta
@pytest.fixture
def mock_registry(monkeypatch):
registry = _Registry()
monkeypatch.setattr('scrapi.base.registry', registry)
return registry
@pytest.fixture
def test_harvester():
class TestHarvester(BaseHarvester):
short_name = 'test'
long_name = 'test'
file_format = 'test'
harvest = lambda x: x
normalize = lambda x: x
return TestHarvester
class TestHarvesterMeta(object):
def test_meta_records(self, mock_registry):
class TestClass(object):
__metaclass__ = HarvesterMeta
short_name = 'test'
assert isinstance(mock_registry['test'], TestClass)
def test_beat_schedule(self, mock_registry):
assert mock_registry.beat_schedule == {}
def test_beat_schedule_adds(self, mock_registry):
class TestClass(object):
__metaclass__ = HarvesterMeta
short_name = 'test'
run_at = {
'hour': 1,
'minute': 1,
'day_of_week': 'mon',
}
assert mock_registry.beat_schedule == {
'run_test': {
'args': ['test'],
'task': 'scrapi.tasks.run_harvester',
'schedule': crontab(**TestClass.run_at),
}
}
class TestHarvesterBase(object):
    def test_requires_short_name(self, monkeypatch, test_harvester):
        # Remove the attribute so instantiation should fail with a clear error.
        monkeypatch.delattr(test_harvester, 'short_name')
        with pytest.raises(TypeError) as e:
            test_harvester()
        assert 'short_name' in e.value.message
    def test_requires_long_name(self):
        # Same idea for long_name: define a harvester missing only that field.
        class TestHarvester(BaseHarvester):
            short_name = 'test'
            file_format = 'test'
            harvest = lambda x: x
            normalize = lambda x: x
        with pytest.raises(TypeError) as e:
            TestHarvester()
        assert 'long_name' in e.value.message
|
<commit_before><commit_msg>Add some tests for base.py<commit_after>import pytest
from celery.schedules import crontab
from scrapi import _Registry
from scrapi.base import BaseHarvester
from scrapi.base import HarvesterMeta
@pytest.fixture
def mock_registry(monkeypatch):
registry = _Registry()
monkeypatch.setattr('scrapi.base.registry', registry)
return registry
@pytest.fixture
def test_harvester():
class TestHarvester(BaseHarvester):
short_name = 'test'
long_name = 'test'
file_format = 'test'
harvest = lambda x: x
normalize = lambda x: x
return TestHarvester
class TestHarvesterMeta(object):
def test_meta_records(self, mock_registry):
class TestClass(object):
__metaclass__ = HarvesterMeta
short_name = 'test'
assert isinstance(mock_registry['test'], TestClass)
def test_beat_schedule(self, mock_registry):
assert mock_registry.beat_schedule == {}
def test_beat_schedule_adds(self, mock_registry):
class TestClass(object):
__metaclass__ = HarvesterMeta
short_name = 'test'
run_at = {
'hour': 1,
'minute': 1,
'day_of_week': 'mon',
}
assert mock_registry.beat_schedule == {
'run_test': {
'args': ['test'],
'task': 'scrapi.tasks.run_harvester',
'schedule': crontab(**TestClass.run_at),
}
}
class TestHarvesterBase(object):
    def test_requires_short_name(self, monkeypatch, test_harvester):
        # Remove the attribute so instantiation should fail with a clear error.
        monkeypatch.delattr(test_harvester, 'short_name')
        with pytest.raises(TypeError) as e:
            test_harvester()
        assert 'short_name' in e.value.message
    def test_requires_long_name(self):
        # Same idea for long_name: define a harvester missing only that field.
        class TestHarvester(BaseHarvester):
            short_name = 'test'
            file_format = 'test'
            harvest = lambda x: x
            normalize = lambda x: x
        with pytest.raises(TypeError) as e:
            TestHarvester()
        assert 'long_name' in e.value.message
|
|
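A toy, self-contained illustration of the auto-registration pattern the tests above exercise: a metaclass that drops an instance of each subclass into a registry keyed by short_name. It is written with Python 3 metaclass syntax (the tests use the Python 2 __metaclass__ spelling), and the names are illustrative rather than scrapi's real code.
registry = {}
class HarvesterMeta(type):
    def __init__(cls, name, bases, attrs):
        super().__init__(name, bases, attrs)
        # Register an instance of every class declaring a short_name.
        if 'short_name' in attrs:
            registry[attrs['short_name']] = cls()
class Dummy(metaclass=HarvesterMeta):
    short_name = 'dummy'
assert isinstance(registry['dummy'], Dummy)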
d2be813ae6e2549ad36d823e9abdcb4dc5d21d0e
|
tests/test_this.py
|
tests/test_this.py
|
"""tests/test_this.py.
Tests the Zen of Hug
Copyright (C) 2019 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
from hug import this
def test_this():
"""Test to ensure this exposes the ZEN_OF_HUG as a string"""
assert type(this.ZEN_OF_HUG) == str
|
Add test for this module
|
Add test for this module
|
Python
|
mit
|
timothycrosley/hug,timothycrosley/hug,timothycrosley/hug
|
Add test for this module
|
"""tests/test_this.py.
Tests the Zen of Hug
Copyright (C) 2019 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
from hug import this
def test_this():
"""Test to ensure this exposes the ZEN_OF_HUG as a string"""
assert type(this.ZEN_OF_HUG) == str
|
<commit_before><commit_msg>Add test for this module<commit_after>
|
"""tests/test_this.py.
Tests the Zen of Hug
Copyright (C) 2019 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
from hug import this
def test_this():
"""Test to ensure this exposes the ZEN_OF_HUG as a string"""
assert type(this.ZEN_OF_HUG) == str
|
Add test for this module"""tests/test_this.py.
Tests the Zen of Hug
Copyright (C) 2019 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
from hug import this
def test_this():
"""Test to ensure this exposes the ZEN_OF_HUG as a string"""
assert type(this.ZEN_OF_HUG) == str
|
<commit_before><commit_msg>Add test for this module<commit_after>"""tests/test_this.py.
Tests the Zen of Hug
Copyright (C) 2019 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
from hug import this
def test_this():
"""Test to ensure this exposes the ZEN_OF_HUG as a string"""
assert type(this.ZEN_OF_HUG) == str
|
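Minimal usage sketch for the constant the test above checks; it assumes only that hug.this exposes ZEN_OF_HUG as a plain string.
from hug import this
print(this.ZEN_OF_HUG)  # dumps the Zen of Hug text the test type-checks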
|
1ce9502f212a49b13289570bb182d65c3ffcfafe
|
tests/acceptance/test_commits.py
|
tests/acceptance/test_commits.py
|
#!/usr/bin/python
# Copyright 2016 Mender Software AS
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import re
import subprocess
class TestCommits:
def test_commits(self):
# First find which range to check. Include HEAD and exclude all known
# upstream branches.
git_branch = subprocess.check_output(["git", "branch", "-r"])
all_branches = [line.split()[0] for line in git_branch.strip().split('\n')]
# Exclude all non-pull requests.
commit_range = ["HEAD", "--not"]
for branch in all_branches:
# Include all non-origin branches.
if not branch.startswith("origin/"):
continue
# Include branches that have slashes after "origin/" (pull requests).
if re.match("^origin/.*/", branch):
continue
# Include branches that end with "patch-1" style text.
if re.match("^origin/.*patch-[0-9]+$", branch):
continue
# Exclude if no matches above.
commit_range.append(branch)
subprocess.check_call(["3rdparty/mendertesting/check_commits.sh"] + commit_range)
|
Add checking of commits using mendertesting.
|
Add checking of commits using mendertesting.
Changelog: None
Signed-off-by: Kristian Amlie <505e66ae45028a0596c853559221f0b72c1cee21@mender.io>
|
Python
|
apache-2.0
|
bboozzoo/meta-mender,bboozzoo/meta-mender,bboozzoo/meta-mender,bboozzoo/meta-mender
|
Add checking of commits using mendertesting.
Changelog: None
Signed-off-by: Kristian Amlie <505e66ae45028a0596c853559221f0b72c1cee21@mender.io>
|
#!/usr/bin/python
# Copyright 2016 Mender Software AS
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import re
import subprocess
class TestCommits:
def test_commits(self):
# First find which range to check. Include HEAD and exclude all known
# upstream branches.
git_branch = subprocess.check_output(["git", "branch", "-r"])
all_branches = [line.split()[0] for line in git_branch.strip().split('\n')]
# Exclude all non-pull requests.
commit_range = ["HEAD", "--not"]
for branch in all_branches:
# Include all non-origin branches.
if not branch.startswith("origin/"):
continue
# Include branches that have slashes after "origin/" (pull requests).
if re.match("^origin/.*/", branch):
continue
# Include branches that end with "patch-1" style text.
if re.match("^origin/.*patch-[0-9]+$", branch):
continue
# Exclude if no matches above.
commit_range.append(branch)
subprocess.check_call(["3rdparty/mendertesting/check_commits.sh"] + commit_range)
|
<commit_before><commit_msg>Add checking of commits using mendertesting.
Changelog: None
Signed-off-by: Kristian Amlie <505e66ae45028a0596c853559221f0b72c1cee21@mender.io><commit_after>
|
#!/usr/bin/python
# Copyright 2016 Mender Software AS
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import re
import subprocess
class TestCommits:
def test_commits(self):
# First find which range to check. Include HEAD and exclude all known
# upstream branches.
git_branch = subprocess.check_output(["git", "branch", "-r"])
all_branches = [line.split()[0] for line in git_branch.strip().split('\n')]
# Exclude all non-pull requests.
commit_range = ["HEAD", "--not"]
for branch in all_branches:
# Include all non-origin branches.
if not branch.startswith("origin/"):
continue
# Include branches that have slashes after "origin/" (pull requests).
if re.match("^origin/.*/", branch):
continue
# Include branches that end with "patch-1" style text.
if re.match("^origin/.*patch-[0-9]+$", branch):
continue
# Exclude if no matches above.
commit_range.append(branch)
subprocess.check_call(["3rdparty/mendertesting/check_commits.sh"] + commit_range)
|
Add checking of commits using mendertesting.
Changelog: None
Signed-off-by: Kristian Amlie <505e66ae45028a0596c853559221f0b72c1cee21@mender.io>#!/usr/bin/python
# Copyright 2016 Mender Software AS
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import re
import subprocess
class TestCommits:
def test_commits(self):
# First find which range to check. Include HEAD and exclude all known
# upstream branches.
git_branch = subprocess.check_output(["git", "branch", "-r"])
all_branches = [line.split()[0] for line in git_branch.strip().split('\n')]
# Exclude all non-pull requests.
commit_range = ["HEAD", "--not"]
for branch in all_branches:
# Include all non-origin branches.
if not branch.startswith("origin/"):
continue
# Include branches that have slashes after "origin/" (pull requests).
if re.match("^origin/.*/", branch):
continue
# Include branches that end with "patch-1" style text.
if re.match("^origin/.*patch-[0-9]+$", branch):
continue
# Exclude if no matches above.
commit_range.append(branch)
subprocess.check_call(["3rdparty/mendertesting/check_commits.sh"] + commit_range)
|
<commit_before><commit_msg>Add checking of commits using mendertesting.
Changelog: None
Signed-off-by: Kristian Amlie <505e66ae45028a0596c853559221f0b72c1cee21@mender.io><commit_after>#!/usr/bin/python
# Copyright 2016 Mender Software AS
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import re
import subprocess
class TestCommits:
def test_commits(self):
# First find which range to check. Include HEAD and exclude all known
# upstream branches.
git_branch = subprocess.check_output(["git", "branch", "-r"])
all_branches = [line.split()[0] for line in git_branch.strip().split('\n')]
# Exclude all non-pull requests.
commit_range = ["HEAD", "--not"]
for branch in all_branches:
# Include all non-origin branches.
if not branch.startswith("origin/"):
continue
# Include branches that have slashes after "origin/" (pull requests).
if re.match("^origin/.*/", branch):
continue
# Include branches that end with "patch-1" style text.
if re.match("^origin/.*patch-[0-9]+$", branch):
continue
# Exclude if no matches above.
commit_range.append(branch)
subprocess.check_call(["3rdparty/mendertesting/check_commits.sh"] + commit_range)
|
|
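The branch filtering in the record above builds an argument list of the form "HEAD --not <branch> <branch> ...", which in git's range syntax selects only commits reachable from HEAD but from none of the excluded branches. A minimal sketch of the same query, assuming check_commits.sh ultimately feeds its arguments to a rev-list style range (the shell script itself is not shown in the record):

import subprocess

def commits_unique_to_head(excluded=("origin/master",)):
    # "HEAD --not X Y" lists commits on HEAD that are on neither X nor Y,
    # i.e. roughly the commits a pull request would add.
    args = ["git", "rev-list", "HEAD", "--not"] + list(excluded)
    return subprocess.check_output(args).decode().split()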
6c431129e64380754296b646e5e063521980b8da
|
test/_common.py
|
test/_common.py
|
# encoding: utf-8
'''
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
'''
from __future__ import absolute_import, print_function, unicode_literals
def print_result(expected, actual):
print("[expected]\n{}\n".format(expected))
print("[actual]\n{}\n".format(actual))
|
Add a test helper function
|
Add a test helper function
|
Python
|
mit
|
thombashi/pytablewriter
|
Add a test helper function
|
# encoding: utf-8
'''
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
'''
from __future__ import absolute_import, print_function, unicode_literals
def print_result(expected, actual):
print("[expected]\n{}\n".format(expected))
print("[actual]\n{}\n".format(actual))
|
<commit_before><commit_msg>Add a test helper function<commit_after>
|
# encoding: utf-8
'''
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
'''
from __future__ import absolute_import, print_function, unicode_literals
def print_result(expected, actual):
print("[expected]\n{}\n".format(expected))
print("[actual]\n{}\n".format(actual))
|
Add a test helper function# encoding: utf-8
'''
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
'''
from __future__ import absolute_import, print_function, unicode_literals
def print_result(expected, actual):
print("[expected]\n{}\n".format(expected))
print("[actual]\n{}\n".format(actual))
|
<commit_before><commit_msg>Add a test helper function<commit_after># encoding: utf-8
'''
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
'''
from __future__ import absolute_import, print_function, unicode_literals
def print_result(expected, actual):
print("[expected]\n{}\n".format(expected))
print("[actual]\n{}\n".format(actual))
|
|
488e471bc361d3754ecf4ee6072365f4f67dea2e
|
backend/initialize_database.py
|
backend/initialize_database.py
|
import json
import os
import django
import logging
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings')
django.setup()
from unichat.models import Country, City, University, School
level = logging.DEBUG
logger = logging.getLogger(__name__)
logger.setLevel(level)
logging.basicConfig(format='%(message)s')
def setup_schools(data):
for country in data['countries']:
country_obj = Country(name=country['name'])
country_obj.save()
logger.debug(country_obj.name)
for city in country['cities']:
city_obj = City(name=city['name'], country=country_obj)
city_obj.save()
logger.debug(2 * ' ' + city_obj.name)
for university in city['universities']:
university_obj = University(name=university['name'], city=city_obj)
university_obj.save()
logger.debug(4 * ' ' + university_obj.name)
for school in university['schools']:
school_obj = School(name=school['name'], site=school['site'], mailRegex=school['mailRegex'], university=university_obj)
school_obj.save()
logger.debug(6 * ' ' + school_obj.name)
if __name__ == '__main__':
try:
with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data.json'), 'r') as f:
data = json.load(f)
except IOError, err:
logger.error('IOError: %s' % err)
exit(1)
setup_schools(data)
|
Add script to initialize db with schools, unis etc
|
Add script to initialize db with schools, unis etc
|
Python
|
mit
|
dimkarakostas/unimeet,dimkarakostas/unimeet,dimkarakostas/unimeet,dimkarakostas/unimeet
|
Add script to initialize db with schools, unis etc
|
import json
import os
import django
import logging
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings')
django.setup()
from unichat.models import Country, City, University, School
level = logging.DEBUG
logger = logging.getLogger(__name__)
logger.setLevel(level)
logging.basicConfig(format='%(message)s')
def setup_schools(data):
for country in data['countries']:
country_obj = Country(name=country['name'])
country_obj.save()
logger.debug(country_obj.name)
for city in country['cities']:
city_obj = City(name=city['name'], country=country_obj)
city_obj.save()
logger.debug(2 * ' ' + city_obj.name)
for university in city['universities']:
university_obj = University(name=university['name'], city=city_obj)
university_obj.save()
logger.debug(4 * ' ' + university_obj.name)
for school in university['schools']:
school_obj = School(name=school['name'], site=school['site'], mailRegex=school['mailRegex'], university=university_obj)
school_obj.save()
logger.debug(6 * ' ' + school_obj.name)
if __name__ == '__main__':
try:
with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data.json'), 'r') as f:
data = json.load(f)
except IOError, err:
logger.error('IOError: %s' % err)
exit(1)
setup_schools(data)
|
<commit_before><commit_msg>Add script to initialize db with schools, unis etc<commit_after>
|
import json
import os
import django
import logging
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings')
django.setup()
from unichat.models import Country, City, University, School
level = logging.DEBUG
logger = logging.getLogger(__name__)
logger.setLevel(level)
logging.basicConfig(format='%(message)s')
def setup_schools(data):
for country in data['countries']:
country_obj = Country(name=country['name'])
country_obj.save()
logger.debug(country_obj.name)
for city in country['cities']:
city_obj = City(name=city['name'], country=country_obj)
city_obj.save()
logger.debug(2 * ' ' + city_obj.name)
for university in city['universities']:
university_obj = University(name=university['name'], city=city_obj)
university_obj.save()
logger.debug(4 * ' ' + university_obj.name)
for school in university['schools']:
school_obj = School(name=school['name'], site=school['site'], mailRegex=school['mailRegex'], university=university_obj)
school_obj.save()
logger.debug(6 * ' ' + school_obj.name)
if __name__ == '__main__':
try:
with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data.json'), 'r') as f:
data = json.load(f)
except IOError, err:
logger.error('IOError: %s' % err)
exit(1)
setup_schools(data)
|
Add script to initialize db with schools, unis etcimport json
import os
import django
import logging
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings')
django.setup()
from unichat.models import Country, City, University, School
level = logging.DEBUG
logger = logging.getLogger(__name__)
logger.setLevel(level)
logging.basicConfig(format='%(message)s')
def setup_schools(data):
for country in data['countries']:
country_obj = Country(name=country['name'])
country_obj.save()
logger.debug(country_obj.name)
for city in country['cities']:
city_obj = City(name=city['name'], country=country_obj)
city_obj.save()
logger.debug(2 * ' ' + city_obj.name)
for university in city['universities']:
university_obj = University(name=university['name'], city=city_obj)
university_obj.save()
logger.debug(4 * ' ' + university_obj.name)
for school in university['schools']:
school_obj = School(name=school['name'], site=school['site'], mailRegex=school['mailRegex'], university=university_obj)
school_obj.save()
logger.debug(6 * ' ' + school_obj.name)
if __name__ == '__main__':
try:
with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data.json'), 'r') as f:
data = json.load(f)
except IOError, err:
logger.error('IOError: %s' % err)
exit(1)
setup_schools(data)
|
<commit_before><commit_msg>Add script to initialize db with schools, unis etc<commit_after>import json
import os
import django
import logging
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings')
django.setup()
from unichat.models import Country, City, University, School
level = logging.DEBUG
logger = logging.getLogger(__name__)
logger.setLevel(level)
logging.basicConfig(format='%(message)s')
def setup_schools(data):
for country in data['countries']:
country_obj = Country(name=country['name'])
country_obj.save()
logger.debug(country_obj.name)
for city in country['cities']:
city_obj = City(name=city['name'], country=country_obj)
city_obj.save()
logger.debug(2 * ' ' + city_obj.name)
for university in city['universities']:
university_obj = University(name=university['name'], city=city_obj)
university_obj.save()
logger.debug(4 * ' ' + university_obj.name)
for school in university['schools']:
school_obj = School(name=school['name'], site=school['site'], mailRegex=school['mailRegex'], university=university_obj)
school_obj.save()
logger.debug(6 * ' ' + school_obj.name)
if __name__ == '__main__':
try:
with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data.json'), 'r') as f:
data = json.load(f)
except IOError, err:
logger.error('IOError: %s' % err)
exit(1)
setup_schools(data)
|
|
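The initializer above saves every Country, City, University and School row one at a time, so each .save() commits individually. A sketch of a cheaper variant, assuming the setup_schools function defined in the record; transaction.atomic is standard Django and collapses the work into a single transaction:

from django.db import transaction

def setup_schools_in_one_transaction(data):
    # One commit at the end instead of one per .save() call.
    with transaction.atomic():
        setup_schools(data)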
36a9950e81cfa273a6782cc0377f9dfd510291d8
|
test_polycomp_low_level.py
|
test_polycomp_low_level.py
|
import numpy as np
import pypolycomp
def test_polycomp():
    max_error = 0.1
    samples = np.array([1.0, 2.0, 3.0, 4.1])
    # pypolycomp.Chebyshev needs samples.size, so samples must exist first
    inv_cheby = pypolycomp.Chebyshev(samples.size, pypolycomp.PCOMP_TD_INVERSE)
    for alg in (pypolycomp.PCOMP_ALG_USE_CHEBYSHEV,
                pypolycomp.PCOMP_ALG_NO_CHEBYSHEV):
params = pypolycomp.Polycomp(samples.size, 2, max_error, alg)
chunk = pypolycomp.PolycompChunk(params, samples.size)
chunk.compress(samples)
assert np.all(np.abs(chunk.decompress(inv_cheby) - samples) < max_error)
|
Add tests for PolycompChunk class
|
Add tests for PolycompChunk class
|
Python
|
bsd-3-clause
|
ziotom78/polycomp
|
Add tests for PolycompChunk class
|
import numpy as np
import pypolycomp
def test_polycomp():
    max_error = 0.1
    samples = np.array([1.0, 2.0, 3.0, 4.1])
    # pypolycomp.Chebyshev needs samples.size, so samples must exist first
    inv_cheby = pypolycomp.Chebyshev(samples.size, pypolycomp.PCOMP_TD_INVERSE)
    for alg in (pypolycomp.PCOMP_ALG_USE_CHEBYSHEV,
                pypolycomp.PCOMP_ALG_NO_CHEBYSHEV):
params = pypolycomp.Polycomp(samples.size, 2, max_error, alg)
chunk = pypolycomp.PolycompChunk(params, samples.size)
chunk.compress(samples)
assert np.all(np.abs(chunk.decompress(inv_cheby) - samples) < max_error)
|
<commit_before><commit_msg>Add tests for PolycompChunk class<commit_after>
|
import numpy as np
import pypolycomp
def test_polycomp():
    max_error = 0.1
    samples = np.array([1.0, 2.0, 3.0, 4.1])
    # pypolycomp.Chebyshev needs samples.size, so samples must exist first
    inv_cheby = pypolycomp.Chebyshev(samples.size, pypolycomp.PCOMP_TD_INVERSE)
    for alg in (pypolycomp.PCOMP_ALG_USE_CHEBYSHEV,
                pypolycomp.PCOMP_ALG_NO_CHEBYSHEV):
params = pypolycomp.Polycomp(samples.size, 2, max_error, alg)
chunk = pypolycomp.PolycompChunk(params, samples.size)
chunk.compress(samples)
assert np.all(np.abs(chunk.decompress(inv_cheby) - samples) < max_error)
|
Add tests for PolycompChunk classimport numpy as np
import pypolycomp
def test_polycomp():
    max_error = 0.1
    samples = np.array([1.0, 2.0, 3.0, 4.1])
    # pypolycomp.Chebyshev needs samples.size, so samples must exist first
    inv_cheby = pypolycomp.Chebyshev(samples.size, pypolycomp.PCOMP_TD_INVERSE)
    for alg in (pypolycomp.PCOMP_ALG_USE_CHEBYSHEV,
                pypolycomp.PCOMP_ALG_NO_CHEBYSHEV):
params = pypolycomp.Polycomp(samples.size, 2, max_error, alg)
chunk = pypolycomp.PolycompChunk(params, samples.size)
chunk.compress(samples)
assert np.all(np.abs(chunk.decompress(inv_cheby) - samples) < max_error)
|
<commit_before><commit_msg>Add tests for PolycompChunk class<commit_after>import numpy as np
import pypolycomp
def test_polycomp():
    max_error = 0.1
    samples = np.array([1.0, 2.0, 3.0, 4.1])
    # pypolycomp.Chebyshev needs samples.size, so samples must exist first
    inv_cheby = pypolycomp.Chebyshev(samples.size, pypolycomp.PCOMP_TD_INVERSE)
    for alg in (pypolycomp.PCOMP_ALG_USE_CHEBYSHEV,
                pypolycomp.PCOMP_ALG_NO_CHEBYSHEV):
params = pypolycomp.Polycomp(samples.size, 2, max_error, alg)
chunk = pypolycomp.PolycompChunk(params, samples.size)
chunk.compress(samples)
assert np.all(np.abs(chunk.decompress(inv_cheby) - samples) < max_error)
|
|
cb166b81a243cb1251ab9185f92b8a4734e4db55
|
euler012.py
|
euler012.py
|
#!/usr/bin/python
from math import sqrt
""" Our limit """
LIMIT = 500
""" We start from 6 """
test = 1 + 2 + 3
""" Next to add """
add = 4
div_count = 0
while div_count < LIMIT:
div_count = 1
test += add
add += 1
tmp = test
    for i in range(2, int(sqrt(test)) + 1):
        factor_count = 0
        while tmp % i == 0:
            tmp //= i
            factor_count += 1
        div_count *= (factor_count + 1)
    if tmp > 1:
        # a prime factor larger than sqrt(test) would otherwise be missed
        div_count *= 2
print (test)
|
Add solution for problem 12, really slow, needs to be optimized
|
Add solution for problem 12, really slow, needs to be optimized
|
Python
|
mit
|
cifvts/PyEuler
|
Add solution for problem 12, really slow, needs to be optimized
|
#!/usr/bin/python
from math import sqrt
""" Our limit """
LIMIT = 500
""" We start from 6 """
test = 1 + 2 + 3
""" Next to add """
add = 4
div_count = 0
while div_count < LIMIT:
div_count = 1
test += add
add += 1
tmp = test
    for i in range(2, int(sqrt(test)) + 1):
        factor_count = 0
        while tmp % i == 0:
            tmp //= i
            factor_count += 1
        div_count *= (factor_count + 1)
    if tmp > 1:
        # a prime factor larger than sqrt(test) would otherwise be missed
        div_count *= 2
print (test)
|
<commit_before><commit_msg>Add solution for problem 12, really slow, needs to be optimized<commit_after>
|
#!/usr/bin/python
from math import sqrt
""" Our limit """
LIMIT = 500
""" We start from 6 """
test = 1 + 2 + 3
""" Next to add """
add = 4
div_count = 0
while div_count < LIMIT:
div_count = 1
test += add
add += 1
tmp = test
    for i in range(2, int(sqrt(test)) + 1):
        factor_count = 0
        while tmp % i == 0:
            tmp //= i
            factor_count += 1
        div_count *= (factor_count + 1)
    if tmp > 1:
        # a prime factor larger than sqrt(test) would otherwise be missed
        div_count *= 2
print (test)
|
Add solution for problem 12, really slow, needs to be optimized#!/usr/bin/python
from math import sqrt
""" Our limit """
LIMIT = 500
""" We start from 6 """
test = 1 + 2 + 3
""" Next to add """
add = 4
div_count = 0
while div_count < LIMIT:
div_count = 1
test += add
add += 1
tmp = test
    for i in range(2, int(sqrt(test)) + 1):
        factor_count = 0
        while tmp % i == 0:
            tmp //= i
            factor_count += 1
        div_count *= (factor_count + 1)
    if tmp > 1:
        # a prime factor larger than sqrt(test) would otherwise be missed
        div_count *= 2
print (test)
|
<commit_before><commit_msg>Add solution for problem 12, really slow, needs to be optimized<commit_after>#!/usr/bin/python
from math import sqrt
""" Our limit """
LIMIT = 500
""" We start from 6 """
test = 1 + 2 + 3
""" Next to add """
add = 4
div_count = 0
while div_count < LIMIT:
div_count = 1
test += add
add += 1
tmp = test
    for i in range(2, int(sqrt(test)) + 1):
        factor_count = 0
        while tmp % i == 0:
            tmp //= i
            factor_count += 1
        div_count *= (factor_count + 1)
    if tmp > 1:
        # a prime factor larger than sqrt(test) would otherwise be missed
        div_count *= 2
print (test)
|
|
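The commit message above flags the brute-force search as slow. The usual speed-up rests on the identity T_n = n * (n + 1) / 2 with gcd(n, n + 1) = 1, so the divisor count of T_n is the product of two much smaller divisor counts. A sketch under that assumption (this is not the author's code; the limit of 500 is taken from the record):

from math import sqrt

def num_divisors(n):
    # Trial division up to sqrt(n), multiplying (exponent + 1) per prime.
    count = 1
    for p in range(2, int(sqrt(n)) + 1):
        exp = 0
        while n % p == 0:
            n //= p
            exp += 1
        count *= exp + 1
    if n > 1:  # one prime factor above sqrt(n) is left over
        count *= 2
    return count

def first_triangle_with_more_than(limit=500):
    n = 1
    while True:
        # Halve whichever of n, n + 1 is even; the two factors stay coprime.
        a, b = (n // 2, n + 1) if n % 2 == 0 else (n, (n + 1) // 2)
        if num_divisors(a) * num_divisors(b) > limit:
            return n * (n + 1) // 2
        n += 1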
25b3ad79bda44fda8b110ef183049e08765dfb18
|
euler017.py
|
euler017.py
|
#!/usr/bin/python
values = {}
values[1] = "one"
values[2] = "two"
values[3] = "three"
values[4] = "four"
values[5] = "five"
values[6] = "six"
values[7] = "seven"
values[8] = "eight"
values[9] = "nine"
values[10] = "ten"
values[11] = "eleven"
values[12] = "twelve"
values[13] = "thirteen"
values[14] = "fourteen"
values[15] = "fifteen"
values[16] = "sixteen"
values[17] = "seventeen"
values[18] = "eighteen"
values[19] = "nineteen"
values[20] = "twenty"
values[30] = "thirty"
values[40] = "forty"
values[50] = "fifty"
values[60] = "sixty"
values[70] = "seventy"
values[80] = "eighty"
values[90] = "ninety"
count = 0
for i in range(1, 1000 + 1):
tmp = i
if tmp // 1000:
count += len(values[tmp // 1000]) + 8
break
if tmp // 100:
count += len(values[tmp // 100]) + 10
tmp %= 100
if not tmp:
count -= 3
elif tmp in values:
count += len(values[tmp])
else:
count += len(values[(tmp // 10) * 10])
tmp %= 10
count += len(values[tmp])
print(count)
|
Add solution for problem 17
|
Add solution for problem 17
|
Python
|
mit
|
cifvts/PyEuler
|
Add solution for problem 17
|
#!/usr/bin/python
values = {}
values[1] = "one"
values[2] = "two"
values[3] = "three"
values[4] = "four"
values[5] = "five"
values[6] = "six"
values[7] = "seven"
values[8] = "eight"
values[9] = "nine"
values[10] = "ten"
values[11] = "eleven"
values[12] = "twelve"
values[13] = "thirteen"
values[14] = "fourteen"
values[15] = "fifteen"
values[16] = "sixteen"
values[17] = "seventeen"
values[18] = "eighteen"
values[19] = "nineteen"
values[20] = "twenty"
values[30] = "thirty"
values[40] = "forty"
values[50] = "fifty"
values[60] = "sixty"
values[70] = "seventy"
values[80] = "eighty"
values[90] = "ninety"
count = 0
for i in range(1, 1000 + 1):
tmp = i
if tmp // 1000:
count += len(values[tmp // 1000]) + 8
break
if tmp // 100:
count += len(values[tmp // 100]) + 10
tmp %= 100
if not tmp:
count -= 3
elif tmp in values:
count += len(values[tmp])
else:
count += len(values[(tmp // 10) * 10])
tmp %= 10
count += len(values[tmp])
print(count)
|
<commit_before><commit_msg>Add solution for problem 17<commit_after>
|
#!/usr/bin/python
values = {}
values[1] = "one"
values[2] = "two"
values[3] = "three"
values[4] = "four"
values[5] = "five"
values[6] = "six"
values[7] = "seven"
values[8] = "eight"
values[9] = "nine"
values[10] = "ten"
values[11] = "eleven"
values[12] = "twelve"
values[13] = "thirteen"
values[14] = "fourteen"
values[15] = "fifteen"
values[16] = "sixteen"
values[17] = "seventeen"
values[18] = "eighteen"
values[19] = "nineteen"
values[20] = "twenty"
values[30] = "thirty"
values[40] = "forty"
values[50] = "fifty"
values[60] = "sixty"
values[70] = "seventy"
values[80] = "eighty"
values[90] = "ninety"
count = 0
for i in range(1, 1000 + 1):
tmp = i
if tmp // 1000:
count += len(values[tmp // 1000]) + 8
break
if tmp // 100:
count += len(values[tmp // 100]) + 10
tmp %= 100
if not tmp:
count -= 3
elif tmp in values:
count += len(values[tmp])
else:
count += len(values[(tmp // 10) * 10])
tmp %= 10
count += len(values[tmp])
print(count)
|
Add solution for problem 17#!/usr/bin/python
values = {}
values[1] = "one"
values[2] = "two"
values[3] = "three"
values[4] = "four"
values[5] = "five"
values[6] = "six"
values[7] = "seven"
values[8] = "eight"
values[9] = "nine"
values[10] = "ten"
values[11] = "eleven"
values[12] = "twelve"
values[13] = "thirteen"
values[14] = "fourteen"
values[15] = "fifteen"
values[16] = "sixteen"
values[17] = "seventeen"
values[18] = "eighteen"
values[19] = "nineteen"
values[20] = "twenty"
values[30] = "thirty"
values[40] = "forty"
values[50] = "fifty"
values[60] = "sixty"
values[70] = "seventy"
values[80] = "eighty"
values[90] = "ninety"
count = 0
for i in range(1, 1000 + 1):
tmp = i
if tmp // 1000:
count += len(values[tmp // 1000]) + 8
break
if tmp // 100:
count += len(values[tmp // 100]) + 10
tmp %= 100
if not tmp:
count -= 3
elif tmp in values:
count += len(values[tmp])
else:
count += len(values[(tmp // 10) * 10])
tmp %= 10
count += len(values[tmp])
print(count)
|
<commit_before><commit_msg>Add solution for problem 17<commit_after>#!/usr/bin/python
values = {}
values[1] = "one"
values[2] = "two"
values[3] = "three"
values[4] = "four"
values[5] = "five"
values[6] = "six"
values[7] = "seven"
values[8] = "eight"
values[9] = "nine"
values[10] = "ten"
values[11] = "eleven"
values[12] = "twelve"
values[13] = "thirteen"
values[14] = "fourteen"
values[15] = "fifteen"
values[16] = "sixteen"
values[17] = "seventeen"
values[18] = "eighteen"
values[19] = "nineteen"
values[20] = "twenty"
values[30] = "thirty"
values[40] = "forty"
values[50] = "fifty"
values[60] = "sixty"
values[70] = "seventy"
values[80] = "eighty"
values[90] = "ninety"
count = 0
for i in range(1, 1000 + 1):
tmp = i
if tmp // 1000:
count += len(values[tmp // 1000]) + 8
break
if tmp // 100:
count += len(values[tmp // 100]) + 10
tmp %= 100
if not tmp:
count -= 3
elif tmp in values:
count += len(values[tmp])
else:
count += len(values[(tmp // 10) * 10])
tmp %= 10
count += len(values[tmp])
print(count)
|
|
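One fragile spot in the record above: the thousands case ends with "break", which is only safe because 1000 is the final loop value. A sketch of the same counting generalised past 1000, assuming the "values" dict built above is passed in (it ignores any "and" some styles put after "thousand"):

def letters(n, values):
    # Letter count of n written out, British style ("and" after hundreds).
    total = 0
    if n >= 1000:
        total += len(values[n // 1000]) + len("thousand")
        n %= 1000
        if n == 0:
            return total
    if n >= 100:
        total += len(values[n // 100]) + len("hundred")
        if n % 100 == 0:
            return total
        total += len("and")
        n %= 100
    if n in values:
        return total + len(values[n])
    return total + len(values[(n // 10) * 10]) + len(values[n % 10])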
10be73daab13815662871898ccfc8201e43ea3db
|
testifi/pypi.py
|
testifi/pypi.py
|
# -*- coding: utf-8 -*-
"""
testifi.pypi
~~~~~~~~~~~~
This module contains the portions of testifi code that know how to handle
interacting with PyPI.
"""
import treq
from twisted.internet.defer import inlineCallbacks, returnValue
@inlineCallbacks
def certifiVersions():
"""
This function determines what certifi versions are available and can be
tested. It uses as its baseline the 14.05.14 release of certifi, and will
    locate all other versions.
:returns: A Deferred that fires with a list of tuples of certifi versions
and tarball URLs.
"""
print "it begins!"
r = yield treq.get('https://pypi.python.org/pypi/certifi/json', timeout=5)
print "response!"
data = yield r.json()
print "data!"
# Note: this takes advantage of the fact that certifi's releases have the
# same version number sort order as lexicographical. If that changes,
# this will break.
releases = sorted(data[u'releases'].keys())
first_release = releases.index('14.05.14')
target_versions = releases[first_release:]
result = []
for version in target_versions:
files = data[u'releases'][version]
# Find the .tar.gz release.
for file in files:
if file[u'filename'].endswith(u'.tar.gz'):
break
else:
raise RuntimeError("Unable to locate tarball!")
result.append((version, file[u'url']))
print result
returnValue(result)
|
Add basic PyPI reading logic
|
Add basic PyPI reading logic
|
Python
|
mit
|
Lukasa/testifi
|
Add basic PyPI reading logic
|
# -*- coding: utf-8 -*-
"""
testifi.pypi
~~~~~~~~~~~~
This module contains the portions of testifi code that know how to handle
interacting with PyPI.
"""
import treq
from twisted.internet.defer import inlineCallbacks, returnValue
@inlineCallbacks
def certifiVersions():
"""
This function determines what certifi versions are available and can be
tested. It uses as its baseline the 14.05.14 release of certifi, and will
    locate all other versions.
:returns: A Deferred that fires with a list of tuples of certifi versions
and tarball URLs.
"""
print "it begins!"
r = yield treq.get('https://pypi.python.org/pypi/certifi/json', timeout=5)
print "response!"
data = yield r.json()
print "data!"
# Note: this takes advantage of the fact that certifi's releases have the
# same version number sort order as lexicographical. If that changes,
# this will break.
releases = sorted(data[u'releases'].keys())
first_release = releases.index('14.05.14')
target_versions = releases[first_release:]
result = []
for version in target_versions:
files = data[u'releases'][version]
# Find the .tar.gz release.
for file in files:
if file[u'filename'].endswith(u'.tar.gz'):
break
else:
raise RuntimeError("Unable to locate tarball!")
result.append((version, file[u'url']))
print result
returnValue(result)
|
<commit_before><commit_msg>Add basic PyPI reading logic<commit_after>
|
# -*- coding: utf-8 -*-
"""
testifi.pypi
~~~~~~~~~~~~
This module contains the portions of testifi code that know how to handle
interacting with PyPI.
"""
import treq
from twisted.internet.defer import inlineCallbacks, returnValue
@inlineCallbacks
def certifiVersions():
"""
This function determines what certifi versions are available and can be
tested. It uses as its baseline the 14.05.14 release of certifi, and will
    locate all other versions.
:returns: A Deferred that fires with a list of tuples of certifi versions
and tarball URLs.
"""
print "it begins!"
r = yield treq.get('https://pypi.python.org/pypi/certifi/json', timeout=5)
print "response!"
data = yield r.json()
print "data!"
# Note: this takes advantage of the fact that certifi's releases have the
# same version number sort order as lexicographical. If that changes,
# this will break.
releases = sorted(data[u'releases'].keys())
first_release = releases.index('14.05.14')
target_versions = releases[first_release:]
result = []
for version in target_versions:
files = data[u'releases'][version]
# Find the .tar.gz release.
for file in files:
if file[u'filename'].endswith(u'.tar.gz'):
break
else:
raise RuntimeError("Unable to locate tarball!")
result.append((version, file[u'url']))
print result
returnValue(result)
|
Add basic PyPI reading logic# -*- coding: utf-8 -*-
"""
testifi.pypi
~~~~~~~~~~~~
This module contains the portions of testifi code that know how to handle
interacting with PyPI.
"""
import treq
from twisted.internet.defer import inlineCallbacks, returnValue
@inlineCallbacks
def certifiVersions():
"""
This function determines what certifi versions are available and can be
tested. It uses as its baseline the 14.05.14 release of certifi, and will
    locate all other versions.
:returns: A Deferred that fires with a list of tuples of certifi versions
and tarball URLs.
"""
print "it begins!"
r = yield treq.get('https://pypi.python.org/pypi/certifi/json', timeout=5)
print "response!"
data = yield r.json()
print "data!"
# Note: this takes advantage of the fact that certifi's releases have the
# same version number sort order as lexicographical. If that changes,
# this will break.
releases = sorted(data[u'releases'].keys())
first_release = releases.index('14.05.14')
target_versions = releases[first_release:]
result = []
for version in target_versions:
files = data[u'releases'][version]
# Find the .tar.gz release.
for file in files:
if file[u'filename'].endswith(u'.tar.gz'):
break
else:
raise RuntimeError("Unable to locate tarball!")
result.append((version, file[u'url']))
print result
returnValue(result)
|
<commit_before><commit_msg>Add basic PyPI reading logic<commit_after># -*- coding: utf-8 -*-
"""
testifi.pypi
~~~~~~~~~~~~
This module contains the portions of testifi code that know how to handle
interacting with PyPI.
"""
import treq
from twisted.internet.defer import inlineCallbacks, returnValue
@inlineCallbacks
def certifiVersions():
"""
This function determines what certifi versions are available and can be
tested. It uses as its baseline the 14.05.14 release of certifi, and will
    locate all other versions.
:returns: A Deferred that fires with a list of tuples of certifi versions
and tarball URLs.
"""
print "it begins!"
r = yield treq.get('https://pypi.python.org/pypi/certifi/json', timeout=5)
print "response!"
data = yield r.json()
print "data!"
# Note: this takes advantage of the fact that certifi's releases have the
# same version number sort order as lexicographical. If that changes,
# this will break.
releases = sorted(data[u'releases'].keys())
first_release = releases.index('14.05.14')
target_versions = releases[first_release:]
result = []
for version in target_versions:
files = data[u'releases'][version]
# Find the .tar.gz release.
for file in files:
if file[u'filename'].endswith(u'.tar.gz'):
break
else:
raise RuntimeError("Unable to locate tarball!")
result.append((version, file[u'url']))
print result
returnValue(result)
|
|
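The inline comment above concedes that sorting release strings lexicographically only works while certifi's version scheme keeps that property. A sketch of a numeric sort that drops the dependency, assuming every release string is dot-separated integers such as '14.05.14':

def sorted_releases(release_strings):
    # '2015.04.28' sorts after '14.05.14' numerically, field by field.
    return sorted(release_strings,
                  key=lambda v: tuple(int(p) for p in v.split('.')))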
a160b70ced5cf5d7debcdc8cf73a94a79de275b1
|
tests/basics/frozenset_set.py
|
tests/basics/frozenset_set.py
|
try:
frozenset
except NameError:
print("SKIP")
import sys
sys.exit()
# Examples from https://docs.python.org/3/library/stdtypes.html#set
# "Instances of set are compared to instances of frozenset based on their
# members. For example:"
print(set('abc') == frozenset('abc'))
# This doesn't work in uPy
#print(set('abc') in set([frozenset('abc')]))
|
Add test on set/frozenset equality.
|
tests: Add test on set/frozenset equality.
|
Python
|
mit
|
dmazzella/micropython,feilongfl/micropython,adafruit/micropython,ernesto-g/micropython,vriera/micropython,toolmacher/micropython,ahotam/micropython,ahotam/micropython,skybird6672/micropython,blazewicz/micropython,noahchense/micropython,dxxb/micropython,blazewicz/micropython,ernesto-g/micropython,lowRISC/micropython,drrk/micropython,torwag/micropython,toolmacher/micropython,infinnovation/micropython,TDAbboud/micropython,supergis/micropython,mhoffma/micropython,pramasoul/micropython,misterdanb/micropython,dinau/micropython,jmarcelino/pycom-micropython,AriZuu/micropython,hiway/micropython,torwag/micropython,galenhz/micropython,martinribelotta/micropython,rubencabrera/micropython,danicampora/micropython,dxxb/micropython,matthewelse/micropython,lowRISC/micropython,pfalcon/micropython,MrSurly/micropython,adafruit/circuitpython,martinribelotta/micropython,noahwilliamsson/micropython,dhylands/micropython,AriZuu/micropython,kerneltask/micropython,Peetz0r/micropython-esp32,Timmenem/micropython,redbear/micropython,Peetz0r/micropython-esp32,noahchense/micropython,adafruit/circuitpython,cloudformdesign/micropython,henriknelson/micropython,hiway/micropython,emfcamp/micropython,MrSurly/micropython,kerneltask/micropython,trezor/micropython,micropython/micropython-esp32,feilongfl/micropython,praemdonck/micropython,neilh10/micropython,cwyark/micropython,vitiral/micropython,noahchense/micropython,redbear/micropython,misterdanb/micropython,supergis/micropython,rubencabrera/micropython,pozetroninc/micropython,redbear/micropython,rubencabrera/micropython,chrisdearman/micropython,dinau/micropython,praemdonck/micropython,dmazzella/micropython,micropython/micropython-esp32,deshipu/micropython,chrisdearman/micropython,ryannathans/micropython,omtinez/micropython,oopy/micropython,dhylands/micropython,ruffy91/micropython,neilh10/micropython,ganshun666/micropython,turbinenreiter/micropython,tuc-osg/micropython,ryannathans/micropython,bvernoux/micropython,xuxiaoxin/micropython,torwag/micropython,TDAbboud/micropython,MrSurly/micropython-esp32,henriknelson/micropython,adamkh/micropython,dinau/micropython,AriZuu/micropython,chrisdearman/micropython,xuxiaoxin/micropython,selste/micropython,xhat/micropython,alex-march/micropython,swegener/micropython,misterdanb/micropython,drrk/micropython,tobbad/micropython,henriknelson/micropython,feilongfl/micropython,HenrikSolver/micropython,blazewicz/micropython,blazewicz/micropython,dhylands/micropython,infinnovation/micropython,puuu/micropython,infinnovation/micropython,micropython/micropython-esp32,Timmenem/micropython,bvernoux/micropython,trezor/micropython,hosaka/micropython,xhat/micropython,SHA2017-badge/micropython-esp32,pozetroninc/micropython,blmorris/micropython,turbinenreiter/micropython,AriZuu/micropython,pozetroninc/micropython,turbinenreiter/micropython,martinribelotta/micropython,tobbad/micropython,vitiral/micropython,vitiral/micropython,dxxb/micropython,AriZuu/micropython,misterdanb/micropython,henriknelson/micropython,blmorris/micropython,henriknelson/micropython,noahchense/micropython,swegener/micropython,selste/micropython,dhylands/micropython,drrk/micropython,EcmaXp/micropython,pramasoul/micropython,rubencabrera/micropython,redbear/micropython,EcmaXp/micropython,emfcamp/micropython,vitiral/micropython,HenrikSolver/micropython,matthewelse/micropython,puuu/micropython,ruffy91/micropython,tuc-osg/micropython,supergis/micropython,feilongfl/micropython,tralamazza/micropython,dxxb/micropython,vitiral/micropython,selste/micropython,kerneltask/micropython,trezor/micropython,r
uffy91/micropython,deshipu/micropython,skybird6672/micropython,pramasoul/micropython,jmarcelino/pycom-micropython,adamkh/micropython,MrSurly/micropython-esp32,praemdonck/micropython,cwyark/micropython,martinribelotta/micropython,Peetz0r/micropython-esp32,ahotam/micropython,feilongfl/micropython,ganshun666/micropython,deshipu/micropython,ryannathans/micropython,adafruit/circuitpython,alex-robbins/micropython,xuxiaoxin/micropython,pfalcon/micropython,noahwilliamsson/micropython,emfcamp/micropython,pozetroninc/micropython,micropython/micropython-esp32,ryannathans/micropython,adamkh/micropython,mhoffma/micropython,mpalomer/micropython,ChuckM/micropython,orionrobots/micropython,blmorris/micropython,utopiaprince/micropython,vriera/micropython,Peetz0r/micropython-esp32,danicampora/micropython,tralamazza/micropython,tralamazza/micropython,hosaka/micropython,MrSurly/micropython,PappaPeppar/micropython,noahwilliamsson/micropython,tuc-osg/micropython,matthewelse/micropython,hiway/micropython,pfalcon/micropython,matthewelse/micropython,skybird6672/micropython,ganshun666/micropython,redbear/micropython,deshipu/micropython,selste/micropython,puuu/micropython,puuu/micropython,pfalcon/micropython,lowRISC/micropython,utopiaprince/micropython,pozetroninc/micropython,adamkh/micropython,SHA2017-badge/micropython-esp32,dinau/micropython,torwag/micropython,danicampora/micropython,tobbad/micropython,jlillest/micropython,bvernoux/micropython,galenhz/micropython,alex-robbins/micropython,infinnovation/micropython,adafruit/circuitpython,chrisdearman/micropython,cwyark/micropython,ericsnowcurrently/micropython,dhylands/micropython,MrSurly/micropython,galenhz/micropython,MrSurly/micropython-esp32,ernesto-g/micropython,pramasoul/micropython,adafruit/micropython,hosaka/micropython,trezor/micropython,mianos/micropython,torwag/micropython,vriera/micropython,omtinez/micropython,jlillest/micropython,adafruit/circuitpython,cloudformdesign/micropython,turbinenreiter/micropython,cwyark/micropython,ganshun666/micropython,utopiaprince/micropython,supergis/micropython,oopy/micropython,mpalomer/micropython,mianos/micropython,Timmenem/micropython,mpalomer/micropython,adafruit/circuitpython,praemdonck/micropython,mianos/micropython,xhat/micropython,jlillest/micropython,emfcamp/micropython,HenrikSolver/micropython,dinau/micropython,oopy/micropython,TDAbboud/micropython,toolmacher/micropython,HenrikSolver/micropython,Peetz0r/micropython-esp32,ernesto-g/micropython,danicampora/micropython,blmorris/micropython,cloudformdesign/micropython,alex-march/micropython,ChuckM/micropython,dmazzella/micropython,dxxb/micropython,ryannathans/micropython,turbinenreiter/micropython,micropython/micropython-esp32,kerneltask/micropython,PappaPeppar/micropython,PappaPeppar/micropython,drrk/micropython,oopy/micropython,neilh10/micropython,ChuckM/micropython,TDAbboud/micropython,ahotam/micropython,hiway/micropython,cloudformdesign/micropython,omtinez/micropython,EcmaXp/micropython,alex-march/micropython,blmorris/micropython,utopiaprince/micropython,ganshun666/micropython,skybird6672/micropython,hosaka/micropython,skybird6672/micropython,jlillest/micropython,trezor/micropython,mhoffma/micropython,emfcamp/micropython,ahotam/micropython,neilh10/micropython,mianos/micropython,alex-march/micropython,danicampora/micropython,bvernoux/micropython,martinribelotta/micropython,galenhz/micropython,MrSurly/micropython,TDAbboud/micropython,omtinez/micropython,SHA2017-badge/micropython-esp32,kerneltask/micropython,MrSurly/micropython-esp32,ericsnowcurrently/micropython,xux
iaoxin/micropython,hosaka/micropython,tuc-osg/micropython,jlillest/micropython,adafruit/micropython,xuxiaoxin/micropython,cloudformdesign/micropython,ChuckM/micropython,swegener/micropython,tralamazza/micropython,ernesto-g/micropython,ChuckM/micropython,utopiaprince/micropython,orionrobots/micropython,orionrobots/micropython,lowRISC/micropython,PappaPeppar/micropython,orionrobots/micropython,PappaPeppar/micropython,pfalcon/micropython,mianos/micropython,tuc-osg/micropython,tobbad/micropython,ruffy91/micropython,mpalomer/micropython,pramasoul/micropython,SHA2017-badge/micropython-esp32,EcmaXp/micropython,praemdonck/micropython,alex-robbins/micropython,deshipu/micropython,orionrobots/micropython,jmarcelino/pycom-micropython,drrk/micropython,vriera/micropython,Timmenem/micropython,noahwilliamsson/micropython,alex-robbins/micropython,swegener/micropython,toolmacher/micropython,xhat/micropython,adamkh/micropython,vriera/micropython,selste/micropython,alex-robbins/micropython,lowRISC/micropython,mhoffma/micropython,mhoffma/micropython,alex-march/micropython,matthewelse/micropython,ericsnowcurrently/micropython,jmarcelino/pycom-micropython,ericsnowcurrently/micropython,matthewelse/micropython,misterdanb/micropython,mpalomer/micropython,blazewicz/micropython,chrisdearman/micropython,neilh10/micropython,cwyark/micropython,infinnovation/micropython,MrSurly/micropython-esp32,HenrikSolver/micropython,adafruit/micropython,hiway/micropython,EcmaXp/micropython,ruffy91/micropython,bvernoux/micropython,SHA2017-badge/micropython-esp32,ericsnowcurrently/micropython,rubencabrera/micropython,oopy/micropython,omtinez/micropython,adafruit/micropython,supergis/micropython,toolmacher/micropython,swegener/micropython,puuu/micropython,galenhz/micropython,tobbad/micropython,noahwilliamsson/micropython,xhat/micropython,jmarcelino/pycom-micropython,dmazzella/micropython,Timmenem/micropython,noahchense/micropython
|
tests: Add test on set/frozenset equality.
|
try:
frozenset
except NameError:
print("SKIP")
import sys
sys.exit()
# Examples from https://docs.python.org/3/library/stdtypes.html#set
# "Instances of set are compared to instances of frozenset based on their
# members. For example:"
print(set('abc') == frozenset('abc'))
# This doesn't work in uPy
#print(set('abc') in set([frozenset('abc')]))
|
<commit_before><commit_msg>tests: Add test on set/frozenset equality.<commit_after>
|
try:
frozenset
except NameError:
print("SKIP")
import sys
sys.exit()
# Examples from https://docs.python.org/3/library/stdtypes.html#set
# "Instances of set are compared to instances of frozenset based on their
# members. For example:"
print(set('abc') == frozenset('abc'))
# This doesn't work in uPy
#print(set('abc') in set([frozenset('abc')]))
|
tests: Add test on set/frozenset equality.try:
frozenset
except NameError:
print("SKIP")
import sys
sys.exit()
# Examples from https://docs.python.org/3/library/stdtypes.html#set
# "Instances of set are compared to instances of frozenset based on their
# members. For example:"
print(set('abc') == frozenset('abc'))
# This doesn't work in uPy
#print(set('abc') in set([frozenset('abc')]))
|
<commit_before><commit_msg>tests: Add test on set/frozenset equality.<commit_after>try:
frozenset
except NameError:
print("SKIP")
import sys
sys.exit()
# Examples from https://docs.python.org/3/library/stdtypes.html#set
# "Instances of set are compared to instances of frozenset based on their
# members. For example:"
print(set('abc') == frozenset('abc'))
# This doesn't work in uPy
#print(set('abc') in set([frozenset('abc')]))
|
|
f2474508e799a4cd37533baa9ce2acae7af1ee89
|
tests/HashContainerSimpleTest.py
|
tests/HashContainerSimpleTest.py
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy.HashContainer import HashContainer
class HashContainerSimpleTest(TestCase):
def test_emptyCreation(self):
h = HashContainer()
self.assertEqual(h.count(), 0)
def test_creationWithArray(self):
h = HashContainer([0, 1])
self.assertEqual(h.count(), 2)
self.assertTrue(h.have(0))
self.assertTrue(h.have(1))
self.assertFalse(h.have(2))
self.assertEqual(h.get(0), 0)
self.assertEqual(h.get(1), 1)
self.assertIsNone(h.get(2))
def test_addOne(self):
h = HashContainer()
h.add(1)
self.assertEqual(h.count(), 1)
self.assertFalse(h.have(0))
self.assertTrue(h.have(1))
self.assertFalse(h.have(2))
self.assertIsNone(h.get(0))
self.assertEqual(h.get(1), 1)
self.assertIsNone(h.get(2))
if __name__ == '__main__':
main()
|
Add few tests of HashContainer
|
Add few tests of HashContainer
|
Python
|
mit
|
PatrikValkovic/grammpy
|
Add few tests of HashContainer
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy.HashContainer import HashContainer
class HashContainerSimpleTest(TestCase):
def test_emptyCreation(self):
h = HashContainer()
self.assertEqual(h.count(), 0)
def test_creationWithArray(self):
h = HashContainer([0, 1])
self.assertEqual(h.count(), 2)
self.assertTrue(h.have(0))
self.assertTrue(h.have(1))
self.assertFalse(h.have(2))
self.assertEqual(h.get(0), 0)
self.assertEqual(h.get(1), 1)
self.assertIsNone(h.get(2))
def test_addOne(self):
h = HashContainer()
h.add(1)
self.assertEqual(h.count(), 1)
self.assertFalse(h.have(0))
self.assertTrue(h.have(1))
self.assertFalse(h.have(2))
self.assertIsNone(h.get(0))
self.assertEqual(h.get(1), 1)
self.assertIsNone(h.get(2))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add few tests of HashContainer<commit_after>
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy.HashContainer import HashContainer
class HashContainerSimpleTest(TestCase):
def test_emptyCreation(self):
h = HashContainer()
self.assertEqual(h.count(), 0)
def test_creationWithArray(self):
h = HashContainer([0, 1])
self.assertEqual(h.count(), 2)
self.assertTrue(h.have(0))
self.assertTrue(h.have(1))
self.assertFalse(h.have(2))
self.assertEqual(h.get(0), 0)
self.assertEqual(h.get(1), 1)
self.assertIsNone(h.get(2))
def test_addOne(self):
h = HashContainer()
h.add(1)
self.assertEqual(h.count(), 1)
self.assertFalse(h.have(0))
self.assertTrue(h.have(1))
self.assertFalse(h.have(2))
self.assertIsNone(h.get(0))
self.assertEqual(h.get(1), 1)
self.assertIsNone(h.get(2))
if __name__ == '__main__':
main()
|
Add few tests of HashContainer#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy.HashContainer import HashContainer
class HashContainerSimpleTest(TestCase):
def test_emptyCreation(self):
h = HashContainer()
self.assertEqual(h.count(), 0)
def test_creationWithArray(self):
h = HashContainer([0, 1])
self.assertEqual(h.count(), 2)
self.assertTrue(h.have(0))
self.assertTrue(h.have(1))
self.assertFalse(h.have(2))
self.assertEqual(h.get(0), 0)
self.assertEqual(h.get(1), 1)
self.assertIsNone(h.get(2))
def test_addOne(self):
h = HashContainer()
h.add(1)
self.assertEqual(h.count(), 1)
self.assertFalse(h.have(0))
self.assertTrue(h.have(1))
self.assertFalse(h.have(2))
self.assertIsNone(h.get(0))
self.assertEqual(h.get(1), 1)
self.assertIsNone(h.get(2))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add few tests of HashContainer<commit_after>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy.HashContainer import HashContainer
class HashContainerSimpleTest(TestCase):
def test_emptyCreation(self):
h = HashContainer()
self.assertEqual(h.count(), 0)
def test_creationWithArray(self):
h = HashContainer([0, 1])
self.assertEqual(h.count(), 2)
self.assertTrue(h.have(0))
self.assertTrue(h.have(1))
self.assertFalse(h.have(2))
self.assertEqual(h.get(0), 0)
self.assertEqual(h.get(1), 1)
self.assertIsNone(h.get(2))
def test_addOne(self):
h = HashContainer()
h.add(1)
self.assertEqual(h.count(), 1)
self.assertFalse(h.have(0))
self.assertTrue(h.have(1))
self.assertFalse(h.have(2))
self.assertIsNone(h.get(0))
self.assertEqual(h.get(1), 1)
self.assertIsNone(h.get(2))
if __name__ == '__main__':
main()
|
|
67e2e29fb23e53c73655fe8df9779f7e8cc69796
|
alerts/proxy_drop_exfil_domains.py
|
alerts/proxy_drop_exfil_domains.py
|
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2014 Mozilla Corporation
from lib.alerttask import AlertTask
from mozdef_util.query_models import QueryStringMatch, SearchQuery, TermMatch
class AlertProxyDropExfilDomain(AlertTask):
def main(self):
self.parse_config('proxy_drop_exfil_domains.conf', ['exfil_domains'])
search_query = SearchQuery(minutes=20)
search_query.add_must([
TermMatch('category', 'squid'),
TermMatch('tags', 'squid'),
TermMatch('details.proxyaction', 'TCP_DENIED/-')
])
# Only notify on certain domains listed in the config
domain_regex = "/({0}).*/".format(
            self.config.exfil_domains.replace(',', '|'))
search_query.add_must([
QueryStringMatch('details.destination: {}'.format(domain_regex))
])
self.filtersManual(search_query)
        # Search aggregations on field 'details.sourceipaddress', keep X samples of
# events at most
self.searchEventsAggregated('details.sourceipaddress', samplesLimit=10)
# alert when >= X matching events in an aggregation
# I think it makes sense to alert every time here
self.walkAggregations(threshold=1)
# Set alert properties
def onAggregation(self, aggreg):
# aggreg['count']: number of items in the aggregation, ex: number of failed login attempts
# aggreg['value']: value of the aggregation field, ex: toto@example.com
# aggreg['events']: list of events in the aggregation
category = 'squid'
tags = ['squid', 'proxy']
severity = 'WARNING'
dropped_domains = set()
for event in aggreg['allevents']:
dropped_domains.add(event['_source']['details']['destination'])
summary = 'Suspicious Proxy DROP event(s) detected from {0} to the following exfil domains: {1}'.format(
aggreg['value'],
",".join(sorted(dropped_domains))
)
# Create the alert object based on these properties
return self.createAlertDict(summary, category, tags, aggreg['events'], severity)
|
Create an alert on attempts to use known exfil domains
|
Create an alert on attempts to use known exfil domains
For example, pastebin.com
|
Python
|
mpl-2.0
|
mpurzynski/MozDef,Phrozyn/MozDef,jeffbryner/MozDef,gdestuynder/MozDef,mozilla/MozDef,gdestuynder/MozDef,mozilla/MozDef,jeffbryner/MozDef,gdestuynder/MozDef,Phrozyn/MozDef,jeffbryner/MozDef,mpurzynski/MozDef,mozilla/MozDef,mozilla/MozDef,Phrozyn/MozDef,mpurzynski/MozDef,gdestuynder/MozDef,Phrozyn/MozDef,jeffbryner/MozDef,mpurzynski/MozDef
|
Create an alert on attempts to use known exfil domains
For example, pastebin.com
|
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2014 Mozilla Corporation
from lib.alerttask import AlertTask
from mozdef_util.query_models import QueryStringMatch, SearchQuery, TermMatch
class AlertProxyDropExfilDomain(AlertTask):
def main(self):
self.parse_config('proxy_drop_exfil_domains.conf', ['exfil_domains'])
search_query = SearchQuery(minutes=20)
search_query.add_must([
TermMatch('category', 'squid'),
TermMatch('tags', 'squid'),
TermMatch('details.proxyaction', 'TCP_DENIED/-')
])
# Only notify on certain domains listed in the config
domain_regex = "/({0}).*/".format(
            self.config.exfil_domains.replace(',', '|'))
search_query.add_must([
QueryStringMatch('details.destination: {}'.format(domain_regex))
])
self.filtersManual(search_query)
        # Search aggregations on field 'details.sourceipaddress', keep X samples of
# events at most
self.searchEventsAggregated('details.sourceipaddress', samplesLimit=10)
# alert when >= X matching events in an aggregation
# I think it makes sense to alert every time here
self.walkAggregations(threshold=1)
# Set alert properties
def onAggregation(self, aggreg):
# aggreg['count']: number of items in the aggregation, ex: number of failed login attempts
# aggreg['value']: value of the aggregation field, ex: toto@example.com
# aggreg['events']: list of events in the aggregation
category = 'squid'
tags = ['squid', 'proxy']
severity = 'WARNING'
dropped_domains = set()
for event in aggreg['allevents']:
dropped_domains.add(event['_source']['details']['destination'])
summary = 'Suspicious Proxy DROP event(s) detected from {0} to the following exfil domains: {1}'.format(
aggreg['value'],
",".join(sorted(dropped_domains))
)
# Create the alert object based on these properties
return self.createAlertDict(summary, category, tags, aggreg['events'], severity)
|
<commit_before><commit_msg>Create an alert on attempts to use known exfil domains
For example, pastebin.com<commit_after>
|
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2014 Mozilla Corporation
from lib.alerttask import AlertTask
from mozdef_util.query_models import QueryStringMatch, SearchQuery, TermMatch
class AlertProxyDropExfilDomain(AlertTask):
def main(self):
self.parse_config('proxy_drop_exfil_domains.conf', ['exfil_domains'])
search_query = SearchQuery(minutes=20)
search_query.add_must([
TermMatch('category', 'squid'),
TermMatch('tags', 'squid'),
TermMatch('details.proxyaction', 'TCP_DENIED/-')
])
# Only notify on certain domains listed in the config
domain_regex = "/({0}).*/".format(
            self.config.exfil_domains.replace(',', '|'))
search_query.add_must([
QueryStringMatch('details.destination: {}'.format(domain_regex))
])
self.filtersManual(search_query)
        # Search aggregations on field 'details.sourceipaddress', keep X samples of
# events at most
self.searchEventsAggregated('details.sourceipaddress', samplesLimit=10)
# alert when >= X matching events in an aggregation
# I think it makes sense to alert every time here
self.walkAggregations(threshold=1)
# Set alert properties
def onAggregation(self, aggreg):
# aggreg['count']: number of items in the aggregation, ex: number of failed login attempts
# aggreg['value']: value of the aggregation field, ex: toto@example.com
# aggreg['events']: list of events in the aggregation
category = 'squid'
tags = ['squid', 'proxy']
severity = 'WARNING'
dropped_domains = set()
for event in aggreg['allevents']:
dropped_domains.add(event['_source']['details']['destination'])
summary = 'Suspicious Proxy DROP event(s) detected from {0} to the following exfil domains: {1}'.format(
aggreg['value'],
",".join(sorted(dropped_domains))
)
# Create the alert object based on these properties
return self.createAlertDict(summary, category, tags, aggreg['events'], severity)
|
Create an alert on attempts to use known exfil domains
For example, pastebin.com#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2014 Mozilla Corporation
from lib.alerttask import AlertTask
from mozdef_util.query_models import QueryStringMatch, SearchQuery, TermMatch
class AlertProxyDropExfilDomain(AlertTask):
def main(self):
self.parse_config('proxy_drop_exfil_domains.conf', ['exfil_domains'])
search_query = SearchQuery(minutes=20)
search_query.add_must([
TermMatch('category', 'squid'),
TermMatch('tags', 'squid'),
TermMatch('details.proxyaction', 'TCP_DENIED/-')
])
# Only notify on certain domains listed in the config
domain_regex = "/({0}).*/".format(
            self.config.exfil_domains.replace(',', '|'))
search_query.add_must([
QueryStringMatch('details.destination: {}'.format(domain_regex))
])
self.filtersManual(search_query)
        # Search aggregations on field 'details.sourceipaddress', keep X samples of
# events at most
self.searchEventsAggregated('details.sourceipaddress', samplesLimit=10)
# alert when >= X matching events in an aggregation
# I think it makes sense to alert every time here
self.walkAggregations(threshold=1)
# Set alert properties
def onAggregation(self, aggreg):
# aggreg['count']: number of items in the aggregation, ex: number of failed login attempts
# aggreg['value']: value of the aggregation field, ex: toto@example.com
# aggreg['events']: list of events in the aggregation
category = 'squid'
tags = ['squid', 'proxy']
severity = 'WARNING'
dropped_domains = set()
for event in aggreg['allevents']:
dropped_domains.add(event['_source']['details']['destination'])
summary = 'Suspicious Proxy DROP event(s) detected from {0} to the following exfil domains: {1}'.format(
aggreg['value'],
",".join(sorted(dropped_domains))
)
# Create the alert object based on these properties
return self.createAlertDict(summary, category, tags, aggreg['events'], severity)
|
<commit_before><commit_msg>Create an alert on attempts to use known exfil domains
For example, pastebin.com<commit_after>#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2014 Mozilla Corporation
from lib.alerttask import AlertTask
from mozdef_util.query_models import QueryStringMatch, SearchQuery, TermMatch
class AlertProxyDropExfilDomain(AlertTask):
def main(self):
self.parse_config('proxy_drop_exfil_domains.conf', ['exfil_domains'])
search_query = SearchQuery(minutes=20)
search_query.add_must([
TermMatch('category', 'squid'),
TermMatch('tags', 'squid'),
TermMatch('details.proxyaction', 'TCP_DENIED/-')
])
# Only notify on certain domains listed in the config
domain_regex = "/({0}).*/".format(
            self.config.exfil_domains.replace(',', '|'))
search_query.add_must([
QueryStringMatch('details.destination: {}'.format(domain_regex))
])
self.filtersManual(search_query)
        # Search aggregations on field 'details.sourceipaddress', keep X samples of
# events at most
self.searchEventsAggregated('details.sourceipaddress', samplesLimit=10)
# alert when >= X matching events in an aggregation
# I think it makes sense to alert every time here
self.walkAggregations(threshold=1)
# Set alert properties
def onAggregation(self, aggreg):
# aggreg['count']: number of items in the aggregation, ex: number of failed login attempts
# aggreg['value']: value of the aggregation field, ex: toto@example.com
# aggreg['events']: list of events in the aggregation
category = 'squid'
tags = ['squid', 'proxy']
severity = 'WARNING'
dropped_domains = set()
for event in aggreg['allevents']:
dropped_domains.add(event['_source']['details']['destination'])
summary = 'Suspicious Proxy DROP event(s) detected from {0} to the following exfil domains: {1}'.format(
aggreg['value'],
",".join(sorted(dropped_domains))
)
# Create the alert object based on these properties
return self.createAlertDict(summary, category, tags, aggreg['events'], severity)
|
|
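A minimal sketch of how the alert above assembles its domain regex from a comma-separated config value; the domain list here is a made-up example, not the real contents of proxy_drop_exfil_domains.conf:

# Hypothetical config value standing in for self.config.exfil_domains.
exfil_domains = 'pastebin.com,transfer.sh'
domain_regex = "/({0}).*/".format(exfil_domains.replace(',', '|'))
print(domain_regex)  # prints: /(pastebin.com|transfer.sh).*/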
ac3e2cff6850b46d6daa38f90af276d23214c772
|
st2tests/st2tests/base_test_classes.py
|
st2tests/st2tests/base_test_classes.py
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Note: This module is here as a nasty work-around. The config parsing code
# relies on horrible and unpredictable import ordering and we need work around
# to make it work so the config doesn't get parsed multiple times.
# Real solution is nuking this awful config parsing and setting related code
# from orbit.
from st2tests.base import EventletTestCase
from st2tests.base import DbTestCase
from st2tests.base import DbModelTestCase
__all__ = [
'EventletTestCase',
'DbTestCase',
'DbModelTestCase'
]
|
Add CRUD model db test cases for RBAC models.
|
Add CRUD model db test cases for RBAC models.
|
Python
|
apache-2.0
|
nzlosh/st2,tonybaloney/st2,Plexxi/st2,StackStorm/st2
|
Add CRUD model db test cases for RBAC models.
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Note: This module is here as a nasty work-around. The config parsing code
# relies on horrible and unpredictable import ordering and we need work around
# to make it work so the config doesn't get parsed multiple times.
# Real solution is nuking this awful config parsing and setting related code
# from orbit.
from st2tests.base import EventletTestCase
from st2tests.base import DbTestCase
from st2tests.base import DbModelTestCase
__all__ = [
'EventletTestCase',
'DbTestCase',
'DbModelTestCase'
]
|
<commit_before><commit_msg>Add CRUD model db test cases for RBAC models.<commit_after>
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Note: This module is here as a nasty work-around. The config parsing code
# relies on horrible and unpredictable import ordering and we need work around
# to make it work so the config doesn't get parsed multiple times.
# Real solution is nuking this awful config parsing and setting related code
# from orbit.
from st2tests.base import EventletTestCase
from st2tests.base import DbTestCase
from st2tests.base import DbModelTestCase
__all__ = [
'EventletTestCase',
'DbTestCase',
'DbModelTestCase'
]
|
Add CRUD model db test cases for RBAC models.
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Note: This module is here as a nasty work-around. The config parsing code
# relies on horrible and unpredictable import ordering and we need work around
# to make it work so the config doesn't get parsed multiple times.
# Real solution is nuking this awful config parsing and setting related code
# from orbit.
from st2tests.base import EventletTestCase
from st2tests.base import DbTestCase
from st2tests.base import DbModelTestCase
__all__ = [
'EventletTestCase',
'DbTestCase',
'DbModelTestCase'
]
|
<commit_before><commit_msg>Add CRUD model db test cases for RBAC models.<commit_after># Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Note: This module is here as a nasty work-around. The config parsing code
# relies on horrible and unpredictable import ordering and we need work around
# to make it work so the config doesn't get parsed multiple times.
# Real solution is nuking this awful config parsing and setting related code
# from orbit.
from st2tests.base import EventletTestCase
from st2tests.base import DbTestCase
from st2tests.base import DbModelTestCase
__all__ = [
'EventletTestCase',
'DbTestCase',
'DbModelTestCase'
]
|
|
254ef4c3a433bebd8a668f5516d2f2ac707e2943
|
isUnique.py
|
isUnique.py
|
def verifyUnique(string):
if len(string) > 128:
return False
characterHash = [0] * 128
for character in string:
hashKey = ord(character)%128
if(characterHash[hashKey] > 0):
return False
else:
characterHash[hashKey] = characterHash[hashKey]+1
return True
print verifyUnique('test') # False ,O(n)
print verifyUnique('aquickboASDFwnfxjmps><verthlzydg') # True ,O(n)
|
Verify the given string has unique characters
|
Verify the given string has unique characters
|
Python
|
mit
|
arunkumarpalaniappan/algorithm_tryouts
|
Verify the given string has unique characters
|
def verifyUnique(string):
if len(string) > 128:
return False
characterHash = [0] * 128
for character in string:
hashKey = ord(character)%128
if(characterHash[hashKey] > 0):
return False
else:
characterHash[hashKey] = characterHash[hashKey]+1
return True
print verifyUnique('test') # False ,O(n)
print verifyUnique('aquickboASDFwnfxjmps><verthlzydg') # True ,O(n)
|
<commit_before><commit_msg>Verify the given string has unique characters<commit_after>
|
def verifyUnique(string):
if len(string) > 128:
return False
characterHash = [0] * 128
for character in string:
hashKey = ord(character)%128
if(characterHash[hashKey] > 0):
return False
else:
characterHash[hashKey] = characterHash[hashKey]+1
return True
print verifyUnique('test') # False ,O(n)
print verifyUnique('aquickboASDFwnfxjmps><verthlzydg') # True ,O(n)
|
Verify the given string has unique characters
def verifyUnique(string):
if len(string) > 128:
return False
characterHash = [0] * 128
for character in string:
hashKey = ord(character)%128
if(characterHash[hashKey] > 0):
return False
else:
characterHash[hashKey] = characterHash[hashKey]+1
return True
print verifyUnique('test') # False ,O(n)
print verifyUnique('aquickboASDFwnfxjmps><verthlzydg') # True ,O(n)
|
<commit_before><commit_msg>Verify the given string has unique characters<commit_after>def verifyUnique(string):
if len(string) > 128:
return False
characterHash = [0] * 128
for character in string:
hashKey = ord(character)%128
if(characterHash[hashKey] > 0):
return False
else:
characterHash[hashKey] = characterHash[hashKey]+1
return True
print verifyUnique('test') # False ,O(n)
print verifyUnique('aquickboASDFwnfxjmps><verthlzydg') # True ,O(n)
|
|
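A shorter, equivalent uniqueness check can lean on a set; this is an alternative sketch, not the committed solution:

def verify_unique_set(string):
    # All characters are unique iff the set of characters
    # has the same size as the string itself.
    return len(set(string)) == len(string)

print(verify_unique_set('test'))    # False, O(n)
print(verify_unique_set('abcdef'))  # True, O(n)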
e151a51cc1ed634f282f69a869af6ac9f85df780
|
api/user_service.py
|
api/user_service.py
|
import werkzeug.security as ws
from flask import Flask, request, jsonify, abort
app = Flask(__name__)
app.debug = True
LOGIN_DIFFICULTY = '0400'
SERVER_SECRET = 'SoSecret!'
@app.route('/salt')
def challenge():
uuid = request.args.get('uuid', '')
salt = ws.hashlib.sha256(SERVER_SECRET + uuid).hexdigest()
pow_challenge = ws.gen_salt(32)
response = {
'salt': salt,
'pow_challenge': pow_challenge
}
return jsonify(response)
@app.route('/create', methods=['POST'])
def create():
print request.form
uuid = request.form['uuid']
key = request.form['key']
nonce = request.form['nonce']
pow_challenge = request.form['pow_challenge']
#wallet_data = request.form['wallet_data']
challenge_response = ws.hashlib.sha256(nonce + pow_challenge).hexdigest()
if challenge_response[-len(LOGIN_DIFFICULTY):] != LOGIN_DIFFICULTY:
print 'Aborting: Challenge was not met'
abort(403)
return ""
|
import os
import werkzeug.security as ws
from flask import Flask, request, jsonify, abort, json
app = Flask(__name__)
app.debug = True
LOGIN_DIFFICULTY = '0400'
SERVER_SECRET = 'SoSecret!'
data_dir_root = os.environ.get('DATADIR')
@app.route('/salt')
def challenge():
uuid = request.args.get('uuid', '')
if exists(uuid):
abort(403)
salt = ws.hashlib.sha256(SERVER_SECRET + uuid).hexdigest()
pow_challenge = ws.gen_salt(32)
response = {
'salt': salt,
'pow_challenge': pow_challenge
}
return jsonify(response)
@app.route('/create', methods=['POST'])
def create():
uuid = request.form['uuid']
key = request.form['key']
nonce = request.form['nonce']
pow_challenge = request.form['pow_challenge']
#wallet_data = request.form['wallet_data']
wallet_data = "Walletstuff"
challenge_response = ws.hashlib.sha256(nonce + pow_challenge).hexdigest()
if challenge_response[-len(LOGIN_DIFFICULTY):] != LOGIN_DIFFICULTY:
print 'Aborting: Challenge was not met'
abort(403)
if exists(uuid):
abort(403)
wallet_file = { 'key': key, 'wallet': wallet_data }
write_wallet(uuid, wallet_file)
return ""
def write_wallet(uuid, wallet_file):
filename = data_dir_root + '/wallets/' + uuid + '.json'
with open(filename, 'w') as f:
json.dump(wallet_file, f)
def exists(uuid):
filename = data_dir_root + '/wallets/' + uuid + '.json'
return os.path.exists(filename)
|
Add wallet serialization to disk
|
Add wallet serialization to disk
|
Python
|
agpl-3.0
|
FuzzyBearBTC/omniwallet,OmniLayer/omniwallet,habibmasuro/omniwallet,VukDukic/omniwallet,Nevtep/omniwallet,achamely/omniwallet,dexX7/omniwallet,ripper234/omniwallet,curtislacy/omniwallet,arowser/omniwallet
|
import werkzeug.security as ws
from flask import Flask, request, jsonify, abort
app = Flask(__name__)
app.debug = True
LOGIN_DIFFICULTY = '0400'
SERVER_SECRET = 'SoSecret!'
@app.route('/salt')
def challenge():
uuid = request.args.get('uuid', '')
salt = ws.hashlib.sha256(SERVER_SECRET + uuid).hexdigest()
pow_challenge = ws.gen_salt(32)
response = {
'salt': salt,
'pow_challenge': pow_challenge
}
return jsonify(response)
@app.route('/create', methods=['POST'])
def create():
print request.form
uuid = request.form['uuid']
key = request.form['key']
nonce = request.form['nonce']
pow_challenge = request.form['pow_challenge']
#wallet_data = request.form['wallet_data']
challenge_response = ws.hashlib.sha256(nonce + pow_challenge).hexdigest()
if challenge_response[-len(LOGIN_DIFFICULTY):] != LOGIN_DIFFICULTY:
print 'Aborting: Challenge was not met'
abort(403)
return ""
Add wallet serialization to disk
|
import os
import werkzeug.security as ws
from flask import Flask, request, jsonify, abort, json
app = Flask(__name__)
app.debug = True
LOGIN_DIFFICULTY = '0400'
SERVER_SECRET = 'SoSecret!'
data_dir_root = os.environ.get('DATADIR')
@app.route('/salt')
def challenge():
uuid = request.args.get('uuid', '')
if exists(uuid):
abort(403)
salt = ws.hashlib.sha256(SERVER_SECRET + uuid).hexdigest()
pow_challenge = ws.gen_salt(32)
response = {
'salt': salt,
'pow_challenge': pow_challenge
}
return jsonify(response)
@app.route('/create', methods=['POST'])
def create():
uuid = request.form['uuid']
key = request.form['key']
nonce = request.form['nonce']
pow_challenge = request.form['pow_challenge']
#wallet_data = request.form['wallet_data']
wallet_data = "Walletstuff"
challenge_response = ws.hashlib.sha256(nonce + pow_challenge).hexdigest()
if challenge_response[-len(LOGIN_DIFFICULTY):] != LOGIN_DIFFICULTY:
print 'Aborting: Challenge was not met'
abort(403)
if exists(uuid):
abort(403)
wallet_file = { 'key': key, 'wallet': wallet_data }
write_wallet(uuid, wallet_file)
return ""
def write_wallet(uuid, wallet_file):
filename = data_dir_root + '/wallets/' + uuid + '.json'
with open(filename, 'w') as f:
json.dump(wallet_file, f)
def exists(uuid):
filename = data_dir_root + '/wallets/' + uuid + '.json'
return os.path.exists(filename)
|
<commit_before>import werkzeug.security as ws
from flask import Flask, request, jsonify, abort
app = Flask(__name__)
app.debug = True
LOGIN_DIFFICULTY = '0400'
SERVER_SECRET = 'SoSecret!'
@app.route('/salt')
def challenge():
uuid = request.args.get('uuid', '')
salt = ws.hashlib.sha256(SERVER_SECRET + uuid).hexdigest()
pow_challenge = ws.gen_salt(32)
response = {
'salt': salt,
'pow_challenge': pow_challenge
}
return jsonify(response)
@app.route('/create', methods=['POST'])
def create():
print request.form
uuid = request.form['uuid']
key = request.form['key']
nonce = request.form['nonce']
pow_challenge = request.form['pow_challenge']
#wallet_data = request.form['wallet_data']
challenge_response = ws.hashlib.sha256(nonce + pow_challenge).hexdigest()
if challenge_response[-len(LOGIN_DIFFICULTY):] != LOGIN_DIFFICULTY:
print 'Aborting: Challenge was not met'
abort(403)
return ""
<commit_msg>Add wallet serialization to disk<commit_after>
|
import os
import werkzeug.security as ws
from flask import Flask, request, jsonify, abort, json
app = Flask(__name__)
app.debug = True
LOGIN_DIFFICULTY = '0400'
SERVER_SECRET = 'SoSecret!'
data_dir_root = os.environ.get('DATADIR')
@app.route('/salt')
def challenge():
uuid = request.args.get('uuid', '')
if exists(uuid):
abort(403)
salt = ws.hashlib.sha256(SERVER_SECRET + uuid).hexdigest()
pow_challenge = ws.gen_salt(32)
response = {
'salt': salt,
'pow_challenge': pow_challenge
}
return jsonify(response)
@app.route('/create', methods=['POST'])
def create():
uuid = request.form['uuid']
key = request.form['key']
nonce = request.form['nonce']
pow_challenge = request.form['pow_challenge']
#wallet_data = request.form['wallet_data']
wallet_data = "Walletstuff"
challenge_response = ws.hashlib.sha256(nonce + pow_challenge).hexdigest()
if challenge_response[-len(LOGIN_DIFFICULTY):] != LOGIN_DIFFICULTY:
print 'Aborting: Challenge was not met'
abort(403)
if exists(uuid):
abort(403)
wallet_file = { 'key': key, 'wallet': wallet_data }
write_wallet(uuid, wallet_file)
return ""
def write_wallet(uuid, wallet_file):
filename = data_dir_root + '/wallets/' + uuid + '.json'
with open(filename, 'w') as f:
json.dump(wallet_file, f)
def exists(uuid):
filename = data_dir_root + '/wallets/' + uuid + '.json'
return os.path.exists(filename)
|
import werkzeug.security as ws
from flask import Flask, request, jsonify, abort
app = Flask(__name__)
app.debug = True
LOGIN_DIFFICULTY = '0400'
SERVER_SECRET = 'SoSecret!'
@app.route('/salt')
def challenge():
uuid = request.args.get('uuid', '')
salt = ws.hashlib.sha256(SERVER_SECRET + uuid).hexdigest()
pow_challenge = ws.gen_salt(32)
response = {
'salt': salt,
'pow_challenge': pow_challenge
}
return jsonify(response)
@app.route('/create', methods=['POST'])
def create():
print request.form
uuid = request.form['uuid']
key = request.form['key']
nonce = request.form['nonce']
pow_challenge = request.form['pow_challenge']
#wallet_data = request.form['wallet_data']
challenge_response = ws.hashlib.sha256(nonce + pow_challenge).hexdigest()
if challenge_response[-len(LOGIN_DIFFICULTY):] != LOGIN_DIFFICULTY:
print 'Aborting: Challenge was not met'
abort(403)
return ""
Add wallet serialization to disk
import os
import werkzeug.security as ws
from flask import Flask, request, jsonify, abort, json
app = Flask(__name__)
app.debug = True
LOGIN_DIFFICULTY = '0400'
SERVER_SECRET = 'SoSecret!'
data_dir_root = os.environ.get('DATADIR')
@app.route('/salt')
def challenge():
uuid = request.args.get('uuid', '')
if exists(uuid):
abort(403)
salt = ws.hashlib.sha256(SERVER_SECRET + uuid).hexdigest()
pow_challenge = ws.gen_salt(32)
response = {
'salt': salt,
'pow_challenge': pow_challenge
}
return jsonify(response)
@app.route('/create', methods=['POST'])
def create():
uuid = request.form['uuid']
key = request.form['key']
nonce = request.form['nonce']
pow_challenge = request.form['pow_challenge']
#wallet_data = request.form['wallet_data']
wallet_data = "Walletstuff"
challenge_response = ws.hashlib.sha256(nonce + pow_challenge).hexdigest()
if challenge_response[-len(LOGIN_DIFFICULTY):] != LOGIN_DIFFICULTY:
print 'Aborting: Challenge was not met'
abort(403)
if exists(uuid):
abort(403)
wallet_file = { 'key': key, 'wallet': wallet_data }
write_wallet(uuid, wallet_file)
return ""
def write_wallet(uuid, wallet_file):
filename = data_dir_root + '/wallets/' + uuid + '.json'
with open(filename, 'w') as f:
json.dump(wallet_file, f)
def exists(uuid):
filename = data_dir_root + '/wallets/' + uuid + '.json'
return os.path.exists(filename)
|
<commit_before>import werkzeug.security as ws
from flask import Flask, request, jsonify, abort
app = Flask(__name__)
app.debug = True
LOGIN_DIFFICULTY = '0400'
SERVER_SECRET = 'SoSecret!'
@app.route('/salt')
def challenge():
uuid = request.args.get('uuid', '')
salt = ws.hashlib.sha256(SERVER_SECRET + uuid).hexdigest()
pow_challenge = ws.gen_salt(32)
response = {
'salt': salt,
'pow_challenge': pow_challenge
}
return jsonify(response)
@app.route('/create', methods=['POST'])
def create():
print request.form
uuid = request.form['uuid']
key = request.form['key']
nonce = request.form['nonce']
pow_challenge = request.form['pow_challenge']
#wallet_data = request.form['wallet_data']
challenge_response = ws.hashlib.sha256(nonce + pow_challenge).hexdigest()
if challenge_response[-len(LOGIN_DIFFICULTY):] != LOGIN_DIFFICULTY:
print 'Aborting: Challenge was not met'
abort(403)
return ""
<commit_msg>Add wallet serialization to disk<commit_after>import os
import werkzeug.security as ws
from flask import Flask, request, jsonify, abort, json
app = Flask(__name__)
app.debug = True
LOGIN_DIFFICULTY = '0400'
SERVER_SECRET = 'SoSecret!'
data_dir_root = os.environ.get('DATADIR')
@app.route('/salt')
def challenge():
uuid = request.args.get('uuid', '')
if exists(uuid):
abort(403)
salt = ws.hashlib.sha256(SERVER_SECRET + uuid).hexdigest()
pow_challenge = ws.gen_salt(32)
response = {
'salt': salt,
'pow_challenge': pow_challenge
}
return jsonify(response)
@app.route('/create', methods=['POST'])
def create():
uuid = request.form['uuid']
key = request.form['key']
nonce = request.form['nonce']
pow_challenge = request.form['pow_challenge']
#wallet_data = request.form['wallet_data']
wallet_data = "Walletstuff"
challenge_response = ws.hashlib.sha256(nonce + pow_challenge).hexdigest()
if challenge_response[-len(LOGIN_DIFFICULTY):] != LOGIN_DIFFICULTY:
print 'Aborting: Challenge was not met'
abort(403)
if exists(uuid):
abort(403)
wallet_file = { 'key': key, 'wallet': wallet_data }
write_wallet(uuid, wallet_file)
return ""
def write_wallet(uuid, wallet_file):
filename = data_dir_root + '/wallets/' + uuid + '.json'
with open(filename, 'w') as f:
json.dump(wallet_file, f)
def exists(uuid):
filename = data_dir_root + '/wallets/' + uuid + '.json'
return os.path.exists(filename)
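For illustration, the client side of the proof-of-work check above would search for a nonce whose SHA-256 digest with the challenge ends in the difficulty suffix; a sketch with a made-up challenge value, mirroring the server-side check in create():

import hashlib
import itertools

LOGIN_DIFFICULTY = '0400'
pow_challenge = 'example-challenge'  # normally obtained from /salt

# Try nonces until sha256(nonce + challenge) ends with the difficulty suffix.
for n in itertools.count():
    nonce = str(n)
    digest = hashlib.sha256((nonce + pow_challenge).encode()).hexdigest()
    if digest.endswith(LOGIN_DIFFICULTY):
        print(nonce, digest)
        break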
|
71bd24dd15295ade1b511a5dbff7d7a150ef2084
|
problems/problem_36.py
|
problems/problem_36.py
|
# Double-base palindromes
def decimal_to_binary(n):
binary = []
while n > 0:
binary.append(n % 2)
n /= 2
output = []
while len(binary) != 0:
output.append(str(binary.pop()))
return "".join(output)
def is_palindrome(product):
product = str(product)
reverse = product[::-1]
return product == reverse
def sum_of_double_base_palindromes():
return sum([x for x in range(1, 1000000)
if is_palindrome(x) and is_palindrome(decimal_to_binary(x))])
print sum_of_double_base_palindromes()
|
Add solution for problem 36
|
Add solution for problem 36
- Added solution for problem 36
(double-base palindromes)
|
Python
|
mit
|
edmondkotowski/project-euler
|
Add solution for problem 36
- Added solution for problem 36
(double-base palindromes)
|
# Double-base palindromes
def decimal_to_binary(n):
binary = []
while n > 0:
binary.append(n % 2)
n /= 2
output = []
while len(binary) != 0:
output.append(str(binary.pop()))
return "".join(output)
def is_palindrome(product):
product = str(product)
reverse = product[::-1]
return product == reverse
def sum_of_double_base_palindromes():
return sum([x for x in range(1, 1000000)
if is_palindrome(x) and is_palindrome(decimal_to_binary(x))])
print sum_of_double_base_palindromes()
|
<commit_before><commit_msg>Add solution for problem 36
- Added solution for problem 36
(double-base palindromes)<commit_after>
|
# Double-base palindromes
def decimal_to_binary(n):
binary = []
while n > 0:
binary.append(n % 2)
n /= 2
output = []
while len(binary) != 0:
output.append(str(binary.pop()))
return "".join(output)
def is_palindrome(product):
product = str(product)
reverse = product[::-1]
return product == reverse
def sum_of_double_base_palindromes():
return sum([x for x in range(1, 1000000)
if is_palindrome(x) and is_palindrome(decimal_to_binary(x))])
print sum_of_double_base_palindromes()
|
Add solution for problem 36
- Added solution for problem 36
(double-base palindromes)
# Double-base palindromes
def decimal_to_binary(n):
binary = []
while n > 0:
binary.append(n % 2)
n /= 2
output = []
while len(binary) != 0:
output.append(str(binary.pop()))
return "".join(output)
def is_palindrome(product):
product = str(product)
reverse = product[::-1]
return product == reverse
def sum_of_double_base_palindromes():
return sum([x for x in range(1, 1000000)
if is_palindrome(x) and is_palindrome(decimal_to_binary(x))])
print sum_of_double_base_palindromes()
|
<commit_before><commit_msg>Add solution for problem 36
- Added solution for problem 36
(double-base palindromes)<commit_after># Double-base palindromes
def decimal_to_binary(n):
binary = []
while n > 0:
binary.append(n % 2)
n /= 2
output = []
while len(binary) != 0:
output.append(str(binary.pop()))
return "".join(output)
def is_palindrome(product):
product = str(product)
reverse = product[::-1]
return product == reverse
def sum_of_double_base_palindromes():
return sum([x for x in range(1, 1000000)
if is_palindrome(x) and is_palindrome(decimal_to_binary(x))])
print sum_of_double_base_palindromes()
|
|
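The hand-rolled decimal_to_binary above can be replaced by Python's built-in bin(); an alternative sketch of the same computation, not the committed code:

def is_palindrome(s):
    s = str(s)
    return s == s[::-1]

# bin(585) == '0b1001001001'; slice off the '0b' prefix before checking.
total = sum(x for x in range(1, 1000000)
            if is_palindrome(x) and is_palindrome(bin(x)[2:]))
print(total)  # 872187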
ebfbe65c08ed8ee5d44c4c39f83f5e08bba8a1a7
|
tests/test_misc.py
|
tests/test_misc.py
|
from flask.ext.resty import Api, GenericModelView
from marshmallow import fields, Schema
import pytest
from sqlalchemy import Column, Integer
import helpers
# -----------------------------------------------------------------------------
@pytest.yield_fixture
def models(db):
class Widget(db.Model):
__tablename__ = 'widgets'
id = Column(Integer, primary_key=True)
db.create_all()
yield {
'widget': Widget,
}
db.drop_all()
@pytest.fixture
def schemas():
class WidgetSchema(Schema):
id = fields.Integer(as_string=True)
return {
'widget': WidgetSchema(),
}
@pytest.fixture
def views(models, schemas):
class WidgetViewBase(GenericModelView):
model = models['widget']
schema = schemas['widget']
class WidgetListView(WidgetViewBase):
def get(self):
return self.list()
def post(self):
return self.create(allow_client_id=True)
class WidgetView(WidgetViewBase):
def get(self, id):
return self.retrieve(id)
return {
'widget_list': WidgetListView,
'widget': WidgetView,
}
@pytest.fixture(autouse=True)
def data(db, models):
db.session.add(models['widget']())
db.session.commit()
# -----------------------------------------------------------------------------
def test_api_prefix(app, views, client):
api = Api(app, '/api')
api.add_resource('/widgets', views['widget_list'])
response = client.get('/api/widgets')
assert helpers.get_data(response) == [
{
'id': '1',
},
]
def test_create_client_id(app, views, client):
api = Api(app)
api.add_resource('/widgets', views['widget_list'], views['widget'])
response = helpers.request(
client,
'POST', '/widgets',
{
'id': '100',
},
)
assert response.status_code == 201
assert response.headers['Location'] == 'http://localhost/widgets/100'
assert helpers.get_data(response) == {
'id': '100',
}
def test_resource_rules(app, views, client):
api = Api(app)
api.add_resource(
base_rule='/widget/<id>',
base_view=views['widget'],
alternate_rule='/widgets',
alternate_view=views['widget_list'],
)
get_response = client.get('/widget/1')
assert get_response.status_code == 200
assert helpers.get_data(get_response) == {
'id': '1',
}
post_response = helpers.request(
client,
'POST', '/widgets',
{},
)
assert post_response.status_code == 201
assert post_response.headers['Location'] == 'http://localhost/widget/2'
assert helpers.get_data(post_response) == {
'id': '2',
}
|
Add miscellaneous tests for remaining code
|
Add miscellaneous tests for remaining code
|
Python
|
mit
|
taion/flask-jsonapiview,4Catalyzer/flask-resty,4Catalyzer/flask-jsonapiview
|
Add miscellaneous tests for remaining code
|
from flask.ext.resty import Api, GenericModelView
from marshmallow import fields, Schema
import pytest
from sqlalchemy import Column, Integer
import helpers
# -----------------------------------------------------------------------------
@pytest.yield_fixture
def models(db):
class Widget(db.Model):
__tablename__ = 'widgets'
id = Column(Integer, primary_key=True)
db.create_all()
yield {
'widget': Widget,
}
db.drop_all()
@pytest.fixture
def schemas():
class WidgetSchema(Schema):
id = fields.Integer(as_string=True)
return {
'widget': WidgetSchema(),
}
@pytest.fixture
def views(models, schemas):
class WidgetViewBase(GenericModelView):
model = models['widget']
schema = schemas['widget']
class WidgetListView(WidgetViewBase):
def get(self):
return self.list()
def post(self):
return self.create(allow_client_id=True)
class WidgetView(WidgetViewBase):
def get(self, id):
return self.retrieve(id)
return {
'widget_list': WidgetListView,
'widget': WidgetView,
}
@pytest.fixture(autouse=True)
def data(db, models):
db.session.add(models['widget']())
db.session.commit()
# -----------------------------------------------------------------------------
def test_api_prefix(app, views, client):
api = Api(app, '/api')
api.add_resource('/widgets', views['widget_list'])
response = client.get('/api/widgets')
assert helpers.get_data(response) == [
{
'id': '1',
},
]
def test_create_client_id(app, views, client):
api = Api(app)
api.add_resource('/widgets', views['widget_list'], views['widget'])
response = helpers.request(
client,
'POST', '/widgets',
{
'id': '100',
},
)
assert response.status_code == 201
assert response.headers['Location'] == 'http://localhost/widgets/100'
assert helpers.get_data(response) == {
'id': '100',
}
def test_resource_rules(app, views, client):
api = Api(app)
api.add_resource(
base_rule='/widget/<id>',
base_view=views['widget'],
alternate_rule='/widgets',
alternate_view=views['widget_list'],
)
get_response = client.get('/widget/1')
assert get_response.status_code == 200
assert helpers.get_data(get_response) == {
'id': '1',
}
post_response = helpers.request(
client,
'POST', '/widgets',
{},
)
assert post_response.status_code == 201
assert post_response.headers['Location'] == 'http://localhost/widget/2'
assert helpers.get_data(post_response) == {
'id': '2',
}
|
<commit_before><commit_msg>Add miscellaneous tests for remaining code<commit_after>
|
from flask.ext.resty import Api, GenericModelView
from marshmallow import fields, Schema
import pytest
from sqlalchemy import Column, Integer
import helpers
# -----------------------------------------------------------------------------
@pytest.yield_fixture
def models(db):
class Widget(db.Model):
__tablename__ = 'widgets'
id = Column(Integer, primary_key=True)
db.create_all()
yield {
'widget': Widget,
}
db.drop_all()
@pytest.fixture
def schemas():
class WidgetSchema(Schema):
id = fields.Integer(as_string=True)
return {
'widget': WidgetSchema(),
}
@pytest.fixture
def views(models, schemas):
class WidgetViewBase(GenericModelView):
model = models['widget']
schema = schemas['widget']
class WidgetListView(WidgetViewBase):
def get(self):
return self.list()
def post(self):
return self.create(allow_client_id=True)
class WidgetView(WidgetViewBase):
def get(self, id):
return self.retrieve(id)
return {
'widget_list': WidgetListView,
'widget': WidgetView,
}
@pytest.fixture(autouse=True)
def data(db, models):
db.session.add(models['widget']())
db.session.commit()
# -----------------------------------------------------------------------------
def test_api_prefix(app, views, client):
api = Api(app, '/api')
api.add_resource('/widgets', views['widget_list'])
response = client.get('/api/widgets')
assert helpers.get_data(response) == [
{
'id': '1',
},
]
def test_create_client_id(app, views, client):
api = Api(app)
api.add_resource('/widgets', views['widget_list'], views['widget'])
response = helpers.request(
client,
'POST', '/widgets',
{
'id': '100',
},
)
assert response.status_code == 201
assert response.headers['Location'] == 'http://localhost/widgets/100'
assert helpers.get_data(response) == {
'id': '100',
}
def test_resource_rules(app, views, client):
api = Api(app)
api.add_resource(
base_rule='/widget/<id>',
base_view=views['widget'],
alternate_rule='/widgets',
alternate_view=views['widget_list'],
)
get_response = client.get('/widget/1')
assert get_response.status_code == 200
assert helpers.get_data(get_response) == {
'id': '1',
}
post_response = helpers.request(
client,
'POST', '/widgets',
{},
)
assert post_response.status_code == 201
assert post_response.headers['Location'] == 'http://localhost/widget/2'
assert helpers.get_data(post_response) == {
'id': '2',
}
|
Add miscellaneous tests for remaining code
from flask.ext.resty import Api, GenericModelView
from marshmallow import fields, Schema
import pytest
from sqlalchemy import Column, Integer
import helpers
# -----------------------------------------------------------------------------
@pytest.yield_fixture
def models(db):
class Widget(db.Model):
__tablename__ = 'widgets'
id = Column(Integer, primary_key=True)
db.create_all()
yield {
'widget': Widget,
}
db.drop_all()
@pytest.fixture
def schemas():
class WidgetSchema(Schema):
id = fields.Integer(as_string=True)
return {
'widget': WidgetSchema(),
}
@pytest.fixture
def views(models, schemas):
class WidgetViewBase(GenericModelView):
model = models['widget']
schema = schemas['widget']
class WidgetListView(WidgetViewBase):
def get(self):
return self.list()
def post(self):
return self.create(allow_client_id=True)
class WidgetView(WidgetViewBase):
def get(self, id):
return self.retrieve(id)
return {
'widget_list': WidgetListView,
'widget': WidgetView,
}
@pytest.fixture(autouse=True)
def data(db, models):
db.session.add(models['widget']())
db.session.commit()
# -----------------------------------------------------------------------------
def test_api_prefix(app, views, client):
api = Api(app, '/api')
api.add_resource('/widgets', views['widget_list'])
response = client.get('/api/widgets')
assert helpers.get_data(response) == [
{
'id': '1',
},
]
def test_create_client_id(app, views, client):
api = Api(app)
api.add_resource('/widgets', views['widget_list'], views['widget'])
response = helpers.request(
client,
'POST', '/widgets',
{
'id': '100',
},
)
assert response.status_code == 201
assert response.headers['Location'] == 'http://localhost/widgets/100'
assert helpers.get_data(response) == {
'id': '100',
}
def test_resource_rules(app, views, client):
api = Api(app)
api.add_resource(
base_rule='/widget/<id>',
base_view=views['widget'],
alternate_rule='/widgets',
alternate_view=views['widget_list'],
)
get_response = client.get('/widget/1')
assert get_response.status_code == 200
assert helpers.get_data(get_response) == {
'id': '1',
}
post_response = helpers.request(
client,
'POST', '/widgets',
{},
)
assert post_response.status_code == 201
assert post_response.headers['Location'] == 'http://localhost/widget/2'
assert helpers.get_data(post_response) == {
'id': '2',
}
|
<commit_before><commit_msg>Add miscellaneous tests for remaining code<commit_after>from flask.ext.resty import Api, GenericModelView
from marshmallow import fields, Schema
import pytest
from sqlalchemy import Column, Integer
import helpers
# -----------------------------------------------------------------------------
@pytest.yield_fixture
def models(db):
class Widget(db.Model):
__tablename__ = 'widgets'
id = Column(Integer, primary_key=True)
db.create_all()
yield {
'widget': Widget,
}
db.drop_all()
@pytest.fixture
def schemas():
class WidgetSchema(Schema):
id = fields.Integer(as_string=True)
return {
'widget': WidgetSchema(),
}
@pytest.fixture
def views(models, schemas):
class WidgetViewBase(GenericModelView):
model = models['widget']
schema = schemas['widget']
class WidgetListView(WidgetViewBase):
def get(self):
return self.list()
def post(self):
return self.create(allow_client_id=True)
class WidgetView(WidgetViewBase):
def get(self, id):
return self.retrieve(id)
return {
'widget_list': WidgetListView,
'widget': WidgetView,
}
@pytest.fixture(autouse=True)
def data(db, models):
db.session.add(models['widget']())
db.session.commit()
# -----------------------------------------------------------------------------
def test_api_prefix(app, views, client):
api = Api(app, '/api')
api.add_resource('/widgets', views['widget_list'])
response = client.get('/api/widgets')
assert helpers.get_data(response) == [
{
'id': '1',
},
]
def test_create_client_id(app, views, client):
api = Api(app)
api.add_resource('/widgets', views['widget_list'], views['widget'])
response = helpers.request(
client,
'POST', '/widgets',
{
'id': '100',
},
)
assert response.status_code == 201
assert response.headers['Location'] == 'http://localhost/widgets/100'
assert helpers.get_data(response) == {
'id': '100',
}
def test_resource_rules(app, views, client):
api = Api(app)
api.add_resource(
base_rule='/widget/<id>',
base_view=views['widget'],
alternate_rule='/widgets',
alternate_view=views['widget_list'],
)
get_response = client.get('/widget/1')
assert get_response.status_code == 200
assert helpers.get_data(get_response) == {
'id': '1',
}
post_response = helpers.request(
client,
'POST', '/widgets',
{},
)
assert post_response.status_code == 201
assert post_response.headers['Location'] == 'http://localhost/widget/2'
assert helpers.get_data(post_response) == {
'id': '2',
}
|
|
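For reference, a compact sketch of the two registration styles the tests above exercise; the view classes are the stand-ins defined in the fixtures, and the import mirrors the one used in the tests:

from flask import Flask
from flask.ext.resty import Api

app = Flask(__name__)
api = Api(app, '/api')  # every resource rule gets the '/api' prefix

# Single rule, list view only:
#   api.add_resource('/widgets', WidgetListView)
# Split rules, item view plus list view (Location headers then point
# at the base rule, e.g. http://localhost/widget/2):
#   api.add_resource(
#       base_rule='/widget/<id>', base_view=WidgetView,
#       alternate_rule='/widgets', alternate_view=WidgetListView,
#   )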
40711777de24d30cfe771f172b221cfdf460d8eb
|
rng.py
|
rng.py
|
from random import randint
def get_random_number(start=1, end=10):
"""Generates and returns random number between :start: and :end:"""
return randint(start, end)
|
def get_random_number(start=1, end=10):
"""https://xkcd.com/221/"""
return 4
|
Revert "Fix python random number generator."
|
Revert "Fix python random number generator."
|
Python
|
mit
|
1yvT0s/illacceptanything,dushmis/illacceptanything,ultranaut/illacceptanything,caioproiete/illacceptanything,triggerNZ/illacceptanything,oneminot/illacceptanything,TheWhiteLlama/illacceptanything,ds84182/illacceptanything,paladique/illacceptanything,illacceptanything/illacceptanything,tjhorner/illacceptanything,JeffreyCA/illacceptanything
|
from random import randint
def get_random_number(start=1, end=10):
"""Generates and returns random number between :start: and :end:"""
return randint(start, end)
Revert "Fix python random number generator."
|
def get_random_number(start=1, end=10):
"""https://xkcd.com/221/"""
return 4
|
<commit_before>from random import randint
def get_random_number(start=1, end=10):
"""Generates and returns random number between :start: and :end:"""
return randint(start, end)
<commit_msg>Revert "Fix python random number generator."<commit_after>
|
def get_random_number(start=1, end=10):
"""https://xkcd.com/221/"""
return 4
|
from random import randint
def get_random_number(start=1, end=10):
"""Generates and returns random number between :start: and :end:"""
return randint(start, end)
Revert "Fix python random number generator."def get_random_number(start=1, end=10):
"""https://xkcd.com/221/"""
return 4
|
<commit_before>from random import randint
def get_random_number(start=1, end=10):
"""Generates and returns random number between :start: and :end:"""
return randint(start, end)
<commit_msg>Revert "Fix python random number generator."<commit_after>def get_random_number(start=1, end=10):
"""https://xkcd.com/221/"""
return 4
|
191b6cb9b772efb9c03eff36f7295c59c6dcd026
|
web/examples/extendcube.py
|
web/examples/extendcube.py
|
import argparse
import empaths
import dbconfig
import dbconfighayworth5nm
import numpy as np
import urllib, urllib2
import cStringIO
import sys
import anncube
import anndb
import zindex
def main():
parser = argparse.ArgumentParser(description='Cutout a portion of the database.')
parser.add_argument('id', action="store", type=int )
parser.add_argument('xlow', action="store", type=int )
parser.add_argument('xhigh', action="store", type=int)
parser.add_argument('ylow', action="store", type=int)
parser.add_argument('yhigh', action="store", type=int)
parser.add_argument('zlow', action="store", type=int)
parser.add_argument('zhigh', action="store", type=int)
result = parser.parse_args()
voxlist= []
for k in range (result.zlow,result.zhigh):
for j in range (result.ylow,result.yhigh):
for i in range (result.xlow,result.xhigh):
voxlist.append ( [ i,j,k ] )
WS = False
# Use the Web services
if ( WS == True ):
url = 'http://0.0.0.0:8080/hayworth5nm.annotate/np/overwrite/'
# Encode the voxel list as a pickle
fileobj = cStringIO.StringIO ()
np.save ( fileobj, voxlist )
# Build the post request
req = urllib2.Request(url, fileobj.getvalue())
response = urllib2.urlopen(req)
the_page = response.read()
print the_page
# Insert via object
else:
dbcfg = dbconfighayworth5nm.dbConfigHayworth5nm()
annoDB = anndb.AnnotateDB ( dbcfg )
# Build a grayscale file and display
annoDB.extendEntity ( result.id, voxlist, 'O' )
print "Extended entity with identifier = ", result.id
if __name__ == "__main__":
main()
|
Move to home. Not sure that this builds.
|
Move to home. Not sure that this builds.
|
Python
|
apache-2.0
|
openconnectome/open-connectome,neurodata/ndstore
|
Move to home. Not sure that this builds.
|
import argparse
import empaths
import dbconfig
import dbconfighayworth5nm
import numpy as np
import urllib, urllib2
import cStringIO
import sys
import anncube
import anndb
import zindex
def main():
parser = argparse.ArgumentParser(description='Cutout a portion of the database.')
parser.add_argument('id', action="store", type=int )
parser.add_argument('xlow', action="store", type=int )
parser.add_argument('xhigh', action="store", type=int)
parser.add_argument('ylow', action="store", type=int)
parser.add_argument('yhigh', action="store", type=int)
parser.add_argument('zlow', action="store", type=int)
parser.add_argument('zhigh', action="store", type=int)
result = parser.parse_args()
voxlist= []
for k in range (result.zlow,result.zhigh):
for j in range (result.ylow,result.yhigh):
for i in range (result.xlow,result.xhigh):
voxlist.append ( [ i,j,k ] )
WS = False
# Use the Web services
if ( WS == True ):
url = 'http://0.0.0.0:8080/hayworth5nm.annotate/np/overwrite/'
# Encode the voxel list as a pickle
fileobj = cStringIO.StringIO ()
np.save ( fileobj, voxlist )
# Build the post request
req = urllib2.Request(url, fileobj.getvalue())
response = urllib2.urlopen(req)
the_page = response.read()
print the_page
# Insert via object
else:
dbcfg = dbconfighayworth5nm.dbConfigHayworth5nm()
annoDB = anndb.AnnotateDB ( dbcfg )
# Build a grayscale file and display
annoDB.extendEntity ( result.id, voxlist, 'O' )
print "Extended entity with identifier = ", result.id
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Move to home. Not sure that this builds.<commit_after>
|
import argparse
import empaths
import dbconfig
import dbconfighayworth5nm
import numpy as np
import urllib, urllib2
import cStringIO
import sys
import anncube
import anndb
import zindex
def main():
parser = argparse.ArgumentParser(description='Cutout a portion of the database.')
parser.add_argument('id', action="store", type=int )
parser.add_argument('xlow', action="store", type=int )
parser.add_argument('xhigh', action="store", type=int)
parser.add_argument('ylow', action="store", type=int)
parser.add_argument('yhigh', action="store", type=int)
parser.add_argument('zlow', action="store", type=int)
parser.add_argument('zhigh', action="store", type=int)
result = parser.parse_args()
voxlist= []
for k in range (result.zlow,result.zhigh):
for j in range (result.ylow,result.yhigh):
for i in range (result.xlow,result.xhigh):
voxlist.append ( [ i,j,k ] )
WS = False
# Use the Web services
if ( WS == True ):
url = 'http://0.0.0.0:8080/hayworth5nm.annotate/np/overwrite/'
# Encode the voxel list as a pickle
fileobj = cStringIO.StringIO ()
np.save ( fileobj, voxlist )
# Build the post request
req = urllib2.Request(url, fileobj.getvalue())
response = urllib2.urlopen(req)
the_page = response.read()
print the_page
# Insert via object
else:
dbcfg = dbconfighayworth5nm.dbConfigHayworth5nm()
annoDB = anndb.AnnotateDB ( dbcfg )
# Build a grayscale file and display
annoDB.extendEntity ( result.id, voxlist, 'O' )
print "Extended entity with identifier = ", result.id
if __name__ == "__main__":
main()
|
Move to home. Not sure that this builds.
import argparse
import empaths
import dbconfig
import dbconfighayworth5nm
import numpy as np
import urllib, urllib2
import cStringIO
import sys
import anncube
import anndb
import zindex
def main():
parser = argparse.ArgumentParser(description='Cutout a portion of the database.')
parser.add_argument('id', action="store", type=int )
parser.add_argument('xlow', action="store", type=int )
parser.add_argument('xhigh', action="store", type=int)
parser.add_argument('ylow', action="store", type=int)
parser.add_argument('yhigh', action="store", type=int)
parser.add_argument('zlow', action="store", type=int)
parser.add_argument('zhigh', action="store", type=int)
result = parser.parse_args()
voxlist= []
for k in range (result.zlow,result.zhigh):
for j in range (result.ylow,result.yhigh):
for i in range (result.xlow,result.xhigh):
voxlist.append ( [ i,j,k ] )
WS = False
# Use the Web services
if ( WS == True ):
url = 'http://0.0.0.0:8080/hayworth5nm.annotate/np/overwrite/'
# Encode the voxel list as a pickle
fileobj = cStringIO.StringIO ()
np.save ( fileobj, voxlist )
# Build the post request
req = urllib2.Request(url, fileobj.getvalue())
response = urllib2.urlopen(req)
the_page = response.read()
print the_page
# Insert via object
else:
dbcfg = dbconfighayworth5nm.dbConfigHayworth5nm()
annoDB = anndb.AnnotateDB ( dbcfg )
# Build a grayscale file and display
annoDB.extendEntity ( result.id, voxlist, 'O' )
print "Extended entity with identifier = ", result.id
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Move to home. Not sure that this builds.<commit_after>import argparse
import empaths
import dbconfig
import dbconfighayworth5nm
import numpy as np
import urllib, urllib2
import cStringIO
import sys
import anncube
import anndb
import zindex
def main():
parser = argparse.ArgumentParser(description='Cutout a portion of the database.')
parser.add_argument('id', action="store", type=int )
parser.add_argument('xlow', action="store", type=int )
parser.add_argument('xhigh', action="store", type=int)
parser.add_argument('ylow', action="store", type=int)
parser.add_argument('yhigh', action="store", type=int)
parser.add_argument('zlow', action="store", type=int)
parser.add_argument('zhigh', action="store", type=int)
result = parser.parse_args()
voxlist= []
for k in range (result.zlow,result.zhigh):
for j in range (result.ylow,result.yhigh):
for i in range (result.xlow,result.xhigh):
voxlist.append ( [ i,j,k ] )
WS = False
# Use the Web services
if ( WS == True ):
url = 'http://0.0.0.0:8080/hayworth5nm.annotate/np/overwrite/'
# Encode the voxel list as a pickle
fileobj = cStringIO.StringIO ()
np.save ( fileobj, voxlist )
# Build the post request
req = urllib2.Request(url, fileobj.getvalue())
response = urllib2.urlopen(req)
the_page = response.read()
print the_page
# Insert via object
else:
dbcfg = dbconfighayworth5nm.dbConfigHayworth5nm()
annoDB = anndb.AnnotateDB ( dbcfg )
# Build a grayscale file and display
annoDB.extendEntity ( result.id, voxlist, 'O' )
print "Extended entity with identifier = ", result.id
if __name__ == "__main__":
main()
|
|
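The triple loop that builds voxlist above can be flattened with itertools.product; a sketch with made-up bounds, keeping z as the outermost axis and x as the innermost, as in the original:

import itertools

xlow, xhigh, ylow, yhigh, zlow, zhigh = 0, 2, 0, 2, 0, 2

voxlist = [[i, j, k]
           for k, j, i in itertools.product(range(zlow, zhigh),
                                            range(ylow, yhigh),
                                            range(xlow, xhigh))]
print(len(voxlist))  # 8 voxels for a 2x2x2 region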
13c5b37377d0d937a9675d71f66865548ab1089e
|
sk_nb.py
|
sk_nb.py
|
import numpy as np
import feature_extractor as fe
from sklearn.naive_bayes import MultinomialNB
# X = np.random.randint(5, size=(6, 100))
# y = np.array([1, 2, 3, 4, 5, 6])
(features, targets) = fe.extract_train()
(features_test, targets_test) = fe.extract_test()
# print X.shape
# print y.shape
# print features.shape
# print targets.shape
classifier = MultinomialNB(alpha = 0.1, class_prior = None, fit_prior = True)
classifier.fit(features, targets)
target_test_hat = classifier.predict(features_test)
accuracy = (1.0 * (target_test_hat == targets_test)).sum(0) / targets_test.shape
print accuracy
|
Implement first version of NB classifier
|
Implement first version of NB classifier
|
Python
|
mit
|
trein/quora-classifier
|
Implement first version of NB classifier
|
import numpy as np
import feature_extractor as fe
from sklearn.naive_bayes import MultinomialNB
# X = np.random.randint(5, size=(6, 100))
# y = np.array([1, 2, 3, 4, 5, 6])
(features, targets) = fe.extract_train()
(features_test, targets_test) = fe.extract_test()
# print X.shape
# print y.shape
# print features.shape
# print targets.shape
classifier = MultinomialNB(alpha = 0.1, class_prior = None, fit_prior = True)
classifier.fit(features, targets)
target_test_hat = classifier.predict(features_test)
accuracy = (1.0 * (target_test_hat == targets_test)).sum(0) / targets_test.shape
print accuracy
|
<commit_before><commit_msg>Implement first version of NB classifier<commit_after>
|
import numpy as np
import feature_extractor as fe
from sklearn.naive_bayes import MultinomialNB
# X = np.random.randint(5, size=(6, 100))
# y = np.array([1, 2, 3, 4, 5, 6])
(features, targets) = fe.extract_train()
(features_test, targets_test) = fe.extract_test()
# print X.shape
# print y.shape
# print features.shape
# print targets.shape
classifier = MultinomialNB(alpha = 0.1, class_prior = None, fit_prior = True)
classifier.fit(features, targets)
target_test_hat = classifier.predict(features_test)
accuracy = (1.0 * (target_test_hat == targets_test)).sum(0) / targets_test.shape
print accuracy
|
Implement first version of NB classifier
import numpy as np
import feature_extractor as fe
from sklearn.naive_bayes import MultinomialNB
# X = np.random.randint(5, size=(6, 100))
# y = np.array([1, 2, 3, 4, 5, 6])
(features, targets) = fe.extract_train()
(features_test, targets_test) = fe.extract_test()
# print X.shape
# print y.shape
# print features.shape
# print targets.shape
classifier = MultinomialNB(alpha = 0.1, class_prior = None, fit_prior = True)
classifier.fit(features, targets)
target_test_hat = classifier.predict(features_test)
accuracy = (1.0 * (target_test_hat == targets_test)).sum(0) / targets_test.shape
print accuracy
|
<commit_before><commit_msg>Implement first version of NB classifier<commit_after>import numpy as np
import feature_extractor as fe
from sklearn.naive_bayes import MultinomialNB
# X = np.random.randint(5, size=(6, 100))
# y = np.array([1, 2, 3, 4, 5, 6])
(features, targets) = fe.extract_train()
(features_test, targets_test) = fe.extract_test()
# print X.shape
# print y.shape
# print features.shape
# print targets.shape
classifier = MultinomialNB(alpha = 0.1, class_prior = None, fit_prior = True)
classifier.fit(features, targets)
target_test_hat = classifier.predict(features_test)
accuracy = (1.0 * (target_test_hat == targets_test)).sum(0) / targets_test.shape
print accuracy
|
|
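The accuracy line above divides an element count by a shape tuple; an equivalent and clearer sketch uses scikit-learn's accuracy_score, with toy arrays standing in for the extracted features:

import numpy as np
from sklearn.metrics import accuracy_score

# Toy stand-ins for targets_test and target_test_hat.
y_true = np.array([1, 0, 1, 1])
y_pred = np.array([1, 0, 0, 1])

print(accuracy_score(y_true, y_pred))  # 0.75
print((y_true == y_pred).mean())       # same value in plain numpy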
0e2e6865c1ad7fb0a74ae673c405d1c6b4ef1b36
|
tasks.py
|
tasks.py
|
import os
from invoke import task
@task
def docs(ctx, version=None):
targets = ["latest"]
if version:
targets.append(version)
for target in targets:
try:
os.makedirs("docs/{}".format(target))
except:
pass
ctx.run("python -m robot.libdoc -f html src/SQLAlchemyLibrary docs/{}/SQLAlchemyLibrary.html".format(target))
|
Add an Invoke task for generating documentation.
|
Add an Invoke task for generating documentation.
|
Python
|
apache-2.0
|
edbrannin/Robotframework-SQLAlchemy-Library
|
Add an Invoke task for generating documentation.
|
import os
from invoke import task
@task
def docs(ctx, version=None):
targets = ["latest"]
if version:
targets.append(version)
for target in targets:
try:
os.makedirs("docs/{}".format(target))
except:
pass
ctx.run("python -m robot.libdoc -f html src/SQLAlchemyLibrary docs/{}/SQLAlchemyLibrary.html".format(target))
|
<commit_before><commit_msg>Add an Invoke task for generating documentation.<commit_after>
|
import os
from invoke import task
@task
def docs(ctx, version=None):
targets = ["latest"]
if version:
targets.append(version)
for target in targets:
try:
os.makedirs("docs/{}".format(target))
except:
pass
ctx.run("python -m robot.libdoc -f html src/SQLAlchemyLibrary docs/{}/SQLAlchemyLibrary.html".format(target))
|
Add an Invoke task for generating documentation.import os
from invoke import task
@task
def docs(ctx, version=None):
targets = ["latest"]
if version:
targets.append(version)
for target in targets:
try:
os.makedirs("docs/{}".format(target))
except:
pass
ctx.run("python -m robot.libdoc -f html src/SQLAlchemyLibrary docs/{}/SQLAlchemyLibrary.html".format(target))
|
<commit_before><commit_msg>Add an Invoke task for generating documentation.<commit_after>import os
from invoke import task
@task
def docs(ctx, version=None):
targets = ["latest"]
if version:
targets.append(version)
for target in targets:
try:
os.makedirs("docs/{}".format(target))
except:
pass
ctx.run("python -m robot.libdoc -f html src/SQLAlchemyLibrary docs/{}/SQLAlchemyLibrary.html".format(target))
|
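For context, a task defined this way is normally run from the shell as `invoke docs` (or `invoke docs --version=1.2.3` to also populate a versioned directory). It can also be exercised directly in Python, since Invoke task objects are callable (a minimal sketch, assuming Invoke's Context class; the version string is illustrative):

from invoke.context import Context
# Builds docs/latest and docs/1.2.3, running libdoc once per target
docs(Context(), version="1.2.3")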
|
878d5e9c1ebaa20c9eb07c865293dd500924f321
|
IPython/frontend.py
|
IPython/frontend.py
|
import sys
import types
class ShimModule(types.ModuleType):
def __getattribute__(self, key):
exec 'from IPython import %s' % key
return eval(key)
sys.modules['IPython.frontend'] = ShimModule('frontend')
|
Add shim module to allow flattening of namespace.
|
Add shim module to allow flattening of namespace.
|
Python
|
bsd-3-clause
|
ipython/ipython,ipython/ipython
|
Add shim module to allow flattening of namespace.
|
import sys
import types
class ShimModule(types.ModuleType):
def __getattribute__(self, key):
exec 'from IPython import %s' % key
return eval(key)
sys.modules['IPython.frontend'] = ShimModule('frontend')
|
<commit_before><commit_msg>Add shim module to allow flattening of namespace.<commit_after>
|
import sys
import types
class ShimModule(types.ModuleType):
def __getattribute__(self, key):
exec 'from IPython import %s' % key
return eval(key)
sys.modules['IPython.frontend'] = ShimModule('frontend')
|
Add shim module to allow flattening of namespace.import sys
import types
class ShimModule(types.ModuleType):
def __getattribute__(self, key):
exec 'from IPython import %s' % key
return eval(key)
sys.modules['IPython.frontend'] = ShimModule('frontend')
|
<commit_before><commit_msg>Add shim module to allow flattening of namespace.<commit_after>import sys
import types
class ShimModule(types.ModuleType):
def __getattribute__(self, key):
exec 'from IPython import %s' % key
return eval(key)
sys.modules['IPython.frontend'] = ShimModule('frontend')
|
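The practical effect of registering the shim: importing or touching attributes on `IPython.frontend` transparently resolves against the flattened top-level `IPython` namespace. Roughly (Python 2, to match the `exec` statement above; the attribute name is illustrative):

import IPython.frontend
# Triggers ShimModule.__getattribute__, i.e. "from IPython import terminal"
terminal = IPython.frontend.terminal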
|
aee1d362f52b2f3a2649669468552c2899e43bf8
|
openmmtools/data/alanine-dipeptide-explicit/generate-pdb.py
|
openmmtools/data/alanine-dipeptide-explicit/generate-pdb.py
|
"""
Generate PDB file containing periodic box data.
"""
from simtk import openmm, unit
from simtk.openmm import app
prmtop_filename = 'alanine-dipeptide.prmtop'
crd_filename = 'alanine-dipeptide.crd'
pdb_filename = 'alanine-dipeptide.pdb'
# Read topology and positions.
prmtop = app.AmberPrmtopFile(prmtop_filename)
inpcrd = app.AmberInpcrdFile(crd_filename)
# Write PDB.
outfile = open(pdb_filename, 'w')
app.PDBFile.writeFile(prmtop.topology, inpcrd.positions, file=outfile, keepIds=True)
outfile.close()
|
Add script to generate proper alanine-dipeptide explicit solvent PDB file with CRYST record.
|
Add script to generate proper alanine-dipeptide explicit solvent PDB file with CRYST record.
|
Python
|
mit
|
choderalab/openmmtools,choderalab/openmmtools
|
Add script to generate proper alanine-dipeptide explicit solvent PDB file with CRYST record.
|
"""
Generate PDB file containing periodic box data.
"""
from simtk import openmm, unit
from simtk.openmm import app
prmtop_filename = 'alanine-dipeptide.prmtop'
crd_filename = 'alanine-dipeptide.crd'
pdb_filename = 'alanine-dipeptide.pdb'
# Read topology and positions.
prmtop = app.AmberPrmtopFile(prmtop_filename)
inpcrd = app.AmberInpcrdFile(crd_filename)
# Write PDB.
outfile = open(pdb_filename, 'w')
app.PDBFile.writeFile(prmtop.topology, inpcrd.positions, file=outfile, keepIds=True)
outfile.close()
|
<commit_before><commit_msg>Add script to generate proper alanine-dipeptide explicit solvent PDB file with CRYST record.<commit_after>
|
"""
Generate PDB file containing periodic box data.
"""
from simtk import openmm, unit
from simtk.openmm import app
prmtop_filename = 'alanine-dipeptide.prmtop'
crd_filename = 'alanine-dipeptide.crd'
pdb_filename = 'alanine-dipeptide.pdb'
# Read topology and positions.
prmtop = app.AmberPrmtopFile(prmtop_filename)
inpcrd = app.AmberInpcrdFile(crd_filename)
# Write PDB.
outfile = open(pdb_filename, 'w')
app.PDBFile.writeFile(prmtop.topology, inpcrd.positions, file=outfile, keepIds=True)
outfile.close()
|
Add script to generate proper alanine-dipeptide explicit solvent PDB file with CRYST record."""
Generate PDB file containing periodic box data.
"""
from simtk import openmm, unit
from simtk.openmm import app
prmtop_filename = 'alanine-dipeptide.prmtop'
crd_filename = 'alanine-dipeptide.crd'
pdb_filename = 'alanine-dipeptide.pdb'
# Read topology and positions.
prmtop = app.AmberPrmtopFile(prmtop_filename)
inpcrd = app.AmberInpcrdFile(crd_filename)
# Write PDB.
outfile = open(pdb_filename, 'w')
app.PDBFile.writeFile(prmtop.topology, inpcrd.positions, file=outfile, keepIds=True)
outfile.close()
|
<commit_before><commit_msg>Add script to generate proper alanine-dipeptide explicit solvent PDB file with CRYST record.<commit_after>"""
Generate PDB file containing periodic box data.
"""
from simtk import openmm, unit
from simtk.openmm import app
prmtop_filename = 'alanine-dipeptide.prmtop'
crd_filename = 'alanine-dipeptide.crd'
pdb_filename = 'alanine-dipeptide.pdb'
# Read topology and positions.
prmtop = app.AmberPrmtopFile(prmtop_filename)
inpcrd = app.AmberInpcrdFile(crd_filename)
# Write PDB.
outfile = open(pdb_filename, 'w')
app.PDBFile.writeFile(prmtop.topology, inpcrd.positions, file=outfile, keepIds=True)
outfile.close()
|
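Because the inpcrd carries periodic box information, the written PDB gains the CRYST1 record the commit message refers to (keepIds=True additionally preserves the original residue and chain IDs). A quick sanity check on the output file (a sketch):

# Confirm the CRYST1 record made it into the PDB header
with open('alanine-dipeptide.pdb') as pdb:
    assert any(line.startswith('CRYST1') for line in pdb)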
|
5d3cf8074bc50e5e269fab047d64db8cc60d16e6
|
netbox/users/migrations/0009_replicate_permissions.py
|
netbox/users/migrations/0009_replicate_permissions.py
|
from django.db import migrations
ACTIONS = ['view', 'add', 'change', 'delete']
def replicate_permissions(apps, schema_editor):
"""
Replicate all Permission assignments as ObjectPermissions.
"""
Permission = apps.get_model('auth', 'Permission')
ObjectPermission = apps.get_model('users', 'ObjectPermission')
# TODO: Optimize this iteration so that ObjectPermissions with identical sets of users and groups
# are combined into a single ObjectPermission instance.
for perm in Permission.objects.all():
print(f'Replicating permission {perm.codename}')
action, model_name = perm.codename.split('_')
if perm.group_set.exists() or perm.user_set.exists():
obj_perm = ObjectPermission(actions=[action])
obj_perm.save()
obj_perm.content_types.add(perm.content_type)
if perm.group_set.exists():
obj_perm.groups.add(*list(perm.group_set.all()))
if perm.user_set.exists():
obj_perm.users.add(*list(perm.user_set.all()))
class Migration(migrations.Migration):
dependencies = [
('users', '0008_objectpermission'),
]
operations = [
migrations.RunPython(
code=replicate_permissions,
reverse_code=migrations.RunPython.noop
)
]
|
Add migration for replicating legacy permissions to ObjectPermissions
|
Add migration for replicating legacy permissions to ObjectPermissions
|
Python
|
apache-2.0
|
digitalocean/netbox,digitalocean/netbox,digitalocean/netbox,digitalocean/netbox
|
Add migration for replicating legacy permissions to ObjectPermissions
|
from django.db import migrations
ACTIONS = ['view', 'add', 'change', 'delete']
def replicate_permissions(apps, schema_editor):
"""
Replicate all Permission assignments as ObjectPermissions.
"""
Permission = apps.get_model('auth', 'Permission')
ObjectPermission = apps.get_model('users', 'ObjectPermission')
# TODO: Optimize this iteration so that ObjectPermissions with identical sets of users and groups
# are combined into a single ObjectPermission instance.
for perm in Permission.objects.all():
print(f'Replicating permission {perm.codename}')
action, model_name = perm.codename.split('_')
if perm.group_set.exists() or perm.user_set.exists():
obj_perm = ObjectPermission(actions=[action])
obj_perm.save()
obj_perm.content_types.add(perm.content_type)
if perm.group_set.exists():
obj_perm.groups.add(*list(perm.group_set.all()))
if perm.user_set.exists():
obj_perm.users.add(*list(perm.user_set.all()))
class Migration(migrations.Migration):
dependencies = [
('users', '0008_objectpermission'),
]
operations = [
migrations.RunPython(
code=replicate_permissions,
reverse_code=migrations.RunPython.noop
)
]
|
<commit_before><commit_msg>Add migration for replicating legacy permissions to ObjectPermissions<commit_after>
|
from django.db import migrations
ACTIONS = ['view', 'add', 'change', 'delete']
def replicate_permissions(apps, schema_editor):
"""
Replicate all Permission assignments as ObjectPermissions.
"""
Permission = apps.get_model('auth', 'Permission')
ObjectPermission = apps.get_model('users', 'ObjectPermission')
# TODO: Optimize this iteration so that ObjectPermissions with identical sets of users and groups
# are combined into a single ObjectPermission instance.
for perm in Permission.objects.all():
print(f'Replicating permission {perm.codename}')
action, model_name = perm.codename.split('_')
if perm.group_set.exists() or perm.user_set.exists():
obj_perm = ObjectPermission(actions=[action])
obj_perm.save()
obj_perm.content_types.add(perm.content_type)
if perm.group_set.exists():
obj_perm.groups.add(*list(perm.group_set.all()))
if perm.user_set.exists():
obj_perm.users.add(*list(perm.user_set.all()))
class Migration(migrations.Migration):
dependencies = [
('users', '0008_objectpermission'),
]
operations = [
migrations.RunPython(
code=replicate_permissions,
reverse_code=migrations.RunPython.noop
)
]
|
Add migration for replicating legacy permissions to ObjectPermissionsfrom django.db import migrations
ACTIONS = ['view', 'add', 'change', 'delete']
def replicate_permissions(apps, schema_editor):
"""
Replicate all Permission assignments as ObjectPermissions.
"""
Permission = apps.get_model('auth', 'Permission')
ObjectPermission = apps.get_model('users', 'ObjectPermission')
# TODO: Optimize this iteration so that ObjectPermissions with identical sets of users and groups
# are combined into a single ObjectPermission instance.
for perm in Permission.objects.all():
print(f'Replicating permission {perm.codename}')
action, model_name = perm.codename.split('_')
if perm.group_set.exists() or perm.user_set.exists():
obj_perm = ObjectPermission(actions=[action])
obj_perm.save()
obj_perm.content_types.add(perm.content_type)
if perm.group_set.exists():
obj_perm.groups.add(*list(perm.group_set.all()))
if perm.user_set.exists():
obj_perm.users.add(*list(perm.user_set.all()))
class Migration(migrations.Migration):
dependencies = [
('users', '0008_objectpermission'),
]
operations = [
migrations.RunPython(
code=replicate_permissions,
reverse_code=migrations.RunPython.noop
)
]
|
<commit_before><commit_msg>Add migration for replicating legacy permissions to ObjectPermissions<commit_after>from django.db import migrations
ACTIONS = ['view', 'add', 'change', 'delete']
def replicate_permissions(apps, schema_editor):
"""
Replicate all Permission assignments as ObjectPermissions.
"""
Permission = apps.get_model('auth', 'Permission')
ObjectPermission = apps.get_model('users', 'ObjectPermission')
# TODO: Optimize this iteration so that ObjectPermissions with identical sets of users and groups
# are combined into a single ObjectPermission instance.
for perm in Permission.objects.all():
print(f'Replicating permission {perm.codename}')
action, model_name = perm.codename.split('_')
if perm.group_set.exists() or perm.user_set.exists():
obj_perm = ObjectPermission(actions=[action])
obj_perm.save()
obj_perm.content_types.add(perm.content_type)
if perm.group_set.exists():
obj_perm.groups.add(*list(perm.group_set.all()))
if perm.user_set.exists():
obj_perm.users.add(*list(perm.user_set.all()))
class Migration(migrations.Migration):
dependencies = [
('users', '0008_objectpermission'),
]
operations = [
migrations.RunPython(
code=replicate_permissions,
reverse_code=migrations.RunPython.noop
)
]
|
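One way to tackle the TODO above is to bucket permissions by their exact (users, groups) assignment before creating ObjectPermissions, so identical assignments collapse into a single object. A rough sketch of that grouping step (hypothetical helper, not part of the migration):

from collections import defaultdict

def group_by_assignment(permissions):
    # Permissions sharing the same users and groups end up in one bucket
    buckets = defaultdict(list)
    for perm in permissions:
        key = (
            frozenset(perm.user_set.values_list('pk', flat=True)),
            frozenset(perm.group_set.values_list('pk', flat=True)),
        )
        buckets[key].append(perm)
    return buckets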
|
255e8d5b32b57a851be9254052bfb9279f80f76c
|
alerts/ssh_password_auth_violation.py
|
alerts/ssh_password_auth_violation.py
|
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2017 Mozilla Corporation
from lib.alerttask import AlertTask
from query_models import SearchQuery, TermMatch, PhraseMatch
class AlertSSHPasswordAuthViolation(AlertTask):
def main(self):
search_query = SearchQuery(hours=4)
search_query.add_must([
TermMatch('_type', 'event'),
PhraseMatch('tags', 'ssh_password_auth_policy_violation'),
])
self.filtersManual(search_query)
# Search aggregations on field 'details.destinationipaddress', keep X samples of
# events at most
self.searchEventsAggregated('details.destinationipaddress', samplesLimit=100)
# alert when >= X matching events in an aggregation
self.walkAggregations(threshold=1)
# Set alert properties
def onAggregation(self, aggreg):
# aggreg['count']: number of items in the aggregation, ex: number of failed login attempts
# aggreg['value']: value of the aggregation field, ex: toto@example.com
# aggreg['events']: list of events in the aggregation
category = 'ssh_password_auth_policy_violation'
tags = ['ssh_password_auth_policy_violation']
severity = 'WARNING'
summary = ('SSH password authentication allowed on {0} ('.format(aggreg['value']))
for event in aggreg['events'][:5]:
summary += str(event['_source']['details']['destinationport']) + ' '
summary += ')'
# Create the alert object based on these properties
return self.createAlertDict(summary, category, tags, aggreg['events'], severity)
|
Add ssh password auth violation alert
|
Add ssh password auth violation alert
|
Python
|
mpl-2.0
|
mpurzynski/MozDef,mpurzynski/MozDef,gdestuynder/MozDef,Phrozyn/MozDef,gdestuynder/MozDef,jeffbryner/MozDef,mozilla/MozDef,mozilla/MozDef,gdestuynder/MozDef,Phrozyn/MozDef,mozilla/MozDef,mpurzynski/MozDef,Phrozyn/MozDef,Phrozyn/MozDef,mozilla/MozDef,gdestuynder/MozDef,jeffbryner/MozDef,mpurzynski/MozDef,jeffbryner/MozDef,jeffbryner/MozDef
|
Add ssh password auth violation alert
|
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2017 Mozilla Corporation
from lib.alerttask import AlertTask
from query_models import SearchQuery, TermMatch, PhraseMatch
class AlertSSHPasswordAuthViolation(AlertTask):
def main(self):
search_query = SearchQuery(hours=4)
search_query.add_must([
TermMatch('_type', 'event'),
PhraseMatch('tags', 'ssh_password_auth_policy_violation'),
])
self.filtersManual(search_query)
# Search aggregations on field 'details.destinationipaddress', keep X samples of
# events at most
self.searchEventsAggregated('details.destinationipaddress', samplesLimit=100)
# alert when >= X matching events in an aggregation
self.walkAggregations(threshold=1)
# Set alert properties
def onAggregation(self, aggreg):
# aggreg['count']: number of items in the aggregation, ex: number of failed login attempts
# aggreg['value']: value of the aggregation field, ex: toto@example.com
# aggreg['events']: list of events in the aggregation
category = 'ssh_password_auth_policy_violation'
tags = ['ssh_password_auth_policy_violation']
severity = 'WARNING'
summary = ('SSH password authentication allowed on {0} ('.format(aggreg['value']))
for event in aggreg['events'][:5]:
summary += str(event['_source']['details']['destinationport']) + ' '
summary += ')'
# Create the alert object based on these properties
return self.createAlertDict(summary, category, tags, aggreg['events'], severity)
|
<commit_before><commit_msg>Add ssh password auth violation alert<commit_after>
|
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2017 Mozilla Corporation
from lib.alerttask import AlertTask
from query_models import SearchQuery, TermMatch, PhraseMatch
class AlertSSHPasswordAuthViolation(AlertTask):
def main(self):
search_query = SearchQuery(hours=4)
search_query.add_must([
TermMatch('_type', 'event'),
PhraseMatch('tags', 'ssh_password_auth_policy_violation'),
])
self.filtersManual(search_query)
# Search aggregations on field 'details.destinationipaddress', keep X samples of
# events at most
self.searchEventsAggregated('details.destinationipaddress', samplesLimit=100)
# alert when >= X matching events in an aggregation
self.walkAggregations(threshold=1)
# Set alert properties
def onAggregation(self, aggreg):
# aggreg['count']: number of items in the aggregation, ex: number of failed login attempts
# aggreg['value']: value of the aggregation field, ex: toto@example.com
# aggreg['events']: list of events in the aggregation
category = 'ssh_password_auth_policy_violation'
tags = ['ssh_password_auth_policy_violation']
severity = 'WARNING'
summary = ('SSH password authentication allowed on {0} ('.format(aggreg['value']))
for event in aggreg['events'][:5]:
summary += str(event['_source']['details']['destinationport']) + ' '
summary += ')'
# Create the alert object based on these properties
return self.createAlertDict(summary, category, tags, aggreg['events'], severity)
|
Add ssh password auth violation alert#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2017 Mozilla Corporation
from lib.alerttask import AlertTask
from query_models import SearchQuery, TermMatch, PhraseMatch
class AlertSSHPasswordAuthViolation(AlertTask):
def main(self):
search_query = SearchQuery(hours=4)
search_query.add_must([
TermMatch('_type', 'event'),
PhraseMatch('tags', 'ssh_password_auth_policy_violation'),
])
self.filtersManual(search_query)
# Search aggregations on field 'details.destinationipaddress', keep X samples of
# events at most
self.searchEventsAggregated('details.destinationipaddress', samplesLimit=100)
# alert when >= X matching events in an aggregation
self.walkAggregations(threshold=1)
# Set alert properties
def onAggregation(self, aggreg):
# aggreg['count']: number of items in the aggregation, ex: number of failed login attempts
# aggreg['value']: value of the aggregation field, ex: toto@example.com
# aggreg['events']: list of events in the aggregation
category = 'ssh_password_auth_policy_violation'
tags = ['ssh_password_auth_policy_violation']
severity = 'WARNING'
summary = ('SSH password authentication allowed on {0} ('.format(aggreg['value']))
for event in aggreg['events'][:5]:
summary += str(event['_source']['details']['destinationport']) + ' '
summary += ')'
# Create the alert object based on these properties
return self.createAlertDict(summary, category, tags, aggreg['events'], severity)
|
<commit_before><commit_msg>Add ssh password auth violation alert<commit_after>#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2017 Mozilla Corporation
from lib.alerttask import AlertTask
from query_models import SearchQuery, TermMatch, PhraseMatch
class AlertSSHPasswordAuthViolation(AlertTask):
def main(self):
search_query = SearchQuery(hours=4)
search_query.add_must([
TermMatch('_type', 'event'),
PhraseMatch('tags', 'ssh_password_auth_policy_violation'),
])
self.filtersManual(search_query)
# Search aggregations on field 'details.destinationipaddress', keep X samples of
# events at most
self.searchEventsAggregated('details.destinationipaddress', samplesLimit=100)
# alert when >= X matching events in an aggregation
self.walkAggregations(threshold=1)
# Set alert properties
def onAggregation(self, aggreg):
# aggreg['count']: number of items in the aggregation, ex: number of failed login attempts
# aggreg['value']: value of the aggregation field, ex: toto@example.com
# aggreg['events']: list of events in the aggregation
category = 'ssh_password_auth_policy_violation'
tags = ['ssh_password_auth_policy_violation']
severity = 'WARNING'
summary = ('SSH password authentication allowed on {0} ('.format(aggreg['value']))
for event in aggreg['events'][:5]:
summary += str(event['_source']['details']['destinationport']) + ' '
summary += ')'
# Create the alert object based on these properties
return self.createAlertDict(summary, category, tags, aggreg['events'], severity)
|
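Given the aggregation field and the port loop above, a fired alert reads like 'SSH password authentication allowed on 10.0.0.5 (22 2222 )'. The summary formatting can be reproduced in isolation with fake aggregation data (illustrative values only):

aggreg = {
    'value': '10.0.0.5',
    'events': [{'_source': {'details': {'destinationport': p}}} for p in (22, 2222)],
}
summary = 'SSH password authentication allowed on {0} ('.format(aggreg['value'])
for event in aggreg['events'][:5]:
    summary += str(event['_source']['details']['destinationport']) + ' '
summary += ')'
# -> SSH password authentication allowed on 10.0.0.5 (22 2222 )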
|
f18ca1330a435765a7b06562522c80723dc24402
|
admin/update_sparkle_xml.py
|
admin/update_sparkle_xml.py
|
#!/usr/bin/env python
# Updates a sparkle appcast xml file with a new release
from optparse import OptionParser
from xml.dom.minidom import parse
from datetime import datetime
import sys
parser = OptionParser(r"""
%prog -f xmlfile -s "signature" -v X.X.X -l <bytelength>""")
parser.add_option("-f", "--file", dest="filename",
help="XML appcast file to update", metavar="FILE")
parser.add_option("-s", "--sig", dest="sig",
help="Signature from Sparkle signing process")
parser.add_option("-v", "--version", dest="version",
help="New version to update the appcast file with")
parser.add_option("-l", "--length", dest="length",
help="Length in bytes of .tar.bz2 file")
parser.add_option("-w", "--windows", dest="windows",
help="Write a windows sparkle formatted XML file",
action="store_true", default=False)
(options, _) = parser.parse_args()
if not options.windows and (options.filename is None or
options.sig is None or options.version is None or
options.length is None):
parser.error("Please pass all four required arguments")
elif options.windows and (options.filename is None or options.version is None):
parser.error("Please provide an XML filename and version string")
try:
xml = parse(options.filename)
except IOError:
print "Failed to parse filename: %s" % options.filename
sys.exit(1)
pubDate = datetime.now().strftime("%a, %d %b %Y %H:%M:%S %z")
version = "Version %s" % options.version
if options.windows:
path = "dist/apps/win/zulip-%s.exe" % (options.version,)
else:
path = "dist/apps/mac/Zulip-%s.tar.bz2" % (options.version,)
url = "https://zulip.com/%s" % (path,)
channel = xml.getElementsByTagName('channel')[0]
latest = channel.getElementsByTagName('item')[0]
newItem = latest.cloneNode(True)
newItem.getElementsByTagName('title')[0].firstChild.replaceWholeText(version)
newItem.getElementsByTagName('pubDate')[0].firstChild.replaceWholeText(pubDate)
newItem.getElementsByTagName('enclosure')[0].setAttribute("url", url)
newItem.getElementsByTagName('enclosure')[0].setAttribute("sparkle:version",
options.version)
if not options.windows:
newItem.getElementsByTagName('enclosure')[0].setAttribute("length",
options.length)
newItem.getElementsByTagName('enclosure')[0].setAttribute("sparkle:dsaSignature",
options.sig)
channel.insertBefore(newItem, latest)
outfile = open(options.filename, 'w')
xml.writexml(outfile)
outfile.close()
|
Add a script to update our sparkle XML files
|
Add a script to update our sparkle XML files
|
Python
|
apache-2.0
|
zofuthan/zulip-desktop,zofuthan/zulip-desktop,zofuthan/zulip-desktop,zofuthan/zulip-desktop,zofuthan/zulip-desktop,zofuthan/zulip-desktop,zofuthan/zulip-desktop
|
Add a script to update our sparkle XML files
|
#!/usr/bin/env python
# Updates a sparkle appcast xml file with a new release
from optparse import OptionParser
from xml.dom.minidom import parse
from datetime import datetime
import sys
parser = OptionParser(r"""
%prog -f xmlfile -s "signature" -v X.X.X -l <bytelength>""")
parser.add_option("-f", "--file", dest="filename",
help="XML appcast file to update", metavar="FILE")
parser.add_option("-s", "--sig", dest="sig",
help="Signature from Sparkle signing process")
parser.add_option("-v", "--version", dest="version",
help="New version to update the appcast file with")
parser.add_option("-l", "--length", dest="length",
help="Length in bytes of .tar.bz2 file")
parser.add_option("-w", "--windows", dest="windows",
help="Write a windows sparkle formatted XML file",
action="store_true", default=False)
(options, _) = parser.parse_args()
if not options.windows and (options.filename is None or
options.sig is None or options.version is None or
options.length is None):
parser.error("Please pass all four required arguments")
elif options.windows and (options.filename is None or options.version is None):
parser.error("Please provide an XML filename and version string")
try:
xml = parse(options.filename)
except IOError:
print "Failed to parse filename: %s" % options.filename
sys.exit(1)
pubDate = datetime.now().strftime("%a, %d %b %Y %H:%M:%S %z")
version = "Version %s" % options.version
if options.windows:
path = "dist/apps/win/zulip-%s.exe" % (options.version,)
else:
path = "dist/apps/mac/Zulip-%s.tar.bz2" % (options.version,)
url = "https://zulip.com/%s" % (path,)
channel = xml.getElementsByTagName('channel')[0]
latest = channel.getElementsByTagName('item')[0]
newItem = latest.cloneNode(True)
newItem.getElementsByTagName('title')[0].firstChild.replaceWholeText(version)
newItem.getElementsByTagName('pubDate')[0].firstChild.replaceWholeText(pubDate)
newItem.getElementsByTagName('enclosure')[0].setAttribute("url", url)
newItem.getElementsByTagName('enclosure')[0].setAttribute("sparkle:version",
options.version)
if not options.windows:
newItem.getElementsByTagName('enclosure')[0].setAttribute("length",
options.length)
newItem.getElementsByTagName('enclosure')[0].setAttribute("sparkle:dsaSignature",
options.sig)
channel.insertBefore(newItem, latest)
outfile = open(options.filename, 'w')
xml.writexml(outfile)
outfile.close()
|
<commit_before><commit_msg>Add a script to update our sparkle XML files<commit_after>
|
#!/usr/bin/env python
# Updates a sparkle appcast xml file with a new release
from optparse import OptionParser
from xml.dom.minidom import parse
from datetime import datetime
import sys
parser = OptionParser(r"""
%prog -f xmlfile -s "signature" -v X.X.X -l <bytelength>""")
parser.add_option("-f", "--file", dest="filename",
help="XML appcast file to update", metavar="FILE")
parser.add_option("-s", "--sig", dest="sig",
help="Signature from Sparkle signing process")
parser.add_option("-v", "--version", dest="version",
help="New version to update the appcast file with")
parser.add_option("-l", "--length", dest="length",
help="Length in bytes of .tar.bz2 file")
parser.add_option("-w", "--windows", dest="windows",
help="Write a windows sparkle formatted XML file",
action="store_true", default=False)
(options, _) = parser.parse_args()
if not options.windows and (options.filename is None or
options.sig is None or options.version is None or
options.length is None):
parser.error("Please pass all four required arguments")
elif options.windows and (options.filename is None or options.version is None):
parser.error("Please provide an XML filename and version string")
try:
xml = parse(options.filename)
except IOError:
print "Failed to parse filename: %s" % options.filename
sys.exit(1)
pubDate = datetime.now().strftime("%a, %d %b %Y %H:%M:%S %z")
version = "Version %s" % options.version
if options.windows:
path = "dist/apps/win/zulip-%s.exe" % (options.version,)
else:
path = "dist/apps/mac/Zulip-%s.tar.bz2" % (options.version,)
url = "https://zulip.com/%s" % (path,)
channel = xml.getElementsByTagName('channel')[0]
latest = channel.getElementsByTagName('item')[0]
newItem = latest.cloneNode(True)
newItem.getElementsByTagName('title')[0].firstChild.replaceWholeText(version)
newItem.getElementsByTagName('pubDate')[0].firstChild.replaceWholeText(pubDate)
newItem.getElementsByTagName('enclosure')[0].setAttribute("url", url)
newItem.getElementsByTagName('enclosure')[0].setAttribute("sparkle:version",
options.version)
if not options.windows:
newItem.getElementsByTagName('enclosure')[0].setAttribute("length",
options.length)
newItem.getElementsByTagName('enclosure')[0].setAttribute("sparkle:dsaSignature",
options.sig)
channel.insertBefore(newItem, latest)
outfile = open(options.filename, 'w')
xml.writexml(outfile)
outfile.close()
|
Add a script to update our sparkle XML files#!/usr/bin/env python
# Updates a sparkle appcast xml file with a new release
from optparse import OptionParser
from xml.dom.minidom import parse
from datetime import datetime
import sys
parser = OptionParser(r"""
%prog -f xmlfile -s "signature" -v X.X.X -l <bytelength>""")
parser.add_option("-f", "--file", dest="filename",
help="XML appcast file to update", metavar="FILE")
parser.add_option("-s", "--sig", dest="sig",
help="Signature from Sparkle signing process")
parser.add_option("-v", "--version", dest="version",
help="New version to update the appcast file with")
parser.add_option("-l", "--length", dest="length",
help="Length in bytes of .tar.bz2 file")
parser.add_option("-w", "--windows", dest="windows",
help="Write a windows sparkle formatted XML file",
action="store_true", default=False)
(options, _) = parser.parse_args()
if not options.windows and (options.filename is None or
options.sig is None or options.version is None or
options.length is None):
parser.error("Please pass all four required arguments")
elif options.windows and (options.filename is None or options.version is None):
parser.error("Please provide an XML filename and version string")
try:
xml = parse(options.filename)
except IOError:
print "Failed to parse filename: %s" % options.filename
sys.exit(1)
pubDate = datetime.now().strftime("%a, %d %b %Y %H:%M:%S %z")
version = "Version %s" % options.version
if options.windows:
path = "dist/apps/win/zulip-%s.exe" % (options.version,)
else:
path = "dist/apps/mac/Zulip-%s.tar.bz2" % (options.version,)
url = "https://zulip.com/%s" % (path,)
channel = xml.getElementsByTagName('channel')[0]
latest = channel.getElementsByTagName('item')[0]
newItem = latest.cloneNode(True)
newItem.getElementsByTagName('title')[0].firstChild.replaceWholeText(version)
newItem.getElementsByTagName('pubDate')[0].firstChild.replaceWholeText(pubDate)
newItem.getElementsByTagName('enclosure')[0].setAttribute("url", url)
newItem.getElementsByTagName('enclosure')[0].setAttribute("sparkle:version",
options.version)
if not options.windows:
newItem.getElementsByTagName('enclosure')[0].setAttribute("length",
options.length)
newItem.getElementsByTagName('enclosure')[0].setAttribute("sparkle:dsaSignature",
options.sig)
channel.insertBefore(newItem, latest)
outfile = open(options.filename, 'w')
xml.writexml(outfile)
outfile.close()
|
<commit_before><commit_msg>Add a script to update our sparkle XML files<commit_after>#!/usr/bin/env python
# Updates a sparkle appcast xml file with a new release
from optparse import OptionParser
from xml.dom.minidom import parse
from datetime import datetime
import sys
parser = OptionParser(r"""
%prog -f xmlfile -s "signature" -v X.X.X -l <bytelength>""")
parser.add_option("-f", "--file", dest="filename",
help="XML appcast file to update", metavar="FILE")
parser.add_option("-s", "--sig", dest="sig",
help="Signature from Sparkle signing process")
parser.add_option("-v", "--version", dest="version",
help="New version to update the appcast file with")
parser.add_option("-l", "--length", dest="length",
help="Length in bytes of .tar.bz2 file")
parser.add_option("-w", "--windows", dest="windows",
help="Write a windows sparkle formatted XML file",
action="store_true", default=False)
(options, _) = parser.parse_args()
if not options.windows and (options.filename is None or
options.sig is None or options.version is None or
options.length is None):
parser.error("Please pass all four required arguments")
elif options.windows and (options.filename is None or options.version is None):
parser.error("Please provide an XML filename and version string")
try:
xml = parse(options.filename)
except IOError:
print "Failed to parse filename: %s" % options.filename
sys.exit(1)
pubDate = datetime.now().strftime("%a, %d %b %Y %H:%M:%S %z")
version = "Version %s" % options.version
if options.windows:
path = "dist/apps/win/zulip-%s.exe" % (options.version,)
else:
path = "dist/apps/mac/Zulip-%s.tar.bz2" % (options.version,)
url = "https://zulip.com/%s" % (path,)
channel = xml.getElementsByTagName('channel')[0]
latest = channel.getElementsByTagName('item')[0]
newItem = latest.cloneNode(True)
newItem.getElementsByTagName('title')[0].firstChild.replaceWholeText(version)
newItem.getElementsByTagName('pubDate')[0].firstChild.replaceWholeText(pubDate)
newItem.getElementsByTagName('enclosure')[0].setAttribute("url", url)
newItem.getElementsByTagName('enclosure')[0].setAttribute("sparkle:version",
options.version)
if not options.windows:
newItem.getElementsByTagName('enclosure')[0].setAttribute("length",
options.length)
newItem.getElementsByTagName('enclosure')[0].setAttribute("sparkle:dsaSignature",
options.sig)
channel.insertBefore(newItem, latest)
outfile = open(options.filename, 'w')
xml.writexml(outfile)
outfile.close()
|
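A typical Mac-feed invocation, matching the options defined above, is `python update_sparkle_xml.py -f appcast.xml -s "<dsa signature>" -v 1.4.0 -l 12345678`; for Windows, `-w -f appcast-win.xml -v 1.4.0` suffices. Since the new item is inserted before the previous newest one, the update can be verified by re-parsing the feed (a sketch; the file name is illustrative):

from xml.dom.minidom import parse
# item[0] should now be the release that was just added
appcast = parse('appcast.xml')
print(appcast.getElementsByTagName('item')[0].getElementsByTagName('title')[0].firstChild.data)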
|
3a0ffa04344df4d8e2ec124d4f4115887b1e7da8
|
virtual_machine.py
|
virtual_machine.py
|
class VirtualMachine:
def __init__(self, ram_size=256, stack_size=32):
self.data = [None]*ram_size
self.stack = [None]*stack_size
self.stack_size = stack_size
self.stack_top = 0
def push(self, value):
"""Push something onto the stack."""
if self.stack_top+1 > self.stack_size:
raise IndexError
self.stack[self.stack_top] = value
self.stack_top += 1
def pop(self):
"""Pop something from the stack. Crash if empty."""
if self.stack_top == 0:
raise IndexError
self.stack_top -= 1
return self.stack[self.stack_top]
def read_memory(self, index):
"""Read from memory, crashing if index is out of bounds."""
return self.data[index]
def write_memory(self, index, value):
"""Write to memory. Crash if index is out of bounds."""
self.data[index] = value
class BytecodeBase:
def execute(self, machine):
pass
|
Add a virtual machine, with a stub for the bytecode
|
Add a virtual machine, with a stub for the bytecode
|
Python
|
bsd-3-clause
|
darbaga/simple_compiler
|
Add a virtual machine, with a stub for the bytecode
|
class VirtualMachine:
def __init__(self, ram_size=256, stack_size=32):
self.data = [None]*ram_size
self.stack = [None]*stack_size
self.stack_size = stack_size
self.stack_top = 0
def push(self, value):
"""Push something onto the stack."""
if self.stack_top+1 > self.stack_size:
raise IndexError
self.stack[self.stack_top] = value
self.stack_top += 1
def pop(self):
"""Pop something from the stack. Crash if empty."""
if self.stack_top == 0:
raise IndexError
self.stack_top -= 1
return self.stack[self.stack_top]
def read_memory(self, index):
"""Read from memory, crashing if index is out of bounds."""
return self.data[index]
def write_memory(self, index, value):
"""Write to memory. Crash if index is out of bounds."""
self.data[index] = value
class BytecodeBase:
def execute(self, machine):
pass
|
<commit_before><commit_msg>Add a virtual machine, with a stub for the bytecode<commit_after>
|
class VirtualMachine:
def __init__(self, ram_size=256, stack_size=32):
self.data = [None]*ram_size
self.stack = [None]*stack_size
self.stack_size = stack_size
self.stack_top = 0
def push(self, value):
"""Push something onto the stack."""
if self.stack_top+1 > self.stack_size:
raise IndexError
self.stack[self.stack_top] = value
self.stack_top += 1
def pop(self):
"""Pop something from the stack. Crash if empty."""
if self.stack_top == 0:
raise IndexError
self.stack_top -= 1
return self.stack[self.stack_top]
def read_memory(self, index):
"""Read from memory, crashing if index is out of bounds."""
return self.data[index]
def write_memory(self, index, value):
"""Write to memory. Crash if index is out of bounds."""
self.data[index] = value
class BytecodeBase:
def execute(self, machine):
pass
|
Add a virtual machine, with a stub for the bytecodeclass VirtualMachine:
def __init__(self, ram_size=256, stack_size=32):
self.data = [None]*ram_size
self.stack = [None]*stack_size
self.stack_size = stack_size
self.stack_top = 0
def push(self, value):
"""Push something onto the stack."""
if self.stack_top+1 > self.stack_size:
raise IndexError
self.stack[self.stack_top] = value
self.stack_top += 1
def pop(self):
"""Pop something from the stack. Crash if empty."""
if self.stack_top == 0:
raise IndexError
self.stack_top -= 1
return self.stack[self.stack_top]
def read_memory(self, index):
"""Read from memory, crashing if index is out of bounds."""
return self.data[index]
def write_memory(self, index, value):
"""Write to memory. Crash if index is out of bounds."""
self.data[index] = value
class BytecodeBase:
def execute(self, machine):
pass
|
<commit_before><commit_msg>Add a virtual machine, with a stub for the bytecode<commit_after>class VirtualMachine:
def __init__(self, ram_size=256, stack_size=32):
self.data = [None]*ram_size
self.stack = [None]*stack_size
self.stack_size = stack_size
self.stack_top = 0
def push(self, value):
"""Push something onto the stack."""
if self.stack_top+1 > self.stack_size:
raise IndexError
self.stack[self.stack_top] = value
self.stack_top += 1
def pop(self):
"""Pop something from the stack. Crash if empty."""
if self.stack_top == 0:
raise IndexError
self.stack_top -= 1
return self.stack[self.stack_top]
def read_memory(self, index):
"""Read from memory, crashing if index is out of bounds."""
return self.data[index]
def write_memory(self, index, value):
"""Write to memory. Crash if index is out of bounds."""
self.data[index] = value
class BytecodeBase:
def execute(self, machine):
pass
|
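BytecodeBase is only a stub, but it pins down the interface: each opcode becomes a class whose execute method receives the machine. A minimal sketch of two opcodes on top of it (PushConst and Store are illustrative, not part of the commit):

class PushConst(BytecodeBase):
    def __init__(self, value):
        self.value = value

    def execute(self, machine):
        machine.push(self.value)

class Store(BytecodeBase):
    def __init__(self, address):
        self.address = address

    def execute(self, machine):
        # Pop the top of the stack and write it into RAM
        machine.write_memory(self.address, machine.pop())

vm = VirtualMachine()
for op in (PushConst(42), Store(0)):
    op.execute(vm)
assert vm.read_memory(0) == 42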
|
10b65412f477de18527bdaee5d270fae826a2161
|
utils/export_pixels_mask.py
|
utils/export_pixels_mask.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2016 Jérémie DECOCK (http://www.jdhp.org)
# This script is provided under the terms and conditions of the MIT license:
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
... TODO
"""
__all__ = ['export_pixel_mask']
import argparse
import numpy as np
from datapipe.io import images
def export_pixel_mask(input_file_path, output_file_path):
fits_images_dict, fits_metadata_dict = images.load_benchmark_images(input_file_path)
mask = fits_images_dict["pixels_mask"].astype(np.uint8, copy=True)
images.save(mask, output_file_path)
def main():
# PARSE OPTIONS ###########################################################
desc = "Export the pixel mask in a FITS file."
parser = argparse.ArgumentParser(description=desc)
parser.add_argument("--output", "-o",
metavar="FILE",
help="The output FITS file")
parser.add_argument("fileargs", nargs=1, metavar="FILE",
help="The FITS file to process")
args = parser.parse_args()
output_file_path = args.output
input_file_path = args.fileargs[0]
export_pixel_mask(input_file_path, output_file_path)
if __name__ == "__main__":
main()
|
Add a script to export pixels masks for mr_filter's -I option.
|
Add a script to export pixels masks for mr_filter's -I option.
|
Python
|
mit
|
jdhp-sap/data-pipeline-standalone-scripts,jdhp-sap/data-pipeline-standalone-scripts,jdhp-sap/sap-cta-data-pipeline,jdhp-sap/sap-cta-data-pipeline
|
Add a script to export pixels masks for mr_filter's -I option.
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2016 Jérémie DECOCK (http://www.jdhp.org)
# This script is provided under the terms and conditions of the MIT license:
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
... TODO
"""
__all__ = ['export_pixel_mask']
import argparse
import numpy as np
from datapipe.io import images
def export_pixel_mask(input_file_path, output_file_path):
fits_images_dict, fits_metadata_dict = images.load_benchmark_images(input_file_path)
mask = fits_images_dict["pixels_mask"].astype(np.uint8, copy=True)
images.save(mask, output_file_path)
def main():
# PARSE OPTIONS ###########################################################
desc = "Export the pixel mask in a FITS file."
parser = argparse.ArgumentParser(description=desc)
parser.add_argument("--output", "-o",
metavar="FILE",
help="The output FITS file")
parser.add_argument("fileargs", nargs=1, metavar="FILE",
help="The FITS file to process")
args = parser.parse_args()
output_file_path = args.output
input_file_path = args.fileargs[0]
export_pixel_mask(input_file_path, output_file_path)
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add a script to export pixels masks for mr_filter's -I option.<commit_after>
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2016 Jérémie DECOCK (http://www.jdhp.org)
# This script is provided under the terms and conditions of the MIT license:
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
... TODO
"""
__all__ = ['export_pixel_mask']
import argparse
import numpy as np
from datapipe.io import images
def export_pixel_mask(input_file_path, output_file_path):
fits_images_dict, fits_metadata_dict = images.load_benchmark_images(input_file_path)
mask = fits_images_dict["pixels_mask"].astype(np.uint8, copy=True)
images.save(mask, output_file_path)
def main():
# PARSE OPTIONS ###########################################################
desc = "Export the pixel mask in a FITS file."
parser = argparse.ArgumentParser(description=desc)
parser.add_argument("--output", "-o",
metavar="FILE",
help="The output FITS file")
parser.add_argument("fileargs", nargs=1, metavar="FILE",
help="The FITS file to process")
args = parser.parse_args()
output_file_path = args.output
input_file_path = args.fileargs[0]
export_pixel_mask(input_file_path, output_file_path)
if __name__ == "__main__":
main()
|
Add a script to export pixels masks for mr_filter's -I option.#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2016 Jérémie DECOCK (http://www.jdhp.org)
# This script is provided under the terms and conditions of the MIT license:
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
... TODO
"""
__all__ = ['export_pixel_mask']
import argparse
import numpy as np
from datapipe.io import images
def export_pixel_mask(input_file_path, output_file_path):
fits_images_dict, fits_metadata_dict = images.load_benchmark_images(input_file_path)
mask = fits_images_dict["pixels_mask"].astype(np.uint8, copy=True)
images.save(mask, output_file_path)
def main():
# PARSE OPTIONS ###########################################################
desc = "Export the pixel mask in a FITS file."
parser = argparse.ArgumentParser(description=desc)
parser.add_argument("--output", "-o",
metavar="FILE",
help="The output FITS file")
parser.add_argument("fileargs", nargs=1, metavar="FILE",
help="The FITS file to process")
args = parser.parse_args()
output_file_path = args.output
input_file_path = args.fileargs[0]
export_pixel_mask(input_file_path, output_file_path)
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add a script to export pixels masks for mr_filter's -I option.<commit_after>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2016 Jérémie DECOCK (http://www.jdhp.org)
# This script is provided under the terms and conditions of the MIT license:
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
... TODO
"""
__all__ = ['export_pixel_mask']
import argparse
import numpy as np
from datapipe.io import images
def export_pixel_mask(input_file_path, output_file_path):
fits_images_dict, fits_metadata_dict = images.load_benchmark_images(input_file_path)
mask = fits_images_dict["pixels_mask"].astype(np.uint8, copy=True)
images.save(mask, output_file_path)
def main():
# PARSE OPTIONS ###########################################################
desc = "Export the pixel mask in a FITS file."
parser = argparse.ArgumentParser(description=desc)
parser.add_argument("--output", "-o",
metavar="FILE",
help="The output FITS file")
parser.add_argument("fileargs", nargs=1, metavar="FILE",
help="The FITS file to process")
args = parser.parse_args()
output_file_path = args.output
input_file_path = args.fileargs[0]
export_pixel_mask(input_file_path, output_file_path)
if __name__ == "__main__":
main()
|
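The exported mask is what mr_filter's -I option consumes, so the typical pipeline is export-then-filter. A sketch of that chaining (file names are illustrative, and the positional input/output arguments follow mr_filter's usual convention rather than anything stated in this script):

import subprocess
export_pixel_mask('benchmark.fits', 'mask.fits')
# Hand the mask to mr_filter via -I (per the commit message)
subprocess.run(['mr_filter', '-I', 'mask.fits', 'input.fits', 'output.fits'])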
|
1eff1c6b604718414c95099f3833d58a3d2b463c
|
fireplace/actions.py
|
fireplace/actions.py
|
import logging
from .entity import Entity
class Action: # Lawsuit
args = ()
def __init__(self, target, *args, **kwargs):
self.target = target
self.times = 1
self._args = args
for k, v in zip(self.args, args):
setattr(self, k, v)
def __repr__(self):
args = ["%s=%r" % (k, v) for k, v in zip(self.args, self._args)]
return "<Action: %s(%s)>" % (self.__class__.__name__, ", ".join(args))
def __mul__(self, value):
self.times *= value
return self
def eval(self, selector, source, game):
if isinstance(selector, Entity):
return [selector]
else:
return selector.eval(game, source)
def trigger(self, source, game):
targets = self.eval(self.target, source, game)
for i in range(self.times):
logging.info("%r triggering %r targeting %r", source, self, targets)
for target in targets:
self.do(source, target, game)
|
Implement a basic Action class
|
Implement a basic Action class
|
Python
|
agpl-3.0
|
Meerkov/fireplace,butozerca/fireplace,NightKev/fireplace,Ragowit/fireplace,jleclanche/fireplace,Ragowit/fireplace,amw2104/fireplace,oftc-ftw/fireplace,liujimj/fireplace,smallnamespace/fireplace,beheh/fireplace,smallnamespace/fireplace,amw2104/fireplace,Meerkov/fireplace,liujimj/fireplace,butozerca/fireplace,oftc-ftw/fireplace
|
Implement a basic Action class
|
import logging
from .entity import Entity
class Action: # Lawsuit
args = ()
def __init__(self, target, *args, **kwargs):
self.target = target
self.times = 1
self._args = args
for k, v in zip(self.args, args):
setattr(self, k, v)
def __repr__(self):
args = ["%s=%r" % (k, v) for k, v in zip(self.args, self._args)]
return "<Action: %s(%s)>" % (self.__class__.__name__, ", ".join(args))
def __mul__(self, value):
self.times *= value
return self
def eval(self, selector, source, game):
if isinstance(selector, Entity):
return [selector]
else:
return selector.eval(game, source)
def trigger(self, source, game):
targets = self.eval(self.target, source, game)
for i in range(self.times):
logging.info("%r triggering %r targeting %r", source, self, targets)
for target in targets:
self.do(source, target, game)
|
<commit_before><commit_msg>Implement a basic Action class<commit_after>
|
import logging
from .entity import Entity
class Action: # Lawsuit
args = ()
def __init__(self, target, *args, **kwargs):
self.target = target
self.times = 1
self._args = args
for k, v in zip(self.args, args):
setattr(self, k, v)
def __repr__(self):
args = ["%s=%r" % (k, v) for k, v in zip(self.args, self._args)]
return "<Action: %s(%s)>" % (self.__class__.__name__, ", ".join(args))
def __mul__(self, value):
self.times *= value
return self
def eval(self, selector, source, game):
if isinstance(selector, Entity):
return [selector]
else:
return selector.eval(game, source)
def trigger(self, source, game):
targets = self.eval(self.target, source, game)
for i in range(self.times):
logging.info("%r triggering %r targeting %r", source, self, targets)
for target in targets:
self.do(source, target, game)
|
Implement a basic Action classimport logging
from .entity import Entity
class Action: # Lawsuit
args = ()
def __init__(self, target, *args, **kwargs):
self.target = target
self.times = 1
self._args = args
for k, v in zip(self.args, args):
setattr(self, k, v)
def __repr__(self):
args = ["%s=%r" % (k, v) for k, v in zip(self.args, self._args)]
return "<Action: %s(%s)>" % (self.__class__.__name__, ", ".join(args))
def __mul__(self, value):
self.times *= value
return self
def eval(self, selector, source, game):
if isinstance(selector, Entity):
return [selector]
else:
return selector.eval(game, source)
def trigger(self, source, game):
targets = self.eval(self.target, source, game)
for i in range(self.times):
logging.info("%r triggering %r targeting %r", source, self, targets)
for target in targets:
self.do(source, target, game)
|
<commit_before><commit_msg>Implement a basic Action class<commit_after>import logging
from .entity import Entity
class Action: # Lawsuit
args = ()
def __init__(self, target, *args, **kwargs):
self.target = target
self.times = 1
self._args = args
for k, v in zip(self.args, args):
setattr(self, k, v)
def __repr__(self):
args = ["%s=%r" % (k, v) for k, v in zip(self.args, self._args)]
return "<Action: %s(%s)>" % (self.__class__.__name__, ", ".join(args))
def __mul__(self, value):
self.times *= value
return self
def eval(self, selector, source, game):
if isinstance(selector, Entity):
return [selector]
else:
return selector.eval(game, source)
def trigger(self, source, game):
targets = self.eval(self.target, source, game)
for i in range(self.times):
logging.info("%r triggering %r targeting %r", source, self, targets)
for target in targets:
self.do(source, target, game)
|
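Concrete actions are expected to subclass Action, declare their extra argument names in args, and implement do(source, target, game); trigger then fans the action out over the evaluated selector targets, honoring the `*` multiplier. A minimal sketch (Hit and target.hit are illustrative, not from the commit):

class Hit(Action):
    args = ("amount",)

    def do(self, source, target, game):
        # Called once per evaluated target, self.times times overall
        target.hit(source, self.amount)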
|
10204708652f463984c9f21e2b1bdc898049071d
|
zephyrus/script.py
|
zephyrus/script.py
|
import abc
import json
class Script(list):
def __init__(self, filename=None, iterable=None):
if iterable is not None:
super().__init__(iterable)
else:
super().__init__()
self.filename = filename
def save(self, filename=None):
if filename is not None:
self.filename = filename
save = open(self.filename, 'w')
json.dump(self, save)
save.close()
def load(self, filename):
self.clear()
self.extend(json.load(open(filename)))
def update(self, filename, index):
d = json.load(open(filename))
up = self[index]
for key in up.keys():
d[key] = up[key]
json.dump(d, open(filename, 'w'))
class Parameter:
def __init__(self, name, message, parser):
self.name = name
self.message = message
self.parser = parser
def __call__(self, *args, **kwargs):
return self.parser(input(self.message))
# TODO get a better name for this
class AutoParameter(abc.ABC):
def __init__(self, name):
self.name = name
def __call__(self, *args, parameters, **kwargs):
return self.parser(parameters)
@abc.abstractmethod
def parser(self, parameters):
pass
class ConfigSection:
@classmethod
def get_dict(cls):
section = {}
for parameter in cls.parameters:
section[parameter.name] = parameter(parameters=section)
return section
class ConfigBuilder:
def get_dict(self):
config = {}
for section in self.sections:
config.update(section.get_dict())
return config
def generate_config_file(self, filename):
with open(filename, 'w') as output:
json.dump(self.get_dict(), output)
|
Add module that helps to generate configuration files.
|
Add module that helps to generate configuration files.
|
Python
|
mit
|
wairton/zephyrus-mas
|
Add module that helps to generate configuration files.
|
import abc
import json
class Script(list):
def __init__(self, filename=None, iterable=None):
if iterable is not None:
super().__init__(iterable)
else:
super().__init__()
self.filename = filename
def save(self, filename=None):
if filename is not None:
self.filename = filename
save = open(self.filename, 'w')
json.dump(self, save)
save.close()
def load(self, filename):
self.clear()
self.extend(json.load(open(filename)))
def update(self, filename, index):
d = json.load(open(filename))
up = self[index]
for key in up.keys():
d[key] = up[key]
json.dump(d, open(filename, 'w'))
class Parameter:
def __init__(self, name, message, parser):
self.name = name
self.message = message
self.parser = parser
def __call__(self, *args, **kwargs):
return self.parser(input(self.message))
# TODO get a better name for this
class AutoParameter(abc.ABC):
def __init__(self, name):
self.name = name
def __call__(self, *args, parameters, **kwargs):
return self.parser(parameters)
@abc.abstractmethod
def parser(self, parameters):
pass
class ConfigSection:
@classmethod
def get_dict(cls):
section = {}
for parameter in cls.parameters:
section[parameter.name] = parameter(parameters=section)
return section
class ConfigBuilder:
def get_dict(self):
config = {}
for section in self.sections:
config.update(section.get_dict())
return config
def generate_config_file(self, filename):
with open(filename, 'w') as output:
json.dump(self.get_dict(), output)
|
<commit_before><commit_msg>Add module that helps to generate configuration files.<commit_after>
|
import abc
import json
class Script(list):
def __init__(self, filename=None, iterable=None):
if iterable is not None:
super().__init__(iterable)
else:
super().__init__()
self.filename = filename
def save(self, filename=None):
if filename is not None:
self.filename = filename
save = open(self.filename, 'w')
json.dump(self, save)
save.close()
def load(self, filename):
self.clear()
self.extend(json.load(open(filename)))
def update(self, filename, index):
d = json.load(open(filename))
up = self[index]
for key in up.keys():
d[key] = up[key]
json.dump(d, open(filename, 'w'))
class Parameter:
def __init__(self, name, message, parser):
self.name = name
self.message = message
self.parser = parser
def __call__(self, *args, **kwargs):
return self.parser(input(self.message))
# TODO get a better name for this
class AutoParameter(abc.ABC):
def __init__(self, name):
self.name = name
def __call__(self, *args, parameters, **kwargs):
return self.parser(parameters)
@abc.abstractmethod
def parser(self, parameters):
pass
class ConfigSection:
@classmethod
def get_dict(cls):
section = {}
for parameter in cls.parameters:
section[parameter.name] = parameter(parameters=section)
return section
class ConfigBuilder:
def get_dict(self):
config = {}
for section in self.sections:
config.update(section.get_dict())
return config
def generate_config_file(self, filename):
with open(filename, 'w') as output:
json.dump(self.get_dict(), output)
|
Add module that helps to generate configuration files.import abc
import json
class Script(list):
def __init__(self, filename=None, iterable=None):
if iterable is not None:
super().__init__(iterable)
else:
super().__init__()
self.filename = filename
def save(self, filename=None):
if filename is not None:
self.filename = filename
save = open(self.filename, 'w')
json.dump(self, save)
save.close()
def load(self, filename):
self.clear()
self.extend(json.load(open(filename)))
def update(self, filename, index):
d = json.load(open(filename))
up = self[index]
for key in up.keys():
d[key] = up[key]
json.dump(d, open(filename, 'w'))
class Parameter:
def __init__(self, name, message, parser):
self.name = name
self.message = message
self.parser = parser
def __call__(self, *args, **kwargs):
return self.parser(input(self.message))
# TODO get a better name for this
class AutoParameter(abc.ABC):
def __init__(self, name):
self.name = name
def __call__(self, *args, parameters, **kwargs):
return self.parser(parameters)
@abc.abstractmethod
def parser(self, parameters):
pass
class ConfigSection:
@classmethod
def get_dict(cls):
section = {}
for parameter in cls.parameters:
section[parameter.name] = parameter(parameters=section)
return section
class ConfigBuilder:
def get_dict(self):
config = {}
for section in self.sections:
config.update(section.get_dict())
return config
def generate_config_file(self, filename):
with open(filename, 'w') as output:
json.dump(self.get_dict(), output)
|
<commit_before><commit_msg>Add module that helps to generate configuration files.<commit_after>import abc
import json
class Script(list):
def __init__(self, filename=None, iterable=None):
if iterable is not None:
super().__init__(iterable)
else:
super().__init__()
self.filename = filename
def save(self, filename=None):
if filename is not None:
self.filename = filename
save = open(self.filename, 'w')
json.dump(self, save)
save.close()
def load(self, filename):
self.clear()
self.extend(json.load(open(filename)))
def update(self, filename, index):
d = json.load(open(filename))
up = self[index]
for key in up.keys():
d[key] = up[key]
json.dump(d, open(filename, 'w'))
class Parameter:
def __init__(self, name, message, parser):
self.name = name
self.message = message
self.parser = parser
def __call__(self, *args, **kwargs):
return self.parser(input(self.message))
# TODO get a better name for this
class AutoParameter(abc.ABC):
def __init__(self, name):
self.name = name
def __call__(self, *args, parameters, **kwargs):
return self.parser(parameters)
@abc.abstractmethod
def parser(self, parameters):
pass
class ConfigSection:
@classmethod
def get_dict(cls):
section = {}
for parameter in cls.parameters:
section[parameter.name] = parameter(parameters=section)
return section
class ConfigBuilder:
def get_dict(self):
config = {}
for section in self.sections:
config.update(section.get_dict())
return config
def generate_config_file(self, filename):
with open(filename, 'w') as output:
json.dump(self.get_dict(), output)
|
|
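As a usage illustration for the record above (with list.clear() and the json.dump argument order corrected), here is a minimal, hypothetical sketch of wiring Parameter, AutoParameter, ConfigSection and ConfigBuilder together; the section and parameter names are invented, and the classes are assumed importable from zephyrus.script.

from zephyrus.script import Parameter, AutoParameter, ConfigSection, ConfigBuilder

port = Parameter("port", "Server port: ", int)     # prompts the user, parses to int

class DoubledPort(AutoParameter):                  # derived from already-collected values
    def parser(self, parameters):
        return parameters["port"] * 2

class ServerSection(ConfigSection):                # hypothetical section
    parameters = [port, DoubledPort("admin_port")]

class ServerConfigBuilder(ConfigBuilder):          # hypothetical builder
    sections = [ServerSection]

# ServerConfigBuilder().generate_config_file("server.json")
# -> asks "Server port: ", then writes {"port": ..., "admin_port": ...}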
82826f468186b737d63dffb2c79cfeff5a8d47a0
|
examples/python3-urllib/run.py
|
examples/python3-urllib/run.py
|
import sys
import ssl
import urllib.error
import urllib.request
host = sys.argv[1]
port = sys.argv[2]
cafile = sys.argv[3] if len(sys.argv) > 3 else None
try:
urllib.request.urlopen("https://" + host + ":" + port, cafile=cafile)
except urllib.error.URLError as exc:
if not isinstance(exc.reason, ssl.SSLError):
raise
print("FAIL")
else:
print("OK")
|
Add a Python3 + urllib example
|
Add a Python3 + urllib example
|
Python
|
mit
|
ouspg/trytls,ouspg/trytls,ouspg/trytls,ouspg/trytls,ouspg/trytls,ouspg/trytls,ouspg/trytls,ouspg/trytls
|
Add a Python3 + urllib example
|
import sys
import ssl
import urllib.error
import urllib.request
host = sys.argv[1]
port = sys.argv[2]
cafile = sys.argv[3] if len(sys.argv) > 3 else None
try:
urllib.request.urlopen("https://" + host + ":" + port, cafile=cafile)
except urllib.error.URLError as exc:
if not isinstance(exc.reason, ssl.SSLError):
raise
print("FAIL")
else:
print("OK")
|
<commit_before><commit_msg>Add a Python3 + urllib example<commit_after>
|
import sys
import ssl
import urllib.error
import urllib.request
host = sys.argv[1]
port = sys.argv[2]
cafile = sys.argv[3] if len(sys.argv) > 3 else None
try:
urllib.request.urlopen("https://" + host + ":" + port, cafile=cafile)
except urllib.error.URLError as exc:
if not isinstance(exc.reason, ssl.SSLError):
raise
print("FAIL")
else:
print("OK")
|
Add a Python3 + urllib exampleimport sys
import ssl
import urllib.error
import urllib.request
host = sys.argv[1]
port = sys.argv[2]
cafile = sys.argv[3] if len(sys.argv) > 3 else None
try:
urllib.request.urlopen("https://" + host + ":" + port, cafile=cafile)
except urllib.error.URLError as exc:
if not isinstance(exc.reason, ssl.SSLError):
raise
print("FAIL")
else:
print("OK")
|
<commit_before><commit_msg>Add a Python3 + urllib example<commit_after>import sys
import ssl
import urllib.error
import urllib.request
host = sys.argv[1]
port = sys.argv[2]
cafile = sys.argv[3] if len(sys.argv) > 3 else None
try:
urllib.request.urlopen("https://" + host + ":" + port, cafile=cafile)
except urllib.error.URLError as exc:
if not isinstance(exc.reason, ssl.SSLError):
raise
print("FAIL")
else:
print("OK")
|
|
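A variant of the same check, for illustration only: instead of urlopen's cafile shortcut (deprecated in newer Pythons) it builds an ssl.SSLContext explicitly. The host/port values in the trailing comment are placeholders, not part of the record.

import ssl
import urllib.error
import urllib.request

def rejects_bad_tls(host, port, cafile=None):
    """Return True if certificate verification fails for https://host:port."""
    context = ssl.create_default_context(cafile=cafile)
    try:
        urllib.request.urlopen("https://%s:%s" % (host, port), context=context)
    except urllib.error.URLError as exc:
        if not isinstance(exc.reason, ssl.SSLError):
            raise          # unrelated network failure, surface it
        return True        # handshake rejected, which is what the stub reports as OK
    return False

# print("OK" if rejects_bad_tls("expired.example.invalid", "443") else "FAIL")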
52fcf23a15cda393dabde53f513687ec92d03598
|
src/stratuslab/FileAppender.py
|
src/stratuslab/FileAppender.py
|
import os
import shutil
class FileAppender(object):
def __init__(self, filename):
self.filename = filename
self.lines = []
self.newLines = []
self.foundExit = False
def insertAtTheEnd(self, newLine):
''' Append at the end of the file (e.g. rc.local) the newLine
at the end of the file, unless the file ends with 'exit',
in which case it inserts just before.'''
self._backupIfNecessary()
self._reverseLines()
newLine = newLine + '\n'
self.foundExit = False
for line in self.lines:
if line.strip() == '':
self._appendNewLine(line)
continue
if self._containsExit(line):
self._insertLines(line,newLine)
continue
if self.foundExit:
self._appendNewLine(line)
continue
self._insertLines(newLine,line)
self.newLines.reverse()
self._writeAndClose()
def _backupIfNecessary(self):
originalFilename = self.filename + '.orig'
if not os.path.exists(originalFilename):
shutil.copyfile(self.filename, originalFilename)
def _reverseLines(self):
file = open(self.filename)
self.lines = file.readlines()
self.newLines = []
self.lines.reverse()
def _appendNewLine(self, line):
self.newLines.append(line)
def _containsExit(self, line):
return line.strip().startswith('exit')
def _insertLines(self, first, second):
self.foundExit = True
self._appendNewLine(first)
self._appendNewLine(second)
def _writeAndClose(self):
newfile = open(self.filename,'w')
newfile.writelines(self.newLines)
newfile.close()
os.chmod(self.filename, 0755)
|
Append intelligently at the end of a file
|
Append intelligently at the end of a file
|
Python
|
apache-2.0
|
StratusLab/client,StratusLab/client,StratusLab/client,StratusLab/client
|
Append intelligently at the end of a file
|
import os
import shutil
class FileAppender(object):
def __init__(self, filename):
self.filename = filename
self.lines = []
self.newLines = []
self.foundExit = False
def insertAtTheEnd(self, newLine):
''' Append at the end of the file (e.g. rc.local) the newLine
at the end of the file, unless the file ends with 'exit',
in which case it inserts just before.'''
self._backupIfNecessary()
self._reverseLines()
newLine = newLine + '\n'
self.foundExit = False
for line in self.lines:
if line.strip() == '':
self._appendNewLine(line)
continue
if self._containsExit(line):
self._insertLines(line,newLine)
continue
if self.foundExit:
self._appendNewLine(line)
continue
self._insertLines(newLine,line)
self.newLines.reverse()
self._writeAndClose()
def _backupIfNecessary(self):
originalFilename = self.filename + '.orig'
if not os.path.exists(originalFilename):
shutil.copyfile(self.filename, originalFilename)
def _reverseLines(self):
file = open(self.filename)
self.lines = file.readlines()
self.newLines = []
self.lines.reverse()
def _appendNewLine(self, line):
self.newLines.append(line)
def _containsExit(self, line):
return line.strip().startswith('exit')
def _insertLines(self, first, second):
self.foundExit = True
self._appendNewLine(first)
self._appendNewLine(second)
def _writeAndClose(self):
newfile = open(self.filename,'w')
newfile.writelines(self.newLines)
newfile.close()
os.chmod(self.filename, 0755)
|
<commit_before><commit_msg>Append intelligently at the end of a file<commit_after>
|
import os
import shutil
class FileAppender(object):
def __init__(self, filename):
self.filename = filename
self.lines = []
self.newLines = []
self.foundExit = False
def insertAtTheEnd(self, newLine):
''' Append at the end of the file (e.g. rc.local) the newLine
at the end of the file, unless the file ends with 'exit',
in which case it inserts just before.'''
self._backupIfNecessary()
self._reverseLines()
newLine = newLine + '\n'
self.foundExit = False
for line in self.lines:
if line.strip() == '':
self._appendNewLine(line)
continue
if self._containsExit(line):
self._insertLines(line,newLine)
continue
if self.foundExit:
self._appendNewLine(line)
continue
self._insertLines(newLine,line)
self.newLines.reverse()
self._writeAndClose()
def _backupIfNecessary(self):
originalFilename = self.filename + '.orig'
if not os.path.exists(originalFilename):
shutil.copyfile(self.filename, originalFilename)
def _reverseLines(self):
file = open(self.filename)
self.lines = file.readlines()
self.newLines = []
self.lines.reverse()
def _appendNewLine(self, line):
self.newLines.append(line)
def _containsExit(self, line):
return line.strip().startswith('exit')
def _insertLines(self, first, second):
self.foundExit = True
self._appendNewLine(first)
self._appendNewLine(second)
def _writeAndClose(self):
newfile = open(self.filename,'w')
newfile.writelines(self.newLines)
newfile.close()
os.chmod(self.filename, 0755)
|
Append intelligently at the end of a fileimport os
import shutil
class FileAppender(object):
def __init__(self, filename):
self.filename = filename
self.lines = []
self.newLines = []
self.foundExit = False
def insertAtTheEnd(self, newLine):
''' Append at the end of the file (e.g. rc.local) the newLine
at the end of the file, unless the file ends with 'exit',
in which case it inserts just before.'''
self._backupIfNecessary()
self._reverseLines()
newLine = newLine + '\n'
self.foundExit = False
for line in self.lines:
if line.strip() == '':
self._appendNewLine(line)
continue
if self._containsExit(line):
self._insertLines(line,newLine)
continue
if self.foundExit:
self._appendNewLine(line)
continue
self._insertLines(newLine,line)
self.newLines.reverse()
self._writeAndClose()
def _backupIfNecessary(self):
originalFilename = self.filename + '.orig'
if not os.path.exists(originalFilename):
shutil.copyfile(self.filename, originalFilename)
def _reverseLines(self):
file = open(self.filename)
self.lines = file.readlines()
self.newLines = []
self.lines.reverse()
def _appendNewLine(self, line):
self.newLines.append(line)
def _containsExit(self, line):
return line.strip().startswith('exit')
def _insertLines(self, first, second):
self.foundExit = True
self._appendNewLine(first)
self._appendNewLine(second)
def _writeAndClose(self):
newfile = open(self.filename,'w')
newfile.writelines(self.newLines)
newfile.close()
os.chmod(self.filename, 0755)
|
<commit_before><commit_msg>Append intelligently at the end of a file<commit_after>import os
import shutil
class FileAppender(object):
def __init__(self, filename):
self.filename = filename
self.lines = []
self.newLines = []
self.foundExit = False
def insertAtTheEnd(self, newLine):
''' Append at the end of the file (e.g. rc.local) the newLine
at the end of the file, unless the file ends with 'exit',
in which case it inserts just before.'''
self._backupIfNecessary()
self._reverseLines()
newLine = newLine + '\n'
self.foundExit = False
for line in self.lines:
if line.strip() == '':
self._appendNewLine(line)
continue
if self._containsExit(line):
self._insertLines(line,newLine)
continue
if self.foundExit:
self._appendNewLine(line)
continue
self._insertLines(newLine,line)
self.newLines.reverse()
self._writeAndClose()
def _backupIfNecessary(self):
originalFilename = self.filename + '.orig'
if not os.path.exists(originalFilename):
shutil.copyfile(self.filename, originalFilename)
def _reverseLines(self):
file = open(self.filename)
self.lines = file.readlines()
self.newLines = []
self.lines.reverse()
def _appendNewLine(self, line):
self.newLines.append(line)
def _containsExit(self, line):
return line.strip().startswith('exit')
def _insertLines(self, first, second):
self.foundExit = True
self._appendNewLine(first)
self._appendNewLine(second)
def _writeAndClose(self):
newfile = open(self.filename,'w')
newfile.writelines(self.newLines)
newfile.close()
os.chmod(self.filename, 0755)
|
|
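A small, hypothetical demonstration of the exit-aware insertion above (Python 2, matching the record; assumes FileAppender is importable). Note that insertAtTheEnd also leaves a <name>.orig backup next to the file.

import os
import tempfile
from stratuslab.FileAppender import FileAppender

fd, path = tempfile.mkstemp()
os.close(fd)
with open(path, 'w') as script:
    script.write('echo start\nexit 0\n')

FileAppender(path).insertAtTheEnd('echo configured')
print(open(path).read())
# echo start
# echo configured    <- inserted just before the trailing exit
# exit 0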
8d35dad5fc63de919936d0407d105c36c87a1b14
|
tests/test_no_extra_queries.py
|
tests/test_no_extra_queries.py
|
from nose.tools import assert_false
from mock import Mock, PropertyMock, patch
from .models import Photo
def test_dont_access_source():
"""
Touching the source may trigger an unneeded query.
See <https://github.com/matthewwithanm/django-imagekit/issues/295>
"""
pmock = PropertyMock()
pmock.__get__ = Mock()
with patch.object(Photo, 'original_image', pmock):
photo = Photo() # noqa
assert_false(pmock.__get__.called)
|
Add test to illustrate GH-295
|
Add test to illustrate GH-295
|
Python
|
bsd-3-clause
|
FundedByMe/django-imagekit,tawanda/django-imagekit,tawanda/django-imagekit,FundedByMe/django-imagekit
|
Add test to illustrate GH-295
|
from nose.tools import assert_false
from mock import Mock, PropertyMock, patch
from .models import Photo
def test_dont_access_source():
"""
Touching the source may trigger an unneeded query.
See <https://github.com/matthewwithanm/django-imagekit/issues/295>
"""
pmock = PropertyMock()
pmock.__get__ = Mock()
with patch.object(Photo, 'original_image', pmock):
photo = Photo() # noqa
assert_false(pmock.__get__.called)
|
<commit_before><commit_msg>Add test to illustrate GH-295<commit_after>
|
from nose.tools import assert_false
from mock import Mock, PropertyMock, patch
from .models import Photo
def test_dont_access_source():
"""
Touching the source may trigger an unneeded query.
See <https://github.com/matthewwithanm/django-imagekit/issues/295>
"""
pmock = PropertyMock()
pmock.__get__ = Mock()
with patch.object(Photo, 'original_image', pmock):
photo = Photo() # noqa
assert_false(pmock.__get__.called)
|
Add test to illustrate GH-295from nose.tools import assert_false
from mock import Mock, PropertyMock, patch
from .models import Photo
def test_dont_access_source():
"""
Touching the source may trigger an unneeded query.
See <https://github.com/matthewwithanm/django-imagekit/issues/295>
"""
pmock = PropertyMock()
pmock.__get__ = Mock()
with patch.object(Photo, 'original_image', pmock):
photo = Photo() # noqa
assert_false(pmock.__get__.called)
|
<commit_before><commit_msg>Add test to illustrate GH-295<commit_after>from nose.tools import assert_false
from mock import Mock, PropertyMock, patch
from .models import Photo
def test_dont_access_source():
"""
Touching the source may trigger an unneeded query.
See <https://github.com/matthewwithanm/django-imagekit/issues/295>
"""
pmock = PropertyMock()
pmock.__get__ = Mock()
with patch.object(Photo, 'original_image', pmock):
photo = Photo() # noqa
assert_false(pmock.__get__.called)
|
|
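For readers unfamiliar with the patching trick above, here is a self-contained illustration of the same idea using mock's documented new_callable=PropertyMock form; the Model class and its source attribute are invented for the example.

from mock import PropertyMock, patch

class Model(object):
    source = "expensive to compute"   # stand-in for a DB-backed attribute

with patch.object(Model, 'source', new_callable=PropertyMock) as pmock:
    pmock.return_value = "stub"
    obj = Model()                     # construction must not read .source
    assert not pmock.called
    assert obj.source == "stub"       # an explicit read; now it is called
    pmock.assert_called_once_with()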
24e9f6f1f9f7d6c48715a5e57c0dbc0b0271b8e0
|
perftest.py
|
perftest.py
|
"""
Simple performance tests.
"""
import sys
import time
import couchdb
def main():
print 'sys.version : %r' % (sys.version,)
print 'sys.platform : %r' % (sys.platform,)
tests = [create_doc, create_bulk_docs]
if len(sys.argv) > 1:
tests = [test for test in tests if test.__name__ in sys.argv[1:]]
server = couchdb.Server()
for test in tests:
_run(server, test)
def _run(server, func):
"""Run a test in a clean db and log its execution time."""
sys.stdout.write("* [%s] %s ... " % (func.__name__, func.__doc__.strip()))
sys.stdout.flush()
db_name = 'couchdb-python/perftest'
db = server.create(db_name)
try:
start = time.time()
func(db)
stop = time.time()
sys.stdout.write("%0.2fs\n" % (stop-start,))
sys.stdout.flush()
except Exception, e:
sys.stdout.write("FAILED - %r\n" % (unicode(e),))
sys.stdout.flush()
finally:
server.delete(db_name)
def create_doc(db):
"""Create lots of docs, one at a time"""
for i in range(1000):
db.save({'_id': unicode(i)})
def create_bulk_docs(db):
"""Create lots of docs, lots at a time"""
batch_size = 100
num_batches = 1000
for i in range(num_batches):
db.update([{'_id': unicode((i * batch_size) + j)} for j in range(batch_size)])
if __name__ == '__main__':
main()
|
Add a very simple performance testing tool.
|
Add a very simple performance testing tool.
|
Python
|
bsd-3-clause
|
ssaavedra/couchdb-python,hdmessaging/couchbase-mapping-python,oliora/couchdb-python
|
Add a very simple performance testing tool.
|
"""
Simple performance tests.
"""
import sys
import time
import couchdb
def main():
print 'sys.version : %r' % (sys.version,)
print 'sys.platform : %r' % (sys.platform,)
tests = [create_doc, create_bulk_docs]
if len(sys.argv) > 1:
tests = [test for test in tests if test.__name__ in sys.argv[1:]]
server = couchdb.Server()
for test in tests:
_run(server, test)
def _run(server, func):
"""Run a test in a clean db and log its execution time."""
sys.stdout.write("* [%s] %s ... " % (func.__name__, func.__doc__.strip()))
sys.stdout.flush()
db_name = 'couchdb-python/perftest'
db = server.create(db_name)
try:
start = time.time()
func(db)
stop = time.time()
sys.stdout.write("%0.2fs\n" % (stop-start,))
sys.stdout.flush()
except Exception, e:
sys.stdout.write("FAILED - %r\n" % (unicode(e),))
sys.stdout.flush()
finally:
server.delete(db_name)
def create_doc(db):
"""Create lots of docs, one at a time"""
for i in range(1000):
db.save({'_id': unicode(i)})
def create_bulk_docs(db):
"""Create lots of docs, lots at a time"""
batch_size = 100
num_batches = 1000
for i in range(num_batches):
db.update([{'_id': unicode((i * batch_size) + j)} for j in range(batch_size)])
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a very simple performance testing tool.<commit_after>
|
"""
Simple performance tests.
"""
import sys
import time
import couchdb
def main():
print 'sys.version : %r' % (sys.version,)
print 'sys.platform : %r' % (sys.platform,)
tests = [create_doc, create_bulk_docs]
if len(sys.argv) > 1:
tests = [test for test in tests if test.__name__ in sys.argv[1:]]
server = couchdb.Server()
for test in tests:
_run(server, test)
def _run(server, func):
"""Run a test in a clean db and log its execution time."""
sys.stdout.write("* [%s] %s ... " % (func.__name__, func.__doc__.strip()))
sys.stdout.flush()
db_name = 'couchdb-python/perftest'
db = server.create(db_name)
try:
start = time.time()
func(db)
stop = time.time()
sys.stdout.write("%0.2fs\n" % (stop-start,))
sys.stdout.flush()
except Exception, e:
sys.stdout.write("FAILED - %r\n" % (unicode(e),))
sys.stdout.flush()
finally:
server.delete(db_name)
def create_doc(db):
"""Create lots of docs, one at a time"""
for i in range(1000):
db.save({'_id': unicode(i)})
def create_bulk_docs(db):
"""Create lots of docs, lots at a time"""
batch_size = 100
num_batches = 1000
for i in range(num_batches):
db.update([{'_id': unicode((i * batch_size) + j)} for j in range(batch_size)])
if __name__ == '__main__':
main()
|
Add a very simple performance testing tool."""
Simple performance tests.
"""
import sys
import time
import couchdb
def main():
print 'sys.version : %r' % (sys.version,)
print 'sys.platform : %r' % (sys.platform,)
tests = [create_doc, create_bulk_docs]
if len(sys.argv) > 1:
tests = [test for test in tests if test.__name__ in sys.argv[1:]]
server = couchdb.Server()
for test in tests:
_run(server, test)
def _run(server, func):
"""Run a test in a clean db and log its execution time."""
sys.stdout.write("* [%s] %s ... " % (func.__name__, func.__doc__.strip()))
sys.stdout.flush()
db_name = 'couchdb-python/perftest'
db = server.create(db_name)
try:
start = time.time()
func(db)
stop = time.time()
sys.stdout.write("%0.2fs\n" % (stop-start,))
sys.stdout.flush()
except Exception, e:
sys.stdout.write("FAILED - %r\n" % (unicode(e),))
sys.stdout.flush()
finally:
server.delete(db_name)
def create_doc(db):
"""Create lots of docs, one at a time"""
for i in range(1000):
db.save({'_id': unicode(i)})
def create_bulk_docs(db):
"""Create lots of docs, lots at a time"""
batch_size = 100
num_batches = 1000
for i in range(num_batches):
db.update([{'_id': unicode((i * batch_size) + j)} for j in range(batch_size)])
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a very simple performance testing tool.<commit_after>"""
Simple performance tests.
"""
import sys
import time
import couchdb
def main():
print 'sys.version : %r' % (sys.version,)
print 'sys.platform : %r' % (sys.platform,)
tests = [create_doc, create_bulk_docs]
if len(sys.argv) > 1:
tests = [test for test in tests if test.__name__ in sys.argv[1:]]
server = couchdb.Server()
for test in tests:
_run(server, test)
def _run(server, func):
"""Run a test in a clean db and log its execution time."""
sys.stdout.write("* [%s] %s ... " % (func.__name__, func.__doc__.strip()))
sys.stdout.flush()
db_name = 'couchdb-python/perftest'
db = server.create(db_name)
try:
start = time.time()
func(db)
stop = time.time()
sys.stdout.write("%0.2fs\n" % (stop-start,))
sys.stdout.flush()
except Exception, e:
sys.stdout.write("FAILED - %r\n" % (unicode(e),))
sys.stdout.flush()
finally:
server.delete(db_name)
def create_doc(db):
"""Create lots of docs, one at a time"""
for i in range(1000):
db.save({'_id': unicode(i)})
def create_bulk_docs(db):
"""Create lots of docs, lots at a time"""
batch_size = 100
num_batches = 1000
for i in range(num_batches):
db.update([{'_id': unicode((i * batch_size) + j)} for j in range(batch_size)])
if __name__ == '__main__':
main()
|
|
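Adding a benchmark to the harness above only takes a new function plus an entry in the tests list; the sketch below is hypothetical (a couchdb-python save/delete round trip, Python 2 like the record) and would be selected by name with e.g. python perftest.py create_and_delete.

def create_and_delete(db):
    """Create then immediately delete docs, one at a time"""
    for i in range(100):
        doc = {'_id': unicode(i)}
        db.save(doc)        # couchdb-python fills in doc['_rev'] on save
        db.delete(doc)      # deletion needs that _rev

# ...and in main(): tests = [create_doc, create_bulk_docs, create_and_delete]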
a0173b248c7fe54534e0caed048e5f8f408d7ca1
|
rejected/data.py
|
rejected/data.py
|
"""
Rejected data objects
"""
import copy
class DataObject(object):
"""A class that will return a plain text representation of all of the
attributes assigned to the object.
"""
def __repr__(self):
"""Return a string representation of the object and all of its
attributes.
:rtype: str
"""
items = list()
for key, value in self.__dict__.iteritems():
if getattr(self.__class__, key, None) != value:
items.append('%s=%s' % (key, value))
return "<%s(%s)>" % (self.__class__.__name__, items)
class Message(DataObject):
"""Class for containing all the attributes about a message object creating a
flatter, more convenient way to access the data while supporting the legacy
methods that were previously in place in rejected < 2.0
"""
def __init__(self, channel, method, header, body):
"""Initialize a message setting the attributes from the given channel,
method, header and body.
:param pika.channel.Channel channel: The channel the msg was received on
:param pika.frames.Method method: Pika Method Frame object
:param pika.frames.Header header: Pika Header Frame object
:param str body: Pika message body
"""
DataObject.__init__(self)
self.channel = channel
self.method = method
self.properties = Properties(header)
self.body = copy.copy(body)
# Map method properties
self.consumer_tag = method.consumer_tag
self.delivery_tag = method.delivery_tag
self.exchange = method.exchange
self.redelivered = method.redelivered
self.routing_key = method.routing_key
class Properties(DataObject):
"""A class that represents all of the field attributes of AMQP's
Basic.Properties
"""
def __init__(self, header):
"""Create a base object to contain all of the properties we need
:param pika.spec.BasicProperties header: A header object from Pika
"""
DataObject.__init__(self)
self.content_type = header.content_type
self.content_encoding = header.content_encoding
self.headers = copy.deepcopy(header.headers) or dict()
self.delivery_mode = header.delivery_mode
self.priority = header.priority
self.correlation_id = header.correlation_id
self.reply_to = header.reply_to
self.expiration = header.expiration
self.message_id = header.message_id
self.timestamp = header.timestamp
self.type = header.type
self.user_id = header.user_id
self.app_id = header.app_id
self.cluster_id = header.cluster_id
|
Move these classes into their own file
|
Move these classes into their own file
|
Python
|
bsd-3-clause
|
gmr/rejected,gmr/rejected
|
Move these classes into their own file
|
"""
Rejected data objects
"""
import copy
class DataObject(object):
"""A class that will return a plain text representation of all of the
attributes assigned to the object.
"""
def __repr__(self):
"""Return a string representation of the object and all of its
attributes.
:rtype: str
"""
items = list()
for key, value in self.__dict__.iteritems():
if getattr(self.__class__, key, None) != value:
items.append('%s=%s' % (key, value))
return "<%s(%s)>" % (self.__class__.__name__, items)
class Message(DataObject):
"""Class for containing all the attributes about a message object creating a
flatter, more convenient way to access the data while supporting the legacy
methods that were previously in place in rejected < 2.0
"""
def __init__(self, channel, method, header, body):
"""Initialize a message setting the attributes from the given channel,
method, header and body.
:param pika.channel.Channel channel: The channel the msg was received on
:param pika.frames.Method method: Pika Method Frame object
:param pika.frames.Header header: Pika Header Frame object
:param str body: Pika message body
"""
DataObject.__init__(self)
self.channel = channel
self.method = method
self.properties = Properties(header)
self.body = copy.copy(body)
# Map method properties
self.consumer_tag = method.consumer_tag
self.delivery_tag = method.delivery_tag
self.exchange = method.exchange
self.redelivered = method.redelivered
self.routing_key = method.routing_key
class Properties(DataObject):
"""A class that represents all of the field attributes of AMQP's
Basic.Properties
"""
def __init__(self, header):
"""Create a base object to contain all of the properties we need
:param pika.spec.BasicProperties header: A header object from Pika
"""
DataObject.__init__(self)
self.content_type = header.content_type
self.content_encoding = header.content_encoding
self.headers = copy.deepcopy(header.headers) or dict()
self.delivery_mode = header.delivery_mode
self.priority = header.priority
self.correlation_id = header.correlation_id
self.reply_to = header.reply_to
self.expiration = header.expiration
self.message_id = header.message_id
self.timestamp = header.timestamp
self.type = header.type
self.user_id = header.user_id
self.app_id = header.app_id
self.cluster_id = header.cluster_id
|
<commit_before><commit_msg>Move these classes into their own file<commit_after>
|
"""
Rejected data objects
"""
import copy
class DataObject(object):
"""A class that will return a plain text representation of all of the
attributes assigned to the object.
"""
def __repr__(self):
"""Return a string representation of the object and all of its
attributes.
:rtype: str
"""
items = list()
for key, value in self.__dict__.iteritems():
if getattr(self.__class__, key, None) != value:
items.append('%s=%s' % (key, value))
return "<%s(%s)>" % (self.__class__.__name__, items)
class Message(DataObject):
"""Class for containing all the attributes about a message object creating a
flatter, more convenient way to access the data while supporting the legacy
methods that were previously in place in rejected < 2.0
"""
def __init__(self, channel, method, header, body):
"""Initialize a message setting the attributes from the given channel,
method, header and body.
:param pika.channel.Channel channel: The channel the msg was received on
:param pika.frames.Method method: Pika Method Frame object
:param pika.frames.Header header: Pika Header Frame object
:param str body: Pika message body
"""
DataObject.__init__(self)
self.channel = channel
self.method = method
self.properties = Properties(header)
self.body = copy.copy(body)
# Map method properties
self.consumer_tag = method.consumer_tag
self.delivery_tag = method.delivery_tag
self.exchange = method.exchange
self.redelivered = method.redelivered
self.routing_key = method.routing_key
class Properties(DataObject):
"""A class that represents all of the field attributes of AMQP's
Basic.Properties
"""
def __init__(self, header):
"""Create a base object to contain all of the properties we need
:param pika.spec.BasicProperties header: A header object from Pika
"""
DataObject.__init__(self)
self.content_type = header.content_type
self.content_encoding = header.content_encoding
self.headers = copy.deepcopy(header.headers) or dict()
self.delivery_mode = header.delivery_mode
self.priority = header.priority
self.correlation_id = header.correlation_id
self.reply_to = header.reply_to
self.expiration = header.expiration
self.message_id = header.message_id
self.timestamp = header.timestamp
self.type = header.type
self.user_id = header.user_id
self.app_id = header.app_id
self.cluster_id = header.cluster_id
|
Move these classes into their own file"""
Rejected data objects
"""
import copy
class DataObject(object):
"""A class that will return a plain text representation of all of the
attributes assigned to the object.
"""
def __repr__(self):
"""Return a string representation of the object and all of its
attributes.
:rtype: str
"""
items = list()
for key, value in self.__dict__.iteritems():
if getattr(self.__class__, key, None) != value:
items.append('%s=%s' % (key, value))
return "<%s(%s)>" % (self.__class__.__name__, items)
class Message(DataObject):
"""Class for containing all the attributes about a message object creating a
flatter, more convenient way to access the data while supporting the legacy
methods that were previously in place in rejected < 2.0
"""
def __init__(self, channel, method, header, body):
"""Initialize a message setting the attributes from the given channel,
method, header and body.
:param pika.channel.Channel channel: The channel the msg was received on
:param pika.frames.Method method: Pika Method Frame object
:param pika.frames.Header header: Pika Header Frame object
:param str body: Pika message body
"""
DataObject.__init__(self)
self.channel = channel
self.method = method
self.properties = Properties(header)
self.body = copy.copy(body)
# Map method properties
self.consumer_tag = method.consumer_tag
self.delivery_tag = method.delivery_tag
self.exchange = method.exchange
self.redelivered = method.redelivered
self.routing_key = method.routing_key
class Properties(DataObject):
"""A class that represents all of the field attributes of AMQP's
Basic.Properties
"""
def __init__(self, header):
"""Create a base object to contain all of the properties we need
:param pika.spec.BasicProperties header: A header object from Pika
"""
DataObject.__init__(self)
self.content_type = header.content_type
self.content_encoding = header.content_encoding
self.headers = copy.deepcopy(header.headers) or dict()
self.delivery_mode = header.delivery_mode
self.priority = header.priority
self.correlation_id = header.correlation_id
self.reply_to = header.reply_to
self.expiration = header.expiration
self.message_id = header.message_id
self.timestamp = header.timestamp
self.type = header.type
self.user_id = header.user_id
self.app_id = header.app_id
self.cluster_id = header.cluster_id
|
<commit_before><commit_msg>Move these classes into their own file<commit_after>"""
Rejected data objects
"""
import copy
class DataObject(object):
"""A class that will return a plain text representation of all of the
attributes assigned to the object.
"""
def __repr__(self):
"""Return a string representation of the object and all of its
attributes.
:rtype: str
"""
items = list()
for key, value in self.__dict__.iteritems():
if getattr(self.__class__, key, None) != value:
items.append('%s=%s' % (key, value))
return "<%s(%s)>" % (self.__class__.__name__, items)
class Message(DataObject):
"""Class for containing all the attributes about a message object creating a
flatter, more convenient way to access the data while supporting the legacy
methods that were previously in place in rejected < 2.0
"""
def __init__(self, channel, method, header, body):
"""Initialize a message setting the attributes from the given channel,
method, header and body.
:param pika.channel.Channel channel: The channel the msg was received on
:param pika.frames.Method method: Pika Method Frame object
:param pika.frames.Header header: Pika Header Frame object
:param str body: Pika message body
"""
DataObject.__init__(self)
self.channel = channel
self.method = method
self.properties = Properties(header)
self.body = copy.copy(body)
# Map method properties
self.consumer_tag = method.consumer_tag
self.delivery_tag = method.delivery_tag
self.exchange = method.exchange
self.redelivered = method.redelivered
self.routing_key = method.routing_key
class Properties(DataObject):
"""A class that represents all of the field attributes of AMQP's
Basic.Properties
"""
def __init__(self, header):
"""Create a base object to contain all of the properties we need
:param pika.spec.BasicProperties header: A header object from Pika
"""
DataObject.__init__(self)
self.content_type = header.content_type
self.content_encoding = header.content_encoding
self.headers = copy.deepcopy(header.headers) or dict()
self.delivery_mode = header.delivery_mode
self.priority = header.priority
self.correlation_id = header.correlation_id
self.reply_to = header.reply_to
self.expiration = header.expiration
self.message_id = header.message_id
self.timestamp = header.timestamp
self.type = header.type
self.user_id = header.user_id
self.app_id = header.app_id
self.cluster_id = header.cluster_id
|
|
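To show what the flattened Properties object looks like in practice, here is a hypothetical stub header (standing in for pika.spec.BasicProperties) fed through the record's class; Python 2, matching the codebase, and assumes Properties is importable.

from rejected.data import Properties

class HeaderStub(object):
    """Bare-minimum stand-in for pika.spec.BasicProperties."""
    content_type = 'application/json'
    content_encoding = None
    headers = {'x-retries': 1}
    delivery_mode = 2
    priority = None
    correlation_id = 'abc123'
    reply_to = None
    expiration = None
    message_id = '42'
    timestamp = None
    type = None
    user_id = None
    app_id = 'demo'
    cluster_id = None

props = Properties(HeaderStub())
print(props)   # DataObject.__repr__ lists each attribute that differs from the class default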
127cf9f067ebb622d41a24fa70010cb46b111126
|
molly/batch_processing/migrations/0002_auto__add_field_batch_last_run_failed.py
|
molly/batch_processing/migrations/0002_auto__add_field_batch_last_run_failed.py
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Batch.last_run_failed'
db.add_column('batch_processing_batch', 'last_run_failed', self.gf('django.db.models.fields.BooleanField')(default=False), keep_default=False)
def backwards(self, orm):
# Deleting field 'Batch.last_run_failed'
db.delete_column('batch_processing_batch', 'last_run_failed')
models = {
'batch_processing.batch': {
'Meta': {'object_name': 'Batch'},
'_metadata': ('django.db.models.fields.TextField', [], {'default': "'null'"}),
'cron_stmt': ('django.db.models.fields.TextField', [], {}),
'currently_running': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_run': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_run_failed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'local_name': ('django.db.models.fields.TextField', [], {}),
'log': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'method_name': ('django.db.models.fields.TextField', [], {}),
'pending': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'provider_name': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.TextField', [], {})
}
}
complete_apps = ['batch_processing']
|
Add migration for last commit
|
Add migration for last commit
|
Python
|
apache-2.0
|
mollyproject/mollyproject,mollyproject/mollyproject,mollyproject/mollyproject
|
Add migration for last commit
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Batch.last_run_failed'
db.add_column('batch_processing_batch', 'last_run_failed', self.gf('django.db.models.fields.BooleanField')(default=False), keep_default=False)
def backwards(self, orm):
# Deleting field 'Batch.last_run_failed'
db.delete_column('batch_processing_batch', 'last_run_failed')
models = {
'batch_processing.batch': {
'Meta': {'object_name': 'Batch'},
'_metadata': ('django.db.models.fields.TextField', [], {'default': "'null'"}),
'cron_stmt': ('django.db.models.fields.TextField', [], {}),
'currently_running': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_run': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_run_failed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'local_name': ('django.db.models.fields.TextField', [], {}),
'log': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'method_name': ('django.db.models.fields.TextField', [], {}),
'pending': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'provider_name': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.TextField', [], {})
}
}
complete_apps = ['batch_processing']
|
<commit_before><commit_msg>Add migration for last commit<commit_after>
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Batch.last_run_failed'
db.add_column('batch_processing_batch', 'last_run_failed', self.gf('django.db.models.fields.BooleanField')(default=False), keep_default=False)
def backwards(self, orm):
# Deleting field 'Batch.last_run_failed'
db.delete_column('batch_processing_batch', 'last_run_failed')
models = {
'batch_processing.batch': {
'Meta': {'object_name': 'Batch'},
'_metadata': ('django.db.models.fields.TextField', [], {'default': "'null'"}),
'cron_stmt': ('django.db.models.fields.TextField', [], {}),
'currently_running': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_run': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_run_failed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'local_name': ('django.db.models.fields.TextField', [], {}),
'log': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'method_name': ('django.db.models.fields.TextField', [], {}),
'pending': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'provider_name': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.TextField', [], {})
}
}
complete_apps = ['batch_processing']
|
Add migration for last commit# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Batch.last_run_failed'
db.add_column('batch_processing_batch', 'last_run_failed', self.gf('django.db.models.fields.BooleanField')(default=False), keep_default=False)
def backwards(self, orm):
# Deleting field 'Batch.last_run_failed'
db.delete_column('batch_processing_batch', 'last_run_failed')
models = {
'batch_processing.batch': {
'Meta': {'object_name': 'Batch'},
'_metadata': ('django.db.models.fields.TextField', [], {'default': "'null'"}),
'cron_stmt': ('django.db.models.fields.TextField', [], {}),
'currently_running': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_run': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_run_failed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'local_name': ('django.db.models.fields.TextField', [], {}),
'log': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'method_name': ('django.db.models.fields.TextField', [], {}),
'pending': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'provider_name': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.TextField', [], {})
}
}
complete_apps = ['batch_processing']
|
<commit_before><commit_msg>Add migration for last commit<commit_after># encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Batch.last_run_failed'
db.add_column('batch_processing_batch', 'last_run_failed', self.gf('django.db.models.fields.BooleanField')(default=False), keep_default=False)
def backwards(self, orm):
# Deleting field 'Batch.last_run_failed'
db.delete_column('batch_processing_batch', 'last_run_failed')
models = {
'batch_processing.batch': {
'Meta': {'object_name': 'Batch'},
'_metadata': ('django.db.models.fields.TextField', [], {'default': "'null'"}),
'cron_stmt': ('django.db.models.fields.TextField', [], {}),
'currently_running': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_run': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_run_failed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'local_name': ('django.db.models.fields.TextField', [], {}),
'log': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'method_name': ('django.db.models.fields.TextField', [], {}),
'pending': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'provider_name': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.TextField', [], {})
}
}
complete_apps = ['batch_processing']
|
|
446cb8846a221d055f7a3b121a200c0c2b5f34ba
|
test/example_controller_switchinghub_gevent.py
|
test/example_controller_switchinghub_gevent.py
|
import logging
import twink
import twink.ovs
import twink.gevent
import twink.ext
import twink.ofp4 as ofp4
import twink.ofp4.parse as ofp4parse
import twink.ofp4.build as b
import twink.ofp4.oxm as oxm
class TestChannel(twink.ovs.OvsChannel,
twink.ext.PortMonitorChannel,
twink.JackinChannel,
twink.gevent.ParallelMixin,
twink.LoggingChannel):
accept_versions=[4,]
init = False
def handle_async(self, message, channel):
if not self.init:
return
msg = ofp4parse.parse(message)
if msg.header.type == ofp4.OFPT_PACKET_IN:
print msg
in_port = [o for o in oxm.parse_list(msg.match.oxm_fields) if o.oxm_field==oxm.OXM_OF_IN_PORT][0].oxm_value
src_mac = ":".join(["%02x" % ord(a) for a in msg.data[6:12]])
channel.add_flow("table=0,priority=2,idle_timeout=300, dl_src=%s,in_port=%d, actions=goto_table:1" % (src_mac, in_port))
channel.add_flow("table=1,priority=2,idle_timeout=300, dl_dst=%s, actions=output:%d" % (src_mac, in_port))
channel.send(b.ofp_packet_out(None, msg.buffer_id, in_port, None, [b.ofp_action_output(None, None, ofp4.OFPP_TABLE, 0),], None))
print self.ofctl("dump-flows")
def handle(self, message, channel):
msg = ofp4parse.parse(message)
if msg.header.type == ofp4.OFPT_HELLO:
self.ofctl("add-group", "group_id=1,type=all"+",".join(["bucket=output:%d" % port.port_no for port in self.ports]))
self.add_flow("table=0,priority=1, actions=controller")
self.add_flow("table=1,priority=3, dl_dst=01:00:00:00:00:00/01:00:00:00:00:00, actions=group:1")
self.add_flow("table=1,priority=1, actions=group:1")
self.init = True
if __name__=="__main__":
logging.basicConfig(level=logging.DEBUG)
tcpserv = twink.gevent.ChannelStreamServer(("0.0.0.0", 6653), spawn=30)
tcpserv.channel_cls = TestChannel
twink.gevent.serve_forever(tcpserv)
|
Add gevent version of switching hub example
|
Add gevent version of switching hub example
|
Python
|
apache-2.0
|
hkwi/twink,yeardancing/twink
|
Add gevent version of switching hub example
|
import logging
import twink
import twink.ovs
import twink.gevent
import twink.ext
import twink.ofp4 as ofp4
import twink.ofp4.parse as ofp4parse
import twink.ofp4.build as b
import twink.ofp4.oxm as oxm
class TestChannel(twink.ovs.OvsChannel,
twink.ext.PortMonitorChannel,
twink.JackinChannel,
twink.gevent.ParallelMixin,
twink.LoggingChannel):
accept_versions=[4,]
init = False
def handle_async(self, message, channel):
if not self.init:
return
msg = ofp4parse.parse(message)
if msg.header.type == ofp4.OFPT_PACKET_IN:
print msg
in_port = [o for o in oxm.parse_list(msg.match.oxm_fields) if o.oxm_field==oxm.OXM_OF_IN_PORT][0].oxm_value
src_mac = ":".join(["%02x" % ord(a) for a in msg.data[6:12]])
channel.add_flow("table=0,priority=2,idle_timeout=300, dl_src=%s,in_port=%d, actions=goto_table:1" % (src_mac, in_port))
channel.add_flow("table=1,priority=2,idle_timeout=300, dl_dst=%s, actions=output:%d" % (src_mac, in_port))
channel.send(b.ofp_packet_out(None, msg.buffer_id, in_port, None, [b.ofp_action_output(None, None, ofp4.OFPP_TABLE, 0),], None))
print self.ofctl("dump-flows")
def handle(self, message, channel):
msg = ofp4parse.parse(message)
if msg.header.type == ofp4.OFPT_HELLO:
self.ofctl("add-group", "group_id=1,type=all"+",".join(["bucket=output:%d" % port.port_no for port in self.ports]))
self.add_flow("table=0,priority=1, actions=controller")
self.add_flow("table=1,priority=3, dl_dst=01:00:00:00:00:00/01:00:00:00:00:00, actions=group:1")
self.add_flow("table=1,priority=1, actions=group:1")
self.init = True
if __name__=="__main__":
logging.basicConfig(level=logging.DEBUG)
tcpserv = twink.gevent.ChannelStreamServer(("0.0.0.0", 6653), spawn=30)
tcpserv.channel_cls = TestChannel
twink.gevent.serve_forever(tcpserv)
|
<commit_before><commit_msg>Add gevent version of switching hub example<commit_after>
|
import logging
import twink
import twink.ovs
import twink.gevent
import twink.ext
import twink.ofp4 as ofp4
import twink.ofp4.parse as ofp4parse
import twink.ofp4.build as b
import twink.ofp4.oxm as oxm
class TestChannel(twink.ovs.OvsChannel,
twink.ext.PortMonitorChannel,
twink.JackinChannel,
twink.gevent.ParallelMixin,
twink.LoggingChannel):
accept_versions=[4,]
init = False
def handle_async(self, message, channel):
if not self.init:
return
msg = ofp4parse.parse(message)
if msg.header.type == ofp4.OFPT_PACKET_IN:
print msg
in_port = [o for o in oxm.parse_list(msg.match.oxm_fields) if o.oxm_field==oxm.OXM_OF_IN_PORT][0].oxm_value
src_mac = ":".join(["%02x" % ord(a) for a in msg.data[6:12]])
channel.add_flow("table=0,priority=2,idle_timeout=300, dl_src=%s,in_port=%d, actions=goto_table:1" % (src_mac, in_port))
channel.add_flow("table=1,priority=2,idle_timeout=300, dl_dst=%s, actions=output:%d" % (src_mac, in_port))
channel.send(b.ofp_packet_out(None, msg.buffer_id, in_port, None, [b.ofp_action_output(None, None, ofp4.OFPP_TABLE, 0),], None))
print self.ofctl("dump-flows")
def handle(self, message, channel):
msg = ofp4parse.parse(message)
if msg.header.type == ofp4.OFPT_HELLO:
self.ofctl("add-group", "group_id=1,type=all"+",".join(["bucket=output:%d" % port.port_no for port in self.ports]))
self.add_flow("table=0,priority=1, actions=controller")
self.add_flow("table=1,priority=3, dl_dst=01:00:00:00:00:00/01:00:00:00:00:00, actions=group:1")
self.add_flow("table=1,priority=1, actions=group:1")
self.init = True
if __name__=="__main__":
logging.basicConfig(level=logging.DEBUG)
tcpserv = twink.gevent.ChannelStreamServer(("0.0.0.0", 6653), spawn=30)
tcpserv.channel_cls = TestChannel
twink.gevent.serve_forever(tcpserv)
|
Add gevent version of switching hub exampleimport logging
import twink
import twink.ovs
import twink.gevent
import twink.ext
import twink.ofp4 as ofp4
import twink.ofp4.parse as ofp4parse
import twink.ofp4.build as b
import twink.ofp4.oxm as oxm
class TestChannel(twink.ovs.OvsChannel,
twink.ext.PortMonitorChannel,
twink.JackinChannel,
twink.gevent.ParallelMixin,
twink.LoggingChannel):
accept_versions=[4,]
init = False
def handle_async(self, message, channel):
if not self.init:
return
msg = ofp4parse.parse(message)
if msg.header.type == ofp4.OFPT_PACKET_IN:
print msg
in_port = [o for o in oxm.parse_list(msg.match.oxm_fields) if o.oxm_field==oxm.OXM_OF_IN_PORT][0].oxm_value
src_mac = ":".join(["%02x" % ord(a) for a in msg.data[6:12]])
channel.add_flow("table=0,priority=2,idle_timeout=300, dl_src=%s,in_port=%d, actions=goto_table:1" % (src_mac, in_port))
channel.add_flow("table=1,priority=2,idle_timeout=300, dl_dst=%s, actions=output:%d" % (src_mac, in_port))
channel.send(b.ofp_packet_out(None, msg.buffer_id, in_port, None, [b.ofp_action_output(None, None, ofp4.OFPP_TABLE, 0),], None))
print self.ofctl("dump-flows")
def handle(self, message, channel):
msg = ofp4parse.parse(message)
if msg.header.type == ofp4.OFPT_HELLO:
self.ofctl("add-group", "group_id=1,type=all"+",".join(["bucket=output:%d" % port.port_no for port in self.ports]))
self.add_flow("table=0,priority=1, actions=controller")
self.add_flow("table=1,priority=3, dl_dst=01:00:00:00:00:00/01:00:00:00:00:00, actions=group:1")
self.add_flow("table=1,priority=1, actions=group:1")
self.init = True
if __name__=="__main__":
logging.basicConfig(level=logging.DEBUG)
tcpserv = twink.gevent.ChannelStreamServer(("0.0.0.0", 6653), spawn=30)
tcpserv.channel_cls = TestChannel
twink.gevent.serve_forever(tcpserv)
|
<commit_before><commit_msg>Add gevent version of switching hub example<commit_after>import logging
import twink
import twink.ovs
import twink.gevent
import twink.ext
import twink.ofp4 as ofp4
import twink.ofp4.parse as ofp4parse
import twink.ofp4.build as b
import twink.ofp4.oxm as oxm
class TestChannel(twink.ovs.OvsChannel,
twink.ext.PortMonitorChannel,
twink.JackinChannel,
twink.gevent.ParallelMixin,
twink.LoggingChannel):
accept_versions=[4,]
init = False
def handle_async(self, message, channel):
if not self.init:
return
msg = ofp4parse.parse(message)
if msg.header.type == ofp4.OFPT_PACKET_IN:
print msg
in_port = [o for o in oxm.parse_list(msg.match.oxm_fields) if o.oxm_field==oxm.OXM_OF_IN_PORT][0].oxm_value
src_mac = ":".join(["%02x" % ord(a) for a in msg.data[6:12]])
channel.add_flow("table=0,priority=2,idle_timeout=300, dl_src=%s,in_port=%d, actions=goto_table:1" % (src_mac, in_port))
channel.add_flow("table=1,priority=2,idle_timeout=300, dl_dst=%s, actions=output:%d" % (src_mac, in_port))
channel.send(b.ofp_packet_out(None, msg.buffer_id, in_port, None, [b.ofp_action_output(None, None, ofp4.OFPP_TABLE, 0),], None))
print self.ofctl("dump-flows")
def handle(self, message, channel):
msg = ofp4parse.parse(message)
if msg.header.type == ofp4.OFPT_HELLO:
self.ofctl("add-group", "group_id=1,type=all"+",".join(["bucket=output:%d" % port.port_no for port in self.ports]))
self.add_flow("table=0,priority=1, actions=controller")
self.add_flow("table=1,priority=3, dl_dst=01:00:00:00:00:00/01:00:00:00:00:00, actions=group:1")
self.add_flow("table=1,priority=1, actions=group:1")
self.init = True
if __name__=="__main__":
logging.basicConfig(level=logging.DEBUG)
tcpserv = twink.gevent.ChannelStreamServer(("0.0.0.0", 6653), spawn=30)
tcpserv.channel_cls = TestChannel
twink.gevent.serve_forever(tcpserv)
|
|
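One detail worth calling out in the record above: the ovs-ofctl group specification is a flat comma-separated string, so the type=all field needs a trailing comma before the first bucket (corrected above). A tiny sketch with invented port numbers:

ports = [1, 2, 3]   # hypothetical port numbers
spec = "group_id=1,type=all," + ",".join("bucket=output:%d" % p for p in ports)
print(spec)
# group_id=1,type=all,bucket=output:1,bucket=output:2,bucket=output:3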
f9a1945a1ea85273074c38de7be2d3cfed3e6551
|
tests/test_color.py
|
tests/test_color.py
|
#
import pytest
import re
from sdsc import printcolor
@pytest.mark.parametrize("msg", ("hello",)
)
@pytest.mark.parametrize("msgtype",
("error", "debug", None)
)
@pytest.mark.parametrize('isatty', (True, False))
def test_printcolor(capsys, monkeypatch, msg, msgtype, isatty):
"""Checks printcolor() function
:param capsys:
:param str msg:
:param str msgtype:
:param bool isatty:
"""
monkeypatch.setattr('sys.stdout.isatty', lambda: isatty)
printcolor(msg, msgtype)
if msgtype in ('debug', 'error'):
assert capsys.readouterr()[0].count(msg) == 0
else:
assert capsys.readouterr()[0].count(msg) > 0
|
Add test case for printcolor() function
|
Add test case for printcolor() function
Also adds isatty fixture and uses monkeypatch to actually test the
colored output
|
Python
|
lgpl-2.1
|
sknorr/suse-doc-style-checker,sknorr/suse-doc-style-checker,sknorr/suse-doc-style-checker
|
Add test case for printcolor() function
Also adds isatty fixture and uses monkeypatch to actually test the
colored output
|
#
import pytest
import re
from sdsc import printcolor
@pytest.mark.parametrize("msg", ("hello",)
)
@pytest.mark.parametrize("msgtype",
("error", "debug", None)
)
@pytest.mark.parametrize('isatty', (True, False))
def test_printcolor(capsys, monkeypatch, msg, msgtype, isatty):
"""Checks printcolor() function
:param capsys:
:param str msg:
:param str msgtype:
:param bool isatty:
"""
monkeypatch.setattr('sys.stdout.isatty', lambda: isatty)
printcolor(msg, msgtype)
if msgtype in ('debug', 'error'):
assert capsys.readouterr()[0].count(msg) == 0
else:
assert capsys.readouterr()[0].count(msg) > 0
|
<commit_before><commit_msg>Add test case for printcolor() function
Also adds isatty fixture and uses monkeypatch to actually test the
colored output<commit_after>
|
#
import pytest
import re
from sdsc import printcolor
@pytest.mark.parametrize("msg", ("hello",)
)
@pytest.mark.parametrize("msgtype",
("error", "debug", None)
)
@pytest.mark.parametrize('isatty', (True, False))
def test_printcolor(capsys, monkeypatch, msg, msgtype, isatty):
"""Checks printcolor() function
:param capsys:
:param str msg:
:param str msgtype:
:param bool isatty:
"""
monkeypatch.setattr('sys.stdout.isatty', lambda: isatty)
printcolor(msg, msgtype)
if msgtype in ('debug', 'error'):
assert capsys.readouterr()[0].count(msg) == 0
else:
assert capsys.readouterr()[0].count(msg) > 0
|
Add test case for printcolor() function
Also adds isatty fixture and uses monkeypatch to actually test the
colored output#
import pytest
import re
from sdsc import printcolor
@pytest.mark.parametrize("msg", ("hello",)
)
@pytest.mark.parametrize("msgtype",
("error", "debug", None)
)
@pytest.mark.parametrize('isatty', (True, False))
def test_printcolor(capsys, monkeypatch, msg, msgtype, isatty):
"""Checks printcolor() function
:param capsys:
:param str msg:
:param str msgtype:
:param bool isatty:
"""
monkeypatch.setattr('sys.stdout.isatty', lambda: isatty)
printcolor(msg, msgtype)
if msgtype in ('debug', 'error'):
assert capsys.readouterr()[0].count(msg) == 0
else:
assert capsys.readouterr()[0].count(msg) > 0
|
<commit_before><commit_msg>Add test case for printcolor() function
Also adds isatty fixture and uses monkeypatch to actually test the
colored output<commit_after>#
import pytest
import re
from sdsc import printcolor
@pytest.mark.parametrize("msg", ("hello",)
)
@pytest.mark.parametrize("msgtype",
("error", "debug", None)
)
@pytest.mark.parametrize('isatty', (True, False))
def test_printcolor(capsys, monkeypatch, msg, msgtype, isatty):
"""Checks printcolor() function
:param capsys:
:param str msg:
:param str msgtype:
:param bool isatty:
"""
monkeypatch.setattr('sys.stdout.isatty', lambda: isatty)
printcolor(msg, msgtype)
if msgtype in ('debug', 'error'):
assert capsys.readouterr()[0].count(msg) == 0
else:
assert capsys.readouterr()[0].count(msg) > 0
|
|
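A printcolor() implementation consistent with the assertions in the record above would route 'debug' and 'error' messages away from stdout and colorize only when attached to a terminal. This is a hypothetical sketch — sdsc's real function may differ — but it satisfies the test as written:
import sys
def printcolor(message, msgtype=None):
    # Hypothetical sketch: debug/error output goes to stderr, so capsys
    # captures nothing on stdout for those message types.
    if msgtype in ('debug', 'error'):
        print(message, file=sys.stderr)
        return
    if sys.stdout.isatty():
        # Wrap the message in ANSI green only when stdout is a terminal.
        print('\033[32m{}\033[0m'.format(message))
    else:
        print(message)
Either branch of the final if keeps the raw message text in the stdout stream, which is all the test's count-based assertions require.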
130df743b14cf329c09f0c514ec0d6991b21dd45
|
examples/mnist-deepautoencoder.py
|
examples/mnist-deepautoencoder.py
|
#!/usr/bin/env python
import matplotlib.pyplot as plt
import theanets
from utils import load_mnist, plot_layers, plot_images
train, valid, _ = load_mnist()
e = theanets.Experiment(
theanets.Autoencoder,
layers=(784, 256, 64, 36, 64, 256, 784),
train_batches=100,
tied_weights=True,
)
e.train(train, valid, optimize='layerwise')
e.train(train, valid)
plot_layers([e.network.get_weights(i) for i in (1, 2, 3)], tied_weights=True)
plt.tight_layout()
plt.show()
valid = valid[:16*16]
plot_images(valid, 121, 'Sample data')
plot_images(e.network.predict(valid), 122, 'Reconstructed data')
plt.tight_layout()
plt.show()
|
#!/usr/bin/env python
import matplotlib.pyplot as plt
import theanets
from utils import load_mnist, plot_layers, plot_images
train, valid, _ = load_mnist()
e = theanets.Experiment(
theanets.Autoencoder,
layers=(784, 256, 64, 36, 64, 256, 784),
train_batches=100,
tied_weights=True,
)
e.train(train, valid, optimize='layerwise', patience=1, min_improvement=0.1)
e.train(train, valid)
plot_layers([e.network.get_weights(i) for i in (1, 2, 3)], tied_weights=True)
plt.tight_layout()
plt.show()
valid = valid[:16*16]
plot_images(valid, 121, 'Sample data')
plot_images(e.network.predict(valid), 122, 'Reconstructed data')
plt.tight_layout()
plt.show()
|
Decrease patience for each layerwise trainer.
|
Decrease patience for each layerwise trainer.
|
Python
|
mit
|
chrinide/theanets,lmjohns3/theanets,devdoer/theanets
|
#!/usr/bin/env python
import matplotlib.pyplot as plt
import theanets
from utils import load_mnist, plot_layers, plot_images
train, valid, _ = load_mnist()
e = theanets.Experiment(
theanets.Autoencoder,
layers=(784, 256, 64, 36, 64, 256, 784),
train_batches=100,
tied_weights=True,
)
e.train(train, valid, optimize='layerwise')
e.train(train, valid)
plot_layers([e.network.get_weights(i) for i in (1, 2, 3)], tied_weights=True)
plt.tight_layout()
plt.show()
valid = valid[:16*16]
plot_images(valid, 121, 'Sample data')
plot_images(e.network.predict(valid), 122, 'Reconstructed data')
plt.tight_layout()
plt.show()
Decrease patience for each layerwise trainer.
|
#!/usr/bin/env python
import matplotlib.pyplot as plt
import theanets
from utils import load_mnist, plot_layers, plot_images
train, valid, _ = load_mnist()
e = theanets.Experiment(
theanets.Autoencoder,
layers=(784, 256, 64, 36, 64, 256, 784),
train_batches=100,
tied_weights=True,
)
e.train(train, valid, optimize='layerwise', patience=1, min_improvement=0.1)
e.train(train, valid)
plot_layers([e.network.get_weights(i) for i in (1, 2, 3)], tied_weights=True)
plt.tight_layout()
plt.show()
valid = valid[:16*16]
plot_images(valid, 121, 'Sample data')
plot_images(e.network.predict(valid), 122, 'Reconstructed data')
plt.tight_layout()
plt.show()
|
<commit_before>#!/usr/bin/env python
import matplotlib.pyplot as plt
import theanets
from utils import load_mnist, plot_layers, plot_images
train, valid, _ = load_mnist()
e = theanets.Experiment(
theanets.Autoencoder,
layers=(784, 256, 64, 36, 64, 256, 784),
train_batches=100,
tied_weights=True,
)
e.train(train, valid, optimize='layerwise')
e.train(train, valid)
plot_layers([e.network.get_weights(i) for i in (1, 2, 3)], tied_weights=True)
plt.tight_layout()
plt.show()
valid = valid[:16*16]
plot_images(valid, 121, 'Sample data')
plot_images(e.network.predict(valid), 122, 'Reconstructed data')
plt.tight_layout()
plt.show()
<commit_msg>Decrease patience for each layerwise trainer.<commit_after>
|
#!/usr/bin/env python
import matplotlib.pyplot as plt
import theanets
from utils import load_mnist, plot_layers, plot_images
train, valid, _ = load_mnist()
e = theanets.Experiment(
theanets.Autoencoder,
layers=(784, 256, 64, 36, 64, 256, 784),
train_batches=100,
tied_weights=True,
)
e.train(train, valid, optimize='layerwise', patience=1, min_improvement=0.1)
e.train(train, valid)
plot_layers([e.network.get_weights(i) for i in (1, 2, 3)], tied_weights=True)
plt.tight_layout()
plt.show()
valid = valid[:16*16]
plot_images(valid, 121, 'Sample data')
plot_images(e.network.predict(valid), 122, 'Reconstructed data')
plt.tight_layout()
plt.show()
|
#!/usr/bin/env python
import matplotlib.pyplot as plt
import theanets
from utils import load_mnist, plot_layers, plot_images
train, valid, _ = load_mnist()
e = theanets.Experiment(
theanets.Autoencoder,
layers=(784, 256, 64, 36, 64, 256, 784),
train_batches=100,
tied_weights=True,
)
e.train(train, valid, optimize='layerwise')
e.train(train, valid)
plot_layers([e.network.get_weights(i) for i in (1, 2, 3)], tied_weights=True)
plt.tight_layout()
plt.show()
valid = valid[:16*16]
plot_images(valid, 121, 'Sample data')
plot_images(e.network.predict(valid), 122, 'Reconstructed data')
plt.tight_layout()
plt.show()
Decrease patience for each layerwise trainer.#!/usr/bin/env python
import matplotlib.pyplot as plt
import theanets
from utils import load_mnist, plot_layers, plot_images
train, valid, _ = load_mnist()
e = theanets.Experiment(
theanets.Autoencoder,
layers=(784, 256, 64, 36, 64, 256, 784),
train_batches=100,
tied_weights=True,
)
e.train(train, valid, optimize='layerwise', patience=1, min_improvement=0.1)
e.train(train, valid)
plot_layers([e.network.get_weights(i) for i in (1, 2, 3)], tied_weights=True)
plt.tight_layout()
plt.show()
valid = valid[:16*16]
plot_images(valid, 121, 'Sample data')
plot_images(e.network.predict(valid), 122, 'Reconstructed data')
plt.tight_layout()
plt.show()
|
<commit_before>#!/usr/bin/env python
import matplotlib.pyplot as plt
import theanets
from utils import load_mnist, plot_layers, plot_images
train, valid, _ = load_mnist()
e = theanets.Experiment(
theanets.Autoencoder,
layers=(784, 256, 64, 36, 64, 256, 784),
train_batches=100,
tied_weights=True,
)
e.train(train, valid, optimize='layerwise')
e.train(train, valid)
plot_layers([e.network.get_weights(i) for i in (1, 2, 3)], tied_weights=True)
plt.tight_layout()
plt.show()
valid = valid[:16*16]
plot_images(valid, 121, 'Sample data')
plot_images(e.network.predict(valid), 122, 'Reconstructed data')
plt.tight_layout()
plt.show()
<commit_msg>Decrease patience for each layerwise trainer.<commit_after>#!/usr/bin/env python
import matplotlib.pyplot as plt
import theanets
from utils import load_mnist, plot_layers, plot_images
train, valid, _ = load_mnist()
e = theanets.Experiment(
theanets.Autoencoder,
layers=(784, 256, 64, 36, 64, 256, 784),
train_batches=100,
tied_weights=True,
)
e.train(train, valid, optimize='layerwise', patience=1, min_improvement=0.1)
e.train(train, valid)
plot_layers([e.network.get_weights(i) for i in (1, 2, 3)], tied_weights=True)
plt.tight_layout()
plt.show()
valid = valid[:16*16]
plot_images(valid, 121, 'Sample data')
plot_images(e.network.predict(valid), 122, 'Reconstructed data')
plt.tight_layout()
plt.show()
|
46493648e16ea99f2dbc86175b2fc8b134628b61
|
tests/test_http_api.py
|
tests/test_http_api.py
|
import json
import six
from twisted.trial import unittest
from twisted.internet.defer import inlineCallbacks
from twisted.web import server
from twisted.internet.defer import succeed
from twisted.web.test.test_web import DummyRequest
from tests.mockserver import MockServer, PORT
from autologin.http_api import AutologinAPI
class WebTest(unittest.TestCase):
@inlineCallbacks
def test(self):
url = 'http://localhost:{}'.format(PORT)
view = AutologinAPI()
with MockServer():
request = api_request(
url=url + '?foo=', username='admin', password='secret')
yield render(view, request)
result = api_result(request)
print(result)
assert result['status'] == 'solved'
assert result['start_url'] == 'http://localhost:8781/'
assert {c['name']: c['value'] for c in result['cookies']} == \
{'_auth': 'yes'}
def api_request(**kwargs):
request = DummyRequest([''])
request.method = b'POST'
request.content = six.BytesIO(
json.dumps(kwargs).encode('utf-8'))
return request
def api_result(request):
return json.loads(b''.join(request.written).decode('utf-8'))
def render(resource, request):
result = resource.render(request)
if isinstance(result, str):
request.write(result)
request.finish()
return succeed(None)
elif result is server.NOT_DONE_YET:
if request.finished:
return succeed(None)
else:
return request.notifyFinish()
else:
raise ValueError('Unexpected return value: %r' % (result,))
|
Test http-api (only the success path)
|
Test http-api (only the success path)
|
Python
|
apache-2.0
|
TeamHG-Memex/autologin,TeamHG-Memex/autologin,TeamHG-Memex/autologin
|
Test http-api (only the success path)
|
import json
import six
from twisted.trial import unittest
from twisted.internet.defer import inlineCallbacks
from twisted.web import server
from twisted.internet.defer import succeed
from twisted.web.test.test_web import DummyRequest
from tests.mockserver import MockServer, PORT
from autologin.http_api import AutologinAPI
class WebTest(unittest.TestCase):
@inlineCallbacks
def test(self):
url = 'http://localhost:{}'.format(PORT)
view = AutologinAPI()
with MockServer():
request = api_request(
url=url + '?foo=', username='admin', password='secret')
yield render(view, request)
result = api_result(request)
print(result)
assert result['status'] == 'solved'
assert result['start_url'] == 'http://localhost:8781/'
assert {c['name']: c['value'] for c in result['cookies']} == \
{'_auth': 'yes'}
def api_request(**kwargs):
request = DummyRequest([''])
request.method = b'POST'
request.content = six.BytesIO(
json.dumps(kwargs).encode('utf-8'))
return request
def api_result(request):
return json.loads(b''.join(request.written).decode('utf-8'))
def render(resource, request):
result = resource.render(request)
if isinstance(result, str):
request.write(result)
request.finish()
return succeed(None)
elif result is server.NOT_DONE_YET:
if request.finished:
return succeed(None)
else:
return request.notifyFinish()
else:
raise ValueError('Unexpected return value: %r' % (result,))
|
<commit_before><commit_msg>Test http-api (only the success path)<commit_after>
|
import json
import six
from twisted.trial import unittest
from twisted.internet.defer import inlineCallbacks
from twisted.web import server
from twisted.internet.defer import succeed
from twisted.web.test.test_web import DummyRequest
from tests.mockserver import MockServer, PORT
from autologin.http_api import AutologinAPI
class WebTest(unittest.TestCase):
@inlineCallbacks
def test(self):
url = 'http://localhost:{}'.format(PORT)
view = AutologinAPI()
with MockServer():
request = api_request(
url=url + '?foo=', username='admin', password='secret')
yield render(view, request)
result = api_result(request)
print(result)
assert result['status'] == 'solved'
assert result['start_url'] == 'http://localhost:8781/'
assert {c['name']: c['value'] for c in result['cookies']} == \
{'_auth': 'yes'}
def api_request(**kwargs):
request = DummyRequest([''])
request.method = b'POST'
request.content = six.BytesIO(
json.dumps(kwargs).encode('utf-8'))
return request
def api_result(request):
return json.loads(b''.join(request.written).decode('utf-8'))
def render(resource, request):
result = resource.render(request)
if isinstance(result, str):
request.write(result)
request.finish()
return succeed(None)
elif result is server.NOT_DONE_YET:
if request.finished:
return succeed(None)
else:
return request.notifyFinish()
else:
raise ValueError('Unexpected return value: %r' % (result,))
|
Test http-api (only the success path)import json
import six
from twisted.trial import unittest
from twisted.internet.defer import inlineCallbacks
from twisted.web import server
from twisted.internet.defer import succeed
from twisted.web.test.test_web import DummyRequest
from tests.mockserver import MockServer, PORT
from autologin.http_api import AutologinAPI
class WebTest(unittest.TestCase):
@inlineCallbacks
def test(self):
url = 'http://localhost:{}'.format(PORT)
view = AutologinAPI()
with MockServer():
request = api_request(
url=url + '?foo=', username='admin', password='secret')
yield render(view, request)
result = api_result(request)
print(result)
assert result['status'] == 'solved'
assert result['start_url'] == 'http://localhost:8781/'
assert {c['name']: c['value'] for c in result['cookies']} == \
{'_auth': 'yes'}
def api_request(**kwargs):
request = DummyRequest([''])
request.method = b'POST'
request.content = six.BytesIO(
json.dumps(kwargs).encode('utf-8'))
return request
def api_result(request):
return json.loads(b''.join(request.written).decode('utf-8'))
def render(resource, request):
result = resource.render(request)
if isinstance(result, str):
request.write(result)
request.finish()
return succeed(None)
elif result is server.NOT_DONE_YET:
if request.finished:
return succeed(None)
else:
return request.notifyFinish()
else:
raise ValueError('Unexpected return value: %r' % (result,))
|
<commit_before><commit_msg>Test http-api (only the success path)<commit_after>import json
import six
from twisted.trial import unittest
from twisted.internet.defer import inlineCallbacks
from twisted.web import server
from twisted.internet.defer import succeed
from twisted.web.test.test_web import DummyRequest
from tests.mockserver import MockServer, PORT
from autologin.http_api import AutologinAPI
class WebTest(unittest.TestCase):
@inlineCallbacks
def test(self):
url = 'http://localhost:{}'.format(PORT)
view = AutologinAPI()
with MockServer():
request = api_request(
url=url + '?foo=', username='admin', password='secret')
yield render(view, request)
result = api_result(request)
print(result)
assert result['status'] == 'solved'
assert result['start_url'] == 'http://localhost:8781/'
assert {c['name']: c['value'] for c in result['cookies']} == \
{'_auth': 'yes'}
def api_request(**kwargs):
request = DummyRequest([''])
request.method = b'POST'
request.content = six.BytesIO(
json.dumps(kwargs).encode('utf-8'))
return request
def api_result(request):
return json.loads(b''.join(request.written).decode('utf-8'))
def render(resource, request):
result = resource.render(request)
if isinstance(result, str):
request.write(result)
request.finish()
return succeed(None)
elif result is server.NOT_DONE_YET:
if request.finished:
return succeed(None)
else:
return request.notifyFinish()
else:
raise ValueError('Unexpected return value: %r' % (result,))
|
|
5cc9cd1f3c05b1f83c4f57cfc86918135c20764f
|
lackawanna/datapoint/migrations/0002_datapoint_large_file.py
|
lackawanna/datapoint/migrations/0002_datapoint_large_file.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from s3direct.fields import S3DirectField
class Migration(migrations.Migration):
dependencies = [
('datapoint', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='datapoint',
name='large_file',
field=S3DirectField(dest='all', blank=True),
preserve_default=False,
),
]
|
Add hand written migration for large_file inclusion in datapoint model
|
Add hand written migration for large_file inclusion in datapoint model
|
Python
|
bsd-3-clause
|
allyjweir/lackawanna,allyjweir/lackawanna,allyjweir/lackawanna,allyjweir/lackawanna
|
Add hand written migration for large_file inclusion in datapoint model
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from s3direct.fields import S3DirectField
class Migration(migrations.Migration):
dependencies = [
('datapoint', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='datapoint',
name='large_file',
field=S3DirectField(dest='all', blank=True),
preserve_default=False,
),
]
|
<commit_before><commit_msg>Add hand written migration for large_file inclusion in datapoint model<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from s3direct.fields import S3DirectField
class Migration(migrations.Migration):
dependencies = [
('datapoint', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='datapoint',
name='large_file',
field=S3DirectField(dest='all', blank=True),
preserve_default=False,
),
]
|
Add hand written migration for large_file inclusion in datapoint model# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from s3direct.fields import S3DirectField
class Migration(migrations.Migration):
dependencies = [
('datapoint', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='datapoint',
name='large_file',
field=S3DirectField(dest='all', blank=True),
preserve_default=False,
),
]
|
<commit_before><commit_msg>Add hand written migration for large_file inclusion in datapoint model<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from s3direct.fields import S3DirectField
class Migration(migrations.Migration):
dependencies = [
('datapoint', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='datapoint',
name='large_file',
field=S3DirectField(dest='all', blank=True),
preserve_default=False,
),
]
|
|
8c2f30e6de8b4d5237d39d178a07c8a3fddc0b7f
|
tests/test_interface.py
|
tests/test_interface.py
|
"""tests/test_interface.py.
Tests hug's defined interfaces (HTTP, CLI, & Local)
Copyright (C) 2016 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import pytest
import hug
@hug.http(('/namer', '/namer/{name}'), ('GET', 'POST'), versions=(None, 2))
def namer(name=None):
return name
class TestHTTP(object):
"""Tests the functionality provided by hug.interface.HTTP"""
def test_urls(self):
"""Test to ensure HTTP interface correctly returns URLs associated with it"""
assert namer.interface.http.urls() == ['/namer', '/namer/{name}']
def test_url(self):
"""Test to ensure HTTP interface correctly automatically returns URL associated with it"""
assert namer.interface.http.url() == '/namer'
assert namer.interface.http.url(name='tim') == '/namer/tim'
assert namer.interface.http.url(name='tim', version=2) == '/v2/namer/tim'
with pytest.raises(KeyError):
namer.interface.http.url(undefined='not a variable')
with pytest.raises(KeyError):
namer.interface.http.url(version=10)
|
Add tests for desired automatic url creation feature
|
Add tests for desired automatic url creation feature
|
Python
|
mit
|
MuhammadAlkarouri/hug,timothycrosley/hug,timothycrosley/hug,MuhammadAlkarouri/hug,MuhammadAlkarouri/hug,timothycrosley/hug
|
Add tests for desired automatic url creation feature
|
"""tests/test_interface.py.
Tests hug's defined interfaces (HTTP, CLI, & Local)
Copyright (C) 2016 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import pytest
import hug
@hug.http(('/namer', '/namer/{name}'), ('GET', 'POST'), versions=(None, 2))
def namer(name=None):
return name
class TestHTTP(object):
"""Tests the functionality provided by hug.interface.HTTP"""
def test_urls(self):
"""Test to ensure HTTP interface correctly returns URLs associated with it"""
assert namer.interface.http.urls() == ['/namer', '/namer/{name}']
def test_url(self):
"""Test to ensure HTTP interface correctly automatically returns URL associated with it"""
assert namer.interface.http.url() == '/namer'
assert namer.interface.http.url(name='tim') == '/namer/tim'
assert namer.interface.http.url(name='tim', version=2) == '/v2/namer/tim'
with pytest.raises(KeyError):
namer.interface.http.url(undefined='not a variable')
with pytest.raises(KeyError):
namer.interface.http.url(version=10)
|
<commit_before><commit_msg>Add tests for desired automatic url creation feature<commit_after>
|
"""tests/test_interface.py.
Tests hug's defined interfaces (HTTP, CLI, & Local)
Copyright (C) 2016 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import pytest
import hug
@hug.http(('/namer', '/namer/{name}'), ('GET', 'POST'), versions=(None, 2))
def namer(name=None):
return name
class TestHTTP(object):
"""Tests the functionality provided by hug.interface.HTTP"""
def test_urls(self):
"""Test to ensure HTTP interface correctly returns URLs associated with it"""
assert namer.interface.http.urls() == ['/namer', '/namer/{name}']
def test_url(self):
"""Test to ensure HTTP interface correctly automatically returns URL associated with it"""
assert namer.interface.http.url() == '/namer'
assert namer.interface.http.url(name='tim') == '/namer/tim'
assert namer.interface.http.url(name='tim', version=2) == '/v2/namer/tim'
with pytest.raises(KeyError):
namer.interface.http.url(undefined='not a variable')
with pytest.raises(KeyError):
namer.interface.http.url(version=10)
|
Add tests for desired automatic url creation feature"""tests/test_interface.py.
Tests hug's defined interfaces (HTTP, CLI, & Local)
Copyright (C) 2016 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import pytest
import hug
@hug.http(('/namer', '/namer/{name}'), ('GET', 'POST'), versions=(None, 2))
def namer(name=None):
return name
class TestHTTP(object):
"""Tests the functionality provided by hug.interface.HTTP"""
def test_urls(self):
"""Test to ensure HTTP interface correctly returns URLs associated with it"""
assert namer.interface.http.urls() == ['/namer', '/namer/{name}']
def test_url(self):
"""Test to ensure HTTP interface correctly automatically returns URL associated with it"""
assert namer.interface.http.url() == '/namer'
assert namer.interface.http.url(name='tim') == '/namer/tim'
assert namer.interface.http.url(name='tim', version=2) == '/v2/namer/tim'
with pytest.raises(KeyError):
namer.interface.http.url(undefined='not a variable')
with pytest.raises(KeyError):
namer.interface.http.url(version=10)
|
<commit_before><commit_msg>Add tests for desired automatic url creation feature<commit_after>"""tests/test_interface.py.
Tests hug's defined interfaces (HTTP, CLI, & Local)
Copyright (C) 2016 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import pytest
import hug
@hug.http(('/namer', '/namer/{name}'), ('GET', 'POST'), versions=(None, 2))
def namer(name=None):
return name
class TestHTTP(object):
"""Tests the functionality provided by hug.interface.HTTP"""
def test_urls(self):
"""Test to ensure HTTP interface correctly returns URLs associated with it"""
assert namer.interface.http.urls() == ['/namer', '/namer/{name}']
def test_url(self):
"""Test to ensure HTTP interface correctly automatically returns URL associated with it"""
assert namer.interface.http.url() == '/namer'
assert namer.interface.http.url(name='tim') == '/namer/tim'
assert namer.interface.http.url(name='tim', version=2) == '/v2/namer/tim'
with pytest.raises(KeyError):
namer.interface.http.url(undefined='not a variable')
with pytest.raises(KeyError):
namer.interface.http.url(version=10)
|
|
58b6f41b7ba67ce9720e069eab8d2f78af113eb3
|
evennia/typeclasses/migrations/0010_delete_old_player_tables.py
|
evennia/typeclasses/migrations/0010_delete_old_player_tables.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-13 18:47
from __future__ import unicode_literals
from django.db import migrations, OperationalError, connection
def _table_exists(db_cursor, tablename):
"Returns bool if table exists or not"
sql_check_exists = "SELECT * from %s;" % tablename
try:
db_cursor.execute(sql_check_exists)
return True
except OperationalError:
return False
def _drop_table(db_cursor, table_name):
if _table_exists(db_cursor, table_name):
sql_drop = "DROP TABLE %s;" % table_name
db_cursor.execute(sql_drop)
def drop_tables(apps, schema_migrator):
db_cursor = connection.cursor()
_drop_table(db_cursor, "players_playerdb")
_drop_table(db_cursor, "players_playerdb_db_attributes")
_drop_table(db_cursor, "players_playerdb_db_tags")
_drop_table(db_cursor, "players_playerdb_groups")
_drop_table(db_cursor, "players_playerdb_user_permissions")
class Migration(migrations.Migration):
dependencies = [
('typeclasses', '0009_rename_player_cmdsets_typeclasses'),
]
operations = [
migrations.RunPython(drop_tables)
]
|
Add migration to finally remove the last playerdb_ tables
|
Add migration to finally remove the last playerdb_ tables
|
Python
|
bsd-3-clause
|
jamesbeebop/evennia,feend78/evennia,feend78/evennia,feend78/evennia,feend78/evennia,jamesbeebop/evennia,jamesbeebop/evennia
|
Add migration to finally remove the last playerdb_ tables
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-13 18:47
from __future__ import unicode_literals
from django.db import migrations, OperationalError, connection
def _table_exists(db_cursor, tablename):
"Returns bool if table exists or not"
sql_check_exists = "SELECT * from %s;" % tablename
try:
db_cursor.execute(sql_check_exists)
return True
except OperationalError:
return False
def _drop_table(db_cursor, table_name):
if _table_exists(db_cursor, table_name):
sql_drop = "DROP TABLE %s;" % table_name
db_cursor.execute(sql_drop)
def drop_tables(apps, schema_migrator):
db_cursor = connection.cursor()
_drop_table(db_cursor, "players_playerdb")
_drop_table(db_cursor, "players_playerdb_db_attributes")
_drop_table(db_cursor, "players_playerdb_db_tags")
_drop_table(db_cursor, "players_playerdb_groups")
_drop_table(db_cursor, "players_playerdb_user_permissions")
class Migration(migrations.Migration):
dependencies = [
('typeclasses', '0009_rename_player_cmdsets_typeclasses'),
]
operations = [
migrations.RunPython(drop_tables)
]
|
<commit_before><commit_msg>Add migration to finally remove the last playerdb_ tables<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-13 18:47
from __future__ import unicode_literals
from django.db import migrations, OperationalError, connection
def _table_exists(db_cursor, tablename):
"Returns bool if table exists or not"
sql_check_exists = "SELECT * from %s;" % tablename
try:
db_cursor.execute(sql_check_exists)
return True
except OperationalError:
return False
def _drop_table(db_cursor, table_name):
if _table_exists(db_cursor, table_name):
sql_drop = "DROP TABLE %s;" % table_name
db_cursor.execute(sql_drop)
def drop_tables(apps, schema_migrator):
db_cursor = connection.cursor()
_drop_table(db_cursor, "players_playerdb")
_drop_table(db_cursor, "players_playerdb_db_attributes")
_drop_table(db_cursor, "players_playerdb_db_tags")
_drop_table(db_cursor, "players_playerdb_groups")
_drop_table(db_cursor, "players_playerdb_user_permissions")
class Migration(migrations.Migration):
dependencies = [
('typeclasses', '0009_rename_player_cmdsets_typeclasses'),
]
operations = [
migrations.RunPython(drop_tables)
]
|
Add migration to finally remove the last playerdb_ tables# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-13 18:47
from __future__ import unicode_literals
from django.db import migrations, OperationalError, connection
def _table_exists(db_cursor, tablename):
"Returns bool if table exists or not"
sql_check_exists = "SELECT * from %s;" % tablename
try:
db_cursor.execute(sql_check_exists)
return True
except OperationalError:
return False
def _drop_table(db_cursor, table_name):
if _table_exists(db_cursor, table_name):
sql_drop = "DROP TABLE %s;" % table_name
db_cursor.execute(sql_drop)
def drop_tables(apps, schema_migrator):
db_cursor = connection.cursor()
_drop_table(db_cursor, "players_playerdb")
_drop_table(db_cursor, "players_playerdb_db_attributes")
_drop_table(db_cursor, "players_playerdb_db_tags")
_drop_table(db_cursor, "players_playerdb_groups")
_drop_table(db_cursor, "players_playerdb_user_permissions")
class Migration(migrations.Migration):
dependencies = [
('typeclasses', '0009_rename_player_cmdsets_typeclasses'),
]
operations = [
migrations.RunPython(drop_tables)
]
|
<commit_before><commit_msg>Add migration to finally remove the last playerdb_ tables<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-13 18:47
from __future__ import unicode_literals
from django.db import migrations, OperationalError, connection
def _table_exists(db_cursor, tablename):
"Returns bool if table exists or not"
sql_check_exists = "SELECT * from %s;" % tablename
try:
db_cursor.execute(sql_check_exists)
return True
except OperationalError:
return False
def _drop_table(db_cursor, table_name):
if _table_exists(db_cursor, table_name):
sql_drop = "DROP TABLE %s;" % table_name
db_cursor.execute(sql_drop)
def drop_tables(apps, schema_migrator):
db_cursor = connection.cursor()
_drop_table(db_cursor, "players_playerdb")
_drop_table(db_cursor, "players_playerdb_db_attributes")
_drop_table(db_cursor, "players_playerdb_db_tags")
_drop_table(db_cursor, "players_playerdb_groups")
_drop_table(db_cursor, "players_playerdb_user_permissions")
class Migration(migrations.Migration):
dependencies = [
('typeclasses', '0009_rename_player_cmdsets_typeclasses'),
]
operations = [
migrations.RunPython(drop_tables)
]
|
|
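The _table_exists() probe in the record above issues a throwaway SELECT and treats OperationalError as "missing"; note that on some backends (e.g. PostgreSQL) a missing table raises ProgrammingError instead, which that except clause would not catch. An equivalent check can lean on Django's connection introspection rather than exception handling — a sketch under the same migration context:
from django.db import connection
def _table_exists(db_cursor, tablename):
    # Ask the backend for its table list instead of probing with SQL;
    # table_names() accepts an open cursor and returns current tables.
    return tablename in connection.introspection.table_names(db_cursor)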
e8fdda66fcda2be3d002fc256abd2dcb2b55edc1
|
create.py
|
create.py
|
def ClassFactory(name, argnames, BaseClass=BaseClass):
def __init__(self, **kwargs):
for key, value in kwargs.items():
if key not in argnames:
raise TypeError("Argument %s not valid for %s"
% (key, self.__class__.__name__))
setattr(self, key, value)
BaseClass.__init__(self, name[:-len("Class")])
newclass = type(name, (BaseClass,),{"__init__": __init__})
return newclass
|
Add the secondary ClassFactory class to generate models on the fly
|
Add the secondary ClassFactory class to generate models on the fly
|
Python
|
mit
|
mathur/modelstruct
|
Add the secondary ClassFactory class to generate models on the fly
|
def ClassFactory(name, argnames, BaseClass=BaseClass):
def __init__(self, **kwargs):
for key, value in kwargs.items():
if key not in argnames:
raise TypeError("Argument %s not valid for %s"
% (key, self.__class__.__name__))
setattr(self, key, value)
BaseClass.__init__(self, name[:-len("Class")])
newclass = type(name, (BaseClass,),{"__init__": __init__})
return newclass
|
<commit_before><commit_msg>Add the secondary ClassFactory class to generate models on the fly<commit_after>
|
def ClassFactory(name, argnames, BaseClass=BaseClass):
def __init__(self, **kwargs):
for key, value in kwargs.items():
if key not in argnames:
raise TypeError("Argument %s not valid for %s"
% (key, self.__class__.__name__))
setattr(self, key, value)
BaseClass.__init__(self, name[:-len("Class")])
newclass = type(name, (BaseClass,),{"__init__": __init__})
return newclass
|
Add the secondary ClassFactory class to generate models on the flydef ClassFactory(name, argnames, BaseClass=BaseClass):
def __init__(self, **kwargs):
for key, value in kwargs.items():
if key not in argnames:
raise TypeError("Argument %s not valid for %s"
% (key, self.__class__.__name__))
setattr(self, key, value)
BaseClass.__init__(self, name[:-len("Class")])
newclass = type(name, (BaseClass,),{"__init__": __init__})
return newclass
|
<commit_before><commit_msg>Add the secondary ClassFactory class to generate models on the fly<commit_after>def ClassFactory(name, argnames, BaseClass=BaseClass):
def __init__(self, **kwargs):
for key, value in kwargs.items():
if key not in argnames:
raise TypeError("Argument %s not valid for %s"
% (key, self.__class__.__name__))
setattr(self, key, value)
BaseClass.__init__(self, name[:-len("Class")])
newclass = type(name, (BaseClass,),{"__init__": __init__})
return newclass
|
|
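As excerpted, the ClassFactory snippet above depends on a BaseClass its module is assumed to define elsewhere — the default `BaseClass=BaseClass` raises NameError if no such name is in scope when the def runs. A minimal self-contained sketch, with a hypothetical BaseClass stub standing in for the real model base:
class BaseClass(object):
    # Hypothetical stub: the original module is assumed to supply the
    # real model base; __init__ takes the derived class's short name.
    def __init__(self, classname):
        self.classname = classname
def ClassFactory(name, argnames, BaseClass=BaseClass):
    def __init__(self, **kwargs):
        for key, value in kwargs.items():
            # Reject keyword arguments that are not declared fields.
            if key not in argnames:
                raise TypeError("Argument %s not valid for %s"
                                % (key, self.__class__.__name__))
            setattr(self, key, value)
        BaseClass.__init__(self, name[:-len("Class")])
    # Build the class at runtime with the generated __init__.
    return type(name, (BaseClass,), {"__init__": __init__})
# Usage: generate a model class on the fly and instantiate it.
UserClass = ClassFactory("UserClass", ["email", "age"])
user = UserClass(email="user@example.com", age=30)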
d9ccb59f26f0ff2d7b2d70e2d17e7f33e56e2b0b
|
code/ex4.2-tornado_multiple_requests.py
|
code/ex4.2-tornado_multiple_requests.py
|
from tornado.ioloop import IOLoop
from tornado.httpclient import AsyncHTTPClient
from tornado.gen import coroutine
import time
URLS = [
'http://127.0.0.1:8000',
'http://127.0.0.1:8000',
'http://127.0.0.1:8000',
]
@coroutine
def get_greetings():
http_client = AsyncHTTPClient()
responses = yield [http_client.fetch(url) for url in URLS]
texts = [resp.body.decode('utf8') for resp in responses]
return '\n'.join(texts)
if __name__ == "__main__":
loop = IOLoop.instance()
t1 = time.time()
text = loop.run_sync(get_greetings)
print(time.time() - t1, "seconds passed")
print(text)
|
Add ex4.2: multiple tornado requests
|
Add ex4.2: multiple tornado requests
|
Python
|
mit
|
MA3STR0/PythonAsyncWorkshop
|
Add ex4.2: multiple tornado requests
|
from tornado.ioloop import IOLoop
from tornado.httpclient import AsyncHTTPClient
from tornado.gen import coroutine
import time
URLS = [
'http://127.0.0.1:8000',
'http://127.0.0.1:8000',
'http://127.0.0.1:8000',
]
@coroutine
def get_greetings():
http_client = AsyncHTTPClient()
responses = yield [http_client.fetch(url) for url in URLS]
texts = [resp.body.decode('utf8') for resp in responses]
return '\n'.join(texts)
if __name__ == "__main__":
loop = IOLoop.instance()
t1 = time.time()
text = loop.run_sync(get_greetings)
print(time.time() - t1, "seconds passed")
print(text)
|
<commit_before><commit_msg>Add ex4.2: multiple tornado requests<commit_after>
|
from tornado.ioloop import IOLoop
from tornado.httpclient import AsyncHTTPClient
from tornado.gen import coroutine
import time
URLS = [
'http://127.0.0.1:8000',
'http://127.0.0.1:8000',
'http://127.0.0.1:8000',
]
@coroutine
def get_greetings():
http_client = AsyncHTTPClient()
responses = yield [http_client.fetch(url) for url in URLS]
texts = [resp.body.decode('utf8') for resp in responses]
return '\n'.join(texts)
if __name__ == "__main__":
loop = IOLoop.instance()
t1 = time.time()
text = loop.run_sync(get_greetings)
print(time.time() - t1, "seconds passed")
print(text)
|
Add ex4.2: multiple tornado requestsfrom tornado.ioloop import IOLoop
from tornado.httpclient import AsyncHTTPClient
from tornado.gen import coroutine
import time
URLS = [
'http://127.0.0.1:8000',
'http://127.0.0.1:8000',
'http://127.0.0.1:8000',
]
@coroutine
def get_greetings():
http_client = AsyncHTTPClient()
responses = yield [http_client.fetch(url) for url in URLS]
texts = [resp.body.decode('utf8') for resp in responses]
return '\n'.join(texts)
if __name__ == "__main__":
loop = IOLoop.instance()
t1 = time.time()
text = loop.run_sync(get_greetings)
print(time.time() - t1, "seconds passed")
print(text)
|
<commit_before><commit_msg>Add ex4.2: multiple tornado requests<commit_after>from tornado.ioloop import IOLoop
from tornado.httpclient import AsyncHTTPClient
from tornado.gen import coroutine
import time
URLS = [
'http://127.0.0.1:8000',
'http://127.0.0.1:8000',
'http://127.0.0.1:8000',
]
@coroutine
def get_greetings():
http_client = AsyncHTTPClient()
responses = yield [http_client.fetch(url) for url in URLS]
texts = [resp.body.decode('utf8') for resp in responses]
return '\n'.join(texts)
if __name__ == "__main__":
loop = IOLoop.instance()
t1 = time.time()
text = loop.run_sync(get_greetings)
print(time.time() - t1, "seconds passed")
print(text)
|
|
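The timing printed by the script above is dominated by the slowest response, because yielding a list of futures lets Tornado wait on all three fetches concurrently. For comparison, a sequential variant — reusing the same imports and URLS as the script above — takes roughly the sum of the response times:
@coroutine
def get_greetings_sequential():
    http_client = AsyncHTTPClient()
    texts = []
    for url in URLS:
        # Each fetch completes before the next one is issued.
        response = yield http_client.fetch(url)
        texts.append(response.body.decode('utf8'))
    return '\n'.join(texts)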
5b99ef78b0982c03448b967dc6c98361f0244896
|
examples/multiple_joint_kde.py
|
examples/multiple_joint_kde.py
|
"""
Multiple bivariate KDE plots
============================
_thumb: .6, .4
"""
import seaborn as sns
import matplotlib.pyplot as plt
sns.set(style="darkgrid")
iris = sns.load_dataset("iris")
# Subset the iris dataset by species
setosa = iris.query("species == 'setosa'")
virginica = iris.query("species == 'virginica'")
# Set up the figure
f, ax = plt.subplots(figsize=(8, 8))
ax.set_aspect("equal")
# Draw the two density plots
ax = sns.kdeplot(setosa.sepal_width, setosa.sepal_length,
cmap="Reds", shade=True, shade_lowest=False)
ax = sns.kdeplot(virginica.sepal_width, virginica.sepal_length,
cmap="Blues", shade=True, shade_lowest=False)
# Add labels to the plot
red = sns.color_palette("Reds")[-2]
blue = sns.color_palette("Blues")[-2]
ax.text(2.5, 8.2, "virginica", size=16, color=blue)
ax.text(3.8, 4.5, "setosa", size=16, color=red)
|
Add new multiple bivariate KDE example
|
Add new multiple bivariate KDE example
|
Python
|
bsd-3-clause
|
anntzer/seaborn,nileracecrew/seaborn,bsipocz/seaborn,jat255/seaborn,mia1rab/seaborn,sinhrks/seaborn,huongttlan/seaborn,mwaskom/seaborn,mwaskom/seaborn,dimarkov/seaborn,lukauskas/seaborn,phobson/seaborn,lukauskas/seaborn,kyleam/seaborn,wrobstory/seaborn,gef756/seaborn,petebachant/seaborn,clarkfitzg/seaborn,uhjish/seaborn,q1ang/seaborn,tim777z/seaborn,JWarmenhoven/seaborn,sauliusl/seaborn,oesteban/seaborn,ashhher3/seaborn,parantapa/seaborn,lypzln/seaborn,drewokane/seaborn,anntzer/seaborn,Lx37/seaborn,muku42/seaborn,dhimmel/seaborn,arokem/seaborn,mclevey/seaborn,arokem/seaborn,phobson/seaborn,olgabot/seaborn
|
Add new multiple bivariate KDE example
|
"""
Multiple bivariate KDE plots
============================
_thumb: .6, .4
"""
import seaborn as sns
import matplotlib.pyplot as plt
sns.set(style="darkgrid")
iris = sns.load_dataset("iris")
# Subset the iris dataset by species
setosa = iris.query("species == 'setosa'")
virginica = iris.query("species == 'virginica'")
# Set up the figure
f, ax = plt.subplots(figsize=(8, 8))
ax.set_aspect("equal")
# Draw the two density plots
ax = sns.kdeplot(setosa.sepal_width, setosa.sepal_length,
cmap="Reds", shade=True, shade_lowest=False)
ax = sns.kdeplot(virginica.sepal_width, virginica.sepal_length,
cmap="Blues", shade=True, shade_lowest=False)
# Add labels to the plot
red = sns.color_palette("Reds")[-2]
blue = sns.color_palette("Blues")[-2]
ax.text(2.5, 8.2, "virginica", size=16, color=blue)
ax.text(3.8, 4.5, "setosa", size=16, color=red)
|
<commit_before><commit_msg>Add new multiple bivariate KDE example<commit_after>
|
"""
Multiple bivariate KDE plots
============================
_thumb: .6, .4
"""
import seaborn as sns
import matplotlib.pyplot as plt
sns.set(style="darkgrid")
iris = sns.load_dataset("iris")
# Subset the iris dataset by species
setosa = iris.query("species == 'setosa'")
virginica = iris.query("species == 'virginica'")
# Set up the figure
f, ax = plt.subplots(figsize=(8, 8))
ax.set_aspect("equal")
# Draw the two density plots
ax = sns.kdeplot(setosa.sepal_width, setosa.sepal_length,
cmap="Reds", shade=True, shade_lowest=False)
ax = sns.kdeplot(virginica.sepal_width, virginica.sepal_length,
cmap="Blues", shade=True, shade_lowest=False)
# Add labels to the plot
red = sns.color_palette("Reds")[-2]
blue = sns.color_palette("Blues")[-2]
ax.text(2.5, 8.2, "virginica", size=16, color=blue)
ax.text(3.8, 4.5, "setosa", size=16, color=red)
|
Add new multiple bivariate KDE example"""
Multiple bivariate KDE plots
============================
_thumb: .6, .4
"""
import seaborn as sns
import matplotlib.pyplot as plt
sns.set(style="darkgrid")
iris = sns.load_dataset("iris")
# Subset the iris dataset by species
setosa = iris.query("species == 'setosa'")
virginica = iris.query("species == 'virginica'")
# Set up the figure
f, ax = plt.subplots(figsize=(8, 8))
ax.set_aspect("equal")
# Draw the two density plots
ax = sns.kdeplot(setosa.sepal_width, setosa.sepal_length,
cmap="Reds", shade=True, shade_lowest=False)
ax = sns.kdeplot(virginica.sepal_width, virginica.sepal_length,
cmap="Blues", shade=True, shade_lowest=False)
# Add labels to the plot
red = sns.color_palette("Reds")[-2]
blue = sns.color_palette("Blues")[-2]
ax.text(2.5, 8.2, "virginica", size=16, color=blue)
ax.text(3.8, 4.5, "setosa", size=16, color=red)
|
<commit_before><commit_msg>Add new multiple bivariate KDE example<commit_after>"""
Multiple bivariate KDE plots
============================
_thumb: .6, .4
"""
import seaborn as sns
import matplotlib.pyplot as plt
sns.set(style="darkgrid")
iris = sns.load_dataset("iris")
# Subset the iris dataset by species
setosa = iris.query("species == 'setosa'")
virginica = iris.query("species == 'virginica'")
# Set up the figure
f, ax = plt.subplots(figsize=(8, 8))
ax.set_aspect("equal")
# Draw the two density plots
ax = sns.kdeplot(setosa.sepal_width, setosa.sepal_length,
cmap="Reds", shade=True, shade_lowest=False)
ax = sns.kdeplot(virginica.sepal_width, virginica.sepal_length,
cmap="Blues", shade=True, shade_lowest=False)
# Add labels to the plot
red = sns.color_palette("Reds")[-2]
blue = sns.color_palette("Blues")[-2]
ax.text(2.5, 8.2, "virginica", size=16, color=blue)
ax.text(3.8, 4.5, "setosa", size=16, color=red)
|
|
73ab8125e8248d22b475419c81180c7ba0bf4535
|
api/models/preview_email.py
|
api/models/preview_email.py
|
from django.db import models
from api.models import Hackathon
from django.contrib import admin
from hackfsu_com.admin import hackfsu_admin
class PreviewEmail(models.Model):
hackathon = models.ForeignKey(to=Hackathon, on_delete=models.CASCADE)
email = models.CharField(max_length=100)
interest = models.CharField(max_length=100)
submit_time = models.DateTimeField(auto_now_add=True)
def __str__(self):
return '[PreviewEmail {} - {}]'.format(self.email, self.interest)
@admin.register(PreviewEmail, site=hackfsu_admin)
class PreviewEmailAdmin(admin.ModelAdmin):
list_filter = ('hackathon',)
list_display = ('id', 'email', 'interest', 'submit_time')
list_editable = ('email', 'interest')
list_display_links = ('id',)
search_fields = ('email', 'interest')
ordering = ('-submit_time',)
|
Add model for preview emails
|
Add model for preview emails
|
Python
|
apache-2.0
|
andrewsosa/hackfsu_com,andrewsosa/hackfsu_com,andrewsosa/hackfsu_com,andrewsosa/hackfsu_com
|
Add model for preview emails
|
from django.db import models
from api.models import Hackathon
from django.contrib import admin
from hackfsu_com.admin import hackfsu_admin
class PreviewEmail(models.Model):
hackathon = models.ForeignKey(to=Hackathon, on_delete=models.CASCADE)
email = models.CharField(max_length=100)
interest = models.CharField(max_length=100)
submit_time = models.DateTimeField(auto_now_add=True)
def __str__(self):
return '[PreviewEmail {} - {}]'.format(self.email, self.interest)
@admin.register(PreviewEmail, site=hackfsu_admin)
class PreviewEmailAdmin(admin.ModelAdmin):
list_filter = ('hackathon',)
list_display = ('id', 'email', 'interest', 'submit_time')
list_editable = ('email', 'interest')
list_display_links = ('id',)
search_fields = ('email', 'interest')
ordering = ('-submit_time',)
|
<commit_before><commit_msg>Add model for preview emails<commit_after>
|
from django.db import models
from api.models import Hackathon
from django.contrib import admin
from hackfsu_com.admin import hackfsu_admin
class PreviewEmail(models.Model):
hackathon = models.ForeignKey(to=Hackathon, on_delete=models.CASCADE)
email = models.CharField(max_length=100)
interest = models.CharField(max_length=100)
submit_time = models.DateTimeField(auto_now_add=True)
def __str__(self):
return '[PreviewEmail {} - {}]'.format(self.email, self.interest)
@admin.register(PreviewEmail, site=hackfsu_admin)
class PreviewEmailAdmin(admin.ModelAdmin):
list_filter = ('hackathon',)
list_display = ('id', 'email', 'interest', 'submit_time')
list_editable = ('email', 'interest')
list_display_links = ('id',)
search_fields = ('email', 'interest')
ordering = ('-submit_time',)
|
Add model for preview emailsfrom django.db import models
from api.models import Hackathon
from django.contrib import admin
from hackfsu_com.admin import hackfsu_admin
class PreviewEmail(models.Model):
hackathon = models.ForeignKey(to=Hackathon, on_delete=models.CASCADE)
email = models.CharField(max_length=100)
interest = models.CharField(max_length=100)
submit_time = models.DateTimeField(auto_now_add=True)
def __str__(self):
return '[PreviewEmail {} - {}]'.format(self.email, self.interest)
@admin.register(PreviewEmail, site=hackfsu_admin)
class PreviewEmailAdmin(admin.ModelAdmin):
list_filter = ('hackathon',)
list_display = ('id', 'email', 'interest', 'submit_time')
list_editable = ('email', 'interest')
list_display_links = ('id',)
search_fields = ('email', 'interest')
ordering = ('-submit_time',)
|
<commit_before><commit_msg>Add model for preview emails<commit_after>from django.db import models
from api.models import Hackathon
from django.contrib import admin
from hackfsu_com.admin import hackfsu_admin
class PreviewEmail(models.Model):
hackathon = models.ForeignKey(to=Hackathon, on_delete=models.CASCADE)
email = models.CharField(max_length=100)
interest = models.CharField(max_length=100)
submit_time = models.DateTimeField(auto_now_add=True)
def __str__(self):
return '[PreviewEmail {} - {}]'.format(self.email, self.interest)
@admin.register(PreviewEmail, site=hackfsu_admin)
class PreviewEmailAdmin(admin.ModelAdmin):
list_filter = ('hackathon',)
list_display = ('id', 'email', 'interest', 'submit_time')
list_editable = ('email', 'interest')
list_display_links = ('id',)
search_fields = ('email', 'interest')
ordering = ('-submit_time',)
|
|
496e3ada83d3c6d41df535c5433522edaa75e085
|
mozillians/users/migrations/0036_auto_20180704_0634.py
|
mozillians/users/migrations/0036_auto_20180704_0634.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-07-04 13:34
from __future__ import unicode_literals
from django.db import migrations
def migrate_privacy_email(apps, schema_editor):
UserProfile = apps.get_model('users', 'UserProfile')
IdpProfile = apps.get_model('users', 'IdpProfile')
for idp in IdpProfile.objects.filter(primary_contact_identity=True):
UserProfile.objects.filter(pk=idp.profile.pk).update(privacy_email=idp.privacy)
def backwards(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('users', '0035_auto_20180604_0439'),
]
operations = [
migrations.RunPython(migrate_privacy_email, backwards),
]
|
Add data migration for email privacy field.
|
Add data migration for email privacy field.
|
Python
|
bsd-3-clause
|
akatsoulas/mozillians,akatsoulas/mozillians,akatsoulas/mozillians,akatsoulas/mozillians,mozilla/mozillians,johngian/mozillians,mozilla/mozillians,johngian/mozillians,johngian/mozillians,mozilla/mozillians,johngian/mozillians,mozilla/mozillians
|
Add data migration for email privacy field.
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-07-04 13:34
from __future__ import unicode_literals
from django.db import migrations
def migrate_privacy_email(apps, schema_editor):
UserProfile = apps.get_model('users', 'UserProfile')
IdpProfile = apps.get_model('users', 'IdpProfile')
for idp in IdpProfile.objects.filter(primary_contact_identity=True):
UserProfile.objects.filter(pk=idp.profile.pk).update(privacy_email=idp.privacy)
def backwards(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('users', '0035_auto_20180604_0439'),
]
operations = [
migrations.RunPython(migrate_privacy_email, backwards),
]
|
<commit_before><commit_msg>Add data migration for email privacy field.<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-07-04 13:34
from __future__ import unicode_literals
from django.db import migrations
def migrate_privacy_email(apps, schema_editor):
UserProfile = apps.get_model('users', 'UserProfile')
IdpProfile = apps.get_model('users', 'IdpProfile')
for idp in IdpProfile.objects.filter(primary_contact_identity=True):
UserProfile.objects.filter(pk=idp.profile.pk).update(privacy_email=idp.privacy)
def backwards(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('users', '0035_auto_20180604_0439'),
]
operations = [
migrations.RunPython(migrate_privacy_email, backwards),
]
|
Add data migration for email privacy field.# -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-07-04 13:34
from __future__ import unicode_literals
from django.db import migrations
def migrate_privacy_email(apps, schema_editor):
UserProfile = apps.get_model('users', 'UserProfile')
IdpProfile = apps.get_model('users', 'IdpProfile')
for idp in IdpProfile.objects.filter(primary_contact_identity=True):
UserProfile.objects.filter(pk=idp.profile.pk).update(privacy_email=idp.privacy)
def backwards(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('users', '0035_auto_20180604_0439'),
]
operations = [
migrations.RunPython(migrate_privacy_email, backwards),
]
|
<commit_before><commit_msg>Add data migration for email privacy field.<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-07-04 13:34
from __future__ import unicode_literals
from django.db import migrations
def migrate_privacy_email(apps, schema_editor):
UserProfile = apps.get_model('users', 'UserProfile')
IdpProfile = apps.get_model('users', 'IdpProfile')
for idp in IdpProfile.objects.filter(primary_contact_identity=True):
UserProfile.objects.filter(pk=idp.profile.pk).update(privacy_email=idp.privacy)
def backwards(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('users', '0035_auto_20180604_0439'),
]
operations = [
migrations.RunPython(migrate_privacy_email, backwards),
]
|
|
6748d60d650d0b2383c62ae076aec799c0f8fda3
|
numpy/core/tests/test_print.py
|
numpy/core/tests/test_print.py
|
import numpy as np
from numpy.testing import *
class TestPrint(TestCase):
def test_float_types(self) :
""" Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble aren't the same as the
python float precision.
"""
for t in [np.float, np.double, np.longdouble] :
for x in [0, 1,-1, 1e10, 1e20] :
assert_equal(str(t(x)), str(float(x)))
def test_complex_types(self) :
"""Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble aren't the same as the
python float precision.
"""
for t in [np.cfloat, np.cdouble, np.clongdouble] :
for x in [0, 1,-1, 1e10, 1e20] :
assert_equal(str(t(x)), str(complex(x)))
assert_equal(str(t(x*1j)), str(complex(x*1j)))
assert_equal(str(t(x + x*1j)), str(complex(x + x*1j)))
if __name__ == "__main__":
run_module_suite()
|
Add basic tests of number str() formatting.
|
Add basic tests of number str() formatting.
|
Python
|
bsd-3-clause
|
numpy/numpy,rherault-insa/numpy,numpy/numpy-refactor,ddasilva/numpy,bertrand-l/numpy,mattip/numpy,rajathkumarmp/numpy,sigma-random/numpy,kirillzhuravlev/numpy,madphysicist/numpy,madphysicist/numpy,cowlicks/numpy,ESSS/numpy,ChristopherHogan/numpy,felipebetancur/numpy,mwiebe/numpy,CMartelLML/numpy,NextThought/pypy-numpy,leifdenby/numpy,CMartelLML/numpy,GrimDerp/numpy,mingwpy/numpy,ewmoore/numpy,simongibbons/numpy,Srisai85/numpy,rmcgibbo/numpy,NextThought/pypy-numpy,seberg/numpy,anntzer/numpy,rgommers/numpy,moreati/numpy,Dapid/numpy,MichaelAquilina/numpy,bertrand-l/numpy,ogrisel/numpy,skymanaditya1/numpy,immerrr/numpy,dwillmer/numpy,brandon-rhodes/numpy,yiakwy/numpy,naritta/numpy,dch312/numpy,ahaldane/numpy,mathdd/numpy,dato-code/numpy,chiffa/numpy,rudimeier/numpy,githubmlai/numpy,pelson/numpy,sigma-random/numpy,mindw/numpy,sinhrks/numpy,empeeu/numpy,stefanv/numpy,jakirkham/numpy,dwf/numpy,ssanderson/numpy,numpy/numpy,embray/numpy,ahaldane/numpy,ogrisel/numpy,gmcastil/numpy,abalkin/numpy,charris/numpy,maniteja123/numpy,skwbc/numpy,hainm/numpy,embray/numpy,rmcgibbo/numpy,stefanv/numpy,ChanderG/numpy,larsmans/numpy,ewmoore/numpy,groutr/numpy,stuarteberg/numpy,dato-code/numpy,GrimDerp/numpy,mattip/numpy,rajathkumarmp/numpy,sigma-random/numpy,kirillzhuravlev/numpy,madphysicist/numpy,madphysicist/numpy,cowlicks/numpy,ESSS/numpy,ChristopherHogan/numpy,felipebetancur/numpy,mwiebe/numpy,CMartelLML/numpy,NextThought/pypy-numpy,leifdenby/numpy,CMartelLML/numpy,GrimDerp/numpy,mingwpy/numpy,ewmoore/numpy,simongibbons/numpy,Srisai85/numpy,rmcgibbo/numpy,NextThought/pypy-numpy,seberg/numpy,anntzer/numpy,rgommers/numpy,moreati/numpy,Dapid/numpy,MichaelAquilina/numpy,bertrand-l/numpy,ogrisel/numpy,skymanaditya1/numpy,immerrr/numpy,dwillmer/numpy,brandon-rhodes/numpy,yiakwy/numpy,naritta/numpy,dch312/numpy,ahaldane/numpy,mathdd/numpy,
dato-code/numpy,nbeaver/numpy,dwf/numpy,ssanderson/numpy,numpy/numpy,embray/numpy,ahaldane/numpy,ogrisel/numpy,gmcastil/numpy,abalkin/numpy,charris/numpy,maniteja123/numpy,skwbc/numpy,hainm/numpy,embray/numpy,rmcgibbo/numpy,stefanv/numpy,ChanderG/numpy,larsmans/numpy,ewmoore/numpy,groutr/numpy,stuarteberg/numpy,dato-code/numpy,GrimDerp/numpy,mindw/numpy,rhythmsosad/numpy,musically-ut/numpy,hainm/numpy,tdsmith/numpy,skymanaditya1/numpy,seberg/numpy,Linkid/numpy,rmcgibbo/numpy,sinhrks/numpy,sinhrks/numpy,joferkington/numpy,solarjoe/numpy,rajathkumarmp/numpy,jankoslavic/numpy,BabeNovelty/numpy,matthew-brett/numpy,charris/numpy,ssanderson/numpy,behzadnouri/numpy,Linkid/numpy,behzadnouri/numpy,tacaswell/numpy,behzadnouri/numpy,ahaldane/numpy,skwbc/numpy,jakirkham/numpy,b-carter/numpy,dch312/numpy,BMJHayward/numpy,githubmlai/numpy,rhythmsosad/numpy,SunghanKim/numpy,BabeNovelty/numpy,Anwesh43/numpy,endolith/numpy,numpy/numpy-refactor,naritta/numpy,trankmichael/numpy,rgommers/numpy,nguyentu1602/numpy,trankmichael/numpy,MaPePeR/numpy,endolith/numpy,groutr/numpy,shoyer/numpy,ewmoore/numpy,MSeifert04/numpy,kirillzhuravlev/numpy,dwillmer/numpy,rgommers/numpy,immerrr/numpy,ekalosak/numpy,yiakwy/numpy,joferkington/numpy,grlee77/numpy,bringingheavendown/numpy,empeeu/numpy,numpy/numpy-refactor,moreati/numpy,sigma-random/numpy,mingwpy/numpy,mhvk/numpy,rhythmsosad/numpy,njase/numpy,SiccarPoint/numpy,ajdawson/numpy,AustereCuriosity/numpy,pelson/numpy,Yusa95/numpy,madphysicist/numpy,trankmichael/numpy,rherault-insa/numpy,MichaelAquilina/numpy,pizzathief/numpy,ChristopherHogan/numpy,felipebetancur/numpy,pbrod/numpy,embray/numpy,jonathanunderwood/numpy,MaPePeR/numpy,drasmuss/numpy,yiakwy/numpy,WarrenWeckesser/numpy,astrofrog/numpy,gfyoung/numpy,solarjoe/numpy,tynn/numpy,WillieMaddox/numpy,ajdawson/numpy,felipebetancur/numpy,dwf/numpy,WarrenWeckesser/numpy,pbrod/numpy,utke1/numpy,empeeu/numpy,grlee77/numpy,Dapid/numpy,abalkin/numpy,mathdd/numpy,embray/numpy,joferkington/numpy,anntzer/numpy,MaPePeR/numpy,shoyer/numpy,mingwpy/numpy,dwillmer/numpy,nguyentu1602/numpy,ewmoore/numpy,tacaswell/numpy,cowlicks/numpy,seberg/numpy,kiwifb/numpy,ssanderson/numpy,cjermain/numpy,groutr/numpy,rudimeier/numpy,argriffing/numpy,ekalosak/numpy,Eric89GXL/numpy,MichaelAquilina/numpy,SunghanKim/numpy,kirillzhuravlev/numpy,has2k1/numpy,gfyoung/numpy,b-carter/numpy,Srisai85/numpy,chatcannon/numpy,ahaldane/numpy,ChanderG/numpy,argriffing/numpy,ViralLeadership/numpy,pizzathief/numpy,rgommers/numpy,bringingheavendown/numpy,ogrisel/numpy,naritta/numpy,cjermain/numpy,mwiebe/numpy,Yusa95/numpy,GaZ3ll3/numpy,skymanaditya1/numpy,jorisvandenbossche/numpy,Dapid/numpy,jonathanunderwood/numpy,cowlicks/numpy,has2k1/numpy,pelson/numpy,mortada/numpy,rajathkumarmp/numpy,GaZ3ll3/numpy,shoyer/numpy,immerrr/numpy,pbrod/numpy,githubmlai/numpy,chiffa/numpy,hainm/numpy,ogrisel/numpy,bmorris3/numpy,tacaswell/numpy,brandon-rhodes/numpy,astrofrog/numpy,utke1/numpy,MSeifert04/numpy,mingwpy/numpy,rajathkumarmp/numpy,SiccarPoint/numpy,Anwesh43/numpy,Eric89GXL/numpy,utke1/numpy,andsor/numpy,bmorris3/numpy,numpy/numpy,rherault-insa/numpy,bmorris3/numpy,tynn/numpy,tdsmith/numpy,musically-ut/numpy,jonathanunderwood/numpy,ddasilva/numpy,
SunghanKim/numpy,pdebuyl/numpy,ESSS/numpy,charris/numpy,SunghanKim/numpy,larsmans/numpy,NextThought/pypy-numpy,dwf/numpy,pdebuyl/numpy,mhvk/numpy,BMJHayward/numpy,dimasad/numpy,andsor/numpy,matthew-brett/numpy,Srisai85/numpy,ekalosak/numpy,pyparallel/numpy,has2k1/numpy,rudimeier/numpy,trankmichael/numpy,stuarteberg/numpy,nguyentu1602/numpy,madphysicist/numpy,tynn/numpy,anntzer/numpy,CMartelLML/numpy,simongibbons/numpy,moreati/numpy,simongibbons/numpy,cjermain/numpy,rmcgibbo/numpy,leifdenby/numpy
|
Add basic tests of number str() formatting.
|
import numpy as np
from numpy.testing import *
class TestPrint(TestCase):
def test_float_types(self) :
""" Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble isn't the same as the
python float precision.
"""
for t in [np.float, np.double, np.longdouble] :
for x in [0, 1,-1, 1e10, 1e20] :
assert_equal(str(t(x)), str(float(x)))
def test_complex_types(self) :
"""Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble isn't the same as the
python float precision.
"""
for t in [np.cfloat, np.cdouble, np.clongdouble] :
for x in [0, 1,-1, 1e10, 1e20] :
assert_equal(str(t(x)), str(complex(x)))
assert_equal(str(t(x*1j)), str(complex(x*1j)))
assert_equal(str(t(x + x*1j)), str(complex(x + x*1j)))
if __name__ == "__main__":
run_module_suite()
|
<commit_before><commit_msg>Add basic tests of number str() formatting.<commit_after>
|
import numpy as np
from numpy.testing import *
class TestPrint(TestCase):
def test_float_types(self) :
""" Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble isn't the same as the
python float precision.
"""
for t in [np.float, np.double, np.longdouble] :
for x in [0, 1,-1, 1e10, 1e20] :
assert_equal(str(t(x)), str(float(x)))
def test_complex_types(self) :
"""Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble isn't the same as the
python float precision.
"""
for t in [np.cfloat, np.cdouble, np.clongdouble] :
for x in [0, 1,-1, 1e10, 1e20] :
assert_equal(str(t(x)), str(complex(x)))
assert_equal(str(t(x*1j)), str(complex(x*1j)))
assert_equal(str(t(x + x*1j)), str(complex(x + x*1j)))
if __name__ == "__main__":
run_module_suite()
|
Add basic tests of number str() formatting.import numpy as np
from numpy.testing import *
class TestPrint(TestCase):
def test_float_types(self) :
""" Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble isn't the same as the
python float precision.
"""
for t in [np.float, np.double, np.longdouble] :
for x in [0, 1,-1, 1e10, 1e20] :
assert_equal(str(t(x)), str(float(x)))
def test_complex_types(self) :
"""Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble isn't the same as the
python float precision.
"""
for t in [np.cfloat, np.cdouble, np.clongdouble] :
for x in [0, 1,-1, 1e10, 1e20] :
assert_equal(str(t(x)), str(complex(x)))
assert_equal(str(t(x*1j)), str(complex(x*1j)))
assert_equal(str(t(x + x*1j)), str(complex(x + x*1j)))
if __name__ == "__main__":
run_module_suite()
|
<commit_before><commit_msg>Add basic tests of number str() formatting.<commit_after>import numpy as np
from numpy.testing import *
class TestPrint(TestCase):
def test_float_types(self) :
""" Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble isn't the same as the
python float precision.
"""
for t in [np.float, np.double, np.longdouble] :
for x in [0, 1,-1, 1e10, 1e20] :
assert_equal(str(t(x)), str(float(x)))
def test_complex_types(self) :
"""Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble isn't the same as the
python float precision.
"""
for t in [np.cfloat, np.cdouble, np.clongdouble] :
for x in [0, 1,-1, 1e10, 1e20] :
assert_equal(str(t(x)), str(complex(x)))
assert_equal(str(t(x*1j)), str(complex(x*1j)))
assert_equal(str(t(x + x*1j)), str(complex(x + x*1j)))
if __name__ == "__main__":
run_module_suite()
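# --- Editor's illustrative sketch (not part of the original commit) ---
# One concrete equality the float loop above asserts, spelled out explicitly;
# np.float is assumed removed in current NumPy, so np.float64 stands in here:
#
#   import numpy as np
#   assert str(np.float64(1e10)) == str(1e10)   # both '10000000000.0' on Python 3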
|
|
ab61e71f083817f575bb6652402e62d1f949230e
|
opps/article/search_indexes.py
|
opps/article/search_indexes.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from datetime import datetime
from haystack.indexes import SearchIndex, CharField, DateTimeField
from haystack import site
from .models import Post
class PostIndex(SearchIndex):
text = CharField(document=True, use_template=True)
date_available = DateTimeField(model_attr='date_available')
def get_updated_field(self):
return 'date_available'
def index_queryset(self):
return Post.objects.filter(
date_available__lte=datetime.now(),
published=True)
site.register(Post, PostIndex)
|
Create search indexes article on post models
|
Create search indexes article on post models
|
Python
|
mit
|
williamroot/opps,YACOWS/opps,YACOWS/opps,opps/opps,williamroot/opps,williamroot/opps,YACOWS/opps,williamroot/opps,jeanmask/opps,YACOWS/opps,opps/opps,jeanmask/opps,opps/opps,jeanmask/opps,jeanmask/opps,opps/opps
|
Create search indexes article on post models
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from datetime import datetime
from haystack.indexes import SearchIndex, CharField, DateTimeField
from haystack import site
from .models import Post
class PostIndex(SearchIndex):
text = CharField(document=True, use_template=True)
date_available = DateTimeField(model_attr='date_available')
def get_updated_field(self):
return 'date_available'
def index_queryset(self):
return Post.objects.filter(
date_available__lte=datetime.now(),
published=True)
site.register(Post, PostIndex)
|
<commit_before><commit_msg>Create search indexes article on post models<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from datetime import datetime
from haystack.indexes import SearchIndex, CharField, DateTimeField
from haystack import site
from .models import Post
class PostIndex(SearchIndex):
text = CharField(document=True, use_template=True)
date_available = DateTimeField(model_attr='date_available')
def get_updated_field(self):
return 'date_available'
def index_queryset(self):
return Post.objects.filter(
date_available__lte=datetime.now(),
published=True)
site.register(Post, PostIndex)
|
Create search indexes article on post models#!/usr/bin/env python
# -*- coding: utf-8 -*-
from datetime import datetime
from haystack.indexes import SearchIndex, CharField, DateTimeField
from haystack import site
from .models import Post
class PostIndex(SearchIndex):
text = CharField(document=True, use_template=True)
date_available = DateTimeField(model_attr='date_available')
def get_updated_field(self):
return 'date_available'
def index_queryset(self):
return Post.objects.filter(
date_available__lte=datetime.now(),
published=True)
site.register(Post, PostIndex)
|
<commit_before><commit_msg>Create search indexes article on post models<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from datetime import datetime
from haystack.indexes import SearchIndex, CharField, DateTimeField
from haystack import site
from .models import Post
class PostIndex(SearchIndex):
text = CharField(document=True, use_template=True)
date_available = DateTimeField(model_attr='date_available')
def get_updated_field(self):
return 'date_available'
def index_queryset(self):
return Post.objects.filter(
date_available__lte=datetime.now(),
published=True)
site.register(Post, PostIndex)
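# --- Editor's illustrative sketch (not part of the original commit) ---
# With the index registered, a Haystack query against it might look like this
# (SearchQuerySet is Haystack's standard query API; the search term is invented):
#
#   from haystack.query import SearchQuerySet
#   results = SearchQuerySet().models(Post).auto_query('election coverage')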
|
|
3bbaa8c922dafcce17e522d61a4869446e6e2f70
|
ironic/drivers/__init__.py
|
ironic/drivers/__init__.py
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
|
Add new base and fake driver classes.
|
Add new base and fake driver classes.
|
Python
|
apache-2.0
|
rdo-management/tuskar,tuskar/tuskar,rdo-management/tuskar,rdo-management/tuskar
|
Add new base and fake driver classes.
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
|
<commit_before><commit_msg>Add new base and fake driver classes.<commit_after>
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
|
Add new base and fake driver classes.# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
|
<commit_before><commit_msg>Add new base and fake driver classes.<commit_after># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
|
|
390b5b7e2aa9373b94acde2364e6e19c3cb19489
|
bayespy/inference/vmp/nodes/pdf.py
|
bayespy/inference/vmp/nodes/pdf.py
|
######################################################################
# Copyright (C) 2015 Jaakko Luttinen
#
# This file is licensed under Version 3.0 of the GNU General Public
# License. See LICENSE for a text of the license.
######################################################################
######################################################################
# This file is part of BayesPy.
#
# BayesPy is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# BayesPy is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BayesPy. If not, see <http://www.gnu.org/licenses/>.
######################################################################
import numpy as np
from .expfamily import ExponentialFamily
class PDF(ExponentialFamily):
"""
General node with arbitrary probability density function
"""
# Sub-classes must over-write this
_distribution = None
def __init__(self, pdf, *parents, approximation=None, **kwargs):
if approximation is not None:
raise NotImplementedError() #self._distribution = approximation._constructor
super().__init__(*parents,
dims=dims,
**kwargs)
|
Create file for the black box node
|
ENH: Create file for the black box node
|
Python
|
mit
|
SalemAmeen/bayespy,jluttine/bayespy,bayespy/bayespy,fivejjs/bayespy
|
ENH: Create file for the black box node
|
######################################################################
# Copyright (C) 2015 Jaakko Luttinen
#
# This file is licensed under Version 3.0 of the GNU General Public
# License. See LICENSE for a text of the license.
######################################################################
######################################################################
# This file is part of BayesPy.
#
# BayesPy is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# BayesPy is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BayesPy. If not, see <http://www.gnu.org/licenses/>.
######################################################################
import numpy as np
from .expfamily import ExponentialFamily
class PDF(ExponentialFamily):
"""
General node with arbitrary probability density function
"""
# Sub-classes must over-write this
_distribution = None
def __init__(self, pdf, *parents, approximation=None, **kwargs):
if approximation is not None:
raise NotImplementedError() #self._distribution = approximation._constructor
super().__init__(*parents,
dims=dims,
**kwargs)
|
<commit_before><commit_msg>ENH: Create file for the black box node<commit_after>
|
######################################################################
# Copyright (C) 2015 Jaakko Luttinen
#
# This file is licensed under Version 3.0 of the GNU General Public
# License. See LICENSE for a text of the license.
######################################################################
######################################################################
# This file is part of BayesPy.
#
# BayesPy is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# BayesPy is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BayesPy. If not, see <http://www.gnu.org/licenses/>.
######################################################################
import numpy as np
from .expfamily import ExponentialFamily
class PDF(ExponentialFamily):
"""
General node with arbitrary probability density function
"""
# Sub-classes must over-write this
_distribution = None
def __init__(self, pdf, *parents, approximation=None, **kwargs):
if approximation is not None:
raise NotImplementedError() #self._distribution = approximation._constructor
super().__init__(*parents,
dims=dims,
**kwargs)
|
ENH: Create file for the black box node######################################################################
# Copyright (C) 2015 Jaakko Luttinen
#
# This file is licensed under Version 3.0 of the GNU General Public
# License. See LICENSE for a text of the license.
######################################################################
######################################################################
# This file is part of BayesPy.
#
# BayesPy is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# BayesPy is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BayesPy. If not, see <http://www.gnu.org/licenses/>.
######################################################################
import numpy as np
from .expfamily import ExponentialFamily
class PDF(ExponentialFamily):
"""
General node with arbitrary probability density function
"""
# Sub-classes must over-write this
_distribution = None
def __init__(self, pdf, *parents, approximation=None, **kwargs):
if approximation is not None:
raise NotImplementedError() #self._distribution = approximation._constructor
super().__init__(*parents,
dims=dims,
**kwargs)
|
<commit_before><commit_msg>ENH: Create file for the black box node<commit_after>######################################################################
# Copyright (C) 2015 Jaakko Luttinen
#
# This file is licensed under Version 3.0 of the GNU General Public
# License. See LICENSE for a text of the license.
######################################################################
######################################################################
# This file is part of BayesPy.
#
# BayesPy is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# BayesPy is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BayesPy. If not, see <http://www.gnu.org/licenses/>.
######################################################################
import numpy as np
from .expfamily import ExponentialFamily
class PDF(ExponentialFamily):
"""
General node with arbitrary probability density function
"""
# Sub-classes must over-write this
_distribution = None
def __init__(self, pdf, *parents, approximation=None, **kwargs):
if approximation is not None:
raise NotImplementedError() #self._distribution = approximation._constructor
super().__init__(*parents,
dims=dims,
**kwargs)
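# --- Editor's illustrative sketch (not part of the original commit) ---
# Per the comment above, sub-classes are expected to override `_distribution`;
# a hypothetical sub-class (all names invented for illustration only):
#
#   class GaussianPDF(PDF):
#       _distribution = GaussianDistribution()  # hypothetical distribution object
#
# Note: `dims` is referenced in __init__ but never defined in this initial
# commit, so a sub-class (or a later revision) would also need to supply it.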
|
|
80d10488196bb9090719e45bd17aa45f1c350455
|
canvas_and_schedule/course_list.py
|
canvas_and_schedule/course_list.py
|
"""
Module for searching on SVSU Course Schedule website
"""
import bs4
import re
from selenium import webdriver
from selenium.webdriver.support.ui import Select
from time import sleep
def enter_selection(driver, id, regexp):
"""
Search drop-down menu options for a regexp and select the first match
"""
element = driver.find_element_by_id(id)
choice = next(s for s in element.text.split("\n") if regexp.search(s))
selector = Select(element)
selector.select_by_visible_text(choice)
sleep(1)
def parse_course(course):
"""
Extract interesting information from a single course listing, and build
a dictionary from it.
"""
course_re = r"^([^*]*)\*([^*]*)\*([^(]*)\((\d*)\)\s*(.*)$"
m = re.match(course_re, course[0])
return {'dept': m[1],
'number': m[2],
'section': m[3],
'lineno': m[4],
'name': m[5],
'instructor': course[2].strip()
}
def get_course_list(semester_re, dept_re):
"""
Given a semester regular expression and a department regular expression,
lists all courses offered by that department in the semester.
"""
searchurl = "https://webtech.svsu.edu/courses/#!/home#top"
semester = re.compile(semester_re)
dept = re.compile(dept_re)
options = webdriver.ChromeOptions()
options.add_argument('headless')
driver = webdriver.Chrome(options=options)
driver.get(searchurl)
enter_selection(driver, "selectterm", semester)
enter_selection(driver, "selectdepartment", dept)
submitbutt = driver.find_element_by_id("courseLookupButton")
submitbutt.click()
sleep(1)
page = bs4.BeautifulSoup(driver.page_source, "lxml")
driver.quit()
rowlist = [[cell.getText() for cell in row.find_all("td")]
for row in page.find_all(id="courseTable")]
return [parse_course(row) for row in rowlist]
|
Add a module for listing courses on schedule website
|
Add a module for listing courses on schedule website
|
Python
|
mit
|
lahvak/svsu-utils
|
Add a module for listing courses on schedule website
|
"""
Module for searching on SVSU Course Schedule website
"""
import bs4
import re
from selenium import webdriver
from selenium.webdriver.support.ui import Select
from time import sleep
def enter_selection(driver, id, regexp):
"""
Search drop-down menu options for a regexp and select the first match
"""
element = driver.find_element_by_id(id)
choice = next(s for s in element.text.split("\n") if regexp.search(s))
selector = Select(element)
selector.select_by_visible_text(choice)
sleep(1)
def parse_course(course):
"""
Extract interesting information from a single course listing, and build
a dictionary from it.
"""
course_re = r"^([^*]*)\*([^*]*)\*([^(]*)\((\d*)\)\s*(.*)$"
m = re.match(course_re, course[0])
return {'dept': m[1],
'number': m[2],
'section': m[3],
'lineno': m[4],
'name': m[5],
'instructor': course[2].strip()
}
def get_course_list(semester_re, dept_re):
"""
Given a semester regular expression and a department regular expression,
lists all courses offered by that department in the semester.
"""
searchurl = "https://webtech.svsu.edu/courses/#!/home#top"
semester = re.compile(semester_re)
dept = re.compile(dept_re)
options = webdriver.ChromeOptions()
options.add_argument('headless')
driver = webdriver.Chrome(options=options)
driver.get(searchurl)
enter_selection(driver, "selectterm", semester)
enter_selection(driver, "selectdepartment", dept)
submitbutt = driver.find_element_by_id("courseLookupButton")
submitbutt.click()
sleep(1)
page = bs4.BeautifulSoup(driver.page_source, "lxml")
driver.quit()
rowlist = [[cell.getText() for cell in row.find_all("td")]
for row in page.find_all(id="courseTable")]
return [parse_course(row) for row in rowlist]
|
<commit_before><commit_msg>Add a module for listing courses on schedule website<commit_after>
|
"""
Module for searching on SVSU Course Schedule website
"""
import bs4
import re
from selenium import webdriver
from selenium.webdriver.support.ui import Select
from time import sleep
def enter_selection(driver, id, regexp):
"""
Search drop-down menu options for a regexp and select the first match
"""
element = driver.find_element_by_id(id)
choice = next(s for s in element.text.split("\n") if regexp.search(s))
selector = Select(element)
selector.select_by_visible_text(choice)
sleep(1)
def parse_course(course):
"""
Extract interesting information from a single course listing, and build
a dictionary from it.
"""
course_re = r"^([^*]*)\*([^*]*)\*([^(]*)\((\d*)\)\s*(.*)$"
m = re.match(course_re, course[0])
return {'dept': m[1],
'number': m[2],
'section': m[3],
'lineno': m[4],
'name': m[5],
'instructor': course[2].strip()
}
def get_course_list(semester_re, dept_re):
"""
Given a semester regular expression and a department regular expression,
lists all courses offered by that department in the semester.
"""
searchurl = "https://webtech.svsu.edu/courses/#!/home#top"
semester = re.compile(semester_re)
dept = re.compile(dept_re)
options = webdriver.ChromeOptions()
options.add_argument('headless')
driver = webdriver.Chrome(options=options)
driver.get(searchurl)
enter_selection(driver, "selectterm", semester)
enter_selection(driver, "selectdepartment", dept)
submitbutt = driver.find_element_by_id("courseLookupButton")
submitbutt.click()
sleep(1)
page = bs4.BeautifulSoup(driver.page_source, "lxml")
driver.quit()
rowlist = [[cell.getText() for cell in row.find_all("td")]
for row in page.find_all(id="courseTable")]
return [parse_course(row) for row in rowlist]
|
Add a module for listing courses on schedule website"""
Module for searching on SVSU Course Schedule website
"""
import bs4
import re
from selenium import webdriver
from selenium.webdriver.support.ui import Select
from time import sleep
def enter_selection(driver, id, regexp):
"""
Search drop-down menu options for a regexp and select the first match
"""
element = driver.find_element_by_id(id)
choice = next(s for s in element.text.split("\n") if regexp.search(s))
selector = Select(element)
selector.select_by_visible_text(choice)
sleep(1)
def parse_course(course):
"""
Extract interesting information from a single course listing, and build
a dictionary from it.
"""
course_re = r"^([^*]*)\*([^*]*)\*([^(]*)\((\d*)\)\s*(.*)$"
m = re.match(course_re, course[0])
return {'dept': m[1],
'number': m[2],
'section': m[3],
'lineno': m[4],
'name': m[5],
'instructor': course[2].strip()
}
def get_course_list(semester_re, dept_re):
"""
Given a semester regular expression and a department regular expression,
lists all courses offered by that department in the semester.
"""
searchurl = "https://webtech.svsu.edu/courses/#!/home#top"
semester = re.compile(semester_re)
dept = re.compile(dept_re)
options = webdriver.ChromeOptions()
options.add_argument('headless')
driver = webdriver.Chrome(options=options)
driver.get(searchurl)
enter_selection(driver, "selectterm", semester)
enter_selection(driver, "selectdepartment", dept)
submitbutt = driver.find_element_by_id("courseLookupButton")
submitbutt.click()
sleep(1)
page = bs4.BeautifulSoup(driver.page_source, "lxml")
driver.quit()
rowlist = [[cell.getText() for cell in row.find_all("td")]
for row in page.find_all(id="courseTable")]
return [parse_course(row) for row in rowlist]
|
<commit_before><commit_msg>Add a module for listing courses on schedule website<commit_after>"""
Module for searching on SVSU Course Schedule website
"""
import bs4
import re
from selenium import webdriver
from selenium.webdriver.support.ui import Select
from time import sleep
def enter_selection(driver, id, regexp):
"""
Search drop-down menu options for a regexp and select the first match
"""
element = driver.find_element_by_id(id)
choice = next(s for s in element.text.split("\n") if regexp.search(s))
selector = Select(element)
selector.select_by_visible_text(choice)
sleep(1)
def parse_course(course):
"""
Extract interesting information from a single course listing, and build
a dictionary from it.
"""
course_re = r"^([^*]*)\*([^*]*)\*([^(]*)\((\d*)\)\s*(.*)$"
m = re.match(course_re, course[0])
return {'dept': m[1],
'number': m[2],
'section': m[3],
'lineno': m[4],
'name': m[5],
'instructor': course[2].strip()
}
def get_course_list(semester_re, dept_re):
"""
Given a semester regular expression and a department regular expression,
lists all courses offered by that department in the semester.
"""
searchurl = "https://webtech.svsu.edu/courses/#!/home#top"
semester = re.compile(semester_re)
dept = re.compile(dept_re)
options = webdriver.ChromeOptions()
options.add_argument('headless')
driver = webdriver.Chrome(options=options)
driver.get(searchurl)
enter_selection(driver, "selectterm", semester)
enter_selection(driver, "selectdepartment", dept)
submitbutt = driver.find_element_by_id("courseLookupButton")
submitbutt.click()
sleep(1)
page = bs4.BeautifulSoup(driver.page_source, "lxml")
driver.quit()
rowlist = [[cell.getText() for cell in row.find_all("td")]
for row in page.find_all(id="courseTable")]
return [parse_course(row) for row in rowlist]
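# --- Editor's illustrative sketch (not part of the original commit) ---
# parse_course() expects the first cell to look like "DEPT*NUM*SECTION (LINENO) NAME".
# A made-up row shows the resulting dictionary:
#
#   parse_course(["MATH*161*01 (12345) Calculus I", "", " Jane Doe "])
#   # -> {'dept': 'MATH', 'number': '161', 'section': '01 ',
#   #     'lineno': '12345', 'name': 'Calculus I', 'instructor': 'Jane Doe'}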
|
|
2477c94314acbfd7c5687b1ea0b17db812964552
|
python/robotics/sensors/sharp_ir_distance_sensor.py
|
python/robotics/sensors/sharp_ir_distance_sensor.py
|
class SharpIrDistanceSensor(object):
def __init__(self, spi_interface, pin_id):
self.spi_interface = spi_interface
self.pin_id = pin_id
def _voltageToMeters(self, voltage):
return 67.84 / (voltage - 3) - 0.04
def readDistance(self):
'''Returns distance in meters.'''
voltage = self.spi_interface.read(self.pin_id)
distance = self._voltageToMeters(voltage)
return distance
|
Add implementation for Sharp IR distance sensor
|
Add implementation for Sharp IR distance sensor
|
Python
|
mit
|
asydorchuk/robotics,asydorchuk/robotics
|
Add implementation for Sharp IR distance sensor
|
class SharpIrDistanceSensor(object):
def __init__(self, spi_interface, pin_id):
self.spi_interface = spi_interface
self.pin_id = pin_id
def _voltageToMeters(self, voltage):
return 67.84 / (voltage - 3) - 0.04
def readDistance(self):
'''Returns distance in meters.'''
voltage = self.spi_interface.read(self.pin_id)
distance = self._voltageToMeters(voltage)
return distance
|
<commit_before><commit_msg>Add implementation for Sharp IR distance sensor<commit_after>
|
class SharpIrDistanceSensor(object):
def __init__(self, spi_interface, pin_id):
self.spi_interface = spi_interface
self.pin_id = pin_id
def _voltageToMeters(self, voltage):
return 67.84 / (voltage - 3) - 0.04
def readDistance(self):
'''Returns distance in meters.'''
voltage = self.spi_interface.read(self.pin_id)
distance = self._voltageToMeters(voltage)
return distance
|
Add implementation for Sharp IR distance sensorclass SharpIrDistanceSensor(object):
def __init__(self, spi_interface, pin_id):
self.spi_interface = spi_interface
self.pin_id = pin_id
def _voltageToMeters(self, voltage):
return 67.84 / (voltage - 3) - 0.04
def readDistance(self):
'''Returns distance in meters.'''
voltage = self.spi_interface.read(self.pin_id)
distance = self._voltageToMeters(voltage)
return distance
|
<commit_before><commit_msg>Add implementation for Sharp IR distance sensor<commit_after>class SharpIrDistanceSensor(object):
def __init__(self, spi_interface, pin_id):
self.spi_interface = spi_interface
self.pin_id = pin_id
def _voltageToMeters(self, voltage):
return 67.84 / (voltage - 3) - 0.04
def readDistance(self):
'''Returns distance in meters.'''
voltage = self.spi_interface.read(self.pin_id)
distance = self._voltageToMeters(voltage)
return distance
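# --- Editor's illustrative sketch (not part of the original commit) ---
# Working the conversion formula for one made-up ADC reading:
#
#   voltage = 70                     # hypothetical value returned by the SPI read
#   67.84 / (70 - 3) - 0.04          # = 67.84/67 - 0.04, roughly 0.97 meters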
|
|
cb4f876c7bd52f66de955c0a800a3fd0de612ead
|
tests/health_checks/test_per_gwas_snp_AND_disease.py
|
tests/health_checks/test_per_gwas_snp_AND_disease.py
|
# ------------------------------------------------
# built-ins
import unittest
# local
from utils.base import TestPostgapBase
# ------------------------------------------------
class TestPostgapPerGwasSnpANDDisease(TestPostgapBase):
def setUp(self):
self.per_gwas_snp_and_disease = self.pg.groupby(['gwas_snp', 'disease_efo_id'])
def test_each_gwas_snp_and_disease_efo_id_pair_has_unique_gwas_pvalue(self):
self.skipTest('CHECK FOR UNIQUENESS OF gwas_pvalue')
def test_each_gwas_snp_and_disease_efo_id_pair_has_unique_gwas_pvalue_description(self):
self.skipTest('CHECK FOR UNIQUENESS OF gwas_pvalue_description')
def test_each_gwas_snp_and_disease_efo_id_pair_has_unique_gwas_odds_ratio(self):
self.skipTest('CHECK FOR UNIQUENESS OF gwas_odds_ratio')
def test_each_gwas_snp_and_disease_efo_id_pair_has_unique_gwas_beta(self):
self.skipTest('CHECK FOR UNIQUENESS OF gwas_beta')
def test_each_gwas_snp_and_disease_efo_id_pair_has_unique_gwas_size(self):
self.skipTest('CHECK FOR UNIQUENESS OF gwas_size')
def test_each_gwas_snp_and_disease_efo_id_pair_has_unique_gwas_pmid(self):
self.skipTest('CHECK FOR UNIQUENESS OF gwas_pmid')
def test_each_gwas_snp_and_disease_efo_id_pair_has_unique_gwas_study(self):
self.skipTest('CHECK FOR UNIQUENESS OF gwas_study')
def test_each_gwas_snp_and_disease_efo_id_pair_has_unique_gwas_reported_trait(self):
self.skipTest('CHECK FOR UNIQUENESS OF gwas_reported_trait')
if __name__ == '__main__':
unittest.main()
|
Add tests per gwas_snp and disease
|
Add tests per gwas_snp and disease
|
Python
|
apache-2.0
|
Ensembl/cttv024,Ensembl/cttv024
|
Add tests per gwas_snp and disease
|
# ------------------------------------------------
# built-ins
import unittest
# local
from utils.base import TestPostgapBase
# ------------------------------------------------
class TestPostgapPerGwasSnpANDDisease(TestPostgapBase):
def setUp(self):
self.per_gwas_snp_and_disease = self.pg.groupby(['gwas_snp', 'disease_efo_id'])
def test_each_gwas_snp_and_disease_efo_id_pair_has_unique_gwas_pvalue(self):
self.skipTest('CHECK FOR UNIQUENESS OF gwas_pvalue')
def test_each_gwas_snp_and_disease_efo_id_pair_has_unique_gwas_pvalue_description(self):
self.skipTest('CHECK FOR UNIQUENESS OF gwas_pvalue_description')
def test_each_gwas_snp_and_disease_efo_id_pair_has_unique_gwas_odds_ratio(self):
self.skipTest('CHECK FOR UNIQUENESS OF gwas_odds_ratio')
def test_each_gwas_snp_and_disease_efo_id_pair_has_unique_gwas_beta(self):
self.skipTest('CHECK FOR UNIQUENESS OF gwas_beta')
def test_each_gwas_snp_and_disease_efo_id_pair_has_unique_gwas_size(self):
self.skipTest('CHECK FOR UNIQUENESS OF gwas_size')
def test_each_gwas_snp_and_disease_efo_id_pair_has_unique_gwas_pmid(self):
self.skipTest('CHECK FOR UNIQUENESS OF gwas_pmid')
def test_each_gwas_snp_and_disease_efo_id_pair_has_unique_gwas_study(self):
self.skipTest('CHECK FOR UNIQUENESS OF gwas_study')
def test_each_gwas_snp_and_disease_efo_id_pair_has_unique_gwas_reported_trait(self):
self.skipTest('CHECK FOR UNIQUENESS OF gwas_reported_trait')
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add tests per gwas_snp and disease<commit_after>
|
# ------------------------------------------------
# built-ins
import unittest
# local
from utils.base import TestPostgapBase
# ------------------------------------------------
class TestPostgapPerGwasSnpANDDisease(TestPostgapBase):
def setUp(self):
self.per_gwas_snp_and_disease = self.pg.groupby(['gwas_snp', 'disease_efo_id'])
def test_each_gwas_snp_and_disease_efo_id_pair_has_unique_gwas_pvalue(self):
self.skipTest('CHECK FOR UNIQUENESS OF gwas_pvalue')
def test_each_gwas_snp_and_disease_efo_id_pair_has_unique_gwas_pvalue_description(self):
self.skipTest('CHECK FOR UNIQUENESS OF gwas_pvalue_description')
def test_each_gwas_snp_and_disease_efo_id_pair_has_unique_gwas_odds_ratio(self):
self.skipTest('CHECK FOR UNIQUENESS OF gwas_odds_ratio')
def test_each_gwas_snp_and_disease_efo_id_pair_has_unique_gwas_beta(self):
self.skipTest('CHECK FOR UNIQUENESS OF gwas_beta')
def test_each_gwas_snp_and_disease_efo_id_pair_has_unique_gwas_size(self):
self.skipTest('CHECK FOR UNIQUENESS OF gwas_size')
def test_each_gwas_snp_and_disease_efo_id_pair_has_unique_gwas_pmid(self):
self.skipTest('CHECK FOR UNIQUENESS OF gwas_pmid')
def test_each_gwas_snp_and_disease_efo_id_pair_has_unique_gwas_study(self):
self.skipTest('CHECK FOR UNIQUENESS OF gwas_study')
def test_each_gwas_snp_and_disease_efo_id_pair_has_unique_gwas_reported_trait(self):
self.skipTest('CHECK FOR UNIQUENESS OF gwas_reported_trait')
if __name__ == '__main__':
unittest.main()
|
Add tests per gwas_snp and disease# ------------------------------------------------
# built-ins
import unittest
# local
from utils.base import TestPostgapBase
# ------------------------------------------------
class TestPostgapPerGwasSnpANDDisease(TestPostgapBase):
def setUp(self):
self.per_gwas_snp_and_disease = self.pg.groupby(['gwas_snp', 'disease_efo_id'])
def test_each_gwas_snp_and_disease_efo_id_pair_has_unique_gwas_pvalue(self):
self.skipTest('CHECK FOR UNIQUENESS OF gwas_pvalue')
def test_each_gwas_snp_and_disease_efo_id_pair_has_unique_gwas_pvalue_description(self):
self.skipTest('CHECK FOR UNIQUENESS OF gwas_pvalue_description')
def test_each_gwas_snp_and_disease_efo_id_pair_has_unique_gwas_odds_ratio(self):
self.skipTest('CHECK FOR UNIQUENESS OF gwas_odds_ratio')
def test_each_gwas_snp_and_disease_efo_id_pair_has_unique_gwas_beta(self):
self.skipTest('CHECK FOR UNIQUENESS OF gwas_beta')
def test_each_gwas_snp_and_disease_efo_id_pair_has_unique_gwas_size(self):
self.skipTest('CHECK FOR UNIQUENESS OF gwas_size')
def test_each_gwas_snp_and_disease_efo_id_pair_has_unique_gwas_pmid(self):
self.skipTest('CHECK FOR UNIQUENESS OF gwas_pmid')
def test_each_gwas_snp_and_disease_efo_id_pair_has_unique_gwas_study(self):
self.skipTest('CHECK FOR UNIQUENESS OF gwas_study')
def test_each_gwas_snp_and_disease_efo_id_pair_has_unique_gwas_reported_trait(self):
self.skipTest('CHECK FOR UNIQUENESS OF gwas_reported_trait')
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add tests per gwas_snp and disease<commit_after># ------------------------------------------------
# built-ins
import unittest
# local
from utils.base import TestPostgapBase
# ------------------------------------------------
class TestPostgapPerGwasSnpANDDisease(TestPostgapBase):
def setUp(self):
self.per_gwas_snp_and_disease = self.pg.groupby(['gwas_snp', 'disease_efo_id'])
def test_each_gwas_snp_and_disease_efo_id_pair_has_unique_gwas_pvalue(self):
self.skipTest('CHECK FOR UNIQUENESS OF gwas_pvalue')
def test_each_gwas_snp_and_disease_efo_id_pair_has_unique_gwas_pvalue_description(self):
self.skipTest('CHECK FOR UNIQUENESS OF gwas_pvalue_description')
def test_each_gwas_snp_and_disease_efo_id_pair_has_unique_gwas_odds_ratio(self):
self.skipTest('CHECK FOR UNIQUENESS OF gwas_odds_ratio')
def test_each_gwas_snp_and_disease_efo_id_pair_has_unique_gwas_beta(self):
self.skipTest('CHECK FOR UNIQUENESS OF gwas_beta')
def test_each_gwas_snp_and_disease_efo_id_pair_has_unique_gwas_size(self):
self.skipTest('CHECK FOR UNIQUENESS OF gwas_size')
def test_each_gwas_snp_and_disease_efo_id_pair_has_unique_gwas_pmid(self):
self.skipTest('CHECK FOR UNIQUENESS OF gwas_pmid')
def test_each_gwas_snp_and_disease_efo_id_pair_has_unique_gwas_study(self):
self.skipTest('CHECK FOR UNIQUENESS OF gwas_study')
def test_each_gwas_snp_and_disease_efo_id_pair_has_unique_gwas_reported_trait(self):
self.skipTest('CHECK FOR UNIQUENESS OF gwas_reported_trait')
if __name__ == '__main__':
unittest.main()
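# --- Editor's illustrative sketch (not part of the original commit) ---
# One way a skipped uniqueness check above could eventually be written with
# plain pandas (assumes self.pg is a DataFrame, as the groupby in setUp implies):
#
#   def test_each_pair_has_unique_gwas_pvalue(self):
#       counts = self.per_gwas_snp_and_disease['gwas_pvalue'].nunique()
#       self.assertTrue((counts <= 1).all())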
|
|
339790845461344ab7ea5a6f864b3bfdede0b9c0
|
dev-tools/get-bwc-version.py
|
dev-tools/get-bwc-version.py
|
# Licensed to Elasticsearch under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on
# an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
'''
Downloads and extracts elasticsearch for backwards compatibility tests.
'''
import argparse
import os
import platform
import shutil
import subprocess
import urllib.request
import zipfile
def parse_config():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('--path', metavar='DIR', default='./backwards',
help='Where to extract elasticsearch')
parser.add_argument('--force', action='store_true', default=False,
help='Delete and redownload if the version already exists')
parser.add_argument('version', metavar='X.Y.Z',
help='Version of elasticsearch to grab')
return parser.parse_args()
def main():
c = parse_config()
if not os.path.exists(c.path):
print('Creating %s' % c.path)
os.mkdir(c.path)
is_windows = platform.system() == 'Windows'
os.chdir(c.path)
version_dir = 'elasticsearch-%s' % c.version
if os.path.exists(version_dir):
if c.force:
print('Removing old download %s' % version_dir)
shutil.rmtree(version_dir)
else:
print('Version %s exists at %s' % (c.version, version_dir))
return
# before 1.4.0, the zip file contains windows scripts, and tar.gz contained *nix scripts
if is_windows:
filename = '%s.zip' % version_dir
else:
filename = '%s.tar.gz' % version_dir
url = 'https://download.elasticsearch.org/elasticsearch/elasticsearch/%s' % filename
print('Downloading %s' % url)
urllib.request.urlretrieve(url, filename)
print('Extracting to %s' % version_dir)
if is_windows:
archive = zipfile.ZipFile(filename)
archive.extractall()
else:
# for some reason python's tarfile module has trouble with ES tgz?
subprocess.check_call('tar -xzf %s' % filename, shell=True)
print('Cleaning up %s' % filename)
os.remove(filename)
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
print('Ctrl-C caught, exiting')
|
Add script to grab ES version for BWC tests.
|
Tools: Add script to grab ES version for BWC tests.
closes #7653
|
Python
|
apache-2.0
|
myelin/elasticsearch,gmarz/elasticsearch,andrestc/elasticsearch,vrkansagara/elasticsearch,xpandan/elasticsearch,strapdata/elassandra5-rc,sposam/elasticsearch,bestwpw/elasticsearch,PhaedrusTheGreek/elasticsearch,maddin2016/elasticsearch,kaneshin/elasticsearch,mjason3/elasticsearch,sjohnr/elasticsearch,wittyameta/elasticsearch,masterweb121/elasticsearch,alexbrasetvik/elasticsearch,nellicus/elasticsearch,wbowling/elasticsearch,cwurm/elasticsearch,kalburgimanjunath/elasticsearch,sc0ttkclark/elasticsearch,fooljohnny/elasticsearch,zeroctu/elasticsearch,mjason3/elasticsearch,andrejserafim/elasticsearch,Shepard1212/elasticsearch,vvcephei/elasticsearch,dataduke/elasticsearch,HarishAtGitHub/elasticsearch,janmejay/elasticsearch,vingupta3/elasticsearch,Asimov4/elasticsearch,ivansun1010/elasticsearch,Kakakakakku/elasticsearch,humandb/elasticsearch,wimvds/elasticsearch,hirdesh2008/elasticsearch,girirajsharma/elasticsearch,sdauletau/elasticsearch,andrejserafim/elasticsearch,skearns64/elasticsearch,koxa29/elasticsearch,smflorentino/elasticsearch,lzo/elasticsearch-1,obourgain/elasticsearch,ckclark/elasticsearch,milodky/elasticsearch,elasticdog/elasticsearch,drewr/elasticsearch,sauravmondallive/elasticsearch,KimTaehee/elasticsearch,combinatorist/elasticsearch,lydonchandra/elasticsearch,jaynblue/elasticsearch,aglne/elasticsearch,dantuffery/elasticsearch,YosuaMichael/elasticsearch,F0lha/elasticsearch,springning/elasticsearch,wuranbo/elasticsearch,jprante/elasticsearch,amit-shar/elasticsearch,ajhalani/elasticsearch,janmejay/elasticsearch,ydsakyclguozi/elasticsearch,luiseduardohdbackup/elasticsearch,rento19962/elasticsearch,ajhalani/elasticsearch,schonfeld/elasticsearch,slavau/elasticsearch,rmuir/elasticsearch,aglne/elasticsearch,fivejjs/elasticsearch,jbertouch/elasticsearch,overcome/elasticsearch,areek/elasticsearch,mapr/elasticsearch,szroland/elasticsearch,
clintongormley/elasticsearch,Kakakakakku/elasticsearch,zhiqinghuang/elasticsearch,mgalushka/elasticsearch,mute/elasticsearch,fernandozhu/elasticsearch,scottsom/elasticsearch,mrorii/elasticsearch,Siddartha07/elasticsearch,wimvds/elasticsearch,hirdesh2008/elasticsearch,girirajsharma/elasticsearch,sdauletau/elasticsearch,andrejserafim/elasticsearch,skearns64/elasticsearch,koxa29/elasticsearch,smflorentino/elasticsearch,lzo/elasticsearch-1,obourgain/elasticsearch,ckclark/elasticsearch,milodky/elasticsearch,elasticdog/elasticsearch,drewr/elasticsearch,sauravmondallive/elasticsearch,KimTaehee/elasticsearch,combinatorist/elasticsearch,lydonchandra/elasticsearch,jaynblue/elasticsearch,aglne/elasticsearch,dantuffery/elasticsearch,YosuaMichael/elasticsearch,F0lha/elasticsearch,springning/elasticsearch,wuranbo/elasticsearch,jprante/elasticsearch,amit-shar/elasticsearch,ajhalani/elasticsearch,janmejay/elasticsearch,ydsakyclguozi/elasticsearch,luiseduardohdbackup/elasticsearch,rento19962/elasticsearch,ajhalani/elasticsearch,schonfeld/elasticsearch,slavau/elasticsearch,rmuir/elasticsearch,aglne/elasticsearch,fekaputra/elasticsearch,dylan8902/elasticsearch,strapdata/elassandra-test,weipinghe/elasticsearch,vietlq/elasticsearch,ImpressTV/elasticsearch,kevinkluge/elasticsearch,hechunwen/elasticsearch,xpandan/elasticsearch,rento19962/elasticsearch,avikurapati/elasticsearch,episerver/elasticsearch,kenshin233/elasticsearch,i-am-Nathan/elasticsearch,strapdata/elassandra-test,Siddartha07/elasticsearch,awislowski/elasticsearch,polyfractal/elasticsearch,myelin/elasticsearch,ivansun1010/elasticsearch,Ansh90/elasticsearch,ckclark/elasticsearch,winstonewert/elasticsearch,s1monw/elasticsearch,YosuaMichael/elasticsearch,tahaemin/elasticsearch,iamjakob/elasticsearch,Asimov4/elasticsearch,overcome/elasticsearch,jeteve/elasticsearch,xpandan/elasticsearch,beiske/elasticsearch,karthikjaps/elasticsearch,Asimov4/elasticsearch,tkssharma/elasticsearch,MjAbuz/elasticsearch,jw0201/elastic,jpountz/elasticsearch,gmarz/elasticsearch,LewayneNaidoo/elasticsearch,mortonsykes/elasticsearch,micpalmia/elasticsearch,lmtwga/elasticsearch,kubum/elasticsearch,Siddartha07/elasticsearch,areek/elasticsearch,iacdingping/elasticsearch,loconsolutions/elasticsearch,adrianbk/elasticsearch,episerver/elasticsearch,likaiwalkman/elasticsearch,heng4fun/elasticsearch,LeoYao/elasticsearch,nomoa/elasticsearch,yynil/elasticsearch,alexshadow007/elasticsearch,bawse/elasticsearch,Clairebi/ElasticsearchClone,MisterAndersen/elasticsearch,AleksKochev/elasticsearch,Asimov4/elasticsearch,huypx1292/elasticsearch,acchen97/elasticsearch,heng4fun/elasticsearch,dpursehouse/elasticsearch,HarishAtGitHub/elasticsearch,chrismwendt/elasticsearch,hechunwen/elasticsearch,wbowling/elasticsearch,Helen-Zhao/elasticsearch,anti-social/elasticsearch,infusionsoft/elasticsearch,szroland/elasticsearch,skearns64/elasticsearch,avikurapati/elasticsearch,MjAbuz/elasticsearch,LewayneNaidoo/elasticsearch,wittyameta/elasticsearch,markharwood/elasticsearch,javachengwc/elasticsearch,gingerwizard/elasticsearch,linglaiyao1314/elasticsearch,schonfeld/elasticsearch,IanvsPoplicola/elasticsearch,s1monw/elasticsearch,drewr/elasticsearch,scottsom/elasticsearch,abibell/elasticsearch,wittyameta/elasticsearch,jimhooker2002/elasticsearch,ckclark/elasticsearch,dylan8902/elasticsearch,yanjunh/elasticsearch,socialrank/elasticsearch,luiseduardohdbackup/elasticsearch,obourgain/elasticsearch,SergVro/elasticsearch,socialrank/elasticsearch,hydro2k/elasticsearch,pozhidaevak/elasticsearch,ajhalani/elasticsearch,lightslife/elasticsearch,18098924759/elasticsearch,GlenRSmith/elasticsearch,iantruslove/elasticsearch,obourgain/elasticsearch,wittyameta/elasticsearch,scorpionvicky/elasticsearch,xingguang2013/elasticsearch,spiegela/elasticsearch,nrkkalyan/elasticsearch,cnfire/elasticsearch-1,camilojd/elasticsearch,andrejserafim/elasticsearch,awislowski/elasticsearch,girirajsharma/elasticsearch,knight1128/elasticsearch,StefanGor/elasticsearch,infusionsoft/elasticsearch,boliza/elasticsearch,xingguang2013/elasticsearch,kalimatas/elasticsearch,yynil/elasticsearch,kaneshin/elasticsearch,spiegela/elasticsearch,janmejay/elasticsearch,diendt/elasticsearch,jbertouch/elasticsearch,drewr/elasticsearch,mnylen/elasticsearch,dataduke/elasticsearch,HonzaKral/elasticsearch,i-am-Nathan/elasticsearch,MjAbuz/elasticsearch,lchennup/elasticsearch,hanst/elasticsearch,xingguang2013/elasticsearch,MichaelLiZhou/elasticsearch,elasticdog/elasticsearch,chirilo/elasticsearch,polyfractal/elasticsearch,cwurm/elasticsearch,Shekharrajak/elasticsearch,vroyer/elassandra,
Shepard1212/elasticsearch,sc0ttkclark/elasticsearch,golubev/elasticsearch,JackyMai/elasticsearch,dylan8902/elasticsearch,strapdata/elassandra-test,weipinghe/elasticsearch,tahaemin/elasticsearch,vietlq/elasticsearch,ImpressTV/elasticsearch,kevinkluge/elasticsearch,hechunwen/elasticsearch,xpandan/elasticsearch,rento19962/elasticsearch,avikurapati/elasticsearch,episerver/elasticsearch,kenshin233/elasticsearch,i-am-Nathan/elasticsearch,strapdata/elassandra-test,Siddartha07/elasticsearch,awislowski/elasticsearch,polyfractal/elasticsearch,myelin/elasticsearch,ivansun1010/elasticsearch,Ansh90/elasticsearch,ckclark/elasticsearch,winstonewert/elasticsearch,s1monw/elasticsearch,YosuaMichael/elasticsearch,tahaemin/elasticsearch,iamjakob/elasticsearch,Asimov4/elasticsearch,overcome/elasticsearch,jeteve/elasticsearch,xpandan/elasticsearch,beiske/elasticsearch,karthikjaps/elasticsearch,Asimov4/elasticsearch,tkssharma/elasticsearch,MjAbuz/elasticsearch,jw0201/elastic,jpountz/elasticsearch,gmarz/elasticsearch,LewayneNaidoo/elasticsearch,mortonsykes/elasticsearch,micpalmia/elasticsearch,lmtwga/elasticsearch,kubum/elasticsearch,Siddartha07/elasticsearch,areek/elasticsearch,iacdingping/elasticsearch,loconsolutions/elasticsearch,adrianbk/elasticsearch,episerver/elasticsearch,likaiwalkman/elasticsearch,heng4fun/elasticsearch,LeoYao/elasticsearch,nomoa/elasticsearch,yynil/elasticsearch,alexshadow007/elasticsearch,bawse/elasticsearch,Clairebi/ElasticsearchClone,MisterAndersen/elasticsearch,AleksKochev/elasticsearch,strapdata/elassandra,hanswang/elasticsearch,mkis-/elasticsearch,rlugojr/elasticsearch,C-Bish/elasticsearch,mmaracic/elasticsearch,hirdesh2008/elasticsearch,jimczi/elasticsearch,JackyMai/elasticsearch,
nazarewk/elasticsearch,Flipkart/elasticsearch,Uiho/elasticsearch,Kakakakakku/elasticsearch,uschindler/elasticsearch,robin13/elasticsearch,kunallimaye/elasticsearch,jsgao0/elasticsearch,lmtwga/elasticsearch,acchen97/elasticsearch,drewr/elasticsearch,HonzaKral/elasticsearch,MisterAndersen/elasticsearch,ckclark/elasticsearch,ThiagoGarciaAlves/elasticsearch,hydro2k/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,xpandan/elasticsearch,hanswang/elasticsearch,abibell/elasticsearch,chrismwendt/elasticsearch,wayeast/elasticsearch,markllama/elasticsearch,easonC/elasticsearch,petabytedata/elasticsearch,strapdata/elassandra-test,Uiho/elasticsearch,truemped/elasticsearch,slavau/elasticsearch,heng4fun/elasticsearch,fooljohnny/elasticsearch,gfyoung/elasticsearch,EasonYi/elasticsearch,alexbrasetvik/elasticsearch,aglne/elasticsearch,dantuffery/elasticsearch,alexkuk/elasticsearch,coding0011/elasticsearch,diendt/elasticsearch,snikch/elasticsearch,alexkuk/elasticsearch,pritishppai/elasticsearch,truemped/elasticsearch,kevinkluge/elasticsearch,Fsero/elasticsearch,alexbrasetvik/elasticsearch,KimTaehee/elasticsearch,fred84/elasticsearch,tcucchietti/elasticsearch,artnowo/elasticsearch,nilabhsagar/elasticsearch,adrianbk/elasticsearch,luiseduardohdbackup/elasticsearch,mjason3/elasticsearch,18098924759/elasticsearch,lmtwga/elasticsearch,mikemccand/elasticsearch,smflorentino/elasticsearch,C-Bish/elasticsearch,ckclark/elasticsearch,areek/elasticsearch,fernandozhu/elasticsearch,sreeramjayan/elasticsearch,mohit/elasticsearch,kevinkluge/elasticsearch,amit-shar/elasticsearch,mcku/elasticsearch,socialrank/elasticsearch,sreeramjayan/elasticsearch,sdauletau/elasticsearch,ouyangkongtong/el
asticsearch,loconsolutions/elasticsearch,EasonYi/elasticsearch,nellicus/elasticsearch,linglaiyao1314/elasticsearch,pablocastro/elasticsearch,javachengwc/elasticsearch,Chhunlong/elasticsearch,tebriel/elasticsearch,lks21c/elasticsearch,wbowling/elasticsearch,s1monw/elasticsearch,pritishppai/elasticsearch,jchampion/elasticsearch,ZTE-PaaS/elasticsearch,onegambler/elasticsearch,kkirsche/elasticsearch,fooljohnny/elasticsearch,karthikjaps/elasticsearch,ivansun1010/elasticsearch,pranavraman/elasticsearch,jimhooker2002/elasticsearch,karthikjaps/elasticsearch,pablocastro/elasticsearch,EasonYi/elasticsearch,maddin2016/elasticsearch,spiegela/elasticsearch,trangvh/elasticsearch,s1monw/elasticsearch,naveenhooda2000/elasticsearch,tkssharma/elasticsearch,mrorii/elasticsearch,jimhooker2002/elasticsearch,dataduke/elasticsearch,alexbrasetvik/elasticsearch,jimczi/elasticsearch,smflorentino/elasticsearch,F0lha/elasticsearch,iamjakob/elasticsearch,wimvds/elasticsearch,nazarewk/elasticsearch,khiraiwa/elasticsearch,myelin/elasticsearch,JackyMai/elasticsearch,luiseduardohdbackup/elasticsearch,adrianbk/elasticsearch,hafkensite/elasticsearch,mbrukman/elasticsearch,nknize/elasticsearch,wayeast/elasticsearch,episerver/elasticsearch,rhoml/elasticsearch,Clairebi/ElasticsearchClone,kubum/elasticsearch,linglaiyao1314/elasticsearch,Widen/elasticsearch,truemped/elasticsearch,mkis-/elasticsearch,xuzha/elasticsearch,anti-social/elasticsearch,mute/elasticsearch,iantruslove/elasticsearch,diendt/elasticsearch,a2lin/elasticsearch,ImpressTV/elasticsearch,EasonYi/elasticsearch,rhoml/elasticsearch,vietlq/elasticsearch,kkirsche/elasticsearch,i-am-Nathan/elasticsearch,18098924759/elasticsearch,awislowski/elasticsearch,girirajsharma/elasticsearch,wittyameta/elasticsearch,rajanm/elasticsearch,strapdata/elassandra,ouyangkongtong/elasticsearch,jw0201/elastic,mjhennig/elasticsearch,Microsoft/elasticsearch,Ansh90/elasticsearch,jimhooker2002/elasticsearch,jaynblue/elasticsearch,beiske/elasticsearch,glefloch/elasticsearch,slavau/elasticsearch,MichaelLiZhou/elasticsearch,winstonewert/elasticsearch,njlawton/elasticsearch,obourgain/elasticsearch,brandonkearby/elasticsearch,sreeramjayan/elasticsearch,linglaiyao1314/elasticsearch,hirdesh2008/elasticsearch,Uiho/elasticsearch,lydonchandra/elasticsearch,mm0/elasticsearch,brandonkearby/elasticsearch,geidies/elasticsearch,skearns64/elasticsearch,Shepard1212/elasticsearch,Asimov4/elasticsearch,MichaelLiZhou/elasticsearch,AshishThakur/elasticsearch,kubum/elasticsearch,nomoa/elasticsearch,kevinkluge/elasticsearch,chrismwendt/elasticsearch,queirozfcom/elasticsearch,franklanganke/elasticsearch,rento19962/elasticsearch,gmarz/elasticsearch,artnowo/elasticsearch,kalburgimanjunath/elasticsearch,tebriel/elasticsearch,acchen97/elasticsearch,tcucchietti/elasticsearch,C-Bish/elasticsearch,abibell/elasticsearch,dongjoon-hyun/elasticsearch,JervyShi/elasticsearch,javachengwc/elasticsearch,knight1128/elasticsearch,lchennup/elasticsearch,tsohil/elasticsearch,iacdingping/elasticsearch,zhiqinghuang/elasticsearch,myelin/elasticsearch,areek/elasticsearch,apepper/elasticsearch,MetSystem/elasticsearch,kkirsche/elasticsearch,ulkas/elasticsearch,artnowo/elasticsearch,wangtuo/elasticsearch,bawse/elasticsearch,feiqitian/elasticsearch,pranavraman/elasticsearch,yongminxia/elasticsearch,truemped/elasticsearch,lzo/elasticsearch-1,tahaemin/elasticsearch,nrkkalyan/elasticsearch,markwalkom/elasticsearch,kkirsche/elasticsearch,tebriel/elasticsearch,fekaputra/elasticsearch,zkidkid/elasticsearch,alexbrasetvik/elasticsearch,Flipkart/elastics
earch,kcompher/elasticsearch,18098924759/elasticsearch,micpalmia/elasticsearch,acchen97/elasticsearch,Chhunlong/elasticsearch,sjohnr/elasticsearch,codebunt/elasticsearch,wangtuo/elasticsearch,alexshadow007/elasticsearch,vvcephei/elasticsearch,caengcjd/elasticsearch,wayeast/elasticsearch,diendt/elasticsearch,ThalaivaStars/OrgRepo1,Collaborne/elasticsearch,zeroctu/elasticsearch,markharwood/elasticsearch,IanvsPoplicola/elasticsearch,ESamir/elasticsearch,martinstuga/elasticsearch,nilabhsagar/elasticsearch,markharwood/elasticsearch,amit-shar/elasticsearch,nezirus/elasticsearch,liweinan0423/elasticsearch,rajanm/elasticsearch,phani546/elasticsearch,Clairebi/ElasticsearchClone,petabytedata/elasticsearch,a2lin/elasticsearch,JervyShi/elasticsearch,shreejay/elasticsearch,ricardocerq/elasticsearch,dantuffery/elasticsearch,martinstuga/elasticsearch,khiraiwa/elasticsearch,markllama/elasticsearch,clintongormley/elasticsearch,vingupta3/elasticsearch,winstonewert/elasticsearch,sposam/elasticsearch,robin13/elasticsearch,masaruh/elasticsearch,ZTE-PaaS/elasticsearch,ThiagoGarciaAlves/elasticsearch,mmaracic/elasticsearch,wuranbo/elasticsearch,nellicus/elasticsearch,likaiwalkman/elasticsearch,masaruh/elasticsearch,jpountz/elasticsearch,fforbeck/elasticsearch,iacdingping/elasticsearch,ThalaivaStars/OrgRepo1,kingaj/elasticsearch,Stacey-Gammon/elasticsearch,mnylen/elasticsearch,nezirus/elasticsearch,Kakakakakku/elasticsearch,camilojd/elasticsearch,mute/elasticsearch,JSCooke/elasticsearch,ulkas/elasticsearch,mrorii/elasticsearch,bawse/elasticsearch,huanzhong/elasticsearch,LeoYao/elasticsearch,jpountz/elasticsearch,glefloch/elasticsearch,mjason3/elasticsearch,pozhidaevak/elasticsearch,pritishppai/elasticsearch,wangyuxue/elasticsearch,gfyoung/elasticsearch,Rygbee/elasticsearch,anti-social/elasticsearch,trangvh/elasticsearch,janmejay/elasticsearch,kingaj/elasticsearch,feiqitian/elasticsearch,queirozfcom/elasticsearch,btiernay/elasticsearch,drewr/elasticsearch,jaynblue/elasticsearch,Fsero/elasticsearch,koxa29/elasticsearch,schonfeld/elasticsearch,vietlq/elasticsearch,18098924759/elasticsearch,nrkkalyan/elasticsearch,socialrank/elasticsearch,jbertouch/elasticsearch,beiske/elasticsearch,Uiho/elasticsearch,btiernay/elasticsearch,fekaputra/elasticsearch,rmuir/elasticsearch,mgalushka/elasticsearch,lydonchandra/elasticsearch,MetSystem/elasticsearch,Shepard1212/elasticsearch,dylan8902/elasticsearch,sneivandt/elasticsearch,MichaelLiZhou/elasticsearch,sdauletau/elasticsearch,iacdingping/elasticsearch,ThiagoGarciaAlves/elasticsearch,bestwpw/elasticsearch,jpountz/elasticsearch,acchen97/elasticsearch,Collaborne/elasticsearch,VukDukic/elasticsearch,Charlesdong/elasticsearch,onegambler/elasticsearch,vrkansagara/elasticsearch,xuzha/elasticsearch,clintongormley/elasticsearch,18098924759/elasticsearch,jbertouch/elasticsearch,kubum/elasticsearch,artnowo/elasticsearch,mm0/elasticsearch,yynil/elasticsearch,AshishThakur/elasticsearch,Widen/elasticsearch,mcku/elasticsearch,vingupta3/elasticsearch,abibell/elasticsearch,tkssharma/elasticsearch,elancom/elasticsearch,smflorentino/elasticsearch,hechunwen/elasticsearch,mm0/elasticsearch,chirilo/elasticsearch,Widen/elasticsearch,huypx1292/elasticsearch,tahaemin/elasticsearch,onegambler/elasticsearch,yuy168/elasticsearch,queirozfcom/elasticsearch,Chhunlong/elasticsearch,ouyangkongtong/elasticsearch,sarwarbhuiyan/elasticsearch,adrianbk/elasticsearch,nomoa/elasticsearch,F0lha/elasticsearch,mrorii/elasticsearch,kalimatas/elasticsearch,mkis-/elasticsearch,onegambler/elasticsearch,mnylen/elasticsearch,ag
lne/elasticsearch,feiqitian/elasticsearch,xuzha/elasticsearch,mkis-/elasticsearch,lks21c/elasticsearch,markharwood/elasticsearch,njlawton/elasticsearch,Brijeshrpatel9/elasticsearch,vroyer/elasticassandra,kunallimaye/elasticsearch,karthikjaps/elasticsearch,palecur/elasticsearch,Kakakakakku/elasticsearch,btiernay/elasticsearch,abibell/elasticsearch,queirozfcom/elasticsearch,loconsolutions/elasticsearch,markllama/elasticsearch,MaineC/elasticsearch,franklanganke/elasticsearch,markharwood/elasticsearch,yongminxia/elasticsearch,wbowling/elasticsearch,pritishppai/elasticsearch,jchampion/elasticsearch,LewayneNaidoo/elasticsearch,fernandozhu/elasticsearch,Shekharrajak/elasticsearch,mmaracic/elasticsearch,ajhalani/elasticsearch,feiqitian/elasticsearch,kevinkluge/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,onegambler/elasticsearch,dylan8902/elasticsearch,chrismwendt/elasticsearch,iacdingping/elasticsearch,geidies/elasticsearch,Charlesdong/elasticsearch,coding0011/elasticsearch,Chhunlong/elasticsearch,kalburgimanjunath/elasticsearch,adrianbk/elasticsearch,trangvh/elasticsearch,onegambler/elasticsearch,jango2015/elasticsearch,nezirus/elasticsearch,mapr/elasticsearch,cnfire/elasticsearch-1,ESamir/elasticsearch,milodky/elasticsearch,glefloch/elasticsearch,camilojd/elasticsearch,dpursehouse/elasticsearch,sjohnr/elasticsearch,jango2015/elasticsearch,uschindler/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,NBSW/elasticsearch,boliza/elasticsearch,sdauletau/elasticsearch,javachengwc/elasticsearch,xuzha/elasticsearch,mkis-/elasticsearch,hanst/elasticsearch,andrejserafim/elasticsearch,vrkansagara/elasticsearch,alexkuk/elasticsearch,gingerwizard/elasticsearch,tsohil/elasticsearch,kenshin233/elasticsearch,ZTE-PaaS/elasticsearch,dpursehouse/elasticsearch,skearns64/elasticsearch,cwurm/elasticsearch,naveenhooda2000/elasticsearch,sarwarbhuiyan/elasticsearch,codebunt/elasticsearch,amaliujia/elasticsearch,zkidkid/elasticsearch,adrianbk/elasticsearch,hechunwen/elasticsearch,F0lha/elasticsearch,hafkensite/elasticsearch,janmejay/elasticsearch,winstonewert/elasticsearch,sc0ttkclark/elasticsearch,SergVro/elasticsearch,naveenhooda2000/elasticsearch,MichaelLiZhou/elasticsearch,jimhooker2002/elasticsearch,heng4fun/elasticsearch,Liziyao/elasticsearch,mbrukman/elasticsearch,iamjakob/elasticsearch,Ansh90/elasticsearch,vvcephei/elasticsearch,TonyChai24/ESSource,andrejserafim/elasticsearch,naveenhooda2000/elasticsearch,jeteve/elasticsearch,jimczi/elasticsearch,likaiwalkman/elasticsearch,fred84/elasticsearch,mjhennig/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,jw0201/elastic,knight1128/elasticsearch,dongjoon-hyun/elasticsearch,kubum/elasticsearch,rmuir/elasticsearch,humandb/elasticsearch,geidies/elasticsearch,iantruslove/elasticsearch,Collaborne/elasticsearch,mnylen/elasticsearch,qwerty4030/elasticsearch,dongjoon-hyun/elasticsearch,iantruslove/elasticsearch,pritishppai/elasticsearch,caengcjd/elasticsearch,PhaedrusTheGreek/elasticsearch,nellicus/elasticsearch,AshishThakur/elasticsearch,pozhidaevak/elasticsearch,lmtwga/elasticsearch,milodky/elasticsearch,LeoYao/elasticsearch,kunallimaye/elasticsearch,mnylen/elasticsearch,zhiqinghuang/elasticsearch,apepper/elasticsearch,achow/elasticsearch,alexkuk/elasticsearch,rento19962/elasticsearch,martinstuga/elasticsearch,mapr/elasticsearch,scorpionvicky/elasticsearch,khiraiwa/elasticsearch,sdauletau/elasticsearch,amaliujia/elasticsearch,xpandan/elasticsearch,shreejay/elasticsearch,palecur/elasticsearch,infusionsoft/elasticsearch,AshishThakur/elasticsearch,kingaj/elasticsearch,J
SCooke/elasticsearch,Asimov4/elasticsearch,rento19962/elasticsearch,IanvsPoplicola/elasticsearch,sscarduzio/elasticsearch,amit-shar/elasticsearch,tcucchietti/elasticsearch,sposam/elasticsearch,franklanganke/elasticsearch,kalimatas/elasticsearch,nellicus/elasticsearch,AleksKochev/elasticsearch,alexbrasetvik/elasticsearch,djschny/elasticsearch,chirilo/elasticsearch,camilojd/elasticsearch,TonyChai24/ESSource,rlugojr/elasticsearch,ricardocerq/elasticsearch,ouyangkongtong/elasticsearch,Rygbee/elasticsearch,zhiqinghuang/elasticsearch,codebunt/elasticsearch,snikch/elasticsearch,kingaj/elasticsearch,ajhalani/elasticsearch,apepper/elasticsearch,TonyChai24/ESSource,rajanm/elasticsearch,diendt/elasticsearch,rhoml/elasticsearch,mmaracic/elasticsearch,queirozfcom/elasticsearch,sauravmondallive/elasticsearch,ThiagoGarciaAlves/elasticsearch,NBSW/elasticsearch,avikurapati/elasticsearch,caengcjd/elasticsearch,ivansun1010/elasticsearch,geidies/elasticsearch,umeshdangat/elasticsearch,diendt/elasticsearch,elasticdog/elasticsearch,strapdata/elassandra5-rc,javachengwc/elasticsearch,YosuaMichael/elasticsearch,nknize/elasticsearch,iantruslove/elasticsearch,chirilo/elasticsearch,jw0201/elastic,sarwarbhuiyan/elasticsearch,wayeast/elasticsearch,NBSW/elasticsearch,Brijeshrpatel9/elasticsearch,sjohnr/elasticsearch,Chhunlong/elasticsearch,AndreKR/elasticsearch,davidvgalbraith/elasticsearch,umeshdangat/elasticsearch,nrkkalyan/elasticsearch,vingupta3/elasticsearch,rlugojr/elasticsearch,sarwarbhuiyan/elasticsearch,scorpionvicky/elasticsearch,overcome/elasticsearch,PhaedrusTheGreek/elasticsearch,cnfire/elasticsearch-1,KimTaehee/elasticsearch,davidvgalbraith/elasticsearch,polyfractal/elasticsearch,markllama/elasticsearch,wenpos/elasticsearch,boliza/elasticsearch,strapdata/elassandra5-rc,sneivandt/elasticsearch,strapdata/elassandra-test,slavau/elasticsearch,yongminxia/elasticsearch,hirdesh2008/elasticsearch,HarishAtGitHub/elasticsearch,likaiwalkman/elasticsearch,janmejay/elasticsearch,henakamaMSFT/elasticsearch,amaliujia/elasticsearch,petabytedata/elasticsearch,markwalkom/elasticsearch,mrorii/elasticsearch,xingguang2013/elasticsearch,xuzha/elasticsearch,nomoa/elasticsearch,wimvds/elasticsearch,jchampion/elasticsearch,camilojd/elasticsearch,achow/elasticsearch,kunallimaye/elasticsearch,Fsero/elasticsearch,ThiagoGarciaAlves/elasticsearch,ckclark/elasticsearch,markllama/elasticsearch,kaneshin/elasticsearch,thecocce/elasticsearch,szroland/elasticsearch,loconsolutions/elasticsearch,vvcephei/elasticsearch,feiqitian/elasticsearch,dataduke/elasticsearch,scottsom/elasticsearch,naveenhooda2000/elasticsearch,opendatasoft/elasticsearch,hirdesh2008/elasticsearch,ouyangkongtong/elasticsearch,milodky/elasticsearch,ImpressTV/elasticsearch,strapdata/elassandra,rajanm/elasticsearch,MisterAndersen/elasticsearch,combinatorist/elasticsearch,kingaj/elasticsearch,JervyShi/elasticsearch,djschny/elasticsearch,zeroctu/elasticsearch,ydsakyclguozi/elasticsearch,AndreKR/elasticsearch,lks21c/elasticsearch,Uiho/elasticsearch,himanshuag/elasticsearch,milodky/elasticsearch,feiqitian/elasticsearch,TonyChai24/ESSource,kunallimaye/elasticsearch,jpountz/elasticsearch,amaliujia/elasticsearch,apepper/elasticsearch,robin13/elasticsearch,mmaracic/elasticsearch,ydsakyclguozi/elasticsearch,iamjakob/elasticsearch,luiseduardohdbackup/elasticsearch,hafkensite/elasticsearch,apepper/elasticsearch,queirozfcom/elasticsearch,VukDukic/elasticsearch,tsohil/elasticsearch,socialrank/elasticsearch,vingupta3/elasticsearch,hirdesh2008/elasticsearch,sarwarbhuiyan/elasticsearch,schonf
eld/elasticsearch,Ansh90/elasticsearch,hanst/elasticsearch,iantruslove/elasticsearch,rmuir/elasticsearch,wuranbo/elasticsearch,pranavraman/elasticsearch,elasticdog/elasticsearch,wimvds/elasticsearch,geidies/elasticsearch,kimimj/elasticsearch,MisterAndersen/elasticsearch,mbrukman/elasticsearch,elancom/elasticsearch,lightslife/elasticsearch,btiernay/elasticsearch,Fsero/elasticsearch,jw0201/elastic,sarwarbhuiyan/elasticsearch,mm0/elasticsearch,Rygbee/elasticsearch,yongminxia/elasticsearch,martinstuga/elasticsearch,SergVro/elasticsearch,jeteve/elasticsearch,shreejay/elasticsearch,masterweb121/elasticsearch,NBSW/elasticsearch,kalimatas/elasticsearch,zkidkid/elasticsearch,loconsolutions/elasticsearch,obourgain/elasticsearch,lightslife/elasticsearch,HarishAtGitHub/elasticsearch,lydonchandra/elasticsearch,dylan8902/elasticsearch,Liziyao/elasticsearch,jango2015/elasticsearch,himanshuag/elasticsearch,mcku/elasticsearch,ESamir/elasticsearch,ydsakyclguozi/elasticsearch,sneivandt/elasticsearch,hanswang/elasticsearch,mikemccand/elasticsearch,mjhennig/elasticsearch,kalburgimanjunath/elasticsearch,wenpos/elasticsearch,cnfire/elasticsearch-1,ESamir/elasticsearch,easonC/elasticsearch,zhiqinghuang/elasticsearch,wangtuo/elasticsearch,karthikjaps/elasticsearch,pranavraman/elasticsearch,Fsero/elasticsearch,sauravmondallive/elasticsearch,infusionsoft/elasticsearch,mute/elasticsearch,AndreKR/elasticsearch,pranavraman/elasticsearch,masterweb121/elasticsearch,strapdata/elassandra,springning/elasticsearch,Helen-Zhao/elasticsearch,i-am-Nathan/elasticsearch,VukDukic/elasticsearch,sreeramjayan/elasticsearch,glefloch/elasticsearch,jchampion/elasticsearch,henakamaMSFT/elasticsearch,scorpionvicky/elasticsearch,sneivandt/elasticsearch,Helen-Zhao/elasticsearch,kenshin233/elasticsearch,rmuir/elasticsearch,sc0ttkclark/elasticsearch,lightslife/elasticsearch,kalburgimanjunath/elasticsearch,loconsolutions/elasticsearch,dataduke/elasticsearch,ricardocerq/elasticsearch,infusionsoft/elasticsearch,hafkensite/elasticsearch,likaiwalkman/elasticsearch,alexkuk/elasticsearch,clintongormley/elasticsearch,C-Bish/elasticsearch,markwalkom/elasticsearch,martinstuga/elasticsearch,yuy168/elasticsearch,dongjoon-hyun/elasticsearch,hanswang/elasticsearch,HonzaKral/elasticsearch,jango2015/elasticsearch,khiraiwa/elasticsearch,humandb/elasticsearch,glefloch/elasticsearch,gingerwizard/elasticsearch,mute/elasticsearch,Shepard1212/elasticsearch,wayeast/elasticsearch,qwerty4030/elasticsearch,AndreKR/elasticsearch,kalburgimanjunath/elasticsearch,dongjoon-hyun/elasticsearch,yanjunh/elasticsearch,hanswang/elasticsearch,zeroctu/elasticsearch,socialrank/elasticsearch,snikch/elasticsearch,ImpressTV/elasticsearch,hydro2k/elasticsearch,huanzhong/elasticsearch,anti-social/elasticsearch,gmarz/elasticsearch,jchampion/elasticsearch,MaineC/elasticsearch,ThalaivaStars/OrgRepo1,elasticdog/elasticsearch,pablocastro/elasticsearch,himanshuag/elasticsearch,btiernay/elasticsearch,hanst/elasticsearch,gfyoung/elasticsearch,beiske/elasticsearch,vietlq/elasticsearch,ricardocerq/elasticsearch,awislowski/elasticsearch,easonC/elasticsearch,Siddartha07/elasticsearch,AshishThakur/elasticsearch,pranavraman/elasticsearch,Collaborne/elasticsearch,btiernay/elasticsearch,coding0011/elasticsearch,SergVro/elasticsearch,vvcephei/elasticsearch,mgalushka/elasticsearch,combinatorist/elasticsearch,zeroctu/elasticsearch,mjhennig/elasticsearch,cnfire/elasticsearch-1,mapr/elasticsearch,Fsero/elasticsearch,sarwarbhuiyan/elasticsearch,Chhunlong/elasticsearch,ThalaivaStars/OrgRepo1,NBSW/elasticsearch,
weipinghe/elasticsearch,koxa29/elasticsearch,nellicus/elasticsearch,fekaputra/elasticsearch,djschny/elasticsearch,wangyuxue/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,alexshadow007/elasticsearch,karthikjaps/elasticsearch,drewr/elasticsearch,rento19962/elasticsearch,qwerty4030/elasticsearch,slavau/elasticsearch,kubum/elasticsearch,koxa29/elasticsearch,Microsoft/elasticsearch,ZTE-PaaS/elasticsearch,robin13/elasticsearch,wangyuxue/elasticsearch,wbowling/elasticsearch,mgalushka/elasticsearch,davidvgalbraith/elasticsearch,phani546/elasticsearch,yongminxia/elasticsearch,koxa29/elasticsearch,petabytedata/elasticsearch,ivansun1010/elasticsearch,ImpressTV/elasticsearch,liweinan0423/elasticsearch,thecocce/elasticsearch,Widen/elasticsearch,jimczi/elasticsearch,pritishppai/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,yanjunh/elasticsearch,yuy168/elasticsearch,hydro2k/elasticsearch,wittyameta/elasticsearch,anti-social/elasticsearch,tahaemin/elasticsearch,phani546/elasticsearch,kkirsche/elasticsearch,kaneshin/elasticsearch,beiske/elasticsearch,markwalkom/elasticsearch,springning/elasticsearch,hanst/elasticsearch,pritishppai/elasticsearch,Ansh90/elasticsearch,bestwpw/elasticsearch,dylan8902/elasticsearch,jeteve/elasticsearch,masaruh/elasticsearch,henakamaMSFT/elasticsearch,nazarewk/elasticsearch,mortonsykes/elasticsearch,lydonchandra/elasticsearch,fforbeck/elasticsearch,tsohil/elasticsearch,adrianbk/elasticsearch,linglaiyao1314/elasticsearch,mbrukman/elasticsearch,wimvds/elasticsearch,jw0201/elastic,fekaputra/elasticsearch,caengcjd/elasticsearch,kenshin233/elasticsearch,Brijeshrpatel9/elasticsearch,beiske/elasticsearch,uschindler/elasticsearch,winstonewert/elasticsearch,Brijeshrpatel9/elasticsearch,sscarduzio/elasticsearch,LewayneNaidoo/elasticsearch,cnfire/elasticsearch-1,Liziyao/elasticsearch,aglne/elasticsearch,ulkas/elasticsearch,mjason3/elasticsearch,artnowo/elasticsearch,thecocce/elasticsearch,Clairebi/ElasticsearchClone,petabytedata/elasticsearch,lchennup/elasticsearch,fooljohnny/elasticsearch,sreeramjayan/elasticsearch,kingaj/elasticsearch,fekaputra/elasticsearch,himanshuag/elasticsearch,wangtuo/elasticsearch,vrkansagara/elasticsearch,mnylen/elasticsearch,jimhooker2002/elasticsearch,humandb/elasticsearch,maddin2016/elasticsearch,lzo/elasticsearch-1,easonC/elasticsearch,AleksKochev/elasticsearch,LewayneNaidoo/elasticsearch,gfyoung/elasticsearch,mohit/elasticsearch,polyfractal/elasticsearch,springning/elasticsearch,nezirus/elasticsearch,snikch/elasticsearch,yynil/elasticsearch,abibell/elasticsearch,petmit/elasticsearch,golubev/elasticsearch,markwalkom/elasticsearch,kenshin233/elasticsearch,schonfeld/elasticsearch,yongminxia/elasticsearch,phani546/elasticsearch,Widen/elasticsearch,GlenRSmith/elasticsearch,xingguang2013/elasticsearch,dpursehouse/elasticsearch,rajanm/elasticsearch,mjhennig/elasticsearch,jprante/elasticsearch,andrestc/elasticsearch,cnfire/elasticsearch-1,lightslife/elasticsearch,18098924759/elasticsearch,javachengwc/elasticsearch,kalimatas/elasticsearch,apepper/elasticsearch,jeteve/elasticsearch,TonyChai24/ESSource,boliza/elasticsearch,jaynblue/elasticsearch,MaineC/elasticsearch,palecur/elasticsearch,masaruh/elasticsearch,tebriel/elasticsearch,iantruslove/elasticsearch,vvcephei/elasticsearch,vrkansagara/elasticsearch,aglne/elasticsearch,skearns64/elasticsearch,bestwpw/elasticsearch,StefanGor/elasticsearch,AleksKochev/elasticsearch,IanvsPoplicola/elasticsearch,dpursehouse/elasticsearch,Flipkart/elasticsearch,opendatasoft/elasticsearch,sjohnr/elasticsearch,polyfractal/elasti
csearch,easonC/elasticsearch,MjAbuz/elasticsearch,truemped/elasticsearch,fooljohnny/elasticsearch,yuy168/elasticsearch,Rygbee/elasticsearch,heng4fun/elasticsearch,jimhooker2002/elasticsearch,nazarewk/elasticsearch,sneivandt/elasticsearch,micpalmia/elasticsearch,golubev/elasticsearch,NBSW/elasticsearch,Charlesdong/elasticsearch,iacdingping/elasticsearch,acchen97/elasticsearch,masterweb121/elasticsearch,weipinghe/elasticsearch,mikemccand/elasticsearch,wayeast/elasticsearch,nrkkalyan/elasticsearch,MjAbuz/elasticsearch,springning/elasticsearch,C-Bish/elasticsearch,amaliujia/elasticsearch,vietlq/elasticsearch,Microsoft/elasticsearch,nknize/elasticsearch,nknize/elasticsearch,kimimj/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,ThalaivaStars/OrgRepo1,ZTE-PaaS/elasticsearch,acchen97/elasticsearch,Siddartha07/elasticsearch,shreejay/elasticsearch,gmarz/elasticsearch,mcku/elasticsearch,kevinkluge/elasticsearch,linglaiyao1314/elasticsearch,JervyShi/elasticsearch,YosuaMichael/elasticsearch,vroyer/elassandra,petmit/elasticsearch,pablocastro/elasticsearch,andrestc/elasticsearch,strapdata/elassandra,phani546/elasticsearch,yynil/elasticsearch,onegambler/elasticsearch,wittyameta/elasticsearch,dylan8902/elasticsearch,NBSW/elasticsearch,Siddartha07/elasticsearch,gingerwizard/elasticsearch,yynil/elasticsearch,pablocastro/elasticsearch,tsohil/elasticsearch,fekaputra/elasticsearch,Charlesdong/elasticsearch,mohit/elasticsearch,petabytedata/elasticsearch,alexkuk/elasticsearch,codebunt/elasticsearch,kimimj/elasticsearch,alexshadow007/elasticsearch,Ansh90/elasticsearch,YosuaMichael/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,s1monw/elasticsearch,markllama/elasticsearch,djschny/elasticsearch,dataduke/elasticsearch,kcompher/elasticsearch,camilojd/elasticsearch,hanswang/elasticsearch,khiraiwa/elasticsearch,masterweb121/elasticsearch,strapdata/elassandra-test,geidies/elasticsearch,knight1128/elasticsearch,LeoYao/elasticsearch,yongminxia/elasticsearch,boliza/elasticsearch,lzo/elasticsearch-1,hanst/elasticsearch,tcucchietti/elasticsearch,mikemccand/elasticsearch,mohit/elasticsearch,khiraiwa/elasticsearch,kcompher/elasticsearch,springning/elasticsearch,opendatasoft/elasticsearch,andrestc/elasticsearch,rajanm/elasticsearch,Collaborne/elasticsearch,Helen-Zhao/elasticsearch,achow/elasticsearch,vietlq/elasticsearch,elancom/elasticsearch,StefanGor/elasticsearch,amit-shar/elasticsearch,jbertouch/elasticsearch,amit-shar/elasticsearch,spiegela/elasticsearch,kunallimaye/elasticsearch,JervyShi/elasticsearch,HarishAtGitHub/elasticsearch,ThiagoGarciaAlves/elasticsearch,Uiho/elasticsearch,rlugojr/elasticsearch,nazarewk/elasticsearch,Stacey-Gammon/elasticsearch,strapdata/elassandra5-rc,LeoYao/elasticsearch,huypx1292/elasticsearch,achow/elasticsearch,likaiwalkman/elasticsearch,iamjakob/elasticsearch,kcompher/elasticsearch,markharwood/elasticsearch,bestwpw/elasticsearch,jango2015/elasticsearch,spiegela/elasticsearch,trangvh/elasticsearch,AshishThakur/elasticsearch,chrismwendt/elasticsearch,sscarduzio/elasticsearch,bestwpw/elasticsearch,opendatasoft/elasticsearch,codebunt/elasticsearch,shreejay/elasticsearch,KimTaehee/elasticsearch,kalburgimanjunath/elasticsearch,huanzhong/elasticsearch,sdauletau/elasticsearch,Rygbee/elasticsearch,mortonsykes/elasticsearch,iacdingping/elasticsearch,milodky/elasticsearch,smflorentino/elasticsearch,KimTaehee/elasticsearch,palecur/elasticsearch,zhiqinghuang/elasticsearch,knight1128/elasticsearch,StefanGor/elasticsearch,nrkkalyan/elasticsearch,mbrukman/elasticsearch,jsgao0/elasticsearch,robin13/elas
ticsearch,hafkensite/elasticsearch,vietlq/elasticsearch,strapdata/elassandra-test,gingerwizard/elasticsearch,infusionsoft/elasticsearch,thecocce/elasticsearch,wimvds/elasticsearch,Collaborne/elasticsearch,F0lha/elasticsearch,jango2015/elasticsearch,vingupta3/elasticsearch,wimvds/elasticsearch,VukDukic/elasticsearch,wayeast/elasticsearch,andrestc/elasticsearch,PhaedrusTheGreek/elasticsearch,sc0ttkclark/elasticsearch,HonzaKral/elasticsearch,slavau/elasticsearch,koxa29/elasticsearch,overcome/elasticsearch
|
Tools: Add script to grab ES version for BWC tests.
closes #7653
|
# Licensed to Elasticsearch under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on
# an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.

'''
Downloads and extracts elasticsearch for backwards compatibility tests.
'''

import argparse
import os
import platform
import shutil
import subprocess
import urllib.request
import zipfile


def parse_config():
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('--path', metavar='DIR', default='./backwards',
                        help='Where to extract elasticsearch')
    parser.add_argument('--force', action='store_true', default=False,
                        help='Delete and redownload if the version already exists')
    parser.add_argument('version', metavar='X.Y.Z',
                        help='Version of elasticsearch to grab')
    return parser.parse_args()


def main():
    c = parse_config()

    if not os.path.exists(c.path):
        print('Creating %s' % c.path)
        os.mkdir(c.path)

    is_windows = platform.system() == 'Windows'

    os.chdir(c.path)
    version_dir = 'elasticsearch-%s' % c.version
    if os.path.exists(version_dir):
        if c.force:
            print('Removing old download %s' % version_dir)
            shutil.rmtree(version_dir)
        else:
            print('Version %s exists at %s' % (c.version, version_dir))
            return

    # before 1.4.0, the zip file contained the windows scripts and the
    # tar.gz contained the *nix scripts
    if is_windows:
        filename = '%s.zip' % version_dir
    else:
        filename = '%s.tar.gz' % version_dir

    url = 'https://download.elasticsearch.org/elasticsearch/elasticsearch/%s' % filename
    print('Downloading %s' % url)
    urllib.request.urlretrieve(url, filename)

    print('Extracting to %s' % version_dir)
    if is_windows:
        archive = zipfile.ZipFile(filename)
        archive.extractall()
    else:
        # for some reason python's tarfile module has trouble with the ES
        # tgz, so shell out to tar instead
        subprocess.check_call('tar -xzf %s' % filename, shell=True)

    print('Cleaning up %s' % filename)
    os.remove(filename)


if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        print('Ctrl-C caught, exiting')
|
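The script above is a self-contained argparse CLI. As a minimal, hedged sketch of driving it from another Python process — the script filename `get-bwc-version.py` and the version string `1.3.2` are illustrative assumptions, not taken from this record:

    # Usage sketch (assumptions: script path and version are placeholders).
    # Equivalent shell invocation: python get-bwc-version.py --force 1.3.2
    import subprocess
    import sys

    subprocess.check_call([
        sys.executable, 'get-bwc-version.py',  # hypothetical script path
        '--path', './backwards',   # where the archive is extracted
        '--force',                 # re-download even if the version dir exists
        '1.3.2',                   # positional arg: the ES version to grab
    ])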
<commit_before><commit_msg>Tools: Add script to grab ES version for BWC tests.
closes #7653<commit_after>
|
# Licensed to Elasticsearch under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on
# an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.

'''
Downloads and extracts elasticsearch for backwards compatibility tests.
'''

import argparse
import os
import platform
import shutil
import subprocess
import urllib.request
import zipfile


def parse_config():
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('--path', metavar='DIR', default='./backwards',
                        help='Where to extract elasticsearch')
    parser.add_argument('--force', action='store_true', default=False,
                        help='Delete and redownload if the version already exists')
    parser.add_argument('version', metavar='X.Y.Z',
                        help='Version of elasticsearch to grab')
    return parser.parse_args()


def main():
    c = parse_config()

    if not os.path.exists(c.path):
        print('Creating %s' % c.path)
        os.mkdir(c.path)

    is_windows = platform.system() == 'Windows'

    os.chdir(c.path)
    version_dir = 'elasticsearch-%s' % c.version
    if os.path.exists(version_dir):
        if c.force:
            print('Removing old download %s' % version_dir)
            shutil.rmtree(version_dir)
        else:
            print('Version %s exists at %s' % (c.version, version_dir))
            return

    # before 1.4.0, the zip file contained the windows scripts and the
    # tar.gz contained the *nix scripts
    if is_windows:
        filename = '%s.zip' % version_dir
    else:
        filename = '%s.tar.gz' % version_dir

    url = 'https://download.elasticsearch.org/elasticsearch/elasticsearch/%s' % filename
    print('Downloading %s' % url)
    urllib.request.urlretrieve(url, filename)

    print('Extracting to %s' % version_dir)
    if is_windows:
        archive = zipfile.ZipFile(filename)
        archive.extractall()
    else:
        # for some reason python's tarfile module has trouble with the ES
        # tgz, so shell out to tar instead
        subprocess.check_call('tar -xzf %s' % filename, shell=True)

    print('Cleaning up %s' % filename)
    os.remove(filename)


if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        print('Ctrl-C caught, exiting')
|
Tools: Add script to grab ES version for BWC tests.
closes #7653

# Licensed to Elasticsearch under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on
# an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.

'''
Downloads and extracts elasticsearch for backwards compatibility tests.
'''

import argparse
import os
import platform
import shutil
import subprocess
import urllib.request
import zipfile


def parse_config():
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('--path', metavar='DIR', default='./backwards',
                        help='Where to extract elasticsearch')
    parser.add_argument('--force', action='store_true', default=False,
                        help='Delete and redownload if the version already exists')
    parser.add_argument('version', metavar='X.Y.Z',
                        help='Version of elasticsearch to grab')
    return parser.parse_args()


def main():
    c = parse_config()

    if not os.path.exists(c.path):
        print('Creating %s' % c.path)
        os.mkdir(c.path)

    is_windows = platform.system() == 'Windows'

    os.chdir(c.path)
    version_dir = 'elasticsearch-%s' % c.version
    if os.path.exists(version_dir):
        if c.force:
            print('Removing old download %s' % version_dir)
            shutil.rmtree(version_dir)
        else:
            print('Version %s exists at %s' % (c.version, version_dir))
            return

    # before 1.4.0, the zip file contained the windows scripts and the
    # tar.gz contained the *nix scripts
    if is_windows:
        filename = '%s.zip' % version_dir
    else:
        filename = '%s.tar.gz' % version_dir

    url = 'https://download.elasticsearch.org/elasticsearch/elasticsearch/%s' % filename
    print('Downloading %s' % url)
    urllib.request.urlretrieve(url, filename)

    print('Extracting to %s' % version_dir)
    if is_windows:
        archive = zipfile.ZipFile(filename)
        archive.extractall()
    else:
        # for some reason python's tarfile module has trouble with the ES
        # tgz, so shell out to tar instead
        subprocess.check_call('tar -xzf %s' % filename, shell=True)

    print('Cleaning up %s' % filename)
    os.remove(filename)


if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        print('Ctrl-C caught, exiting')
|
<commit_before><commit_msg>Tools: Add script to grab ES version for BWC tests.
closes #7653<commit_after>
# Licensed to Elasticsearch under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on
# an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.

'''
Downloads and extracts elasticsearch for backwards compatibility tests.
'''

import argparse
import os
import platform
import shutil
import subprocess
import urllib.request
import zipfile


def parse_config():
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('--path', metavar='DIR', default='./backwards',
                        help='Where to extract elasticsearch')
    parser.add_argument('--force', action='store_true', default=False,
                        help='Delete and redownload if the version already exists')
    parser.add_argument('version', metavar='X.Y.Z',
                        help='Version of elasticsearch to grab')
    return parser.parse_args()


def main():
    c = parse_config()

    if not os.path.exists(c.path):
        print('Creating %s' % c.path)
        os.mkdir(c.path)

    is_windows = platform.system() == 'Windows'

    os.chdir(c.path)
    version_dir = 'elasticsearch-%s' % c.version
    if os.path.exists(version_dir):
        if c.force:
            print('Removing old download %s' % version_dir)
            shutil.rmtree(version_dir)
        else:
            print('Version %s exists at %s' % (c.version, version_dir))
            return

    # before 1.4.0, the zip file contained the windows scripts and the
    # tar.gz contained the *nix scripts
    if is_windows:
        filename = '%s.zip' % version_dir
    else:
        filename = '%s.tar.gz' % version_dir

    url = 'https://download.elasticsearch.org/elasticsearch/elasticsearch/%s' % filename
    print('Downloading %s' % url)
    urllib.request.urlretrieve(url, filename)

    print('Extracting to %s' % version_dir)
    if is_windows:
        archive = zipfile.ZipFile(filename)
        archive.extractall()
    else:
        # for some reason python's tarfile module has trouble with the ES
        # tgz, so shell out to tar instead
        subprocess.check_call('tar -xzf %s' % filename, shell=True)

    print('Cleaning up %s' % filename)
    os.remove(filename)


if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        print('Ctrl-C caught, exiting')
|
|
d0d10749c7e9bd37c5ce4eed1574390e46914b8f
|
sklearn/utils/tests/test_ransac.py
|
sklearn/utils/tests/test_ransac.py
|
import numpy as np
from numpy.testing import assert_equal

from sklearn import linear_model
from sklearn.utils import ransac


def test_ransac_inliers_outliers():
    np.random.seed(1)

    # Generate coordinates of line
    X = np.arange(-200, 200)
    y = 0.2 * X + 20
    data = np.column_stack([X, y])

    # Add some faulty data
    outliers = np.array((10, 30, 200))
    data[outliers[0], :] = (1000, 1000)
    data[outliers[1], :] = (-1000, -1000)
    data[outliers[2], :] = (-100, -50)

    X = data[:, 0][:, np.newaxis]
    y = data[:, 1]

    # Estimate parameters of corrupted data
    inlier_mask = ransac(X, y, linear_model.LinearRegression(), 3, 5)

    # Ground truth / reference inlier mask
    ref_inlier_mask = np.ones_like(inlier_mask, dtype=np.bool_)
    ref_inlier_mask[outliers] = False

    assert_equal(inlier_mask, ref_inlier_mask)


if __name__ == "__main__":
    np.testing.run_module_suite()
|
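The test above only checks the returned inlier mask. For context, here is a minimal sketch of the kind of routine it exercises, inferred solely from the call `ransac(X, y, linear_model.LinearRegression(), 3, 5)` — i.e. assuming the positional arguments are (estimator, min_samples, residual_threshold). The `max_trials`/`random_state` keywords and the name `ransac_sketch` are assumptions, not the API under review:

    import numpy as np


    def ransac_sketch(X, y, estimator, min_samples, residual_threshold,
                      max_trials=100, random_state=0):
        """Return a boolean inlier mask via a basic RANSAC loop (sketch)."""
        rng = np.random.RandomState(random_state)
        n_samples = X.shape[0]
        best_mask = None
        best_count = 0
        for _ in range(max_trials):
            # 1. fit the estimator on a random minimal subset of samples
            subset = rng.choice(n_samples, min_samples, replace=False)
            estimator.fit(X[subset], y[subset])
            # 2. score all samples; inliers fall within the residual threshold
            residuals = np.abs(y - estimator.predict(X))
            mask = residuals < residual_threshold
            # 3. keep the consensus set with the most inliers
            if mask.sum() > best_count:
                best_count = mask.sum()
                best_mask = mask
        return best_mask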
Add simple RANSAC unit test
|
Add simple RANSAC unit test
|
Python
|
bsd-3-clause
|
xyguo/scikit-learn,imaculate/scikit-learn,walterreade/scikit-learn,ClimbsRocks/scikit-learn,vshtanko/scikit-learn,liangz0707/scikit-learn,sumspr/scikit-learn,DonBeo/scikit-learn,tmhm/scikit-learn,fredhusser/scikit-learn,aetilley/scikit-learn,pypot/scikit-learn,carrillo/scikit-learn,xwolf12/scikit-learn,xiaoxiamii/scikit-learn,madjelan/scikit-learn,OshynSong/scikit-learn,ChanderG/scikit-learn,NelisVerhoef/scikit-learn,RPGOne/scikit-learn,nmayorov/scikit-learn,Adai0808/scikit-learn,JeanKossaifi/scikit-learn,xiaoxiamii/scikit-learn,alexsavio/scikit-learn,zaxtax/scikit-learn,saiwing-yeung/scikit-learn,yask123/scikit-learn,NelisVerhoef/scikit-learn,trungnt13/scikit-learn,jmschrei/scikit-learn,wzbozon/scikit-learn,PatrickOReilly/scikit-learn,IndraVikas/scikit-learn,mugizico/scikit-learn,ishanic/scikit-learn,Obus/scikit-learn,jorge2703/scikit-learn,beepee14/scikit-learn,aflaxman/scikit-learn,lbishal/scikit-learn,frank-tancf/scikit-learn,xzh86/scikit-learn,bnaul/scikit-learn,ogrisel/scikit-learn,yunfeilu/scikit-learn,jaidevd/scikit-learn,aewhatley/scikit-learn,ycaihua/scikit-learn,manashmndl/scikit-learn,hsuantien/scikit-learn,abhishekgahlot/scikit-learn,kagayakidan/scikit-learn,Myasuka/scikit-learn,MechCoder/scikit-learn,loli/sklearn-ensembletrees,krez13/scikit-learn,trungnt13/scikit-learn,mwv/scikit-learn,ltiao/scikit-learn,Obus/scikit-learn,TomDLT/scikit-learn,ogrisel/scikit-learn,arabenjamin/scikit-learn,jereze/scikit-learn,themrmax/scikit-learn,RayMick/scikit-learn,lazywei/scikit-learn,macks22/scikit-learn,jakobworldpeace/scikit-learn,jkarnows/scikit-learn,Titan-C/scikit-learn,mwv/scikit-learn,ycaihua/scikit-learn,sergeyf/scikit-learn,AlexandreAbraham/scikit-learn,yonglehou/scikit-learn,frank-tancf/scikit-learn,imaculate/scikit-learn,0x0all/scikit-learn,khkaminska/scikit-learn,akionakamura/scikit-learn,icdishb/scikit-learn,Clyde-fare/scikit-learn,xyguo/scikit-learn,fabianp/scikit-learn,mxjl620/scikit-learn,ZENGXH/scikit-learn,kylerbrown/scikit-learn,sonnyhu/scikit-learn,sonnyhu/scikit-learn,qifeigit/scikit-learn,fabianp/scikit-learn,0x0all/scikit-learn,sergeyf/scikit-learn,jjx02230808/project0223,krez13/scikit-learn,phdowling/scikit-learn,cwu2011/scikit-learn,RomainBrault/scikit-learn,zhenv5/scikit-learn,shyamalschandra/scikit-learn,mhdella/scikit-learn,appapantula/scikit-learn,krez13/scikit-learn,fzalkow/scikit-learn,khkaminska/scikit-learn,liangz0707/scikit-learn,beepee14/scikit-learn,maheshakya/scikit-learn,dingocuster/scikit-learn,rahul-c1/scikit-learn,hsuantien/scikit-learn,vortex-ape/scikit-learn,ZenDevelopmentSystems/scikit-learn,dsquareindia/scikit-learn,tomlof/scikit-learn,robin-lai/scikit-learn,Aasmi/scikit-learn,poryfly/scikit-learn,kaichogami/scikit-learn,costypetrisor/scikit-learn,andaag/scikit-learn,heli522/scikit-learn,cauchycui/scikit-learn,bnaul/scikit-learn,ycaihua/scikit-learn,mjgrav2001/scikit-learn,jakirkham/scikit-learn,anirudhjayaraman/scikit-learn,jmschrei/scikit-learn,Akshay0724/scikit-learn,r-mart/scikit-learn,nrhine1/scikit-learn,eickenberg/scikit-learn,elkingtonmcb/scikit-learn,IssamLaradji/scikit-learn,jaidevd/scikit-learn,h2educ/scikit-learn,wlamond/scikit-learn,MartinSavc/scikit-learn,robbymeals/scikit-learn,ClimbsRocks/scikit-learn,henrykironde/scikit-learn,justincassidy/scikit-learn,spallavolu/scikit-learn,glennq/scikit-learn,samzhang111/scikit-learn,beepee14/scikit-learn,massmutual/scikit-learn,vshtanko/scikit-learn,Garrett-R/scikit-learn,sarahgrogan/scikit-learn,espg/scikit-learn,kaichogami/scikit-learn,h2educ/scikit-learn,jayflo/scikit-learn,cybernet14/sciki
t-learn,djgagne/scikit-learn,AlexanderFabisch/scikit-learn,eg-zhang/scikit-learn,imaculate/scikit-learn,phdowling/scikit-learn,mattilyra/scikit-learn,ElDeveloper/scikit-learn,hugobowne/scikit-learn,harshaneelhg/scikit-learn,hdmetor/scikit-learn,LohithBlaze/scikit-learn,nomadcube/scikit-learn,mattgiguere/scikit-learn,michigraber/scikit-learn,andaag/scikit-learn,nelson-liu/scikit-learn,spallavolu/scikit-learn,Akshay0724/scikit-learn,raghavrv/scikit-learn,beepee14/scikit-learn,qifeigit/scikit-learn,jmschrei/scikit-learn,arahuja/scikit-learn,ldirer/scikit-learn,AIML/scikit-learn,xiaoxiamii/scikit-learn,UNR-AERIAL/scikit-learn,tawsifkhan/scikit-learn,jlegendary/scikit-learn,MatthieuBizien/scikit-learn,lbishal/scikit-learn,ChanChiChoi/scikit-learn,aabadie/scikit-learn,arabenjamin/scikit-learn,robbymeals/scikit-learn,mojoboss/scikit-learn,RPGOne/scikit-learn,henridwyer/scikit-learn,ky822/scikit-learn,liberatorqjw/scikit-learn,arjoly/scikit-learn,larsmans/scikit-learn,victorbergelin/scikit-learn,jlegendary/scikit-learn,stylianos-kampakis/scikit-learn,JsNoNo/scikit-learn,mjgrav2001/scikit-learn,victorbergelin/scikit-learn,pv/scikit-learn,shangwuhencc/scikit-learn,devanshdalal/scikit-learn,jpautom/scikit-learn,sanketloke/scikit-learn,0asa/scikit-learn,belltailjp/scikit-learn,Adai0808/scikit-learn,RomainBrault/scikit-learn,mojoboss/scikit-learn,joshloyal/scikit-learn,JosmanPS/scikit-learn,xubenben/scikit-learn,mattgiguere/scikit-learn,kagayakidan/scikit-learn,aminert/scikit-learn,vibhorag/scikit-learn,jjx02230808/project0223,olologin/scikit-learn,wanggang3333/scikit-learn,vybstat/scikit-learn,shangwuhencc/scikit-learn,voxlol/scikit-learn,fyffyt/scikit-learn,ChanChiChoi/scikit-learn,yask123/scikit-learn,zuku1985/scikit-learn,mjudsp/Tsallis,MatthieuBizien/scikit-learn,pnedunuri/scikit-learn,pkruskal/scikit-learn,rahuldhote/scikit-learn,potash/scikit-learn,joernhees/scikit-learn,Windy-Ground/scikit-learn,quheng/scikit-learn,mattgiguere/scikit-learn,Srisai85/scikit-learn,luo66/scikit-learn,vybstat/scikit-learn,yask123/scikit-learn,tomlof/scikit-learn,abhishekkrthakur/scikit-learn,yonglehou/scikit-learn,ankurankan/scikit-learn,zorroblue/scikit-learn,davidgbe/scikit-learn,arahuja/scikit-learn,moutai/scikit-learn,dingocuster/scikit-learn,mugizico/scikit-learn,aabadie/scikit-learn,DSLituiev/scikit-learn,yanlend/scikit-learn,jzt5132/scikit-learn,ankurankan/scikit-learn,khkaminska/scikit-learn,lesteve/scikit-learn,appapantula/scikit-learn,glemaitre/scikit-learn,hsuantien/scikit-learn,shikhardb/scikit-learn,alexeyum/scikit-learn,mattilyra/scikit-learn,madjelan/scikit-learn,f3r/scikit-learn,adamgreenhall/scikit-learn,theoryno3/scikit-learn,ilo10/scikit-learn,YinongLong/scikit-learn,idlead/scikit-learn,giorgiop/scikit-learn,Fireblend/scikit-learn,hlin117/scikit-learn,YinongLong/scikit-learn,hrjn/scikit-learn,wlamond/scikit-learn,rexshihaoren/scikit-learn,aflaxman/scikit-learn,lenovor/scikit-learn,robin-lai/scikit-learn,sarahgrogan/scikit-learn,nvoron23/scikit-learn,rishikksh20/scikit-learn,adamgreenhall/scikit-learn,henridwyer/scikit-learn,russel1237/scikit-learn,olologin/scikit-learn,zuku1985/scikit-learn,Clyde-fare/scikit-learn,Myasuka/scikit-learn,deepesch/scikit-learn,bthirion/scikit-learn,mblondel/scikit-learn,yonglehou/scikit-learn,xwolf12/scikit-learn,NunoEdgarGub1/scikit-learn,hlin117/scikit-learn,glouppe/scikit-learn,procoder317/scikit-learn,alvarofierroclavero/scikit-learn,dsquareindia/scikit-learn,djgagne/scikit-learn,robbymeals/scikit-learn,phdowling/scikit-learn,mayblue9/scikit-learn,pkruskal/scikit-l
earn,Akshay0724/scikit-learn,ngoix/OCRF,shikhardb/scikit-learn,RachitKansal/scikit-learn,LohithBlaze/scikit-learn,henrykironde/scikit-learn,potash/scikit-learn,toastedcornflakes/scikit-learn,nesterione/scikit-learn,petosegan/scikit-learn,Aasmi/scikit-learn,untom/scikit-learn,robin-lai/scikit-learn,xuewei4d/scikit-learn,xuewei4d/scikit-learn,liberatorqjw/scikit-learn,AnasGhrab/scikit-learn,scikit-learn/scikit-learn,victorbergelin/scikit-learn,scikit-learn/scikit-learn,Titan-C/scikit-learn,rvraghav93/scikit-learn,r-mart/scikit-learn,dingocuster/scikit-learn,jorge2703/scikit-learn,anurag313/scikit-learn,RomainBrault/scikit-learn,hugobowne/scikit-learn,0asa/scikit-learn,pianomania/scikit-learn,PatrickChrist/scikit-learn,jorge2703/scikit-learn,maheshakya/scikit-learn,fredhusser/scikit-learn,kashif/scikit-learn,alvarofierroclavero/scikit-learn,nelson-liu/scikit-learn,MartinDelzant/scikit-learn,Myasuka/scikit-learn,glemaitre/scikit-learn,luo66/scikit-learn,manashmndl/scikit-learn,NelisVerhoef/scikit-learn,zorojean/scikit-learn,MechCoder/scikit-learn,larsmans/scikit-learn,jmschrei/scikit-learn,olologin/scikit-learn,PatrickOReilly/scikit-learn,LiaoPan/scikit-learn,Jimmy-Morzaria/scikit-learn,jzt5132/scikit-learn,LiaoPan/scikit-learn,poryfly/scikit-learn,frank-tancf/scikit-learn,jorik041/scikit-learn,pkruskal/scikit-learn,jereze/scikit-learn,mxjl620/scikit-learn,PrashntS/scikit-learn,rishikksh20/scikit-learn,ankurankan/scikit-learn,alvarofierroclavero/scikit-learn,amueller/scikit-learn,altairpearl/scikit-learn,samzhang111/scikit-learn,ogrisel/scikit-learn,nikitasingh981/scikit-learn,hitszxp/scikit-learn,tmhm/scikit-learn,466152112/scikit-learn,siutanwong/scikit-learn,samuel1208/scikit-learn,roxyboy/scikit-learn,plissonf/scikit-learn,thientu/scikit-learn,abimannans/scikit-learn,pythonvietnam/scikit-learn,0asa/scikit-learn,hitszxp/scikit-learn,mugizico/scikit-learn,jorik041/scikit-learn,RayMick/scikit-learn,evgchz/scikit-learn,waterponey/scikit-learn,belltailjp/scikit-learn,shangwuhencc/scikit-learn,xuewei4d/scikit-learn,kylerbrown/scikit-learn,Garrett-R/scikit-learn,rrohan/scikit-learn,tawsifkhan/scikit-learn,heli522/scikit-learn,vigilv/scikit-learn,quheng/scikit-learn,potash/scikit-learn,MohammedWasim/scikit-learn,elkingtonmcb/scikit-learn,pratapvardhan/scikit-learn,ningchi/scikit-learn,jseabold/scikit-learn,vshtanko/scikit-learn,jlegendary/scikit-learn,manashmndl/scikit-learn,mblondel/scikit-learn,mjudsp/Tsallis,bthirion/scikit-learn,jmetzen/scikit-learn,anurag313/scikit-learn,mhue/scikit-learn,LohithBlaze/scikit-learn,ogrisel/scikit-learn,MohammedWasim/scikit-learn,voxlol/scikit-learn,NelisVerhoef/scikit-learn,ishanic/scikit-learn,heli522/scikit-learn,Fireblend/scikit-learn,lbishal/scikit-learn,manhhomienbienthuy/scikit-learn,CVML/scikit-learn,vigilv/scikit-learn,mwv/scikit-learn,michigraber/scikit-learn,zaxtax/scikit-learn,henrykironde/scikit-learn,tmhm/scikit-learn,rohanp/scikit-learn,ChanderG/scikit-learn,joernhees/scikit-learn,IshankGulati/scikit-learn,raghavrv/scikit-learn,Garrett-R/scikit-learn,xubenben/scikit-learn,mfjb/scikit-learn,arabenjamin/scikit-learn,schets/scikit-learn,Obus/scikit-learn,massmutual/scikit-learn,huzq/scikit-learn,massmutual/scikit-learn,fyffyt/scikit-learn,huobaowangxi/scikit-learn,wzbozon/scikit-learn,cl4rke/scikit-learn,vinayak-mehta/scikit-learn,MohammedWasim/scikit-learn,tdhopper/scikit-learn,rajat1994/scikit-learn,henrykironde/scikit-learn,TomDLT/scikit-learn,AnasGhrab/scikit-learn,LiaoPan/scikit-learn,0asa/scikit-learn,zorojean/scikit-learn,PrashntS/scikit-learn,
pv/scikit-learn,Aasmi/scikit-learn,glouppe/scikit-learn,xzh86/scikit-learn,chrsrds/scikit-learn,loli/sklearn-ensembletrees,abhishekgahlot/scikit-learn,anntzer/scikit-learn,jlegendary/scikit-learn,glouppe/scikit-learn,Barmaley-exe/scikit-learn,jkarnows/scikit-learn,MartinDelzant/scikit-learn,moutai/scikit-learn,cybernet14/scikit-learn,vigilv/scikit-learn,amueller/scikit-learn,xiaoxiamii/scikit-learn,zhenv5/scikit-learn,sonnyhu/scikit-learn,trankmichael/scikit-learn,voxlol/scikit-learn,sumspr/scikit-learn,f3r/scikit-learn,mhdella/scikit-learn,theoryno3/scikit-learn,pnedunuri/scikit-learn,mxjl620/scikit-learn,btabibian/scikit-learn,Adai0808/scikit-learn,Djabbz/scikit-learn,MechCoder/scikit-learn,DSLituiev/scikit-learn,rahuldhote/scikit-learn,eickenberg/scikit-learn,zhenv5/scikit-learn,terkkila/scikit-learn,cauchycui/scikit-learn,clemkoa/scikit-learn,hainm/scikit-learn,abimannans/scikit-learn,arjoly/scikit-learn,imaculate/scikit-learn,shenzebang/scikit-learn,ndingwall/scikit-learn,tawsifkhan/scikit-learn,nhejazi/scikit-learn,larsmans/scikit-learn,mhdella/scikit-learn,mehdidc/scikit-learn,AlexanderFabisch/scikit-learn,rvraghav93/scikit-learn,JPFrancoia/scikit-learn,aetilley/scikit-learn,TomDLT/scikit-learn,dhruv13J/scikit-learn,fzalkow/scikit-learn,gclenaghan/scikit-learn,eg-zhang/scikit-learn,costypetrisor/scikit-learn,scikit-learn/scikit-learn,giorgiop/scikit-learn,shyamalschandra/scikit-learn,jakobworldpeace/scikit-learn,ankurankan/scikit-learn,ycaihua/scikit-learn,nikitasingh981/scikit-learn,icdishb/scikit-learn,fengzhyuan/scikit-learn,ephes/scikit-learn,nikitasingh981/scikit-learn,xubenben/scikit-learn,DSLituiev/scikit-learn,hitszxp/scikit-learn,abhishekgahlot/scikit-learn,shusenl/scikit-learn,btabibian/scikit-learn,theoryno3/scikit-learn,mehdidc/scikit-learn,mlyundin/scikit-learn,ltiao/scikit-learn,mjudsp/Tsallis,betatim/scikit-learn,equialgo/scikit-learn,ngoix/OCRF,davidgbe/scikit-learn,ClimbsRocks/scikit-learn,roxyboy/scikit-learn,shenzebang/scikit-learn,ssaeger/scikit-learn,walterreade/scikit-learn,ilyes14/scikit-learn,jm-begon/scikit-learn,BiaDarkia/scikit-learn,nmayorov/scikit-learn,lbishal/scikit-learn,altairpearl/scikit-learn,kagayakidan/scikit-learn,kaichogami/scikit-learn,sanketloke/scikit-learn,lin-credible/scikit-learn,xuewei4d/scikit-learn,IndraVikas/scikit-learn,dsquareindia/scikit-learn,yyjiang/scikit-learn,bhargav/scikit-learn,zorroblue/scikit-learn,mlyundin/scikit-learn,huobaowangxi/scikit-learn,AIML/scikit-learn,marcocaccin/scikit-learn,Windy-Ground/scikit-learn,JPFrancoia/scikit-learn,sergeyf/scikit-learn,costypetrisor/scikit-learn,mjgrav2001/scikit-learn,ky822/scikit-learn,bikong2/scikit-learn,UNR-AERIAL/scikit-learn,espg/scikit-learn,costypetrisor/scikit-learn,jaidevd/scikit-learn,etkirsch/scikit-learn,Titan-C/scikit-learn,MohammedWasim/scikit-learn,untom/scikit-learn,hdmetor/scikit-learn,JeanKossaifi/scikit-learn,bhargav/scikit-learn,jseabold/scikit-learn,theoryno3/scikit-learn,xwolf12/scikit-learn,Clyde-fare/scikit-learn,jseabold/scikit-learn,rexshihaoren/scikit-learn,idlead/scikit-learn,manashmndl/scikit-learn,Jimmy-Morzaria/scikit-learn,fbagirov/scikit-learn,IshankGulati/scikit-learn,phdowling/scikit-learn,russel1237/scikit-learn,mjudsp/Tsallis,bthirion/scikit-learn,clemkoa/scikit-learn,manhhomienbienthuy/scikit-learn,nomadcube/scikit-learn,anntzer/scikit-learn,harshaneelhg/scikit-learn,ChanChiChoi/scikit-learn,pythonvietnam/scikit-learn,ZenDevelopmentSystems/scikit-learn,xzh86/scikit-learn,roxyboy/scikit-learn,MartinDelzant/scikit-learn,h2educ/scikit-learn,altairpe
arl/scikit-learn,vivekmishra1991/scikit-learn,ssaeger/scikit-learn,ltiao/scikit-learn,gclenaghan/scikit-learn,robbymeals/scikit-learn,walterreade/scikit-learn,ilyes14/scikit-learn,ChanderG/scikit-learn,xwolf12/scikit-learn,wzbozon/scikit-learn,kashif/scikit-learn,ishanic/scikit-learn,JeanKossaifi/scikit-learn,vortex-ape/scikit-learn,rohanp/scikit-learn,kjung/scikit-learn,yunfeilu/scikit-learn,yyjiang/scikit-learn,ngoix/OCRF,tosolveit/scikit-learn,nrhine1/scikit-learn,cainiaocome/scikit-learn,ssaeger/scikit-learn,mfjb/scikit-learn,moutai/scikit-learn,wlamond/scikit-learn,shenzebang/scikit-learn,cwu2011/scikit-learn,wanggang3333/scikit-learn,treycausey/scikit-learn,ZENGXH/scikit-learn,gclenaghan/scikit-learn,mojoboss/scikit-learn,quheng/scikit-learn,tdhopper/scikit-learn,Barmaley-exe/scikit-learn,fabianp/scikit-learn,Nyker510/scikit-learn,michigraber/scikit-learn,eickenberg/scikit-learn,iismd17/scikit-learn,vibhorag/scikit-learn,jpautom/scikit-learn,vybstat/scikit-learn,Garrett-R/scikit-learn,PrashntS/scikit-learn,samzhang111/scikit-learn,chrsrds/scikit-learn,Barmaley-exe/scikit-learn,jayflo/scikit-learn,olologin/scikit-learn,billy-inn/scikit-learn,davidgbe/scikit-learn,alexsavio/scikit-learn,kjung/scikit-learn,luo66/scikit-learn,zhenv5/scikit-learn,CforED/Machine-Learning,jblackburne/scikit-learn,macks22/scikit-learn,liyu1990/sklearn,sumspr/scikit-learn,zaxtax/scikit-learn,alexsavio/scikit-learn,hugobowne/scikit-learn,BiaDarkia/scikit-learn,ashhher3/scikit-learn,mattilyra/scikit-learn,mayblue9/scikit-learn,q1ang/scikit-learn,aetilley/scikit-learn,JPFrancoia/scikit-learn,dsullivan7/scikit-learn,raghavrv/scikit-learn,zihua/scikit-learn,dsullivan7/scikit-learn,spallavolu/scikit-learn,bigdataelephants/scikit-learn,Windy-Ground/scikit-learn,chrisburr/scikit-learn,toastedcornflakes/scikit-learn,RayMick/scikit-learn,ahoyosid/scikit-learn,ilo10/scikit-learn,tawsifkhan/scikit-learn,cl4rke/scikit-learn,jakirkham/scikit-learn,3manuek/scikit-learn,jereze/scikit-learn,fbagirov/scikit-learn,vinayak-mehta/scikit-learn,ngoix/OCRF,murali-munna/scikit-learn,ZENGXH/scikit-learn,loli/semisupervisedforests,aewhatley/scikit-learn,Achuth17/scikit-learn,ltiao/scikit-learn,hrjn/scikit-learn,gotomypc/scikit-learn,3manuek/scikit-learn,jblackburne/scikit-learn,liyu1990/sklearn,MartinDelzant/scikit-learn,trungnt13/scikit-learn,trankmichael/scikit-learn,eg-zhang/scikit-learn,thientu/scikit-learn,Titan-C/scikit-learn,rohanp/scikit-learn,ilyes14/scikit-learn,Nyker510/scikit-learn,arjoly/scikit-learn,cl4rke/scikit-learn,anirudhjayaraman/scikit-learn,IssamLaradji/scikit-learn,vermouthmjl/scikit-learn,Achuth17/scikit-learn,NunoEdgarGub1/scikit-learn,liberatorqjw/scikit-learn,voxlol/scikit-learn,MatthieuBizien/scikit-learn,r-mart/scikit-learn,JosmanPS/scikit-learn,bhargav/scikit-learn,pianomania/scikit-learn,henridwyer/scikit-learn,maheshakya/scikit-learn,arahuja/scikit-learn,smartscheduling/scikit-learn-categorical-tree,rvraghav93/scikit-learn,JsNoNo/scikit-learn,zihua/scikit-learn,aminert/scikit-learn,icdishb/scikit-learn,zorroblue/scikit-learn,chrisburr/scikit-learn,simon-pepin/scikit-learn,elkingtonmcb/scikit-learn,iismd17/scikit-learn,petosegan/scikit-learn,equialgo/scikit-learn,ssaeger/scikit-learn,DonBeo/scikit-learn,andrewnc/scikit-learn,shangwuhencc/scikit-learn,themrmax/scikit-learn,CforED/Machine-Learning,aewhatley/scikit-learn,gclenaghan/scikit-learn,DonBeo/scikit-learn,ky822/scikit-learn,scikit-learn/scikit-learn,hitszxp/scikit-learn,jorge2703/scikit-learn,jm-begon/scikit-learn,MatthieuBizien/scikit-learn,xyguo/sc
ikit-learn,nomadcube/scikit-learn,Aasmi/scikit-learn,lenovor/scikit-learn,trungnt13/scikit-learn,bikong2/scikit-learn,vortex-ape/scikit-learn,andrewnc/scikit-learn,ankurankan/scikit-learn,vivekmishra1991/scikit-learn,lesteve/scikit-learn,pompiduskus/scikit-learn,HolgerPeters/scikit-learn,q1ang/scikit-learn,hsiaoyi0504/scikit-learn,wzbozon/scikit-learn,ngoix/OCRF,rahul-c1/scikit-learn,Vimos/scikit-learn,anntzer/scikit-learn,glemaitre/scikit-learn,OshynSong/scikit-learn,lazywei/scikit-learn,kevin-intel/scikit-learn,liangz0707/scikit-learn,shenzebang/scikit-learn,justincassidy/scikit-learn,shikhardb/scikit-learn,meduz/scikit-learn,jkarnows/scikit-learn,shahankhatch/scikit-learn,robin-lai/scikit-learn,shyamalschandra/scikit-learn,sanketloke/scikit-learn,Akshay0724/scikit-learn,yonglehou/scikit-learn,simon-pepin/scikit-learn,ndingwall/scikit-learn,giorgiop/scikit-learn,xzh86/scikit-learn,huzq/scikit-learn,fabioticconi/scikit-learn,schets/scikit-learn,thilbern/scikit-learn,dingocuster/scikit-learn,jmetzen/scikit-learn,pypot/scikit-learn,xavierwu/scikit-learn,justincassidy/scikit-learn,TomDLT/scikit-learn,iismd17/scikit-learn,Srisai85/scikit-learn,andrewnc/scikit-learn,yanlend/scikit-learn,glouppe/scikit-learn,loli/sklearn-ensembletrees,devanshdalal/scikit-learn,rrohan/scikit-learn,zaxtax/scikit-learn,chrsrds/scikit-learn,ZenDevelopmentSystems/scikit-learn,ldirer/scikit-learn,luo66/scikit-learn,ChanderG/scikit-learn,ivannz/scikit-learn,poryfly/scikit-learn,kashif/scikit-learn,pkruskal/scikit-learn,vybstat/scikit-learn,fabianp/scikit-learn,jzt5132/scikit-learn,sarahgrogan/scikit-learn,amueller/scikit-learn,JosmanPS/scikit-learn,IssamLaradji/scikit-learn,jm-begon/scikit-learn,procoder317/scikit-learn,trankmichael/scikit-learn,zorojean/scikit-learn,abhishekkrthakur/scikit-learn,zorojean/scikit-learn,Myasuka/scikit-learn,saiwing-yeung/scikit-learn,tmhm/scikit-learn,AnasGhrab/scikit-learn,466152112/scikit-learn,russel1237/scikit-learn,dhruv13J/scikit-learn,cainiaocome/scikit-learn,xyguo/scikit-learn,victorbergelin/scikit-learn,Fireblend/scikit-learn,macks22/scikit-learn,tomlof/scikit-learn,jayflo/scikit-learn,jayflo/scikit-learn,Lawrence-Liu/scikit-learn,RachitKansal/scikit-learn,bhargav/scikit-learn,pythonvietnam/scikit-learn,pratapvardhan/scikit-learn,meduz/scikit-learn,treycausey/scikit-learn,equialgo/scikit-learn,murali-munna/scikit-learn,nvoron23/scikit-learn,jereze/scikit-learn,appapantula/scikit-learn,sonnyhu/scikit-learn,evgchz/scikit-learn,jakirkham/scikit-learn,aabadie/scikit-learn,joernhees/scikit-learn,hsuantien/scikit-learn,siutanwong/scikit-learn,ilo10/scikit-learn,JsNoNo/scikit-learn,wazeerzulfikar/scikit-learn,ishanic/scikit-learn,rrohan/scikit-learn,AIML/scikit-learn,hrjn/scikit-learn,hainm/scikit-learn,alexeyum/scikit-learn,lazywei/scikit-learn,ivannz/scikit-learn,PatrickChrist/scikit-learn,h2educ/scikit-learn,bthirion/scikit-learn,cl4rke/scikit-learn,ngoix/OCRF,abhishekgahlot/scikit-learn,schets/scikit-learn,shusenl/scikit-learn,cybernet14/scikit-learn,terkkila/scikit-learn,vinayak-mehta/scikit-learn,PatrickChrist/scikit-learn,fredhusser/scikit-learn,lenovor/scikit-learn,cauchycui/scikit-learn,elkingtonmcb/scikit-learn,treycausey/scikit-learn,yanlend/scikit-learn,AlexandreAbraham/scikit-learn,aabadie/scikit-learn,abimannans/scikit-learn,Sentient07/scikit-learn,ivannz/scikit-learn,jpautom/scikit-learn,CforED/Machine-Learning,ndingwall/scikit-learn,ElDeveloper/scikit-learn,andrewnc/scikit-learn,mxjl620/scikit-learn,alexeyum/scikit-learn,loli/semisupervisedforests,anirudhjayaraman/sciki
t-learn,Lawrence-Liu/scikit-learn,ashhher3/scikit-learn,nelson-liu/scikit-learn,procoder317/scikit-learn,fengzhyuan/scikit-learn,fyffyt/scikit-learn,HolgerPeters/scikit-learn,fredhusser/scikit-learn,abhishekkrthakur/scikit-learn,BiaDarkia/scikit-learn,sarahgrogan/scikit-learn,vigilv/scikit-learn,aminert/scikit-learn,samuel1208/scikit-learn,petosegan/scikit-learn,pypot/scikit-learn,kaichogami/scikit-learn,AnasGhrab/scikit-learn,devanshdalal/scikit-learn,Jimmy-Morzaria/scikit-learn,shikhardb/scikit-learn,lenovor/scikit-learn,nhejazi/scikit-learn,raghavrv/scikit-learn,ClimbsRocks/scikit-learn,cwu2011/scikit-learn,glemaitre/scikit-learn,espg/scikit-learn,quheng/scikit-learn,Nyker510/scikit-learn,anurag313/scikit-learn,abhishekgahlot/scikit-learn,treycausey/scikit-learn,etkirsch/scikit-learn,moutai/scikit-learn,UNR-AERIAL/scikit-learn,icdishb/scikit-learn,fzalkow/scikit-learn,anntzer/scikit-learn,altairpearl/scikit-learn,bnaul/scikit-learn,gotomypc/scikit-learn,yyjiang/scikit-learn,rahuldhote/scikit-learn,billy-inn/scikit-learn,devanshdalal/scikit-learn,CVML/scikit-learn,CforED/Machine-Learning,nikitasingh981/scikit-learn,alvarofierroclavero/scikit-learn,Djabbz/scikit-learn,0x0all/scikit-learn,JPFrancoia/scikit-learn,Vimos/scikit-learn,marcocaccin/scikit-learn,qifeigit/scikit-learn,ephes/scikit-learn,hsiaoyi0504/scikit-learn,zuku1985/scikit-learn,yyjiang/scikit-learn,HolgerPeters/scikit-learn,vivekmishra1991/scikit-learn,NunoEdgarGub1/scikit-learn,belltailjp/scikit-learn,madjelan/scikit-learn,OshynSong/scikit-learn,frank-tancf/scikit-learn,adamgreenhall/scikit-learn,LiaoPan/scikit-learn,jaidevd/scikit-learn,Djabbz/scikit-learn,gotomypc/scikit-learn,bigdataelephants/scikit-learn,kevin-intel/scikit-learn,466152112/scikit-learn,mjudsp/Tsallis,sergeyf/scikit-learn,nesterione/scikit-learn,Windy-Ground/scikit-learn,nhejazi/scikit-learn,Djabbz/scikit-learn,f3r/scikit-learn,hitszxp/scikit-learn,shahankhatch/scikit-learn,harshaneelhg/scikit-learn,simon-pepin/scikit-learn,rishikksh20/scikit-learn,mwv/scikit-learn,jjx02230808/project0223,Srisai85/scikit-learn,AlexandreAbraham/scikit-learn,rishikksh20/scikit-learn,wanggang3333/scikit-learn,tosolveit/scikit-learn,marcocaccin/scikit-learn,dsquareindia/scikit-learn,spallavolu/scikit-learn,arabenjamin/scikit-learn,herilalaina/scikit-learn,equialgo/scikit-learn,zihua/scikit-learn,IndraVikas/scikit-learn,plissonf/scikit-learn,samuel1208/scikit-learn,herilalaina/scikit-learn,jakobworldpeace/scikit-learn,mblondel/scikit-learn,shahankhatch/scikit-learn,clemkoa/scikit-learn,mlyundin/scikit-learn,potash/scikit-learn,mattilyra/scikit-learn,anirudhjayaraman/scikit-learn,xubenben/scikit-learn,idlead/scikit-learn,shusenl/scikit-learn,mfjb/scikit-learn,michigraber/scikit-learn,yunfeilu/scikit-learn,petosegan/scikit-learn,sinhrks/scikit-learn,Sentient07/scikit-learn,AlexanderFabisch/scikit-learn,akionakamura/scikit-learn,appapantula/scikit-learn,saiwing-yeung/scikit-learn,nvoron23/scikit-learn,mikebenfield/scikit-learn,MartinSavc/scikit-learn,harshaneelhg/scikit-learn,glennq/scikit-learn,jblackburne/scikit-learn,ElDeveloper/scikit-learn,q1ang/scikit-learn,MartinSavc/scikit-learn,etkirsch/scikit-learn,fengzhyuan/scikit-learn,liangz0707/scikit-learn,q1ang/scikit-learn,billy-inn/scikit-learn,vibhorag/scikit-learn,clemkoa/scikit-learn,hsiaoyi0504/scikit-learn,adamgreenhall/scikit-learn,stylianos-kampakis/scikit-learn,toastedcornflakes/scikit-learn,PatrickOReilly/scikit-learn,jakobworldpeace/scikit-learn,r-mart/scikit-learn,cainiaocome/scikit-learn,jzt5132/scikit-learn,toastedco
rnflakes/scikit-learn,AIML/scikit-learn,schets/scikit-learn,thientu/scikit-learn,samzhang111/scikit-learn,anurag313/scikit-learn,chrisburr/scikit-learn,plissonf/scikit-learn,hugobowne/scikit-learn,rohanp/scikit-learn,themrmax/scikit-learn,lin-credible/scikit-learn,mfjb/scikit-learn,loli/sklearn-ensembletrees,CVML/scikit-learn,wazeerzulfikar/scikit-learn,mehdidc/scikit-learn,poryfly/scikit-learn,lin-credible/scikit-learn,pianomania/scikit-learn,Nyker510/scikit-learn,kevin-intel/scikit-learn,vortex-ape/scikit-learn,ningchi/scikit-learn,aflaxman/scikit-learn,DSLituiev/scikit-learn,loli/semisupervisedforests,vivekmishra1991/scikit-learn,eg-zhang/scikit-learn,ldirer/scikit-learn,treycausey/scikit-learn,Clyde-fare/scikit-learn,PatrickChrist/scikit-learn,kagayakidan/scikit-learn,alexsavio/scikit-learn,khkaminska/scikit-learn,siutanwong/scikit-learn,maheshakya/scikit-learn,ephes/scikit-learn,rahul-c1/scikit-learn,Barmaley-exe/scikit-learn,murali-munna/scikit-learn,belltailjp/scikit-learn,sinhrks/scikit-learn,ilyes14/scikit-learn,thilbern/scikit-learn,lesteve/scikit-learn,JeanKossaifi/scikit-learn,davidgbe/scikit-learn,RayMick/scikit-learn,nhejazi/scikit-learn,pv/scikit-learn,russel1237/scikit-learn,xavierwu/scikit-learn,smartscheduling/scikit-learn-categorical-tree,andaag/scikit-learn,mayblue9/scikit-learn,pratapvardhan/scikit-learn,PatrickOReilly/scikit-learn,manhhomienbienthuy/scikit-learn,pnedunuri/scikit-learn,sinhrks/scikit-learn,thientu/scikit-learn,AlexanderFabisch/scikit-learn,thilbern/scikit-learn,OshynSong/scikit-learn,ZenDevelopmentSystems/scikit-learn,mojoboss/scikit-learn,HolgerPeters/scikit-learn,billy-inn/scikit-learn,wazeerzulfikar/scikit-learn,YinongLong/scikit-learn,pratapvardhan/scikit-learn,IssamLaradji/scikit-learn,bikong2/scikit-learn,zorroblue/scikit-learn,tomlof/scikit-learn,idlead/scikit-learn,rahul-c1/scikit-learn,loli/semisupervisedforests,pompiduskus/scikit-learn,YinongLong/scikit-learn,fyffyt/scikit-learn,liyu1990/sklearn,betatim/scikit-learn,ashhher3/scikit-learn,aewhatley/scikit-learn,ChanChiChoi/scikit-learn,arahuja/scikit-learn,RomainBrault/scikit-learn,Vimos/scikit-learn,abimannans/scikit-learn,ahoyosid/scikit-learn,mayblue9/scikit-learn,rvraghav93/scikit-learn,liyu1990/sklearn,466152112/scikit-learn,etkirsch/scikit-learn,cauchycui/scikit-learn,meduz/scikit-learn,simon-pepin/scikit-learn,hainm/scikit-learn,dhruv13J/scikit-learn,espg/scikit-learn,madjelan/scikit-learn,AlexandreAbraham/scikit-learn,Obus/scikit-learn,qifeigit/scikit-learn,sumspr/scikit-learn,sinhrks/scikit-learn,ahoyosid/scikit-learn,JosmanPS/scikit-learn,RachitKansal/scikit-learn,nesterione/scikit-learn,zihua/scikit-learn,jmetzen/scikit-learn,nesterione/scikit-learn,procoder317/scikit-learn,bigdataelephants/scikit-learn,deepesch/scikit-learn,pypot/scikit-learn,AlexRobson/scikit-learn,rajat1994/scikit-learn,fabioticconi/scikit-learn,djgagne/scikit-learn,jblackburne/scikit-learn,waterponey/scikit-learn,samuel1208/scikit-learn,tdhopper/scikit-learn,vermouthmjl/scikit-learn,chrisburr/scikit-learn,jorik041/scikit-learn,Achuth17/scikit-learn,jpautom/scikit-learn,tdhopper/scikit-learn,heli522/scikit-learn,RachitKansal/scikit-learn,jm-begon/scikit-learn,jmetzen/scikit-learn,0asa/scikit-learn,ivannz/scikit-learn,3manuek/scikit-learn,UNR-AERIAL/scikit-learn,MechCoder/scikit-learn,rajat1994/scikit-learn,DonBeo/scikit-learn,stylianos-kampakis/scikit-learn,smartscheduling/scikit-learn-categorical-tree,mattilyra/scikit-learn,RPGOne/scikit-learn,Lawrence-Liu/scikit-learn,vermouthmjl/scikit-learn,betatim/scikit-lear
n,iismd17/scikit-learn,Srisai85/scikit-learn,nelson-liu/scikit-learn,IshankGulati/scikit-learn,carrillo/scikit-learn,vibhorag/scikit-learn,dsullivan7/scikit-learn,wlamond/scikit-learn,jorik041/scikit-learn,jjx02230808/project0223,nvoron23/scikit-learn,pnedunuri/scikit-learn,herilalaina/scikit-learn,cainiaocome/scikit-learn,siutanwong/scikit-learn,deepesch/scikit-learn,larsmans/scikit-learn,hdmetor/scikit-learn,jseabold/scikit-learn,tosolveit/scikit-learn,shyamalschandra/scikit-learn,hdmetor/scikit-learn,untom/scikit-learn,rajat1994/scikit-learn,huzq/scikit-learn,nmayorov/scikit-learn,hlin117/scikit-learn,ephes/scikit-learn,lin-credible/scikit-learn,pianomania/scikit-learn,deepesch/scikit-learn,trankmichael/scikit-learn,yanlend/scikit-learn,cwu2011/scikit-learn,lazywei/scikit-learn,andaag/scikit-learn,LohithBlaze/scikit-learn,hlin117/scikit-learn,liberatorqjw/scikit-learn,roxyboy/scikit-learn,aflaxman/scikit-learn,glennq/scikit-learn,pompiduskus/scikit-learn,mhue/scikit-learn,Jimmy-Morzaria/scikit-learn,sanketloke/scikit-learn,0x0all/scikit-learn,kylerbrown/scikit-learn,mugizico/scikit-learn,vshtanko/scikit-learn,nmayorov/scikit-learn,Vimos/scikit-learn,betatim/scikit-learn,AlexRobson/scikit-learn,Sentient07/scikit-learn,ndingwall/scikit-learn,CVML/scikit-learn,ElDeveloper/scikit-learn,Lawrence-Liu/scikit-learn,mhue/scikit-learn,joshloyal/scikit-learn,shusenl/scikit-learn,Sentient07/scikit-learn,mikebenfield/scikit-learn,themrmax/scikit-learn,MartinSavc/scikit-learn,plissonf/scikit-learn,saiwing-yeung/scikit-learn,gotomypc/scikit-learn,bigdataelephants/scikit-learn,dsullivan7/scikit-learn,alexeyum/scikit-learn,mikebenfield/scikit-learn,cybernet14/scikit-learn,wazeerzulfikar/scikit-learn,glennq/scikit-learn,wanggang3333/scikit-learn,carrillo/scikit-learn,3manuek/scikit-learn,pythonvietnam/scikit-learn,pv/scikit-learn,nrhine1/scikit-learn,krez13/scikit-learn,henridwyer/scikit-learn,larsmans/scikit-learn,meduz/scikit-learn,AlexRobson/scikit-learn,lesteve/scikit-learn,waterponey/scikit-learn,aminert/scikit-learn,ldirer/scikit-learn,aetilley/scikit-learn,vinayak-mehta/scikit-learn,bnaul/scikit-learn,fabioticconi/scikit-learn,rexshihaoren/scikit-learn,massmutual/scikit-learn,bikong2/scikit-learn,ky822/scikit-learn,ilo10/scikit-learn,ningchi/scikit-learn,fbagirov/scikit-learn,smartscheduling/scikit-learn-categorical-tree,mlyundin/scikit-learn,zuku1985/scikit-learn,manhhomienbienthuy/scikit-learn,terkkila/scikit-learn,joernhees/scikit-learn,rexshihaoren/scikit-learn,f3r/scikit-learn,huobaowangxi/scikit-learn,ashhher3/scikit-learn,thilbern/scikit-learn,terkkila/scikit-learn,justincassidy/scikit-learn,huobaowangxi/scikit-learn,murali-munna/scikit-learn,kjung/scikit-learn,akionakamura/scikit-learn,Fireblend/scikit-learn,stylianos-kampakis/scikit-learn,IndraVikas/scikit-learn,macks22/scikit-learn,djgagne/scikit-learn,mhue/scikit-learn,untom/scikit-learn,waterponey/scikit-learn,shahankhatch/scikit-learn,walterreade/scikit-learn,jakirkham/scikit-learn,huzq/scikit-learn,xavierwu/scikit-learn,mhdella/scikit-learn,xavierwu/scikit-learn,fengzhyuan/scikit-learn,fbagirov/scikit-learn,chrsrds/scikit-learn,hrjn/scikit-learn,joshloyal/scikit-learn,pompiduskus/scikit-learn,herilalaina/scikit-learn,0x0all/scikit-learn,IshankGulati/scikit-learn,arjoly/scikit-learn,Adai0808/scikit-learn,mattgiguere/scikit-learn,amueller/scikit-learn,akionakamura/scikit-learn,ZENGXH/scikit-learn,evgchz/scikit-learn,kylerbrown/scikit-learn,kjung/scikit-learn,kevin-intel/scikit-learn,giorgiop/scikit-learn,ningchi/scikit-learn,ycaihua/sci
kit-learn,AlexRobson/scikit-learn,BiaDarkia/scikit-learn,fzalkow/scikit-learn,PrashntS/scikit-learn,RPGOne/scikit-learn,nomadcube/scikit-learn,Garrett-R/scikit-learn,yunfeilu/scikit-learn,mikebenfield/scikit-learn,marcocaccin/scikit-learn,tosolveit/scikit-learn,loli/sklearn-ensembletrees,kashif/scikit-learn,joshloyal/scikit-learn,Achuth17/scikit-learn,fabioticconi/scikit-learn,vermouthmjl/scikit-learn,hainm/scikit-learn,JsNoNo/scikit-learn,evgchz/scikit-learn,carrillo/scikit-learn,eickenberg/scikit-learn,evgchz/scikit-learn,mblondel/scikit-learn,eickenberg/scikit-learn,dhruv13J/scikit-learn,ahoyosid/scikit-learn,btabibian/scikit-learn,yask123/scikit-learn,mehdidc/scikit-learn,btabibian/scikit-learn,abhishekkrthakur/scikit-learn,maheshakya/scikit-learn,mjgrav2001/scikit-learn,rrohan/scikit-learn,jkarnows/scikit-learn,NunoEdgarGub1/scikit-learn,nrhine1/scikit-learn,rahuldhote/scikit-learn,hsiaoyi0504/scikit-learn
|
Add simple RANSAC unit test
|
import numpy as np
from numpy.testing import assert_equal
from sklearn import linear_model
from sklearn.utils import ransac
def test_ransac_inliers_outliers():
np.random.seed(1)
# Generate coordinates of line
X = np.arange(-200, 200)
y = 0.2 * X + 20
data = np.column_stack([X, y])
# Add some faulty data
outliers = np.array((10, 30, 200))
data[outliers[0], :] = (1000, 1000)
data[outliers[1], :] = (-1000, -1000)
data[outliers[2], :] = (-100, -50)
X = data[:, 0][:, np.newaxis]
y = data[:, 1]
# Estimate parameters of corrupted data
inlier_mask = ransac(X, y, linear_model.LinearRegression(), 3, 5)
# Ground truth / reference inlier mask
ref_inlier_mask = np.ones_like(inlier_mask, dtype=np.bool_)
ref_inlier_mask[outliers] = False
assert_equal(inlier_mask, ref_inlier_mask)
if __name__ == "__main__":
np.testing.run_module_suite()
|
<commit_before><commit_msg>Add simple RANSAC unit test<commit_after>
|
import numpy as np
from numpy.testing import assert_equal
from sklearn import linear_model
from sklearn.utils import ransac
def test_ransac_inliers_outliers():
np.random.seed(1)
# Generate coordinates of line
X = np.arange(-200, 200)
y = 0.2 * X + 20
data = np.column_stack([X, y])
# Add some faulty data
outliers = np.array((10, 30, 200))
data[outliers[0], :] = (1000, 1000)
data[outliers[1], :] = (-1000, -1000)
data[outliers[2], :] = (-100, -50)
X = data[:, 0][:, np.newaxis]
y = data[:, 1]
# Estimate parameters of corrupted data
inlier_mask = ransac(X, y, linear_model.LinearRegression(), 3, 5)
# Ground truth / reference inlier mask
ref_inlier_mask = np.ones_like(inlier_mask, dtype=np.bool_)
ref_inlier_mask[outliers] = False
assert_equal(inlier_mask, ref_inlier_mask)
if __name__ == "__main__":
np.testing.run_module_suite()
|
Add simple RANSAC unit testimport numpy as np
from numpy.testing import assert_equal
from sklearn import linear_model
from sklearn.utils import ransac
def test_ransac_inliers_outliers():
np.random.seed(1)
# Generate coordinates of line
X = np.arange(-200, 200)
y = 0.2 * X + 20
data = np.column_stack([X, y])
# Add some faulty data
outliers = np.array((10, 30, 200))
data[outliers[0], :] = (1000, 1000)
data[outliers[1], :] = (-1000, -1000)
data[outliers[2], :] = (-100, -50)
X = data[:, 0][:, np.newaxis]
y = data[:, 1]
# Estimate parameters of corrupted data
inlier_mask = ransac(X, y, linear_model.LinearRegression(), 3, 5)
# Ground truth / reference inlier mask
ref_inlier_mask = np.ones_like(inlier_mask, dtype=np.bool_)
ref_inlier_mask[outliers] = False
assert_equal(inlier_mask, ref_inlier_mask)
if __name__ == "__main__":
np.testing.run_module_suite()
|
<commit_before><commit_msg>Add simple RANSAC unit test<commit_after>import numpy as np
from numpy.testing import assert_equal
from sklearn import linear_model
from sklearn.utils import ransac
def test_ransac_inliers_outliers():
np.random.seed(1)
# Generate coordinates of line
X = np.arange(-200, 200)
y = 0.2 * X + 20
data = np.column_stack([X, y])
# Add some faulty data
outliers = np.array((10, 30, 200))
data[outliers[0], :] = (1000, 1000)
data[outliers[1], :] = (-1000, -1000)
data[outliers[2], :] = (-100, -50)
X = data[:, 0][:, np.newaxis]
y = data[:, 1]
# Estimate parameters of corrupted data
inlier_mask = ransac(X, y, linear_model.LinearRegression(), 3, 5)
# Ground truth / reference inlier mask
ref_inlier_mask = np.ones_like(inlier_mask, dtype=np.bool_)
ref_inlier_mask[outliers] = False
assert_equal(inlier_mask, ref_inlier_mask)
if __name__ == "__main__":
np.testing.run_module_suite()
|
|
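An illustrative aside on the record above: the sklearn.utils.ransac helper it tests appears to come from an early pull request and is not part of any scikit-learn release; the released API for the same idea is sklearn.linear_model.RANSACRegressor, which exposes the fitted inlier mask as inlier_mask_. Below is a minimal sketch of the same inlier/outlier check against the released API; the default residual threshold is assumed to separate these particular outliers.
# Hedged sketch: the released RANSAC API, not the proposed sklearn.utils.ransac
import numpy as np
from sklearn.linear_model import LinearRegression, RANSACRegressor
X = np.arange(-200, 200)
y = 0.2 * X + 20
data = np.column_stack([X, y])
# Inject the same three outliers as the test in the record
data[10, :] = (1000, 1000)
data[30, :] = (-1000, -1000)
data[200, :] = (-100, -50)
X, y = data[:, 0][:, np.newaxis], data[:, 1]
model = RANSACRegressor(LinearRegression(), random_state=1)
model.fit(X, y)
print(model.inlier_mask_.sum(), 'inliers out of', len(y))  # expect len(y) - 3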
7aac6f1b6407300033e5ba46b61b9b8f0ae089c3
|
boardinghouse/tests/test_sql.py
|
boardinghouse/tests/test_sql.py
|
"""
Tests for the RAW sql functions.
"""
from django.conf import settings
from django.test import TestCase
from django.db import connection
from boardinghouse.models import Schema
class TestRejectSchemaColumnChange(TestCase):
def test_exception_is_raised(self):
Schema.objects.mass_create('a')
cursor = connection.cursor()
UPDATE = "UPDATE boardinghouse_schema SET schema='foo' WHERE schema='a'"
self.assertRaises(Exception, cursor.execute, UPDATE)
|
Add test for exception raising.
|
Add test for exception raising.
|
Python
|
bsd-3-clause
|
luzfcb/django-boardinghouse,luzfcb/django-boardinghouse,luzfcb/django-boardinghouse
|
Add test for exception raising.
|
"""
Tests for the RAW sql functions.
"""
from django.conf import settings
from django.test import TestCase
from django.db import connection
from boardinghouse.models import Schema
class TestRejectSchemaColumnChange(TestCase):
def test_exception_is_raised(self):
Schema.objects.mass_create('a')
cursor = connection.cursor()
UPDATE = "UPDATE boardinghouse_schema SET schema='foo' WHERE schema='a'"
self.assertRaises(Exception, cursor.execute, UPDATE)
|
<commit_before><commit_msg>Add test for exception raising.<commit_after>
|
"""
Tests for the RAW sql functions.
"""
from django.conf import settings
from django.test import TestCase
from django.db import connection
from boardinghouse.models import Schema
class TestRejectSchemaColumnChange(TestCase):
def test_exception_is_raised(self):
Schema.objects.mass_create('a')
cursor = connection.cursor()
UPDATE = "UPDATE boardinghouse_schema SET schema='foo' WHERE schema='a'"
self.assertRaises(Exception, cursor.execute, UPDATE)
|
Add test for exception raising."""
Tests for the RAW sql functions.
"""
from django.conf import settings
from django.test import TestCase
from django.db import connection
from boardinghouse.models import Schema
class TestRejectSchemaColumnChange(TestCase):
def test_exception_is_raised(self):
Schema.objects.mass_create('a')
cursor = connection.cursor()
UPDATE = "UPDATE boardinghouse_schema SET schema='foo' WHERE schema='a'"
self.assertRaises(Exception, cursor.execute, UPDATE)
|
<commit_before><commit_msg>Add test for exception raising.<commit_after>"""
Tests for the RAW sql functions.
"""
from django.conf import settings
from django.test import TestCase
from django.db import connection
from boardinghouse.models import Schema
class TestRejectSchemaColumnChange(TestCase):
def test_exception_is_raised(self):
Schema.objects.mass_create('a')
cursor = connection.cursor()
UPDATE = "UPDATE boardinghouse_schema SET schema='foo' WHERE schema='a'"
self.assertRaises(Exception, cursor.execute, UPDATE)
|
|
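A note on the record above: connection lives in django.db rather than django.db.models. Below is a self-contained sketch of the test with the corrected import, using the context-manager form of assertRaises (Schema and its mass_create manager method are taken from the record and assumed to exist; whatever database-level rule rejects the UPDATE is not shown in the record):
from django.db import connection
from django.test import TestCase
from boardinghouse.models import Schema  # from the record; assumed importable
class TestRejectSchemaColumnChange(TestCase):
    def test_exception_is_raised(self):
        Schema.objects.mass_create('a')
        update = "UPDATE boardinghouse_schema SET schema='foo' WHERE schema='a'"
        # The raw UPDATE is expected to be rejected at the database level.
        with self.assertRaises(Exception):
            connection.cursor().execute(update)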
033bddcfc933191397e4f01cd5ce5b10b2344c92
|
boundary_cli/plugin_manifest.py
|
boundary_cli/plugin_manifest.py
|
#!/usr/bin/env python
###
### Copyright 2014-2015, Boundary
###
### Licensed under the Apache License, Version 2.0 (the "License");
### you may not use this file except in compliance with the License.
### You may obtain a copy of the License at
###
### http://www.apache.org/licenses/LICENSE-2.0
###
### Unless required by applicable law or agreed to in writing, software
### distributed under the License is distributed on an "AS IS" BASIS,
### WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
### See the License for the specific language governing permissions and
### limitations under the License.
###
import json
from pprint import pprint
import sys
'''
Reads and provides access to a plugin.json file, the manifest of plugins.
'''
class PluginManifest():
def __init__(self,path=None):
'''
Initialize the PluginManifest instance
'''
self.path = path
self.manifest_json = None
self.manifest = None
def getMetricNames(self):
'''
Returns the list of metrics associated with the plugin manifest
'''
return self.manifest['metrics']
def load(self):
'''
Load the metrics file from the given path
'''
f = open(self.path,"r")
self.manifest_json = f.read()
def parse(self):
'''
Parses the manifest JSON into a dictionary
'''
self.manifest = json.loads(self.manifest_json)
def get(self):
'''
Read the JSON file and parse into a dictionary
'''
self.load()
self.parse()
def getManifest(self):
'''
        Returns the dictionary from the parsed JSON plugin manifest
'''
return self.manifest
if __name__ == "__main__":
if len(sys.argv) != 2:
sys.stderr.write("No file")
sys.exit(1)
p = PluginManifest(sys.argv[1])
p.get()
pprint(p.getManifest())
pprint(p.getMetricNames())
|
Add python class for handling the plugin manifest plugin.json
|
Add python class for handling the plugin manifest plugin.json
|
Python
|
apache-2.0
|
jdgwartney/pulse-api-cli,boundary/pulse-api-cli,boundary/boundary-api-cli,boundary/boundary-api-cli,jdgwartney/boundary-api-cli,jdgwartney/pulse-api-cli,wcainboundary/boundary-api-cli,boundary/pulse-api-cli,jdgwartney/boundary-api-cli,wcainboundary/boundary-api-cli
|
Add python class for handling the plugin manifest plugin.json
|
#!/usr/bin/env python
###
### Copyright 2014-2015, Boundary
###
### Licensed under the Apache License, Version 2.0 (the "License");
### you may not use this file except in compliance with the License.
### You may obtain a copy of the License at
###
### http://www.apache.org/licenses/LICENSE-2.0
###
### Unless required by applicable law or agreed to in writing, software
### distributed under the License is distributed on an "AS IS" BASIS,
### WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
### See the License for the specific language governing permissions and
### limitations under the License.
###
import json
from pprint import pprint
import sys
'''
Reads and provides access to a plugin.json file, the manifest of plugins.
'''
class PluginManifest():
def __init__(self,path=None):
'''
Initialize the PluginManifest instance
'''
self.path = path
self.manifest_json = None
self.manifest = None
def getMetricNames(self):
'''
Returns the list of metrics associated with the plugin manifest
'''
return self.manifest['metrics']
def load(self):
'''
Load the metrics file from the given path
'''
f = open(self.path,"r")
self.manifest_json = f.read()
def parse(self):
'''
Parses the manifest JSON into a dictionary
'''
self.manifest = json.loads(self.manifest_json)
def get(self):
'''
Read the JSON file and parse into a dictionary
'''
self.load()
self.parse()
def getManifest(self):
'''
        Returns the dictionary from the parsed JSON plugin manifest
'''
return self.manifest
if __name__ == "__main__":
if len(sys.argv) != 2:
sys.stderr.write("No file")
sys.exit(1)
p = PluginManifest(sys.argv[1])
p.get()
pprint(p.getManifest())
pprint(p.getMetricNames())
|
<commit_before><commit_msg>Add python class for handling the plugin manifest plugin.json<commit_after>
|
#!/usr/bin/env python
###
### Copyright 2014-2015, Boundary
###
### Licensed under the Apache License, Version 2.0 (the "License");
### you may not use this file except in compliance with the License.
### You may obtain a copy of the License at
###
### http://www.apache.org/licenses/LICENSE-2.0
###
### Unless required by applicable law or agreed to in writing, software
### distributed under the License is distributed on an "AS IS" BASIS,
### WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
### See the License for the specific language governing permissions and
### limitations under the License.
###
import json
from pprint import pprint
import sys
'''
Reads and provides access to a plugin.json file, the manifest of plugins.
'''
class PluginManifest():
def __init__(self,path=None):
'''
Initialize the PluginManifest instance
'''
self.path = path
self.manifest_json = None
self.manifest = None
def getMetricNames(self):
'''
Returns the list of metrics associated with the plugin manifest
'''
return self.manifest['metrics']
def load(self):
'''
Load the metrics file from the given path
'''
f = open(self.path,"r")
self.manifest_json = f.read()
def parse(self):
'''
Parses the manifest JSON into a dictionary
'''
self.manifest = json.loads(self.manifest_json)
def get(self):
'''
Read the JSON file and parse into a dictionary
'''
self.load()
self.parse()
def getManifest(self):
'''
        Returns the dictionary from the parsed JSON plugin manifest
'''
return self.manifest
if __name__ == "__main__":
if len(sys.argv) != 2:
sys.stderr.write("No file")
sys.exit(1)
p = PluginManifest(sys.argv[1])
p.get()
pprint(p.getManifest())
pprint(p.getMetricNames())
|
Add python class for handling the plugin manifest plugin.json#!/usr/bin/env python
###
### Copyright 2014-2015, Boundary
###
### Licensed under the Apache License, Version 2.0 (the "License");
### you may not use this file except in compliance with the License.
### You may obtain a copy of the License at
###
### http://www.apache.org/licenses/LICENSE-2.0
###
### Unless required by applicable law or agreed to in writing, software
### distributed under the License is distributed on an "AS IS" BASIS,
### WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
### See the License for the specific language governing permissions and
### limitations under the License.
###
import json
from pprint import pprint
import sys
'''
Reads and provides access to a plugin.json file, the manifest of plugins.
'''
class PluginManifest():
def __init__(self,path=None):
'''
Initialize the PluginManifest instance
'''
self.path = path
self.manifest_json = None
self.manifest = None
def getMetricNames(self):
'''
Returns the list of metrics associated with the plugin manifest
'''
return self.manifest['metrics']
def load(self):
'''
Load the metrics file from the given path
'''
f = open(self.path,"r")
self.manifest_json = f.read()
def parse(self):
'''
Parses the manifest JSON into a dictionary
'''
self.manifest = json.loads(self.manifest_json)
def get(self):
'''
Read the JSON file and parse into a dictionary
'''
self.load()
self.parse()
def getManifest(self):
'''
        Returns the dictionary from the parsed JSON plugin manifest
'''
return self.manifest
if __name__ == "__main__":
if len(sys.argv) != 2:
sys.stderr.write("No file")
sys.exit(1)
p = PluginManifest(sys.argv[1])
p.get()
pprint(p.getManifest())
pprint(p.getMetricNames())
|
<commit_before><commit_msg>Add python class for handling the plugin manifest plugin.json<commit_after>#!/usr/bin/env python
###
### Copyright 2014-2015, Boundary
###
### Licensed under the Apache License, Version 2.0 (the "License");
### you may not use this file except in compliance with the License.
### You may obtain a copy of the License at
###
### http://www.apache.org/licenses/LICENSE-2.0
###
### Unless required by applicable law or agreed to in writing, software
### distributed under the License is distributed on an "AS IS" BASIS,
### WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
### See the License for the specific language governing permissions and
### limitations under the License.
###
import json
from pprint import pprint
import sys
'''
Reads and provides access to a plugin.json file, the manifest of plugins.
'''
class PluginManifest():
def __init__(self,path=None):
'''
Initialize the PluginManifest instance
'''
self.path = path
self.manifest_json = None
self.manifest = None
def getMetricNames(self):
'''
Returns the list of metrics associated with the plugin manifest
'''
return self.manifest['metrics']
def load(self):
'''
Load the metrics file from the given path
'''
f = open(self.path,"r")
self.manifest_json = f.read()
def parse(self):
'''
Parses the manifest JSON into a dictionary
'''
self.manifest = json.loads(self.manifest_json)
def get(self):
'''
Read the JSON file and parse into a dictionary
'''
self.load()
self.parse()
def getManifest(self):
'''
        Returns the dictionary from the parsed JSON plugin manifest
'''
return self.manifest
if __name__ == "__main__":
if len(sys.argv) != 2:
sys.stderr.write("No file")
sys.exit(1)
p = PluginManifest(sys.argv[1])
p.get()
pprint(p.getManifest())
pprint(p.getMetricNames())
|
|
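A short usage sketch for the PluginManifest class in the record above, written against its own methods; the import path follows the file location boundary_cli/plugin_manifest.py and the manifest path is a placeholder:
from boundary_cli.plugin_manifest import PluginManifest  # path assumed from the record
manifest = PluginManifest('plugin.json')  # hypothetical manifest location
manifest.get()                            # load() followed by parse()
print(manifest.getMetricNames())          # the 'metrics' entry of the manifest
print(manifest.getManifest())             # the full parsed dictionary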
e3d54a9f85a98acd774a22281288b9224fa18b12
|
djangae/settings_base.py
|
djangae/settings_base.py
|
DEFAULT_FILE_STORAGE = 'djangae.storage.BlobstoreStorage'
FILE_UPLOAD_MAX_MEMORY_SIZE = 1024 * 1024
FILE_UPLOAD_HANDLERS = (
'djangae.storage.BlobstoreFileUploadHandler',
'django.core.files.uploadhandler.MemoryFileUploadHandler',
)
DATABASES = {
'default': {
'ENGINE': 'djangae.db.backends.appengine'
}
}
GENERATE_SPECIAL_INDEXES_DURING_TESTING = False
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
}
}
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
TEST_RUNNER = 'djangae.test_runner.DjangaeTestSuiteRunner'
|
DEFAULT_FILE_STORAGE = 'djangae.storage.BlobstoreStorage'
FILE_UPLOAD_MAX_MEMORY_SIZE = 1024 * 1024
FILE_UPLOAD_HANDLERS = (
'djangae.storage.BlobstoreFileUploadHandler',
'django.core.files.uploadhandler.MemoryFileUploadHandler',
)
DATABASES = {
'default': {
'ENGINE': 'djangae.db.backends.appengine'
}
}
GENERATE_SPECIAL_INDEXES_DURING_TESTING = False
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
}
}
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
TEST_RUNNER = 'djangae.test_runner.DjangaeTestSuiteRunner'
EMAIL_BACKEND = 'djangae.mail.AsyncEmailBackend'
|
Make sure we set the EMAIL_BACKEND by default
|
Make sure we set the EMAIL_BACKEND by default
|
Python
|
bsd-3-clause
|
armirusco/djangae,SiPiggles/djangae,nealedj/djangae,armirusco/djangae,martinogden/djangae,wangjun/djangae,nealedj/djangae,chargrizzle/djangae,SiPiggles/djangae,leekchan/djangae,trik/djangae,asendecka/djangae,martinogden/djangae,leekchan/djangae,grzes/djangae,jscissr/djangae,SiPiggles/djangae,wangjun/djangae,kirberich/djangae,nealedj/djangae,chargrizzle/djangae,stucox/djangae,grzes/djangae,trik/djangae,leekchan/djangae,stucox/djangae,asendecka/djangae,jscissr/djangae,potatolondon/djangae,pablorecio/djangae,pablorecio/djangae,wangjun/djangae,chargrizzle/djangae,armirusco/djangae,kirberich/djangae,asendecka/djangae,stucox/djangae,martinogden/djangae,trik/djangae,b-cannon/my_djae,pablorecio/djangae,kirberich/djangae,potatolondon/djangae,jscissr/djangae,grzes/djangae
|
DEFAULT_FILE_STORAGE = 'djangae.storage.BlobstoreStorage'
FILE_UPLOAD_MAX_MEMORY_SIZE = 1024 * 1024
FILE_UPLOAD_HANDLERS = (
'djangae.storage.BlobstoreFileUploadHandler',
'django.core.files.uploadhandler.MemoryFileUploadHandler',
)
DATABASES = {
'default': {
'ENGINE': 'djangae.db.backends.appengine'
}
}
GENERATE_SPECIAL_INDEXES_DURING_TESTING = False
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
}
}
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
TEST_RUNNER = 'djangae.test_runner.DjangaeTestSuiteRunner'Make sure we set the EMAIL_BACKEND by default
|
DEFAULT_FILE_STORAGE = 'djangae.storage.BlobstoreStorage'
FILE_UPLOAD_MAX_MEMORY_SIZE = 1024 * 1024
FILE_UPLOAD_HANDLERS = (
'djangae.storage.BlobstoreFileUploadHandler',
'django.core.files.uploadhandler.MemoryFileUploadHandler',
)
DATABASES = {
'default': {
'ENGINE': 'djangae.db.backends.appengine'
}
}
GENERATE_SPECIAL_INDEXES_DURING_TESTING = False
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
}
}
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
TEST_RUNNER = 'djangae.test_runner.DjangaeTestSuiteRunner'
EMAIL_BACKEND = 'djangae.mail.AsyncEmailBackend'
|
<commit_before>
DEFAULT_FILE_STORAGE = 'djangae.storage.BlobstoreStorage'
FILE_UPLOAD_MAX_MEMORY_SIZE = 1024 * 1024
FILE_UPLOAD_HANDLERS = (
'djangae.storage.BlobstoreFileUploadHandler',
'django.core.files.uploadhandler.MemoryFileUploadHandler',
)
DATABASES = {
'default': {
'ENGINE': 'djangae.db.backends.appengine'
}
}
GENERATE_SPECIAL_INDEXES_DURING_TESTING = False
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
}
}
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
TEST_RUNNER = 'djangae.test_runner.DjangaeTestSuiteRunner'<commit_msg>Make sure we set the EMAIL_BACKEND by default<commit_after>
|
DEFAULT_FILE_STORAGE = 'djangae.storage.BlobstoreStorage'
FILE_UPLOAD_MAX_MEMORY_SIZE = 1024 * 1024
FILE_UPLOAD_HANDLERS = (
'djangae.storage.BlobstoreFileUploadHandler',
'django.core.files.uploadhandler.MemoryFileUploadHandler',
)
DATABASES = {
'default': {
'ENGINE': 'djangae.db.backends.appengine'
}
}
GENERATE_SPECIAL_INDEXES_DURING_TESTING = False
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
}
}
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
TEST_RUNNER = 'djangae.test_runner.DjangaeTestSuiteRunner'
EMAIL_BACKEND = 'djangae.mail.AsyncEmailBackend'
|
DEFAULT_FILE_STORAGE = 'djangae.storage.BlobstoreStorage'
FILE_UPLOAD_MAX_MEMORY_SIZE = 1024 * 1024
FILE_UPLOAD_HANDLERS = (
'djangae.storage.BlobstoreFileUploadHandler',
'django.core.files.uploadhandler.MemoryFileUploadHandler',
)
DATABASES = {
'default': {
'ENGINE': 'djangae.db.backends.appengine'
}
}
GENERATE_SPECIAL_INDEXES_DURING_TESTING = False
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
}
}
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
TEST_RUNNER = 'djangae.test_runner.DjangaeTestSuiteRunner'Make sure we set the EMAIL_BACKEND by default
DEFAULT_FILE_STORAGE = 'djangae.storage.BlobstoreStorage'
FILE_UPLOAD_MAX_MEMORY_SIZE = 1024 * 1024
FILE_UPLOAD_HANDLERS = (
'djangae.storage.BlobstoreFileUploadHandler',
'django.core.files.uploadhandler.MemoryFileUploadHandler',
)
DATABASES = {
'default': {
'ENGINE': 'djangae.db.backends.appengine'
}
}
GENERATE_SPECIAL_INDEXES_DURING_TESTING = False
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
}
}
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
TEST_RUNNER = 'djangae.test_runner.DjangaeTestSuiteRunner'
EMAIL_BACKEND = 'djangae.mail.AsyncEmailBackend'
|
<commit_before>
DEFAULT_FILE_STORAGE = 'djangae.storage.BlobstoreStorage'
FILE_UPLOAD_MAX_MEMORY_SIZE = 1024 * 1024
FILE_UPLOAD_HANDLERS = (
'djangae.storage.BlobstoreFileUploadHandler',
'django.core.files.uploadhandler.MemoryFileUploadHandler',
)
DATABASES = {
'default': {
'ENGINE': 'djangae.db.backends.appengine'
}
}
GENERATE_SPECIAL_INDEXES_DURING_TESTING = False
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
}
}
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
TEST_RUNNER = 'djangae.test_runner.DjangaeTestSuiteRunner'<commit_msg>Make sure we set the EMAIL_BACKEND by default<commit_after>
DEFAULT_FILE_STORAGE = 'djangae.storage.BlobstoreStorage'
FILE_UPLOAD_MAX_MEMORY_SIZE = 1024 * 1024
FILE_UPLOAD_HANDLERS = (
'djangae.storage.BlobstoreFileUploadHandler',
'django.core.files.uploadhandler.MemoryFileUploadHandler',
)
DATABASES = {
'default': {
'ENGINE': 'djangae.db.backends.appengine'
}
}
GENERATE_SPECIAL_INDEXES_DURING_TESTING = False
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
}
}
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
TEST_RUNNER = 'djangae.test_runner.DjangaeTestSuiteRunner'
EMAIL_BACKEND = 'djangae.mail.AsyncEmailBackend'
|
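Settings modules like the one above are normally pulled into a project's own settings with a star import and overridden afterwards; here is a hedged sketch of that pattern (whether djangae documents exactly this layout is an assumption):
# settings.py of a hypothetical App Engine project
from djangae.settings_base import *  # the defaults from the record above
# Defaults can be overridden after the import, e.g. restoring Django's
# SMTP email backend in place of the async one set above:
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'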
e1e8dbaec0717a1bcbc709458da0dcfdfba03bd0
|
glaciercmd/command_list_vaults.py
|
glaciercmd/command_list_vaults.py
|
import boto
class CommandListVaults(object):
def execute(self, args, config):
glacier_connection = boto.connect_glacier(aws_access_key_id=config.get('configuration', 'aws_key'), aws_secret_access_key=config.get('configuration', 'aws_secret'))
for index, vault in enumerate(glacier_connection.list_vaults()):
print "{}.\tname={}\n\tarn={}\n\tcreation_date={}\n\tlast_inventory_date={}\n\tsize={}\n\tnumber_of_archives={}".format(index+1, vault.name, vault.arn, vault.creation_date, vault.last_inventory_date, vault.size, vault.number_of_archives)
def accept(self, args):
return len(args) >= 2 and args[0] == 'list' and args[1] == 'vaults'
def command_init():
return CommandListVaults()
|
Add a command to list vaults
|
Add a command to list vaults
|
Python
|
mit
|
carsonmcdonald/glacier-cmd
|
Add a command to list vaults
|
import boto
class CommandListVaults(object):
def execute(self, args, config):
glacier_connection = boto.connect_glacier(aws_access_key_id=config.get('configuration', 'aws_key'), aws_secret_access_key=config.get('configuration', 'aws_secret'))
for index, vault in enumerate(glacier_connection.list_vaults()):
print "{}.\tname={}\n\tarn={}\n\tcreation_date={}\n\tlast_inventory_date={}\n\tsize={}\n\tnumber_of_archives={}".format(index+1, vault.name, vault.arn, vault.creation_date, vault.last_inventory_date, vault.size, vault.number_of_archives)
def accept(self, args):
return len(args) >= 2 and args[0] == 'list' and args[1] == 'vaults'
def command_init():
return CommandListVaults()
|
<commit_before><commit_msg>Add a command to list vaults<commit_after>
|
import boto
class CommandListVaults(object):
def execute(self, args, config):
glacier_connection = boto.connect_glacier(aws_access_key_id=config.get('configuration', 'aws_key'), aws_secret_access_key=config.get('configuration', 'aws_secret'))
for index, vault in enumerate(glacier_connection.list_vaults()):
print "{}.\tname={}\n\tarn={}\n\tcreation_date={}\n\tlast_inventory_date={}\n\tsize={}\n\tnumber_of_archives={}".format(index+1, vault.name, vault.arn, vault.creation_date, vault.last_inventory_date, vault.size, vault.number_of_archives)
def accept(self, args):
return len(args) >= 2 and args[0] == 'list' and args[1] == 'vaults'
def command_init():
return CommandListVaults()
|
Add a command to list vaultsimport boto
class CommandListVaults(object):
def execute(self, args, config):
glacier_connection = boto.connect_glacier(aws_access_key_id=config.get('configuration', 'aws_key'), aws_secret_access_key=config.get('configuration', 'aws_secret'))
for index, vault in enumerate(glacier_connection.list_vaults()):
print "{}.\tname={}\n\tarn={}\n\tcreation_date={}\n\tlast_inventory_date={}\n\tsize={}\n\tnumber_of_archives={}".format(index+1, vault.name, vault.arn, vault.creation_date, vault.last_inventory_date, vault.size, vault.number_of_archives)
def accept(self, args):
return len(args) >= 2 and args[0] == 'list' and args[1] == 'vaults'
def command_init():
return CommandListVaults()
|
<commit_before><commit_msg>Add a command to list vaults<commit_after>import boto
class CommandListVaults(object):
def execute(self, args, config):
glacier_connection = boto.connect_glacier(aws_access_key_id=config.get('configuration', 'aws_key'), aws_secret_access_key=config.get('configuration', 'aws_secret'))
for index, vault in enumerate(glacier_connection.list_vaults()):
print "{}.\tname={}\n\tarn={}\n\tcreation_date={}\n\tlast_inventory_date={}\n\tsize={}\n\tnumber_of_archives={}".format(index+1, vault.name, vault.arn, vault.creation_date, vault.last_inventory_date, vault.size, vault.number_of_archives)
def accept(self, args):
return len(args) >= 2 and args[0] == 'list' and args[1] == 'vaults'
def command_init():
return CommandListVaults()
|
|
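The accept/execute/command_init triple in the record above suggests a plugin-style dispatcher; below is a minimal sketch of driving the command that way (the dispatcher and the config file layout are assumptions, apart from the [configuration] keys read in execute):
import sys
from ConfigParser import ConfigParser  # Python 2, matching the record
from glaciercmd.command_list_vaults import command_init
config = ConfigParser()
config.read('glaciercmd.cfg')  # hypothetical file with a [configuration] section
command = command_init()
args = sys.argv[1:]  # e.g. ['list', 'vaults']
if command.accept(args):
    command.execute(args, config)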
e3abfebdc57d364ded08034a6f1dead556fc4b57
|
regscrape/regscrape_lib/commands/cancel_jobs.py
|
regscrape/regscrape_lib/commands/cancel_jobs.py
|
from regscrape_lib.util import get_db
import settings
def run():
query = settings.FILTER.copy()
query['_job_id'] = {'$exists': True}
db = get_db()
db.docs.update(query, {'$unset': {'_job_id': True}}, multi=True, safe=True)
print 'Canceled all currently-assigned jobs.'
|
Add command to cancel existing jobs to make it easier to resume failed scrapes.
|
Add command to cancel existing jobs to make it easier to resume failed scrapes.
|
Python
|
bsd-3-clause
|
sunlightlabs/regulations-scraper,sunlightlabs/regulations-scraper,sunlightlabs/regulations-scraper
|
Add command to cancel existing jobs to make it easier to resume failed scrapes.
|
from regscrape_lib.util import get_db
import settings
def run():
query = settings.FILTER.copy()
query['_job_id'] = {'$exists': True}
db = get_db()
db.docs.update(query, {'$unset': {'_job_id': True}}, multi=True, safe=True)
print 'Canceled all currently-assigned jobs.'
|
<commit_before><commit_msg>Add command to cancel existing jobs to make it easier to resume failed scrapes.<commit_after>
|
from regscrape_lib.util import get_db
import settings
def run():
query = settings.FILTER.copy()
query['_job_id'] = {'$exists': True}
db = get_db()
db.docs.update(query, {'$unset': {'_job_id': True}}, multi=True, safe=True)
print 'Canceled all currently-assigned jobs.'
|
Add command to cancel existing jobs to make it easier to resume failed scrapes.from regscrape_lib.util import get_db
import settings
def run():
query = settings.FILTER.copy()
query['_job_id'] = {'$exists': True}
db = get_db()
db.docs.update(query, {'$unset': {'_job_id': True}}, multi=True, safe=True)
print 'Canceled all currently-assigned jobs.'
|
<commit_before><commit_msg>Add command to cancel existing jobs to make it easier to resume failed scrapes.<commit_after>from regscrape_lib.util import get_db
import settings
def run():
query = settings.FILTER.copy()
query['_job_id'] = {'$exists': True}
db = get_db()
db.docs.update(query, {'$unset': {'_job_id': True}}, multi=True, safe=True)
print 'Canceled all currently-assigned jobs.'
|
|
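The record uses the legacy pymongo update(..., multi=True, safe=True) call; in pymongo 3 and later the equivalent is update_many, with safe=True replaced by acknowledged writes. A sketch of the same cancellation against the modern driver follows (connection details and the database name are assumptions; the docs collection name comes from the record):
from pymongo import MongoClient
client = MongoClient('localhost', 27017)  # assumed connection details
db = client.regulations                   # hypothetical database name
query = {'_job_id': {'$exists': True}}    # plus any settings.FILTER criteria
result = db.docs.update_many(query, {'$unset': {'_job_id': True}})
print('Canceled %d currently-assigned jobs.' % result.modified_count)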
fda37167996c1c1550ef94a033977982248add64
|
word2vec_convert.py
|
word2vec_convert.py
|
from flask import Flask, request
from flask.ext.restful import reqparse, Api, Resource
from gensim.models.word2vec import Word2Vec
import json
print 'loading model'
MODEL = Word2Vec.load_word2vec_format(
'GoogleNews-vectors-negative300.bin.gz', binary=True)
print 'model loaded'
print 'dumping model'
MODEL.save('GoogleNews-vectors-negative300.gensim')
|
Convert the word2vec to gensim format
|
convert: Convert the word2vec to gensim format
|
Python
|
mit
|
mdbecker/word2vec_demo,mdbecker/word2vec_demo
|
convert: Convert the word2vec to gensim format
|
from flask import Flask, request
from flask.ext.restful import reqparse, Api, Resource
from gensim.models.word2vec import Word2Vec
import json
print 'loading model'
MODEL = Word2Vec.load_word2vec_format(
'GoogleNews-vectors-negative300.bin.gz', binary=True)
print 'model loaded'
print 'dumping model'
MODEL.save('GoogleNews-vectors-negative300.gensim')
|
<commit_before><commit_msg>convert: Convert the word2vec to gensim format<commit_after>
|
from flask import Flask, request
from flask.ext.restful import reqparse, Api, Resource
from gensim.models.word2vec import Word2Vec
import json
print 'loading model'
MODEL = Word2Vec.load_word2vec_format(
'GoogleNews-vectors-negative300.bin.gz', binary=True)
print 'model loaded'
print 'dumping model'
MODEL.save('GoogleNews-vectors-negative300.gensim')
|
convert: Convert the word2vec to gensim formatfrom flask import Flask, request
from flask.ext.restful import reqparse, Api, Resource
from gensim.models.word2vec import Word2Vec
import json
print 'loading model'
MODEL = Word2Vec.load_word2vec_format(
'GoogleNews-vectors-negative300.bin.gz', binary=True)
print 'model loaded'
print 'dumping model'
MODEL.save('GoogleNews-vectors-negative300.gensim')
|
<commit_before><commit_msg>convert: Convert the word2vec to gensim format<commit_after>from flask import Flask, request
from flask.ext.restful import reqparse, Api, Resource
from gensim.models.word2vec import Word2Vec
import json
print 'loading model'
MODEL = Word2Vec.load_word2vec_format(
'GoogleNews-vectors-negative300.bin.gz', binary=True)
print 'model loaded'
print 'dumping model'
MODEL.save('GoogleNews-vectors-negative300.gensim')
|
|
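In gensim 1.0 and later the word2vec I/O used above lives on gensim.models.KeyedVectors; below is a sketch of the same binary-to-native conversion with the newer API (file names are those from the record; the Flask imports in the record are unused by the conversion itself):
from gensim.models import KeyedVectors
print('loading model')
vectors = KeyedVectors.load_word2vec_format(
    'GoogleNews-vectors-negative300.bin.gz', binary=True)
print('model loaded')
print('dumping model')
vectors.save('GoogleNews-vectors-negative300.gensim')  # gensim's native format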
e74ebce60cc1de84465b16175320aa32f4795c0a
|
src/pyddl/__init__.py
|
src/pyddl/__init__.py
|
from abc import abstractmethod
__author__ = "Jonathan Hale"
class DdlStructure:
"""
An OpenDDL structure.
"""
def __init__(self, name):
self.structures = []
self.properties = dict()
self.name = name
class DdlDocument:
"""
An OpenDDL document.
"""
def __init__(self):
self.structures = []
class DdlWriter:
"""
Abstract class for classes responsible for writing OpenDdlDocuments.
"""
def __init__(self, document):
self.doc = document
def get_document(self):
"""
:return: document to be written by this writer.
"""
return self.doc
@abstractmethod
def write(self, filename):
"""
        Write the writer's document to a specified file.
        :param filename: file to write to
:return: nothing
"""
pass
class DdlTextWriter(DdlWriter):
"""
OpenDdlWriter which writes OpenDdlDocuments in human-readable text form.
"""
def __init__(self, document):
DdlWriter.__init__(self, document)
def write(self, filename):
# TODO: not implemented yet!
pass
# Space reserved for a specification based OpenDdlBinaryWriter ;)
# Hope there will be some specification for it some day.
|
Add initial DdlStructure, DdlDocument, DdlWriter and DdlTextWriter
|
Add initial DdlStructure, DdlDocument, DdlWriter and DdlTextWriter
Empty classes for a general structure.
Signed-off-by: Squareys <0f6a03d4883e012ba4cb2c581a68f35544703cd6@googlemail.com>
|
Python
|
mit
|
Squareys/PyDDL
|
Add initial DdlStructure, DdlDocument, DdlWriter and DdlTextWriter
Empty classes for a general structure.
Signed-off-by: Squareys <0f6a03d4883e012ba4cb2c581a68f35544703cd6@googlemail.com>
|
from abc import abstractmethod
__author__ = "Jonathan Hale"
class DdlStructure:
"""
An OpenDDL structure.
"""
def __init__(self, name):
self.structures = []
self.properties = dict()
self.name = name
class DdlDocument:
"""
An OpenDDL document.
"""
def __init__(self):
self.structures = []
class DdlWriter:
"""
Abstract class for classes responsible for writing OpenDdlDocuments.
"""
def __init__(self, document):
self.doc = document
def get_document(self):
"""
:return: document to be written by this writer.
"""
return self.doc
@abstractmethod
def write(self, filename):
"""
        Write the writer's document to a specified file.
        :param filename: file to write to
:return: nothing
"""
pass
class DdlTextWriter(DdlWriter):
"""
OpenDdlWriter which writes OpenDdlDocuments in human-readable text form.
"""
def __init__(self, document):
DdlWriter.__init__(self, document)
def write(self, filename):
# TODO: not implemented yet!
pass
# Space reserved for a specification based OpenDdlBinaryWriter ;)
# Hope there will be some specification for it some day.
|
<commit_before><commit_msg>Add initial DdlStructure, DdlDocument, DdlWriter and DdlTextWriter
Empty classes for a general structure.
Signed-off-by: Squareys <0f6a03d4883e012ba4cb2c581a68f35544703cd6@googlemail.com><commit_after>
|
from abc import abstractmethod
__author__ = "Jonathan Hale"
class DdlStructure:
"""
An OpenDDL structure.
"""
def __init__(self, name):
self.structures = []
self.properties = dict()
self.name = name
class DdlDocument:
"""
An OpenDDL document.
"""
def __init__(self):
self.structures = []
class DdlWriter:
"""
Abstract class for classes responsible for writing OpenDdlDocuments.
"""
def __init__(self, document):
self.doc = document
def get_document(self):
"""
:return: document to be written by this writer.
"""
return self.doc
@abstractmethod
def write(self, filename):
"""
        Write the writer's document to a specified file.
        :param filename: file to write to
:return: nothing
"""
pass
class DdlTextWriter(DdlWriter):
"""
OpenDdlWriter which writes OpenDdlDocuments in human-readable text form.
"""
def __init__(self, document):
DdlWriter.__init__(self, document)
def write(self, filename):
# TODO: not implemented yet!
pass
# Space reserved for a specification based OpenDdlBinaryWriter ;)
# Hope there will be some specification for it some day.
|
Add initial DdlStructure, DdlDocument, DdlWriter and DdlTextWriter
Empty classes for a general structure.
Signed-off-by: Squareys <0f6a03d4883e012ba4cb2c581a68f35544703cd6@googlemail.com>from abc import abstractmethod
__author__ = "Jonathan Hale"
class DdlStructure:
"""
An OpenDDL structure.
"""
def __init__(self, name):
self.structures = []
self.properties = dict()
self.name = name
class DdlDocument:
"""
An OpenDDL document.
"""
def __init__(self):
self.structures = []
class DdlWriter:
"""
Abstract class for classes responsible for writing OpenDdlDocuments.
"""
def __init__(self, document):
self.doc = document
def get_document(self):
"""
:return: document to be written by this writer.
"""
return self.doc
@abstractmethod
def write(self, filename):
"""
        Write the writer's document to a specified file.
        :param filename: file to write to
:return: nothing
"""
pass
class DdlTextWriter(DdlWriter):
"""
OpenDdlWriter which writes OpenDdlDocuments in human-readable text form.
"""
def __init__(self, document):
DdlWriter.__init__(self, document)
def write(self, filename):
# TODO: not implemented yet!
pass
# Space reserved for a specification based OpenDdlBinaryWriter ;)
# Hope there will be some specification for it some day.
|
<commit_before><commit_msg>Add initial DdlStructure, DdlDocument, DdlWriter and DdlTextWriter
Empty classes for a general structure.
Signed-off-by: Squareys <0f6a03d4883e012ba4cb2c581a68f35544703cd6@googlemail.com><commit_after>from abc import abstractmethod
__author__ = "Jonathan Hale"
class DdlStructure:
"""
An OpenDDL structure.
"""
def __init__(self, name):
self.structures = []
self.properties = dict()
self.name = name
class DdlDocument:
"""
An OpenDDL document.
"""
def __init__(self):
self.structures = []
class DdlWriter:
"""
Abstract class for classes responsible for writing OpenDdlDocuments.
"""
def __init__(self, document):
self.doc = document
def get_document(self):
"""
:return: document to be written by this writer.
"""
return self.doc
@abstractmethod
def write(self, filename):
"""
        Write the writer's document to a specified file.
        :param filename: file to write to
:return: nothing
"""
pass
class DdlTextWriter(DdlWriter):
"""
OpenDdlWriter which writes OpenDdlDocuments in human-readable text form.
"""
def __init__(self, document):
DdlWriter.__init__(self, document)
def write(self, filename):
# TODO: not implemented yet!
pass
# Space reserved for a specification based OpenDdlBinaryWriter ;)
# Hope there will be some specification for it some day.
|
|
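A usage sketch for the skeleton classes in the record above, using only the attributes they define; note that DdlTextWriter.write is still a stub in the record, so the final call is a no-op:
from pyddl import DdlDocument, DdlStructure, DdlTextWriter
doc = DdlDocument()
metric = DdlStructure('Metric')        # name is the only constructor argument
metric.properties['key'] = 'position'  # properties is a plain dict
doc.structures.append(metric)          # no add method exists yet in the record
writer = DdlTextWriter(doc)
writer.write('example.oddl')           # does nothing until write() is implemented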
4bd16a83c2cc6202edd0e2a9c3fa49df46519d59
|
scrapi/harvesters/iowaresearch.py
|
scrapi/harvesters/iowaresearch.py
|
'''
Harvester for Iowa Research Online for the SHARE project
Example API call: http://ir.uiowa.edu/do/oai/?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
class IowaresearchHarvester(OAIHarvester):
short_name = 'iowaresearch'
long_name = 'Iowa Research Online'
url = 'http://ir.uiowa.edu/do/oai/'
base_url = 'http://ir.uiowa.edu/do/oai/'
property_list = ['date', 'source', 'identifier', 'type']
timezone_granularity = True
|
Add Iowa Research Online harvester
|
Add Iowa Research Online harvester
|
Python
|
apache-2.0
|
icereval/scrapi,felliott/scrapi,jeffreyliu3230/scrapi,ostwald/scrapi,mehanig/scrapi,mehanig/scrapi,felliott/scrapi,erinspace/scrapi,erinspace/scrapi,CenterForOpenScience/scrapi,CenterForOpenScience/scrapi,fabianvf/scrapi,alexgarciac/scrapi,fabianvf/scrapi
|
Add Iowa Research Online harvester
|
'''
Harvester for the Iowa Research Online for the SHARE project
Example API call: http://ir.uiowa.edu/do/oai/?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
class IowaresearchHarvester(OAIHarvester):
short_name = 'iowaresearch'
long_name = 'Iowa Research Online'
url = 'http://ir.uiowa.edu/do/oai/'
base_url = 'http://ir.uiowa.edu/do/oai/'
property_list = ['date', 'source', 'identifier', 'type']
timezone_granularity = True
|
<commit_before><commit_msg>Add Iowa Research Online harvester<commit_after>
|
'''
Harvester for the Iowa Research Online for the SHARE project
Example API call: http://ir.uiowa.edu/do/oai/?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
class IowaresearchHarvester(OAIHarvester):
short_name = 'iowaresearch'
long_name = 'Iowa Research Online'
url = 'http://ir.uiowa.edu/do/oai/'
base_url = 'http://ir.uiowa.edu/do/oai/'
property_list = ['date', 'source', 'identifier', 'type']
timezone_granularity = True
|
Add Iowa Research Online harvester'''
Harvester for the Iowa Research Online for the SHARE project
Example API call: http://ir.uiowa.edu/do/oai/?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
class IowaresearchHarvester(OAIHarvester):
short_name = 'iowaresearch'
long_name = 'Iowa Research Online'
url = 'http://ir.uiowa.edu/do/oai/'
base_url = 'http://ir.uiowa.edu/do/oai/'
property_list = ['date', 'source', 'identifier', 'type']
timezone_granularity = True
|
<commit_before><commit_msg>Add Iowa Research Online harvester<commit_after>'''
Harvester for the Iowa Research Online for the SHARE project
Example API call: http://ir.uiowa.edu/do/oai/?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
class IowaresearchHarvester(OAIHarvester):
short_name = 'iowaresearch'
long_name = 'Iowa Research Online'
url = 'http://ir.uiowa.edu/do/oai/'
base_url = 'http://ir.uiowa.edu/do/oai/'
property_list = ['date', 'source', 'identifier', 'type']
timezone_granularity = True
|
|
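The example API call quoted in the harvester's docstring can be reproduced directly with requests; the endpoint URL comes from the record above, everything else is illustrative:

import requests

# Fetch one page of Dublin Core records from the Iowa Research Online OAI endpoint.
resp = requests.get(
    "http://ir.uiowa.edu/do/oai/",
    params={"verb": "ListRecords", "metadataPrefix": "oai_dc"},
)
resp.raise_for_status()
print(resp.text[:500])  # raw OAI-PMH XML; scrapi's OAIHarvester parses this for you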
3eb0baa7f00a3ec77cc5ebf0d43c0f6918c62161
|
enable/constraints_container.py
|
enable/constraints_container.py
|
#------------------------------------------------------------------------------
# Copyright (c) 2013, Enthought, Inc.
# All rights reserved.
#------------------------------------------------------------------------------
# traits imports
from traits.api import Dict
# local imports
from container import Container
class ConstraintsContainer(Container):
""" A Container which lays out its child components using a
constraints-based layout solver.
"""
# A dictionary of components added to this container
_component_map = Dict
#------------------------------------------------------------------------
# Public methods
#------------------------------------------------------------------------
def relayout(self):
""" Re-run the constraints solver in response to a resize or
component removal.
"""
pass
#------------------------------------------------------------------------
# Traits methods
#------------------------------------------------------------------------
def __components_items_changed(self, event):
""" Make sure components that are added can be used with constraints.
"""
# Check the added components
self._check_and_add_components(event.added)
# Remove stale components from the map
for item in event.removed:
del self._component_map[item.id]
def __components_changed(self, new):
""" Make sure components that are added can be used with constraints.
"""
# Clear the component map
self._component_map = {}
# Check the new components
self._check_and_add_components(new)
#------------------------------------------------------------------------
# Protected methods
#------------------------------------------------------------------------
def _check_and_add_components(self, components):
""" Make sure components can be used with constraints.
"""
for item in components:
if len(item.id) == 0:
msg = "Components added to a {0} must have a valid 'id' trait."
name = type(self).__name__
raise ValueError(msg.format(name))
elif item.id in self._component_map:
msg = "A Component with that id has already been added."
raise ValueError(msg)
self._component_map[item.id] = item
|
Add a ConstraintsContainer which doesn't do much yet.
|
Add a ConstraintsContainer which doesn't do much yet.
|
Python
|
bsd-3-clause
|
tommy-u/enable,tommy-u/enable,tommy-u/enable,tommy-u/enable
|
Add a ConstraintsContainer which doesn't do much yet.
|
#------------------------------------------------------------------------------
# Copyright (c) 2013, Enthought, Inc.
# All rights reserved.
#------------------------------------------------------------------------------
# traits imports
from traits.api import Dict
# local imports
from container import Container
class ConstraintsContainer(Container):
""" A Container which lays out its child components using a
constraints-based layout solver.
"""
# A dictionary of components added to this container
_component_map = Dict
#------------------------------------------------------------------------
# Public methods
#------------------------------------------------------------------------
def relayout(self):
""" Re-run the constraints solver in response to a resize or
component removal.
"""
pass
#------------------------------------------------------------------------
# Traits methods
#------------------------------------------------------------------------
def __components_items_changed(self, event):
""" Make sure components that are added can be used with constraints.
"""
# Check the added components
self._check_and_add_components(event.added)
# Remove stale components from the map
for item in event.removed:
del self._component_map[item.id]
def __components_changed(self, new):
""" Make sure components that are added can be used with constraints.
"""
# Clear the component map
self._component_map = {}
# Check the new components
self._check_and_add_components(new)
#------------------------------------------------------------------------
# Protected methods
#------------------------------------------------------------------------
def _check_and_add_components(self, components):
""" Make sure components can be used with constraints.
"""
for item in components:
if len(item.id) == 0:
msg = "Components added to a {0} must have a valid 'id' trait."
name = type(self).__name__
raise ValueError(msg.format(name))
elif item.id in self._component_map:
msg = "A Component with that id has already been added."
raise ValueError(msg)
self._component_map[item.id] = item
|
<commit_before><commit_msg>Add a ConstraintsContainer which doesn't do much yet.<commit_after>
|
#------------------------------------------------------------------------------
# Copyright (c) 2013, Enthought, Inc.
# All rights reserved.
#------------------------------------------------------------------------------
# traits imports
from traits.api import Dict
# local imports
from container import Container
class ConstraintsContainer(Container):
""" A Container which lays out its child components using a
constraints-based layout solver.
"""
# A dictionary of components added to this container
_component_map = Dict
#------------------------------------------------------------------------
# Public methods
#------------------------------------------------------------------------
def relayout(self):
""" Re-run the constraints solver in response to a resize or
component removal.
"""
pass
#------------------------------------------------------------------------
# Traits methods
#------------------------------------------------------------------------
def __components_items_changed(self, event):
""" Make sure components that are added can be used with constraints.
"""
# Check the added components
self._check_and_add_components(event.added)
# Remove stale components from the map
for item in event.removed:
del self._component_map[item.id]
def __components_changed(self, new):
""" Make sure components that are added can be used with constraints.
"""
# Clear the component map
self._component_map = {}
# Check the new components
self._check_and_add_components(new)
#------------------------------------------------------------------------
# Protected methods
#------------------------------------------------------------------------
def _check_and_add_components(self, components):
""" Make sure components can be used with constraints.
"""
for item in components:
if len(item.id) == 0:
msg = "Components added to a {0} must have a valid 'id' trait."
name = type(self).__name__
raise ValueError(msg.format(name))
elif item.id in self._component_map:
msg = "A Component with that id has already been added."
raise ValueError(msg)
self._component_map[item.id] = item
|
Add a ConstraintsContainer which doesn't do much yet.
#------------------------------------------------------------------------------
# Copyright (c) 2013, Enthought, Inc.
# All rights reserved.
#------------------------------------------------------------------------------
# traits imports
from traits.api import Dict
# local imports
from container import Container
class ConstraintsContainer(Container):
""" A Container which lays out its child components using a
constraints-based layout solver.
"""
# A dictionary of components added to this container
_component_map = Dict
#------------------------------------------------------------------------
# Public methods
#------------------------------------------------------------------------
def relayout(self):
""" Re-run the constraints solver in response to a resize or
component removal.
"""
pass
#------------------------------------------------------------------------
# Traits methods
#------------------------------------------------------------------------
def __components_items_changed(self, event):
""" Make sure components that are added can be used with constraints.
"""
# Check the added components
self._check_and_add_components(event.added)
# Remove stale components from the map
for item in event.removed:
del self._component_map[item.id]
def __components_changed(self, new):
""" Make sure components that are added can be used with constraints.
"""
# Clear the component map
self._component_map = {}
# Check the new components
self._check_and_add_components(new)
#------------------------------------------------------------------------
# Protected methods
#------------------------------------------------------------------------
def _check_and_add_components(self, components):
""" Make sure components can be used with constraints.
"""
for item in components:
if len(item.id) == 0:
msg = "Components added to a {0} must have a valid 'id' trait."
name = type(self).__name__
raise ValueError(msg.format(name))
elif item.id in self._component_map:
msg = "A Component with that id has already been added."
raise ValueError(msg)
self._component_map[item.id] = item
|
<commit_before><commit_msg>Add a ConstraintsContainer which doesn't do much yet.<commit_after>#------------------------------------------------------------------------------
# Copyright (c) 2013, Enthought, Inc.
# All rights reserved.
#------------------------------------------------------------------------------
# traits imports
from traits.api import Dict
# local imports
from container import Container
class ConstraintsContainer(Container):
""" A Container which lays out its child components using a
constraints-based layout solver.
"""
# A dictionary of components added to this container
_component_map = Dict
#------------------------------------------------------------------------
# Public methods
#------------------------------------------------------------------------
def relayout(self):
""" Re-run the constraints solver in response to a resize or
component removal.
"""
pass
#------------------------------------------------------------------------
# Traits methods
#------------------------------------------------------------------------
def __components_items_changed(self, event):
""" Make sure components that are added can be used with constraints.
"""
# Check the added components
self._check_and_add_components(event.added)
# Remove stale components from the map
for item in event.removed:
del self._component_map[item.id]
def __components_changed(self, new):
""" Make sure components that are added can be used with constraints.
"""
# Clear the component map
self._component_map = {}
# Check the new components
self._check_and_add_components(new)
#------------------------------------------------------------------------
# Protected methods
#------------------------------------------------------------------------
def _check_and_add_components(self, components):
""" Make sure components can be used with constraints.
"""
for item in components:
if len(item.id) == 0:
msg = "Components added to a {0} must have a valid 'id' trait."
name = type(self).__name__
raise ValueError(msg.format(name))
elif item.id in self._component_map:
msg = "A Component with that id has already been added."
raise ValueError(msg)
self._component_map[item.id] = item
|
|
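The id bookkeeping in _check_and_add_components is easy to exercise on its own. A standalone sketch of the same validation logic; the plain FakeComponent class stands in for an enable Component and is an assumption, not part of the record:

class FakeComponent(object):
    """Stands in for an enable Component; only the 'id' trait matters here."""
    def __init__(self, id):
        self.id = id

def check_and_add(components, component_map):
    # Mirrors ConstraintsContainer._check_and_add_components above.
    for item in components:
        if len(item.id) == 0:
            raise ValueError("Components added to a ConstraintsContainer "
                             "must have a valid 'id' trait.")
        elif item.id in component_map:
            raise ValueError("A Component with that id has already been added.")
        component_map[item.id] = item

cmap = {}
check_and_add([FakeComponent("plot"), FakeComponent("legend")], cmap)
try:
    check_and_add([FakeComponent("plot")], cmap)  # duplicate id
except ValueError as exc:
    print(exc)  # "A Component with that id has already been added."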
09fa9bc28d7265c013737b2c44c94991880877d1
|
octoprint/cura/tests/test_cura.py
|
octoprint/cura/tests/test_cura.py
|
import unittest
from cura import CuraFactory
from cura import CuraEngine
class CuraFactoryTestCase(unittest.TestCase):
def test_cura_factory(self):
fake_path = 'my/temp/path'
result = CuraFactory.create_slicer(fake_path)
self.assertEqual(fake_path, result.cura_path)
def test_cura_engine_process_file(self):
cura_engine = CuraFactory.create_slicer()
file_path = './cura/tests/test_02.stl'
config_path = './cura/tests/config'
gcode_filename= './cura/tests/output.gcode'
cura_engine.process_file(config_path, gcode_filename, file_path)
|
import unittest
from cura import CuraFactory
from cura import CuraEngine
class CuraFactoryTestCase(unittest.TestCase):
def test_cura_factory(self):
fake_path = 'my/temp/path'
result = CuraFactory.create_slicer(fake_path)
self.assertEqual(fake_path, result.cura_path)
def test_cura_engine_process_file(self):
cura_engine = CuraFactory.create_slicer()
file_path = './cura/tests/test.stl'
config_path = './cura/tests/config'
gcode_filename= './cura/tests/output.gcode'
cura_engine.process_file(config_path, gcode_filename, file_path)
|
Fix error in test path
|
Fix error in test path
|
Python
|
agpl-3.0
|
abinashk-inf/AstroBox,Mikk36/OctoPrint,EZ3-India/EZ-Remote,ymilord/OctoPrint-MrBeam,beeverycreative/BEEweb,Voxel8/OctoPrint,Catrodigious/OctoPrint-TAM,ryanneufeld/OctoPrint,ryanneufeld/OctoPrint,eddieparker/OctoPrint,mcanes/OctoPrint,Voxel8/OctoPrint,shaggythesheep/OctoPrint,nicanor-romero/OctoPrint,dansantee/OctoPrint,Jaesin/OctoPrint,sstocker46/OctoPrint,abinashk-inf/AstroBox,javivi001/OctoPrint,punkkeks/OctoPrint,ErikDeBruijn/OctoPrint,Catrodigious/OctoPrint-TAM,beeverycreative/BEEweb,ErikDeBruijn/OctoPrint,Voxel8/OctoPrint,nickverschoor/OctoPrint,bicephale/OctoPrint,MoonshineSG/OctoPrint,CapnBry/OctoPrint,nicanor-romero/OctoPrint,leductan-nguyen/RaionPi,MoonshineSG/OctoPrint,aerickson/OctoPrint,madhuni/AstroBox,mayoff/OctoPrint,CapnBry/OctoPrint,nickverschoor/OctoPrint,d42/octoprint-fork,markwal/OctoPrint,EZ3-India/EZ-Remote,spapadim/OctoPrint,SeveQ/OctoPrint,punkkeks/OctoPrint,rurkowce/octoprint-fork,JackGavin13/octoprint-test-not-finished,nicanor-romero/OctoPrint,dansantee/OctoPrint,foosel/OctoPrint,shohei/Octoprint,beeverycreative/BEEweb,dansantee/OctoPrint,uuv/OctoPrint,bicephale/OctoPrint,ymilord/OctoPrint-MrBeam,dragondgold/OctoPrint,eliasbakken/OctoPrint,Salandora/OctoPrint,Mikk36/OctoPrint,chriskoz/OctoPrint,javivi001/OctoPrint,Javierma/OctoPrint-TFG,ymilord/OctoPrint-MrBeam,mrbeam/OctoPrint,madhuni/AstroBox,ryanneufeld/OctoPrint,Javierma/OctoPrint-TFG,MolarAmbiguity/OctoPrint,senttech/OctoPrint,mayoff/OctoPrint,Skeen/OctoPrint,mayoff/OctoPrint,nickverschoor/OctoPrint,JackGavin13/octoprint-test-not-finished,MaxOLydian/OctoPrint,dragondgold/OctoPrint,senttech/OctoPrint,eliasbakken/OctoPrint,chriskoz/OctoPrint,CapnBry/OctoPrint,MaxOLydian/OctoPrint,dragondgold/OctoPrint,spapadim/OctoPrint,JackGavin13/octoprint-test-not-finished,mrbeam/OctoPrint,uuv/OctoPrint,leductan-nguyen/RaionPi,AstroPrint/AstroBox,Mikk36/OctoPrint,leductan-nguyen/RaionPi,jneves/OctoPrint,JackGavin13/octoprint-test-not-finished,mcanes/OctoPrint,sstocker46/OctoPrint,Salandora/OctoPrint,EZ3-India/EZ-Remote,MolarAmbiguity/OctoPrint,chriskoz/OctoPrint,MaxOLydian/OctoPrint,skieast/OctoPrint,alex1818/OctoPrint,beeverycreative/BEEweb,foosel/OctoPrint,foosel/OctoPrint,abinashk-inf/AstroBox,Javierma/OctoPrint-TFG,madhuni/AstroBox,skieast/OctoPrint,sstocker46/OctoPrint,ryanneufeld/OctoPrint,bicephale/OctoPrint,d42/octoprint-fork,MoonshineSG/OctoPrint,masterhou/OctoPrint,SeveQ/OctoPrint,EZ3-India/EZ-Remote,alex1818/OctoPrint,Salandora/OctoPrint,MoonshineSG/OctoPrint,masterhou/OctoPrint,ymilord/OctoPrint-MrBeam,jneves/OctoPrint,nickverschoor/OctoPrint,eddieparker/OctoPrint,Jaesin/OctoPrint,Jaesin/OctoPrint,Javierma/OctoPrint-TFG,AstroPrint/AstroBox,bicephale/OctoPrint,ymilord/OctoPrint-MrBeam,aerickson/OctoPrint,AstroPrint/AstroBox,spapadim/OctoPrint,uuv/OctoPrint,MolarAmbiguity/OctoPrint,mrbeam/OctoPrint,shaggythesheep/OctoPrint,rurkowce/octoprint-fork,C-o-r-E/OctoPrint,senttech/OctoPrint,leductan-nguyen/RaionPi,masterhou/OctoPrint,Salandora/OctoPrint,Jaesin/OctoPrint,aerickson/OctoPrint,SeveQ/OctoPrint,eliasbakken/OctoPrint,shohei/Octoprint,Skeen/OctoPrint,mcanes/OctoPrint,skieast/OctoPrint,markwal/OctoPrint,C-o-r-E/OctoPrint,Catrodigious/OctoPrint-TAM,markwal/OctoPrint,eddieparker/OctoPrint,Skeen/OctoPrint,senttech/OctoPrint,ErikDeBruijn/OctoPrint,shohei/Octoprint,shaggythesheep/OctoPrint,shohei/Octoprint,chriskoz/OctoPrint,foosel/OctoPrint,shohei/Octoprint,javivi001/OctoPrint,alex1818/OctoPrint,madhuni/AstroBox,abinashk-inf/AstroBox,C-o-r-E/OctoPrint,CapnBry/OctoPrint,jneves/OctoPrint,punkkeks/OctoPrint
|
import unittest
from cura import CuraFactory
from cura import CuraEngine
class CuraFactoryTestCase(unittest.TestCase):
def test_cura_factory(self):
fake_path = 'my/temp/path'
result = CuraFactory.create_slicer(fake_path)
self.assertEqual(fake_path, result.cura_path)
def test_cura_engine_process_file(self):
cura_engine = CuraFactory.create_slicer()
file_path = './cura/tests/test_02.stl'
config_path = './cura/tests/config'
gcode_filename= './cura/tests/output.gcode'
cura_engine.process_file(config_path, gcode_filename, file_path)
Fix error in test path
|
import unittest
from cura import CuraFactory
from cura import CuraEngine
class CuraFactoryTestCase(unittest.TestCase):
def test_cura_factory(self):
fake_path = 'my/temp/path'
result = CuraFactory.create_slicer(fake_path)
self.assertEqual(fake_path, result.cura_path)
def test_cura_engine_process_file(self):
cura_engine = CuraFactory.create_slicer()
file_path = './cura/tests/test.stl'
config_path = './cura/tests/config'
gcode_filename= './cura/tests/output.gcode'
cura_engine.process_file(config_path, gcode_filename, file_path)
|
<commit_before>
import unittest
from cura import CuraFactory
from cura import CuraEngine
class CuraFactoryTestCase(unittest.TestCase):
def test_cura_factory(self):
fake_path = 'my/temp/path'
result = CuraFactory.create_slicer(fake_path)
self.assertEqual(fake_path, result.cura_path)
def test_cura_engine_process_file(self):
cura_engine = CuraFactory.create_slicer()
file_path = './cura/tests/test_02.stl'
config_path = './cura/tests/config'
gcode_filename= './cura/tests/output.gcode'
cura_engine.process_file(config_path, gcode_filename, file_path)
<commit_msg>Fix error in test path<commit_after>
|
import unittest
from cura import CuraFactory
from cura import CuraEngine
class CuraFactoryTestCase(unittest.TestCase):
def test_cura_factory(self):
fake_path = 'my/temp/path'
result = CuraFactory.create_slicer(fake_path)
self.assertEqual(fake_path, result.cura_path)
def test_cura_engine_process_file(self):
cura_engine = CuraFactory.create_slicer()
file_path = './cura/tests/test.stl'
config_path = './cura/tests/config'
gcode_filename= './cura/tests/output.gcode'
cura_engine.process_file(config_path, gcode_filename, file_path)
|
import unittest
from cura import CuraFactory
from cura import CuraEngine
class CuraFactoryTestCase(unittest.TestCase):
def test_cura_factory(self):
fake_path = 'my/temp/path'
result = CuraFactory.create_slicer(fake_path)
self.assertEqual(fake_path, result.cura_path)
def test_cura_engine_process_file(self):
cura_engine = CuraFactory.create_slicer()
file_path = './cura/tests/test_02.stl'
config_path = './cura/tests/config'
gcode_filename= './cura/tests/output.gcode'
cura_engine.process_file(config_path, gcode_filename, file_path)
Fix error in test path
import unittest
from cura import CuraFactory
from cura import CuraEngine
class CuraFactoryTestCase(unittest.TestCase):
def test_cura_factory(self):
fake_path = 'my/temp/path'
result = CuraFactory.create_slicer(fake_path)
self.assertEqual(fake_path, result.cura_path)
def test_cura_engine_process_file(self):
cura_engine = CuraFactory.create_slicer()
file_path = './cura/tests/test.stl'
config_path = './cura/tests/config'
gcode_filename= './cura/tests/output.gcode'
cura_engine.process_file(config_path, gcode_filename, file_path)
|
<commit_before>
import unittest
from cura import CuraFactory
from cura import CuraEngine
class CuraFactoryTestCase(unittest.TestCase):
def test_cura_factory(self):
fake_path = 'my/temp/path'
result = CuraFactory.create_slicer(fake_path)
self.assertEqual(fake_path, result.cura_path)
def test_cura_engine_process_file(self):
cura_engine = CuraFactory.create_slicer()
file_path = './cura/tests/test_02.stl'
config_path = './cura/tests/config'
gcode_filename= './cura/tests/output.gcode'
cura_engine.process_file(config_path, gcode_filename, file_path)
<commit_msg>Fix error in test path<commit_after>
import unittest
from cura import CuraFactory
from cura import CuraEngine
class CuraFactoryTestCase(unittest.TestCase):
def test_cura_factory(self):
fake_path = 'my/temp/path'
result = CuraFactory.create_slicer(fake_path)
self.assertEqual(fake_path, result.cura_path)
def test_cura_engine_process_file(self):
cura_engine = CuraFactory.create_slicer()
file_path = './cura/tests/test.stl'
config_path = './cura/tests/config'
gcode_filename= './cura/tests/output.gcode'
cura_engine.process_file(config_path, gcode_filename, file_path)
|
9e38f0c54faa4b4bdfa15dd5139d562a0661a2a0
|
test/unit/builtins/test_version.py
|
test/unit/builtins/test_version.py
|
import unittest
from bfg9000.builtins.version import bfg9000_required_version, bfg9000_version
from bfg9000.versioning import bfg_version, VersionError
class TestRequiredVersion(unittest.TestCase):
def test_bfg_version(self):
bfg9000_required_version('>=0.1.0')
self.assertRaises(VersionError, bfg9000_required_version, '<=0.1.0')
def test_python_version(self):
bfg9000_required_version(python_version='>=2.7.0')
self.assertRaises(VersionError, bfg9000_required_version, None,
'<=2.0.0')
def test_both_versions(self):
bfg9000_required_version('>=0.1.0', '>=2.7.0')
self.assertRaises(VersionError, bfg9000_required_version, '<=0.1.0',
'<=2.0.0')
class TestVersion(unittest.TestCase):
def test_version(self):
self.assertEqual(bfg9000_version(), bfg_version)
|
Add unit tests for versioning builtins
|
Add unit tests for versioning builtins
|
Python
|
bsd-3-clause
|
jimporter/bfg9000,jimporter/bfg9000,jimporter/bfg9000,jimporter/bfg9000
|
Add unit tests for versioning builtins
|
import unittest
from bfg9000.builtins.version import bfg9000_required_version, bfg9000_version
from bfg9000.versioning import bfg_version, VersionError
class TestRequiredVersion(unittest.TestCase):
def test_bfg_version(self):
bfg9000_required_version('>=0.1.0')
self.assertRaises(VersionError, bfg9000_required_version, '<=0.1.0')
def test_python_version(self):
bfg9000_required_version(python_version='>=2.7.0')
self.assertRaises(VersionError, bfg9000_required_version, None,
'<=2.0.0')
def test_both_versions(self):
bfg9000_required_version('>=0.1.0', '>=2.7.0')
self.assertRaises(VersionError, bfg9000_required_version, '<=0.1.0',
'<=2.0.0')
class TestVersion(unittest.TestCase):
def test_version(self):
self.assertEqual(bfg9000_version(), bfg_version)
|
<commit_before><commit_msg>Add unit tests for versioning builtins<commit_after>
|
import unittest
from bfg9000.builtins.version import bfg9000_required_version, bfg9000_version
from bfg9000.versioning import bfg_version, VersionError
class TestRequiredVersion(unittest.TestCase):
def test_bfg_version(self):
bfg9000_required_version('>=0.1.0')
self.assertRaises(VersionError, bfg9000_required_version, '<=0.1.0')
def test_python_version(self):
bfg9000_required_version(python_version='>=2.7.0')
self.assertRaises(VersionError, bfg9000_required_version, None,
'<=2.0.0')
def test_both_versions(self):
bfg9000_required_version('>=0.1.0', '>=2.7.0')
self.assertRaises(VersionError, bfg9000_required_version, '<=0.1.0',
'<=2.0.0')
class TestVersion(unittest.TestCase):
def test_version(self):
self.assertEqual(bfg9000_version(), bfg_version)
|
Add unit tests for versioning builtins
import unittest
from bfg9000.builtins.version import bfg9000_required_version, bfg9000_version
from bfg9000.versioning import bfg_version, VersionError
class TestRequiredVersion(unittest.TestCase):
def test_bfg_version(self):
bfg9000_required_version('>=0.1.0')
self.assertRaises(VersionError, bfg9000_required_version, '<=0.1.0')
def test_python_version(self):
bfg9000_required_version(python_version='>=2.7.0')
self.assertRaises(VersionError, bfg9000_required_version, None,
'<=2.0.0')
def test_both_versions(self):
bfg9000_required_version('>=0.1.0', '>=2.7.0')
self.assertRaises(VersionError, bfg9000_required_version, '<=0.1.0',
'<=2.0.0')
class TestVersion(unittest.TestCase):
def test_version(self):
self.assertEqual(bfg9000_version(), bfg_version)
|
<commit_before><commit_msg>Add unit tests for versioning builtins<commit_after>import unittest
from bfg9000.builtins.version import bfg9000_required_version, bfg9000_version
from bfg9000.versioning import bfg_version, VersionError
class TestRequiredVersion(unittest.TestCase):
def test_bfg_version(self):
bfg9000_required_version('>=0.1.0')
self.assertRaises(VersionError, bfg9000_required_version, '<=0.1.0')
def test_python_version(self):
bfg9000_required_version(python_version='>=2.7.0')
self.assertRaises(VersionError, bfg9000_required_version, None,
'<=2.0.0')
def test_both_versions(self):
bfg9000_required_version('>=0.1.0', '>=2.7.0')
self.assertRaises(VersionError, bfg9000_required_version, '<=0.1.0',
'<=2.0.0')
class TestVersion(unittest.TestCase):
def test_version(self):
self.assertEqual(bfg9000_version(), bfg_version)
|
|
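The tests rely on specifier strings such as '>=0.1.0'. bfg9000's own versioning module is not shown in this record, so the following only illustrates the specifier semantics the tests exercise, using the packaging library (an assumption; bfg9000 may implement this differently). The assertRaises cases pass because the installed bfg9000 and Python versions exceed the upper bounds:

from packaging.specifiers import SpecifierSet
from packaging.version import Version

def required_version(current, spec):
    # Raise when `current` falls outside the specifier string, e.g. '>=0.1.0'.
    if Version(current) not in SpecifierSet(spec):
        raise RuntimeError("version %s does not satisfy %s" % (current, spec))

required_version("0.2.0", ">=0.1.0")      # passes, like bfg9000_required_version('>=0.1.0')
try:
    required_version("0.2.0", "<=0.1.0")  # fails, like the assertRaises cases above
except RuntimeError as exc:
    print(exc)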
14759bf0a40025745c11f4110f88e5f58115d2c9
|
ci_helpers_usage.py
|
ci_helpers_usage.py
|
import requests
from github import Github
from common import get_credentials
username, password = get_credentials()
gh = Github(username, password)
gh_search_result = gh.search_code('filename:.travis.yml "astropy/ci-helpers"')
gh_repo = []
gh_name = []
for i in gh_search_result:
gh_repo.append(i.repository.full_name)
gh_name.append(i.repository.name)
gh_name = set(gh_name)
pypi_name = []
for i in gh_name:
response = requests.get("http://pypi.python.org/pypi/{}/json".format(i))
if response.status_code == 200:
pypi_name.append(i)
print(len(gh_name), len(pypi_name))
|
Add script to check ci-helpers usage
|
Add script to check ci-helpers usage
|
Python
|
bsd-3-clause
|
astropy/astropy-tools,astropy/astropy-tools
|
Add script to check ci-helpers usage
|
import requests
from github import Github
from common import get_credentials
username, password = get_credentials()
gh = Github(username, password)
gh_search_result = gh.search_code('filename:.travis.yml "astropy/ci-helpers"')
gh_repo = []
gh_name = []
for i in gh_search_result:
gh_repo.append(i.repository.full_name)
gh_name.append(i.repository.name)
gh_name = set(gh_name)
pypi_name = []
for i in gh_name:
response = requests.get("http://pypi.python.org/pypi/{}/json".format(i))
if response.status_code == 200:
pypi_name.append(i)
print(len(gh_name), len(pypi_name))
|
<commit_before><commit_msg>Add script to check ci-helpers usage<commit_after>
|
import requests
from github import Github
from common import get_credentials
username, password = get_credentials()
gh = Github(username, password)
gh_search_result = gh.search_code('filename:.travis.yml "astropy/ci-helpers"')
gh_repo = []
gh_name = []
for i in gh_search_result:
gh_repo.append(i.repository.full_name)
gh_name.append(i.repository.name)
gh_name = set(gh_name)
pypi_name = []
for i in gh_name:
response = requests.get("http://pypi.python.org/pypi/{}/json".format(i))
if response.status_code == 200:
pypi_name.append(i)
print(len(gh_name), len(pypi_name))
|
Add script to check ci-helpers usage
import requests
from github import Github
from common import get_credentials
username, password = get_credentials()
gh = Github(username, password)
gh_search_result = gh.search_code('filename:.travis.yml "astropy/ci-helpers"')
gh_repo = []
gh_name = []
for i in gh_search_result:
gh_repo.append(i.repository.full_name)
gh_name.append(i.repository.name)
gh_name = set(gh_name)
pypi_name = []
for i in gh_name:
response = requests.get("http://pypi.python.org/pypi/{}/json".format(i))
if response.status_code == 200:
pypi_name.append(i)
print(len(gh_name), len(pypi_name))
|
<commit_before><commit_msg>Add script to check ci-helpers usage<commit_after>import requests
from github import Github
from common import get_credentials
username, password = get_credentials()
gh = Github(username, password)
gh_search_result = gh.search_code('filename:.travis.yml "astropy/ci-helpers"')
gh_repo = []
gh_name = []
for i in gh_search_result:
gh_repo.append(i.repository.full_name)
gh_name.append(i.repository.name)
gh_name = set(gh_name)
pypi_name = []
for i in gh_name:
response = requests.get("http://pypi.python.org/pypi/{}/json".format(i))
if response.status_code == 200:
pypi_name.append(i)
print(len(gh_name), len(pypi_name))
|
|
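The PyPI existence check in this script boils down to a single JSON endpoint probe; factored out as a helper (same URL pattern as above, which predates the pypi.org move):

import requests

def exists_on_pypi(name):
    # A 200 from the JSON endpoint means a project with this name is registered.
    response = requests.get("http://pypi.python.org/pypi/{}/json".format(name))
    return response.status_code == 200

print(exists_on_pypi("astropy"))           # True
print(exists_on_pypi("no-such-pkg-xyz"))   # False (404)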
62bce0ee3ea80f41d7184c6199defba55fc257f2
|
tests/external/py2/testfixture_test.py
|
tests/external/py2/testfixture_test.py
|
#!/usr/bin/env python
# ----------------------------------------------------------------------
# Copyright (C) 2013 Numenta Inc. All rights reserved.
#
# The information and source code contained herein is the
# exclusive property of Numenta Inc. No part of this software
# may be used, reproduced, stored or distributed in any form,
# without explicit written authorization from Numenta Inc.
# ----------------------------------------------------------------------
"""
Unit tests for our dependencies in the pytest package; at the time of this
writing, we were using an unreleased version of pytest that added support for
the unittest setUpModule fixture and friends. Some of our tests rely on
setUpModule. Once, there was a conflict with pytest installation in our build
system, and an older version of pytest was installed that didn't support
setUpModule, which resulted in subtle side-effects in some of these tests.
"""
import unittest2 as unittest
g_setUpModuleCalled = False
def setUpModule():
global g_setUpModuleCalled
g_setUpModuleCalled = True
class TestPytest(unittest.TestCase):
def testSetUpModuleCalled(self):
self.assertTrue(g_setUpModuleCalled)
if __name__ == '__main__':
unittest.main()
|
Make sure setUpModule is called by the test framework. We brought in pytest-2.4.0.dev8 for that specific functionality. However, one time we regressed, and our tests started misbehaving. So, this test is here to keep us honest.
|
Make sure setUpModule is called by the test framework. We brought in pytest-2.4.0.dev8 for that specific functionality. However, one time we regressed, and our tests started misbehaving. So, this test is here to keep us honest.
|
Python
|
agpl-3.0
|
passiweinberger/nupic,lscheinkman/nupic,subutai/nupic,akhilaananthram/nupic,brev/nupic,BeiLuoShiMen/nupic,cngo-github/nupic,cngo-github/nupic,glorizen/nupic,blueburningcoder/nupic,rhyolight/nupic,rcrowder/nupic,marionleborgne/nupic,lscheinkman/nupic,alfonsokim/nupic,metaml/nupic,chanceraine/nupic,cogmission/nupic,EricSB/nupic,numenta/nupic,GeraldLoeffler/nupic,darshanthaker/nupic,SaganBolliger/nupic,jcasner/nupic,darshanthaker/nupic,ben-hopps/nupic,numenta/nupic,cogmission/nupic,metaml/nupic,scottpurdy/nupic,pulinagrawal/nupic,allanino/nupic,akhilaananthram/nupic,brev/nupic,SaganBolliger/nupic,BeiLuoShiMen/nupic,go-bears/nupic,brev/nupic,arhik/nupic,markneville/nupic,ywcui1990/nupic,sambitgaan/nupic,chen0031/nupic,sambitgaan/nupic,scottpurdy/nupic,arhik/nupic,rcrowder/nupic,passiweinberger/nupic,markneville/nupic,rhyolight/nupic,chanceraine/nupic,cogmission/nupic,glorizen/nupic,markneville/nupic,breznak/nupic,mcanthony/nupic,pap/nupic,chen0031/nupic,loretoparisi/nupic,EricSB/nupic,numenta-ci/nupic,marionleborgne/nupic,wanghaven/nupic,go-bears/nupic,chen0031/nupic,virneo/nupic,vitaly-krugl/nupic,chanceraine/nupic,ben-hopps/nupic,ben-hopps/nupic,breznak/nupic,badlogicmanpreet/nupic,fergalbyrne/nupic,neuroidss/nupic,virneo/nupic,jcasner/nupic,vamsirajendra/nupic,vamsirajendra/nupic,elkingtonmcb/nupic,marionleborgne/nupic,subutai/nupic,lscheinkman/nupic,glorizen/nupic,numenta/nupic,neuroidss/nupic,arhik/nupic,rayNymous/nupic,vitaly-krugl/nupic,loretoparisi/nupic,rhyolight/nupic,pulinagrawal/nupic,cngo-github/nupic,SaganBolliger/nupic,wanghaven/nupic,pap/nupic,BoltzmannBrain/nupic,blueburningcoder/nupic,vitaly-krugl/nupic,metaml/nupic,wanghaven/nupic,sambitgaan/nupic,BoltzmannBrain/nupic,GeraldLoeffler/nupic,numenta-ci/nupic,elkingtonmcb/nupic,pap/nupic,blueburningcoder/nupic,eranchetz/nupic,mcanthony/nupic,badlogicmanpreet/nupic,BeiLuoShiMen/nupic,scottpurdy/nupic,rayNymous/nupic,eranchetz/nupic,rayNymous/nupic,badlogicmanpreet/nupic,alfonsokim/nupic,breznak/nupic,EricSB/nupic,virneo/nupic,subutai/nupic,allanino/nupic,go-bears/nupic,loretoparisi/nupic,runt18/nupic,vamsirajendra/nupic,alfonsokim/nupic,BoltzmannBrain/nupic,ywcui1990/nupic,numenta-ci/nupic,fergalbyrne/nupic,mcanthony/nupic,eranchetz/nupic,fergalbyrne/nupic,elkingtonmcb/nupic,darshanthaker/nupic,neuroidss/nupic,allanino/nupic,rcrowder/nupic,runt18/nupic,pulinagrawal/nupic,ywcui1990/nupic,passiweinberger/nupic,jcasner/nupic,GeraldLoeffler/nupic,akhilaananthram/nupic,runt18/nupic
|
Make sure setUpModule is called by the test framework. We brought in pytest-2.4.0.dev8 for that specific functionality. However, one time we regressed, and our tests started misbehaving. So, this test is here to keep us honest.
|
#!/usr/bin/env python
# ----------------------------------------------------------------------
# Copyright (C) 2013 Numenta Inc. All rights reserved.
#
# The information and source code contained herein is the
# exclusive property of Numenta Inc. No part of this software
# may be used, reproduced, stored or distributed in any form,
# without explicit written authorization from Numenta Inc.
# ----------------------------------------------------------------------
"""
Unit tests for our dependencies in the pytest package; at the time of this
writing, we were using an unreleased version of pytest that added support for
the unittest setUpModule fixture and friends. Some of our tests rely on
setUpModule. Once, there was a conflict with pytest installation in our build
system, and an older version of pytest was installed that didn't support
setUpModule, which resulted in subtle side-effects in some of these tests.
"""
import unittest2 as unittest
g_setUpModuleCalled = False
def setUpModule():
global g_setUpModuleCalled
g_setUpModuleCalled = True
class TestPytest(unittest.TestCase):
def testSetUpModuleCalled(self):
self.assertTrue(g_setUpModuleCalled)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Make sure setUpModule is called by the test framework. We brought in pytest-2.4.0.dev8 for that specific functionality. However, one time we regressed, and our tests started misbehaving. So, this test is here to keep us honest.<commit_after>
|
#!/usr/bin/env python
# ----------------------------------------------------------------------
# Copyright (C) 2013 Numenta Inc. All rights reserved.
#
# The information and source code contained herein is the
# exclusive property of Numenta Inc. No part of this software
# may be used, reproduced, stored or distributed in any form,
# without explicit written authorization from Numenta Inc.
# ----------------------------------------------------------------------
"""
Unit tests for our dependencies in the pytest package; at the time of this
writing, we were using an unreleased version of pytest that added support for
the unittest setUpModule fixture and friends. Some of our tests rely on
setUpModule. Once, there was a conflict with pytest installation in our build
system, and an older version of pytest was installed that didn't support
setUpModule, which resulted in subtle side-effects in some of these tests.
"""
import unittest2 as unittest
g_setUpModuleCalled = False
def setUpModule():
global g_setUpModuleCalled
g_setUpModuleCalled = True
class TestPytest(unittest.TestCase):
def testSetUpModuleCalled(self):
self.assertTrue(g_setUpModuleCalled)
if __name__ == '__main__':
unittest.main()
|
Make sure setUpModule is called by the test framework. We brought in pytest-2.4.0.dev8 for that specific functionality. However, one time we regressed, and our tests started misbehaving. So, this test is here to keep us honest.
#!/usr/bin/env python
# ----------------------------------------------------------------------
# Copyright (C) 2013 Numenta Inc. All rights reserved.
#
# The information and source code contained herein is the
# exclusive property of Numenta Inc. No part of this software
# may be used, reproduced, stored or distributed in any form,
# without explicit written authorization from Numenta Inc.
# ----------------------------------------------------------------------
"""
Unit tests for our dependencies in the pytest package; at the time of this
writing, we were using an unreleased version of pytest that added support for
the unittest setUpModule fixture and friends. Some of our tests rely on
setUpModule. Once, there was a conflict with pytest installation in our build
system, and an older version of pytest was installed that didn't support
setUpModule, which resulted in subtle side-effects in some of these tests.
"""
import unittest2 as unittest
g_setUpModuleCalled = False
def setUpModule():
global g_setUpModuleCalled
g_setUpModuleCalled = True
class TestPytest(unittest.TestCase):
def testSetUpModuleCalled(self):
self.assertTrue(g_setUpModuleCalled)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Make sure setUpModule is called by the test framework. We brought in pytest-2.4.0.dev8 for that specific functionality. However, one time we regressed, and our tests started misbehaving. So, this test is here to keep us honest.<commit_after>#!/usr/bin/env python
# ----------------------------------------------------------------------
# Copyright (C) 2013 Numenta Inc. All rights reserved.
#
# The information and source code contained herein is the
# exclusive property of Numenta Inc. No part of this software
# may be used, reproduced, stored or distributed in any form,
# without explicit written authorization from Numenta Inc.
# ----------------------------------------------------------------------
"""
Unit tests for our dependencies in the pytest package; at the time of this
writing, we were using an unreleased version of pytest that added support for
the unittest setUpModule fixture and friends. Some of our tests rely on
setUpModule. Once, there was a conflict with pytest installation in our build
system, and an older version of pytest was installed that didn't support
setUpModule, which resulted in suble side-effects in some of these tests.
"""
import unittest2 as unittest
g_setUpModuleCalled = False
def setUpModule():
global g_setUpModuleCalled
g_setUpModuleCalled = True
class TestPytest(unittest.TestCase):
def testSetUpModuleCalled(self):
self.assertTrue(g_setUpModuleCalled)
if __name__ == '__main__':
unittest.main()
|
|
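The fixture being guarded here is standard unittest behaviour: setUpModule runs exactly once, before the first test in the module. A minimal self-contained demonstration, independent of the nupic test above (modern stdlib unittest; the record's code uses unittest2 for Python 2 compatibility):

import unittest

calls = []

def setUpModule():
    calls.append("module")  # executed once, before the first test runs

class Demo(unittest.TestCase):
    def test_a(self):
        self.assertEqual(calls, ["module"])

    def test_b(self):
        self.assertEqual(calls, ["module"])  # still one call, not two

if __name__ == "__main__":
    unittest.main()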
b9dfb22f5676226a77b89a994843a27b43823391
|
tests/functional/test_waiter_config.py
|
tests/functional/test_waiter_config.py
|
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from nose.tools import assert_equals
import botocore.session
def test_lint_waiter_configs():
session = botocore.session.get_session()
for service_name in session.get_available_services():
client = session.create_client(service_name, 'us-east-1')
service_model = client.meta.service_model
for waiter_name in client.waiter_names:
waiter = client.get_waiter(waiter_name)
yield _lint_single_waiter, waiter, service_model
def _lint_single_waiter(waiter, service_model):
operation_name = waiter.config.operation
# Needs to reference an existing operation name.
if operation_name not in service_model.operation_names:
raise AssertionError("Waiter config references unknown "
"operation: %s" % operation_name)
# Needs to have at least one acceptor.
if not waiter.config.acceptors:
raise AssertionError("Waiter config must have at least "
"one acceptor state: %s" % waiter.name)
# Additional things to add:
# 1. Verify the error acceptors correspond to a 'code' in the model
# 2. Verify JMESPath expressions can resolve to something in
# the response.
|
Add start of basic waiter model validation
|
Add start of basic waiter model validation
Makes it quicker to review contributions for waiter
configs.
|
Python
|
apache-2.0
|
boto/botocore,pplu/botocore
|
Add start of basic waiter model validation
Makes it quicker to review contributions for waiter
configs.
|
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from nose.tools import assert_equals
import botocore.session
def test_lint_waiter_configs():
session = botocore.session.get_session()
for service_name in session.get_available_services():
client = session.create_client(service_name, 'us-east-1')
service_model = client.meta.service_model
for waiter_name in client.waiter_names:
waiter = client.get_waiter(waiter_name)
yield _lint_single_waiter, waiter, service_model
def _lint_single_waiter(waiter, service_model):
operation_name = waiter.config.operation
# Needs to reference an existing operation name.
if operation_name not in service_model.operation_names:
raise AssertionError("Waiter config references unknown "
"operation: %s" % operation_name)
# Needs to have at least one acceptor.
if not waiter.config.acceptors:
raise AssertionError("Waiter config must have at least "
"one acceptor state: %s" % waiter.name)
# Additional things to add:
# 1. Verify the error acceptors correspond to a 'code' in the model
# 2. Verify JMESPath expressions can resolve to something in
# the response.
|
<commit_before><commit_msg>Add start of basic waiter model validation
Makes it quicker to review contributions for waiter
configs.<commit_after>
|
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from nose.tools import assert_equals
import botocore.session
def test_lint_waiter_configs():
session = botocore.session.get_session()
for service_name in session.get_available_services():
client = session.create_client(service_name, 'us-east-1')
service_model = client.meta.service_model
for waiter_name in client.waiter_names:
waiter = client.get_waiter(waiter_name)
yield _lint_single_waiter, waiter, service_model
def _lint_single_waiter(waiter, service_model):
operation_name = waiter.config.operation
# Needs to reference an existing operation name.
if operation_name not in service_model.operation_names:
raise AssertionError("Waiter config references unknown "
"operation: %s" % operation_name)
# Needs to have at least one acceptor.
if not waiter.config.acceptors:
raise AssertionError("Waiter config must have at least "
"one acceptor state: %s" % waiter.name)
# Additional things to add:
# 1. Verify the error acceptors correspond to a 'code' in the model
# 2. Verify JMESPath expressions can resolve to something in
# the response.
|
Add start of basic waiter model validation
Makes it quicker to review contributions for waiter
configs.
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from nose.tools import assert_equals
import botocore.session
def test_lint_waiter_configs():
session = botocore.session.get_session()
for service_name in session.get_available_services():
client = session.create_client(service_name, 'us-east-1')
service_model = client.meta.service_model
for waiter_name in client.waiter_names:
waiter = client.get_waiter(waiter_name)
yield _lint_single_waiter, waiter, service_model
def _lint_single_waiter(waiter, service_model):
operation_name = waiter.config.operation
# Needs to reference an existing operation name.
if operation_name not in service_model.operation_names:
raise AssertionError("Waiter config references unknown "
"operation: %s" % operation_name)
# Needs to have at least one acceptor.
if not waiter.config.acceptors:
raise AssertionError("Waiter config must have at least "
"one acceptor state: %s" % waiter.name)
# Additional things to add:
# 1. Verify the error acceptors correspond to a 'code' in the model
# 2. Verify JMESPath expressions can resolve to something in
# the response.
|
<commit_before><commit_msg>Add start of basic waiter model validation
Makes it quicker to review contributions for waiter
configs.<commit_after># Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from nose.tools import assert_equals
import botocore.session
def test_lint_waiter_configs():
session = botocore.session.get_session()
for service_name in session.get_available_services():
client = session.create_client(service_name, 'us-east-1')
service_model = client.meta.service_model
for waiter_name in client.waiter_names:
waiter = client.get_waiter(waiter_name)
yield _lint_single_waiter, waiter, service_model
def _lint_single_waiter(waiter, service_model):
operation_name = waiter.config.operation
# Needs to reference an existing operation name.
if operation_name not in service_model.operation_names:
raise AssertionError("Waiter config references unknown "
"operation: %s" % operation_name)
# Needs to have at least one acceptor.
if not waiter.config.acceptors:
raise AssertionError("Waiter config must have at least "
"one acceptor state: %s" % waiter.name)
# Additional things to add:
# 1. Verify the error acceptors correspond to a 'code' in the model
# 2. Verify JMESPath expressions can resolve to something in
# the response.
|
|
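One way the record's TODO item on JMESPath expressions might be approached: path-style acceptors carry an expression that can at least be syntax-checked with the jmespath library. The acceptor attribute names (matcher, argument) are assumptions based on botocore's waiter model, not shown in the record:

import jmespath
from jmespath.exceptions import ParseError

def lint_acceptor_expressions(waiter):
    # Syntax-check the JMESPath expression on every path-style acceptor.
    # Attribute names are assumed from botocore's AcceptorConfig.
    for acceptor in waiter.config.acceptors:
        if acceptor.matcher in ('path', 'pathAll', 'pathAny'):
            try:
                jmespath.compile(acceptor.argument)
            except ParseError:
                raise AssertionError(
                    "Waiter %s has an invalid JMESPath expression: %r"
                    % (waiter.name, acceptor.argument))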
d72961536570695ec1a6a160f118aff51b9b1328
|
cra_helper/views.py
|
cra_helper/views.py
|
from django.views.decorators.csrf import csrf_exempt
from proxy.views import proxy_view
from cra_helper import CRA_URL
@csrf_exempt
def proxy_cra_requests(request, path):
'''
Proxy various requests sent by Create-React-App projects in dev mode ("npm start"), within
Django-hosted views, to the Create-React-App liveserver
Works well with the following re_path definitions in your project's urls.py:
re_path(r'^sockjs-node/(?P<path>.*)$', proxy_sockjs),
re_path(r'^__webpack_dev_server__/(?P<path>.*)$', proxy_sockjs),
'''
path = request.path
url = '{}{}'.format(CRA_URL, path)
return proxy_view(request, url)
|
Add a reverse-proxy view for hot-reloading
|
Add a reverse-proxy view for hot-reloading
|
Python
|
mit
|
MasterKale/django-cra-helper
|
Add a reverse-proxy view for hot-reloading
|
from django.views.decorators.csrf import csrf_exempt
from proxy.views import proxy_view
from cra_helper import CRA_URL
@csrf_exempt
def proxy_cra_requests(request, path):
'''
Proxy various requests sent by Create-React-App projects in dev mode ("npm start"), within
Django-hosted views, to the Create-React-App liveserver
Works well with the following re_path definitions in your project's urls.py:
re_path(r'^sockjs-node/(?P<path>.*)$', proxy_sockjs),
re_path(r'^__webpack_dev_server__/(?P<path>.*)$', proxy_sockjs),
'''
path = request.path
url = '{}{}'.format(CRA_URL, path)
return proxy_view(request, url)
|
<commit_before><commit_msg>Add a reverse-proxy view for hot-reloading<commit_after>
|
from django.views.decorators.csrf import csrf_exempt
from proxy.views import proxy_view
from cra_helper import CRA_URL
@csrf_exempt
def proxy_cra_requests(request, path):
'''
Proxy various requests sent by Create-React-App projects in dev mode ("npm start"), within
Django-hosted views, to the Create-React-App liveserver
Works well with the following re_path definitions in your project's urls.py:
re_path(r'^sockjs-node/(?P<path>.*)$', proxy_sockjs),
re_path(r'^__webpack_dev_server__/(?P<path>.*)$', proxy_sockjs),
'''
path = request.path
url = '{}{}'.format(CRA_URL, path)
return proxy_view(request, url)
|
Add a reverse-proxy view for hot-reloading
from django.views.decorators.csrf import csrf_exempt
from proxy.views import proxy_view
from cra_helper import CRA_URL
@csrf_exempt
def proxy_cra_requests(request, path):
'''
Proxy various requests sent by Create-React-App projects in dev mode ("npm start"), within
Django-hosted views, to the Create-React-App liveserver
Works well with the following re_path definitions in your project's urls.py:
re_path(r'^sockjs-node/(?P<path>.*)$', proxy_sockjs),
re_path(r'^__webpack_dev_server__/(?P<path>.*)$', proxy_sockjs),
'''
path = request.path
url = '{}{}'.format(CRA_URL, path)
return proxy_view(request, url)
|
<commit_before><commit_msg>Add a reverse-proxy view for hot-reloading<commit_after>from django.views.decorators.csrf import csrf_exempt
from proxy.views import proxy_view
from cra_helper import CRA_URL
@csrf_exempt
def proxy_cra_requests(request, path):
'''
Proxy various requests sent by Create-React-App projects in dev mode ("npm start"), within
Django-hosted views, to the Create-React-App liveserver
Works well with the following re_path definitions in your project's urls.py:
re_path(r'^sockjs-node/(?P<path>.*)$', proxy_sockjs),
re_path(r'^__webpack_dev_server__/(?P<path>.*)$', proxy_sockjs),
'''
path = request.path
url = '{}{}'.format(CRA_URL, path)
return proxy_view(request, url)
|
|
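Note that the docstring names the view proxy_sockjs while the function defined is proxy_cra_requests. Wiring the actual function into the suggested URL patterns would look roughly like this (a sketch; re_path requires Django 2.0+):

from django.urls import re_path
from cra_helper.views import proxy_cra_requests

urlpatterns = [
    # Forward webpack-dev-server traffic to the CRA liveserver in dev mode.
    re_path(r'^sockjs-node/(?P<path>.*)$', proxy_cra_requests),
    re_path(r'^__webpack_dev_server__/(?P<path>.*)$', proxy_cra_requests),
]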
3a63b8986b347091be613a23a1029bb744eb20f1
|
tests/passthrough/test_passthrough.py
|
tests/passthrough/test_passthrough.py
|
import __builtin__
from pytest import raises
from fuse import FuseOSError
from mock import MagicMock, patch, call
from gitfs.views import PassthroughView
class TestPassthrough(object):
def setup(self):
def mock_super(*args, **kwargs):
if args and issubclass(PassthroughView, args[0]):
return MagicMock()
return original_super(*args, **kwargs)
__builtin__.original_super = super
__builtin__.super = mock_super
self.repo_path = '/the/root/path'
def teardown(self):
__builtin__.super = __builtin__.original_super
del __builtin__.original_super
def test_access(self):
mocked_access = MagicMock()
mocked_access.side_effect = [True, True, False]
with patch('gitfs.views.passthrough.os.access', mocked_access):
view = PassthroughView(repo_path=self.repo_path)
# normal, easy test
view.access('good/relative/path', 777)
# test if _full_path works
view.access('/good/relative/path', 777)
# test if proper exception is raised
with raises(FuseOSError):
view.access('/relative/path', 777)
mocked_access.assert_has_calls([call('/the/root/path/good/relative/path', 777),
call('/the/root/path/good/relative/path', 777),
call('/the/root/path/relative/path', 777)])
assert mocked_access.call_count == 3
|
Add test for the access method.
|
Add test for the access method.
|
Python
|
apache-2.0
|
ksmaheshkumar/gitfs,PressLabs/gitfs,rowhit/gitfs,PressLabs/gitfs,bussiere/gitfs
|
Add test for the access method.
|
import __builtin__
from pytest import raises
from fuse import FuseOSError
from mock import MagicMock, patch, call
from gitfs.views import PassthroughView
class TestPassthrough(object):
def setup(self):
def mock_super(*args, **kwargs):
if args and issubclass(PassthroughView, args[0]):
return MagicMock()
return original_super(*args, **kwargs)
__builtin__.original_super = super
__builtin__.super = mock_super
self.repo_path = '/the/root/path'
def teardown(self):
__builtin__.super = __builtin__.original_super
del __builtin__.original_super
def test_access(self):
mocked_access = MagicMock()
mocked_access.side_effect = [True, True, False]
with patch('gitfs.views.passthrough.os.access', mocked_access):
view = PassthroughView(repo_path=self.repo_path)
# normal, easy test
view.access('good/relative/path', 777)
# test if _full_path works
view.access('/good/relative/path', 777)
# test if proper exception is raised
with raises(FuseOSError):
view.access('/relative/path', 777)
mocked_access.assert_has_calls([call('/the/root/path/good/relative/path', 777),
call('/the/root/path/good/relative/path', 777),
call('/the/root/path/relative/path', 777)])
assert mocked_access.call_count == 3
|
<commit_before><commit_msg>Add test for the access method.<commit_after>
|
import __builtin__
from pytest import raises
from fuse import FuseOSError
from mock import MagicMock, patch, call
from gitfs.views import PassthroughView
class TestPassthrough(object):
def setup(self):
def mock_super(*args, **kwargs):
if args and issubclass(PassthroughView, args[0]):
return MagicMock()
return original_super(*args, **kwargs)
__builtin__.original_super = super
__builtin__.super = mock_super
self.repo_path = '/the/root/path'
def teardown(self):
__builtin__.super = __builtin__.original_super
del __builtin__.original_super
def test_access(self):
mocked_access = MagicMock()
mocked_access.side_effect = [True, True, False]
with patch('gitfs.views.passthrough.os.access', mocked_access):
view = PassthroughView(repo_path=self.repo_path)
# normal, easy test
view.access('good/relative/path', 777)
# test if _full_path works
view.access('/good/relative/path', 777)
# test if proper exception is raised
with raises(FuseOSError):
view.access('/relative/path', 777)
mocked_access.assert_has_calls([call('/the/root/path/good/relative/path', 777),
call('/the/root/path/good/relative/path', 777),
call('/the/root/path/relative/path', 777)])
assert mocked_access.call_count == 3
|
Add test for the access method.
import __builtin__
from pytest import raises
from fuse import FuseOSError
from mock import MagicMock, patch, call
from gitfs.views import PassthroughView
class TestPassthrough(object):
def setup(self):
def mock_super(*args, **kwargs):
if args and issubclass(PassthroughView, args[0]):
return MagicMock()
return original_super(*args, **kwargs)
__builtin__.original_super = super
__builtin__.super = mock_super
self.repo_path = '/the/root/path'
def teardown(self):
__builtin__.super = __builtin__.original_super
del __builtin__.original_super
def test_access(self):
mocked_access = MagicMock()
mocked_access.side_effect = [True, True, False]
with patch('gitfs.views.passthrough.os.access', mocked_access):
view = PassthroughView(repo_path=self.repo_path)
# normal, easy test
view.access('good/relative/path', 777)
# test if _full_path works
view.access('/good/relative/path', 777)
# test if proper exception is raised
with raises(FuseOSError):
view.access('/relative/path', 777)
mocked_access.assert_has_calls([call('/the/root/path/good/relative/path', 777),
call('/the/root/path/good/relative/path', 777),
call('/the/root/path/relative/path', 777)])
assert mocked_access.call_count == 3
|
<commit_before><commit_msg>Add test for the access method.<commit_after>import __builtin__
from pytest import raises
from fuse import FuseOSError
from mock import MagicMock, patch, call
from gitfs.views import PassthroughView
class TestPassthrough(object):
def setup(self):
def mock_super(*args, **kwargs):
if args and issubclass(PassthroughView, args[0]):
return MagicMock()
return original_super(*args, **kwargs)
__builtin__.original_super = super
__builtin__.super = mock_super
self.repo_path = '/the/root/path'
def teardown(self):
__builtin__.super = __builtin__.original_super
del __builtin__.original_super
def test_access(self):
mocked_access = MagicMock()
mocked_access.side_effect = [True, True, False]
with patch('gitfs.views.passthrough.os.access', mocked_access):
view = PassthroughView(repo_path=self.repo_path)
# normal, easy test
view.access('good/relative/path', 777)
# test if _full_path works
view.access('/good/relative/path', 777)
# test if proper exception is raised
with raises(FuseOSError):
view.access('/relative/path', 777)
mocked_access.assert_has_calls([call('/the/root/path/good/relative/path', 777),
call('/the/root/path/good/relative/path', 777),
call('/the/root/path/relative/path', 777)])
assert mocked_access.call_count == 3
|
|
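A minimal, self-contained sketch of the patch-plus-side_effect pattern the test above relies on; it uses the stdlib unittest.mock rather than the mock package, and the paths are illustrative only:

import os
from unittest.mock import MagicMock, patch

mocked = MagicMock(side_effect=[True, True, False])  # queued return values
with patch('os.access', mocked):
    assert os.access('/a', os.R_OK) is True   # first queued value
    assert os.access('/b', os.R_OK) is True   # second queued value
    assert os.access('/c', os.R_OK) is False  # third queued value
assert mocked.call_count == 3  # every queued value was consumed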
eb1ae63b247b668453b55e0622a5aa6018bb82ab
|
semillas_backend/users/management/commands/anonymize_all_data.py
|
semillas_backend/users/management/commands/anonymize_all_data.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals, print_function
from factory import Faker
# Django imports
from django.conf import settings
from django.core.management.base import BaseCommand
from semillas_backend.users.models import User
class Command(BaseCommand):
help = "This command will create dummy tokens for OAuth based providers of `allauth`"
def handle(self, *args, **kwargs):
faker = Faker('email')
for user in User.objects.filter(is_superuser=False):
user.email = faker.generate('')
user.telegram_id = ''
user.phone = ''
user.save()
|
Add script to anonymize all data
|
Add script to anonymize all data
|
Python
|
mit
|
Semillas/semillas_backend,Semillas/semillas_platform,Semillas/semillas_platform,Semillas/semillas_backend,Semillas/semillas_backend,Semillas/semillas_platform,Semillas/semillas_platform,Semillas/semillas_backend
|
Add script to anonymize all data
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals, print_function
from factory import Faker
# Django imports
from django.conf import settings
from django.core.management.base import BaseCommand
from semillas_backend.users.models import User
class Command(BaseCommand):
help = "This command will create dummy tokens for OAuth based providers of `allauth`"
def handle(self, *args, **kwargs):
faker = Faker('email')
for user in User.objects.filter(is_superuser=False):
user.email = faker.generate('')
user.telegram_id = ''
user.phone = ''
user.save()
|
<commit_before><commit_msg>Add script to anonymize all data<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals, print_function
from factory import Faker
# Django imports
from django.conf import settings
from django.core.management.base import BaseCommand
from semillas_backend.users.models import User
class Command(BaseCommand):
help = "This command will create dummy tokens for OAuth based providers of `allauth`"
def handle(self, *args, **kwargs):
faker = Faker('email')
for user in User.objects.filter(is_superuser=False):
user.email = faker.generate('')
user.telegram_id = ''
user.phone = ''
user.save()
|
Add script to anonymize all data# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals, print_function
from factory import Faker
# Django imports
from django.conf import settings
from django.core.management.base import BaseCommand
from semillas_backend.users.models import User
class Command(BaseCommand):
help = "This command will create dummy tokens for OAuth based providers of `allauth`"
def handle(self, *args, **kwargs):
faker = Faker('email')
for user in User.objects.filter(is_superuser=False):
user.email = faker.generate('')
user.telegram_id = ''
user.phone = ''
user.save()
|
<commit_before><commit_msg>Add script to anonymize all data<commit_after># -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals, print_function
from factory import Faker
# Django imports
from django.conf import settings
from django.core.management.base import BaseCommand
from semillas_backend.users.models import User
class Command(BaseCommand):
help = "This command will create dummy tokens for OAuth based providers of `allauth`"
def handle(self, *args, **kwargs):
faker = Faker('email')
for user in User.objects.filter(is_superuser=False):
user.email = faker.generate('')
user.telegram_id = ''
user.phone = ''
user.save()
|
|
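A standalone sketch of the anonymization loop above, using the plain faker package instead of factory_boy's Faker wrapper; the queryset argument and field names mirror the commit, but everything else here is assumed:

from faker import Faker

fake = Faker()

def anonymize(users):
    # Overwrite personally identifiable fields with generated or empty values.
    for user in users:
        user.email = fake.email()
        user.telegram_id = ''
        user.phone = ''
        user.save()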
dffa63270d9bcd54fad9beb6847ad1e8aa9f86ba
|
hs_core/management/commands/debug_composite_resource.py
|
hs_core/management/commands/debug_composite_resource.py
|
"""This prints the state of a logical file.
* By default, prints errors on stdout.
* Optional argument --log: logs output to system log.
"""
from django.core.management.base import BaseCommand
from hs_core.models import BaseResource, ResourceFile
def debug_resource(short_id):
""" Debug view for resource depicts output of various integrity checking scripts """
try:
res = BaseResource.objects.get(short_id=short_id)
except BaseResource.DoesNotExist:
print("{} does not exist".format(short_id))
resource = res.get_content_model()
assert resource, (res, res.content_model)
if resource.resource_type == 'CompositeResource':
istorage = resource.get_irods_storage()
resource.create_aggregation_xml_documents()
print("resource {}".format(resource.short_id))
for f in ResourceFile.objects.filter(object_id=resource.id):
if f.has_logical_file and f.logical_file.is_single_file_aggregation:
print(" {} is single file aggregation".format(f.short_path))
class Command(BaseCommand):
help = "Print debugging information about logical files."
def add_arguments(self, parser):
# a list of resource id's: none does nothing.
parser.add_argument('resource_ids', nargs='*', type=str)
# Named (optional) arguments
parser.add_argument(
'--log',
action='store_true', # True for presence, False for absence
dest='log', # value is options['log']
help='log errors to system log',
)
def handle(self, *args, **options):
if len(options['resource_ids']) > 0: # an array of resource short_id to check.
for rid in options['resource_ids']:
debug_resource(rid)
else:
for r in BaseResource.objects.filter(resource_type="CompositeResource"):
debug_resource(r.short_id)
print("No resources to check.")
|
Debug the contents of composite resources after conversion.
|
Debug the contents of composite resources after conversion.
|
Python
|
bsd-3-clause
|
hydroshare/hydroshare,hydroshare/hydroshare,hydroshare/hydroshare,hydroshare/hydroshare,hydroshare/hydroshare
|
Debug the contents of composite resources after conversion.
|
"""This prints the state of a logical file.
* By default, prints errors on stdout.
* Optional argument --log: logs output to system log.
"""
from django.core.management.base import BaseCommand
from hs_core.models import BaseResource, ResourceFile
def debug_resource(short_id):
""" Debug view for resource depicts output of various integrity checking scripts """
try:
res = BaseResource.objects.get(short_id=short_id)
except BaseResource.DoesNotExist:
print("{} does not exist".format(short_id))
resource = res.get_content_model()
assert resource, (res, res.content_model)
if resource.resource_type == 'CompositeResource':
istorage = resource.get_irods_storage()
resource.create_aggregation_xml_documents()
print("resource {}".format(resource.short_id))
for f in ResourceFile.objects.filter(object_id=resource.id):
if f.has_logical_file and f.logical_file.is_single_file_aggregation:
print(" {} is single file aggregation".format(f.short_path))
class Command(BaseCommand):
help = "Print debugging information about logical files."
def add_arguments(self, parser):
# a list of resource id's: none does nothing.
parser.add_argument('resource_ids', nargs='*', type=str)
# Named (optional) arguments
parser.add_argument(
'--log',
action='store_true', # True for presence, False for absence
dest='log', # value is options['log']
help='log errors to system log',
)
def handle(self, *args, **options):
if len(options['resource_ids']) > 0: # an array of resource short_id to check.
for rid in options['resource_ids']:
debug_resource(rid)
else:
for r in BaseResource.objects.filter(resource_type="CompositeResource"):
debug_resource(r.short_id)
print("No resources to check.")
|
<commit_before><commit_msg>Debug the contents of composite resources after conversion.<commit_after>
|
"""This prints the state of a logical file.
* By default, prints errors on stdout.
* Optional argument --log: logs output to system log.
"""
from django.core.management.base import BaseCommand
from hs_core.models import BaseResource, ResourceFile
def debug_resource(short_id):
""" Debug view for resource depicts output of various integrity checking scripts """
try:
res = BaseResource.objects.get(short_id=short_id)
except BaseResource.DoesNotExist:
print("{} does not exist".format(short_id))
resource = res.get_content_model()
assert resource, (res, res.content_model)
if resource.resource_type == 'CompositeResource':
istorage = resource.get_irods_storage()
resource.create_aggregation_xml_documents()
print("resource {}".format(resource.short_id))
for f in ResourceFile.objects.filter(object_id=resource.id):
if f.has_logical_file and f.logical_file.is_single_file_aggregation:
print(" {} is single file aggregation".format(f.short_path))
class Command(BaseCommand):
help = "Print debugging information about logical files."
def add_arguments(self, parser):
# a list of resource id's: none does nothing.
parser.add_argument('resource_ids', nargs='*', type=str)
# Named (optional) arguments
parser.add_argument(
'--log',
action='store_true', # True for presence, False for absence
dest='log', # value is options['log']
help='log errors to system log',
)
def handle(self, *args, **options):
if len(options['resource_ids']) > 0: # an array of resource short_id to check.
for rid in options['resource_ids']:
debug_resource(rid)
else:
for r in BaseResource.objects.filter(resource_type="CompositeResource"):
debug_resource(r.short_id)
print("No resources to check.")
|
Debug the contents of composite resources after conversion."""This prints the state of a logical file.
* By default, prints errors on stdout.
* Optional argument --log: logs output to system log.
"""
from django.core.management.base import BaseCommand
from hs_core.models import BaseResource, ResourceFile
def debug_resource(short_id):
""" Debug view for resource depicts output of various integrity checking scripts """
try:
res = BaseResource.objects.get(short_id=short_id)
except BaseResource.DoesNotExist:
print("{} does not exist".format(short_id))
resource = res.get_content_model()
assert resource, (res, res.content_model)
if resource.resource_type == 'CompositeResource':
istorage = resource.get_irods_storage()
resource.create_aggregation_xml_documents()
print("resource {}".format(resource.short_id))
for f in ResourceFile.objects.filter(object_id=resource.id):
if f.has_logical_file and f.logical_file.is_single_file_aggregation:
print(" {} is single file aggregation".format(f.short_path))
class Command(BaseCommand):
help = "Print debugging information about logical files."
def add_arguments(self, parser):
# a list of resource id's: none does nothing.
parser.add_argument('resource_ids', nargs='*', type=str)
# Named (optional) arguments
parser.add_argument(
'--log',
action='store_true', # True for presence, False for absence
dest='log', # value is options['log']
help='log errors to system log',
)
def handle(self, *args, **options):
if len(options['resource_ids']) > 0: # an array of resource short_id to check.
for rid in options['resource_ids']:
debug_resource(rid)
else:
for r in BaseResource.objects.filter(resource_type="CompositeResource"):
debug_resource(r.short_id)
print("No resources to check.")
|
<commit_before><commit_msg>Debug the contents of composite resources after conversion.<commit_after>"""This prints the state of a logical file.
* By default, prints errors on stdout.
* Optional argument --log: logs output to system log.
"""
from django.core.management.base import BaseCommand
from hs_core.models import BaseResource, ResourceFile
def debug_resource(short_id):
""" Debug view for resource depicts output of various integrity checking scripts """
try:
res = BaseResource.objects.get(short_id=short_id)
except BaseResource.DoesNotExist:
print("{} does not exist".format(short_id))
resource = res.get_content_model()
assert resource, (res, res.content_model)
if resource.resource_type == 'CompositeResource':
istorage = resource.get_irods_storage()
resource.create_aggregation_xml_documents()
print("resource {}".format(resource.short_id))
for f in ResourceFile.objects.filter(object_id=resource.id):
if f.has_logical_file and f.logical_file.is_single_file_aggregation:
print(" {} is single file aggregation".format(f.short_path))
class Command(BaseCommand):
help = "Print debugging information about logical files."
def add_arguments(self, parser):
# a list of resource id's: none does nothing.
parser.add_argument('resource_ids', nargs='*', type=str)
# Named (optional) arguments
parser.add_argument(
'--log',
action='store_true', # True for presence, False for absence
dest='log', # value is options['log']
help='log errors to system log',
)
def handle(self, *args, **options):
if len(options['resource_ids']) > 0: # an array of resource short_id to check.
for rid in options['resource_ids']:
debug_resource(rid)
else:
for r in BaseResource.objects.filter(resource_type="CompositeResource"):
debug_resource(r.short_id)
print("No resources to check.")
|
|
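The command above follows the standard BaseCommand shape; a minimal skeleton of that pattern (positional ids plus an optional --log flag, with an illustrative body) looks like this:

from django.core.management.base import BaseCommand

class Command(BaseCommand):
    help = "Skeleton: echo the ids passed on the command line."

    def add_arguments(self, parser):
        parser.add_argument('resource_ids', nargs='*', type=str)
        parser.add_argument('--log', action='store_true', dest='log',
                            help='log errors to system log')

    def handle(self, *args, **options):
        for rid in options['resource_ids']:
            self.stdout.write(rid)  # options['log'] would gate logging here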
fd1062be8acbb09ca60dd0d87e657d9417d9b4d7
|
scripts/checkInstalledFiles.py
|
scripts/checkInstalledFiles.py
|
#!/usr/bin/env python
import os
import sys
import stat
import difflib
import inspect
import getopt
def referenceFile():
if sys.platform.startswith('linux'):
filename = 'makeinstall.linux'
elif sys.platform.startswith('win'):
filename = 'makeinstall.windows'
elif sys.platform == 'darwin':
filename = 'makeinstall.darwin'
else:
print "Unsupported platform: ", sys.platform
sys.exit(-1)
scriptDir = os.path.dirname(inspect.getfile(inspect.currentframe()))
return scriptDir+'/../tests/reference/'+filename
def readReferenceFile():
# read file with old diff
    f = open(referenceFile(), 'r')
filelist = []
for line in f:
filelist.append(line)
f.close()
return filelist
def generateReference(rootdir):
fileDict = {}
for root, subFolders, files in os.walk(rootdir):
for file in (subFolders + files):
f = os.path.join(root,file)
perm = os.stat(f).st_mode & 0777
if os.path.getsize(f) == 0:
print "'%s' is empty!" % f
fileDict[f[len(rootdir)+1:]] = perm
# generate new list
formattedlist = []
for name, perm in sorted(fileDict.iteritems()):
formattedlist.append("%o %s\n"% (perm, name))
    return formattedlist
def usage():
print "Usage: %s [-g | --generate] <dir>" % os.path.basename(sys.argv[0])
def main():
generateMode = False
try:
opts, args = getopt.gnu_getopt(sys.argv[1:], 'hg', ['help', 'generate'])
    except getopt.GetoptError, err:
print str(err)
usage()
sys.exit(2)
for o, a in opts:
if o in ('-h', '--help'):
usage()
sys.exit(0)
if o in ('-g', '--generate'):
generateMode = True
if len(args) != 1:
usage()
sys.exit(2)
rootdir = args[0]
if generateMode:
f = open(referenceFile(), 'w')
for item in generateReference(rootdir):
f.write(item)
f.close()
print "Do not forget to commit", referenceFile()
else:
hasDiff = False
for line in difflib.context_diff(readReferenceFile(), generateReference(rootdir), fromfile=referenceFile(), tofile="generated"):
sys.stdout.write(line)
hasDiff = True
if hasDiff:
sys.exit(1)
if __name__ == "__main__":
main()
|
Add script that can check builds for completeness.
|
Add script that can check builds for completeness.
It checks against os-dependent lists which will be submitted
in follow-up commits.
Change-Id: Ieb40b19dbd85c30b28062b46320a6ee60ba672af
Reviewed-by: Bill King <2ab503764bfba23a6f2e273493fc021dc3d4cd8f@nokia.com>
|
Python
|
lgpl-2.1
|
xianian/qt-creator,farseerri/git_code,xianian/qt-creator,kuba1/qtcreator,kuba1/qtcreator,syntheticpp/qt-creator,richardmg/qtcreator,malikcjm/qtcreator,farseerri/git_code,duythanhphan/qt-creator,xianian/qt-creator,colede/qtcreator,danimo/qt-creator,malikcjm/qtcreator,jonnor/qt-creator,xianian/qt-creator,Distrotech/qtcreator,omniacreator/qtcreator,darksylinc/qt-creator,kuba1/qtcreator,syntheticpp/qt-creator,jonnor/qt-creator,AltarBeastiful/qt-creator,darksylinc/qt-creator,syntheticpp/qt-creator,KDAB/KDAB-Creator,amyvmiwei/qt-creator,danimo/qt-creator,hdweiss/qt-creator-visualizer,syntheticpp/qt-creator,martyone/sailfish-qtcreator,AltarBeastiful/qt-creator,AltarBeastiful/qt-creator,amyvmiwei/qt-creator,colede/qtcreator,maui-packages/qt-creator,azat/qtcreator,duythanhphan/qt-creator,maui-packages/qt-creator,omniacreator/qtcreator,azat/qtcreator,colede/qtcreator,kuba1/qtcreator,maui-packages/qt-creator,KDAB/KDAB-Creator,maui-packages/qt-creator,colede/qtcreator,ostash/qt-creator-i18n-uk,duythanhphan/qt-creator,AltarBeastiful/qt-creator,darksylinc/qt-creator,danimo/qt-creator,richardmg/qtcreator,maui-packages/qt-creator,ostash/qt-creator-i18n-uk,amyvmiwei/qt-creator,omniacreator/qtcreator,richardmg/qtcreator,xianian/qt-creator,KDAB/KDAB-Creator,hdweiss/qt-creator-visualizer,azat/qtcreator,darksylinc/qt-creator,KDE/android-qt-creator,KDE/android-qt-creator,malikcjm/qtcreator,AltarBeastiful/qt-creator,malikcjm/qtcreator,danimo/qt-creator,syntheticpp/qt-creator,colede/qtcreator,hdweiss/qt-creator-visualizer,azat/qtcreator,Distrotech/qtcreator,amyvmiwei/qt-creator,KDE/android-qt-creator,jonnor/qt-creator,farseerri/git_code,KDE/android-qt-creator,ostash/qt-creator-i18n-uk,xianian/qt-creator,duythanhphan/qt-creator,danimo/qt-creator,malikcjm/qtcreator,duythanhphan/qt-creator,darksylinc/qt-creator,AltarBeastiful/qt-creator,KDAB/KDAB-Creator,duythanhphan/qt-creator,martyone/sailfish-qtcreator,kuba1/qtcreator,hdweiss/qt-creator-visualizer,richardmg/qtcreator,danimo/qt-creator,martyone/sailfish-qtcreator,hdweiss/qt-creator-visualizer,amyvmiwei/qt-creator,xianian/qt-creator,KDE/android-qt-creator,KDE/android-qt-creator,KDAB/KDAB-Creator,xianian/qt-creator,amyvmiwei/qt-creator,syntheticpp/qt-creator,Distrotech/qtcreator,jonnor/qt-creator,maui-packages/qt-creator,amyvmiwei/qt-creator,danimo/qt-creator,azat/qtcreator,martyone/sailfish-qtcreator,hdweiss/qt-creator-visualizer,farseerri/git_code,ostash/qt-creator-i18n-uk,martyone/sailfish-qtcreator,ostash/qt-creator-i18n-uk,jonnor/qt-creator,KDE/android-qt-creator,colede/qtcreator,Distrotech/qtcreator,richardmg/qtcreator,darksylinc/qt-creator,amyvmiwei/qt-creator,martyone/sailfish-qtcreator,kuba1/qtcreator,darksylinc/qt-creator,kuba1/qtcreator,omniacreator/qtcreator,duythanhphan/qt-creator,farseerri/git_code,KDAB/KDAB-Creator,malikcjm/qtcreator,omniacreator/qtcreator,ostash/qt-creator-i18n-uk,KDE/android-qt-creator,Distrotech/qtcreator,richardmg/qtcreator,syntheticpp/qt-creator,omniacreator/qtcreator,azat/qtcreator,farseerri/git_code,colede/qtcreator,farseerri/git_code,ostash/qt-creator-i18n-uk,jonnor/qt-creator,omniacreator/qtcreator,AltarBeastiful/qt-creator,xianian/qt-creator,farseerri/git_code,martyone/sailfish-qtcreator,kuba1/qtcreator,Distrotech/qtcreator,Distrotech/qtcreator,martyone/sailfish-qtcreator,kuba1/qtcreator,danimo/qt-creator,AltarBeastiful/qt-creator,malikcjm/qtcreator,martyone/sailfish-qtcreator,danimo/qt-creator,maui-packages/qt-creator,richardmg/qtcreator,darksylinc/qt-creator
|
Add script that can check builds for completeness.
It checks against os-dependent lists which will be submitted
in follow-up commits.
Change-Id: Ieb40b19dbd85c30b28062b46320a6ee60ba672af
Reviewed-by: Bill King <2ab503764bfba23a6f2e273493fc021dc3d4cd8f@nokia.com>
|
#!/usr/bin/env python
import os
import sys
import stat
import difflib
import inspect
import getopt
def referenceFile():
if sys.platform.startswith('linux'):
filename = 'makeinstall.linux'
elif sys.platform.startswith('win'):
filename = 'makeinstall.windows'
elif sys.platform == 'darwin':
filename = 'makeinstall.darwin'
else:
print "Unsupported platform: ", sys.platform
sys.exit(-1)
scriptDir = os.path.dirname(inspect.getfile(inspect.currentframe()))
return scriptDir+'/../tests/reference/'+filename
def readReferenceFile():
# read file with old diff
    f = open(referenceFile(), 'r')
filelist = []
for line in f:
filelist.append(line)
f.close()
return filelist
def generateReference(rootdir):
fileDict = {}
for root, subFolders, files in os.walk(rootdir):
for file in (subFolders + files):
f = os.path.join(root,file)
perm = os.stat(f).st_mode & 0777
if os.path.getsize(f) == 0:
print "'%s' is empty!" % f
fileDict[f[len(rootdir)+1:]] = perm
# generate new list
formattedlist = []
for name, perm in sorted(fileDict.iteritems()):
formattedlist.append("%o %s\n"% (perm, name))
    return formattedlist
def usage():
print "Usage: %s [-g | --generate] <dir>" % os.path.basename(sys.argv[0])
def main():
generateMode = False
try:
opts, args = getopt.gnu_getopt(sys.argv[1:], 'hg', ['help', 'generate'])
    except getopt.GetoptError, err:
print str(err)
usage()
sys.exit(2)
for o, a in opts:
if o in ('-h', '--help'):
usage()
sys.exit(0)
if o in ('-g', '--generate'):
generateMode = True
if len(args) != 1:
usage()
sys.exit(2)
rootdir = args[0]
if generateMode:
f = open(referenceFile(), 'w')
for item in generateReference(rootdir):
f.write(item)
f.close()
print "Do not forget to commit", referenceFile()
else:
hasDiff = False
for line in difflib.context_diff(readReferenceFile(), generateReference(rootdir), fromfile=referenceFile(), tofile="generated"):
sys.stdout.write(line)
hasDiff = True
if hasDiff:
sys.exit(1)
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add script that can check builds for completeness.
It checks against os-dependent lists which will be submitted
in follow-up commits.
Change-Id: Ieb40b19dbd85c30b28062b46320a6ee60ba672af
Reviewed-by: Bill King <2ab503764bfba23a6f2e273493fc021dc3d4cd8f@nokia.com><commit_after>
|
#!/usr/bin/env python
import os
import sys
import stat
import difflib
import inspect
import getopt
def referenceFile():
if sys.platform.startswith('linux'):
filename = 'makeinstall.linux'
elif sys.platform.startswith('win'):
filename = 'makeinstall.windows'
elif sys.platform == 'darwin':
filename = 'makeinstall.darwin'
else:
print "Unsupported platform: ", sys.platform
sys.exit(-1)
scriptDir = os.path.dirname(inspect.getfile(inspect.currentframe()))
return scriptDir+'/../tests/reference/'+filename
def readReferenceFile():
# read file with old diff
    f = open(referenceFile(), 'r')
filelist = []
for line in f:
filelist.append(line)
f.close()
return filelist
def generateReference(rootdir):
fileDict = {}
for root, subFolders, files in os.walk(rootdir):
for file in (subFolders + files):
f = os.path.join(root,file)
perm = os.stat(f).st_mode & 0777
if os.path.getsize(f) == 0:
print "'%s' is empty!" % f
fileDict[f[len(rootdir)+1:]] = perm
# generate new list
formattedlist = []
for name, perm in sorted(fileDict.iteritems()):
formattedlist.append("%o %s\n"% (perm, name))
    return formattedlist
def usage():
print "Usage: %s [-g | --generate] <dir>" % os.path.basename(sys.argv[0])
def main():
generateMode = False
try:
opts, args = getopt.gnu_getopt(sys.argv[1:], 'hg', ['help', 'generate'])
    except getopt.GetoptError, err:
print str(err)
usage()
sys.exit(2)
for o, a in opts:
if o in ('-h', '--help'):
usage()
sys.exit(0)
if o in ('-g', '--generate'):
generateMode = True
if len(args) != 1:
usage()
sys.exit(2)
rootdir = args[0]
if generateMode:
f = open(referenceFile(), 'w')
for item in generateReference(rootdir):
f.write(item)
f.close()
print "Do not forget to commit", referenceFile()
else:
hasDiff = False
for line in difflib.context_diff(readReferenceFile(), generateReference(rootdir), fromfile=referenceFile(), tofile="generated"):
sys.stdout.write(line)
hasDiff = True
if hasDiff:
sys.exit(1)
if __name__ == "__main__":
main()
|
Add script that can check builds for completeness.
It checks against os-dependent lists which will be submitted
in follow-up commits.
Change-Id: Ieb40b19dbd85c30b28062b46320a6ee60ba672af
Reviewed-by: Bill King <2ab503764bfba23a6f2e273493fc021dc3d4cd8f@nokia.com>#!/usr/bin/env python
import os
import sys
import stat
import difflib
import inspect
import getopt
def referenceFile():
if sys.platform.startswith('linux'):
filename = 'makeinstall.linux'
elif sys.platform.startswith('win'):
filename = 'makeinstall.windows'
elif sys.platform == 'darwin':
filename = 'makeinstall.darwin'
else:
print "Unsupported platform: ", sys.platform
sys.exit(-1)
scriptDir = os.path.dirname(inspect.getfile(inspect.currentframe()))
return scriptDir+'/../tests/reference/'+filename
def readReferenceFile():
# read file with old diff
    f = open(referenceFile(), 'r')
filelist = []
for line in f:
filelist.append(line)
f.close()
return filelist
def generateReference(rootdir):
fileDict = {}
for root, subFolders, files in os.walk(rootdir):
for file in (subFolders + files):
f = os.path.join(root,file)
perm = os.stat(f).st_mode & 0777
if os.path.getsize(f) == 0:
print "'%s' is empty!" % f
fileDict[f[len(rootdir)+1:]] = perm
# generate new list
formattedlist = []
for name, perm in sorted(fileDict.iteritems()):
formattedlist.append("%o %s\n"% (perm, name))
    return formattedlist
def usage():
print "Usage: %s [-g | --generate] <dir>" % os.path.basename(sys.argv[0])
def main():
generateMode = False
try:
opts, args = getopt.gnu_getopt(sys.argv[1:], 'hg', ['help', 'generate'])
    except getopt.GetoptError, err:
print str(err)
usage()
sys.exit(2)
for o, a in opts:
if o in ('-h', '--help'):
usage()
sys.exit(0)
if o in ('-g', '--generate'):
generateMode = True
if len(args) != 1:
usage()
sys.exit(2)
rootdir = args[0]
if generateMode:
f = open(referenceFile(), 'w')
for item in generateReference(rootdir):
f.write(item)
f.close()
print "Do not forget to commit", referenceFile()
else:
hasDiff = False
for line in difflib.context_diff(readReferenceFile(), generateReference(rootdir), fromfile=referenceFile(), tofile="generated"):
sys.stdout.write(line)
hasDiff = True
if hasDiff:
sys.exit(1)
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add script that can check builds for completeness.
It checks against os-dependent lists which will be submitted
in follow-up commits.
Change-Id: Ieb40b19dbd85c30b28062b46320a6ee60ba672af
Reviewed-by: Bill King <2ab503764bfba23a6f2e273493fc021dc3d4cd8f@nokia.com><commit_after>#!/usr/bin/env python
import os
import sys
import stat
import difflib
import inspect
import getopt
def referenceFile():
if sys.platform.startswith('linux'):
filename = 'makeinstall.linux'
elif sys.platform.startswith('win'):
filename = 'makeinstall.windows'
elif sys.platform == 'darwin':
filename = 'makeinstall.darwin'
else:
print "Unsupported platform: ", sys.platform
sys.exit(-1)
scriptDir = os.path.dirname(inspect.getfile(inspect.currentframe()))
return scriptDir+'/../tests/reference/'+filename
def readReferenceFile():
# read file with old diff
    f = open(referenceFile(), 'r')
filelist = []
for line in f:
filelist.append(line)
f.close()
return filelist
def generateReference(rootdir):
fileDict = {}
for root, subFolders, files in os.walk(rootdir):
for file in (subFolders + files):
f = os.path.join(root,file)
perm = os.stat(f).st_mode & 0777
if os.path.getsize(f) == 0:
print "'%s' is empty!" % f
fileDict[f[len(rootdir)+1:]] = perm
# generate new list
formattedlist = []
for name, perm in sorted(fileDict.iteritems()):
formattedlist.append("%o %s\n"% (perm, name))
    return formattedlist
def usage():
print "Usage: %s [-g | --generate] <dir>" % os.path.basename(sys.argv[0])
def main():
generateMode = False
try:
opts, args = getopt.gnu_getopt(sys.argv[1:], 'hg', ['help', 'generate'])
    except getopt.GetoptError, err:
print str(err)
usage()
sys.exit(2)
for o, a in opts:
if o in ('-h', '--help'):
usage()
sys.exit(0)
if o in ('-g', '--generate'):
generateMode = True
if len(args) != 1:
usage()
sys.exit(2)
rootdir = args[0]
if generateMode:
f = open(referenceFile(), 'w')
for item in generateReference(rootdir):
f.write(item)
f.close()
print "Do not forget to commit", referenceFile()
else:
hasDiff = False
for line in difflib.context_diff(readReferenceFile(), generateReference(rootdir), fromfile=referenceFile(), tofile="generated"):
sys.stdout.write(line)
hasDiff = True
if hasDiff:
sys.exit(1)
if __name__ == "__main__":
main()
|
|
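The script's exit status hinges on difflib.context_diff producing any output; here is a Python 3 reproduction with two illustrative permission lists (the script itself is Python 2):

import difflib
import sys

reference = ['755 bin/tool\n', '644 share/doc.txt\n']
generated = ['755 bin/tool\n', '600 share/doc.txt\n']

has_diff = False
for line in difflib.context_diff(reference, generated,
                                 fromfile='reference', tofile='generated'):
    sys.stdout.write(line)  # emit the context diff line by line
    has_diff = True
print('exit status would be', 1 if has_diff else 0)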
52191143671d2d9311f978c4f3ba043807b918e8
|
singularity_frobenius.py
|
singularity_frobenius.py
|
# -*- coding: utf-8 -*-
"""
Created on Thu Aug 21 13:23:31 2014
@author: Jens von der Linden
Implements Frobenius expansion around a singularity to determine the "small"
solution and check the Suydam condition.
"""
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future.builtins import (ascii, bytes, chr, dict, filter, hex, input,
int, map, next, oct, open, pow, range, round,
str, super, zip)
"""Python 3.x compatibility"""
import numpy as np
def alpha_func(r, b_z, b_z_prime, b_theta, b_theta_prime):
r"""
Return alpha for Frobenius solution.
"""
mu = b_theta/(r*b_z)
mu_prime = (r*b_z*b_theta_prime - b_theta*(b_z + r*b_z_prime)) / (r*b_z)**2
return r*b_theta**2*b_z**2/(b_theta**2 + b_z**2)*(mu_prime / mu)**2
def beta_func(b_z, b_theta, p_prime):
r"""
Return beta for Frobenius solution.
"""
return 2*b_theta/(b_theta + b_z)**2 * p_prime
def nu_1_2(alpha, beta):
r"""
Return exponents of Frobenius solution.
"""
nu_1 = 0.5 + 0.5*np.sqrt(1. + 4.*beta/alpha)
nu_2 = 0.5 - 0.5*np.sqrt(1. + 4.*beta/alpha)
return nu_1, nu_2
def suydam_stable(alpha, beta):
r"""
    Return True or False for Suydam stability.
    """
    return alpha + 4.*beta > 0.
def small_solution(r, r_sing, nu_1, nu_2):
r"""
Returns xi and xi_der of the small solution close to a singularity.
"""
if nu_1 > nu_2:
return ((r-r_sing)**nu_2, nu_2*(r-r_sing)**(nu_2 - 1.))
else:
return ((r-r_sing)**nu_1, nu_1*(r-r_sing)**(nu_1 - 1.))
|
Add Frobenius solution methods for singular points.
|
Add Frobenius solution methods for singular points.
|
Python
|
mit
|
jensv/fluxtubestability,jensv/fluxtubestability
|
Add Frobenius solution methods for singular points.
|
# -*- coding: utf-8 -*-
"""
Created on Thu Aug 21 13:23:31 2014
@author: Jens von der Linden
Implements Frobenius expansion around a singularity to determine the "small"
solution and check the Suydam condition.
"""
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future.builtins import (ascii, bytes, chr, dict, filter, hex, input,
int, map, next, oct, open, pow, range, round,
str, super, zip)
"""Python 3.x compatibility"""
import numpy as np
def alpha_func(r, b_z, b_z_prime, b_theta, b_theta_prime):
r"""
Return alpha for Frobenius solution.
"""
mu = b_theta/(r*b_z)
mu_prime = (r*b_z*b_theta_prime - b_theta*(b_z + r*b_z_prime)) / (r*b_z)**2
return r*b_theta**2*b_z**2/(b_theta**2 + b_z**2)*(mu_prime / mu)**2
def beta_func(b_z, b_theta, p_prime):
r"""
Return beta for Frobenius solution.
"""
return 2*b_theta/(b_theta + b_z)**2 * p_prime
def nu_1_2(alpha, beta):
r"""
Return exponents of Frobenius solution.
"""
nu_1 = 0.5 + 0.5*np.sqrt(1. + 4.*beta/alpha)
nu_2 = 0.5 - 0.5*np.sqrt(1. + 4.*beta/alpha)
return nu_1, nu_2
def suydam_stable(alpha, beta):
r"""
    Return True or False for Suydam stability.
    """
    return alpha + 4.*beta > 0.
def small_solution(r, r_sing, nu_1, nu_2):
r"""
Returns xi and xi_der of the small solution close to a singularity.
"""
if nu_1 > nu_2:
return ((r-r_sing)**nu_2, nu_2*(r-r_sing)**(nu_2 - 1.))
else:
return ((r-r_sing)**nu_1, nu_1*(r-r_sing)**(nu_1 - 1.))
|
<commit_before><commit_msg>Add Frobenius solution methods for singular points.<commit_after>
|
# -*- coding: utf-8 -*-
"""
Created on Thu Aug 21 13:23:31 2014
@author: Jens von der Linden
Implements Frobenius expansion around a singularity to determine the "small"
solution and check the Suydam condition.
"""
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future.builtins import (ascii, bytes, chr, dict, filter, hex, input,
int, map, next, oct, open, pow, range, round,
str, super, zip)
"""Python 3.x compatibility"""
import numpy as np
def alpha_func(r, b_z, b_z_prime, b_theta, b_theta_prime):
r"""
Return alpha for Frobenius solution.
"""
mu = b_theta/(r*b_z)
mu_prime = (r*b_z*b_theta_prime - b_theta*(b_z + r*b_z_prime)) / (r*b_z)**2
return r*b_theta**2*b_z**2/(b_theta**2 + b_z**2)*(mu_prime / mu)**2
def beta_func(b_z, b_theta, p_prime):
r"""
Return beta for Frobenius solution.
"""
return 2*b_theta/(b_theta + b_z)**2 * p_prime
def nu_1_2(alpha, beta):
r"""
Return exponents of Frobenius solution.
"""
nu_1 = 0.5 + 0.5*np.sqrt(1. + 4.*beta/alpha)
nu_2 = 0.5 - 0.5*np.sqrt(1. + 4.*beta/alpha)
return nu_1, nu_2
def suydam_stable(alpha, beta):
r"""
    Return True or False for Suydam stability.
    """
    return alpha + 4.*beta > 0.
def small_solution(r, r_sing, nu_1, nu_2):
r"""
Returns xi and xi_der of the small solution close to a singularity.
"""
if nu_1 > nu_2:
return ((r-r_sing)**nu_2, nu_2*(r-r_sing)**(nu_2 - 1.))
else:
return ((r-r_sing)**nu_1, nu_1*(r-r_sing)**(nu_1 - 1.))
|
Add Frobenius solution methods for singular points.# -*- coding: utf-8 -*-
"""
Created on Thu Aug 21 13:23:31 2014
@author: Jens von der Linden
Implements Frobenius expansion around a singularity to determine the "small"
solution and check the Suydam condition.
"""
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future.builtins import (ascii, bytes, chr, dict, filter, hex, input,
int, map, next, oct, open, pow, range, round,
str, super, zip)
"""Python 3.x compatibility"""
import numpy as np
def alpha_func(r, b_z, b_z_prime, b_theta, b_theta_prime):
r"""
Return alpha for Frobenius solution.
"""
mu = b_theta/(r*b_z)
mu_prime = (r*b_z*b_theta_prime - b_theta*(b_z + r*b_z_prime)) / (r*b_z)**2
return r*b_theta**2*b_z**2/(b_theta**2 + b_z**2)*(mu_prime / mu)**2
def beta_func(b_z, b_theta, p_prime):
r"""
Return beta for Frobenius solution.
"""
return 2*b_theta/(b_theta + b_z)**2 * p_prime
def nu_1_2(alpha, beta):
r"""
Return exponents of Frobenius solution.
"""
nu_1 = 0.5 + 0.5*np.sqrt(1. + 4.*beta/alpha)
nu_2 = 0.5 - 0.5*np.sqrt(1. + 4.*beta/alpha)
return nu_1, nu_2
def suydam_stable(alpha, beta):
r"""
    Return True or False for Suydam stability.
    """
    return alpha + 4.*beta > 0.
def small_solution(r, r_sing, nu_1, nu_2):
r"""
Returns xi and xi_der of the small solution close to a singularity.
"""
if nu_1 > nu_2:
return ((r-r_sing)**nu_2, nu_2*(r-r_sing)**(nu_2 - 1.))
else:
return ((r-r_sing)**nu_1, nu_1*(r-r_sing)**(nu_1 - 1.))
|
<commit_before><commit_msg>Add Frobenius solution methods for singular points.<commit_after># -*- coding: utf-8 -*-
"""
Created on Thu Aug 21 13:23:31 2014
@author: Jens von der Linden
Implements Frobenius expansion around a singularity to determine the "small"
solution and check the Suydam condition.
"""
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future.builtins import (ascii, bytes, chr, dict, filter, hex, input,
int, map, next, oct, open, pow, range, round,
str, super, zip)
"""Python 3.x compatibility"""
import numpy as np
def alpha_func(r, b_z, b_z_prime, b_theta, b_theta_prime):
r"""
Return alpha for Frobenius solution.
"""
mu = b_theta/(r*b_z)
mu_prime = (r*b_z*b_theta_prime - b_theta*(b_z + r*b_z_prime)) / (r*b_z)**2
return r*b_theta**2*b_z**2/(b_theta**2 + b_z**2)*(mu_prime / mu)**2
def beta_func(b_z, b_theta, p_prime):
r"""
Return beta for Frobenius solution.
"""
return 2*b_theta/(b_theta + b_z)**2 * p_prime
def nu_1_2(alpha, beta):
r"""
Return exponents of Frobenius solution.
"""
nu_1 = 0.5 + 0.5*np.sqrt(1. + 4.*beta/alpha)
nu_2 = 0.5 - 0.5*np.sqrt(1. + 4.*beta/alpha)
return nu_1, nu_2
def suydam_stable(alpha, beta):
r"""
    Return True or False for Suydam stability.
    """
    return alpha + 4.*beta > 0.
def small_solution(r, r_sing, nu_1, nu_2):
r"""
Returns xi and xi_der of the small solution close to a singularity.
"""
if nu_1 > nu_2:
return ((r-r_sing)**nu_2, nu_2*(r-r_sing)**(nu_2 - 1.))
else:
return ((r-r_sing)**nu_1, nu_1*(r-r_sing)**(nu_1 - 1.))
|
|
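A quick numeric check of the exponent formula and the Suydam criterion above, with made-up alpha and beta; real values would come from alpha_func and beta_func evaluated on an equilibrium profile:

import numpy as np

alpha, beta = 2.0, 0.5
nu_1 = 0.5 + 0.5*np.sqrt(1. + 4.*beta/alpha)  # larger Frobenius exponent
nu_2 = 0.5 - 0.5*np.sqrt(1. + 4.*beta/alpha)  # smaller Frobenius exponent
print(nu_1, nu_2)            # approx 1.207 and -0.207
print(alpha + 4.*beta > 0.)  # Suydam criterion satisfied: True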
556325cc8cb1032194c2d3739f303fe0a4cfa1a4
|
undercloud_heat_plugins/config.py
|
undercloud_heat_plugins/config.py
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from heat.engine.resources.openstack.heat import software_deployment
from heat.engine.resources.openstack.heat import structured_config
class SoftwareDeployment(software_deployment.SoftwareDeployment):
"""A custom subclass to allow reverting replacement."""
class StructuredDeployment(structured_config.StructuredDeployment):
"""A custom subclass to allow reverting replacement."""
def resource_mapping():
return {
'OS::TripleO::Heat::SoftwareDeployment': SoftwareDeployment,
'OS::TripleO::Heat::StructuredDeployment': StructuredDeployment
}
|
Add custom subclass to revert mapping
|
Add custom subclass to revert mapping
To be able to revert the custom mappings down by config-download, let's
add some tripleo specific subclasses which will be available in the
registry.
Change-Id: I6bd4107e8e1a6a9abc38d2dca7a91a6823f8b6c2
Related-Bug: #1758065
|
Python
|
apache-2.0
|
openstack/tripleo-common,openstack/tripleo-common
|
Add custom subclass to revert mapping
To be able to revert the custom mappings down by config-download, let's
add some tripleo specific subclasses which will be available in the
registry.
Change-Id: I6bd4107e8e1a6a9abc38d2dca7a91a6823f8b6c2
Related-Bug: #1758065
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from heat.engine.resources.openstack.heat import software_deployment
from heat.engine.resources.openstack.heat import structured_config
class SoftwareDeployment(software_deployment.SoftwareDeployment):
"""A custom subclass to allow reverting replacement."""
class StructuredDeployment(structured_config.StructuredDeployment):
"""A custom subclass to allow reverting replacement."""
def resource_mapping():
return {
'OS::TripleO::Heat::SoftwareDeployment': SoftwareDeployment,
'OS::TripleO::Heat::StructuredDeployment': StructuredDeployment
}
|
<commit_before><commit_msg>Add custom subclass to revert mapping
To be able to revert the custom mappings down by config-download, let's
add some tripleo specific subclasses which will be available in the
registry.
Change-Id: I6bd4107e8e1a6a9abc38d2dca7a91a6823f8b6c2
Related-Bug: #1758065<commit_after>
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from heat.engine.resources.openstack.heat import software_deployment
from heat.engine.resources.openstack.heat import structured_config
class SoftwareDeployment(software_deployment.SoftwareDeployment):
"""A custom subclass to allow reverting replacement."""
class StructuredDeployment(structured_config.StructuredDeployment):
"""A custom subclass to allow reverting replacement."""
def resource_mapping():
return {
'OS::TripleO::Heat::SoftwareDeployment': SoftwareDeployment,
'OS::TripleO::Heat::StructuredDeployment': StructuredDeployment
}
|
Add custom subclass to revert mapping
To be able to revert the custom mappings down by config-download, let's
add some tripleo specific subclasses which will be available in the
registry.
Change-Id: I6bd4107e8e1a6a9abc38d2dca7a91a6823f8b6c2
Related-Bug: #1758065#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from heat.engine.resources.openstack.heat import software_deployment
from heat.engine.resources.openstack.heat import structured_config
class SoftwareDeployment(software_deployment.SoftwareDeployment):
"""A custom subclass to allow reverting replacement."""
class StructuredDeployment(structured_config.StructuredDeployment):
"""A custom subclass to allow reverting replacement."""
def resource_mapping():
return {
'OS::TripleO::Heat::SoftwareDeployment': SoftwareDeployment,
'OS::TripleO::Heat::StructuredDeployment': StructuredDeployment
}
|
<commit_before><commit_msg>Add custom subclass to revert mapping
To be able to revert the custom mappings down by config-download, let's
add some tripleo specific subclasses which will be available in the
registry.
Change-Id: I6bd4107e8e1a6a9abc38d2dca7a91a6823f8b6c2
Related-Bug: #1758065<commit_after>#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from heat.engine.resources.openstack.heat import software_deployment
from heat.engine.resources.openstack.heat import structured_config
class SoftwareDeployment(software_deployment.SoftwareDeployment):
"""A custom subclass to allow reverting replacement."""
class StructuredDeployment(structured_config.StructuredDeployment):
"""A custom subclass to allow reverting replacement."""
def resource_mapping():
return {
'OS::TripleO::Heat::SoftwareDeployment': SoftwareDeployment,
'OS::TripleO::Heat::StructuredDeployment': StructuredDeployment
}
|
|
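A toy illustration of the resource_mapping() hook above: Heat calls the plugin's resource_mapping() and merges the returned dict into its resource registry. The class and registry here are stand-ins, not Heat internals:

class DummyResource(object):
    """Stand-in for a Heat resource class."""

def resource_mapping():
    return {'My::Custom::Resource': DummyResource}

registry = {}
registry.update(resource_mapping())  # roughly what Heat does at load time
assert registry['My::Custom::Resource'] is DummyResource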
101738323720d7f23b714d0a0b8f40c2926f9645
|
midterm/problem8.py
|
midterm/problem8.py
|
# Problem 8
# 20.0 points possible (graded)
# Implement a function that meets the specifications below.
# For example, the following functions, f, g, and test code:
# def f(i):
# return i + 2
# def g(i):
# return i > 5
# L = [0, -10, 5, 6, -4]
# print(applyF_filterG(L, f, g))
# print(L)
# Should print:
# 6
# [5, 6]
def f(i):
return i + 2
def g(i):
return i > 5
def applyF_filterG(L, f, g):
"""
Assumes L is a list of integers
Assume functions f and g are defined for you.
f takes in an integer, applies a function, returns another integer
g takes in an integer, applies a Boolean function, returns either True or False
Mutates L such that, for each element i originally in L, L contains i if g(f(i)) returns True, and no other elements
Returns the largest element in the mutated L or -1 if the list is empty
"""
l = L[:]
for i in l:
if g(f(i)) is False:
L.remove(i)
if len(L) == 0:
return -1
else:
return max(L)
L = [0, -10, 5, 6, -4]
print(applyF_filterG(L, f, g))
print(L)
|
Implement applyF_filterG function (1 test case missing)
|
Implement applyF_filterG function (1 test case missing)
|
Python
|
mit
|
Kunal57/MIT_6.00.1x
|
Implement applyF_filterG function (1 test case missing)
|
# Problem 8
# 20.0 points possible (graded)
# Implement a function that meets the specifications below.
# For example, the following functions, f, g, and test code:
# def f(i):
# return i + 2
# def g(i):
# return i > 5
# L = [0, -10, 5, 6, -4]
# print(applyF_filterG(L, f, g))
# print(L)
# Should print:
# 6
# [5, 6]
def f(i):
return i + 2
def g(i):
return i > 5
def applyF_filterG(L, f, g):
"""
Assumes L is a list of integers
Assume functions f and g are defined for you.
f takes in an integer, applies a function, returns another integer
g takes in an integer, applies a Boolean function, returns either True or False
Mutates L such that, for each element i originally in L, L contains i if g(f(i)) returns True, and no other elements
Returns the largest element in the mutated L or -1 if the list is empty
"""
l = L[:]
for i in l:
if g(f(i)) is False:
L.remove(i)
if len(L) == 0:
return -1
else:
return max(L)
L = [0, -10, 5, 6, -4]
print(applyF_filterG(L, f, g))
print(L)
|
<commit_before><commit_msg>Implement applyF_filterG function (1 test case missing)<commit_after>
|
# Problem 8
# 20.0 points possible (graded)
# Implement a function that meets the specifications below.
# For example, the following functions, f, g, and test code:
# def f(i):
# return i + 2
# def g(i):
# return i > 5
# L = [0, -10, 5, 6, -4]
# print(applyF_filterG(L, f, g))
# print(L)
# Should print:
# 6
# [5, 6]
def f(i):
return i + 2
def g(i):
return i > 5
def applyF_filterG(L, f, g):
"""
Assumes L is a list of integers
Assume functions f and g are defined for you.
f takes in an integer, applies a function, returns another integer
g takes in an integer, applies a Boolean function, returns either True or False
Mutates L such that, for each element i originally in L, L contains i if g(f(i)) returns True, and no other elements
Returns the largest element in the mutated L or -1 if the list is empty
"""
l = L[:]
for i in l:
if g(f(i)) is False:
L.remove(i)
if len(L) == 0:
return -1
else:
return max(L)
L = [0, -10, 5, 6, -4]
print(applyF_filterG(L, f, g))
print(L)
|
Implement applyF_filterG function (1 test case missing)# Problem 8
# 20.0 points possible (graded)
# Implement a function that meets the specifications below.
# For example, the following functions, f, g, and test code:
# def f(i):
# return i + 2
# def g(i):
# return i > 5
# L = [0, -10, 5, 6, -4]
# print(applyF_filterG(L, f, g))
# print(L)
# Should print:
# 6
# [5, 6]
def f(i):
return i + 2
def g(i):
return i > 5
def applyF_filterG(L, f, g):
"""
Assumes L is a list of integers
Assume functions f and g are defined for you.
f takes in an integer, applies a function, returns another integer
g takes in an integer, applies a Boolean function, returns either True or False
Mutates L such that, for each element i originally in L, L contains i if g(f(i)) returns True, and no other elements
Returns the largest element in the mutated L or -1 if the list is empty
"""
l = L[:]
for i in l:
if g(f(i)) is False:
L.remove(i)
if len(L) == 0:
return -1
else:
return max(L)
L = [0, -10, 5, 6, -4]
print(applyF_filterG(L, f, g))
print(L)
|
<commit_before><commit_msg>Implement applyF_filterG function (1 test case missing)<commit_after># Problem 8
# 20.0 points possible (graded)
# Implement a function that meets the specifications below.
# For example, the following functions, f, g, and test code:
# def f(i):
# return i + 2
# def g(i):
# return i > 5
# L = [0, -10, 5, 6, -4]
# print(applyF_filterG(L, f, g))
# print(L)
# Should print:
# 6
# [5, 6]
def f(i):
return i + 2
def g(i):
return i > 5
def applyF_filterG(L, f, g):
"""
Assumes L is a list of integers
Assume functions f and g are defined for you.
f takes in an integer, applies a function, returns another integer
g takes in an integer, applies a Boolean function, returns either True or False
Mutates L such that, for each element i originally in L, L contains i if g(f(i)) returns True, and no other elements
Returns the largest element in the mutated L or -1 if the list is empty
"""
l = L[:]
for i in l:
if g(f(i)) is False:
L.remove(i)
if len(L) == 0:
return -1
else:
return max(L)
L = [0, -10, 5, 6, -4]
print(applyF_filterG(L, f, g))
print(L)
|
|
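The copy in l = L[:] above is what makes the in-place filtering safe: removing from a list while iterating over that same list skips elements. A quick demonstration:

L = [1, 2, 3, 4]
for i in L:          # iterating the list being mutated
    if i < 3:
        L.remove(i)
print(L)             # [2, 3, 4] -- the 2 was skipped

L = [1, 2, 3, 4]
for i in L[:]:       # iterating a copy is safe
    if i < 3:
        L.remove(i)
print(L)             # [3, 4]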
6a42d70a9f74478ed9d650d5b96a385ea84213b7
|
nltk/test/unit/test_chunk.py
|
nltk/test/unit/test_chunk.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import unittest
from nltk import RegexpParser
class TestChunkRule(unittest.TestCase):
def test_tag_pattern2re_pattern_quantifier(self):
"""Test for bug https://github.com/nltk/nltk/issues/1597
Ensures that curly bracket quantifiers can be used inside a chunk rule.
This type of quantifier has been used for the supplementary example
in http://www.nltk.org/book/ch07.html#exploring-text-corpora.
"""
sent = [('The', 'AT'), ('September-October', 'NP'), ('term', 'NN'), ('jury', 'NN'), ('had', 'HVD'), ('been', 'BEN'), ('charged', 'VBN'), ('by', 'IN'), ('Fulton', 'NP-TL'), ('Superior', 'JJ-TL'), ('Court', 'NN-TL'), ('Judge', 'NN-TL'), ('Durwood', 'NP'), ('Pye', 'NP'), ('to', 'TO'), ('investigate', 'VB'), ('reports', 'NNS'), ('of', 'IN'), ('possible', 'JJ'), ('``', '``'), ('irregularities', 'NNS'), ("''", "''"), ('in', 'IN'), ('the', 'AT'), ('hard-fought', 'JJ'), ('primary', 'NN'), ('which', 'WDT'), ('was', 'BEDZ'), ('won', 'VBN'), ('by', 'IN'), ('Mayor-nominate', 'NN-TL'), ('Ivan', 'NP'), ('Allen', 'NP'), ('Jr.', 'NP'), ('.', '.')] # source: brown corpus
cp = RegexpParser('CHUNK: {<N.*>{4,}}')
tree = cp.parse(sent)
assert tree.pformat() == """(S
The/AT
September-October/NP
term/NN
jury/NN
had/HVD
been/BEN
charged/VBN
by/IN
Fulton/NP-TL
Superior/JJ-TL
(CHUNK Court/NN-TL Judge/NN-TL Durwood/NP Pye/NP)
to/TO
investigate/VB
reports/NNS
of/IN
possible/JJ
``/``
irregularities/NNS
''/''
in/IN
the/AT
hard-fought/JJ
primary/NN
which/WDT
was/BEDZ
won/VBN
by/IN
(CHUNK Mayor-nominate/NN-TL Ivan/NP Allen/NP Jr./NP)
./.)"""
|
Fix 1597. Allow for curly bracket quantifiers in nltk.chunk.regexp.CHUNK_TAG_PATTERN.
|
Fix 1597. Allow for curly bracket quantifiers in nltk.chunk.regexp.CHUNK_TAG_PATTERN.
|
Python
|
apache-2.0
|
nltk/nltk,nltk/nltk,nltk/nltk
|
Fix 1597. Allow for curly bracket quantifiers in nltk.chunk.regexp.CHUNK_TAG_PATTERN.
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import unittest
from nltk import RegexpParser
class TestChunkRule(unittest.TestCase):
def test_tag_pattern2re_pattern_quantifier(self):
"""Test for bug https://github.com/nltk/nltk/issues/1597
Ensures that curly bracket quantifiers can be used inside a chunk rule.
This type of quantifier has been used for the supplementary example
in http://www.nltk.org/book/ch07.html#exploring-text-corpora.
"""
sent = [('The', 'AT'), ('September-October', 'NP'), ('term', 'NN'), ('jury', 'NN'), ('had', 'HVD'), ('been', 'BEN'), ('charged', 'VBN'), ('by', 'IN'), ('Fulton', 'NP-TL'), ('Superior', 'JJ-TL'), ('Court', 'NN-TL'), ('Judge', 'NN-TL'), ('Durwood', 'NP'), ('Pye', 'NP'), ('to', 'TO'), ('investigate', 'VB'), ('reports', 'NNS'), ('of', 'IN'), ('possible', 'JJ'), ('``', '``'), ('irregularities', 'NNS'), ("''", "''"), ('in', 'IN'), ('the', 'AT'), ('hard-fought', 'JJ'), ('primary', 'NN'), ('which', 'WDT'), ('was', 'BEDZ'), ('won', 'VBN'), ('by', 'IN'), ('Mayor-nominate', 'NN-TL'), ('Ivan', 'NP'), ('Allen', 'NP'), ('Jr.', 'NP'), ('.', '.')] # source: brown corpus
cp = RegexpParser('CHUNK: {<N.*>{4,}}')
tree = cp.parse(sent)
assert tree.pformat() == """(S
The/AT
September-October/NP
term/NN
jury/NN
had/HVD
been/BEN
charged/VBN
by/IN
Fulton/NP-TL
Superior/JJ-TL
(CHUNK Court/NN-TL Judge/NN-TL Durwood/NP Pye/NP)
to/TO
investigate/VB
reports/NNS
of/IN
possible/JJ
``/``
irregularities/NNS
''/''
in/IN
the/AT
hard-fought/JJ
primary/NN
which/WDT
was/BEDZ
won/VBN
by/IN
(CHUNK Mayor-nominate/NN-TL Ivan/NP Allen/NP Jr./NP)
./.)"""
|
<commit_before><commit_msg>Fix 1597. Allow for curly bracket quantifiers in nltk.chunk.regexp.CHUNK_TAG_PATTERN.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import unittest
from nltk import RegexpParser
class TestChunkRule(unittest.TestCase):
def test_tag_pattern2re_pattern_quantifier(self):
"""Test for bug https://github.com/nltk/nltk/issues/1597
Ensures that curly bracket quantifiers can be used inside a chunk rule.
This type of quantifier has been used for the supplementary example
in http://www.nltk.org/book/ch07.html#exploring-text-corpora.
"""
sent = [('The', 'AT'), ('September-October', 'NP'), ('term', 'NN'), ('jury', 'NN'), ('had', 'HVD'), ('been', 'BEN'), ('charged', 'VBN'), ('by', 'IN'), ('Fulton', 'NP-TL'), ('Superior', 'JJ-TL'), ('Court', 'NN-TL'), ('Judge', 'NN-TL'), ('Durwood', 'NP'), ('Pye', 'NP'), ('to', 'TO'), ('investigate', 'VB'), ('reports', 'NNS'), ('of', 'IN'), ('possible', 'JJ'), ('``', '``'), ('irregularities', 'NNS'), ("''", "''"), ('in', 'IN'), ('the', 'AT'), ('hard-fought', 'JJ'), ('primary', 'NN'), ('which', 'WDT'), ('was', 'BEDZ'), ('won', 'VBN'), ('by', 'IN'), ('Mayor-nominate', 'NN-TL'), ('Ivan', 'NP'), ('Allen', 'NP'), ('Jr.', 'NP'), ('.', '.')] # source: brown corpus
cp = RegexpParser('CHUNK: {<N.*>{4,}}')
tree = cp.parse(sent)
assert tree.pformat() == """(S
The/AT
September-October/NP
term/NN
jury/NN
had/HVD
been/BEN
charged/VBN
by/IN
Fulton/NP-TL
Superior/JJ-TL
(CHUNK Court/NN-TL Judge/NN-TL Durwood/NP Pye/NP)
to/TO
investigate/VB
reports/NNS
of/IN
possible/JJ
``/``
irregularities/NNS
''/''
in/IN
the/AT
hard-fought/JJ
primary/NN
which/WDT
was/BEDZ
won/VBN
by/IN
(CHUNK Mayor-nominate/NN-TL Ivan/NP Allen/NP Jr./NP)
./.)"""
|
Fix 1597. Allow for curly bracket quantifiers in nltk.chunk.regexp.CHUNK_TAG_PATTERN.# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import unittest
from nltk import RegexpParser
class TestChunkRule(unittest.TestCase):
def test_tag_pattern2re_pattern_quantifier(self):
"""Test for bug https://github.com/nltk/nltk/issues/1597
Ensures that curly bracket quantifiers can be used inside a chunk rule.
This type of quantifier has been used for the supplementary example
in http://www.nltk.org/book/ch07.html#exploring-text-corpora.
"""
sent = [('The', 'AT'), ('September-October', 'NP'), ('term', 'NN'), ('jury', 'NN'), ('had', 'HVD'), ('been', 'BEN'), ('charged', 'VBN'), ('by', 'IN'), ('Fulton', 'NP-TL'), ('Superior', 'JJ-TL'), ('Court', 'NN-TL'), ('Judge', 'NN-TL'), ('Durwood', 'NP'), ('Pye', 'NP'), ('to', 'TO'), ('investigate', 'VB'), ('reports', 'NNS'), ('of', 'IN'), ('possible', 'JJ'), ('``', '``'), ('irregularities', 'NNS'), ("''", "''"), ('in', 'IN'), ('the', 'AT'), ('hard-fought', 'JJ'), ('primary', 'NN'), ('which', 'WDT'), ('was', 'BEDZ'), ('won', 'VBN'), ('by', 'IN'), ('Mayor-nominate', 'NN-TL'), ('Ivan', 'NP'), ('Allen', 'NP'), ('Jr.', 'NP'), ('.', '.')] # source: brown corpus
cp = RegexpParser('CHUNK: {<N.*>{4,}}')
tree = cp.parse(sent)
assert tree.pformat() == """(S
The/AT
September-October/NP
term/NN
jury/NN
had/HVD
been/BEN
charged/VBN
by/IN
Fulton/NP-TL
Superior/JJ-TL
(CHUNK Court/NN-TL Judge/NN-TL Durwood/NP Pye/NP)
to/TO
investigate/VB
reports/NNS
of/IN
possible/JJ
``/``
irregularities/NNS
''/''
in/IN
the/AT
hard-fought/JJ
primary/NN
which/WDT
was/BEDZ
won/VBN
by/IN
(CHUNK Mayor-nominate/NN-TL Ivan/NP Allen/NP Jr./NP)
./.)"""
|
<commit_before><commit_msg>Fix 1597. Allow for curly bracket quantifiers in nltk.chunk.regexp.CHUNK_TAG_PATTERN.<commit_after># -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import unittest
from nltk import RegexpParser
class TestChunkRule(unittest.TestCase):
def test_tag_pattern2re_pattern_quantifier(self):
"""Test for bug https://github.com/nltk/nltk/issues/1597
Ensures that curly bracket quantifiers can be used inside a chunk rule.
This type of quantifier has been used for the supplementary example
in http://www.nltk.org/book/ch07.html#exploring-text-corpora.
"""
sent = [('The', 'AT'), ('September-October', 'NP'), ('term', 'NN'), ('jury', 'NN'), ('had', 'HVD'), ('been', 'BEN'), ('charged', 'VBN'), ('by', 'IN'), ('Fulton', 'NP-TL'), ('Superior', 'JJ-TL'), ('Court', 'NN-TL'), ('Judge', 'NN-TL'), ('Durwood', 'NP'), ('Pye', 'NP'), ('to', 'TO'), ('investigate', 'VB'), ('reports', 'NNS'), ('of', 'IN'), ('possible', 'JJ'), ('``', '``'), ('irregularities', 'NNS'), ("''", "''"), ('in', 'IN'), ('the', 'AT'), ('hard-fought', 'JJ'), ('primary', 'NN'), ('which', 'WDT'), ('was', 'BEDZ'), ('won', 'VBN'), ('by', 'IN'), ('Mayor-nominate', 'NN-TL'), ('Ivan', 'NP'), ('Allen', 'NP'), ('Jr.', 'NP'), ('.', '.')] # source: brown corpus
cp = RegexpParser('CHUNK: {<N.*>{4,}}')
tree = cp.parse(sent)
assert tree.pformat() == """(S
The/AT
September-October/NP
term/NN
jury/NN
had/HVD
been/BEN
charged/VBN
by/IN
Fulton/NP-TL
Superior/JJ-TL
(CHUNK Court/NN-TL Judge/NN-TL Durwood/NP Pye/NP)
to/TO
investigate/VB
reports/NNS
of/IN
possible/JJ
``/``
irregularities/NNS
''/''
in/IN
the/AT
hard-fought/JJ
primary/NN
which/WDT
was/BEDZ
won/VBN
by/IN
(CHUNK Mayor-nominate/NN-TL Ivan/NP Allen/NP Jr./NP)
./.)"""
|
|
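For context on the record above: before this fix, nltk.chunk.regexp.CHUNK_TAG_PATTERN rejected regex curly-bracket quantifiers inside chunk rules, so a pattern like <N.*>{4,} failed validation in tag_pattern2re_pattern. A minimal sketch of the now-supported usage (assumes an nltk release that includes the fix for issue #1597; the tagged tokens are taken from the test's Brown-corpus sentence):

from nltk import RegexpParser

# Runs of four or more noun-like tags become one CHUNK subtree.
sent = [('Court', 'NN-TL'), ('Judge', 'NN-TL'), ('Durwood', 'NP'),
        ('Pye', 'NP'), ('to', 'TO'), ('investigate', 'VB')]
cp = RegexpParser('CHUNK: {<N.*>{4,}}')
print(cp.parse(sent))
# -> (S (CHUNK Court/NN-TL Judge/NN-TL Durwood/NP Pye/NP) to/TO investigate/VB)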
417b95a9c95146be49feb3322d7e5f2481ea40dc
|
pambox/tests/test_experiment.py
|
pambox/tests/test_experiment.py
|
# -*- coding: utf-8 -*-
from __future__ import division, print_function
import os.path
import numpy as np
from numpy.testing import assert_allclose
import pytest
from pambox.speech import Experiment
__DATA_ROOT__ = os.path.join(os.path.dirname(__file__), 'data')
class TestExperiment(object):
@pytest.mark.parametrize("fixed_target, target, masker, snr, exp_target, "
"exp_masker", (
(True, [0, 1], [0, 1], 0, [0, 2514.86685937], [0, 2514.86685937]),
(True, [0, 1], [0, 1], 5, [0, 2514.86685937], [0, 1414.21356237]),
(False, [0, 1], [0, 1], 0, [0, 2514.86685937], [0, 2514.86685937]),
(False, [0, 1], [0, 1], 5, [0, 4472.135955], [0, 2514.86685937]),
))
def test_adjust_levels(self, fixed_target,
target, masker, snr,
exp_target, exp_masker):
exp = Experiment([], [], [], fixed_target=fixed_target, fixed_level=65)
target, masker = exp.adjust_levels(target, masker, snr)
assert_allclose(target, exp_target, atol=1e-6)
assert_allclose(masker, exp_masker, atol=1e-6)
def distort_passthrough(target, masker, *args, **kwargs):
return target, masker
def test_preprocessing(self):
params = {}
target = np.asarray([0, 1])
masker = np.asarray([0, 1])
snr = 0
exp_target = [0, 1]
exp_mix = [0, 2]
exp_masker = [0, 1]
exp = Experiment([], [], [], distortion=self.distort_passthrough,
dist_params=params, adjust_levels_bef_proc=True,
fixed_level=-3.0102999566398125)
target, mix, masker = exp.preprocessing(target, masker, snr, params)
assert_allclose(target, exp_target)
assert_allclose(mix, exp_mix)
assert_allclose(masker, exp_masker)
|
Add some tests for Experiment class
|
Add some tests for Experiment class
|
Python
|
bsd-3-clause
|
achabotl/pambox
|
Add some tests for Experiment class
|
# -*- coding: utf-8 -*-
from __future__ import division, print_function
import os.path
import numpy as np
from numpy.testing import assert_allclose
import pytest
from pambox.speech import Experiment
__DATA_ROOT__ = os.path.join(os.path.dirname(__file__), 'data')
class TestExperiment(object):
@pytest.mark.parametrize("fixed_target, target, masker, snr, exp_target, "
"exp_masker", (
(True, [0, 1], [0, 1], 0, [0, 2514.86685937], [0, 2514.86685937]),
(True, [0, 1], [0, 1], 5, [0, 2514.86685937], [0, 1414.21356237]),
(False, [0, 1], [0, 1], 0, [0, 2514.86685937], [0, 2514.86685937]),
(False, [0, 1], [0, 1], 5, [0, 4472.135955], [0, 2514.86685937]),
))
def test_adjust_levels(self, fixed_target,
target, masker, snr,
exp_target, exp_masker):
exp = Experiment([], [], [], fixed_target=fixed_target, fixed_level=65)
target, masker = exp.adjust_levels(target, masker, snr)
assert_allclose(target, exp_target, atol=1e-6)
assert_allclose(masker, exp_masker, atol=1e-6)
def distort_passthrough(target, masker, *args, **kwargs):
return target, masker
def test_preprocessing(self):
params = {}
target = np.asarray([0, 1])
masker = np.asarray([0, 1])
snr = 0
exp_target = [0, 1]
exp_mix = [0, 2]
exp_masker = [0, 1]
exp = Experiment([], [], [], distortion=self.distort_passthrough,
dist_params=params, adjust_levels_bef_proc=True,
fixed_level=-3.0102999566398125)
target, mix, masker = exp.preprocessing(target, masker, snr, params)
assert_allclose(target, exp_target)
assert_allclose(mix, exp_mix)
assert_allclose(masker, exp_masker)
|
<commit_before><commit_msg>Add some tests for Experiment class<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import division, print_function
import os.path
import numpy as np
from numpy.testing import assert_allclose
import pytest
from pambox.speech import Experiment
__DATA_ROOT__ = os.path.join(os.path.dirname(__file__), 'data')
class TestExperiment(object):
@pytest.mark.parametrize("fixed_target, target, masker, snr, exp_target, "
"exp_masker", (
(True, [0, 1], [0, 1], 0, [0, 2514.86685937], [0, 2514.86685937]),
(True, [0, 1], [0, 1], 5, [0, 2514.86685937], [0, 1414.21356237]),
(False, [0, 1], [0, 1], 0, [0, 2514.86685937], [0, 2514.86685937]),
(False, [0, 1], [0, 1], 5, [0, 4472.135955], [0, 2514.86685937]),
))
def test_adjust_levels(self, fixed_target,
target, masker, snr,
exp_target, exp_masker):
exp = Experiment([], [], [], fixed_target=fixed_target, fixed_level=65)
target, masker = exp.adjust_levels(target, masker, snr)
assert_allclose(target, exp_target, atol=1e-6)
assert_allclose(masker, exp_masker, atol=1e-6)
def distort_passthrough(target, masker, *args, **kwargs):
return target, masker
def test_preprocessing(self):
params = {}
target = np.asarray([0, 1])
masker = np.asarray([0, 1])
snr = 0
exp_target = [0, 1]
exp_mix = [0, 2]
exp_masker = [0, 1]
exp = Experiment([], [], [], distortion=self.distort_passthrough,
dist_params=params, adjust_levels_bef_proc=True,
fixed_level=-3.0102999566398125)
target, mix, masker = exp.preprocessing(target, masker, snr, params)
assert_allclose(target, exp_target)
assert_allclose(mix, exp_mix)
assert_allclose(masker, exp_masker)
|
Add some tests for Experiment class# -*- coding: utf-8 -*-
from __future__ import division, print_function
import os.path
import numpy as np
from numpy.testing import assert_allclose
import pytest
from pambox.speech import Experiment
__DATA_ROOT__ = os.path.join(os.path.dirname(__file__), 'data')
class TestExperiment(object):
@pytest.mark.parametrize("fixed_target, target, masker, snr, exp_target, "
"exp_masker", (
(True, [0, 1], [0, 1], 0, [0, 2514.86685937], [0, 2514.86685937]),
(True, [0, 1], [0, 1], 5, [0, 2514.86685937], [0, 1414.21356237]),
(False, [0, 1], [0, 1], 0, [0, 2514.86685937], [0, 2514.86685937]),
(False, [0, 1], [0, 1], 5, [0, 4472.135955], [0, 2514.86685937]),
))
def test_adjust_levels(self, fixed_target,
target, masker, snr,
exp_target, exp_masker):
exp = Experiment([], [], [], fixed_target=fixed_target, fixed_level=65)
target, masker = exp.adjust_levels(target, masker, snr)
assert_allclose(target, exp_target, atol=1e-6)
assert_allclose(masker, exp_masker, atol=1e-6)
def distort_passthrough(target, masker, *args, **kwargs):
return target, masker
def test_preprocessing(self):
params = {}
target = np.asarray([0, 1])
masker = np.asarray([0, 1])
snr = 0
exp_target = [0, 1]
exp_mix = [0, 2]
exp_masker = [0, 1]
exp = Experiment([], [], [], distortion=self.distort_passthrough,
dist_params=params, adjust_levels_bef_proc=True,
fixed_level=-3.0102999566398125)
target, mix, masker = exp.preprocessing(target, masker, snr, params)
assert_allclose(target, exp_target)
assert_allclose(mix, exp_mix)
assert_allclose(masker, exp_masker)
|
<commit_before><commit_msg>Add some tests for Experiment class<commit_after># -*- coding: utf-8 -*-
from __future__ import division, print_function
import os.path
import numpy as np
from numpy.testing import assert_allclose
import pytest
from pambox.speech import Experiment
__DATA_ROOT__ = os.path.join(os.path.dirname(__file__), 'data')
class TestExperiment(object):
@pytest.mark.parametrize("fixed_target, target, masker, snr, exp_target, "
"exp_masker", (
(True, [0, 1], [0, 1], 0, [0, 2514.86685937], [0, 2514.86685937]),
(True, [0, 1], [0, 1], 5, [0, 2514.86685937], [0, 1414.21356237]),
(False, [0, 1], [0, 1], 0, [0, 2514.86685937], [0, 2514.86685937]),
(False, [0, 1], [0, 1], 5, [0, 4472.135955], [0, 2514.86685937]),
))
def test_adjust_levels(self, fixed_target,
target, masker, snr,
exp_target, exp_masker):
exp = Experiment([], [], [], fixed_target=fixed_target, fixed_level=65)
target, masker = exp.adjust_levels(target, masker, snr)
assert_allclose(target, exp_target, atol=1e-6)
assert_allclose(masker, exp_masker, atol=1e-6)
def distort_passthrough(target, masker, *args, **kwargs):
return target, masker
def test_preprocessing(self):
params = {}
target = np.asarray([0, 1])
masker = np.asarray([0, 1])
snr = 0
exp_target = [0, 1]
exp_mix = [0, 2]
exp_masker = [0, 1]
exp = Experiment([], [], [], distortion=self.distort_passthrough,
dist_params=params, adjust_levels_bef_proc=True,
fixed_level=-3.0102999566398125)
target, mix, masker = exp.preprocessing(target, masker, snr, params)
assert_allclose(target, exp_target)
assert_allclose(mix, exp_mix)
assert_allclose(masker, exp_masker)
|
|
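The parametrized cases above encode a simple rule: with fixed_target=True the target is pinned at fixed_level and the masker is attenuated by snr dB; with fixed_target=False the masker is pinned and the target is boosted by snr dB. A hedged sketch of that bookkeeping (hypothetical helper, not pambox's implementation; the levels read as dB re an RMS of 1, which is what the numbers suggest — the [0, 1] input has RMS 1/sqrt(2), and scaling it to 10**(65/20) ≈ 1778.3 gives samples [0, 2514.87]):

def expected_levels(fixed_target, snr, fixed_level):
    # Returns (target_level_dB, masker_level_dB) for one SNR condition.
    if fixed_target:
        return fixed_level, fixed_level - snr
    return fixed_level + snr, fixed_level

# e.g. expected_levels(True, 5, 65) -> (65, 60): the masker RMS drops by
# 10**(5/20) ~= 1.778, matching 2514.87 -> 1414.21 in the cases above.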
896160f5291158132c670eae65b7e45dd4a8748f
|
pox/messenger/mux.py
|
pox/messenger/mux.py
|
from pox.core import core
from pox.messenger.messenger import *
log = pox.core.getLogger()
class MuxConnection (MessengerConnection):
def __init__ (self, source, channelName, con):
MessengerConnection.__init__(self, source, ID=str(id(self)))
self.channelName = channelName
self.con = con
claimed = False
e = core.messenger.raiseEventNoErrors(ConnectionStarted, self)
if e is not None:
claimed = e._claimed
if not claimed:
# Unclaimed events get forwarded to here too
self.addListener(MessageRecieved, self._defaultMessageRecieved, priority=-1) # Low priority
self._newlines = False
def send (self, whatever, **kw):
whatever = dict(whatever)
whatever['_mux'] = self.channelName
MessengerConnection.send(self, whatever, **kw)
def sendRaw (self, data):
self.con.sendRaw(data)
class MuxSource (object):
def __init__ (self, con):
self.listenTo(con)
self.channels = {}
def _forget (self, connection):
if connection in self.channels:
del self.channels[connection.channelName]
else:
log.warn("Tried to forget a channel I didn't know")
def _handle_MessageRecieved (self, event):
if event.con.isReadable():
r = event.con.read()
if type(r) is dict:
channelName = r.get("_mux", None)
del r['_mux']
if channelName is not None:
if channelName not in self.channels:
# New channel
channel = MuxConnection(self, channelName, event.con)
self.channels[channelName] = channel
else:
channel = self.channels[channelName]
elif r.get("_mux_bye",False):
event.con.close()
else:
log.warn("Message to demuxer didn't specify a channel or valid command")
else:
log.warn("Demuxer only handlers dictionaries")
else:
self._closeAll()
def _handle_ConnectionClosed (self, event):
self._closeAll()
def _closeAll (self):
channels = self.channels.values()
for connection in channels:
connection._close()
class MuxHub (object):
"""
"""
def __init__ (self):
core.messenger.addListener(MessageRecieved, self._handle_global_MessageRecieved)#, weak=True)
def _handle_global_MessageRecieved (self, event):
try:
n = event.con.read()['hello']
if n['hello'] == 'mux':
# It's for me!
event.claim()
m = MuxSource(event.con)
print self.__class__.__name__, "- started conversation with", event.con
except:
pass
def launch ():
# core.register("demux", MessengerHub())
global hub
hub = MuxHub()
|
Add totally untested messenger multiplexer
|
Add totally untested messenger multiplexer
messenger.mux theoretically lets you use a single messenger connection to talk with
multiple messenger servers. Send a hello:mux message to have the muxer claim the
connection. Now you can send messages that include _mux:<conID>. For every unique
conID, a subconnection will be created. Messages sent by a service back down that
conID will also include _mux:<conID> key/value pairs.
|
Python
|
apache-2.0
|
adusia/pox,adusia/pox,diogommartins/pox,noxrepo/pox,kpengboy/pox-exercise,carlye566/IoT-POX,jacobq/csci5221-viro-project,kpengboy/pox-exercise,xAKLx/pox,xAKLx/pox,diogommartins/pox,kavitshah8/SDNDeveloper,xAKLx/pox,PrincetonUniversity/pox,chenyuntc/pox,kulawczukmarcin/mypox,PrincetonUniversity/pox,adusia/pox,VamsikrishnaNallabothu/pox,kpengboy/pox-exercise,diogommartins/pox,chenyuntc/pox,noxrepo/pox,pthien92/sdn,noxrepo/pox,diogommartins/pox,carlye566/IoT-POX,MurphyMc/pox,kulawczukmarcin/mypox,adusia/pox,diogommartins/pox,andiwundsam/_of_normalize,pthien92/sdn,kpengboy/pox-exercise,PrincetonUniversity/pox,VamsikrishnaNallabothu/pox,xAKLx/pox,MurphyMc/pox,jacobq/csci5221-viro-project,kavitshah8/SDNDeveloper,jacobq/csci5221-viro-project,VamsikrishnaNallabothu/pox,andiwundsam/_of_normalize,PrincetonUniversity/pox,andiwundsam/_of_normalize,carlye566/IoT-POX,waltznetworks/pox,kulawczukmarcin/mypox,VamsikrishnaNallabothu/pox,xAKLx/pox,waltznetworks/pox,MurphyMc/pox,MurphyMc/pox,jacobq/csci5221-viro-project,denovogroup/pox,denovogroup/pox,carlye566/IoT-POX,adusia/pox,PrincetonUniversity/pox,kavitshah8/SDNDeveloper,pthien92/sdn,waltznetworks/pox,MurphyMc/pox,kulawczukmarcin/mypox,chenyuntc/pox,pthien92/sdn,kulawczukmarcin/mypox,kpengboy/pox-exercise,waltznetworks/pox,noxrepo/pox,kavitshah8/SDNDeveloper,chenyuntc/pox,chenyuntc/pox,denovogroup/pox,denovogroup/pox,VamsikrishnaNallabothu/pox,andiwundsam/_of_normalize,carlye566/IoT-POX,waltznetworks/pox,jacobq/csci5221-viro-project,denovogroup/pox,pthien92/sdn
|
Add totally untested messenger multiplexer
messenger.mux theoretically lets you use a single messenger connection to talk with
multiple messenger servers. Send a hello:mux message to have the muxer claim the
connection. Now you can send messages that include _mux:<conID>. For every unique
conID, a subconnection will be created. Messages sent by a service back down that
conID will also include _mux:<conID> key/value pairs.
|
from pox.core import core
from pox.messenger.messenger import *
log = pox.core.getLogger()
class MuxConnection (MessengerConnection):
def __init__ (self, source, channelName, con):
MessengerConnection.__init__(self, source, ID=str(id(self)))
self.channelName = channelName
self.con = con
claimed = False
e = core.messenger.raiseEventNoErrors(ConnectionStarted, self)
if e is not None:
claimed = e._claimed
if not claimed:
# Unclaimed events get forwarded to here too
self.addListener(MessageRecieved, self._defaultMessageRecieved, priority=-1) # Low priority
self._newlines = False
def send (self, whatever, **kw):
whatever = dict(whatever)
whatever['_mux'] = self.channelName
MessengerConnection.send(self, whatever, **kw)
def sendRaw (self, data):
self.con.sendRaw(data)
class MuxSource (object):
def __init__ (self, con):
self.listenTo(con)
self.channels = {}
def _forget (self, connection):
if connection in self.channels:
del self.channels[connection.channelName]
else:
log.warn("Tried to forget a channel I didn't know")
def _handle_MessageRecieved (self, event):
if event.con.isReadable():
r = event.con.read()
if type(r) is dict:
channelName = r.get("_mux", None)
del r['_mux']
if channelName is not None:
if channelName not in self.channels:
# New channel
channel = MuxConnection(self, channelName, event.con)
self.channels[channelName] = channel
else:
channel = self.channels[channelName]
elif r.get("_mux_bye",False):
event.con.close()
else:
log.warn("Message to demuxer didn't specify a channel or valid command")
else:
log.warn("Demuxer only handlers dictionaries")
else:
self._closeAll()
def _handle_ConnectionClosed (self, event):
self._closeAll()
def _closeAll (self):
channels = self.channels.values()
for connection in channels:
connection._close()
class MuxHub (object):
"""
"""
def __init__ (self):
core.messenger.addListener(MessageRecieved, self._handle_global_MessageRecieved)#, weak=True)
def _handle_global_MessageRecieved (self, event):
try:
n = event.con.read()['hello']
if n['hello'] == 'mux':
# It's for me!
event.claim()
m = MuxSource(event.con)
print self.__class__.__name__, "- started conversation with", event.con
except:
pass
def launch ():
# core.register("demux", MessengerHub())
global hub
hub = MuxHub()
|
<commit_before><commit_msg>Add totally untested messenger multiplexer
messenger.mux theoretically lets you use a single messenger connection to talk with
multiple messenger servers. Send a hello:mux message to have the muxer claim the
connection. Now you can send messages that include _mux:<conID>. For every unique
conID, a subconnection will be created. Messages sent by a service back down that
conID will also include _mux:<conID> key/value pairs.<commit_after>
|
from pox.core import core
from pox.messenger.messenger import *
log = pox.core.getLogger()
class MuxConnection (MessengerConnection):
def __init__ (self, source, channelName, con):
MessengerConnection.__init__(self, source, ID=str(id(self)))
self.channelName = channelName
self.con = con
claimed = False
e = core.messenger.raiseEventNoErrors(ConnectionStarted, self)
if e is not None:
claimed = e._claimed
if not claimed:
# Unclaimed events get forwarded to here too
self.addListener(MessageRecieved, self._defaultMessageRecieved, priority=-1) # Low priority
self._newlines = False
def send (self, whatever, **kw):
whatever = dict(whatever)
whatever['_mux'] = self.channelName
MessengerConnection.send(self, whatever, **kw)
def sendRaw (self, data):
self.con.sendRaw(data)
class MuxSource (object):
def __init__ (self, con):
self.listenTo(con)
self.channels = {}
def _forget (self, connection):
if connection in self.channels:
del self.channels[connection.channelName]
else:
log.warn("Tried to forget a channel I didn't know")
def _handle_MessageRecieved (self, event):
if event.con.isReadable():
r = event.con.read()
if type(r) is dict:
channelName = r.get("_mux", None)
del r['_mux']
if channelName is not None:
if channelName not in self.channels:
# New channel
channel = MuxConnection(self, channelName, event.con)
self.channels[channelName] = channel
else:
channel = self.channels[channelName]
elif r.get("_mux_bye",False):
event.con.close()
else:
log.warn("Message to demuxer didn't specify a channel or valid command")
else:
log.warn("Demuxer only handlers dictionaries")
else:
self._closeAll()
def _handle_ConnectionClosed (self, event):
self._closeAll()
def _closeAll (self):
channels = self.channels.values()
for connection in channels:
connection._close()
class MuxHub (object):
"""
"""
def __init__ (self):
core.messenger.addListener(MessageRecieved, self._handle_global_MessageRecieved)#, weak=True)
def _handle_global_MessageRecieved (self, event):
try:
n = event.con.read()['hello']
if n['hello'] == 'mux':
# It's for me!
event.claim()
m = MuxSource(event.con)
print self.__class__.__name__, "- started conversation with", event.con
except:
pass
def launch ():
# core.register("demux", MessengerHub())
global hub
hub = MuxHub()
|
Add totally untested messenger multiplexer
messenger.mux theoretically lets you use a single messenger connection to talk with
multiple messenger servers. Send a hello:mux message to have the muxer claim the
connection. Now you can send messages that include _mux:<conID>. For every unique
conID, a subconnection will be created. Messages sent by a service back down that
conID will also include _mux:<conID> key/value pairs.from pox.core import core
from pox.messenger.messenger import *
log = pox.core.getLogger()
class MuxConnection (MessengerConnection):
def __init__ (self, source, channelName, con):
MessengerConnection.__init__(self, source, ID=str(id(self)))
self.channelName = channelName
self.con = con
claimed = False
e = core.messenger.raiseEventNoErrors(ConnectionStarted, self)
if e is not None:
claimed = e._claimed
if not claimed:
# Unclaimed events get forwarded to here too
self.addListener(MessageRecieved, self._defaultMessageRecieved, priority=-1) # Low priority
self._newlines = False
def send (self, whatever, **kw):
whatever = dict(whatever)
whatever['_mux'] = self.channelName
MessengerConnection.send(self, whatever, **kw)
def sendRaw (self, data):
self.con.sendRaw(data)
class MuxSource (object):
def __init__ (self, con):
self.listenTo(con)
self.channels = {}
def _forget (self, connection):
if connection in self.channels:
del self.channels[connection.channelName]
else:
log.warn("Tried to forget a channel I didn't know")
def _handle_MessageRecieved (self, event):
if event.con.isReadable():
r = event.con.read()
if type(r) is dict:
channelName = r.get("_mux", None)
del r['_mux']
if channelName is not None:
if channelName not in self.channels:
# New channel
channel = MuxConnection(self, channelName, event.con)
self.channels[channelName] = channel
else:
channel = self.channels[channelName]
elif r.get("_mux_bye",False):
event.con.close()
else:
log.warn("Message to demuxer didn't specify a channel or valid command")
else:
log.warn("Demuxer only handlers dictionaries")
else:
self._closeAll()
def _handle_ConnectionClosed (self, event):
self._closeAll()
def _closeAll (self):
channels = self.channels.values()
for connection in channels:
connection._close()
class MuxHub (object):
"""
"""
def __init__ (self):
core.messenger.addListener(MessageRecieved, self._handle_global_MessageRecieved)#, weak=True)
def _handle_global_MessageRecieved (self, event):
try:
n = event.con.read()['hello']
if n['hello'] == 'mux':
# It's for me!
event.claim()
m = MuxSource(event.con)
print self.__class__.__name__, "- started conversation with", event.con
except:
pass
def launch ():
# core.register("demux", MessengerHub())
global hub
hub = MuxHub()
|
<commit_before><commit_msg>Add totally untested messenger multiplexer
messenger.mux theoretically lets you use a single messenger connection to talk with
multiple messenger servers. Send a hello:mux message to have the muxer claim the
connection. Now you can send messages that include _mux:<conID>. For every unique
conID, a subconnection will be created. Messages sent by a service back down that
conID will also include _mux:<conID> key/value pairs.<commit_after>from pox.core import core
from pox.messenger.messenger import *
log = pox.core.getLogger()
class MuxConnection (MessengerConnection):
def __init__ (self, source, channelName, con):
MessengerConnection.__init__(self, source, ID=str(id(self)))
self.channelName = channelName
self.con = con
claimed = False
e = core.messenger.raiseEventNoErrors(ConnectionStarted, self)
if e is not None:
claimed = e._claimed
if not claimed:
# Unclaimed events get forwarded to here too
self.addListener(MessageRecieved, self._defaultMessageRecieved, priority=-1) # Low priority
self._newlines = False
def send (self, whatever, **kw):
whatever = dict(whatever)
whatever['_mux'] = self.channelName
MessengerConnection.send(self, whatever, **kw)
def sendRaw (self, data):
self.con.sendRaw(data)
class MuxSource (object):
def __init__ (self, con):
self.listenTo(con)
self.channels = {}
def _forget (self, connection):
if connection in self.channels:
del self.channels[connection.channelName]
else:
log.warn("Tried to forget a channel I didn't know")
def _handle_MessageRecieved (self, event):
if event.con.isReadable():
r = event.con.read()
if type(r) is dict:
channelName = r.get("_mux", None)
del r['_mux']
if channelName is not None:
if channelName not in self.channels:
# New channel
channel = MuxConnection(self, channelName, event.con)
self.channels[channelName] = channel
else:
channel = self.channels[channelName]
elif r.get("_mux_bye",False):
event.con.close()
else:
log.warn("Message to demuxer didn't specify a channel or valid command")
else:
log.warn("Demuxer only handlers dictionaries")
else:
self._closeAll()
def _handle_ConnectionClosed (self, event):
self._closeAll()
def _closeAll (self):
channels = self.channels.values()
for connection in channels:
connection._close()
class MuxHub (object):
"""
"""
def __init__ (self):
core.messenger.addListener(MessageRecieved, self._handle_global_MessageRecieved)#, weak=True)
def _handle_global_MessageRecieved (self, event):
try:
n = event.con.read()['hello']
if n['hello'] == 'mux':
# It's for me!
event.claim()
m = MuxSource(event.con)
print self.__class__.__name__, "- started conversation with", event.con
except:
pass
def launch ():
# core.register("demux", MessengerHub())
global hub
hub = MuxHub()
|
|
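Two observations on the multiplexer record above. First, the client-side protocol the commit message describes can be exercised with a few raw JSON messages; the sketch below assumes POX's messenger service accepts JSON over TCP — the host, port, and framing are placeholders, so check your messenger transport's configuration before using it. Second, note that _handle_global_MessageRecieved effectively indexes the 'hello' key twice (read()['hello'] and then n['hello']), so the handshake as committed raises a TypeError that the bare except silently swallows — consistent with the "totally untested" caveat in the commit message.

import json
import socket

sock = socket.create_connection(('127.0.0.1', 7790))  # placeholder address/port

def send(obj):
    sock.sendall(json.dumps(obj).encode('utf-8'))

send({'hello': 'mux'})                    # ask MuxHub to claim this connection
send({'_mux': 'chanA', 'cmd': 'ping'})    # first use of 'chanA' creates a MuxConnection
send({'_mux_bye': True})                  # ask the demuxer to close the transport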
0a08696add6080c37e6083cedbb950f1ab19cee8
|
test/test_convert.py
|
test/test_convert.py
|
# This file is part of beets.
# Copyright 2014, Thomas Scholtes.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
import os.path
from _common import unittest
from helper import TestHelper
class ImportConvertTest(unittest.TestCase, TestHelper):
def setUp(self):
self.setup_beets()
self.importer = self.create_importer()
self.load_plugins('convert')
def tearDown(self):
self.teardown_beets()
self.unload_plugins()
def test_import_original_on_convert_error(self):
# `false` exits with non-zero code
self.config['convert']['command'] = u'false'
self.config['convert']['auto'] = True
# Enforce running convert
self.config['convert']['max_bitrate'] = 1
self.config['convert']['quiet'] = False
self.importer.run()
item = self.lib.items().get()
self.assertIsNotNone(item)
self.assertTrue(os.path.isfile(item.path))
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
Test convert error during import
|
Test convert error during import
When the conversion of an audio file fails during import the original should be
imported. See #659
|
Python
|
mit
|
Andypsamp/CODfinalJUNIT,pkess/beets,SusannaMaria/beets,sampsyo/beets,artemutin/beets,jmwatte/beets,gabrielaraujof/beets,Wen777/beets,moodboom/beets,tima/beets,mried/beets,diego-plan9/beets,YetAnotherNerd/beets,jcoady9/beets,Freso/beets,untitaker/beets,untitaker/beets,sampsyo/beets,kelvinhammond/beets,kareemallen/beets,andremiller/beets,imsparsh/beets,m-urban/beets,sadatay/beets,Kraymer/beets,dfc/beets,Dishwishy/beets,beetbox/beets,LordSputnik/beets,jackwilsdon/beets,kareemallen/beets,jmwatte/beets,xsteadfastx/beets,MyTunesFreeMusic/privacy-policy,Andypsamp/CODfinalJUNIT,jackwilsdon/beets,Andypsamp/CODfinalJUNIT,lightwang1/beets,beetbox/beets,Andypsamp/CODjunit,kelvinhammond/beets,LordSputnik/beets,ibmibmibm/beets,arabenjamin/beets,mathstuf/beets,drm00/beets,parapente/beets,parapente/beets,Andypsamp/CODfinalJUNIT,moodboom/beets,arabenjamin/beets,asteven/beets,mosesfistos1/beetbox,Kraymer/beets,sampsyo/beets,marcuskrahl/beets,MyTunesFreeMusic/privacy-policy,Dishwishy/beets,diego-plan9/beets,Dishwishy/beets,gabrielaraujof/beets,mried/beets,SusannaMaria/beets,Kraymer/beets,parapente/beets,mosesfistos1/beetbox,lightwang1/beets,m-urban/beets,m-urban/beets,kareemallen/beets,randybias/beets,bj-yinyan/beets,lengtche/beets,Andypsamp/CODjunit,beetbox/beets,madmouser1/beets,andremiller/beets,shamangeorge/beets,ttsda/beets,YetAnotherNerd/beets,dfc/beets,mathstuf/beets,jackwilsdon/beets,mried/beets,multikatt/beets,marcuskrahl/beets,sadatay/beets,jcoady9/beets,dfc/beets,sampsyo/beets,gabrielaraujof/beets,imsparsh/beets,diego-plan9/beets,xsteadfastx/beets,PierreRust/beets,lengtche/beets,tima/beets,jcoady9/beets,shanemikel/beets,pkess/beets,moodboom/beets,madmouser1/beets,shanemikel/beets,ttsda/beets,ibmibmibm/beets,mosesfistos1/beetbox,ibmibmibm/beets,Andypsamp/CODjunit,lightwang1/beets,shanemikel/beets,artemutin/beets,pkess/beets,randybias/beets,madmouser1/beets,swt30/beets,lengtche/beets,untitaker/beets,ibmibmibm/beets,parapente/beets,imsparsh/beets,lengtche/beets,LordSputnik/beets,bj-yinyan/beets,xsteadfastx/beets,moodboom/beets,artemutin/beets,Andypsamp/CODjunit,beetbox/beets,marcuskrahl/beets,YetAnotherNerd/beets,Andypsamp/CODjunit,kelvinhammond/beets,dfc/beets,ttsda/beets,ttsda/beets,untitaker/beets,diego-plan9/beets,marcuskrahl/beets,multikatt/beets,drm00/beets,artemutin/beets,randybias/beets,shamangeorge/beets,mosesfistos1/beetbox,MyTunesFreeMusic/privacy-policy,jcoady9/beets,gabrielaraujof/beets,shanemikel/beets,PierreRust/beets,kelvinhammond/beets,Dishwishy/beets,pkess/beets,multikatt/beets,shamangeorge/beets,Wen777/beets,ruippeixotog/beets,PierreRust/beets,multikatt/beets,sadatay/beets,bj-yinyan/beets,Andypsamp/CODfinalJUNIT,xsteadfastx/beets,andremiller/beets,SusannaMaria/beets,Kraymer/beets,sadatay/beets,imsparsh/beets,swt30/beets,arabenjamin/beets,shamangeorge/beets,jmwatte/beets,asteven/beets,jackwilsdon/beets,SusannaMaria/beets,Wen777/beets,drm00/beets,m-urban/beets,arabenjamin/beets,swt30/beets,Freso/beets,PierreRust/beets,kareemallen/beets,jmwatte/beets,Freso/beets,LordSputnik/beets,asteven/beets,tima/beets,randybias/beets,mathstuf/beets,YetAnotherNerd/beets,tima/beets,mried/beets,ruippeixotog/beets,MyTunesFreeMusic/privacy-policy,asteven/beets,drm00/beets,swt30/beets,mathstuf/beets,ruippeixotog/beets,bj-yinyan/beets,ruippeixotog/beets,madmouser1/beets,Freso/beets,lightwang1/beets
|
Test convert error during import
When the conversion of an audio file fails during import the original should be
imported. See #659
|
# This file is part of beets.
# Copyright 2014, Thomas Scholtes.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
import os.path
from _common import unittest
from helper import TestHelper
class ImportConvertTest(unittest.TestCase, TestHelper):
def setUp(self):
self.setup_beets()
self.importer = self.create_importer()
self.load_plugins('convert')
def tearDown(self):
self.teardown_beets()
self.unload_plugins()
def test_import_original_on_convert_error(self):
# `false` exits with non-zero code
self.config['convert']['command'] = u'false'
self.config['convert']['auto'] = True
# Enforce running convert
self.config['convert']['max_bitrate'] = 1
self.config['convert']['quiet'] = False
self.importer.run()
item = self.lib.items().get()
self.assertIsNotNone(item)
self.assertTrue(os.path.isfile(item.path))
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
<commit_before><commit_msg>Test convert error during import
When the conversion of an audio file fails during import the original should be
imported. See #659<commit_after>
|
# This file is part of beets.
# Copyright 2014, Thomas Scholtes.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
import os.path
from _common import unittest
from helper import TestHelper
class ImportConvertTest(unittest.TestCase, TestHelper):
def setUp(self):
self.setup_beets()
self.importer = self.create_importer()
self.load_plugins('convert')
def tearDown(self):
self.teardown_beets()
self.unload_plugins()
def test_import_original_on_convert_error(self):
# `false` exits with non-zero code
self.config['convert']['command'] = u'false'
self.config['convert']['auto'] = True
# Enforce running convert
self.config['convert']['max_bitrate'] = 1
self.config['convert']['quiet'] = False
self.importer.run()
item = self.lib.items().get()
self.assertIsNotNone(item)
self.assertTrue(os.path.isfile(item.path))
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
Test convert error during import
When the conversion of an audio file fails during import the original should be
imported. See #659# This file is part of beets.
# Copyright 2014, Thomas Scholtes.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
import os.path
from _common import unittest
from helper import TestHelper
class ImportConvertTest(unittest.TestCase, TestHelper):
def setUp(self):
self.setup_beets()
self.importer = self.create_importer()
self.load_plugins('convert')
def tearDown(self):
self.teardown_beets()
self.unload_plugins()
def test_import_original_on_convert_error(self):
# `false` exits with non-zero code
self.config['convert']['command'] = u'false'
self.config['convert']['auto'] = True
# Enforce running convert
self.config['convert']['max_bitrate'] = 1
self.config['convert']['quiet'] = False
self.importer.run()
item = self.lib.items().get()
self.assertIsNotNone(item)
self.assertTrue(os.path.isfile(item.path))
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
<commit_before><commit_msg>Test convert error during import
When the conversion of an audio file fails during import the original should be
imported. See #659<commit_after># This file is part of beets.
# Copyright 2014, Thomas Scholtes.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
import os.path
from _common import unittest
from helper import TestHelper
class ImportConvertTest(unittest.TestCase, TestHelper):
def setUp(self):
self.setup_beets()
self.importer = self.create_importer()
self.load_plugins('convert')
def tearDown(self):
self.teardown_beets()
self.unload_plugins()
def test_import_original_on_convert_error(self):
# `false` exits with non-zero code
self.config['convert']['command'] = u'false'
self.config['convert']['auto'] = True
# Enforce running convert
self.config['convert']['max_bitrate'] = 1
self.config['convert']['quiet'] = False
self.importer.run()
item = self.lib.items().get()
self.assertIsNotNone(item)
self.assertTrue(os.path.isfile(item.path))
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
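The behavior under test above — fall back to the original file when the converter exits non-zero — reduces to a try/except around the external command. A hedged sketch of that fallback (illustrative only; beets' convert plugin has a different function signature and does its own path and tag handling):

import shutil
import subprocess

def convert_or_fall_back(command, source, dest):
    """Run the converter; if it fails, import the original untouched (#659)."""
    try:
        subprocess.check_call(command, shell=True)
    except subprocess.CalledProcessError:
        shutil.copyfile(source, dest)  # conversion failed: keep the original
    return dest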