Dataset columns (string length ranges and distinct-class counts as reported by the viewer):

| Column | Type | Lengths / classes |
|---|---|---|
| commit | string | 40–40 chars |
| old_file | string | 4–118 chars |
| new_file | string | 4–118 chars |
| old_contents | string | 0–2.94k chars |
| new_contents | string | 1–4.43k chars |
| subject | string | 15–444 chars |
| message | string | 16–3.45k chars |
| lang | string | 1 class |
| license | string | 13 classes |
| repos | string | 5–43.2k chars |
| prompt | string | 17–4.58k chars |
| response | string | 1–4.43k chars |
| prompt_tagged | string | 58–4.62k chars |
| response_tagged | string | 1–4.43k chars |
| text | string | 132–7.29k chars |
| text_tagged | string | 173–7.33k chars |
73e4789517c8de480d1b5e8c05f3dbe9b31883e5
|
bouncer/embed_detector.py
|
bouncer/embed_detector.py
|
import re
from urllib.parse import urlparse
"""
Hardcoded URL patterns where client is assumed to be embedded.
Only the hostname and path are included in the pattern. The path must be
specified.
These are regular expressions so periods must be escaped.
"""
PATTERNS = [
"h\.readthedocs\.io/.*",
"web\.hypothes\.is/blog/.*",
]
COMPILED_PATTERNS = [re.compile(pat) for pat in PATTERNS]
def url_embeds_client(url):
"""
Test whether ``url`` is known to embed the client.
This currently just tests the URL against the hardcoded regex list
``PATTERNS``.
Only the hostname and path of the URL are tested. Returns false for non-HTTP
URLs.
:return: True if the URL matches a pattern.
"""
parsed_url = urlparse(url)
if not parsed_url.scheme.startswith("http"):
return False
path = parsed_url.path
if not path:
path = "/"
netloc_and_path = parsed_url.netloc + path
for pat in COMPILED_PATTERNS:
if pat.fullmatch(netloc_and_path):
return True
return False
|
import fnmatch
import re
from urllib.parse import urlparse
# Hardcoded URL patterns where client is assumed to be embedded.
#
# Only the hostname and path are included in the pattern. The path must be
# specified; use "example.com/*" to match all URLs on a particular domain.
#
# Patterns are shell-style wildcards ('*' matches any number of chars, '?'
# matches a single char).
PATTERNS = [
"h.readthedocs.io/*",
"web.hypothes.is/blog/*",
]
COMPILED_PATTERNS = [re.compile(fnmatch.translate(pat)) for pat in PATTERNS]
def url_embeds_client(url):
"""
Test whether ``url`` is known to embed the client.
This currently just tests the URL against the pattern list ``PATTERNS``.
Only the hostname and path of the URL are tested. Returns false for non-HTTP
URLs.
:return: True if the URL matches a pattern.
"""
parsed_url = urlparse(url)
if not parsed_url.scheme.startswith("http"):
return False
path = parsed_url.path
if not path:
path = "/"
netloc_and_path = parsed_url.netloc + path
for pat in COMPILED_PATTERNS:
if pat.fullmatch(netloc_and_path):
return True
return False
|
Use fnmatch patterns instead of regexes for URL patterns
|
Use fnmatch patterns instead of regexes for URL patterns
fnmatch patterns have enough flexibility for this use case and this avoids the
need to remember to escape periods, which is easy to forget otherwise. The
resulting patterns are also easier to read.
|
Python
|
bsd-2-clause
|
hypothesis/bouncer,hypothesis/bouncer,hypothesis/bouncer
|
import re
from urllib.parse import urlparse
"""
Hardcoded URL patterns where client is assumed to be embedded.
Only the hostname and path are included in the pattern. The path must be
specified.
These are regular expressions so periods must be escaped.
"""
PATTERNS = [
"h\.readthedocs\.io/.*",
"web\.hypothes\.is/blog/.*",
]
COMPILED_PATTERNS = [re.compile(pat) for pat in PATTERNS]
def url_embeds_client(url):
"""
Test whether ``url`` is known to embed the client.
This currently just tests the URL against the hardcoded regex list
``PATTERNS``.
Only the hostname and path of the URL are tested. Returns false for non-HTTP
URLs.
:return: True if the URL matches a pattern.
"""
parsed_url = urlparse(url)
if not parsed_url.scheme.startswith("http"):
return False
path = parsed_url.path
if not path:
path = "/"
netloc_and_path = parsed_url.netloc + path
for pat in COMPILED_PATTERNS:
if pat.fullmatch(netloc_and_path):
return True
return False
Use fnmatch patterns instead of regexes for URL patterns
fnmatch patterns have enough flexibility for this use case and this avoids the
need to remember to escape periods, which is easy to forget otherwise. The
resulting patterns are also easier to read.
|
import fnmatch
import re
from urllib.parse import urlparse
# Hardcoded URL patterns where client is assumed to be embedded.
#
# Only the hostname and path are included in the pattern. The path must be
# specified; use "example.com/*" to match all URLs on a particular domain.
#
# Patterns are shell-style wildcards ('*' matches any number of chars, '?'
# matches a single char).
PATTERNS = [
"h.readthedocs.io/*",
"web.hypothes.is/blog/*",
]
COMPILED_PATTERNS = [re.compile(fnmatch.translate(pat)) for pat in PATTERNS]
def url_embeds_client(url):
"""
Test whether ``url`` is known to embed the client.
This currently just tests the URL against the pattern list ``PATTERNS``.
Only the hostname and path of the URL are tested. Returns false for non-HTTP
URLs.
:return: True if the URL matches a pattern.
"""
parsed_url = urlparse(url)
if not parsed_url.scheme.startswith("http"):
return False
path = parsed_url.path
if not path:
path = "/"
netloc_and_path = parsed_url.netloc + path
for pat in COMPILED_PATTERNS:
if pat.fullmatch(netloc_and_path):
return True
return False
|
<commit_before>import re
from urllib.parse import urlparse
"""
Hardcoded URL patterns where client is assumed to be embedded.
Only the hostname and path are included in the pattern. The path must be
specified.
These are regular expressions so periods must be escaped.
"""
PATTERNS = [
"h\.readthedocs\.io/.*",
"web\.hypothes\.is/blog/.*",
]
COMPILED_PATTERNS = [re.compile(pat) for pat in PATTERNS]
def url_embeds_client(url):
"""
Test whether ``url`` is known to embed the client.
This currently just tests the URL against the hardcoded regex list
``PATTERNS``.
Only the hostname and path of the URL are tested. Returns false for non-HTTP
URLs.
:return: True if the URL matches a pattern.
"""
parsed_url = urlparse(url)
if not parsed_url.scheme.startswith("http"):
return False
path = parsed_url.path
if not path:
path = "/"
netloc_and_path = parsed_url.netloc + path
for pat in COMPILED_PATTERNS:
if pat.fullmatch(netloc_and_path):
return True
return False
<commit_msg>Use fnmatch patterns instead of regexes for URL patterns
fnmatch patterns have enough flexibility for this use case and this avoids the
need to remember to escape periods, which is easy to forget otherwise. The
resulting patterns are also easier to read.<commit_after>
|
import fnmatch
import re
from urllib.parse import urlparse
# Hardcoded URL patterns where client is assumed to be embedded.
#
# Only the hostname and path are included in the pattern. The path must be
# specified; use "example.com/*" to match all URLs on a particular domain.
#
# Patterns are shell-style wildcards ('*' matches any number of chars, '?'
# matches a single char).
PATTERNS = [
"h.readthedocs.io/*",
"web.hypothes.is/blog/*",
]
COMPILED_PATTERNS = [re.compile(fnmatch.translate(pat)) for pat in PATTERNS]
def url_embeds_client(url):
"""
Test whether ``url`` is known to embed the client.
This currently just tests the URL against the pattern list ``PATTERNS``.
Only the hostname and path of the URL are tested. Returns false for non-HTTP
URLs.
:return: True if the URL matches a pattern.
"""
parsed_url = urlparse(url)
if not parsed_url.scheme.startswith("http"):
return False
path = parsed_url.path
if not path:
path = "/"
netloc_and_path = parsed_url.netloc + path
for pat in COMPILED_PATTERNS:
if pat.fullmatch(netloc_and_path):
return True
return False
|
import re
from urllib.parse import urlparse
"""
Hardcoded URL patterns where client is assumed to be embedded.
Only the hostname and path are included in the pattern. The path must be
specified.
These are regular expressions so periods must be escaped.
"""
PATTERNS = [
"h\.readthedocs\.io/.*",
"web\.hypothes\.is/blog/.*",
]
COMPILED_PATTERNS = [re.compile(pat) for pat in PATTERNS]
def url_embeds_client(url):
"""
Test whether ``url`` is known to embed the client.
This currently just tests the URL against the hardcoded regex list
``PATTERNS``.
Only the hostname and path of the URL are tested. Returns false for non-HTTP
URLs.
:return: True if the URL matches a pattern.
"""
parsed_url = urlparse(url)
if not parsed_url.scheme.startswith("http"):
return False
path = parsed_url.path
if not path:
path = "/"
netloc_and_path = parsed_url.netloc + path
for pat in COMPILED_PATTERNS:
if pat.fullmatch(netloc_and_path):
return True
return False
Use fnmatch patterns instead of regexes for URL patterns
fnmatch patterns have enough flexibility for this use case and this avoids the
need to remember to escape periods, which is easy to forget otherwise. The
resulting patterns are also easier to read.import fnmatch
import re
from urllib.parse import urlparse
# Hardcoded URL patterns where client is assumed to be embedded.
#
# Only the hostname and path are included in the pattern. The path must be
# specified; use "example.com/*" to match all URLs on a particular domain.
#
# Patterns are shell-style wildcards ('*' matches any number of chars, '?'
# matches a single char).
PATTERNS = [
"h.readthedocs.io/*",
"web.hypothes.is/blog/*",
]
COMPILED_PATTERNS = [re.compile(fnmatch.translate(pat)) for pat in PATTERNS]
def url_embeds_client(url):
"""
Test whether ``url`` is known to embed the client.
This currently just tests the URL against the pattern list ``PATTERNS``.
Only the hostname and path of the URL are tested. Returns false for non-HTTP
URLs.
:return: True if the URL matches a pattern.
"""
parsed_url = urlparse(url)
if not parsed_url.scheme.startswith("http"):
return False
path = parsed_url.path
if not path:
path = "/"
netloc_and_path = parsed_url.netloc + path
for pat in COMPILED_PATTERNS:
if pat.fullmatch(netloc_and_path):
return True
return False
|
<commit_before>import re
from urllib.parse import urlparse
"""
Hardcoded URL patterns where client is assumed to be embedded.
Only the hostname and path are included in the pattern. The path must be
specified.
These are regular expressions so periods must be escaped.
"""
PATTERNS = [
"h\.readthedocs\.io/.*",
"web\.hypothes\.is/blog/.*",
]
COMPILED_PATTERNS = [re.compile(pat) for pat in PATTERNS]
def url_embeds_client(url):
"""
Test whether ``url`` is known to embed the client.
This currently just tests the URL against the hardcoded regex list
``PATTERNS``.
Only the hostname and path of the URL are tested. Returns false for non-HTTP
URLs.
:return: True if the URL matches a pattern.
"""
parsed_url = urlparse(url)
if not parsed_url.scheme.startswith("http"):
return False
path = parsed_url.path
if not path:
path = "/"
netloc_and_path = parsed_url.netloc + path
for pat in COMPILED_PATTERNS:
if pat.fullmatch(netloc_and_path):
return True
return False
<commit_msg>Use fnmatch patterns instead of regexes for URL patterns
fnmatch patterns have enough flexibility for this use case and this avoids the
need to remember to escape periods, which is easy to forget otherwise. The
resulting patterns are also easier to read.<commit_after>import fnmatch
import re
from urllib.parse import urlparse
# Hardcoded URL patterns where client is assumed to be embedded.
#
# Only the hostname and path are included in the pattern. The path must be
# specified; use "example.com/*" to match all URLs on a particular domain.
#
# Patterns are shell-style wildcards ('*' matches any number of chars, '?'
# matches a single char).
PATTERNS = [
"h.readthedocs.io/*",
"web.hypothes.is/blog/*",
]
COMPILED_PATTERNS = [re.compile(fnmatch.translate(pat)) for pat in PATTERNS]
def url_embeds_client(url):
"""
Test whether ``url`` is known to embed the client.
This currently just tests the URL against the pattern list ``PATTERNS``.
Only the hostname and path of the URL are tested. Returns false for non-HTTP
URLs.
:return: True if the URL matches a pattern.
"""
parsed_url = urlparse(url)
if not parsed_url.scheme.startswith("http"):
return False
path = parsed_url.path
if not path:
path = "/"
netloc_and_path = parsed_url.netloc + path
for pat in COMPILED_PATTERNS:
if pat.fullmatch(netloc_and_path):
return True
return False
|
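The record above replaces hand-escaped regexes with fnmatch patterns. A minimal editorial sketch (not part of the record) of why that removes the escaping footgun, assuming CPython 3, where `fnmatch.translate` escapes literal characters itself:

```python
# Editorial sketch: fnmatch.translate() escapes the literal '.' for you,
# so the pattern author no longer has to remember to write '\.'.
import fnmatch
import re

pattern = "h.readthedocs.io/*"            # shell-style wildcard from PATTERNS
compiled = re.compile(fnmatch.translate(pattern))

print(fnmatch.translate(pattern))         # roughly: (?s:h\.readthedocs\.io/.*)\Z
print(bool(compiled.fullmatch("h.readthedocs.io/en/latest/")))   # True
print(bool(compiled.fullmatch("hxreadthedocs.io/en/latest/")))   # False: '.' is literal
```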
a64024959a36e1a03dbd3ecf27f08a56702ecec4
|
eche/tests/test_step1_raed_print.py
|
eche/tests/test_step1_raed_print.py
|
import pytest
from eche.reader import read_str
from eche.printer import print_str
import math
@pytest.mark.parametrize("test_input", [
'1',
'-1',
'0',
str(math.pi),
str(math.e)
])
def test_numbers(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'*',
'+',
'abc',
'test1',
'abc-def',
])
def test_eche_type_symbol(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'()',
'(* 1 2)',
'(+ (* 1 5) (/ 1 0))'
])
def test_eche_type_list(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'nil',
])
def test_nil(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'true',
'false',
])
def test_bool(test_input):
assert print_str(read_str(test_input)) == test_input
|
import pytest
from eche.reader import read_str
from eche.printer import print_str
import math
@pytest.mark.parametrize("test_input", [
'1',
'-1',
'0',
str(math.pi),
str(math.e)
])
def test_numbers(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'*',
'+',
'abc',
'test1',
'abc-def',
])
def test_eche_type_symbol(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'((9 8))',
'()',
'(* 1 2)',
'(+ (* 1 5) (/ 1 0))'
])
def test_eche_type_list(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'nil',
])
def test_nil(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'true',
'false',
])
def test_bool(test_input):
assert print_str(read_str(test_input)) == test_input
|
Add list with list test.
|
Add list with list test.
|
Python
|
mit
|
skk/eche
|
import pytest
from eche.reader import read_str
from eche.printer import print_str
import math
@pytest.mark.parametrize("test_input", [
'1',
'-1',
'0',
str(math.pi),
str(math.e)
])
def test_numbers(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'*',
'+',
'abc',
'test1',
'abc-def',
])
def test_eche_type_symbol(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'()',
'(* 1 2)',
'(+ (* 1 5) (/ 1 0))'
])
def test_eche_type_list(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'nil',
])
def test_nil(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'true',
'false',
])
def test_bool(test_input):
assert print_str(read_str(test_input)) == test_input
Add list with list test.
|
import pytest
from eche.reader import read_str
from eche.printer import print_str
import math
@pytest.mark.parametrize("test_input", [
'1',
'-1',
'0',
str(math.pi),
str(math.e)
])
def test_numbers(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'*',
'+',
'abc',
'test1',
'abc-def',
])
def test_eche_type_symbol(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'((9 8))',
'()',
'(* 1 2)',
'(+ (* 1 5) (/ 1 0))'
])
def test_eche_type_list(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'nil',
])
def test_nil(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'true',
'false',
])
def test_bool(test_input):
assert print_str(read_str(test_input)) == test_input
|
<commit_before>import pytest
from eche.reader import read_str
from eche.printer import print_str
import math
@pytest.mark.parametrize("test_input", [
'1',
'-1',
'0',
str(math.pi),
str(math.e)
])
def test_numbers(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'*',
'+',
'abc',
'test1',
'abc-def',
])
def test_eche_type_symbol(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'()',
'(* 1 2)',
'(+ (* 1 5) (/ 1 0))'
])
def test_eche_type_list(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'nil',
])
def test_nil(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'true',
'false',
])
def test_bool(test_input):
assert print_str(read_str(test_input)) == test_input
<commit_msg>Add list with list test.<commit_after>
|
import pytest
from eche.reader import read_str
from eche.printer import print_str
import math
@pytest.mark.parametrize("test_input", [
'1',
'-1',
'0',
str(math.pi),
str(math.e)
])
def test_numbers(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'*',
'+',
'abc',
'test1',
'abc-def',
])
def test_eche_type_symbol(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'((9 8))',
'()',
'(* 1 2)',
'(+ (* 1 5) (/ 1 0))'
])
def test_eche_type_list(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'nil',
])
def test_nil(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'true',
'false',
])
def test_bool(test_input):
assert print_str(read_str(test_input)) == test_input
|
import pytest
from eche.reader import read_str
from eche.printer import print_str
import math
@pytest.mark.parametrize("test_input", [
'1',
'-1',
'0',
str(math.pi),
str(math.e)
])
def test_numbers(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'*',
'+',
'abc',
'test1',
'abc-def',
])
def test_eche_type_symbol(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'()',
'(* 1 2)',
'(+ (* 1 5) (/ 1 0))'
])
def test_eche_type_list(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'nil',
])
def test_nil(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'true',
'false',
])
def test_bool(test_input):
assert print_str(read_str(test_input)) == test_input
Add list with list test.import pytest
from eche.reader import read_str
from eche.printer import print_str
import math
@pytest.mark.parametrize("test_input", [
'1',
'-1',
'0',
str(math.pi),
str(math.e)
])
def test_numbers(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'*',
'+',
'abc',
'test1',
'abc-def',
])
def test_eche_type_symbol(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'((9 8))',
'()',
'(* 1 2)',
'(+ (* 1 5) (/ 1 0))'
])
def test_eche_type_list(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'nil',
])
def test_nil(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'true',
'false',
])
def test_bool(test_input):
assert print_str(read_str(test_input)) == test_input
|
<commit_before>import pytest
from eche.reader import read_str
from eche.printer import print_str
import math
@pytest.mark.parametrize("test_input", [
'1',
'-1',
'0',
str(math.pi),
str(math.e)
])
def test_numbers(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'*',
'+',
'abc',
'test1',
'abc-def',
])
def test_eche_type_symbol(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'()',
'(* 1 2)',
'(+ (* 1 5) (/ 1 0))'
])
def test_eche_type_list(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'nil',
])
def test_nil(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'true',
'false',
])
def test_bool(test_input):
assert print_str(read_str(test_input)) == test_input
<commit_msg>Add list with list test.<commit_after>import pytest
from eche.reader import read_str
from eche.printer import print_str
import math
@pytest.mark.parametrize("test_input", [
'1',
'-1',
'0',
str(math.pi),
str(math.e)
])
def test_numbers(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'*',
'+',
'abc',
'test1',
'abc-def',
])
def test_eche_type_symbol(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'((9 8))',
'()',
'(* 1 2)',
'(+ (* 1 5) (/ 1 0))'
])
def test_eche_type_list(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'nil',
])
def test_nil(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'true',
'false',
])
def test_bool(test_input):
assert print_str(read_str(test_input)) == test_input
|
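The new `'((9 8))'` case in the record above checks the read/print round trip on a nested list. A self-contained editorial sketch of that property, using toy stand-ins rather than eche's actual `read_str`/`print_str`:

```python
# Editorial sketch: a toy s-expression reader/printer demonstrating the
# round-trip property print_str(read_str(s)) == s for nested lists.
def read_str(src):
    tokens = src.replace("(", " ( ").replace(")", " ) ").split()

    def parse(pos):
        if tokens[pos] == "(":
            items, pos = [], pos + 1
            while tokens[pos] != ")":
                item, pos = parse(pos)
                items.append(item)
            return items, pos + 1          # step past the closing ')'
        return tokens[pos], pos + 1        # atom

    node, _ = parse(0)
    return node

def print_str(node):
    if isinstance(node, list):
        return "(" + " ".join(print_str(n) for n in node) + ")"
    return node

assert print_str(read_str("((9 8))")) == "((9 8))"   # the newly added case
```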
e20aaffc908a762757b6b4cb73f6d607b15ac03a
|
tracker.py
|
tracker.py
|
#-*- coding: utf-8 -*-
import time
import sys
from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
import tweepy
import json
#Get Hashtag to track
argTag = sys.argv[1]
#Class for listening to all tweets
class TweetListener(StreamListener):
def on_status(self, status):
print status.created_at
#Write timestamp to file
f = open("logs/" + argTag + ".txt", "a")
f.write(str(status.created_at) + "\n")
f.close()
return True
def on_error(self, status):
print status
if __name__ == '__main__':
listener = TweetListener()
#Keys
CONSUMER_KEY = ''
CONSUMER_SECRET = ''
ACCESS_KEY = ''
ACCESS_SECRET = ''
#Initialise and Authorise
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
stream = Stream(auth, listener)
stream.filter(track = [argTag])
|
import sys
from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
import tweepy
#Get Hashtag to track
argTag = sys.argv[1]
#Class for listening to all tweets
class TweetListener(StreamListener):
def on_status(self, status):
print status.created_at
#Write timestamp to file
f = open("logs/" + argTag + ".txt", "a")
f.write(str(status.created_at) + "\n")
f.close()
return True
def on_error(self, status):
print status
if __name__ == '__main__':
listener = TweetListener()
#Keys
CONSUMER_KEY = ''
CONSUMER_SECRET = ''
ACCESS_KEY = ''
ACCESS_SECRET = ''
#Initialise and Authorise
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
stream = Stream(auth, listener)
stream.filter(track = [argTag])
|
Remove unnecessary code and libraries
|
Remove unnecessary code and libraries
|
Python
|
mit
|
tim-thompson/TweetTimeTracker
|
#-*- coding: utf-8 -*-
import time
import sys
from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
import tweepy
import json
#Get Hashtag to track
argTag = sys.argv[1]
#Class for listening to all tweets
class TweetListener(StreamListener):
def on_status(self, status):
print status.created_at
#Write timestamp to file
f = open("logs/" + argTag + ".txt", "a")
f.write(str(status.created_at) + "\n")
f.close()
return True
def on_error(self, status):
print status
if __name__ == '__main__':
listener = TweetListener()
#Keys
CONSUMER_KEY = ''
CONSUMER_SECRET = ''
ACCESS_KEY = ''
ACCESS_SECRET = ''
#Initialise and Authorise
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
stream = Stream(auth, listener)
stream.filter(track = [argTag])
Remove unnecessary code and libraries
|
import sys
from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
import tweepy
#Get Hashtag to track
argTag = sys.argv[1]
#Class for listening to all tweets
class TweetListener(StreamListener):
def on_status(self, status):
print status.created_at
#Write timestamp to file
f = open("logs/" + argTag + ".txt", "a")
f.write(str(status.created_at) + "\n")
f.close()
return True
def on_error(self, status):
print status
if __name__ == '__main__':
listener = TweetListener()
#Keys
CONSUMER_KEY = ''
CONSUMER_SECRET = ''
ACCESS_KEY = ''
ACCESS_SECRET = ''
#Initialise and Authorise
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
stream = Stream(auth, listener)
stream.filter(track = [argTag])
|
<commit_before>#-*- coding: utf-8 -*-
import time
import sys
from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
import tweepy
import json
#Get Hashtag to track
argTag = sys.argv[1]
#Class for listening to all tweets
class TweetListener(StreamListener):
def on_status(self, status):
print status.created_at
#Write timestamp to file
f = open("logs/" + argTag + ".txt", "a")
f.write(str(status.created_at) + "\n")
f.close()
return True
def on_error(self, status):
print status
if __name__ == '__main__':
listener = TweetListener()
#Keys
CONSUMER_KEY = ''
CONSUMER_SECRET = ''
ACCESS_KEY = ''
ACCESS_SECRET = ''
#Initialise and Authorise
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
stream = Stream(auth, listener)
stream.filter(track = [argTag])
<commit_msg>Remove unnecessary code and libraries<commit_after>
|
import sys
from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
import tweepy
#Get Hashtag to track
argTag = sys.argv[1]
#Class for listening to all tweets
class TweetListener(StreamListener):
def on_status(self, status):
print status.created_at
#Write timestamp to file
f = open("logs/" + argTag + ".txt", "a")
f.write(str(status.created_at) + "\n")
f.close()
return True
def on_error(self, status):
print status
if __name__ == '__main__':
listener = TweetListener()
#Keys
CONSUMER_KEY = ''
CONSUMER_SECRET = ''
ACCESS_KEY = ''
ACCESS_SECRET = ''
#Initialise and Authorise
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
stream = Stream(auth, listener)
stream.filter(track = [argTag])
|
#-*- coding: utf-8 -*-
import time
import sys
from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
import tweepy
import json
#Get Hashtag to track
argTag = sys.argv[1]
#Class for listening to all tweets
class TweetListener(StreamListener):
def on_status(self, status):
print status.created_at
#Write timestamp to file
f = open("logs/" + argTag + ".txt", "a")
f.write(str(status.created_at) + "\n")
f.close()
return True
def on_error(self, status):
print status
if __name__ == '__main__':
listener = TweetListener()
#Keys
CONSUMER_KEY = ''
CONSUMER_SECRET = ''
ACCESS_KEY = ''
ACCESS_SECRET = ''
#Initialise and Authorise
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
stream = Stream(auth, listener)
stream.filter(track = [argTag])
Remove unnecessary code and librariesimport sys
from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
import tweepy
#Get Hashtag to track
argTag = sys.argv[1]
#Class for listening to all tweets
class TweetListener(StreamListener):
def on_status(self, status):
print status.created_at
#Write timestamp to file
f = open("logs/" + argTag + ".txt", "a")
f.write(str(status.created_at) + "\n")
f.close()
return True
def on_error(self, status):
print status
if __name__ == '__main__':
listener = TweetListener()
#Keys
CONSUMER_KEY = ''
CONSUMER_SECRET = ''
ACCESS_KEY = ''
ACCESS_SECRET = ''
#Initialise and Authorise
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
stream = Stream(auth, listener)
stream.filter(track = [argTag])
|
<commit_before>#-*- coding: utf-8 -*-
import time
import sys
from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
import tweepy
import json
#Get Hashtag to track
argTag = sys.argv[1]
#Class for listening to all tweets
class TweetListener(StreamListener):
def on_status(self, status):
print status.created_at
#Write timestamp to file
f = open("logs/" + argTag + ".txt", "a")
f.write(str(status.created_at) + "\n")
f.close()
return True
def on_error(self, status):
print status
if __name__ == '__main__':
listener = TweetListener()
#Keys
CONSUMER_KEY = ''
CONSUMER_SECRET = ''
ACCESS_KEY = ''
ACCESS_SECRET = ''
#Initialise and Authorise
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
stream = Stream(auth, listener)
stream.filter(track = [argTag])
<commit_msg>Remove unnecessary code and libraries<commit_after>import sys
from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
import tweepy
#Get Hashtag to track
argTag = sys.argv[1]
#Class for listening to all tweets
class TweetListener(StreamListener):
def on_status(self, status):
print status.created_at
#Write timestamp to file
f = open("logs/" + argTag + ".txt", "a")
f.write(str(status.created_at) + "\n")
f.close()
return True
def on_error(self, status):
print status
if __name__ == '__main__':
listener = TweetListener()
#Keys
CONSUMER_KEY = ''
CONSUMER_SECRET = ''
ACCESS_KEY = ''
ACCESS_SECRET = ''
#Initialise and Authorise
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
stream = Stream(auth, listener)
stream.filter(track = [argTag])
|
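The record above deletes the unused `time` and `json` imports. Dead imports like these can be caught mechanically; a hedged editorial sketch using pyflakes (the checker flake8 wraps), assuming its `check`/`Reporter` API:

```python
# Editorial sketch: flagging unused imports such as the removed `time`/`json`.
# Assumes pyflakes is installed; message wording may differ across versions.
import sys
from pyflakes.api import check
from pyflakes.reporter import Reporter

source = "import time\nimport sys\nprint(sys.argv)\n"
check(source, "tracker.py", Reporter(sys.stdout, sys.stderr))
# Reports, roughly: tracker.py:1:1 'time' imported but unused
```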
c2d9a7a276b4f0442663a62bafb3c70bd7373f7e
|
distarray/local/tests/paralleltest_io.py
|
distarray/local/tests/paralleltest_io.py
|
import tempfile
from numpy.testing import assert_allclose
from os import path
from distarray.local import LocalArray, save, load
from distarray.testing import comm_null_passes, MpiTestCase
class TestFlatFileIO(MpiTestCase):
@comm_null_passes
def test_flat_file_read_write(self):
larr0 = LocalArray((7,))
output_dir = tempfile.gettempdir()
filename = 'outfile'
output_path = path.join(output_dir, filename)
save(output_path, larr0)
larr1 = load(output_path, comm=self.comm)
self.assertTrue(isinstance(larr1, LocalArray))
assert_allclose(larr0, larr1)
|
import tempfile
from numpy.testing import assert_allclose
from os import path
from distarray.local import LocalArray, save, load
from distarray.testing import comm_null_passes, MpiTestCase
class TestFlatFileIO(MpiTestCase):
@comm_null_passes
def test_flat_file_read_write(self):
larr0 = LocalArray((7,), comm=self.comm)
output_dir = tempfile.gettempdir()
filename = 'outfile'
output_path = path.join(output_dir, filename)
save(output_path, larr0)
larr1 = load(output_path, comm=self.comm)
self.assertTrue(isinstance(larr1, LocalArray))
assert_allclose(larr0, larr1)
|
Add missing `comm` argument to LocalArray constructor.
|
Add missing `comm` argument to LocalArray constructor.
Segfaults otherwise...
|
Python
|
bsd-3-clause
|
RaoUmer/distarray,RaoUmer/distarray,enthought/distarray,enthought/distarray
|
import tempfile
from numpy.testing import assert_allclose
from os import path
from distarray.local import LocalArray, save, load
from distarray.testing import comm_null_passes, MpiTestCase
class TestFlatFileIO(MpiTestCase):
@comm_null_passes
def test_flat_file_read_write(self):
larr0 = LocalArray((7,))
output_dir = tempfile.gettempdir()
filename = 'outfile'
output_path = path.join(output_dir, filename)
save(output_path, larr0)
larr1 = load(output_path, comm=self.comm)
self.assertTrue(isinstance(larr1, LocalArray))
assert_allclose(larr0, larr1)
Add missing `comm` argument to LocalArray constructor.
Segfaults otherwise...
|
import tempfile
from numpy.testing import assert_allclose
from os import path
from distarray.local import LocalArray, save, load
from distarray.testing import comm_null_passes, MpiTestCase
class TestFlatFileIO(MpiTestCase):
@comm_null_passes
def test_flat_file_read_write(self):
larr0 = LocalArray((7,), comm=self.comm)
output_dir = tempfile.gettempdir()
filename = 'outfile'
output_path = path.join(output_dir, filename)
save(output_path, larr0)
larr1 = load(output_path, comm=self.comm)
self.assertTrue(isinstance(larr1, LocalArray))
assert_allclose(larr0, larr1)
|
<commit_before>import tempfile
from numpy.testing import assert_allclose
from os import path
from distarray.local import LocalArray, save, load
from distarray.testing import comm_null_passes, MpiTestCase
class TestFlatFileIO(MpiTestCase):
@comm_null_passes
def test_flat_file_read_write(self):
larr0 = LocalArray((7,))
output_dir = tempfile.gettempdir()
filename = 'outfile'
output_path = path.join(output_dir, filename)
save(output_path, larr0)
larr1 = load(output_path, comm=self.comm)
self.assertTrue(isinstance(larr1, LocalArray))
assert_allclose(larr0, larr1)
<commit_msg>Add missing `comm` argument to LocalArray constructor.
Segfaults otherwise...<commit_after>
|
import tempfile
from numpy.testing import assert_allclose
from os import path
from distarray.local import LocalArray, save, load
from distarray.testing import comm_null_passes, MpiTestCase
class TestFlatFileIO(MpiTestCase):
@comm_null_passes
def test_flat_file_read_write(self):
larr0 = LocalArray((7,), comm=self.comm)
output_dir = tempfile.gettempdir()
filename = 'outfile'
output_path = path.join(output_dir, filename)
save(output_path, larr0)
larr1 = load(output_path, comm=self.comm)
self.assertTrue(isinstance(larr1, LocalArray))
assert_allclose(larr0, larr1)
|
import tempfile
from numpy.testing import assert_allclose
from os import path
from distarray.local import LocalArray, save, load
from distarray.testing import comm_null_passes, MpiTestCase
class TestFlatFileIO(MpiTestCase):
@comm_null_passes
def test_flat_file_read_write(self):
larr0 = LocalArray((7,))
output_dir = tempfile.gettempdir()
filename = 'outfile'
output_path = path.join(output_dir, filename)
save(output_path, larr0)
larr1 = load(output_path, comm=self.comm)
self.assertTrue(isinstance(larr1, LocalArray))
assert_allclose(larr0, larr1)
Add missing `comm` argument to LocalArray constructor.
Segfaults otherwise...import tempfile
from numpy.testing import assert_allclose
from os import path
from distarray.local import LocalArray, save, load
from distarray.testing import comm_null_passes, MpiTestCase
class TestFlatFileIO(MpiTestCase):
@comm_null_passes
def test_flat_file_read_write(self):
larr0 = LocalArray((7,), comm=self.comm)
output_dir = tempfile.gettempdir()
filename = 'outfile'
output_path = path.join(output_dir, filename)
save(output_path, larr0)
larr1 = load(output_path, comm=self.comm)
self.assertTrue(isinstance(larr1, LocalArray))
assert_allclose(larr0, larr1)
|
<commit_before>import tempfile
from numpy.testing import assert_allclose
from os import path
from distarray.local import LocalArray, save, load
from distarray.testing import comm_null_passes, MpiTestCase
class TestFlatFileIO(MpiTestCase):
@comm_null_passes
def test_flat_file_read_write(self):
larr0 = LocalArray((7,))
output_dir = tempfile.gettempdir()
filename = 'outfile'
output_path = path.join(output_dir, filename)
save(output_path, larr0)
larr1 = load(output_path, comm=self.comm)
self.assertTrue(isinstance(larr1, LocalArray))
assert_allclose(larr0, larr1)
<commit_msg>Add missing `comm` argument to LocalArray constructor.
Segfaults otherwise...<commit_after>import tempfile
from numpy.testing import assert_allclose
from os import path
from distarray.local import LocalArray, save, load
from distarray.testing import comm_null_passes, MpiTestCase
class TestFlatFileIO(MpiTestCase):
@comm_null_passes
def test_flat_file_read_write(self):
larr0 = LocalArray((7,), comm=self.comm)
output_dir = tempfile.gettempdir()
filename = 'outfile'
output_path = path.join(output_dir, filename)
save(output_path, larr0)
larr1 = load(output_path, comm=self.comm)
self.assertTrue(isinstance(larr1, LocalArray))
assert_allclose(larr0, larr1)
|
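The fix in the record above hands the MPI communicator to the constructor explicitly, so every rank builds the array on the same communicator as the later collective `save`/`load`. A hedged mpi4py sketch of the idea; `make_array` below is a hypothetical stand-in, not LocalArray:

```python
# Editorial sketch: thread an explicit communicator through construction
# instead of relying on an implicit default. Run under MPI, e.g.:
#   mpiexec -n 2 python sketch.py
from mpi4py import MPI

def make_array(shape, comm):               # hypothetical constructor
    return {"shape": shape, "rank": comm.Get_rank(), "size": comm.Get_size()}

comm = MPI.COMM_WORLD
arr = make_array((7,), comm=comm)          # explicit comm, as in the fix
print(arr)
```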
12fc9a49a0dd55836165d89df6bb59ffecdd03eb
|
bayespy/inference/vmp/nodes/__init__.py
|
bayespy/inference/vmp/nodes/__init__.py
|
################################################################################
# Copyright (C) 2011-2012 Jaakko Luttinen
#
# This file is licensed under the MIT License.
################################################################################
# Import some most commonly used nodes
from . import *
from .bernoulli import Bernoulli
from .binomial import Binomial
from .categorical import Categorical
from .multinomial import Multinomial
from .poisson import Poisson
from .beta import Beta
from .beta import Complement
from .dirichlet import Dirichlet, DirichletConcentration
from .exponential import Exponential
from .gaussian import Gaussian, GaussianARD
from .wishart import Wishart
from .gamma import Gamma, GammaShape
from .gaussian import (GaussianGamma,
GaussianWishart)
from .gaussian_markov_chain import GaussianMarkovChain
from .gaussian_markov_chain import VaryingGaussianMarkovChain
from .gaussian_markov_chain import SwitchingGaussianMarkovChain
from .categorical_markov_chain import CategoricalMarkovChain
from .mixture import Mixture, MultiMixture
from .gate import Gate
from .concatenate import Concatenate
from .dot import Dot
from .dot import SumMultiply
from .add import Add
from .take import Take
from .gaussian import ConcatGaussian
from .logpdf import LogPDF
from .constant import Constant
|
################################################################################
# Copyright (C) 2011-2012 Jaakko Luttinen
#
# This file is licensed under the MIT License.
################################################################################
# Import some most commonly used nodes
from . import *
from .bernoulli import Bernoulli
from .binomial import Binomial
from .categorical import Categorical
from .multinomial import Multinomial
from .poisson import Poisson
from .beta import Beta
from .beta import Complement
from .dirichlet import Dirichlet, DirichletConcentration
from .exponential import Exponential
from .gaussian import Gaussian, GaussianARD
from .wishart import Wishart
from .gamma import Gamma, GammaShape
from .gaussian import (GaussianGamma,
GaussianWishart)
from .gaussian_markov_chain import GaussianMarkovChain
from .gaussian_markov_chain import VaryingGaussianMarkovChain
from .gaussian_markov_chain import SwitchingGaussianMarkovChain
from .categorical_markov_chain import CategoricalMarkovChain
from .mixture import Mixture, MultiMixture
from .gate import Gate
from .gate import Choose
from .concatenate import Concatenate
from .dot import Dot
from .dot import SumMultiply
from .add import Add
from .take import Take
from .gaussian import ConcatGaussian
from .logpdf import LogPDF
from .constant import Constant
|
Add Choose node to imported nodes
|
ENH: Add Choose node to imported nodes
|
Python
|
mit
|
bayespy/bayespy,jluttine/bayespy
|
################################################################################
# Copyright (C) 2011-2012 Jaakko Luttinen
#
# This file is licensed under the MIT License.
################################################################################
# Import some most commonly used nodes
from . import *
from .bernoulli import Bernoulli
from .binomial import Binomial
from .categorical import Categorical
from .multinomial import Multinomial
from .poisson import Poisson
from .beta import Beta
from .beta import Complement
from .dirichlet import Dirichlet, DirichletConcentration
from .exponential import Exponential
from .gaussian import Gaussian, GaussianARD
from .wishart import Wishart
from .gamma import Gamma, GammaShape
from .gaussian import (GaussianGamma,
GaussianWishart)
from .gaussian_markov_chain import GaussianMarkovChain
from .gaussian_markov_chain import VaryingGaussianMarkovChain
from .gaussian_markov_chain import SwitchingGaussianMarkovChain
from .categorical_markov_chain import CategoricalMarkovChain
from .mixture import Mixture, MultiMixture
from .gate import Gate
from .concatenate import Concatenate
from .dot import Dot
from .dot import SumMultiply
from .add import Add
from .take import Take
from .gaussian import ConcatGaussian
from .logpdf import LogPDF
from .constant import Constant
ENH: Add Choose node to imported nodes
|
################################################################################
# Copyright (C) 2011-2012 Jaakko Luttinen
#
# This file is licensed under the MIT License.
################################################################################
# Import some most commonly used nodes
from . import *
from .bernoulli import Bernoulli
from .binomial import Binomial
from .categorical import Categorical
from .multinomial import Multinomial
from .poisson import Poisson
from .beta import Beta
from .beta import Complement
from .dirichlet import Dirichlet, DirichletConcentration
from .exponential import Exponential
from .gaussian import Gaussian, GaussianARD
from .wishart import Wishart
from .gamma import Gamma, GammaShape
from .gaussian import (GaussianGamma,
GaussianWishart)
from .gaussian_markov_chain import GaussianMarkovChain
from .gaussian_markov_chain import VaryingGaussianMarkovChain
from .gaussian_markov_chain import SwitchingGaussianMarkovChain
from .categorical_markov_chain import CategoricalMarkovChain
from .mixture import Mixture, MultiMixture
from .gate import Gate
from .gate import Choose
from .concatenate import Concatenate
from .dot import Dot
from .dot import SumMultiply
from .add import Add
from .take import Take
from .gaussian import ConcatGaussian
from .logpdf import LogPDF
from .constant import Constant
|
<commit_before>################################################################################
# Copyright (C) 2011-2012 Jaakko Luttinen
#
# This file is licensed under the MIT License.
################################################################################
# Import some most commonly used nodes
from . import *
from .bernoulli import Bernoulli
from .binomial import Binomial
from .categorical import Categorical
from .multinomial import Multinomial
from .poisson import Poisson
from .beta import Beta
from .beta import Complement
from .dirichlet import Dirichlet, DirichletConcentration
from .exponential import Exponential
from .gaussian import Gaussian, GaussianARD
from .wishart import Wishart
from .gamma import Gamma, GammaShape
from .gaussian import (GaussianGamma,
GaussianWishart)
from .gaussian_markov_chain import GaussianMarkovChain
from .gaussian_markov_chain import VaryingGaussianMarkovChain
from .gaussian_markov_chain import SwitchingGaussianMarkovChain
from .categorical_markov_chain import CategoricalMarkovChain
from .mixture import Mixture, MultiMixture
from .gate import Gate
from .concatenate import Concatenate
from .dot import Dot
from .dot import SumMultiply
from .add import Add
from .take import Take
from .gaussian import ConcatGaussian
from .logpdf import LogPDF
from .constant import Constant
<commit_msg>ENH: Add Choose node to imported nodes<commit_after>
|
################################################################################
# Copyright (C) 2011-2012 Jaakko Luttinen
#
# This file is licensed under the MIT License.
################################################################################
# Import some most commonly used nodes
from . import *
from .bernoulli import Bernoulli
from .binomial import Binomial
from .categorical import Categorical
from .multinomial import Multinomial
from .poisson import Poisson
from .beta import Beta
from .beta import Complement
from .dirichlet import Dirichlet, DirichletConcentration
from .exponential import Exponential
from .gaussian import Gaussian, GaussianARD
from .wishart import Wishart
from .gamma import Gamma, GammaShape
from .gaussian import (GaussianGamma,
GaussianWishart)
from .gaussian_markov_chain import GaussianMarkovChain
from .gaussian_markov_chain import VaryingGaussianMarkovChain
from .gaussian_markov_chain import SwitchingGaussianMarkovChain
from .categorical_markov_chain import CategoricalMarkovChain
from .mixture import Mixture, MultiMixture
from .gate import Gate
from .gate import Choose
from .concatenate import Concatenate
from .dot import Dot
from .dot import SumMultiply
from .add import Add
from .take import Take
from .gaussian import ConcatGaussian
from .logpdf import LogPDF
from .constant import Constant
|
################################################################################
# Copyright (C) 2011-2012 Jaakko Luttinen
#
# This file is licensed under the MIT License.
################################################################################
# Import some most commonly used nodes
from . import *
from .bernoulli import Bernoulli
from .binomial import Binomial
from .categorical import Categorical
from .multinomial import Multinomial
from .poisson import Poisson
from .beta import Beta
from .beta import Complement
from .dirichlet import Dirichlet, DirichletConcentration
from .exponential import Exponential
from .gaussian import Gaussian, GaussianARD
from .wishart import Wishart
from .gamma import Gamma, GammaShape
from .gaussian import (GaussianGamma,
GaussianWishart)
from .gaussian_markov_chain import GaussianMarkovChain
from .gaussian_markov_chain import VaryingGaussianMarkovChain
from .gaussian_markov_chain import SwitchingGaussianMarkovChain
from .categorical_markov_chain import CategoricalMarkovChain
from .mixture import Mixture, MultiMixture
from .gate import Gate
from .concatenate import Concatenate
from .dot import Dot
from .dot import SumMultiply
from .add import Add
from .take import Take
from .gaussian import ConcatGaussian
from .logpdf import LogPDF
from .constant import Constant
ENH: Add Choose node to imported nodes################################################################################
# Copyright (C) 2011-2012 Jaakko Luttinen
#
# This file is licensed under the MIT License.
################################################################################
# Import some most commonly used nodes
from . import *
from .bernoulli import Bernoulli
from .binomial import Binomial
from .categorical import Categorical
from .multinomial import Multinomial
from .poisson import Poisson
from .beta import Beta
from .beta import Complement
from .dirichlet import Dirichlet, DirichletConcentration
from .exponential import Exponential
from .gaussian import Gaussian, GaussianARD
from .wishart import Wishart
from .gamma import Gamma, GammaShape
from .gaussian import (GaussianGamma,
GaussianWishart)
from .gaussian_markov_chain import GaussianMarkovChain
from .gaussian_markov_chain import VaryingGaussianMarkovChain
from .gaussian_markov_chain import SwitchingGaussianMarkovChain
from .categorical_markov_chain import CategoricalMarkovChain
from .mixture import Mixture, MultiMixture
from .gate import Gate
from .gate import Choose
from .concatenate import Concatenate
from .dot import Dot
from .dot import SumMultiply
from .add import Add
from .take import Take
from .gaussian import ConcatGaussian
from .logpdf import LogPDF
from .constant import Constant
|
<commit_before>################################################################################
# Copyright (C) 2011-2012 Jaakko Luttinen
#
# This file is licensed under the MIT License.
################################################################################
# Import some most commonly used nodes
from . import *
from .bernoulli import Bernoulli
from .binomial import Binomial
from .categorical import Categorical
from .multinomial import Multinomial
from .poisson import Poisson
from .beta import Beta
from .beta import Complement
from .dirichlet import Dirichlet, DirichletConcentration
from .exponential import Exponential
from .gaussian import Gaussian, GaussianARD
from .wishart import Wishart
from .gamma import Gamma, GammaShape
from .gaussian import (GaussianGamma,
GaussianWishart)
from .gaussian_markov_chain import GaussianMarkovChain
from .gaussian_markov_chain import VaryingGaussianMarkovChain
from .gaussian_markov_chain import SwitchingGaussianMarkovChain
from .categorical_markov_chain import CategoricalMarkovChain
from .mixture import Mixture, MultiMixture
from .gate import Gate
from .concatenate import Concatenate
from .dot import Dot
from .dot import SumMultiply
from .add import Add
from .take import Take
from .gaussian import ConcatGaussian
from .logpdf import LogPDF
from .constant import Constant
<commit_msg>ENH: Add Choose node to imported nodes<commit_after>################################################################################
# Copyright (C) 2011-2012 Jaakko Luttinen
#
# This file is licensed under the MIT License.
################################################################################
# Import some most commonly used nodes
from . import *
from .bernoulli import Bernoulli
from .binomial import Binomial
from .categorical import Categorical
from .multinomial import Multinomial
from .poisson import Poisson
from .beta import Beta
from .beta import Complement
from .dirichlet import Dirichlet, DirichletConcentration
from .exponential import Exponential
from .gaussian import Gaussian, GaussianARD
from .wishart import Wishart
from .gamma import Gamma, GammaShape
from .gaussian import (GaussianGamma,
GaussianWishart)
from .gaussian_markov_chain import GaussianMarkovChain
from .gaussian_markov_chain import VaryingGaussianMarkovChain
from .gaussian_markov_chain import SwitchingGaussianMarkovChain
from .categorical_markov_chain import CategoricalMarkovChain
from .mixture import Mixture, MultiMixture
from .gate import Gate
from .gate import Choose
from .concatenate import Concatenate
from .dot import Dot
from .dot import SumMultiply
from .add import Add
from .take import Take
from .gaussian import ConcatGaussian
from .logpdf import LogPDF
from .constant import Constant
|
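The one-line change above follows the standard re-export pattern: importing a name in the package `__init__.py` makes it reachable from the package namespace. An illustrative, self-contained sketch with synthetic modules (hypothetical names, no bayespy dependency):

```python
# Editorial sketch of what `from .gate import Choose` in __init__.py does,
# simulated with synthetic modules instead of a real package on disk.
import types

gate = types.ModuleType("gate")                 # stands in for nodes/gate.py
exec("class Gate: pass\nclass Choose: pass", gate.__dict__)

nodes = types.ModuleType("nodes")               # stands in for the package
nodes.Gate = gate.Gate                          # from .gate import Gate
nodes.Choose = gate.Choose                      # from .gate import Choose

print(hasattr(nodes, "Choose"))                 # True: importable from the package
```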
53dc86ace10f73832c0cbca9fcbc0389999a0e1c
|
hyperion/util/convenience.py
|
hyperion/util/convenience.py
|
class OptThinRadius(object):
def __init__(self, temperature, value=1.):
self.temperature = temperature
self.value = value
def __mul__(self, value):
return OptThinRadius(self.temperature, value=self.value * value)
def __rmul__(self, value):
return OptThinRadius(self.temperature, value=self.value * value)
def __str__(self):
return "%g times the dust sublimation radius" % self.n
def evaluate(self, star, dust):
rstar = star.radius
tstar = star.effective_temperature()
nu, fnu = star.total_spectrum()
return self.value * rstar \
* (1. - (1. - 2. * (self.temperature / tstar) ** 4. \
* dust.kappa_planck_temperature(self.temperature) \
/ dust.kappa_planck_spectrum(nu, fnu)) ** 2.) ** -0.5
|
import numpy as np
class OptThinRadius(object):
def __init__(self, temperature, value=1.):
self.temperature = temperature
self.value = value
def __mul__(self, value):
return OptThinRadius(self.temperature, value=self.value * value)
def __rmul__(self, value):
return OptThinRadius(self.temperature, value=self.value * value)
def __str__(self):
return "%g times the dust sublimation radius" % self.n
def evaluate(self, star, dust):
rstar = star.radius
tstar = star.effective_temperature()
nu, fnu = star.total_spectrum()
x = (self.temperature / tstar) ** 4. \
* dust.kappa_planck_temperature(self.temperature) \
/ dust.kappa_planck_spectrum(nu, fnu)
if x < 0.001:
r = self.value * rstar / 2. / np.sqrt(x)
else:
r = self.value * rstar / np.sqrt(1. - (1. - 2. * x) ** 2.)
return r
|
Deal with the case of large radii for optically thin temperature radius
|
Deal with the case of large radii for optically thin temperature radius
|
Python
|
bsd-2-clause
|
hyperion-rt/hyperion,bluescarni/hyperion,hyperion-rt/hyperion,astrofrog/hyperion,astrofrog/hyperion,bluescarni/hyperion,hyperion-rt/hyperion
|
class OptThinRadius(object):
def __init__(self, temperature, value=1.):
self.temperature = temperature
self.value = value
def __mul__(self, value):
return OptThinRadius(self.temperature, value=self.value * value)
def __rmul__(self, value):
return OptThinRadius(self.temperature, value=self.value * value)
def __str__(self):
return "%g times the dust sublimation radius" % self.n
def evaluate(self, star, dust):
rstar = star.radius
tstar = star.effective_temperature()
nu, fnu = star.total_spectrum()
return self.value * rstar \
* (1. - (1. - 2. * (self.temperature / tstar) ** 4. \
* dust.kappa_planck_temperature(self.temperature) \
/ dust.kappa_planck_spectrum(nu, fnu)) ** 2.) ** -0.5
Deal with the case of large radii for optically thin temperature radius
|
import numpy as np
class OptThinRadius(object):
def __init__(self, temperature, value=1.):
self.temperature = temperature
self.value = value
def __mul__(self, value):
return OptThinRadius(self.temperature, value=self.value * value)
def __rmul__(self, value):
return OptThinRadius(self.temperature, value=self.value * value)
def __str__(self):
return "%g times the dust sublimation radius" % self.n
def evaluate(self, star, dust):
rstar = star.radius
tstar = star.effective_temperature()
nu, fnu = star.total_spectrum()
x = (self.temperature / tstar) ** 4. \
* dust.kappa_planck_temperature(self.temperature) \
/ dust.kappa_planck_spectrum(nu, fnu)
if x < 0.001:
r = self.value * rstar / 2. / np.sqrt(x)
else:
r = self.value * rstar / np.sqrt(1. - (1. - 2. * x) ** 2.)
return r
|
<commit_before>class OptThinRadius(object):
def __init__(self, temperature, value=1.):
self.temperature = temperature
self.value = value
def __mul__(self, value):
return OptThinRadius(self.temperature, value=self.value * value)
def __rmul__(self, value):
return OptThinRadius(self.temperature, value=self.value * value)
def __str__(self):
return "%g times the dust sublimation radius" % self.n
def evaluate(self, star, dust):
rstar = star.radius
tstar = star.effective_temperature()
nu, fnu = star.total_spectrum()
return self.value * rstar \
* (1. - (1. - 2. * (self.temperature / tstar) ** 4. \
* dust.kappa_planck_temperature(self.temperature) \
/ dust.kappa_planck_spectrum(nu, fnu)) ** 2.) ** -0.5
<commit_msg>Deal with the case of large radii for optically thin temperature radius<commit_after>
|
import numpy as np
class OptThinRadius(object):
def __init__(self, temperature, value=1.):
self.temperature = temperature
self.value = value
def __mul__(self, value):
return OptThinRadius(self.temperature, value=self.value * value)
def __rmul__(self, value):
return OptThinRadius(self.temperature, value=self.value * value)
def __str__(self):
return "%g times the dust sublimation radius" % self.n
def evaluate(self, star, dust):
rstar = star.radius
tstar = star.effective_temperature()
nu, fnu = star.total_spectrum()
x = (self.temperature / tstar) ** 4. \
* dust.kappa_planck_temperature(self.temperature) \
/ dust.kappa_planck_spectrum(nu, fnu)
if x < 0.001:
r = self.value * rstar / 2. / np.sqrt(x)
else:
r = self.value * rstar / np.sqrt(1. - (1. - 2. * x) ** 2.)
return r
|
class OptThinRadius(object):
def __init__(self, temperature, value=1.):
self.temperature = temperature
self.value = value
def __mul__(self, value):
return OptThinRadius(self.temperature, value=self.value * value)
def __rmul__(self, value):
return OptThinRadius(self.temperature, value=self.value * value)
def __str__(self):
return "%g times the dust sublimation radius" % self.n
def evaluate(self, star, dust):
rstar = star.radius
tstar = star.effective_temperature()
nu, fnu = star.total_spectrum()
return self.value * rstar \
* (1. - (1. - 2. * (self.temperature / tstar) ** 4. \
* dust.kappa_planck_temperature(self.temperature) \
/ dust.kappa_planck_spectrum(nu, fnu)) ** 2.) ** -0.5
Deal with the case of large radii for optically thin temperature radiusimport numpy as np
class OptThinRadius(object):
def __init__(self, temperature, value=1.):
self.temperature = temperature
self.value = value
def __mul__(self, value):
return OptThinRadius(self.temperature, value=self.value * value)
def __rmul__(self, value):
return OptThinRadius(self.temperature, value=self.value * value)
def __str__(self):
return "%g times the dust sublimation radius" % self.n
def evaluate(self, star, dust):
rstar = star.radius
tstar = star.effective_temperature()
nu, fnu = star.total_spectrum()
x = (self.temperature / tstar) ** 4. \
* dust.kappa_planck_temperature(self.temperature) \
/ dust.kappa_planck_spectrum(nu, fnu)
if x < 0.001:
r = self.value * rstar / 2. / np.sqrt(x)
else:
r = self.value * rstar / np.sqrt(1. - (1. - 2. * x) ** 2.)
return r
|
<commit_before>class OptThinRadius(object):
def __init__(self, temperature, value=1.):
self.temperature = temperature
self.value = value
def __mul__(self, value):
return OptThinRadius(self.temperature, value=self.value * value)
def __rmul__(self, value):
return OptThinRadius(self.temperature, value=self.value * value)
def __str__(self):
return "%g times the dust sublimation radius" % self.n
def evaluate(self, star, dust):
rstar = star.radius
tstar = star.effective_temperature()
nu, fnu = star.total_spectrum()
return self.value * rstar \
* (1. - (1. - 2. * (self.temperature / tstar) ** 4. \
* dust.kappa_planck_temperature(self.temperature) \
/ dust.kappa_planck_spectrum(nu, fnu)) ** 2.) ** -0.5
<commit_msg>Deal with the case of large radii for optically thin temperature radius<commit_after>import numpy as np
class OptThinRadius(object):
def __init__(self, temperature, value=1.):
self.temperature = temperature
self.value = value
def __mul__(self, value):
return OptThinRadius(self.temperature, value=self.value * value)
def __rmul__(self, value):
return OptThinRadius(self.temperature, value=self.value * value)
def __str__(self):
return "%g times the dust sublimation radius" % self.n
def evaluate(self, star, dust):
rstar = star.radius
tstar = star.effective_temperature()
nu, fnu = star.total_spectrum()
x = (self.temperature / tstar) ** 4. \
* dust.kappa_planck_temperature(self.temperature) \
/ dust.kappa_planck_spectrum(nu, fnu)
if x < 0.001:
r = self.value * rstar / 2. / np.sqrt(x)
else:
r = self.value * rstar / np.sqrt(1. - (1. - 2. * x) ** 2.)
return r
|
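For context, the small-x branch in this commit follows from the expansion 1 - (1 - 2x)^2 = 4x - 4x^2, so for x << 1 the radius reduces to value * rstar / (2 * sqrt(x)), which also behaves well numerically as x approaches zero. A rough standalone sketch with hypothetical inputs (value = rstar = 1; not part of the commit):
import numpy as np
def r_exact(value, rstar, x):
    # Full expression from evaluate(); ill-conditioned as x -> 0
    return value * rstar / np.sqrt(1. - (1. - 2. * x) ** 2.)
def r_small_x(value, rstar, x):
    # First-order expansion: 1 - (1 - 2x)^2 = 4x - 4x^2 ~= 4x
    return value * rstar / 2. / np.sqrt(x)
for x in (1e-2, 1e-4, 1e-6):
    print(x, r_exact(1., 1., x), r_small_x(1., 1., x))
# The ratio of the two is 1 / sqrt(1 - x) ~= 1 + x/2, so the cheap
# branch is accurate well below the 0.001 threshold used above.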
8dd06e484b1b1bf71fbeb131340fd8358aa001b9
|
cattle/plugins/core/publisher.py
|
cattle/plugins/core/publisher.py
|
import logging
import requests
import time
from cattle import type_manager
from cattle.utils import log_request
log = logging.getLogger("agent")
class Publisher:
def __init__(self, url, auth):
self._url = url
self._auth = auth
self._marshaller = type_manager.get_type(type_manager.MARSHALLER)
self._session = requests.Session()
def publish(self, resp):
line = self._marshaller.to_string(resp)
start = time.time()
try:
r = self._session.post(self._url, data=line, auth=self._auth,
timeout=5)
if r.status_code != 201:
log.error("Error [%s], Request [%s]", r.text, line)
finally:
log_request(resp, log, 'Response: %s [%s] seconds', line,
time.time() - start)
@property
def url(self):
return self._url
@property
def auth(self):
return self._auth
|
import logging
import requests
import time
from cattle import type_manager
from cattle.utils import log_request
log = logging.getLogger("agent")
class Publisher:
def __init__(self, url, auth):
self._url = url
self._auth = auth
self._marshaller = type_manager.get_type(type_manager.MARSHALLER)
self._session = requests.Session()
def publish(self, resp):
line = self._marshaller.to_string(resp)
start = time.time()
try:
r = self._session.post(self._url, data=line, auth=self._auth,
timeout=60)
if r.status_code != 201:
log.error("Error [%s], Request [%s]", r.text, line)
finally:
log_request(resp, log, 'Response: %s [%s] seconds', line,
time.time() - start)
@property
def url(self):
return self._url
@property
def auth(self):
return self._auth
|
Increase HTTP timeout to 60 seconds
|
Increase HTTP timeout to 60 seconds
|
Python
|
apache-2.0
|
wlan0/python-agent,rancher/python-agent,wlan0/python-agent,cjellick/python-agent,rancherio/python-agent,rancherio/python-agent,cjellick/python-agent,rancher/python-agent
|
import logging
import requests
import time
from cattle import type_manager
from cattle.utils import log_request
log = logging.getLogger("agent")
class Publisher:
def __init__(self, url, auth):
self._url = url
self._auth = auth
self._marshaller = type_manager.get_type(type_manager.MARSHALLER)
self._session = requests.Session()
def publish(self, resp):
line = self._marshaller.to_string(resp)
start = time.time()
try:
r = self._session.post(self._url, data=line, auth=self._auth,
timeout=5)
if r.status_code != 201:
log.error("Error [%s], Request [%s]", r.text, line)
finally:
log_request(resp, log, 'Response: %s [%s] seconds', line,
time.time() - start)
@property
def url(self):
return self._url
@property
def auth(self):
return self._auth
Increase HTTP timeout to 60 seconds
|
import logging
import requests
import time
from cattle import type_manager
from cattle.utils import log_request
log = logging.getLogger("agent")
class Publisher:
def __init__(self, url, auth):
self._url = url
self._auth = auth
self._marshaller = type_manager.get_type(type_manager.MARSHALLER)
self._session = requests.Session()
def publish(self, resp):
line = self._marshaller.to_string(resp)
start = time.time()
try:
r = self._session.post(self._url, data=line, auth=self._auth,
timeout=60)
if r.status_code != 201:
log.error("Error [%s], Request [%s]", r.text, line)
finally:
log_request(resp, log, 'Response: %s [%s] seconds', line,
time.time() - start)
@property
def url(self):
return self._url
@property
def auth(self):
return self._auth
|
<commit_before>import logging
import requests
import time
from cattle import type_manager
from cattle.utils import log_request
log = logging.getLogger("agent")
class Publisher:
def __init__(self, url, auth):
self._url = url
self._auth = auth
self._marshaller = type_manager.get_type(type_manager.MARSHALLER)
self._session = requests.Session()
def publish(self, resp):
line = self._marshaller.to_string(resp)
start = time.time()
try:
r = self._session.post(self._url, data=line, auth=self._auth,
timeout=5)
if r.status_code != 201:
log.error("Error [%s], Request [%s]", r.text, line)
finally:
log_request(resp, log, 'Response: %s [%s] seconds', line,
time.time() - start)
@property
def url(self):
return self._url
@property
def auth(self):
return self._auth
<commit_msg>Increase HTTP timeout to 60 seconds<commit_after>
|
import logging
import requests
import time
from cattle import type_manager
from cattle.utils import log_request
log = logging.getLogger("agent")
class Publisher:
def __init__(self, url, auth):
self._url = url
self._auth = auth
self._marshaller = type_manager.get_type(type_manager.MARSHALLER)
self._session = requests.Session()
def publish(self, resp):
line = self._marshaller.to_string(resp)
start = time.time()
try:
r = self._session.post(self._url, data=line, auth=self._auth,
timeout=60)
if r.status_code != 201:
log.error("Error [%s], Request [%s]", r.text, line)
finally:
log_request(resp, log, 'Response: %s [%s] seconds', line,
time.time() - start)
@property
def url(self):
return self._url
@property
def auth(self):
return self._auth
|
import logging
import requests
import time
from cattle import type_manager
from cattle.utils import log_request
log = logging.getLogger("agent")
class Publisher:
def __init__(self, url, auth):
self._url = url
self._auth = auth
self._marshaller = type_manager.get_type(type_manager.MARSHALLER)
self._session = requests.Session()
def publish(self, resp):
line = self._marshaller.to_string(resp)
start = time.time()
try:
r = self._session.post(self._url, data=line, auth=self._auth,
timeout=5)
if r.status_code != 201:
log.error("Error [%s], Request [%s]", r.text, line)
finally:
log_request(resp, log, 'Response: %s [%s] seconds', line,
time.time() - start)
@property
def url(self):
return self._url
@property
def auth(self):
return self._auth
Increase HTTP timeout to 60 secondsimport logging
import requests
import time
from cattle import type_manager
from cattle.utils import log_request
log = logging.getLogger("agent")
class Publisher:
def __init__(self, url, auth):
self._url = url
self._auth = auth
self._marshaller = type_manager.get_type(type_manager.MARSHALLER)
self._session = requests.Session()
def publish(self, resp):
line = self._marshaller.to_string(resp)
start = time.time()
try:
r = self._session.post(self._url, data=line, auth=self._auth,
timeout=60)
if r.status_code != 201:
log.error("Error [%s], Request [%s]", r.text, line)
finally:
log_request(resp, log, 'Response: %s [%s] seconds', line,
time.time() - start)
@property
def url(self):
return self._url
@property
def auth(self):
return self._auth
|
<commit_before>import logging
import requests
import time
from cattle import type_manager
from cattle.utils import log_request
log = logging.getLogger("agent")
class Publisher:
def __init__(self, url, auth):
self._url = url
self._auth = auth
self._marshaller = type_manager.get_type(type_manager.MARSHALLER)
self._session = requests.Session()
def publish(self, resp):
line = self._marshaller.to_string(resp)
start = time.time()
try:
r = self._session.post(self._url, data=line, auth=self._auth,
timeout=5)
if r.status_code != 201:
log.error("Error [%s], Request [%s]", r.text, line)
finally:
log_request(resp, log, 'Response: %s [%s] seconds', line,
time.time() - start)
@property
def url(self):
return self._url
@property
def auth(self):
return self._auth
<commit_msg>Increase HTTP timeout to 60 seconds<commit_after>import logging
import requests
import time
from cattle import type_manager
from cattle.utils import log_request
log = logging.getLogger("agent")
class Publisher:
def __init__(self, url, auth):
self._url = url
self._auth = auth
self._marshaller = type_manager.get_type(type_manager.MARSHALLER)
self._session = requests.Session()
def publish(self, resp):
line = self._marshaller.to_string(resp)
start = time.time()
try:
r = self._session.post(self._url, data=line, auth=self._auth,
timeout=60)
if r.status_code != 201:
log.error("Error [%s], Request [%s]", r.text, line)
finally:
log_request(resp, log, 'Response: %s [%s] seconds', line,
time.time() - start)
@property
def url(self):
return self._url
@property
def auth(self):
return self._auth
|
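Worth noting about this change: in requests, a single float timeout bounds the connect and read phases separately (each gets the full 60 s), while a (connect, read) tuple bounds them independently. A hedged sketch of both forms against a hypothetical endpoint (not part of the commit):
import requests
session = requests.Session()
try:
    # One float: connect and read each get up to 60 seconds.
    session.post("https://example.com/events", data="{}", timeout=60)
    # Tuple form: fail fast on connect, stay patient on read.
    session.post("https://example.com/events", data="{}", timeout=(5, 60))
except requests.exceptions.Timeout:
    # Either phase exceeding its bound raises a Timeout subclass.
    pass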
d0b56003b2b508a5db43986064d2e01fecefe155
|
virtool/indexes/models.py
|
virtool/indexes/models.py
|
from sqlalchemy import Column, Enum, Integer, String
from virtool.pg.utils import Base, SQLEnum
class IndexType(str, SQLEnum):
"""
Enumerated type for index file types
"""
json = "json"
fasta = "fasta"
bowtie2 = "bowtie2"
class IndexFile(Base):
"""
SQL model to store new index files
"""
__tablename__ = "index_files"
id = Column(Integer, primary_key=True)
name = Column(String, nullable=False)
index = Column(String, nullable=False)
type = Column(Enum(IndexType))
size = Column(Integer)
|
from sqlalchemy import Column, Enum, Integer, String, UniqueConstraint
from virtool.pg.utils import Base, SQLEnum
class IndexType(str, SQLEnum):
"""
Enumerated type for index file types
"""
json = "json"
fasta = "fasta"
bowtie2 = "bowtie2"
class IndexFile(Base):
"""
SQL model to store new index files
"""
__tablename__ = "index_files"
__table_args__ = (UniqueConstraint("index", "name"),)
id = Column(Integer, primary_key=True)
name = Column(String, nullable=False)
index = Column(String, nullable=False)
type = Column(Enum(IndexType))
size = Column(Integer)
|
Add UniqueConstraint for IndexFile SQL model
|
Add UniqueConstraint for IndexFile SQL model
|
Python
|
mit
|
virtool/virtool,igboyes/virtool,igboyes/virtool,virtool/virtool
|
from sqlalchemy import Column, Enum, Integer, String
from virtool.pg.utils import Base, SQLEnum
class IndexType(str, SQLEnum):
"""
Enumerated type for index file types
"""
json = "json"
fasta = "fasta"
bowtie2 = "bowtie2"
class IndexFile(Base):
"""
SQL model to store new index files
"""
__tablename__ = "index_files"
id = Column(Integer, primary_key=True)
name = Column(String, nullable=False)
index = Column(String, nullable=False)
type = Column(Enum(IndexType))
size = Column(Integer)
Add UniqueConstraint for IndexFile SQL model
|
from sqlalchemy import Column, Enum, Integer, String, UniqueConstraint
from virtool.pg.utils import Base, SQLEnum
class IndexType(str, SQLEnum):
"""
Enumerated type for index file types
"""
json = "json"
fasta = "fasta"
bowtie2 = "bowtie2"
class IndexFile(Base):
"""
SQL model to store new index files
"""
__tablename__ = "index_files"
__table_args__ = (UniqueConstraint("index", "name"),)
id = Column(Integer, primary_key=True)
name = Column(String, nullable=False)
index = Column(String, nullable=False)
type = Column(Enum(IndexType))
size = Column(Integer)
|
<commit_before>from sqlalchemy import Column, Enum, Integer, String
from virtool.pg.utils import Base, SQLEnum
class IndexType(str, SQLEnum):
"""
Enumerated type for index file types
"""
json = "json"
fasta = "fasta"
bowtie2 = "bowtie2"
class IndexFile(Base):
"""
SQL model to store new index files
"""
__tablename__ = "index_files"
id = Column(Integer, primary_key=True)
name = Column(String, nullable=False)
index = Column(String, nullable=False)
type = Column(Enum(IndexType))
size = Column(Integer)
<commit_msg>Add UniqueConstraint for IndexFile SQL model<commit_after>
|
from sqlalchemy import Column, Enum, Integer, String, UniqueConstraint
from virtool.pg.utils import Base, SQLEnum
class IndexType(str, SQLEnum):
"""
Enumerated type for index file types
"""
json = "json"
fasta = "fasta"
bowtie2 = "bowtie2"
class IndexFile(Base):
"""
SQL model to store new index files
"""
__tablename__ = "index_files"
__table_args__ = (UniqueConstraint("index", "name"),)
id = Column(Integer, primary_key=True)
name = Column(String, nullable=False)
index = Column(String, nullable=False)
type = Column(Enum(IndexType))
size = Column(Integer)
|
from sqlalchemy import Column, Enum, Integer, String
from virtool.pg.utils import Base, SQLEnum
class IndexType(str, SQLEnum):
"""
Enumerated type for index file types
"""
json = "json"
fasta = "fasta"
bowtie2 = "bowtie2"
class IndexFile(Base):
"""
SQL model to store new index files
"""
__tablename__ = "index_files"
id = Column(Integer, primary_key=True)
name = Column(String, nullable=False)
index = Column(String, nullable=False)
type = Column(Enum(IndexType))
size = Column(Integer)
Add UniqueConstraint for IndexFile SQL modelfrom sqlalchemy import Column, Enum, Integer, String, UniqueConstraint
from virtool.pg.utils import Base, SQLEnum
class IndexType(str, SQLEnum):
"""
Enumerated type for index file types
"""
json = "json"
fasta = "fasta"
bowtie2 = "bowtie2"
class IndexFile(Base):
"""
SQL model to store new index files
"""
__tablename__ = "index_files"
__table_args__ = (UniqueConstraint("index", "name"),)
id = Column(Integer, primary_key=True)
name = Column(String, nullable=False)
index = Column(String, nullable=False)
type = Column(Enum(IndexType))
size = Column(Integer)
|
<commit_before>from sqlalchemy import Column, Enum, Integer, String
from virtool.pg.utils import Base, SQLEnum
class IndexType(str, SQLEnum):
"""
Enumerated type for index file types
"""
json = "json"
fasta = "fasta"
bowtie2 = "bowtie2"
class IndexFile(Base):
"""
SQL model to store new index files
"""
__tablename__ = "index_files"
id = Column(Integer, primary_key=True)
name = Column(String, nullable=False)
index = Column(String, nullable=False)
type = Column(Enum(IndexType))
size = Column(Integer)
<commit_msg>Add UniqueConstraint for IndexFile SQL model<commit_after>from sqlalchemy import Column, Enum, Integer, String, UniqueConstraint
from virtool.pg.utils import Base, SQLEnum
class IndexType(str, SQLEnum):
"""
Enumerated type for index file types
"""
json = "json"
fasta = "fasta"
bowtie2 = "bowtie2"
class IndexFile(Base):
"""
SQL model to store new index files
"""
__tablename__ = "index_files"
__table_args__ = (UniqueConstraint("index", "name"),)
id = Column(Integer, primary_key=True)
name = Column(String, nullable=False)
index = Column(String, nullable=False)
type = Column(Enum(IndexType))
size = Column(Integer)
|
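With the constraint in place, a second row sharing the same (index, name) pair is rejected at commit time. A simplified, self-contained sketch against in-memory SQLite, assuming SQLAlchemy 1.4+ (it uses a plain declarative_base stand-in rather than virtool's Base, and the row values are illustrative):
from sqlalchemy import Column, Integer, String, UniqueConstraint, create_engine
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import Session, declarative_base
Base = declarative_base()
class IndexFile(Base):
    __tablename__ = "index_files"
    __table_args__ = (UniqueConstraint("index", "name"),)
    id = Column(Integer, primary_key=True)
    name = Column(String, nullable=False)
    index = Column(String, nullable=False)
engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add(IndexFile(name="reference.fa.gz", index="ptlrcefm"))
    session.commit()
    session.add(IndexFile(name="reference.fa.gz", index="ptlrcefm"))
    try:
        session.commit()  # duplicate (index, name) violates the constraint
    except IntegrityError:
        session.rollback()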
b415e265f6b725c7e1a99d2ee1dae77f0cd555a7
|
config.py
|
config.py
|
# Paths to key files
FIREBASE_KEY_PATH="firebase.local.json"
# Firebase config for prox-server
# These are public-facing and can be found in the console under Auth > Web Setup (in the top-right corner)
FIREBASE_CONFIG = {
"apiKey": "AIzaSyCksV_AC0oB9OnJmj0YgXNOrmnJawNbFeE",
"authDomain": "prox-server-cf63e.firebaseapp.com",
"databaseURL": "https://prox-server-cf63e.firebaseio.com",
"storageBucket": "prox-server-cf63e.appspot.com",
"messagingSenderId": "888537898788",
"serviceAccount": FIREBASE_KEY_PATH
# Using the service account will authenticate as admin by default
}
yelpSearchCategories = ["beaches", "hotels", "restaurants"]
|
# Paths to key files
FIREBASE_KEY_PATH="firebase.local.json"
# Firebase config for prox-server
# These are public-facing and can be found in the console under Auth > Web Setup (in the top-right corner)
FIREBASE_CONFIG = {
"apiKey": "AIzaSyCksV_AC0oB9OnJmj0YgXNOrmnJawNbFeE",
"authDomain": "prox-server-cf63e.firebaseapp.com",
"databaseURL": "https://prox-server-cf63e.firebaseio.com",
"storageBucket": "prox-server-cf63e.appspot.com",
"messagingSenderId": "888537898788",
"serviceAccount": FIREBASE_KEY_PATH
# Using the service account will authenticate as admin by default
}
yelpSearchCategories = ["active",
"arts",
"beautysvc",
"eventservices",
"food",
"hotelstravel",
"localflavor",
"massmedia",
"nightlife",
"pets",
"publicservicesgovt",
"restaurants",
"shopping"]
|
Add category filtering on the server side to save client fetching.
|
Add category filtering on the server side to save client fetching.
|
Python
|
mpl-2.0
|
liuche/prox-server
|
# Paths to key files
FIREBASE_KEY_PATH="firebase.local.json"
# Firebase config for prox-server
# These are public-facing and can be found in the console under Auth > Web Setup (in the top-right corner)
FIREBASE_CONFIG = {
"apiKey": "AIzaSyCksV_AC0oB9OnJmj0YgXNOrmnJawNbFeE",
"authDomain": "prox-server-cf63e.firebaseapp.com",
"databaseURL": "https://prox-server-cf63e.firebaseio.com",
"storageBucket": "prox-server-cf63e.appspot.com",
"messagingSenderId": "888537898788",
"serviceAccount": FIREBASE_KEY_PATH
# Using the service account will authenticate as admin by default
}
yelpSearchCategories = ["beaches", "hotels", "restaurants"]Add category filtering on the server side to save client fetching.
|
# Paths to key files
FIREBASE_KEY_PATH="firebase.local.json"
# Firebase config for prox-server
# These are public-facing and can be found in the console under Auth > Web Setup (in the top-right corner)
FIREBASE_CONFIG = {
"apiKey": "AIzaSyCksV_AC0oB9OnJmj0YgXNOrmnJawNbFeE",
"authDomain": "prox-server-cf63e.firebaseapp.com",
"databaseURL": "https://prox-server-cf63e.firebaseio.com",
"storageBucket": "prox-server-cf63e.appspot.com",
"messagingSenderId": "888537898788",
"serviceAccount": FIREBASE_KEY_PATH
# Using the service account will authenticate as admin by default
}
yelpSearchCategories = ["active",
"arts",
"beautysvc",
"eventservices",
"food",
"hotelstravel",
"localflavor",
"massmedia",
"nightlife",
"pets",
"publicservicesgovt",
"restaurants",
"shopping"]
|
<commit_before># Paths to key files
FIREBASE_KEY_PATH="firebase.local.json"
# Firebase config for prox-server
# These are public-facing and can be found in the console under Auth > Web Setup (in the top-right corner)
FIREBASE_CONFIG = {
"apiKey": "AIzaSyCksV_AC0oB9OnJmj0YgXNOrmnJawNbFeE",
"authDomain": "prox-server-cf63e.firebaseapp.com",
"databaseURL": "https://prox-server-cf63e.firebaseio.com",
"storageBucket": "prox-server-cf63e.appspot.com",
"messagingSenderId": "888537898788",
"serviceAccount": FIREBASE_KEY_PATH
# Using the service account will authenticate as admin by default
}
yelpSearchCategories = ["beaches", "hotels", "restaurants"]<commit_msg>Add category filtering on the server side to save client fetching.<commit_after>
|
# Paths to key files
FIREBASE_KEY_PATH="firebase.local.json"
# Firebase config for prox-server
# These are public-facing and can be found in the console under Auth > Web Setup (in the top-right corner)
FIREBASE_CONFIG = {
"apiKey": "AIzaSyCksV_AC0oB9OnJmj0YgXNOrmnJawNbFeE",
"authDomain": "prox-server-cf63e.firebaseapp.com",
"databaseURL": "https://prox-server-cf63e.firebaseio.com",
"storageBucket": "prox-server-cf63e.appspot.com",
"messagingSenderId": "888537898788",
"serviceAccount": FIREBASE_KEY_PATH
# Using the service account will authenticate as admin by default
}
yelpSearchCategories = ["active",
"arts",
"beautysvc",
"eventservices",
"food",
"hotelstravel",
"localflavor",
"massmedia",
"nightlife",
"pets",
"publicservicesgovt",
"restaurants",
"shopping"]
|
# Paths to key files
FIREBASE_KEY_PATH="firebase.local.json"
# Firebase config for prox-server
# These are public-facing and can be found in the console under Auth > Web Setup (in the top-right corner)
FIREBASE_CONFIG = {
"apiKey": "AIzaSyCksV_AC0oB9OnJmj0YgXNOrmnJawNbFeE",
"authDomain": "prox-server-cf63e.firebaseapp.com",
"databaseURL": "https://prox-server-cf63e.firebaseio.com",
"storageBucket": "prox-server-cf63e.appspot.com",
"messagingSenderId": "888537898788",
"serviceAccount": FIREBASE_KEY_PATH
# Using the service account will authenticate as admin by default
}
yelpSearchCategories = ["beaches", "hotels", "restaurants"]Add category filtering on the server side to save client fetching.# Paths to key files
FIREBASE_KEY_PATH="firebase.local.json"
# Firebase config for prox-server
# These are public-facing and can be found in the console under Auth > Web Setup (in the top-right corner)
FIREBASE_CONFIG = {
"apiKey": "AIzaSyCksV_AC0oB9OnJmj0YgXNOrmnJawNbFeE",
"authDomain": "prox-server-cf63e.firebaseapp.com",
"databaseURL": "https://prox-server-cf63e.firebaseio.com",
"storageBucket": "prox-server-cf63e.appspot.com",
"messagingSenderId": "888537898788",
"serviceAccount": FIREBASE_KEY_PATH
# Using the service account will authenticate as admin by default
}
yelpSearchCategories = ["active",
"arts",
"beautysvc",
"eventservices",
"food",
"hotelstravel",
"localflavor",
"massmedia",
"nightlife",
"pets",
"publicservicesgovt",
"restaurants",
"shopping"]
|
<commit_before># Paths to key files
FIREBASE_KEY_PATH="firebase.local.json"
# Firebase config for prox-server
# These are public-facing and can be found in the console under Auth > Web Setup (in the top-right corner)
FIREBASE_CONFIG = {
"apiKey": "AIzaSyCksV_AC0oB9OnJmj0YgXNOrmnJawNbFeE",
"authDomain": "prox-server-cf63e.firebaseapp.com",
"databaseURL": "https://prox-server-cf63e.firebaseio.com",
"storageBucket": "prox-server-cf63e.appspot.com",
"messagingSenderId": "888537898788",
"serviceAccount": FIREBASE_KEY_PATH
# Using the service account will authenticate as admin by default
}
yelpSearchCategories = ["beaches", "hotels", "restaurants"]<commit_msg>Add category filtering on the server side to save client fetching.<commit_after># Paths to key files
FIREBASE_KEY_PATH="firebase.local.json"
# Firebase config for prox-server
# These are public-facing and can be found in the console under Auth > Web Setup (in the top-right corner)
FIREBASE_CONFIG = {
"apiKey": "AIzaSyCksV_AC0oB9OnJmj0YgXNOrmnJawNbFeE",
"authDomain": "prox-server-cf63e.firebaseapp.com",
"databaseURL": "https://prox-server-cf63e.firebaseio.com",
"storageBucket": "prox-server-cf63e.appspot.com",
"messagingSenderId": "888537898788",
"serviceAccount": FIREBASE_KEY_PATH
# Using the service account will authenticate as admin by default
}
yelpSearchCategories = ["active",
"arts",
"beautysvc",
"eventservices",
"food",
"hotelstravel",
"localflavor",
"massmedia",
"nightlife",
"pets",
"publicservicesgovt",
"restaurants",
"shopping"]
|
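The widened category whitelist lets the server drop irrelevant venues before clients ever fetch them. A hypothetical filtering helper (the real filtering code lives elsewhere in prox-server; the category shape assumes Yelp's (display name, alias) pairs):
yelp_search_categories = {"active", "arts", "food", "restaurants", "shopping"}
def is_relevant(business):
    # Keep a venue if any of its category aliases is whitelisted.
    aliases = {alias for _display, alias in business.get("categories", [])}
    return bool(aliases & yelp_search_categories)
venue = {"name": "Corner Cafe", "categories": [["Coffee & Tea", "coffee"]]}
print(is_relevant(venue))  # False under this sample whitelist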
cd2ecd3bede2886c384e4761f7052cfacb7d24ae
|
modules/serialize.py
|
modules/serialize.py
|
import sublime
import json
import os
from ..json import encoder
from ..json import decoder
from . import settings
_DEFAULT_PATH = os.path.join('User', 'sessions')
_DEFAULT_EXTENSION = 'json'
def dump(name, session):
session_path = _generate_path(name)
with open(session_path, 'w') as f:
json.dump(session, f, cls=encoder.SessionEncoder)
def load(name):
session_path = _generate_path(name)
with open(session_path, 'r') as f:
return json.load(f, cls=decoder.SessionDecoder)
def _generate_path(name):
path = settings.get('session_path')
if not path:
path = _DEFAULT_PATH
folder = os.path.join(sublime.packages_path(), path)
# Ensure the folder exists
os.makedirs(folder, exist_ok=True)
return os.path.join(folder, _generate_name(name))
def _generate_name(name, extension=_DEFAULT_EXTENSION):
return '.'.join([name, extension])
|
import sublime
import json
import os
from ..json import encoder
from ..json import decoder
from . import settings
_DEFAULT_PATH = os.path.join('User', 'sessions')
_DEFAULT_EXTENSION = '.sublime-session'
def dump(name, session):
session_path = _generate_path(name)
with open(session_path, 'w') as f:
json.dump(session, f, cls=encoder.SessionEncoder)
def load(name):
session_path = _generate_path(name)
with open(session_path, 'r') as f:
return json.load(f, cls=decoder.SessionDecoder)
def _generate_path(name):
return os.path.join(_generate_folder(), _generate_name(name))
def _generate_folder():
folder = settings.get('session_path')
if folder:
folder = os.path.normpath(folder)
else:
folder = os.path.join(sublime.packages_path(), _DEFAULT_PATH)
# Ensure the folder exists
os.makedirs(folder, exist_ok=True)
return folder
def _generate_name(name, extension=_DEFAULT_EXTENSION):
return ''.join([name, extension])
|
Use "sublime-session" as file extension
|
Use "sublime-session" as file extension
Furthermore fix some bugs in serialize.py
|
Python
|
mit
|
Zeeker/sublime-SessionManager
|
import sublime
import json
import os
from ..json import encoder
from ..json import decoder
from . import settings
_DEFAULT_PATH = os.path.join('User', 'sessions')
_DEFAULT_EXTENSION = 'json'
def dump(name, session):
session_path = _generate_path(name)
with open(session_path, 'w') as f:
json.dump(session, f, cls=encoder.SessionEncoder)
def load(name):
session_path = _generate_path(name)
with open(session_path, 'r') as f:
return json.load(f, cls=decoder.SessionDecoder)
def _generate_path(name):
path = settings.get('session_path')
if not path:
path = _DEFAULT_PATH
folder = os.path.join(sublime.packages_path(), path)
# Ensure the folder exists
os.makedirs(folder, exist_ok=True)
return os.path.join(folder, _generate_name(name))
def _generate_name(name, extension=_DEFAULT_EXTENSION):
return '.'.join([name, extension])
Use "sublime-session" as file extension
Furthermore fix some bugs in serialize.py
|
import sublime
import json
import os
from ..json import encoder
from ..json import decoder
from . import settings
_DEFAULT_PATH = os.path.join('User', 'sessions')
_DEFAULT_EXTENSION = '.sublime-session'
def dump(name, session):
session_path = _generate_path(name)
with open(session_path, 'w') as f:
json.dump(session, f, cls=encoder.SessionEncoder)
def load(name):
session_path = _generate_path(name)
with open(session_path, 'r') as f:
return json.load(f, cls=decoder.SessionDecoder)
def _generate_path(name):
return os.path.join(_generate_folder(), _generate_name(name))
def _generate_folder():
folder = settings.get('session_path')
if folder:
folder = os.path.normpath(folder)
else:
folder = os.path.join(sublime.packages_path(), _DEFAULT_PATH)
# Ensure the folder exists
os.makedirs(folder, exist_ok=True)
return folder
def _generate_name(name, extension=_DEFAULT_EXTENSION):
return ''.join([name, extension])
|
<commit_before>import sublime
import json
import os
from ..json import encoder
from ..json import decoder
from . import settings
_DEFAULT_PATH = os.path.join('User', 'sessions')
_DEFAULT_EXTENSION = 'json'
def dump(name, session):
session_path = _generate_path(name)
with open(session_path, 'w') as f:
json.dump(session, f, cls=encoder.SessionEncoder)
def load(name):
session_path = _generate_path(name)
with open(session_path, 'r') as f:
return json.load(f, cls=decoder.SessionDecoder)
def _generate_path(name):
path = settings.get('session_path')
if not path:
path = _DEFAULT_PATH
folder = os.path.join(sublime.packages_path(), path)
# Ensure the folder exists
os.makedirs(folder, exist_ok=True)
return os.path.join(folder, _generate_name(name))
def _generate_name(name, extension=_DEFAULT_EXTENSION):
return '.'.join([name, extension])
<commit_msg>Use "sublime-session" as file extension
Furthermore fix some bugs in serialize.py<commit_after>
|
import sublime
import json
import os
from ..json import encoder
from ..json import decoder
from . import settings
_DEFAULT_PATH = os.path.join('User', 'sessions')
_DEFAULT_EXTENSION = '.sublime-session'
def dump(name, session):
session_path = _generate_path(name)
with open(session_path, 'w') as f:
json.dump(session, f, cls=encoder.SessionEncoder)
def load(name):
session_path = _generate_path(name)
with open(session_path, 'r') as f:
return json.load(f, cls=decoder.SessionDecoder)
def _generate_path(name):
return os.path.join(_generate_folder(), _generate_name(name))
def _generate_folder():
folder = settings.get('session_path')
if folder:
folder = os.path.normpath(folder)
else:
folder = os.path.join(sublime.packages_path(), _DEFAULT_PATH)
# Ensure the folder exists
os.makedirs(folder, exist_ok=True)
return folder
def _generate_name(name, extension=_DEFAULT_EXTENSION):
return ''.join([name, extension])
|
import sublime
import json
import os
from ..json import encoder
from ..json import decoder
from . import settings
_DEFAULT_PATH = os.path.join('User', 'sessions')
_DEFAULT_EXTENSION = 'json'
def dump(name, session):
session_path = _generate_path(name)
with open(session_path, 'w') as f:
json.dump(session, f, cls=encoder.SessionEncoder)
def load(name):
session_path = _generate_path(name)
with open(session_path, 'r') as f:
return json.load(f, cls=decoder.SessionDecoder)
def _generate_path(name):
path = settings.get('session_path')
if not path:
path = _DEFAULT_PATH
folder = os.path.join(sublime.packages_path(), path)
# Ensure the folder exists
os.makedirs(folder, exist_ok=True)
return os.path.join(folder, _generate_name(name))
def _generate_name(name, extension=_DEFAULT_EXTENSION):
return '.'.join([name, extension])
Use "sublime-session" as file extension
Furthermore fix some bugs in serialize.pyimport sublime
import json
import os
from ..json import encoder
from ..json import decoder
from . import settings
_DEFAULT_PATH = os.path.join('User', 'sessions')
_DEFAULT_EXTENSION = '.sublime-session'
def dump(name, session):
session_path = _generate_path(name)
with open(session_path, 'w') as f:
json.dump(session, f, cls=encoder.SessionEncoder)
def load(name):
session_path = _generate_path(name)
with open(session_path, 'r') as f:
return json.load(f, cls=decoder.SessionDecoder)
def _generate_path(name):
return os.path.join(_generate_folder(), _generate_name(name))
def _generate_folder():
folder = settings.get('session_path')
if folder:
folder = os.path.normpath(folder)
else:
folder = os.path.join(sublime.packages_path(), _DEFAULT_PATH)
# Ensure the folder exists
os.makedirs(folder, exist_ok=True)
return folder
def _generate_name(name, extension=_DEFAULT_EXTENSION):
return ''.join([name, extension])
|
<commit_before>import sublime
import json
import os
from ..json import encoder
from ..json import decoder
from . import settings
_DEFAULT_PATH = os.path.join('User', 'sessions')
_DEFAULT_EXTENSION = 'json'
def dump(name, session):
session_path = _generate_path(name)
with open(session_path, 'w') as f:
json.dump(session, f, cls=encoder.SessionEncoder)
def load(name):
session_path = _generate_path(name)
with open(session_path, 'r') as f:
return json.load(f, cls=decoder.SessionDecoder)
def _generate_path(name):
path = settings.get('session_path')
if not path:
path = _DEFAULT_PATH
folder = os.path.join(sublime.packages_path(), path)
# Ensure the folder exists
os.makedirs(folder, exist_ok=True)
return os.path.join(folder, _generate_name(name))
def _generate_name(name, extension=_DEFAULT_EXTENSION):
return '.'.join([name, extension])
<commit_msg>Use "sublime-session" as file extension
Furthermore fix some bugs in serialize.py<commit_after>import sublime
import json
import os
from ..json import encoder
from ..json import decoder
from . import settings
_DEFAULT_PATH = os.path.join('User', 'sessions')
_DEFAULT_EXTENSION = '.sublime-session'
def dump(name, session):
session_path = _generate_path(name)
with open(session_path, 'w') as f:
json.dump(session, f, cls=encoder.SessionEncoder)
def load(name):
session_path = _generate_path(name)
with open(session_path, 'r') as f:
return json.load(f, cls=decoder.SessionDecoder)
def _generate_path(name):
return os.path.join(_generate_folder(), _generate_name(name))
def _generate_folder():
folder = settings.get('session_path')
if folder:
folder = os.path.normpath(folder)
else:
folder = os.path.join(sublime.packages_path(), _DEFAULT_PATH)
# Ensure the folder exists
os.makedirs(folder, exist_ok=True)
return folder
def _generate_name(name, extension=_DEFAULT_EXTENSION):
return ''.join([name, extension])
|
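Two small consequences of this change are easy to miss: _DEFAULT_EXTENSION now carries its own leading dot, so plain concatenation replaces the old '.'.join, and a user-supplied session_path is normalized rather than appended under packages_path. A tiny sketch of just the name handling (standalone, no Sublime APIs):
import os
_DEFAULT_EXTENSION = '.sublime-session'
def _generate_name(name, extension=_DEFAULT_EXTENSION):
    # The extension includes its leading dot, so no join separator is needed.
    return ''.join([name, extension])
print(_generate_name('work'))  # -> work.sublime-session
print(os.path.normpath('sessions//old/../new'))  # -> sessions/new (sessions\new on Windows)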
17d66bad1fd2ad75294dde5cbf0c1b5c694ae54c
|
bin/get_templates.py
|
bin/get_templates.py
|
#!/usr/bin/env python
import json
import os
from lib.functional import multi_map
from engine import types, consts
template_dir = os.path.join(os.environ['PORTER'], 'templates')
structs = (
(types.new_unit, "Tank", (consts.RED,)),
(types.new_attack, "RegularCannon", ()),
(types.new_armor, "WeakMetal", ()),
(types.new_movement, "Treads", ()),
)
def without_trailing_whitespace(string):
def remove_trailing_whitespace(line):
return line.rstrip()
return '\n'.join(map(remove_trailing_whitespace, string.split('\n')))
def generate_template(new_, name, args):
with open(os.path.join(template_dir, '%s.json' % (name,)), 'w') as f:
f.write(
without_trailing_whitespace(
json.dumps(
json.loads(
repr(
new_(name, *args))),
indent=4)))
def main():
if not os.path.exists(template_dir):
os.mkdir(template_dir)
multi_map(generate_template, structs)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import json
import os
from lib.functional import multi_map
from engine import types, consts
template_dir = os.path.join(os.environ['PORTER'], 'templates')
structs = (
(types.new_unit, "Tank", (consts.RED,)),
(types.new_attack, "RegularCannon", ()),
(types.new_armor, "WeakMetal", ()),
(types.new_movement, "Treads", ()),
)
def without_trailing_whitespace(string):
def remove_trailing_whitespace(line):
return line.rstrip()
return '\n'.join(map(remove_trailing_whitespace, string.split('\n')))
def delete_all_templates():
    do_delete = raw_input('Really remove contents of %s? (y/n) ' % (template_dir,))
if do_delete == 'y':
multi_map(delete_template, structs)
os.rmdir(template_dir)
else:
print 'Aborting on user request'
def delete_template(new_, name, args):
os.remove(os.path.join(template_dir, '%s.json' % (name,)))
def generate_template(new_, name, args):
with open(os.path.join(template_dir, '%s.json' % (name,)), 'w') as f:
f.write(
without_trailing_whitespace(
json.dumps(
json.loads(
repr(
new_(name, *args))),
indent=4)))
def main():
if not os.path.exists(template_dir):
os.mkdir(template_dir)
multi_map(generate_template, structs)
if __name__ == '__main__':
main()
|
Add functionality to delete all templates
|
Add functionality to delete all templates
|
Python
|
mit
|
Tactique/game_engine,Tactique/game_engine
|
#!/usr/bin/env python
import json
import os
from lib.functional import multi_map
from engine import types, consts
template_dir = os.path.join(os.environ['PORTER'], 'templates')
structs = (
(types.new_unit, "Tank", (consts.RED,)),
(types.new_attack, "RegularCannon", ()),
(types.new_armor, "WeakMetal", ()),
(types.new_movement, "Treads", ()),
)
def without_trailing_whitespace(string):
def remove_trailing_whitespace(line):
return line.rstrip()
return '\n'.join(map(remove_trailing_whitespace, string.split('\n')))
def generate_template(new_, name, args):
with open(os.path.join(template_dir, '%s.json' % (name,)), 'w') as f:
f.write(
without_trailing_whitespace(
json.dumps(
json.loads(
repr(
new_(name, *args))),
indent=4)))
def main():
if not os.path.exists(template_dir):
os.mkdir(template_dir)
multi_map(generate_template, structs)
if __name__ == '__main__':
main()
Add functionality to delete all templates
|
#!/usr/bin/env python
import json
import os
from lib.functional import multi_map
from engine import types, consts
template_dir = os.path.join(os.environ['PORTER'], 'templates')
structs = (
(types.new_unit, "Tank", (consts.RED,)),
(types.new_attack, "RegularCannon", ()),
(types.new_armor, "WeakMetal", ()),
(types.new_movement, "Treads", ()),
)
def without_trailing_whitespace(string):
def remove_trailing_whitespace(line):
return line.rstrip()
return '\n'.join(map(remove_trailing_whitespace, string.split('\n')))
def delete_all_templates():
    do_delete = raw_input('Really remove contents of %s? (y/n) ' % (template_dir,))
if do_delete == 'y':
multi_map(delete_template, structs)
os.rmdir(template_dir)
else:
print 'Aborting on user request'
def delete_template(new_, name, args):
os.remove(os.path.join(template_dir, '%s.json' % (name,)))
def generate_template(new_, name, args):
with open(os.path.join(template_dir, '%s.json' % (name,)), 'w') as f:
f.write(
without_trailing_whitespace(
json.dumps(
json.loads(
repr(
new_(name, *args))),
indent=4)))
def main():
if not os.path.exists(template_dir):
os.mkdir(template_dir)
multi_map(generate_template, structs)
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
import json
import os
from lib.functional import multi_map
from engine import types, consts
template_dir = os.path.join(os.environ['PORTER'], 'templates')
structs = (
(types.new_unit, "Tank", (consts.RED,)),
(types.new_attack, "RegularCannon", ()),
(types.new_armor, "WeakMetal", ()),
(types.new_movement, "Treads", ()),
)
def without_trailing_whitespace(string):
def remove_trailing_whitespace(line):
return line.rstrip()
return '\n'.join(map(remove_trailing_whitespace, string.split('\n')))
def generate_template(new_, name, args):
with open(os.path.join(template_dir, '%s.json' % (name,)), 'w') as f:
f.write(
without_trailing_whitespace(
json.dumps(
json.loads(
repr(
new_(name, *args))),
indent=4)))
def main():
if not os.path.exists(template_dir):
os.mkdir(template_dir)
multi_map(generate_template, structs)
if __name__ == '__main__':
main()
<commit_msg>Add functionality to delete all templates<commit_after>
|
#!/usr/bin/env python
import json
import os
from lib.functional import multi_map
from engine import types, consts
template_dir = os.path.join(os.environ['PORTER'], 'templates')
structs = (
(types.new_unit, "Tank", (consts.RED,)),
(types.new_attack, "RegularCannon", ()),
(types.new_armor, "WeakMetal", ()),
(types.new_movement, "Treads", ()),
)
def without_trailing_whitespace(string):
def remove_trailing_whitespace(line):
return line.rstrip()
return '\n'.join(map(remove_trailing_whitespace, string.split('\n')))
def delete_all_templates():
    do_delete = raw_input('Really remove contents of %s? (y/n) ' % (template_dir,))
if do_delete == 'y':
multi_map(delete_template, structs)
os.rmdir(template_dir)
else:
print 'Aborting on user request'
def delete_template(new_, name, args):
os.remove(os.path.join(template_dir, '%s.json' % (name,)))
def generate_template(new_, name, args):
with open(os.path.join(template_dir, '%s.json' % (name,)), 'w') as f:
f.write(
without_trailing_whitespace(
json.dumps(
json.loads(
repr(
new_(name, *args))),
indent=4)))
def main():
if not os.path.exists(template_dir):
os.mkdir(template_dir)
multi_map(generate_template, structs)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import json
import os
from lib.functional import multi_map
from engine import types, consts
template_dir = os.path.join(os.environ['PORTER'], 'templates')
structs = (
(types.new_unit, "Tank", (consts.RED,)),
(types.new_attack, "RegularCannon", ()),
(types.new_armor, "WeakMetal", ()),
(types.new_movement, "Treads", ()),
)
def without_trailing_whitespace(string):
def remove_trailing_whitespace(line):
return line.rstrip()
return '\n'.join(map(remove_trailing_whitespace, string.split('\n')))
def generate_template(new_, name, args):
with open(os.path.join(template_dir, '%s.json' % (name,)), 'w') as f:
f.write(
without_trailing_whitespace(
json.dumps(
json.loads(
repr(
new_(name, *args))),
indent=4)))
def main():
if not os.path.exists(template_dir):
os.mkdir(template_dir)
multi_map(generate_template, structs)
if __name__ == '__main__':
main()
Add functionality to delete all templates#!/usr/bin/env python
import json
import os
from lib.functional import multi_map
from engine import types, consts
template_dir = os.path.join(os.environ['PORTER'], 'templates')
structs = (
(types.new_unit, "Tank", (consts.RED,)),
(types.new_attack, "RegularCannon", ()),
(types.new_armor, "WeakMetal", ()),
(types.new_movement, "Treads", ()),
)
def without_trailing_whitespace(string):
def remove_trailing_whitespace(line):
return line.rstrip()
return '\n'.join(map(remove_trailing_whitespace, string.split('\n')))
def delete_all_templates():
    do_delete = raw_input('Really remove contents of %s? (y/n) ' % (template_dir,))
if do_delete == 'y':
multi_map(delete_template, structs)
os.rmdir(template_dir)
else:
print 'Aborting on user request'
def delete_template(new_, name, args):
os.remove(os.path.join(template_dir, '%s.json' % (name,)))
def generate_template(new_, name, args):
with open(os.path.join(template_dir, '%s.json' % (name,)), 'w') as f:
f.write(
without_trailing_whitespace(
json.dumps(
json.loads(
repr(
new_(name, *args))),
indent=4)))
def main():
if not os.path.exists(template_dir):
os.mkdir(template_dir)
multi_map(generate_template, structs)
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
import json
import os
from lib.functional import multi_map
from engine import types, consts
template_dir = os.path.join(os.environ['PORTER'], 'templates')
structs = (
(types.new_unit, "Tank", (consts.RED,)),
(types.new_attack, "RegularCannon", ()),
(types.new_armor, "WeakMetal", ()),
(types.new_movement, "Treads", ()),
)
def without_trailing_whitespace(string):
def remove_trailing_whitespace(line):
return line.rstrip()
return '\n'.join(map(remove_trailing_whitespace, string.split('\n')))
def generate_template(new_, name, args):
with open(os.path.join(template_dir, '%s.json' % (name,)), 'w') as f:
f.write(
without_trailing_whitespace(
json.dumps(
json.loads(
repr(
new_(name, *args))),
indent=4)))
def main():
if not os.path.exists(template_dir):
os.mkdir(template_dir)
multi_map(generate_template, structs)
if __name__ == '__main__':
main()
<commit_msg>Add functionality to delete all templates<commit_after>#!/usr/bin/env python
import json
import os
from lib.functional import multi_map
from engine import types, consts
template_dir = os.path.join(os.environ['PORTER'], 'templates')
structs = (
(types.new_unit, "Tank", (consts.RED,)),
(types.new_attack, "RegularCannon", ()),
(types.new_armor, "WeakMetal", ()),
(types.new_movement, "Treads", ()),
)
def without_trailing_whitespace(string):
def remove_trailing_whitespace(line):
return line.rstrip()
return '\n'.join(map(remove_trailing_whitespace, string.split('\n')))
def delete_all_templates():
    do_delete = raw_input('Really remove contents of %s? (y/n) ' % (template_dir,))
if do_delete == 'y':
multi_map(delete_template, structs)
os.rmdir(template_dir)
else:
print 'Aborting on user request'
def delete_template(new_, name, args):
os.remove(os.path.join(template_dir, '%s.json' % (name,)))
def generate_template(new_, name, args):
with open(os.path.join(template_dir, '%s.json' % (name,)), 'w') as f:
f.write(
without_trailing_whitespace(
json.dumps(
json.loads(
repr(
new_(name, *args))),
indent=4)))
def main():
if not os.path.exists(template_dir):
os.mkdir(template_dir)
multi_map(generate_template, structs)
if __name__ == '__main__':
main()
|
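The new delete path removes only the template files it knows about and then calls os.rmdir, which raises OSError if anything unexpected remains in the folder. If that strictness is not wanted, the same confirm-then-delete pattern could lean on shutil instead (a Python 3 sketch, not part of the commit):
import shutil
def delete_templates_tree(folder):
    answer = input('Really remove contents of %s? (y/n) ' % (folder,))
    if answer == 'y':
        # Removes the folder and everything in it, stray files included.
        shutil.rmtree(folder)
    else:
        print('Aborting on user request')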
464e13cc9065b966eadd1413802c32c536c478fd
|
tests/optvis/param/test_cppn.py
|
tests/optvis/param/test_cppn.py
|
from __future__ import absolute_import, division, print_function
import pytest
import numpy as np
import tensorflow as tf
import logging
from lucid.optvis.param.cppn import cppn
log = logging.getLogger(__name__)
@pytest.mark.slow
def test_cppn_fits_xor():
with tf.Graph().as_default(), tf.Session() as sess:
cppn_param = cppn(16, num_output_channels=1)[0]
def xor_objective(a):
return -(
tf.square(a[0, 0])
+ tf.square(a[-1, -1])
+ tf.square(1.0 - a[-1, 0])
+ tf.square(1.0 - a[0, -1])
)
loss_t = xor_objective(cppn_param)
optimizer = tf.train.AdamOptimizer(0.01)
objective = optimizer.minimize(loss_t)
tf.global_variables_initializer().run()
loss = loss_t.eval()
for i in range(100):
_, vis = sess.run([objective, cppn_param])
close_enough = (
vis[0, 0] > .99
and vis[-1, -1] > .99
and vis[-1, 0] < .01
and vis[0, -1] < .01
)
if close_enough:
return
assert False, "fitting XOR took more than 100 steps, failing test"
|
from __future__ import absolute_import, division, print_function
import pytest
import numpy as np
import tensorflow as tf
import logging
from lucid.optvis.param.cppn import cppn
log = logging.getLogger(__name__)
@pytest.mark.slow
def test_cppn_fits_xor():
with tf.Graph().as_default(), tf.Session() as sess:
cppn_param = cppn(16, num_output_channels=1)[0]
def xor_objective(a):
return -(
tf.square(a[0, 0])
+ tf.square(a[-1, -1])
+ tf.square(1.0 - a[-1, 0])
+ tf.square(1.0 - a[0, -1])
)
loss_t = xor_objective(cppn_param)
optimizer = tf.train.AdamOptimizer(0.01)
objective = optimizer.minimize(loss_t)
for try_i in range(3):
tf.global_variables_initializer().run()
# loss = loss_t.eval()
for i in range(200):
_, vis = sess.run([objective, cppn_param])
close_enough = (
vis[0, 0] > .99
and vis[-1, -1] > .99
and vis[-1, 0] < .01
and vis[0, -1] < .01
)
if close_enough:
return
assert False, "fitting XOR took more than 200 steps, failing test"
|
Add retries to cppn param test
|
Add retries to cppn param test
|
Python
|
apache-2.0
|
tensorflow/lucid,tensorflow/lucid,tensorflow/lucid,tensorflow/lucid
|
from __future__ import absolute_import, division, print_function
import pytest
import numpy as np
import tensorflow as tf
import logging
from lucid.optvis.param.cppn import cppn
log = logging.getLogger(__name__)
@pytest.mark.slow
def test_cppn_fits_xor():
with tf.Graph().as_default(), tf.Session() as sess:
cppn_param = cppn(16, num_output_channels=1)[0]
def xor_objective(a):
return -(
tf.square(a[0, 0])
+ tf.square(a[-1, -1])
+ tf.square(1.0 - a[-1, 0])
+ tf.square(1.0 - a[0, -1])
)
loss_t = xor_objective(cppn_param)
optimizer = tf.train.AdamOptimizer(0.01)
objective = optimizer.minimize(loss_t)
tf.global_variables_initializer().run()
loss = loss_t.eval()
for i in range(100):
_, vis = sess.run([objective, cppn_param])
close_enough = (
vis[0, 0] > .99
and vis[-1, -1] > .99
and vis[-1, 0] < .01
and vis[0, -1] < .01
)
if close_enough:
return
assert False, "fitting XOR took more than 100 steps, failing test"
Add retries to cppn param test
|
from __future__ import absolute_import, division, print_function
import pytest
import numpy as np
import tensorflow as tf
import logging
from lucid.optvis.param.cppn import cppn
log = logging.getLogger(__name__)
@pytest.mark.slow
def test_cppn_fits_xor():
with tf.Graph().as_default(), tf.Session() as sess:
cppn_param = cppn(16, num_output_channels=1)[0]
def xor_objective(a):
return -(
tf.square(a[0, 0])
+ tf.square(a[-1, -1])
+ tf.square(1.0 - a[-1, 0])
+ tf.square(1.0 - a[0, -1])
)
loss_t = xor_objective(cppn_param)
optimizer = tf.train.AdamOptimizer(0.01)
objective = optimizer.minimize(loss_t)
for try_i in range(3):
tf.global_variables_initializer().run()
# loss = loss_t.eval()
for i in range(200):
_, vis = sess.run([objective, cppn_param])
close_enough = (
vis[0, 0] > .99
and vis[-1, -1] > .99
and vis[-1, 0] < .01
and vis[0, -1] < .01
)
if close_enough:
return
assert False, "fitting XOR took more than 200 steps, failing test"
|
<commit_before>from __future__ import absolute_import, division, print_function
import pytest
import numpy as np
import tensorflow as tf
import logging
from lucid.optvis.param.cppn import cppn
log = logging.getLogger(__name__)
@pytest.mark.slow
def test_cppn_fits_xor():
with tf.Graph().as_default(), tf.Session() as sess:
cppn_param = cppn(16, num_output_channels=1)[0]
def xor_objective(a):
return -(
tf.square(a[0, 0])
+ tf.square(a[-1, -1])
+ tf.square(1.0 - a[-1, 0])
+ tf.square(1.0 - a[0, -1])
)
loss_t = xor_objective(cppn_param)
optimizer = tf.train.AdamOptimizer(0.01)
objective = optimizer.minimize(loss_t)
tf.global_variables_initializer().run()
loss = loss_t.eval()
for i in range(100):
_, vis = sess.run([objective, cppn_param])
close_enough = (
vis[0, 0] > .99
and vis[-1, -1] > .99
and vis[-1, 0] < .01
and vis[0, -1] < .01
)
if close_enough:
return
assert False, "fitting XOR took more than 100 steps, failing test"
<commit_msg>Add retries to cppn param test<commit_after>
|
from __future__ import absolute_import, division, print_function
import pytest
import numpy as np
import tensorflow as tf
import logging
from lucid.optvis.param.cppn import cppn
log = logging.getLogger(__name__)
@pytest.mark.slow
def test_cppn_fits_xor():
with tf.Graph().as_default(), tf.Session() as sess:
cppn_param = cppn(16, num_output_channels=1)[0]
def xor_objective(a):
return -(
tf.square(a[0, 0])
+ tf.square(a[-1, -1])
+ tf.square(1.0 - a[-1, 0])
+ tf.square(1.0 - a[0, -1])
)
loss_t = xor_objective(cppn_param)
optimizer = tf.train.AdamOptimizer(0.01)
objective = optimizer.minimize(loss_t)
for try_i in range(3):
tf.global_variables_initializer().run()
# loss = loss_t.eval()
for i in range(200):
_, vis = sess.run([objective, cppn_param])
close_enough = (
vis[0, 0] > .99
and vis[-1, -1] > .99
and vis[-1, 0] < .01
and vis[0, -1] < .01
)
if close_enough:
return
assert False, "fitting XOR took more than 200 steps, failing test"
|
from __future__ import absolute_import, division, print_function
import pytest
import numpy as np
import tensorflow as tf
import logging
from lucid.optvis.param.cppn import cppn
log = logging.getLogger(__name__)
@pytest.mark.slow
def test_cppn_fits_xor():
with tf.Graph().as_default(), tf.Session() as sess:
cppn_param = cppn(16, num_output_channels=1)[0]
def xor_objective(a):
return -(
tf.square(a[0, 0])
+ tf.square(a[-1, -1])
+ tf.square(1.0 - a[-1, 0])
+ tf.square(1.0 - a[0, -1])
)
loss_t = xor_objective(cppn_param)
optimizer = tf.train.AdamOptimizer(0.01)
objective = optimizer.minimize(loss_t)
tf.global_variables_initializer().run()
loss = loss_t.eval()
for i in range(100):
_, vis = sess.run([objective, cppn_param])
close_enough = (
vis[0, 0] > .99
and vis[-1, -1] > .99
and vis[-1, 0] < .01
and vis[0, -1] < .01
)
if close_enough:
return
assert False, "fitting XOR took more than 100 steps, failing test"
Add retries to cppn param testfrom __future__ import absolute_import, division, print_function
import pytest
import numpy as np
import tensorflow as tf
import logging
from lucid.optvis.param.cppn import cppn
log = logging.getLogger(__name__)
@pytest.mark.slow
def test_cppn_fits_xor():
with tf.Graph().as_default(), tf.Session() as sess:
cppn_param = cppn(16, num_output_channels=1)[0]
def xor_objective(a):
return -(
tf.square(a[0, 0])
+ tf.square(a[-1, -1])
+ tf.square(1.0 - a[-1, 0])
+ tf.square(1.0 - a[0, -1])
)
loss_t = xor_objective(cppn_param)
optimizer = tf.train.AdamOptimizer(0.01)
objective = optimizer.minimize(loss_t)
for try_i in range(3):
tf.global_variables_initializer().run()
# loss = loss_t.eval()
for i in range(200):
_, vis = sess.run([objective, cppn_param])
close_enough = (
vis[0, 0] > .99
and vis[-1, -1] > .99
and vis[-1, 0] < .01
and vis[0, -1] < .01
)
if close_enough:
return
assert False, "fitting XOR took more than 200 steps, failing test"
|
<commit_before>from __future__ import absolute_import, division, print_function
import pytest
import numpy as np
import tensorflow as tf
import logging
from lucid.optvis.param.cppn import cppn
log = logging.getLogger(__name__)
@pytest.mark.slow
def test_cppn_fits_xor():
with tf.Graph().as_default(), tf.Session() as sess:
cppn_param = cppn(16, num_output_channels=1)[0]
def xor_objective(a):
return -(
tf.square(a[0, 0])
+ tf.square(a[-1, -1])
+ tf.square(1.0 - a[-1, 0])
+ tf.square(1.0 - a[0, -1])
)
loss_t = xor_objective(cppn_param)
optimizer = tf.train.AdamOptimizer(0.01)
objective = optimizer.minimize(loss_t)
tf.global_variables_initializer().run()
loss = loss_t.eval()
for i in range(100):
_, vis = sess.run([objective, cppn_param])
close_enough = (
vis[0, 0] > .99
and vis[-1, -1] > .99
and vis[-1, 0] < .01
and vis[0, -1] < .01
)
if close_enough:
return
assert False, "fitting XOR took more than 100 steps, failing test"
<commit_msg>Add retries to cppn param test<commit_after>from __future__ import absolute_import, division, print_function
import pytest
import numpy as np
import tensorflow as tf
import logging
from lucid.optvis.param.cppn import cppn
log = logging.getLogger(__name__)
@pytest.mark.slow
def test_cppn_fits_xor():
with tf.Graph().as_default(), tf.Session() as sess:
cppn_param = cppn(16, num_output_channels=1)[0]
def xor_objective(a):
return -(
tf.square(a[0, 0])
+ tf.square(a[-1, -1])
+ tf.square(1.0 - a[-1, 0])
+ tf.square(1.0 - a[0, -1])
)
loss_t = xor_objective(cppn_param)
optimizer = tf.train.AdamOptimizer(0.01)
objective = optimizer.minimize(loss_t)
for try_i in range(3):
tf.global_variables_initializer().run()
# loss = loss_t.eval()
for i in range(200):
_, vis = sess.run([objective, cppn_param])
close_enough = (
vis[0, 0] > .99
and vis[-1, -1] > .99
and vis[-1, 0] < .01
and vis[0, -1] < .01
)
if close_enough:
return
assert False, "fitting XOR took more than 200 steps, failing test"
|
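Re-running tf.global_variables_initializer() gives each retry a fresh random CPPN, so a single unlucky initialization no longer fails the suite. For tests that do not need a persistent session, the same idea can be packaged as a generic retry decorator (a sketch of the pattern, not what lucid uses):
import functools
def retry(times=3):
    # Re-run a stochastic test body up to `times` times; the first
    # success passes, and only the final failure propagates.
    def decorator(fn):
        @functools.wraps(fn)
        def wrapper(*args, **kwargs):
            last_error = None
            for _ in range(times):
                try:
                    return fn(*args, **kwargs)
                except AssertionError as error:
                    last_error = error
            raise last_error
        return wrapper
    return decorator
@retry(times=3)
def test_stochastic_fit():
    import random
    assert random.random() > 0.2  # stand-in for a flaky optimization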
90f56855d992dc03ddcc8e5c2db08ed0e5917e39
|
ipython/profile/00_import_sciqnd.py
|
ipython/profile/00_import_sciqnd.py
|
import cPickle as pickle
import os
import sys
import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import scipy as sp
import scipy.io
import scipy.stats
import skimage
import skimage.transform
import skimage.io
import cv2
# The following lines call magic commands
get_ipython().run_line_magic(u"pdb", u"")
# Insert breakpoints as breakpoint()() instead of Tracer()()
from IPython.core.debugger import Tracer as breakpoint
|
import cPickle as pickle
import glob
import json
import math
import os
import sys
import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import scipy as sp
import scipy.io
import scipy.stats
import skimage
import skimage.transform
import skimage.io
import cv2
# The following lines call magic commands
get_ipython().run_line_magic(u"pdb", u"")
# Insert breakpoints as breakpoint()() instead of Tracer()()
from IPython.core.debugger import Tracer as breakpoint
|
Add useful modules/pckg from the standard library
|
Add useful modules/pckg from the standard library
|
Python
|
mit
|
escorciav/linux-utils,escorciav/linux-utils
|
import cPickle as pickle
import os
import sys
import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import scipy as sp
import scipy.io
import scipy.stats
import skimage
import skimage.transform
import skimage.io
import cv2
# The following lines call magic commands
get_ipython().run_line_magic(u"pdb", u"")
# Insert breakpoints as breakpoint()() instead of Tracer()()
from IPython.core.debugger import Tracer as breakpoint
Add useful modules/pckg from the standard library
|
import cPickle as pickle
import glob
import json
import math
import os
import sys
import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import scipy as sp
import scipy.io
import scipy.stats
import skimage
import skimage.transform
import skimage.io
import cv2
# The following lines call magic commands
get_ipython().run_line_magic(u"pdb", u"")
# Insert breakpoints as breakpoint()() instead of Tracer()()
from IPython.core.debugger import Tracer as breakpoint
|
<commit_before>import cPickle as pickle
import os
import sys
import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import scipy as sp
import scipy.io
import scipy.stats
import skimage
import skimage.transform
import skimage.io
import cv2
# The following lines call magic commands
get_ipython().run_line_magic(u"pdb", u"")
# Insert breakpoints as breakpoint()() instead of Tracer()()
from IPython.core.debugger import Tracer as breakpoint
<commit_msg>Add useful modules/pckg from the standard library<commit_after>
|
import cPickle as pickle
import glob
import json
import math
import os
import sys
import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import scipy as sp
import scipy.io
import scipy.stats
import skimage
import skimage.transform
import skimage.io
import cv2
# The following lines call magic commands
get_ipython().run_line_magic(u"pdb", u"")
# Insert breakpoints as breakpoint()() instead of Tracer()()
from IPython.core.debugger import Tracer as breakpoint
|
import cPickle as pickle
import os
import sys
import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import scipy as sp
import scipy.io
import scipy.stats
import skimage
import skimage.transform
import skimage.io
import cv2
# The following lines call magic commands
get_ipython().run_line_magic(u"pdb", u"")
# Insert breakpoints as breakpoint()() instead of Tracer()()
from IPython.core.debugger import Tracer as breakpoint
Add useful modules/pckg from the standard libraryimport cPickle as pickle
import glob
import json
import math
import os
import sys
import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import scipy as sp
import scipy.io
import scipy.stats
import skimage
import skimage.transform
import skimage.io
import cv2
# The following lines call magic commands
get_ipython().run_line_magic(u"pdb", u"")
# Insert breakpoints as breakpoint()() instead of Tracer()()
from IPython.core.debugger import Tracer as breakpoint
|
<commit_before>import cPickle as pickle
import os
import sys
import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import scipy as sp
import scipy.io
import scipy.stats
import skimage
import skimage.transform
import skimage.io
import cv2
# The following lines call magic commands
get_ipython().run_line_magic(u"pdb", u"")
# Insert breakpoints as breakpoint()() instead of Tracer()()
from IPython.core.debugger import Tracer as breakpoint
<commit_msg>Add useful modules/packages from the standard library<commit_after>import cPickle as pickle
import glob
import json
import math
import os
import sys
import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import scipy as sp
import scipy.io
import scipy.stats
import skimage
import skimage.transform
import skimage.io
import cv2
# The following lines call magic commands
get_ipython().run_line_magic(u"pdb", u"")
# Insert breakpoints as breakpoint()() instead of Tracer()()
from IPython.core.debugger import Tracer as breakpoint
|
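A minimal Python 3 sketch of the startup file above, for contrast: cPickle was folded into pickle in Python 3, and IPython later removed Tracer in favor of set_trace, so the commented workaround is no longer needed. Trimmed to a few imports; the guard around get_ipython() is an assumption about the file also being importable outside IPython.

# Python 3 sketch of a comparable IPython startup file
import pickle  # Python 3 uses the C implementation behind pickle automatically
import numpy as np
import pandas as pd
from IPython import get_ipython
from IPython.core.debugger import set_trace  # replaces the removed Tracer

ip = get_ipython()
if ip is not None:  # magics are only available inside an IPython session
    ip.run_line_magic("pdb", "")  # toggle automatic pdb on uncaught exceptions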
9a6150ca2303bb1c682cdc037853e2cf182a1baa
|
halng/commands.py
|
halng/commands.py
|
import logging
import os
import brain
from cmdparse import Command
log = logging.getLogger("hal")
class InitCommand(Command):
def __init__(self):
Command.__init__(self, "init", summary="Initialize a new brain")
self.add_option("", "--force", action="store_true")
self.add_option("", "--order", type="int", default=5)
def run(self, options, args):
filename = "hal.brain"
if os.path.exists(filename):
if options.force:
os.remove(filename)
else:
log.error("%s already exists!", filename)
return
b = brain.Brain(filename)
b.init(order)
class CloneCommand(Command):
def __init__(self):
Command.__init__(self, "clone", summary="Clone a MegaHAL brain")
def run(self, options, args):
if len(args) != 1:
log.error("usage: clone <MegaHAL brain>")
return
if os.path.exists("hal.brain"):
log.error("hal.brain already exists")
return
megahal_brain = args[0]
b = brain.Brain("hal.brain")
b.init()
b.clone(megahal_brain)
|
import logging
import os
from brain import Brain
from cmdparse import Command
log = logging.getLogger("hal")
class InitCommand(Command):
def __init__(self):
Command.__init__(self, "init", summary="Initialize a new brain")
self.add_option("", "--force", action="store_true")
self.add_option("", "--order", type="int", default=5)
def run(self, options, args):
filename = "hal.brain"
if os.path.exists(filename):
if options.force:
os.remove(filename)
else:
log.error("%s already exists!", filename)
return
Brain.init(filename, options.order)
class CloneCommand(Command):
def __init__(self):
Command.__init__(self, "clone", summary="Clone a MegaHAL brain")
def run(self, options, args):
if len(args) != 1:
log.error("usage: clone <MegaHAL brain>")
return
if os.path.exists("hal.brain"):
log.error("hal.brain already exists")
return
megahal_brain = args[0]
Brain.init("hal.brain")
b.clone(megahal_brain)
|
Update to use the static Brain.init()
|
Update to use the static Brain.init()
|
Python
|
mit
|
DarkMio/cobe,tiagochiavericosta/cobe,wodim/cobe-ng,pteichman/cobe,tiagochiavericosta/cobe,meska/cobe,wodim/cobe-ng,DarkMio/cobe,meska/cobe,LeMagnesium/cobe,pteichman/cobe,LeMagnesium/cobe
|
import logging
import os
import brain
from cmdparse import Command
log = logging.getLogger("hal")
class InitCommand(Command):
def __init__(self):
Command.__init__(self, "init", summary="Initialize a new brain")
self.add_option("", "--force", action="store_true")
self.add_option("", "--order", type="int", default=5)
def run(self, options, args):
filename = "hal.brain"
if os.path.exists(filename):
if options.force:
os.remove(filename)
else:
log.error("%s already exists!", filename)
return
b = brain.Brain(filename)
b.init(order)
class CloneCommand(Command):
def __init__(self):
Command.__init__(self, "clone", summary="Clone a MegaHAL brain")
def run(self, options, args):
if len(args) != 1:
log.error("usage: clone <MegaHAL brain>")
return
if os.path.exists("hal.brain"):
log.error("hal.brain already exists")
return
megahal_brain = args[0]
b = brain.Brain("hal.brain")
b.init()
b.clone(megahal_brain)
Update to use the static Brain.init()
|
import logging
import os
from brain import Brain
from cmdparse import Command
log = logging.getLogger("hal")
class InitCommand(Command):
def __init__(self):
Command.__init__(self, "init", summary="Initialize a new brain")
self.add_option("", "--force", action="store_true")
self.add_option("", "--order", type="int", default=5)
def run(self, options, args):
filename = "hal.brain"
if os.path.exists(filename):
if options.force:
os.remove(filename)
else:
log.error("%s already exists!", filename)
return
Brain.init(filename, options.order)
class CloneCommand(Command):
def __init__(self):
Command.__init__(self, "clone", summary="Clone a MegaHAL brain")
def run(self, options, args):
if len(args) != 1:
log.error("usage: clone <MegaHAL brain>")
return
if os.path.exists("hal.brain"):
log.error("hal.brain already exists")
return
megahal_brain = args[0]
Brain.init("hal.brain")
b.clone(megahal_brain)
|
<commit_before>import logging
import os
import brain
from cmdparse import Command
log = logging.getLogger("hal")
class InitCommand(Command):
def __init__(self):
Command.__init__(self, "init", summary="Initialize a new brain")
self.add_option("", "--force", action="store_true")
self.add_option("", "--order", type="int", default=5)
def run(self, options, args):
filename = "hal.brain"
if os.path.exists(filename):
if options.force:
os.remove(filename)
else:
log.error("%s already exists!", filename)
return
b = brain.Brain(filename)
b.init(order)
class CloneCommand(Command):
def __init__(self):
Command.__init__(self, "clone", summary="Clone a MegaHAL brain")
def run(self, options, args):
if len(args) != 1:
log.error("usage: clone <MegaHAL brain>")
return
if os.path.exists("hal.brain"):
log.error("hal.brain already exists")
return
megahal_brain = args[0]
b = brain.Brain("hal.brain")
b.init()
b.clone(megahal_brain)
<commit_msg>Update to use the static Brain.init()<commit_after>
|
import logging
import os
from brain import Brain
from cmdparse import Command
log = logging.getLogger("hal")
class InitCommand(Command):
def __init__(self):
Command.__init__(self, "init", summary="Initialize a new brain")
self.add_option("", "--force", action="store_true")
self.add_option("", "--order", type="int", default=5)
def run(self, options, args):
filename = "hal.brain"
if os.path.exists(filename):
if options.force:
os.remove(filename)
else:
log.error("%s already exists!", filename)
return
Brain.init(filename, options.order)
class CloneCommand(Command):
def __init__(self):
Command.__init__(self, "clone", summary="Clone a MegaHAL brain")
def run(self, options, args):
if len(args) != 1:
log.error("usage: clone <MegaHAL brain>")
return
if os.path.exists("hal.brain"):
log.error("hal.brain already exists")
return
megahal_brain = args[0]
Brain.init("hal.brain")
b.clone(megahal_brain)
|
import logging
import os
import brain
from cmdparse import Command
log = logging.getLogger("hal")
class InitCommand(Command):
def __init__(self):
Command.__init__(self, "init", summary="Initialize a new brain")
self.add_option("", "--force", action="store_true")
self.add_option("", "--order", type="int", default=5)
def run(self, options, args):
filename = "hal.brain"
if os.path.exists(filename):
if options.force:
os.remove(filename)
else:
log.error("%s already exists!", filename)
return
b = brain.Brain(filename)
b.init(order)
class CloneCommand(Command):
def __init__(self):
Command.__init__(self, "clone", summary="Clone a MegaHAL brain")
def run(self, options, args):
if len(args) != 1:
log.error("usage: clone <MegaHAL brain>")
return
if os.path.exists("hal.brain"):
log.error("hal.brain already exists")
return
megahal_brain = args[0]
b = brain.Brain("hal.brain")
b.init()
b.clone(megahal_brain)
Update to use the static Brain.init()import logging
import os
from brain import Brain
from cmdparse import Command
log = logging.getLogger("hal")
class InitCommand(Command):
def __init__(self):
Command.__init__(self, "init", summary="Initialize a new brain")
self.add_option("", "--force", action="store_true")
self.add_option("", "--order", type="int", default=5)
def run(self, options, args):
filename = "hal.brain"
if os.path.exists(filename):
if options.force:
os.remove(filename)
else:
log.error("%s already exists!", filename)
return
Brain.init(filename, options.order)
class CloneCommand(Command):
def __init__(self):
Command.__init__(self, "clone", summary="Clone a MegaHAL brain")
def run(self, options, args):
if len(args) != 1:
log.error("usage: clone <MegaHAL brain>")
return
if os.path.exists("hal.brain"):
log.error("hal.brain already exists")
return
megahal_brain = args[0]
Brain.init("hal.brain")
b.clone(megahal_brain)
|
<commit_before>import logging
import os
import brain
from cmdparse import Command
log = logging.getLogger("hal")
class InitCommand(Command):
def __init__(self):
Command.__init__(self, "init", summary="Initialize a new brain")
self.add_option("", "--force", action="store_true")
self.add_option("", "--order", type="int", default=5)
def run(self, options, args):
filename = "hal.brain"
if os.path.exists(filename):
if options.force:
os.remove(filename)
else:
log.error("%s already exists!", filename)
return
b = brain.Brain(filename)
b.init(order)
class CloneCommand(Command):
def __init__(self):
Command.__init__(self, "clone", summary="Clone a MegaHAL brain")
def run(self, options, args):
if len(args) != 1:
log.error("usage: clone <MegaHAL brain>")
return
if os.path.exists("hal.brain"):
log.error("hal.brain already exists")
return
megahal_brain = args[0]
b = brain.Brain("hal.brain")
b.init()
b.clone(megahal_brain)
<commit_msg>Update to use the static Brain.init()<commit_after>import logging
import os
from brain import Brain
from cmdparse import Command
log = logging.getLogger("hal")
class InitCommand(Command):
def __init__(self):
Command.__init__(self, "init", summary="Initialize a new brain")
self.add_option("", "--force", action="store_true")
self.add_option("", "--order", type="int", default=5)
def run(self, options, args):
filename = "hal.brain"
if os.path.exists(filename):
if options.force:
os.remove(filename)
else:
log.error("%s already exists!", filename)
return
Brain.init(filename, options.order)
class CloneCommand(Command):
def __init__(self):
Command.__init__(self, "clone", summary="Clone a MegaHAL brain")
def run(self, options, args):
if len(args) != 1:
log.error("usage: clone <MegaHAL brain>")
return
if os.path.exists("hal.brain"):
log.error("hal.brain already exists")
return
megahal_brain = args[0]
Brain.init("hal.brain")
b.clone(megahal_brain)
|
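A self-contained sketch of the refactor this commit makes: collapsing construct-then-init into a single static factory. Brain here is a stand-in class, and the assumption that Brain.init returns the new instance is inferred from the b.clone call that follows it in the committed code.

class Brain:
    def __init__(self, filename, order):
        self.filename = filename
        self.order = order

    @staticmethod
    def init(filename, order=5):
        # create/prepare backing storage here, then hand back a ready instance
        return Brain(filename, order)

b = Brain.init("hal.brain", order=5)  # one call replaces Brain(...) plus b.init(...)
print(b.filename, b.order)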
41b4b94470e777876c386b33ee6181f6193169e6
|
version.py
|
version.py
|
major = 0
minor=0
patch=10
branch="master"
timestamp=1376502388.26
|
major = 0
minor=0
patch=11
branch="master"
timestamp=1376505745.87
|
Tag commit for v0.0.11-master generated by gitmake.py
|
Tag commit for v0.0.11-master generated by gitmake.py
|
Python
|
mit
|
ryansturmer/gitmake
|
major = 0
minor=0
patch=10
branch="master"
timestamp=1376502388.26Tag commit for v0.0.11-master generated by gitmake.py
|
major = 0
minor=0
patch=11
branch="master"
timestamp=1376505745.87
|
<commit_before>major = 0
minor=0
patch=10
branch="master"
timestamp=1376502388.26<commit_msg>Tag commit for v0.0.11-master generated by gitmake.py<commit_after>
|
major = 0
minor=0
patch=11
branch="master"
timestamp=1376505745.87
|
major = 0
minor=0
patch=10
branch="master"
timestamp=1376502388.26Tag commit for v0.0.11-master generated by gitmake.pymajor = 0
minor=0
patch=11
branch="master"
timestamp=1376505745.87
|
<commit_before>major = 0
minor=0
patch=10
branch="master"
timestamp=1376502388.26<commit_msg>Tag commit for v0.0.11-master generated by gitmake.py<commit_after>major = 0
minor=0
patch=11
branch="master"
timestamp=1376505745.87
|
9c786c82671ade46e7af309fd597d5eac93a75b0
|
pycah/db/__init__.py
|
pycah/db/__init__.py
|
import psycopg2
c = psycopg2.connect(user='postgres', password='password')
c.set_session(autocommit=True)
cur = c.cursor()
try:
cur.execute('CREATE DATABASE pycah;')
c.commit()
c.close()
c = psycopg2.connect(database='pycah', user='postgres', password='password')
c.set_session(autocommit=True)
cur = c.cursor()
cur.execute(open('./pycah/db/create_database.sql').read())
c.commit()
c.close()
except psycopg2.ProgrammingError:
c.close()
connection = psycopg2.connect(database='pycah', user='postgres', password='password')
|
import psycopg2
c = psycopg2.connect(user='postgres', password='password', host='127.0.0.1')
c.set_session(autocommit=True)
cur = c.cursor()
try:
cur.execute('CREATE DATABASE pycah;')
c.commit()
c.close()
c = psycopg2.connect(database='pycah', user='postgres', password='password', host='127.0.0.1')
c.set_session(autocommit=True)
cur = c.cursor()
cur.execute(open('./pycah/db/create_database.sql').read())
c.commit()
c.close()
except psycopg2.ProgrammingError:
c.close()
connection = psycopg2.connect(database='pycah', user='postgres', password='password', host='127.0.0.1')
|
Fix database connectivity on Linux.
|
Fix database connectivity on Linux.
|
Python
|
mit
|
nhardy/pyCAH,nhardy/pyCAH,nhardy/pyCAH
|
import psycopg2
c = psycopg2.connect(user='postgres', password='password')
c.set_session(autocommit=True)
cur = c.cursor()
try:
cur.execute('CREATE DATABASE pycah;')
c.commit()
c.close()
c = psycopg2.connect(database='pycah', user='postgres', password='password')
c.set_session(autocommit=True)
cur = c.cursor()
cur.execute(open('./pycah/db/create_database.sql').read())
c.commit()
c.close()
except psycopg2.ProgrammingError:
c.close()
connection = psycopg2.connect(database='pycah', user='postgres', password='password')
Fix database connectivity on Linux.
|
import psycopg2
c = psycopg2.connect(user='postgres', password='password', host='127.0.0.1')
c.set_session(autocommit=True)
cur = c.cursor()
try:
cur.execute('CREATE DATABASE pycah;')
c.commit()
c.close()
c = psycopg2.connect(database='pycah', user='postgres', password='password', host='127.0.0.1')
c.set_session(autocommit=True)
cur = c.cursor()
cur.execute(open('./pycah/db/create_database.sql').read())
c.commit()
c.close()
except psycopg2.ProgrammingError:
c.close()
connection = psycopg2.connect(database='pycah', user='postgres', password='password', host='127.0.0.1')
|
<commit_before>import psycopg2
c = psycopg2.connect(user='postgres', password='password')
c.set_session(autocommit=True)
cur = c.cursor()
try:
cur.execute('CREATE DATABASE pycah;')
c.commit()
c.close()
c = psycopg2.connect(database='pycah', user='postgres', password='password')
c.set_session(autocommit=True)
cur = c.cursor()
cur.execute(open('./pycah/db/create_database.sql').read())
c.commit()
c.close()
except psycopg2.ProgrammingError:
c.close()
connection = psycopg2.connect(database='pycah', user='postgres', password='password')
<commit_msg>Fix database connectivity on Linux.<commit_after>
|
import psycopg2
c = psycopg2.connect(user='postgres', password='password', host='127.0.0.1')
c.set_session(autocommit=True)
cur = c.cursor()
try:
cur.execute('CREATE DATABASE pycah;')
c.commit()
c.close()
c = psycopg2.connect(database='pycah', user='postgres', password='password', host='127.0.0.1')
c.set_session(autocommit=True)
cur = c.cursor()
cur.execute(open('./pycah/db/create_database.sql').read())
c.commit()
c.close()
except psycopg2.ProgrammingError:
c.close()
connection = psycopg2.connect(database='pycah', user='postgres', password='password', host='127.0.0.1')
|
import psycopg2
c = psycopg2.connect(user='postgres', password='password')
c.set_session(autocommit=True)
cur = c.cursor()
try:
cur.execute('CREATE DATABASE pycah;')
c.commit()
c.close()
c = psycopg2.connect(database='pycah', user='postgres', password='password')
c.set_session(autocommit=True)
cur = c.cursor()
cur.execute(open('./pycah/db/create_database.sql').read())
c.commit()
c.close()
except psycopg2.ProgrammingError:
c.close()
connection = psycopg2.connect(database='pycah', user='postgres', password='password')
Fix database connectivity on Linux.import psycopg2
c = psycopg2.connect(user='postgres', password='password', host='127.0.0.1')
c.set_session(autocommit=True)
cur = c.cursor()
try:
cur.execute('CREATE DATABASE pycah;')
c.commit()
c.close()
c = psycopg2.connect(database='pycah', user='postgres', password='password', host='127.0.0.1')
c.set_session(autocommit=True)
cur = c.cursor()
cur.execute(open('./pycah/db/create_database.sql').read())
c.commit()
c.close()
except psycopg2.ProgrammingError:
c.close()
connection = psycopg2.connect(database='pycah', user='postgres', password='password', host='127.0.0.1')
|
<commit_before>import psycopg2
c = psycopg2.connect(user='postgres', password='password')
c.set_session(autocommit=True)
cur = c.cursor()
try:
cur.execute('CREATE DATABASE pycah;')
c.commit()
c.close()
c = psycopg2.connect(database='pycah', user='postgres', password='password')
c.set_session(autocommit=True)
cur = c.cursor()
cur.execute(open('./pycah/db/create_database.sql').read())
c.commit()
c.close()
except psycopg2.ProgrammingError:
c.close()
connection = psycopg2.connect(database='pycah', user='postgres', password='password')
<commit_msg>Fix database connectivity on Linux.<commit_after>import psycopg2
c = psycopg2.connect(user='postgres', password='password', host='127.0.0.1')
c.set_session(autocommit=True)
cur = c.cursor()
try:
cur.execute('CREATE DATABASE pycah;')
c.commit()
c.close()
c = psycopg2.connect(database='pycah', user='postgres', password='password', host='127.0.0.1')
c.set_session(autocommit=True)
cur = c.cursor()
cur.execute(open('./pycah/db/create_database.sql').read())
c.commit()
c.close()
except psycopg2.ProgrammingError:
c.close()
connection = psycopg2.connect(database='pycah', user='postgres', password='password', host='127.0.0.1')
|
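Why adding host fixes Linux, in short: with no host argument, libpq attempts a Unix-domain socket whose path varies between distributions, while host='127.0.0.1' forces a plain TCP connection. A hedged sketch with placeholder credentials:

import psycopg2

# host forces TCP; without it libpq looks for a Unix socket (often under
# /var/run/postgresql on Debian-family systems), whose location is distro-dependent
conn = psycopg2.connect(
    dbname="pycah",
    user="postgres",
    password="password",
    host="127.0.0.1",
    port=5432,
)
conn.close()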
a58a6b897370e82aa3625c36a00e2de74c16ab6c
|
cortex/__init__.py
|
cortex/__init__.py
|
from .dataset import Dataset, VolumeData, VertexData, DataView, View
from . import align, volume, quickflat, webgl, segment, options
from .database import db
from .utils import *
from .quickflat import make_figure as quickshow
openFile = Dataset.from_file
try:
from . import webgl
from .webgl import show as webshow
except ImportError:
pass
try:
from . import anat
except ImportError:
pass
# Create deprecated interface for database
import warnings
class dep(object):
def __getattr__(self, *args, **kwargs):
warnings.warn("cortex.surfs is deprecated, use cortex.db instead", Warning)
return db.__getattr__(*args, **kwargs)
def __dir__(self):
warnings.warn("cortex.surfs is deprecated, use cortex.db instead", Warning)
return db.__dir__()
surfs = dep()
|
from .dataset import Dataset, VolumeData, VertexData, DataView, View
from . import align, volume, quickflat, webgl, segment, options
from .database import db
from .utils import *
from .quickflat import make_figure as quickshow
openFile = Dataset.from_file
try:
from . import webgl
from .webgl import show as webshow
except ImportError:
pass
try:
from . import anat
except ImportError:
pass
# Create deprecated interface for database
import warnings
class dep(object):
def __getattr__(self, name):
warnings.warn("cortex.surfs is deprecated, use cortex.db instead", Warning)
return getattr(db, name)
def __dir__(self):
warnings.warn("cortex.surfs is deprecated, use cortex.db instead", Warning)
return db.__dir__()
surfs = dep()
|
Fix up the deprecated surfs object
|
Fix up the deprecated surfs object
|
Python
|
bsd-2-clause
|
gallantlab/pycortex,gallantlab/pycortex,CVML/pycortex,CVML/pycortex,smerdis/pycortex,smerdis/pycortex,smerdis/pycortex,gallantlab/pycortex,gallantlab/pycortex,gallantlab/pycortex,CVML/pycortex,smerdis/pycortex,CVML/pycortex,CVML/pycortex,smerdis/pycortex
|
from .dataset import Dataset, VolumeData, VertexData, DataView, View
from . import align, volume, quickflat, webgl, segment, options
from .database import db
from .utils import *
from .quickflat import make_figure as quickshow
openFile = Dataset.from_file
try:
from . import webgl
from .webgl import show as webshow
except ImportError:
pass
try:
from . import anat
except ImportError:
pass
# Create deprecated interface for database
import warnings
class dep(object):
def __getattr__(self, *args, **kwargs):
warnings.warn("cortex.surfs is deprecated, use cortex.db instead", Warning)
return db.__getattr__(*args, **kwargs)
def __dir__(self):
warnings.warn("cortex.surfs is deprecated, use cortex.db instead", Warning)
return db.__dir__()
surfs = dep()Fix up the deprecated surfs object
|
from .dataset import Dataset, VolumeData, VertexData, DataView, View
from . import align, volume, quickflat, webgl, segment, options
from .database import db
from .utils import *
from .quickflat import make_figure as quickshow
openFile = Dataset.from_file
try:
from . import webgl
from .webgl import show as webshow
except ImportError:
pass
try:
from . import anat
except ImportError:
pass
# Create deprecated interface for database
import warnings
class dep(object):
def __getattr__(self, name):
warnings.warn("cortex.surfs is deprecated, use cortex.db instead", Warning)
return getattr(db, name)
def __dir__(self):
warnings.warn("cortex.surfs is deprecated, use cortex.db instead", Warning)
return db.__dir__()
surfs = dep()
|
<commit_before>from .dataset import Dataset, VolumeData, VertexData, DataView, View
from . import align, volume, quickflat, webgl, segment, options
from .database import db
from .utils import *
from .quickflat import make_figure as quickshow
openFile = Dataset.from_file
try:
from . import webgl
from .webgl import show as webshow
except ImportError:
pass
try:
from . import anat
except ImportError:
pass
# Create deprecated interface for database
import warnings
class dep(object):
def __getattr__(self, *args, **kwargs):
warnings.warn("cortex.surfs is deprecated, use cortex.db instead", Warning)
return db.__getattr__(*args, **kwargs)
def __dir__(self):
warnings.warn("cortex.surfs is deprecated, use cortex.db instead", Warning)
return db.__dir__()
surfs = dep()<commit_msg>Fix up the deprecated surfs object<commit_after>
|
from .dataset import Dataset, VolumeData, VertexData, DataView, View
from . import align, volume, quickflat, webgl, segment, options
from .database import db
from .utils import *
from .quickflat import make_figure as quickshow
openFile = Dataset.from_file
try:
from . import webgl
from .webgl import show as webshow
except ImportError:
pass
try:
from . import anat
except ImportError:
pass
# Create deprecated interface for database
import warnings
class dep(object):
def __getattr__(self, name):
warnings.warn("cortex.surfs is deprecated, use cortex.db instead", Warning)
return getattr(db, name)
def __dir__(self):
warnings.warn("cortex.surfs is deprecated, use cortex.db instead", Warning)
return db.__dir__()
surfs = dep()
|
from .dataset import Dataset, VolumeData, VertexData, DataView, View
from . import align, volume, quickflat, webgl, segment, options
from .database import db
from .utils import *
from .quickflat import make_figure as quickshow
openFile = Dataset.from_file
try:
from . import webgl
from .webgl import show as webshow
except ImportError:
pass
try:
from . import anat
except ImportError:
pass
# Create deprecated interface for database
import warnings
class dep(object):
def __getattr__(self, *args, **kwargs):
warnings.warn("cortex.surfs is deprecated, use cortex.db instead", Warning)
return db.__getattr__(*args, **kwargs)
def __dir__(self):
warnings.warn("cortex.surfs is deprecated, use cortex.db instead", Warning)
return db.__dir__()
surfs = dep()Fix up the deprecated surfs objectfrom .dataset import Dataset, VolumeData, VertexData, DataView, View
from . import align, volume, quickflat, webgl, segment, options
from .database import db
from .utils import *
from .quickflat import make_figure as quickshow
openFile = Dataset.from_file
try:
from . import webgl
from .webgl import show as webshow
except ImportError:
pass
try:
from . import anat
except ImportError:
pass
# Create deprecated interface for database
import warnings
class dep(object):
def __getattr__(self, name):
warnings.warn("cortex.surfs is deprecated, use cortex.db instead", Warning)
return getattr(db, name)
def __dir__(self):
warnings.warn("cortex.surfs is deprecated, use cortex.db instead", Warning)
return db.__dir__()
surfs = dep()
|
<commit_before>from .dataset import Dataset, VolumeData, VertexData, DataView, View
from . import align, volume, quickflat, webgl, segment, options
from .database import db
from .utils import *
from .quickflat import make_figure as quickshow
openFile = Dataset.from_file
try:
from . import webgl
from .webgl import show as webshow
except ImportError:
pass
try:
from . import anat
except ImportError:
pass
# Create deprecated interface for database
import warnings
class dep(object):
def __getattr__(self, *args, **kwargs):
warnings.warn("cortex.surfs is deprecated, use cortex.db instead", Warning)
return db.__getattr__(*args, **kwargs)
def __dir__(self):
warnings.warn("cortex.surfs is deprecated, use cortex.db instead", Warning)
return db.__dir__()
surfs = dep()<commit_msg>Fix up the deprecated surfs object<commit_after>from .dataset import Dataset, VolumeData, VertexData, DataView, View
from . import align, volume, quickflat, webgl, segment, options
from .database import db
from .utils import *
from .quickflat import make_figure as quickshow
openFile = Dataset.from_file
try:
from . import webgl
from .webgl import show as webshow
except ImportError:
pass
try:
from . import anat
except ImportError:
pass
# Create deprecated interface for database
import warnings
class dep(object):
def __getattr__(self, name):
warnings.warn("cortex.surfs is deprecated, use cortex.db instead", Warning)
return getattr(db, name)
def __dir__(self):
warnings.warn("cortex.surfs is deprecated, use cortex.db instead", Warning)
return db.__dir__()
surfs = dep()
|
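The substance of this fix: calling db.__getattr__(name) directly only exercises the fallback hook, which fails for attributes that ordinary lookup would find, whereas getattr(db, name) performs the full lookup. A self-contained sketch of the deprecation-proxy pattern with stand-in names:

import warnings

class _Database(object):
    def get_surface(self):
        return "surface"

db = _Database()

class _Deprecated(object):
    def __getattr__(self, name):
        warnings.warn("surfs is deprecated, use db instead", DeprecationWarning)
        return getattr(db, name)  # full attribute lookup, unlike db.__getattr__(name)

surfs = _Deprecated()
print(surfs.get_surface())  # emits the warning, then delegates to db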
cae7a57304e207f319e9bb2e52837ee207d0d96e
|
mcdowell/src/main/python/ch1/ch1.py
|
mcdowell/src/main/python/ch1/ch1.py
|
def unique(string):
counter = {}
for c in string:
if c in counter:
counter[c] += 1
else:
counter[c] = 1
print(counter)
for k in counter:
if counter[k] > 1:
return False
else:
return True
def reverse(string):
result = []
for i in range(len(string)):
result.append(string[-(i+1)])
return "".join(result)
|
def unique(string):
counter = {}
for c in string:
if c in counter:
return False
else:
counter[c] = 1
else:
return True
def reverse(string):
result = []
for i in range(len(string)):
result.append(string[-(i+1)])
return "".join(result)
def is_permutation(str1, str2):
if len(str1) != len(str2):
return False
counter = {}
for i in range(len(str1)):
if str1[i] in counter:
counter[str1[i]] += 1
else:
counter[str1[i]] = 1
if str2[i] in counter:
counter[str2[i]] -= 1
else:
counter[str2[i]] = -1
for k in counter:
if counter[k] != 0:
return False
else:
return True
|
Add is_permutation function. Simplify unique function.
|
Add is_permutation function. Simplify unique function.
|
Python
|
mit
|
jamesewoo/tigeruppercut,jamesewoo/tigeruppercut
|
def unique(string):
counter = {}
for c in string:
if c in counter:
counter[c] += 1
else:
counter[c] = 1
print(counter)
for k in counter:
if counter[k] > 1:
return False
else:
return True
def reverse(string):
result = []
for i in range(len(string)):
result.append(string[-(i+1)])
return "".join(result)
Add is_permutation function. Simplify unique function.
|
def unique(string):
counter = {}
for c in string:
if c in counter:
return False
else:
counter[c] = 1
else:
return True
def reverse(string):
result = []
for i in range(len(string)):
result.append(string[-(i+1)])
return "".join(result)
def is_permutation(str1, str2):
if len(str1) != len(str2):
return False
counter = {}
for i in range(len(str1)):
if str1[i] in counter:
counter[str1[i]] += 1
else:
counter[str1[i]] = 1
if str2[i] in counter:
counter[str2[i]] -= 1
else:
counter[str2[i]] = -1
for k in counter:
if counter[k] != 0:
return False
else:
return True
|
<commit_before>def unique(string):
counter = {}
for c in string:
if c in counter:
counter[c] += 1
else:
counter[c] = 1
print(counter)
for k in counter:
if counter[k] > 1:
return False
else:
return True
def reverse(string):
result = []
for i in range(len(string)):
result.append(string[-(i+1)])
return "".join(result)
<commit_msg>Add is_permutation function. Simplify unique function.<commit_after>
|
def unique(string):
counter = {}
for c in string:
if c in counter:
return False
else:
counter[c] = 1
else:
return True
def reverse(string):
result = []
for i in range(len(string)):
result.append(string[-(i+1)])
return "".join(result)
def is_permutation(str1, str2):
if len(str1) != len(str2):
return False
counter = {}
for i in range(len(str1)):
if str1[i] in counter:
counter[str1[i]] += 1
else:
counter[str1[i]] = 1
if str2[i] in counter:
counter[str2[i]] -= 1
else:
counter[str2[i]] = -1
for k in counter:
if counter[k] != 0:
return False
else:
return True
|
def unique(string):
counter = {}
for c in string:
if c in counter:
counter[c] += 1
else:
counter[c] = 1
print(counter)
for k in counter:
if counter[k] > 1:
return False
else:
return True
def reverse(string):
result = []
for i in range(len(string)):
result.append(string[-(i+1)])
return "".join(result)
Add is_permutation function. Simplify unique function.def unique(string):
counter = {}
for c in string:
if c in counter:
return False
else:
counter[c] = 1
else:
return True
def reverse(string):
result = []
for i in range(len(string)):
result.append(string[-(i+1)])
return "".join(result)
def is_permutation(str1, str2):
if len(str1) != len(str2):
return False
counter = {}
for i in range(len(str1)):
if str1[i] in counter:
counter[str1[i]] += 1
else:
counter[str1[i]] = 1
if str2[i] in counter:
counter[str2[i]] -= 1
else:
counter[str2[i]] = -1
for k in counter:
if counter[k] != 0:
return False
else:
return True
|
<commit_before>def unique(string):
counter = {}
for c in string:
if c in counter:
counter[c] += 1
else:
counter[c] = 1
print(counter)
for k in counter:
if counter[k] > 1:
return False
else:
return True
def reverse(string):
result = []
for i in range(len(string)):
result.append(string[-(i+1)])
return "".join(result)
<commit_msg>Add is_permutation function. Simplify unique function.<commit_after>def unique(string):
counter = {}
for c in string:
if c in counter:
return False
else:
counter[c] = 1
else:
return True
def reverse(string):
result = []
for i in range(len(string)):
result.append(string[-(i+1)])
return "".join(result)
def is_permutation(str1, str2):
if len(str1) != len(str2):
return False
counter = {}
for i in range(len(str1)):
if str1[i] in counter:
counter[str1[i]] += 1
else:
counter[str1[i]] = 1
if str2[i] in counter:
counter[str2[i]] -= 1
else:
counter[str2[i]] = -1
for k in counter:
if counter[k] != 0:
return False
else:
return True
|
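For comparison, a sketch of the same two checks built on the standard library; behaviour matches the versions above, including returning True for the empty string.

from collections import Counter

def unique(string):
    # a string has no repeated characters iff deduplication preserves its length
    return len(set(string)) == len(string)

def is_permutation(str1, str2):
    # equal character multisets; the length test is just a cheap early exit
    return len(str1) == len(str2) and Counter(str1) == Counter(str2)

assert unique("abc") and not unique("aabc")
assert is_permutation("listen", "silent") and not is_permutation("ab", "abc")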
eda5e7e2bb83f35e18cd0b5402636d4e930e02b9
|
mamba/cli.py
|
mamba/cli.py
|
# -*- coding: utf-8 -*-
import sys
import argparse
from mamba import application_factory, __version__
from mamba.infrastructure import is_python3
def main():
arguments = _parse_arguments()
if arguments.version:
print(__version__)
return
factory = application_factory.ApplicationFactory(arguments)
runner = factory.create_runner()
runner.run()
if runner.has_failed_examples:
sys.exit(1)
def _parse_arguments():
parser = argparse.ArgumentParser()
parser.add_argument('--version', '-v', default=False, action='store_true', help='Display the version.')
parser.add_argument('--slow', '-s', default=0.075, type=float, help='Slow test threshold in seconds (default: %(default)s)')
parser.add_argument('--enable-coverage', default=False, action='store_true', help='Enable code coverage measurement (default: %(default)s)')
parser.add_argument('--format', '-f', default='documentation', action='store', help='Output format (default: %(default)s)')
parser.add_argument('specs', default=['spec'], nargs='*', help='Specs or directories with specs to run (default: %(default)s)')
if not is_python3():
parser.add_argument('--watch', '-w', default=False, action='store_true', help='Enable file watching support - not available with python3 (default: %(default)s)')
return parser.parse_args()
if __name__ == '__main__':
main()
|
# -*- coding: utf-8 -*-
import sys
import argparse
from mamba import application_factory, __version__
from mamba.infrastructure import is_python3
def main():
arguments = _parse_arguments()
if arguments.version:
print(__version__)
return
factory = application_factory.ApplicationFactory(arguments)
runner = factory.create_runner()
runner.run()
if runner.has_failed_examples:
sys.exit(1)
def _parse_arguments():
parser = argparse.ArgumentParser()
parser.add_argument('--version', '-v', default=False, action='store_true', help='Display the version.')
parser.add_argument('--slow', '-s', default=0.075, type=float, help='Slow test threshold in seconds (default: %(default)s)')
parser.add_argument('--enable-coverage', default=False, action='store_true', help='Enable code coverage measurement (default: %(default)s)')
parser.add_argument('--format', '-f', default='documentation', action='store', choices=['documentation', 'progress'], help='Output format (default: %(default)s)')
parser.add_argument('specs', default=['spec'], nargs='*', help='Specs or directories with specs to run (default: %(default)s)')
if not is_python3():
parser.add_argument('--watch', '-w', default=False, action='store_true', help='Enable file watching support - not available with python3 (default: %(default)s)')
return parser.parse_args()
if __name__ == '__main__':
main()
|
Use choices for specifying the type of reporter
|
Use choices for specifying the type of reporter
|
Python
|
mit
|
dex4er/mamba,nestorsalceda/mamba,angelsanz/mamba,jaimegildesagredo/mamba,markng/mamba,eferro/mamba,alejandrodob/mamba
|
# -*- coding: utf-8 -*-
import sys
import argparse
from mamba import application_factory, __version__
from mamba.infrastructure import is_python3
def main():
arguments = _parse_arguments()
if arguments.version:
print(__version__)
return
factory = application_factory.ApplicationFactory(arguments)
runner = factory.create_runner()
runner.run()
if runner.has_failed_examples:
sys.exit(1)
def _parse_arguments():
parser = argparse.ArgumentParser()
parser.add_argument('--version', '-v', default=False, action='store_true', help='Display the version.')
parser.add_argument('--slow', '-s', default=0.075, type=float, help='Slow test threshold in seconds (default: %(default)s)')
parser.add_argument('--enable-coverage', default=False, action='store_true', help='Enable code coverage measurement (default: %(default)s)')
parser.add_argument('--format', '-f', default='documentation', action='store', help='Output format (default: %(default)s)')
parser.add_argument('specs', default=['spec'], nargs='*', help='Specs or directories with specs to run (default: %(default)s)')
if not is_python3():
parser.add_argument('--watch', '-w', default=False, action='store_true', help='Enable file watching support - not available with python3 (default: %(default)s)')
return parser.parse_args()
if __name__ == '__main__':
main()
Use choices for specifying the type of reporter
|
# -*- coding: utf-8 -*-
import sys
import argparse
from mamba import application_factory, __version__
from mamba.infrastructure import is_python3
def main():
arguments = _parse_arguments()
if arguments.version:
print(__version__)
return
factory = application_factory.ApplicationFactory(arguments)
runner = factory.create_runner()
runner.run()
if runner.has_failed_examples:
sys.exit(1)
def _parse_arguments():
parser = argparse.ArgumentParser()
parser.add_argument('--version', '-v', default=False, action='store_true', help='Display the version.')
parser.add_argument('--slow', '-s', default=0.075, type=float, help='Slow test threshold in seconds (default: %(default)s)')
parser.add_argument('--enable-coverage', default=False, action='store_true', help='Enable code coverage measurement (default: %(default)s)')
parser.add_argument('--format', '-f', default='documentation', action='store', choices=['documentation', 'progress'], help='Output format (default: %(default)s)')
parser.add_argument('specs', default=['spec'], nargs='*', help='Specs or directories with specs to run (default: %(default)s)')
if not is_python3():
parser.add_argument('--watch', '-w', default=False, action='store_true', help='Enable file watching support - not available with python3 (default: %(default)s)')
return parser.parse_args()
if __name__ == '__main__':
main()
|
<commit_before># -*- coding: utf-8 -*-
import sys
import argparse
from mamba import application_factory, __version__
from mamba.infrastructure import is_python3
def main():
arguments = _parse_arguments()
if arguments.version:
print(__version__)
return
factory = application_factory.ApplicationFactory(arguments)
runner = factory.create_runner()
runner.run()
if runner.has_failed_examples:
sys.exit(1)
def _parse_arguments():
parser = argparse.ArgumentParser()
parser.add_argument('--version', '-v', default=False, action='store_true', help='Display the version.')
parser.add_argument('--slow', '-s', default=0.075, type=float, help='Slow test threshold in seconds (default: %(default)s)')
parser.add_argument('--enable-coverage', default=False, action='store_true', help='Enable code coverage measurement (default: %(default)s)')
parser.add_argument('--format', '-f', default='documentation', action='store', help='Output format (default: %(default)s)')
parser.add_argument('specs', default=['spec'], nargs='*', help='Specs or directories with specs to run (default: %(default)s)')
if not is_python3():
parser.add_argument('--watch', '-w', default=False, action='store_true', help='Enable file watching support - not available with python3 (default: %(default)s)')
return parser.parse_args()
if __name__ == '__main__':
main()
<commit_msg>Use choices for specifying the type of reporter<commit_after>
|
# -*- coding: utf-8 -*-
import sys
import argparse
from mamba import application_factory, __version__
from mamba.infrastructure import is_python3
def main():
arguments = _parse_arguments()
if arguments.version:
print(__version__)
return
factory = application_factory.ApplicationFactory(arguments)
runner = factory.create_runner()
runner.run()
if runner.has_failed_examples:
sys.exit(1)
def _parse_arguments():
parser = argparse.ArgumentParser()
parser.add_argument('--version', '-v', default=False, action='store_true', help='Display the version.')
parser.add_argument('--slow', '-s', default=0.075, type=float, help='Slow test threshold in seconds (default: %(default)s)')
parser.add_argument('--enable-coverage', default=False, action='store_true', help='Enable code coverage measurement (default: %(default)s)')
parser.add_argument('--format', '-f', default='documentation', action='store', choices=['documentation', 'progress'], help='Output format (default: %(default)s)')
parser.add_argument('specs', default=['spec'], nargs='*', help='Specs or directories with specs to run (default: %(default)s)')
if not is_python3():
parser.add_argument('--watch', '-w', default=False, action='store_true', help='Enable file watching support - not available with python3 (default: %(default)s)')
return parser.parse_args()
if __name__ == '__main__':
main()
|
# -*- coding: utf-8 -*-
import sys
import argparse
from mamba import application_factory, __version__
from mamba.infrastructure import is_python3
def main():
arguments = _parse_arguments()
if arguments.version:
print(__version__)
return
factory = application_factory.ApplicationFactory(arguments)
runner = factory.create_runner()
runner.run()
if runner.has_failed_examples:
sys.exit(1)
def _parse_arguments():
parser = argparse.ArgumentParser()
parser.add_argument('--version', '-v', default=False, action='store_true', help='Display the version.')
parser.add_argument('--slow', '-s', default=0.075, type=float, help='Slow test threshold in seconds (default: %(default)s)')
parser.add_argument('--enable-coverage', default=False, action='store_true', help='Enable code coverage measurement (default: %(default)s)')
parser.add_argument('--format', '-f', default='documentation', action='store', help='Output format (default: %(default)s)')
parser.add_argument('specs', default=['spec'], nargs='*', help='Specs or directories with specs to run (default: %(default)s)')
if not is_python3():
parser.add_argument('--watch', '-w', default=False, action='store_true', help='Enable file watching support - not available with python3 (default: %(default)s)')
return parser.parse_args()
if __name__ == '__main__':
main()
Use choices for specifying the type of reporter# -*- coding: utf-8 -*-
import sys
import argparse
from mamba import application_factory, __version__
from mamba.infrastructure import is_python3
def main():
arguments = _parse_arguments()
if arguments.version:
print(__version__)
return
factory = application_factory.ApplicationFactory(arguments)
runner = factory.create_runner()
runner.run()
if runner.has_failed_examples:
sys.exit(1)
def _parse_arguments():
parser = argparse.ArgumentParser()
parser.add_argument('--version', '-v', default=False, action='store_true', help='Display the version.')
parser.add_argument('--slow', '-s', default=0.075, type=float, help='Slow test threshold in seconds (default: %(default)s)')
parser.add_argument('--enable-coverage', default=False, action='store_true', help='Enable code coverage measurement (default: %(default)s)')
parser.add_argument('--format', '-f', default='documentation', action='store', choices=['documentation', 'progress'], help='Output format (default: %(default)s)')
parser.add_argument('specs', default=['spec'], nargs='*', help='Specs or directories with specs to run (default: %(default)s)')
if not is_python3():
parser.add_argument('--watch', '-w', default=False, action='store_true', help='Enable file watching support - not available with python3 (default: %(default)s)')
return parser.parse_args()
if __name__ == '__main__':
main()
|
<commit_before># -*- coding: utf-8 -*-
import sys
import argparse
from mamba import application_factory, __version__
from mamba.infrastructure import is_python3
def main():
arguments = _parse_arguments()
if arguments.version:
print(__version__)
return
factory = application_factory.ApplicationFactory(arguments)
runner = factory.create_runner()
runner.run()
if runner.has_failed_examples:
sys.exit(1)
def _parse_arguments():
parser = argparse.ArgumentParser()
parser.add_argument('--version', '-v', default=False, action='store_true', help='Display the version.')
parser.add_argument('--slow', '-s', default=0.075, type=float, help='Slow test threshold in seconds (default: %(default)s)')
parser.add_argument('--enable-coverage', default=False, action='store_true', help='Enable code coverage measurement (default: %(default)s)')
parser.add_argument('--format', '-f', default='documentation', action='store', help='Output format (default: %(default)s)')
parser.add_argument('specs', default=['spec'], nargs='*', help='Specs or directories with specs to run (default: %(default)s)')
if not is_python3():
parser.add_argument('--watch', '-w', default=False, action='store_true', help='Enable file watching support - not available with python3 (default: %(default)s)')
return parser.parse_args()
if __name__ == '__main__':
main()
<commit_msg>Use choices for specifying the type of reporter<commit_after># -*- coding: utf-8 -*-
import sys
import argparse
from mamba import application_factory, __version__
from mamba.infrastructure import is_python3
def main():
arguments = _parse_arguments()
if arguments.version:
print(__version__)
return
factory = application_factory.ApplicationFactory(arguments)
runner = factory.create_runner()
runner.run()
if runner.has_failed_examples:
sys.exit(1)
def _parse_arguments():
parser = argparse.ArgumentParser()
parser.add_argument('--version', '-v', default=False, action='store_true', help='Display the version.')
parser.add_argument('--slow', '-s', default=0.075, type=float, help='Slow test threshold in seconds (default: %(default)s)')
parser.add_argument('--enable-coverage', default=False, action='store_true', help='Enable code coverage measurement (default: %(default)s)')
parser.add_argument('--format', '-f', default='documentation', action='store', choices=['documentation', 'progress'], help='Output format (default: %(default)s)')
parser.add_argument('specs', default=['spec'], nargs='*', help='Specs or directories with specs to run (default: %(default)s)')
if not is_python3():
parser.add_argument('--watch', '-w', default=False, action='store_true', help='Enable file watching support - not available with python3 (default: %(default)s)')
return parser.parse_args()
if __name__ == '__main__':
main()
|
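What choices buys here, shown in isolation: argparse validates the value at parse time and exits with a usage error for anything outside the list, so an unknown reporter name never reaches the application factory.

import argparse

parser = argparse.ArgumentParser(prog="demo")
parser.add_argument("--format", "-f", default="documentation",
                    choices=["documentation", "progress"])

print(parser.parse_args(["-f", "progress"]).format)  # -> progress
# parser.parse_args(["-f", "junit"]) prints a usage error and exits with status 2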
6b72e0bbb09e8f8b6d8821252e34aeca89693441
|
mt_core/backends/__init__.py
|
mt_core/backends/__init__.py
|
# coding=UTF-8
|
# coding=UTF-8
class GuestInfo:
OS_WINDOWS = "windows"
OS_LINUX = "linux"
def __init__(self, username, password, os):
self.username = username
self.password = password
self.os = os
class Hypervisor:
# Full clone
CLONE_FULL = 0
# Linked clone
CLONE_LINKED = 1
def clone(self, src_vm, dst_vm, type=CLONE_LINKED):
"""
Clone a virtual machine
:param src_vm: path of the template VM
:param dst_vm: path of the destination VM
:param type: clone type
:return:
"""
pass
def set_cpu_count(self, cpu_count):
"""
Set the number of CPUs
:param cpu_count: number of CPUs
"""
pass
def set_ram(self, ram):
"""
Set the RAM size
:param ram: RAM size in MB
"""
pass
def power_on(self, vm):
pass
def power_off(self, vm):
pass
def reset(self, vm):
pass
def shutdown_guest(self, vm):
pass
def restart_guest(self, vm):
pass
def create_vlan(self, vm, vlan_name):
pass
def delete_vlan(self, vm, vlan_name):
pass
def add_nic(self, vm, index, vlan_name):
pass
def remove_nic(self, vm, index):
pass
def put_file(self, vm, local_path, guest_path, guest_info):
"""
Copy a local file into the guest operating system
:param vm: VM path
:param local_path: local file path
:param guest_path: path inside the guest OS
:param guest_info: guest OS type
"""
pass
def get_file(self, vm, local_path, guest_path, guest_info):
"""
Transfer a file from the guest operating system to the local machine
:param vm: VM path
:param local_path: local file path
:param guest_path: path inside the guest OS
:param guest_info: guest OS type
"""
pass
def exec_guest(self, vm, cmd, guest_info):
"""
Execute the given command inside the VM
:param vm: VM path
:param cmd: command line
:param guest_info: guest OS information
:return: return value
"""
def create_snapshot(self, vm, name):
"""
Create a snapshot
:param vm: VM path
:param name: snapshot name
"""
def revert_snapshot(self, vm, name):
"""
Revert to a snapshot
:param vm: VM path
:param name: snapshot name
"""
|
Add Hypervisor base class for workstation and virtualbox
|
Add Hypervisor base class for workstation and virtualbox
|
Python
|
mit
|
CADTS-Bachelor/mini-testbed
|
# coding=UTF-8
Add Hypervisor base class for workstation and virtualbox
|
# coding=UTF-8
class GuestInfo:
OS_WINDOWS = "windows"
OS_LINUX = "linux"
def __init__(self, username, password, os):
self.username = username
self.password = password
self.os = os
class Hypervisor:
# Full clone
CLONE_FULL = 0
# Linked clone
CLONE_LINKED = 1
def clone(self, src_vm, dst_vm, type=CLONE_LINKED):
"""
Clone a virtual machine
:param src_vm: path of the template VM
:param dst_vm: path of the destination VM
:param type: clone type
:return:
"""
pass
def set_cpu_count(self, cpu_count):
"""
Set the number of CPUs
:param cpu_count: number of CPUs
"""
pass
def set_ram(self, ram):
"""
Set the RAM size
:param ram: RAM size in MB
"""
pass
def power_on(self, vm):
pass
def power_off(self, vm):
pass
def reset(self, vm):
pass
def shutdown_guest(self, vm):
pass
def restart_guest(self, vm):
pass
def create_vlan(self, vm, vlan_name):
pass
def delete_vlan(self, vm, vlan_name):
pass
def add_nic(self, vm, index, vlan_name):
pass
def remove_nic(self, vm, index):
pass
def put_file(self, vm, local_path, guest_path, guest_info):
"""
Copy a local file into the guest operating system
:param vm: VM path
:param local_path: local file path
:param guest_path: path inside the guest OS
:param guest_info: guest OS type
"""
pass
def get_file(self, vm, local_path, guest_path, guest_info):
"""
Transfer a file from the guest operating system to the local machine
:param vm: VM path
:param local_path: local file path
:param guest_path: path inside the guest OS
:param guest_info: guest OS type
"""
pass
def exec_guest(self, vm, cmd, guest_info):
"""
Execute the given command inside the VM
:param vm: VM path
:param cmd: command line
:param guest_info: guest OS information
:return: return value
"""
def create_snapshot(self, vm, name):
"""
Create a snapshot
:param vm: VM path
:param name: snapshot name
"""
def revert_snapshot(self, vm, name):
"""
Revert to a snapshot
:param vm: VM path
:param name: snapshot name
"""
|
<commit_before># coding=UTF-8
<commit_msg>Add Hypervisor base class for workstation and virtualbox<commit_after>
|
# coding=UTF-8
class GuestInfo:
OS_WINDOWS = "windows"
OS_LINUX = "linux"
def __init__(self, username, password, os):
self.username = username
self.password = password
self.os = os
class Hypervisor:
# Full clone
CLONE_FULL = 0
# Linked clone
CLONE_LINKED = 1
def clone(self, src_vm, dst_vm, type=CLONE_LINKED):
"""
Clone a virtual machine
:param src_vm: path of the template VM
:param dst_vm: path of the destination VM
:param type: clone type
:return:
"""
pass
def set_cpu_count(self, cpu_count):
"""
Set the number of CPUs
:param cpu_count: number of CPUs
"""
pass
def set_ram(self, ram):
"""
Set the RAM size
:param ram: RAM size in MB
"""
pass
def power_on(self, vm):
pass
def power_off(self, vm):
pass
def reset(self, vm):
pass
def shutdown_guest(self, vm):
pass
def restart_guest(self, vm):
pass
def create_vlan(self, vm, vlan_name):
pass
def delete_vlan(self, vm, vlan_name):
pass
def add_nic(self, vm, index, vlan_name):
pass
def remove_nic(self, vm, index):
pass
def put_file(self, vm, local_path, guest_path, guest_info):
"""
Copy a local file into the guest operating system
:param vm: VM path
:param local_path: local file path
:param guest_path: path inside the guest OS
:param guest_info: guest OS type
"""
pass
def get_file(self, vm, local_path, guest_path, guest_info):
"""
Transfer a file from the guest operating system to the local machine
:param vm: VM path
:param local_path: local file path
:param guest_path: path inside the guest OS
:param guest_info: guest OS type
"""
pass
def exec_guest(self, vm, cmd, guest_info):
"""
Execute the given command inside the VM
:param vm: VM path
:param cmd: command line
:param guest_info: guest OS information
:return: return value
"""
def create_snapshot(self, vm, name):
"""
Create a snapshot
:param vm: VM path
:param name: snapshot name
"""
def revert_snapshot(self, vm, name):
"""
Revert to a snapshot
:param vm: VM path
:param name: snapshot name
"""
|
# coding=UTF-8
Add Hypervisor base class for workstation and virtualbox# coding=UTF-8
class GuestInfo:
OS_WINDOWS = "windows"
OS_LINUX = "linux"
def __init__(self, username, password, os):
self.username = username
self.password = password
self.os = os
class Hypervisor:
# Full clone
CLONE_FULL = 0
# Linked clone
CLONE_LINKED = 1
def clone(self, src_vm, dst_vm, type=CLONE_LINKED):
"""
Clone a virtual machine
:param src_vm: path of the template VM
:param dst_vm: path of the destination VM
:param type: clone type
:return:
"""
pass
def set_cpu_count(self, cpu_count):
"""
Set the number of CPUs
:param cpu_count: number of CPUs
"""
pass
def set_ram(self, ram):
"""
Set the RAM size
:param ram: RAM size in MB
"""
pass
def power_on(self, vm):
pass
def power_off(self, vm):
pass
def reset(self, vm):
pass
def shutdown_guest(self, vm):
pass
def restart_guest(self, vm):
pass
def create_vlan(self, vm, vlan_name):
pass
def delete_vlan(self, vm, vlan_name):
pass
def add_nic(self, vm, index, vlan_name):
pass
def remove_nic(self, vm, index):
pass
def put_file(self, vm, local_path, guest_path, guest_info):
"""
Copy a local file into the guest operating system
:param vm: VM path
:param local_path: local file path
:param guest_path: path inside the guest OS
:param guest_info: guest OS type
"""
pass
def get_file(self, vm, local_path, guest_path, guest_info):
"""
Transfer a file from the guest operating system to the local machine
:param vm: VM path
:param local_path: local file path
:param guest_path: path inside the guest OS
:param guest_info: guest OS type
"""
pass
def exec_guest(self, vm, cmd, guest_info):
"""
Execute the given command inside the VM
:param vm: VM path
:param cmd: command line
:param guest_info: guest OS information
:return: return value
"""
def create_snapshot(self, vm, name):
"""
Create a snapshot
:param vm: VM path
:param name: snapshot name
"""
def revert_snapshot(self, vm, name):
"""
Revert to a snapshot
:param vm: VM path
:param name: snapshot name
"""
|
<commit_before># coding=UTF-8
<commit_msg>Add Hypervisor base class for workstation and virtualbox<commit_after># coding=UTF-8
class GuestInfo:
OS_WINDOWS = "windows"
OS_LINUX = "linux"
def __init__(self, username, password, os):
self.username = username
self.password = password
self.os = os
class Hypervisor:
# Full clone
CLONE_FULL = 0
# Linked clone
CLONE_LINKED = 1
def clone(self, src_vm, dst_vm, type=CLONE_LINKED):
"""
Clone a virtual machine
:param src_vm: path of the template VM
:param dst_vm: path of the destination VM
:param type: clone type
:return:
"""
pass
def set_cpu_count(self, cpu_count):
"""
Set the number of CPUs
:param cpu_count: number of CPUs
"""
pass
def set_ram(self, ram):
"""
Set the RAM size
:param ram: RAM size in MB
"""
pass
def power_on(self, vm):
pass
def power_off(self, vm):
pass
def reset(self, vm):
pass
def shutdown_guest(self, vm):
pass
def restart_guest(self, vm):
pass
def create_vlan(self, vm, vlan_name):
pass
def delete_vlan(self, vm, vlan_name):
pass
def add_nic(self, vm, index, vlan_name):
pass
def remove_nic(self, vm, index):
pass
def put_file(self, vm, local_path, guest_path, guest_info):
"""
Copy a local file into the guest operating system
:param vm: VM path
:param local_path: local file path
:param guest_path: path inside the guest OS
:param guest_info: guest OS type
"""
pass
def get_file(self, vm, local_path, guest_path, guest_info):
"""
Transfer a file from the guest operating system to the local machine
:param vm: VM path
:param local_path: local file path
:param guest_path: path inside the guest OS
:param guest_info: guest OS type
"""
pass
def exec_guest(self, vm, cmd, guest_info):
"""
Execute the given command inside the VM
:param vm: VM path
:param cmd: command line
:param guest_info: guest OS information
:return: return value
"""
def create_snapshot(self, vm, name):
"""
Create a snapshot
:param vm: VM path
:param name: snapshot name
"""
def revert_snapshot(self, vm, name):
"""
Revert to a snapshot
:param vm: VM path
:param name: snapshot name
"""
|
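An alternative sketch of the same base class using abc (Python 3), so a Workstation or VirtualBox backend that forgets to override a method fails at instantiation instead of silently hitting a pass body; the method set is trimmed to three for brevity.

from abc import ABC, abstractmethod

class Hypervisor(ABC):
    CLONE_FULL = 0    # full clone
    CLONE_LINKED = 1  # linked clone

    @abstractmethod
    def clone(self, src_vm, dst_vm, type=CLONE_LINKED):
        """Clone src_vm to dst_vm using the given clone type."""

    @abstractmethod
    def power_on(self, vm):
        """Power on the VM at the given path."""

    @abstractmethod
    def power_off(self, vm):
        """Power off the VM at the given path."""

# Hypervisor() now raises TypeError; only subclasses that implement every
# abstract method can be instantiated.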
e478a70549164bee7351f01c161a8b0ef6f8c1c8
|
dashboard/src/api.py
|
dashboard/src/api.py
|
import requests
import os
class Api:
_API_ENDPOINT = 'api/v1'
def __init__(self, url, token=None):
self.url = Api.add_slash(url)
self.token = token
def is_api_running(self):
try:
res = requests.get(self.url)
if res.status_code in {200, 401}:
return True
except requests.exceptions.ConnectionError:
pass
return False
def add_slash(url):
if url and not url.endswith('/'):
url += '/'
return url
def get(self):
return requests.get(self.url)
def print_error_response(self, response, message_key):
error_message = response.json().get(message_key, "Server does not sent error message")
print(" Server returned HTTP code {c}".format(c=response.status_code))
print(" Error message: {m}".format(m=error_message))
|
"""Module with class representing common API."""
import requests
import os
class Api:
"""Class representing common API."""
_API_ENDPOINT = 'api/v1'
def __init__(self, url, token=None):
"""Set the API endpoint and store the authorization token if provided."""
self.url = Api.add_slash(url)
self.token = token
def is_api_running(self):
"""Check if the API is available for calls."""
try:
res = requests.get(self.url)
if res.status_code in {200, 401}:
return True
except requests.exceptions.ConnectionError:
pass
return False
@staticmethod
def add_slash(url):
"""Add a slash at end of URL, if the slash is not provided."""
if url and not url.endswith('/'):
url += '/'
return url
def get(self):
"""Use GET method to access API."""
return requests.get(self.url)
def print_error_response(self, response, message_key):
"""Print error message if anything goes wrong."""
print(" Server returned HTTP code {c}".format(c=response.status_code))
print(" Error message: {m}".format(m=error_message))
|
Add staticmethod annotation + docstrings to module, class, and all public methods
|
Add staticmethod annotation + docstrings to module, class, and all public methods
|
Python
|
apache-2.0
|
jpopelka/fabric8-analytics-common,jpopelka/fabric8-analytics-common,jpopelka/fabric8-analytics-common,tisnik/fabric8-analytics-common,tisnik/fabric8-analytics-common,tisnik/fabric8-analytics-common
|
import requests
import os
class Api:
_API_ENDPOINT = 'api/v1'
def __init__(self, url, token=None):
self.url = Api.add_slash(url)
self.token = token
def is_api_running(self):
try:
res = requests.get(self.url)
if res.status_code in {200, 401}:
return True
except requests.exceptions.ConnectionError:
pass
return False
def add_slash(url):
if url and not url.endswith('/'):
url += '/'
return url
def get(self):
return requests.get(self.url)
def print_error_response(self, response, message_key):
error_message = response.json().get(message_key, "Server did not send an error message")
print(" Server returned HTTP code {c}".format(c=response.status_code))
print(" Error message: {m}".format(m=error_message))
Add staticmethod annotation + docstrings to module, class, and all public methods
|
"""Module with class representing common API."""
import requests
import os
class Api:
"""Class representing common API."""
_API_ENDPOINT = 'api/v1'
def __init__(self, url, token=None):
"""Set the API endpoint and store the authorization token if provided."""
self.url = Api.add_slash(url)
self.token = token
def is_api_running(self):
"""Check if the API is available for calls."""
try:
res = requests.get(self.url)
if res.status_code in {200, 401}:
return True
except requests.exceptions.ConnectionError:
pass
return False
@staticmethod
def add_slash(url):
"""Add a slash at end of URL, if the slash is not provided."""
if url and not url.endswith('/'):
url += '/'
return url
def get(self):
"""Use GET method to access API."""
return requests.get(self.url)
def print_error_response(self, response, message_key):
"""Print error message if anything goes wrong."""
print(" Server returned HTTP code {c}".format(c=response.status_code))
print(" Error message: {m}".format(m=error_message))
|
<commit_before>import requests
import os
class Api:
_API_ENDPOINT = 'api/v1'
def __init__(self, url, token=None):
self.url = Api.add_slash(url)
self.token = token
def is_api_running(self):
try:
res = requests.get(self.url)
if res.status_code in {200, 401}:
return True
except requests.exceptions.ConnectionError:
pass
return False
def add_slash(url):
if url and not url.endswith('/'):
url += '/'
return url
def get(self):
return requests.get(self.url)
def print_error_response(self, response, message_key):
error_message = response.json().get(message_key, "Server did not send error message")
print(" Server returned HTTP code {c}".format(c=response.status_code))
print(" Error message: {m}".format(m=error_message))
<commit_msg>Add staticmethod annotation + docstrings to module, class, and all public methods<commit_after>
|
"""Module with class representing common API."""
import requests
import os
class Api:
"""Class representing common API."""
_API_ENDPOINT = 'api/v1'
def __init__(self, url, token=None):
"""Set the API endpoint and store the authorization token if provided."""
self.url = Api.add_slash(url)
self.token = token
def is_api_running(self):
"""Check if the API is available for calls."""
try:
res = requests.get(self.url)
if res.status_code in {200, 401}:
return True
except requests.exceptions.ConnectionError:
pass
return False
@staticmethod
def add_slash(url):
"""Add a slash at end of URL, if the slash is not provided."""
if url and not url.endswith('/'):
url += '/'
return url
def get(self):
"""Use GET method to access API."""
return requests.get(self.url)
def print_error_response(self, response, message_key):
"""Print error message if anything goes wrong."""
print(" Server returned HTTP code {c}".format(c=response.status_code))
print(" Error message: {m}".format(m=error_message))
|
import requests
import os
class Api:
_API_ENDPOINT = 'api/v1'
def __init__(self, url, token=None):
self.url = Api.add_slash(url)
self.token = token
def is_api_running(self):
try:
res = requests.get(self.url)
if res.status_code in {200, 401}:
return True
except requests.exceptions.ConnectionError:
pass
return False
def add_slash(url):
if url and not url.endswith('/'):
url += '/'
return url
def get(self):
return requests.get(self.url)
def print_error_response(self, response, message_key):
error_message = response.json().get(message_key, "Server did not send error message")
print(" Server returned HTTP code {c}".format(c=response.status_code))
print(" Error message: {m}".format(m=error_message))
Add staticmethod annotation + docstrings to module, class, and all public methods
"""Module with class representing common API."""
import requests
import os
class Api:
"""Class representing common API."""
_API_ENDPOINT = 'api/v1'
def __init__(self, url, token=None):
"""Set the API endpoint and store the authorization token if provided."""
self.url = Api.add_slash(url)
self.token = token
def is_api_running(self):
"""Check if the API is available for calls."""
try:
res = requests.get(self.url)
if res.status_code in {200, 401}:
return True
except requests.exceptions.ConnectionError:
pass
return False
@staticmethod
def add_slash(url):
"""Add a slash at end of URL, if the slash is not provided."""
if url and not url.endswith('/'):
url += '/'
return url
def get(self):
"""Use GET method to access API."""
return requests.get(self.url)
def print_error_response(self, response, message_key):
"""Print error message if anything goes wrong."""
print(" Server returned HTTP code {c}".format(c=response.status_code))
print(" Error message: {m}".format(m=error_message))
|
<commit_before>import requests
import os
class Api:
_API_ENDPOINT = 'api/v1'
def __init__(self, url, token=None):
self.url = Api.add_slash(url)
self.token = token
def is_api_running(self):
try:
res = requests.get(self.url)
if res.status_code in {200, 401}:
return True
except requests.exceptions.ConnectionError:
pass
return False
def add_slash(url):
if url and not url.endswith('/'):
url += '/'
return url
def get(self):
return requests.get(self.url)
def print_error_response(self, response, message_key):
error_message = response.json().get(message_key, "Server did not send error message")
print(" Server returned HTTP code {c}".format(c=response.status_code))
print(" Error message: {m}".format(m=error_message))
<commit_msg>Add staticmethod annotation + docstrings to module, class, and all public methods<commit_after>"""Module with class representing common API."""
import requests
import os
class Api:
"""Class representing common API."""
_API_ENDPOINT = 'api/v1'
def __init__(self, url, token=None):
"""Set the API endpoint and store the authorization token if provided."""
self.url = Api.add_slash(url)
self.token = token
def is_api_running(self):
"""Check if the API is available for calls."""
try:
res = requests.get(self.url)
if res.status_code in {200, 401}:
return True
except requests.exceptions.ConnectionError:
pass
return False
@staticmethod
def add_slash(url):
"""Add a slash at end of URL, if the slash is not provided."""
if url and not url.endswith('/'):
url += '/'
return url
def get(self):
"""Use GET method to access API."""
return requests.get(self.url)
def print_error_response(self, response, message_key):
"""Print error message if anything goes wrong."""
print(" Server returned HTTP code {c}".format(c=response.status_code))
print(" Error message: {m}".format(m=error_message))
|
abcee44e3a2b20856cf78d38de5a1a72c5b9a097
|
retdec/decompilation_phase.py
|
retdec/decompilation_phase.py
|
#
# Project: retdec-python
# Copyright: (c) 2015 by Petr Zemek <s3rvac@gmail.com> and contributors
# License: MIT, see the LICENSE file for more details
#
"""Phase of a decompilation."""
class DecompilationPhase:
"""Phase of a decompilation."""
def __init__(self, name, part, description, completion):
"""Initializes a phase."""
self._name = name
self._part = part
self._description = description
self._completion = completion
@property
def name(self):
"""Name of the phase (`str`)."""
return self._name
@property
def part(self):
"""Part to which the phase belongs (`str`).
May be ``None`` if the phase does not belong to any part.
"""
return self._part
@property
def description(self):
"""Description of the phase (`str`)."""
return self._description
@property
def completion(self):
"""Completion (in percentages, ``0-100``)."""
return self._completion
def __eq__(self, other):
return (self.__class__ == other.__class__ and
self.__dict__ == other.__dict__)
def __ne__(self, other):
return not self == other
def __repr__(self):
return '{}(name={!r}, part={!r}, description={!r}, completion={})'.format(
__name__ + '.' + self.__class__.__name__,
self.name,
self.part,
self.description,
self.completion
)
|
#
# Project: retdec-python
# Copyright: (c) 2015 by Petr Zemek <s3rvac@gmail.com> and contributors
# License: MIT, see the LICENSE file for more details
#
"""Phase of a decompilation."""
class DecompilationPhase:
"""Phase of a decompilation."""
def __init__(self, name, part, description, completion):
"""Initializes the phase."""
self._name = name
self._part = part
self._description = description
self._completion = completion
@property
def name(self):
"""Name of the phase (`str`)."""
return self._name
@property
def part(self):
"""Part to which the phase belongs (`str`).
May be ``None`` if the phase does not belong to any part.
"""
return self._part
@property
def description(self):
"""Description of the phase (`str`)."""
return self._description
@property
def completion(self):
"""Completion (in percentages, ``0-100``)."""
return self._completion
def __eq__(self, other):
return (self.__class__ == other.__class__ and
self.__dict__ == other.__dict__)
def __ne__(self, other):
return not self == other
def __repr__(self):
return '{}(name={!r}, part={!r}, description={!r}, completion={})'.format(
__name__ + '.' + self.__class__.__name__,
self.name,
self.part,
self.description,
self.completion
)
|
Fix the description of DecompilationPhase.__init__().
|
Fix the description of DecompilationPhase.__init__().
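A minimal usage sketch of the class whose docstring is being fixed (the field
values below are hypothetical, not from the repository):
from retdec.decompilation_phase import DecompilationPhase
phase = DecompilationPhase(
    name='Decompilation',
    part='Back-End',
    description='Decompiling the binary',
    completion=42,
)
print(repr(phase))
# retdec.decompilation_phase.DecompilationPhase(name='Decompilation',
#     part='Back-End', description='Decompiling the binary', completion=42)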
|
Python
|
mit
|
s3rvac/retdec-python
|
#
# Project: retdec-python
# Copyright: (c) 2015 by Petr Zemek <s3rvac@gmail.com> and contributors
# License: MIT, see the LICENSE file for more details
#
"""Phase of a decompilation."""
class DecompilationPhase:
"""Phase of a decompilation."""
def __init__(self, name, part, description, completion):
"""Initializes a phase."""
self._name = name
self._part = part
self._description = description
self._completion = completion
@property
def name(self):
"""Name of the phase (`str`)."""
return self._name
@property
def part(self):
"""Part to which the phase belongs (`str`).
May be ``None`` if the phase does not belong to any part.
"""
return self._part
@property
def description(self):
"""Description of the phase (`str`)."""
return self._description
@property
def completion(self):
"""Completion (in percentages, ``0-100``)."""
return self._completion
def __eq__(self, other):
return (self.__class__ == other.__class__ and
self.__dict__ == other.__dict__)
def __ne__(self, other):
return not self == other
def __repr__(self):
return '{}(name={!r}, part={!r}, description={!r}, completion={})'.format(
__name__ + '.' + self.__class__.__name__,
self.name,
self.part,
self.description,
self.completion
)
Fix the description of DecompilationPhase.__init__().
|
#
# Project: retdec-python
# Copyright: (c) 2015 by Petr Zemek <s3rvac@gmail.com> and contributors
# License: MIT, see the LICENSE file for more details
#
"""Phase of a decompilation."""
class DecompilationPhase:
"""Phase of a decompilation."""
def __init__(self, name, part, description, completion):
"""Initializes the phase."""
self._name = name
self._part = part
self._description = description
self._completion = completion
@property
def name(self):
"""Name of the phase (`str`)."""
return self._name
@property
def part(self):
"""Part to which the phase belongs (`str`).
May be ``None`` if the phase does not belong to any part.
"""
return self._part
@property
def description(self):
"""Description of the phase (`str`)."""
return self._description
@property
def completion(self):
"""Completion (in percentages, ``0-100``)."""
return self._completion
def __eq__(self, other):
return (self.__class__ == other.__class__ and
self.__dict__ == other.__dict__)
def __ne__(self, other):
return not self == other
def __repr__(self):
return '{}(name={!r}, part={!r}, description={!r}, completion={})'.format(
__name__ + '.' + self.__class__.__name__,
self.name,
self.part,
self.description,
self.completion
)
|
<commit_before>#
# Project: retdec-python
# Copyright: (c) 2015 by Petr Zemek <s3rvac@gmail.com> and contributors
# License: MIT, see the LICENSE file for more details
#
"""Phase of a decompilation."""
class DecompilationPhase:
"""Phase of a decompilation."""
def __init__(self, name, part, description, completion):
"""Initializes a phase."""
self._name = name
self._part = part
self._description = description
self._completion = completion
@property
def name(self):
"""Name of the phase (`str`)."""
return self._name
@property
def part(self):
"""Part to which the phase belongs (`str`).
May be ``None`` if the phase does not belong to any part.
"""
return self._part
@property
def description(self):
"""Description of the phase (`str`)."""
return self._description
@property
def completion(self):
"""Completion (in percentages, ``0-100``)."""
return self._completion
def __eq__(self, other):
return (self.__class__ == other.__class__ and
self.__dict__ == other.__dict__)
def __ne__(self, other):
return not self == other
def __repr__(self):
return '{}(name={!r}, part={!r}, description={!r}, completion={})'.format(
__name__ + '.' + self.__class__.__name__,
self.name,
self.part,
self.description,
self.completion
)
<commit_msg>Fix the description of DecompilationPhase.__init__().<commit_after>
|
#
# Project: retdec-python
# Copyright: (c) 2015 by Petr Zemek <s3rvac@gmail.com> and contributors
# License: MIT, see the LICENSE file for more details
#
"""Phase of a decompilation."""
class DecompilationPhase:
"""Phase of a decompilation."""
def __init__(self, name, part, description, completion):
"""Initializes the phase."""
self._name = name
self._part = part
self._description = description
self._completion = completion
@property
def name(self):
"""Name of the phase (`str`)."""
return self._name
@property
def part(self):
"""Part to which the phase belongs (`str`).
May be ``None`` if the phase does not belong to any part.
"""
return self._part
@property
def description(self):
"""Description of the phase (`str`)."""
return self._description
@property
def completion(self):
"""Completion (in percentages, ``0-100``)."""
return self._completion
def __eq__(self, other):
return (self.__class__ == other.__class__ and
self.__dict__ == other.__dict__)
def __ne__(self, other):
return not self == other
def __repr__(self):
return '{}(name={!r}, part={!r}, description={!r}, completion={})'.format(
__name__ + '.' + self.__class__.__name__,
self.name,
self.part,
self.description,
self.completion
)
|
#
# Project: retdec-python
# Copyright: (c) 2015 by Petr Zemek <s3rvac@gmail.com> and contributors
# License: MIT, see the LICENSE file for more details
#
"""Phase of a decompilation."""
class DecompilationPhase:
"""Phase of a decompilation."""
def __init__(self, name, part, description, completion):
"""Initializes a phase."""
self._name = name
self._part = part
self._description = description
self._completion = completion
@property
def name(self):
"""Name of the phase (`str`)."""
return self._name
@property
def part(self):
"""Part to which the phase belongs (`str`).
May be ``None`` if the phase does not belong to any part.
"""
return self._part
@property
def description(self):
"""Description of the phase (`str`)."""
return self._description
@property
def completion(self):
"""Completion (in percentages, ``0-100``)."""
return self._completion
def __eq__(self, other):
return (self.__class__ == other.__class__ and
self.__dict__ == other.__dict__)
def __ne__(self, other):
return not self == other
def __repr__(self):
return '{}(name={!r}, part={!r}, description={!r}, completion={})'.format(
__name__ + '.' + self.__class__.__name__,
self.name,
self.part,
self.description,
self.completion
)
Fix the description of DecompilationPhase.__init__().
#
# Project: retdec-python
# Copyright: (c) 2015 by Petr Zemek <s3rvac@gmail.com> and contributors
# License: MIT, see the LICENSE file for more details
#
"""Phase of a decompilation."""
class DecompilationPhase:
"""Phase of a decompilation."""
def __init__(self, name, part, description, completion):
"""Initializes the phase."""
self._name = name
self._part = part
self._description = description
self._completion = completion
@property
def name(self):
"""Name of the phase (`str`)."""
return self._name
@property
def part(self):
"""Part to which the phase belongs (`str`).
May be ``None`` if the phase does not belong to any part.
"""
return self._part
@property
def description(self):
"""Description of the phase (`str`)."""
return self._description
@property
def completion(self):
"""Completion (in percentages, ``0-100``)."""
return self._completion
def __eq__(self, other):
return (self.__class__ == other.__class__ and
self.__dict__ == other.__dict__)
def __ne__(self, other):
return not self == other
def __repr__(self):
return '{}(name={!r}, part={!r}, description={!r}, completion={})'.format(
__name__ + '.' + self.__class__.__name__,
self.name,
self.part,
self.description,
self.completion
)
|
<commit_before>#
# Project: retdec-python
# Copyright: (c) 2015 by Petr Zemek <s3rvac@gmail.com> and contributors
# License: MIT, see the LICENSE file for more details
#
"""Phase of a decompilation."""
class DecompilationPhase:
"""Phase of a decompilation."""
def __init__(self, name, part, description, completion):
"""Initializes a phase."""
self._name = name
self._part = part
self._description = description
self._completion = completion
@property
def name(self):
"""Name of the phase (`str`)."""
return self._name
@property
def part(self):
"""Part to which the phase belongs (`str`).
May be ``None`` if the phase does not belong to any part.
"""
return self._part
@property
def description(self):
"""Description of the phase (`str`)."""
return self._description
@property
def completion(self):
"""Completion (in percentages, ``0-100``)."""
return self._completion
def __eq__(self, other):
return (self.__class__ == other.__class__ and
self.__dict__ == other.__dict__)
def __ne__(self, other):
return not self == other
def __repr__(self):
return '{}(name={!r}, part={!r}, description={!r}, completion={})'.format(
__name__ + '.' + self.__class__.__name__,
self.name,
self.part,
self.description,
self.completion
)
<commit_msg>Fix the description of DecompilationPhase.__init__().<commit_after>#
# Project: retdec-python
# Copyright: (c) 2015 by Petr Zemek <s3rvac@gmail.com> and contributors
# License: MIT, see the LICENSE file for more details
#
"""Phase of a decompilation."""
class DecompilationPhase:
"""Phase of a decompilation."""
def __init__(self, name, part, description, completion):
"""Initializes the phase."""
self._name = name
self._part = part
self._description = description
self._completion = completion
@property
def name(self):
"""Name of the phase (`str`)."""
return self._name
@property
def part(self):
"""Part to which the phase belongs (`str`).
May be ``None`` if the phase does not belong to any part.
"""
return self._part
@property
def description(self):
"""Description of the phase (`str`)."""
return self._description
@property
def completion(self):
"""Completion (in percentages, ``0-100``)."""
return self._completion
def __eq__(self, other):
return (self.__class__ == other.__class__ and
self.__dict__ == other.__dict__)
def __ne__(self, other):
return not self == other
def __repr__(self):
return '{}(name={!r}, part={!r}, description={!r}, completion={})'.format(
__name__ + '.' + self.__class__.__name__,
self.name,
self.part,
self.description,
self.completion
)
|
c86b6390e46bac17c64e19010912c4cb165fa9dd
|
satnogsclient/settings.py
|
satnogsclient/settings.py
|
from os import environ
DEMODULATION_COMMAND = environ.get('DEMODULATION_COMMAND', None)
ENCODING_COMMAND = environ.get('ENCODING_COMMAND', None)
DECODING_COMMAND = environ.get('DECODING_COMMAND', None)
OUTPUT_PATH = environ.get('OUTPUT_PATH', None)
|
from os import environ
DEMODULATION_COMMAND = environ.get('SATNOGS_DEMODULATION_COMMAND', None)
ENCODING_COMMAND = environ.get('SATNOGS_ENCODING_COMMAND', None)
DECODING_COMMAND = environ.get('SATNOGS_DECODING_COMMAND', None)
OUTPUT_PATH = environ.get('SATNOGS_OUTPUT_PATH', None)
|
Add prefix to required environment variables.
|
Add prefix to required environment variables.
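A minimal sketch of the renamed lookup (the /tmp path is a hypothetical value,
and a real deployment would export the variable in the shell instead):
from os import environ
environ['SATNOGS_OUTPUT_PATH'] = '/tmp/observations'  # stand-in for an export
OUTPUT_PATH = environ.get('SATNOGS_OUTPUT_PATH', None)
assert OUTPUT_PATH == '/tmp/observations'
# The old, unprefixed name is no longer consulted by the settings module
# (assuming OUTPUT_PATH is not otherwise set in the environment):
assert environ.get('OUTPUT_PATH') is None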
|
Python
|
agpl-3.0
|
cshields/satnogs-client,adamkalis/satnogs-client,adamkalis/satnogs-client,cshields/satnogs-client
|
from os import environ
DEMODULATION_COMMAND = environ.get('DEMODULATION_COMMAND', None)
ENCODING_COMMAND = environ.get('ENCODING_COMMAND', None)
DECODING_COMMAND = environ.get('DECODING_COMMAND', None)
OUTPUT_PATH = environ.get('OUTPUT_PATH', None)
Add prefix to required environment variables.
|
from os import environ
DEMODULATION_COMMAND = environ.get('SATNOGS_DEMODULATION_COMMAND', None)
ENCODING_COMMAND = environ.get('SATNOGS_ENCODING_COMMAND', None)
DECODING_COMMAND = environ.get('SATNOGS_DECODING_COMMAND', None)
OUTPUT_PATH = environ.get('SATNOGS_OUTPUT_PATH', None)
|
<commit_before>from os import environ
DEMODULATION_COMMAND = environ.get('DEMODULATION_COMMAND', None)
ENCODING_COMMAND = environ.get('ENCODING_COMMAND', None)
DECODING_COMMAND = environ.get('DECODING_COMMAND', None)
OUTPUT_PATH = environ.get('OUTPUT_PATH', None)
<commit_msg>Add prefix to required environment variables.<commit_after>
|
from os import environ
DEMODULATION_COMMAND = environ.get('SATNOGS_DEMODULATION_COMMAND', None)
ENCODING_COMMAND = environ.get('SATNOGS_ENCODING_COMMAND', None)
DECODING_COMMAND = environ.get('SATNOGS_DECODING_COMMAND', None)
OUTPUT_PATH = environ.get('SATNOGS_OUTPUT_PATH', None)
|
from os import environ
DEMODULATION_COMMAND = environ.get('DEMODULATION_COMMAND', None)
ENCODING_COMMAND = environ.get('ENCODING_COMMAND', None)
DECODING_COMMAND = environ.get('DECODING_COMMAND', None)
OUTPUT_PATH = environ.get('OUTPUT_PATH', None)
Add prefix to required environment variables.
from os import environ
DEMODULATION_COMMAND = environ.get('SATNOGS_DEMODULATION_COMMAND', None)
ENCODING_COMMAND = environ.get('SATNOGS_ENCODING_COMMAND', None)
DECODING_COMMAND = environ.get('SATNOGS_DECODING_COMMAND', None)
OUTPUT_PATH = environ.get('SATNOGS_OUTPUT_PATH', None)
|
<commit_before>from os import environ
DEMODULATION_COMMAND = environ.get('DEMODULATION_COMMAND', None)
ENCODING_COMMAND = environ.get('ENCODING_COMMAND', None)
DECODING_COMMAND = environ.get('DECODING_COMMAND', None)
OUTPUT_PATH = environ.get('OUTPUT_PATH', None)
<commit_msg>Add prefix to required environment variables.<commit_after>from os import environ
DEMODULATION_COMMAND = environ.get('SATNOGS_DEMODULATION_COMMAND', None)
ENCODING_COMMAND = environ.get('SATNOGS_ENCODING_COMMAND', None)
DECODING_COMMAND = environ.get('SATNOGS_DECODING_COMMAND', None)
OUTPUT_PATH = environ.get('SATNOGS_OUTPUT_PATH', None)
|
d7bb652118970c97dacd26f8aff60aa16804e21c
|
sqlalchemy_redshift/__init__.py
|
sqlalchemy_redshift/__init__.py
|
from pkg_resources import DistributionNotFound, get_distribution, parse_version
try:
import psycopg2 # noqa: F401
except ImportError:
raise ImportError(
'No module named psycopg2. Please install either '
'psycopg2 or psycopg2-binary package for CPython '
'or psycopg2cffi for Pypy.'
) from None
for package in ['psycopg2', 'psycopg2-binary', 'psycopg2cffi']:
try:
if get_distribution(package).parsed_version < parse_version('2.5'):
raise ImportError('Minimum required version for psycopg2 is 2.5')
break
except DistributionNotFound:
pass
else:
raise ImportError(
'A module was found named psycopg2, '
'but the version of it could not be checked '
'as it was neither the Python package psycopg2, '
'psycopg2-binary or psycopg2cffi.'
)
__version__ = get_distribution('sqlalchemy-redshift').version
from sqlalchemy.dialects import registry
registry.register("redshift", "sqlalchemy_redshift.dialect", "RedshiftDialect")
registry.register(
"redshift.psycopg2", "sqlalchemy_redshift.dialect", "RedshiftDialect"
)
|
from pkg_resources import DistributionNotFound, get_distribution, parse_version
try:
import psycopg2 # noqa: F401
except ImportError:
raise ImportError(
'No module named psycopg2. Please install either '
'psycopg2 or psycopg2-binary package for CPython '
'or psycopg2cffi for Pypy.'
)
for package in ['psycopg2', 'psycopg2-binary', 'psycopg2cffi']:
try:
if get_distribution(package).parsed_version < parse_version('2.5'):
raise ImportError('Minimum required version for psycopg2 is 2.5')
break
except DistributionNotFound:
pass
else:
raise ImportError(
'A module was found named psycopg2, '
'but the version of it could not be checked '
'as it was neither the Python package psycopg2, '
'psycopg2-binary or psycopg2cffi.'
)
__version__ = get_distribution('sqlalchemy-redshift').version
from sqlalchemy.dialects import registry
registry.register("redshift", "sqlalchemy_redshift.dialect", "RedshiftDialect")
registry.register(
"redshift.psycopg2", "sqlalchemy_redshift.dialect", "RedshiftDialect"
)
|
Remove clearing of exception context when raising a new exception
|
Remove clearing of exception context when raising a new exception
This syntax is only supported in Python 3.3 and up and is causing tests in
Python 2.7 to fail.
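A minimal sketch of what the removed "from None" syntax does (the module name
below is hypothetical; runnable on Python 3.3+ only):
try:
    try:
        import no_such_module  # raises ImportError
    except ImportError:
        raise ImportError('friendlier message')  # implicit exception chaining
except ImportError as exc:
    print(exc.__context__)  # the original ImportError is preserved
# Appending "from None" to the inner raise suppresses that context, but the
# syntax is a SyntaxError on Python 2.7, which is why it was dropped here.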
|
Python
|
mit
|
sqlalchemy-redshift/sqlalchemy-redshift,graingert/redshift_sqlalchemy,sqlalchemy-redshift/sqlalchemy-redshift
|
from pkg_resources import DistributionNotFound, get_distribution, parse_version
try:
import psycopg2 # noqa: F401
except ImportError:
raise ImportError(
'No module named psycopg2. Please install either '
'psycopg2 or psycopg2-binary package for CPython '
'or psycopg2cffi for Pypy.'
) from None
for package in ['psycopg2', 'psycopg2-binary', 'psycopg2cffi']:
try:
if get_distribution(package).parsed_version < parse_version('2.5'):
raise ImportError('Minimum required version for psycopg2 is 2.5')
break
except DistributionNotFound:
pass
else:
raise ImportError(
'A module was found named psycopg2, '
'but the version of it could not be checked '
'as it was neither the Python package psycopg2, '
'psycopg2-binary or psycopg2cffi.'
)
__version__ = get_distribution('sqlalchemy-redshift').version
from sqlalchemy.dialects import registry
registry.register("redshift", "sqlalchemy_redshift.dialect", "RedshiftDialect")
registry.register(
"redshift.psycopg2", "sqlalchemy_redshift.dialect", "RedshiftDialect"
)
Remove clearing of exception context when raising a new exception
This syntax is only supported in Python 3.3 and up and is causing tests in
Python 2.7 to fail.
|
from pkg_resources import DistributionNotFound, get_distribution, parse_version
try:
import psycopg2 # noqa: F401
except ImportError:
raise ImportError(
'No module named psycopg2. Please install either '
'psycopg2 or psycopg2-binary package for CPython '
'or psycopg2cffi for Pypy.'
)
for package in ['psycopg2', 'psycopg2-binary', 'psycopg2cffi']:
try:
if get_distribution(package).parsed_version < parse_version('2.5'):
raise ImportError('Minimum required version for psycopg2 is 2.5')
break
except DistributionNotFound:
pass
else:
raise ImportError(
'A module was found named psycopg2, '
'but the version of it could not be checked '
'as it was neither the Python package psycopg2, '
'psycopg2-binary or psycopg2cffi.'
)
__version__ = get_distribution('sqlalchemy-redshift').version
from sqlalchemy.dialects import registry
registry.register("redshift", "sqlalchemy_redshift.dialect", "RedshiftDialect")
registry.register(
"redshift.psycopg2", "sqlalchemy_redshift.dialect", "RedshiftDialect"
)
|
<commit_before>from pkg_resources import DistributionNotFound, get_distribution, parse_version
try:
import psycopg2 # noqa: F401
except ImportError:
raise ImportError(
'No module named psycopg2. Please install either '
'psycopg2 or psycopg2-binary package for CPython '
'or psycopg2cffi for Pypy.'
) from None
for package in ['psycopg2', 'psycopg2-binary', 'psycopg2cffi']:
try:
if get_distribution(package).parsed_version < parse_version('2.5'):
raise ImportError('Minimum required version for psycopg2 is 2.5')
break
except DistributionNotFound:
pass
else:
raise ImportError(
'A module was found named psycopg2, '
'but the version of it could not be checked '
'as it was neither the Python package psycopg2, '
'psycopg2-binary or psycopg2cffi.'
)
__version__ = get_distribution('sqlalchemy-redshift').version
from sqlalchemy.dialects import registry
registry.register("redshift", "sqlalchemy_redshift.dialect", "RedshiftDialect")
registry.register(
"redshift.psycopg2", "sqlalchemy_redshift.dialect", "RedshiftDialect"
)
<commit_msg>Remove clearing of exception context when raising a new exception
This syntax is only supported in Python 3.3 and up and is causing tests in
Python 2.7 to fail.<commit_after>
|
from pkg_resources import DistributionNotFound, get_distribution, parse_version
try:
import psycopg2 # noqa: F401
except ImportError:
raise ImportError(
'No module named psycopg2. Please install either '
'psycopg2 or psycopg2-binary package for CPython '
'or psycopg2cffi for Pypy.'
)
for package in ['psycopg2', 'psycopg2-binary', 'psycopg2cffi']:
try:
if get_distribution(package).parsed_version < parse_version('2.5'):
raise ImportError('Minimum required version for psycopg2 is 2.5')
break
except DistributionNotFound:
pass
else:
raise ImportError(
'A module was found named psycopg2, '
'but the version of it could not be checked '
'as it was neither the Python package psycopg2, '
'psycopg2-binary or psycopg2cffi.'
)
__version__ = get_distribution('sqlalchemy-redshift').version
from sqlalchemy.dialects import registry
registry.register("redshift", "sqlalchemy_redshift.dialect", "RedshiftDialect")
registry.register(
"redshift.psycopg2", "sqlalchemy_redshift.dialect", "RedshiftDialect"
)
|
from pkg_resources import DistributionNotFound, get_distribution, parse_version
try:
import psycopg2 # noqa: F401
except ImportError:
raise ImportError(
'No module named psycopg2. Please install either '
'psycopg2 or psycopg2-binary package for CPython '
'or psycopg2cffi for Pypy.'
) from None
for package in ['psycopg2', 'psycopg2-binary', 'psycopg2cffi']:
try:
if get_distribution(package).parsed_version < parse_version('2.5'):
raise ImportError('Minimum required version for psycopg2 is 2.5')
break
except DistributionNotFound:
pass
else:
raise ImportError(
'A module was found named psycopg2, '
'but the version of it could not be checked '
'as it was neither the Python package psycopg2, '
'psycopg2-binary or psycopg2cffi.'
)
__version__ = get_distribution('sqlalchemy-redshift').version
from sqlalchemy.dialects import registry
registry.register("redshift", "sqlalchemy_redshift.dialect", "RedshiftDialect")
registry.register(
"redshift.psycopg2", "sqlalchemy_redshift.dialect", "RedshiftDialect"
)
Remove clearing of exception context when raising a new exception
This syntax is only supported in Python 3.3 and up and is causing tests in
Python 2.7 to fail.
from pkg_resources import DistributionNotFound, get_distribution, parse_version
try:
import psycopg2 # noqa: F401
except ImportError:
raise ImportError(
'No module named psycopg2. Please install either '
'psycopg2 or psycopg2-binary package for CPython '
'or psycopg2cffi for Pypy.'
)
for package in ['psycopg2', 'psycopg2-binary', 'psycopg2cffi']:
try:
if get_distribution(package).parsed_version < parse_version('2.5'):
raise ImportError('Minimum required version for psycopg2 is 2.5')
break
except DistributionNotFound:
pass
else:
raise ImportError(
'A module was found named psycopg2, '
'but the version of it could not be checked '
'as it was neither the Python package psycopg2, '
'psycopg2-binary or psycopg2cffi.'
)
__version__ = get_distribution('sqlalchemy-redshift').version
from sqlalchemy.dialects import registry
registry.register("redshift", "sqlalchemy_redshift.dialect", "RedshiftDialect")
registry.register(
"redshift.psycopg2", "sqlalchemy_redshift.dialect", "RedshiftDialect"
)
|
<commit_before>from pkg_resources import DistributionNotFound, get_distribution, parse_version
try:
import psycopg2 # noqa: F401
except ImportError:
raise ImportError(
'No module named psycopg2. Please install either '
'psycopg2 or psycopg2-binary package for CPython '
'or psycopg2cffi for Pypy.'
) from None
for package in ['psycopg2', 'psycopg2-binary', 'psycopg2cffi']:
try:
if get_distribution(package).parsed_version < parse_version('2.5'):
raise ImportError('Minimum required version for psycopg2 is 2.5')
break
except DistributionNotFound:
pass
else:
raise ImportError(
'A module was found named psycopg2, '
'but the version of it could not be checked '
'as it was neither the Python package psycopg2, '
'psycopg2-binary or psycopg2cffi.'
)
__version__ = get_distribution('sqlalchemy-redshift').version
from sqlalchemy.dialects import registry
registry.register("redshift", "sqlalchemy_redshift.dialect", "RedshiftDialect")
registry.register(
"redshift.psycopg2", "sqlalchemy_redshift.dialect", "RedshiftDialect"
)
<commit_msg>Remove clearing of exception context when raising a new exception
This syntax is only supported in Python 3.3 and up and is causing tests in
Python 2.7 to fail.<commit_after>from pkg_resources import DistributionNotFound, get_distribution, parse_version
try:
import psycopg2 # noqa: F401
except ImportError:
raise ImportError(
'No module named psycopg2. Please install either '
'psycopg2 or psycopg2-binary package for CPython '
'or psycopg2cffi for Pypy.'
)
for package in ['psycopg2', 'psycopg2-binary', 'psycopg2cffi']:
try:
if get_distribution(package).parsed_version < parse_version('2.5'):
raise ImportError('Minimum required version for psycopg2 is 2.5')
break
except DistributionNotFound:
pass
else:
raise ImportError(
'A module was found named psycopg2, '
'but the version of it could not be checked '
'as it was neither the Python package psycopg2, '
'psycopg2-binary or psycopg2cffi.'
)
__version__ = get_distribution('sqlalchemy-redshift').version
from sqlalchemy.dialects import registry
registry.register("redshift", "sqlalchemy_redshift.dialect", "RedshiftDialect")
registry.register(
"redshift.psycopg2", "sqlalchemy_redshift.dialect", "RedshiftDialect"
)
|
bf2ace8bd6cb0c492ff4347f9c2fe10a003abaff
|
sqlalchemy_redshift/__init__.py
|
sqlalchemy_redshift/__init__.py
|
from pkg_resources import get_distribution, parse_version
try:
import psycopg2 # noqa: F401
if get_distribution('psycopg2').parsed_version < parse_version('2.5'):
raise ImportError('Minimum required version for psycopg2 is 2.5')
except ImportError:
raise ImportError(
'No module named psycopg2. Please install either '
'psycopg2 or psycopg2-binary package for CPython '
'or psycopg2cffi for Pypy.'
)
__version__ = get_distribution('sqlalchemy-redshift').version
from sqlalchemy.dialects import registry
registry.register("redshift", "sqlalchemy_redshift.dialect", "RedshiftDialect")
registry.register(
"redshift.psycopg2", "sqlalchemy_redshift.dialect", "RedshiftDialect"
)
|
from pkg_resources import DistributionNotFound, get_distribution, parse_version
try:
import psycopg2 # noqa: F401
except ImportError:
raise ImportError(
'No module named psycopg2. Please install either '
'psycopg2 or psycopg2-binary package for CPython '
'or psycopg2cffi for Pypy.'
) from None
for package in ['psycopg2', 'psycopg2-binary', 'psycopg2cffi']:
try:
if get_distribution(package).parsed_version < parse_version('2.5'):
raise ImportError('Minimum required version for psycopg2 is 2.5')
break
except DistributionNotFound:
pass
else:
raise ImportError(
'A module was found named psycopg2, '
'but the version of it could not be checked '
'as it was neither the Python package psycopg2, '
'psycopg2-binary or psycopg2cffi.'
)
__version__ = get_distribution('sqlalchemy-redshift').version
from sqlalchemy.dialects import registry
registry.register("redshift", "sqlalchemy_redshift.dialect", "RedshiftDialect")
registry.register(
"redshift.psycopg2", "sqlalchemy_redshift.dialect", "RedshiftDialect"
)
|
Check the version of any of the supported Psycopg2 packages
|
Check the version of any of the supported Psycopg2 packages
A check was introduced in commit 8e0c4857a1c08f257b95d3b1ee5f6eb795d55cdc which
would check what version of the 'psycopg2' Python (pip) package was installed
as the dependency was removed from setup.py.
The check would however only check the 'psycopg2' package and not the other two
supported providers of the psycopg2 module, which meant importing the
sqlalchemy_redshift module would throw an exception, even though they were
installed.
This changes the check to check for any of the three supported psycopg2
packages and throws an exception if any of them fail to validate.
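A minimal sketch of the for/else lookup pattern described above (the helper
name is hypothetical; the package list mirrors the commit):
from pkg_resources import DistributionNotFound, get_distribution, parse_version
def installed_psycopg2_version():
    for package in ['psycopg2', 'psycopg2-binary', 'psycopg2cffi']:
        try:
            version = get_distribution(package).parsed_version
            break  # found one of the known providers
        except DistributionNotFound:
            pass  # fall through to the next known provider
    else:
        return None  # psycopg2 importable, but no known package found
    return version
version = installed_psycopg2_version()
if version is not None and version < parse_version('2.5'):
    raise ImportError('Minimum required version for psycopg2 is 2.5')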
|
Python
|
mit
|
sqlalchemy-redshift/sqlalchemy-redshift,graingert/redshift_sqlalchemy,sqlalchemy-redshift/sqlalchemy-redshift
|
from pkg_resources import get_distribution, parse_version
try:
import psycopg2 # noqa: F401
if get_distribution('psycopg2').parsed_version < parse_version('2.5'):
raise ImportError('Minimum required version for psycopg2 is 2.5')
except ImportError:
raise ImportError(
'No module named psycopg2. Please install either '
'psycopg2 or psycopg2-binary package for CPython '
'or psycopg2cffi for Pypy.'
)
__version__ = get_distribution('sqlalchemy-redshift').version
from sqlalchemy.dialects import registry
registry.register("redshift", "sqlalchemy_redshift.dialect", "RedshiftDialect")
registry.register(
"redshift.psycopg2", "sqlalchemy_redshift.dialect", "RedshiftDialect"
)
Check the version of any of the supported Psycopg2 packages
A check was introduced in commit 8e0c4857a1c08f257b95d3b1ee5f6eb795d55cdc which
would check what version of the 'psycopg2' Python (pip) package was installed
as the dependency was removed from setup.py.
The check would however only check the 'psycopg2' package and not the other two
supported providers of the psycopg2 module, which meant importing the
sqlalchemy_redshift module would throw an exception, even though they were
installed.
This changes the check to check for any of the three supported psycopg2
packages and throws an exception if any of them fail to validate.
|
from pkg_resources import DistributionNotFound, get_distribution, parse_version
try:
import psycopg2 # noqa: F401
except ImportError:
raise ImportError(
'No module named psycopg2. Please install either '
'psycopg2 or psycopg2-binary package for CPython '
'or psycopg2cffi for Pypy.'
) from None
for package in ['psycopg2', 'psycopg2-binary', 'psycopg2cffi']:
try:
if get_distribution(package).parsed_version < parse_version('2.5'):
raise ImportError('Minimum required version for psycopg2 is 2.5')
break
except DistributionNotFound:
pass
else:
raise ImportError(
'A module was found named psycopg2, '
'but the version of it could not be checked '
'as it was neither the Python package psycopg2, '
'psycopg2-binary or psycopg2cffi.'
)
__version__ = get_distribution('sqlalchemy-redshift').version
from sqlalchemy.dialects import registry
registry.register("redshift", "sqlalchemy_redshift.dialect", "RedshiftDialect")
registry.register(
"redshift.psycopg2", "sqlalchemy_redshift.dialect", "RedshiftDialect"
)
|
<commit_before>from pkg_resources import get_distribution, parse_version
try:
import psycopg2 # noqa: F401
if get_distribution('psycopg2').parsed_version < parse_version('2.5'):
raise ImportError('Minimum required version for psycopg2 is 2.5')
except ImportError:
raise ImportError(
'No module named psycopg2. Please install either '
'psycopg2 or psycopg2-binary package for CPython '
'or psycopg2cffi for Pypy.'
)
__version__ = get_distribution('sqlalchemy-redshift').version
from sqlalchemy.dialects import registry
registry.register("redshift", "sqlalchemy_redshift.dialect", "RedshiftDialect")
registry.register(
"redshift.psycopg2", "sqlalchemy_redshift.dialect", "RedshiftDialect"
)
<commit_msg>Check the version of any of the supported Psycopg2 packages
A check was introduced in commit 8e0c4857a1c08f257b95d3b1ee5f6eb795d55cdc which
would check what version of the 'psycopg2' Python (pip) package was installed
as the dependency was removed from setup.py.
The check would however only check the 'psycopg2' package and not the other two
supported providers of the psycopg2 module, which meant importing the
sqlalchemy_redshift module would throw an exception, even though they were
installed.
This changes the check to check for any of the three supported psycopg2
packages and throws an exception if any of them fail to validate.<commit_after>
|
from pkg_resources import DistributionNotFound, get_distribution, parse_version
try:
import psycopg2 # noqa: F401
except ImportError:
raise ImportError(
'No module named psycopg2. Please install either '
'psycopg2 or psycopg2-binary package for CPython '
'or psycopg2cffi for Pypy.'
) from None
for package in ['psycopg2', 'psycopg2-binary', 'psycopg2cffi']:
try:
if get_distribution(package).parsed_version < parse_version('2.5'):
raise ImportError('Minimum required version for psycopg2 is 2.5')
break
except DistributionNotFound:
pass
else:
raise ImportError(
'A module was found named psycopg2, '
'but the version of it could not be checked '
'as it was neither the Python package psycopg2, '
'psycopg2-binary or psycopg2cffi.'
)
__version__ = get_distribution('sqlalchemy-redshift').version
from sqlalchemy.dialects import registry
registry.register("redshift", "sqlalchemy_redshift.dialect", "RedshiftDialect")
registry.register(
"redshift.psycopg2", "sqlalchemy_redshift.dialect", "RedshiftDialect"
)
|
from pkg_resources import get_distribution, parse_version
try:
import psycopg2 # noqa: F401
if get_distribution('psycopg2').parsed_version < parse_version('2.5'):
raise ImportError('Minimum required version for psycopg2 is 2.5')
except ImportError:
raise ImportError(
'No module named psycopg2. Please install either '
'psycopg2 or psycopg2-binary package for CPython '
'or psycopg2cffi for Pypy.'
)
__version__ = get_distribution('sqlalchemy-redshift').version
from sqlalchemy.dialects import registry
registry.register("redshift", "sqlalchemy_redshift.dialect", "RedshiftDialect")
registry.register(
"redshift.psycopg2", "sqlalchemy_redshift.dialect", "RedshiftDialect"
)
Check the version of any of the supported Psycopg2 packages
A check was introduced in commit 8e0c4857a1c08f257b95d3b1ee5f6eb795d55cdc which
would check what version of the 'psycopg2' Python (pip) package was installed
as the dependency was removed from setup.py.
The check would however only check the 'psycopg2' package and not the other two
supported providers of the psycopg2 module, which meant importing the
sqlalchemy_redshift module would throw an exception, even though they were
installed.
This changes the check to check for any of the three supported psycopg2
packages and throws an exception if any of them fail to validate.
from pkg_resources import DistributionNotFound, get_distribution, parse_version
try:
import psycopg2 # noqa: F401
except ImportError:
raise ImportError(
'No module named psycopg2. Please install either '
'psycopg2 or psycopg2-binary package for CPython '
'or psycopg2cffi for Pypy.'
) from None
for package in ['psycopg2', 'psycopg2-binary', 'psycopg2cffi']:
try:
if get_distribution(package).parsed_version < parse_version('2.5'):
raise ImportError('Minimum required version for psycopg2 is 2.5')
break
except DistributionNotFound:
pass
else:
raise ImportError(
'A module was found named psycopg2, '
'but the version of it could not be checked '
'as it was neither the Python package psycopg2, '
'psycopg2-binary or psycopg2cffi.'
)
__version__ = get_distribution('sqlalchemy-redshift').version
from sqlalchemy.dialects import registry
registry.register("redshift", "sqlalchemy_redshift.dialect", "RedshiftDialect")
registry.register(
"redshift.psycopg2", "sqlalchemy_redshift.dialect", "RedshiftDialect"
)
|
<commit_before>from pkg_resources import get_distribution, parse_version
try:
import psycopg2 # noqa: F401
if get_distribution('psycopg2').parsed_version < parse_version('2.5'):
raise ImportError('Minimum required version for psycopg2 is 2.5')
except ImportError:
raise ImportError(
'No module named psycopg2. Please install either '
'psycopg2 or psycopg2-binary package for CPython '
'or psycopg2cffi for Pypy.'
)
__version__ = get_distribution('sqlalchemy-redshift').version
from sqlalchemy.dialects import registry
registry.register("redshift", "sqlalchemy_redshift.dialect", "RedshiftDialect")
registry.register(
"redshift.psycopg2", "sqlalchemy_redshift.dialect", "RedshiftDialect"
)
<commit_msg>Check the version of any of the supported Psycopg2 packages
A check was introduced in commit 8e0c4857a1c08f257b95d3b1ee5f6eb795d55cdc which
would check what version of the 'psycopg2' Python (pip) package was installed
as the dependency was removed from setup.py.
The check would however only check the 'psycopg2' package and not the other two
supported providers of the psycopg2 module, which meant importing the
sqlalchemy_redshift module would throw an exception, even though they were
installed.
This changes the check to check for any of the three supported psycopg2
packages and throws an exception if any of them fail to validate.<commit_after>from pkg_resources import DistributionNotFound, get_distribution, parse_version
try:
import psycopg2 # noqa: F401
except ImportError:
raise ImportError(
'No module named psycopg2. Please install either '
'psycopg2 or psycopg2-binary package for CPython '
'or psycopg2cffi for Pypy.'
) from None
for package in ['psycopg2', 'psycopg2-binary', 'psycopg2cffi']:
try:
if get_distribution(package).parsed_version < parse_version('2.5'):
raise ImportError('Minimum required version for psycopg2 is 2.5')
break
except DistributionNotFound:
pass
else:
raise ImportError(
'A module was found named psycopg2, '
'but the version of it could not be checked '
'as it was neither the Python package psycopg2, '
'psycopg2-binary or psycopg2cffi.'
)
__version__ = get_distribution('sqlalchemy-redshift').version
from sqlalchemy.dialects import registry
registry.register("redshift", "sqlalchemy_redshift.dialect", "RedshiftDialect")
registry.register(
"redshift.psycopg2", "sqlalchemy_redshift.dialect", "RedshiftDialect"
)
|
485d32e71996194fd5bf7bddb2535b5753b23572
|
plasmapy/classes/tests/test_plasma_base.py
|
plasmapy/classes/tests/test_plasma_base.py
|
from plasmapy.classes import BasePlasma, GenericPlasma
class NoDataSource(BasePlasma):
pass
class IsDataSource(BasePlasma):
@classmethod
def is_datasource_for(cls, **kwargs):
return True
class IsNotDataSource(BasePlasma):
@classmethod
def is_datasource_for(cls, **kwargs):
return False
class TestRegistrar:
def test_no_data_source(self):
"""
NoDataSource class should not be registered since it has
no method named ``is_datasource_for``.
"""
assert not BasePlasma._registry.get(NoDataSource)
def test_is_data_source(self):
"""
IsDataSource class should be registered since it has a
method named ``is_datasource_for`` and must return True.
"""
assert BasePlasma._registry.get(IsDataSource)
assert BasePlasma._registry[IsDataSource]()
# Delete the class from _registry once test is done
# to not interfere with plasma factory tests
del BasePlasma._registry[IsDataSource]
def test_is_not_data_source(self):
"""
IsNotDataSource class should be registered since it has a
method named ``is_datasource_for`` but must return False.
"""
assert BasePlasma._registry.get(IsNotDataSource)
assert not BasePlasma._registry[IsNotDataSource]()
del BasePlasma._registry[IsNotDataSource]
def test_subclasses():
assert issubclass(GenericPlasma, BasePlasma)
|
from plasmapy.classes import BasePlasma, GenericPlasma
# Get rid of any previously registered classes.
BasePlasma._registry = {}
class NoDataSource(BasePlasma):
pass
class IsDataSource(BasePlasma):
@classmethod
def is_datasource_for(cls, **kwargs):
return True
class IsNotDataSource(BasePlasma):
@classmethod
def is_datasource_for(cls, **kwargs):
return False
class TestRegistrar:
def test_no_data_source(self):
"""
NoDataSource class should not be registered since it has
no method named ``is_datasource_for``.
"""
assert not BasePlasma._registry.get(NoDataSource)
def test_is_data_source(self):
"""
IsDataSource class should be registered since it has a
method named ``is_datasource_for`` and must return True.
"""
assert BasePlasma._registry.get(IsDataSource)
assert BasePlasma._registry[IsDataSource]()
# Delete the class from _registry once test is done
# to not interfere with plasma factory tests
del BasePlasma._registry[IsDataSource]
def test_is_not_data_source(self):
"""
IsNotDataSource class should be registered since it has a
method named ``is_datasource_for`` but must return False.
"""
assert BasePlasma._registry.get(IsNotDataSource)
assert not BasePlasma._registry[IsNotDataSource]()
del BasePlasma._registry[IsNotDataSource]
def test_subclasses():
assert issubclass(GenericPlasma, BasePlasma)
|
Fix failing tests on setup.py test
|
Fix failing tests on setup.py test
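A minimal sketch of why the registry is emptied first (the registry mechanics
below are assumed, modeled on the test classes in this record rather than the
real BasePlasma code):
class Base:
    _registry = {}
    def __init_subclass__(cls, **kwargs):
        super().__init_subclass__(**kwargs)
        if hasattr(cls, 'is_datasource_for'):
            Base._registry[cls] = cls.is_datasource_for
class LeftoverSource(Base):
    @classmethod
    def is_datasource_for(cls, **kwargs):
        return True
# A test module imported later in the same process still sees LeftoverSource,
# so it resets the dict before defining its own fixture classes:
assert Base._registry
Base._registry = {}
assert not Base._registry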
|
Python
|
bsd-3-clause
|
StanczakDominik/PlasmaPy
|
from plasmapy.classes import BasePlasma, GenericPlasma
class NoDataSource(BasePlasma):
pass
class IsDataSource(BasePlasma):
@classmethod
def is_datasource_for(cls, **kwargs):
return True
class IsNotDataSource(BasePlasma):
@classmethod
def is_datasource_for(cls, **kwargs):
return False
class TestRegistrar:
def test_no_data_source(self):
"""
NoDataSource class should not be registered since it has
no method named ``is_datasource_for``.
"""
assert not BasePlasma._registry.get(NoDataSource)
def test_is_data_source(self):
"""
IsDataSource class should be registered since it has a
method named ``is_datasource_for`` and must return True.
"""
assert BasePlasma._registry.get(IsDataSource)
assert BasePlasma._registry[IsDataSource]()
# Delete the class from _registry once test is done
# to not interfere with plasma factory tests
del BasePlasma._registry[IsDataSource]
def test_is_not_data_source(self):
"""
IsNotDataSource class should be registered since it has a
method named ``is_datasource_for`` but must return False.
"""
assert BasePlasma._registry.get(IsNotDataSource)
assert not BasePlasma._registry[IsNotDataSource]()
del BasePlasma._registry[IsNotDataSource]
def test_subclasses():
assert issubclass(GenericPlasma, BasePlasma)
Fix failing tests on setup.py test
|
from plasmapy.classes import BasePlasma, GenericPlasma
# Get rid of any previously registered classes.
BasePlasma._registry = {}
class NoDataSource(BasePlasma):
pass
class IsDataSource(BasePlasma):
@classmethod
def is_datasource_for(cls, **kwargs):
return True
class IsNotDataSource(BasePlasma):
@classmethod
def is_datasource_for(cls, **kwargs):
return False
class TestRegistrar:
def test_no_data_source(self):
"""
NoDataSource class should not be registered since it has
no method named ``is_datasource_for``.
"""
assert not BasePlasma._registry.get(NoDataSource)
def test_is_data_source(self):
"""
IsDataSource class should be registered since it has a
method named ``is_datasource_for`` and must return True.
"""
assert BasePlasma._registry.get(IsDataSource)
assert BasePlasma._registry[IsDataSource]()
# Delete the class from _registry once test is done
# to not interfere with plasma factory tests
del BasePlasma._registry[IsDataSource]
def test_is_not_data_source(self):
"""
IsNotDataSource class should be registered since it has a
method named ``is_datasource_for`` but must return False.
"""
assert BasePlasma._registry.get(IsNotDataSource)
assert not BasePlasma._registry[IsNotDataSource]()
del BasePlasma._registry[IsNotDataSource]
def test_subclasses():
assert issubclass(GenericPlasma, BasePlasma)
|
<commit_before>from plasmapy.classes import BasePlasma, GenericPlasma
class NoDataSource(BasePlasma):
pass
class IsDataSource(BasePlasma):
@classmethod
def is_datasource_for(cls, **kwargs):
return True
class IsNotDataSource(BasePlasma):
@classmethod
def is_datasource_for(cls, **kwargs):
return False
class TestRegistrar:
def test_no_data_source(self):
"""
NoDataSource class should not be registered since it has
no method named ``is_datasource_for``.
"""
assert not BasePlasma._registry.get(NoDataSource)
def test_is_data_source(self):
"""
IsDataSource class should be registered since it has a
method named ``is_datasource_for`` and must return True.
"""
assert BasePlasma._registry.get(IsDataSource)
assert BasePlasma._registry[IsDataSource]()
# Delete the class from _registry once test is done
# to not interfere with plasma factory tests
del BasePlasma._registry[IsDataSource]
def test_is_not_data_source(self):
"""
IsNotDataSource class should be registered since it has a
method named ``is_datasource_for`` but must return False.
"""
assert BasePlasma._registry.get(IsNotDataSource)
assert not BasePlasma._registry[IsNotDataSource]()
del BasePlasma._registry[IsNotDataSource]
def test_subclasses():
assert issubclass(GenericPlasma, BasePlasma)
<commit_msg>Fix failing tests on setup.py test<commit_after>
|
from plasmapy.classes import BasePlasma, GenericPlasma
# Get rid of any previously registered classes.
BasePlasma._registry = {}
class NoDataSource(BasePlasma):
pass
class IsDataSource(BasePlasma):
@classmethod
def is_datasource_for(cls, **kwargs):
return True
class IsNotDataSource(BasePlasma):
@classmethod
def is_datasource_for(cls, **kwargs):
return False
class TestRegistrar:
def test_no_data_source(self):
"""
NoDataSource class should not be registered since it has
no method named ``is_datasource_for``.
"""
assert not BasePlasma._registry.get(NoDataSource)
def test_is_data_source(self):
"""
IsDataSource class should be registered since it has a
method named ``is_datasource_for`` and must return True.
"""
assert BasePlasma._registry.get(IsDataSource)
assert BasePlasma._registry[IsDataSource]()
# Delete the class from _registry once test is done
# to not interfere with plasma factory tests
del BasePlasma._registry[IsDataSource]
def test_is_not_data_source(self):
"""
IsNotDataSource class should be registered since it has a
method named ``is_datasource_for`` but must return False.
"""
assert BasePlasma._registry.get(IsNotDataSource)
assert not BasePlasma._registry[IsNotDataSource]()
del BasePlasma._registry[IsNotDataSource]
def test_subclasses():
assert issubclass(GenericPlasma, BasePlasma)
|
from plasmapy.classes import BasePlasma, GenericPlasma
class NoDataSource(BasePlasma):
pass
class IsDataSource(BasePlasma):
@classmethod
def is_datasource_for(cls, **kwargs):
return True
class IsNotDataSource(BasePlasma):
@classmethod
def is_datasource_for(cls, **kwargs):
return False
class TestRegistrar:
def test_no_data_source(self):
"""
NoDataSource class should not be registered since it has
no method named ``is_datasource_for``.
"""
assert not BasePlasma._registry.get(NoDataSource)
def test_is_data_source(self):
"""
IsDataSource class should be registered since it has a
method named ``is_datasource_for`` and must return True.
"""
assert BasePlasma._registry.get(IsDataSource)
assert BasePlasma._registry[IsDataSource]()
# Delete the class from _registry once test is done
# to not interfere with plasma factory tests
del BasePlasma._registry[IsDataSource]
def test_is_not_data_source(self):
"""
IsNotDataSource class should be registered since it has a
method named ``is_datasource_for`` but must return False.
"""
assert BasePlasma._registry.get(IsNotDataSource)
assert not BasePlasma._registry[IsNotDataSource]()
del BasePlasma._registry[IsNotDataSource]
def test_subclasses():
assert issubclass(GenericPlasma, BasePlasma)
Fix failing tests on setup.py test
from plasmapy.classes import BasePlasma, GenericPlasma
# Get rid of any previously registered classes.
BasePlasma._registry = {}
class NoDataSource(BasePlasma):
pass
class IsDataSource(BasePlasma):
@classmethod
def is_datasource_for(cls, **kwargs):
return True
class IsNotDataSource(BasePlasma):
@classmethod
def is_datasource_for(cls, **kwargs):
return False
class TestRegistrar:
def test_no_data_source(self):
"""
NoDataSource class should not be registered since it has
no method named ``is_datasource_for``.
"""
assert not BasePlasma._registry.get(NoDataSource)
def test_is_data_source(self):
"""
IsDataSource class should be registered since it has a
method named ``is_datasource_for`` and must return True.
"""
assert BasePlasma._registry.get(IsDataSource)
assert BasePlasma._registry[IsDataSource]()
# Delete the class from _registry once test is done
# to not interfere with plasma factory tests
del BasePlasma._registry[IsDataSource]
def test_is_not_data_source(self):
"""
IsNotDataSource class should be registered since it has a
method named ``is_datasource_for`` but must return False.
"""
assert BasePlasma._registry.get(IsNotDataSource)
assert not BasePlasma._registry[IsNotDataSource]()
del BasePlasma._registry[IsNotDataSource]
def test_subclasses():
assert issubclass(GenericPlasma, BasePlasma)
|
<commit_before>from plasmapy.classes import BasePlasma, GenericPlasma
class NoDataSource(BasePlasma):
pass
class IsDataSource(BasePlasma):
@classmethod
def is_datasource_for(cls, **kwargs):
return True
class IsNotDataSource(BasePlasma):
@classmethod
def is_datasource_for(cls, **kwargs):
return False
class TestRegistrar:
def test_no_data_source(self):
"""
NoDataSource class should not be registered since it has
no method named ``is_datasource_for``.
"""
assert not BasePlasma._registry.get(NoDataSource)
def test_is_data_source(self):
"""
IsDataSource class should be registered since it has a
method named ``is_datasource_for`` and must return True.
"""
assert BasePlasma._registry.get(IsDataSource)
assert BasePlasma._registry[IsDataSource]()
# Delete the class from _registry once test is done
# to not interfere with plasma factory tests
del BasePlasma._registry[IsDataSource]
def test_is_not_data_source(self):
"""
IsNotDataSource class should be registered since it has a
method named ``is_datasource_for`` but must return False.
"""
assert BasePlasma._registry.get(IsNotDataSource)
assert not BasePlasma._registry[IsNotDataSource]()
del BasePlasma._registry[IsNotDataSource]
def test_subclasses():
assert issubclass(GenericPlasma, BasePlasma)
<commit_msg>Fix failing tests on setup.py test<commit_after>from plasmapy.classes import BasePlasma, GenericPlasma
# Get rid of any previously registered classes.
BasePlasma._registry = {}
class NoDataSource(BasePlasma):
pass
class IsDataSource(BasePlasma):
@classmethod
def is_datasource_for(cls, **kwargs):
return True
class IsNotDataSource(BasePlasma):
@classmethod
def is_datasource_for(cls, **kwargs):
return False
class TestRegistrar:
def test_no_data_source(self):
"""
NoDataSource class should not be registered since it has
no method named ``is_datasource_for``.
"""
assert not BasePlasma._registry.get(NoDataSource)
def test_is_data_source(self):
"""
IsDataSource class should be registered since it has a
method named ``is_datasource_for`` and must return True.
"""
assert BasePlasma._registry.get(IsDataSource)
assert BasePlasma._registry[IsDataSource]()
# Delete the class from _registry once test is done
# to not interfere with plasma factory tests
del BasePlasma._registry[IsDataSource]
def test_is_not_data_source(self):
"""
IsNotDataSource class should be registered since it has a
method named ``is_datasource_for`` but must return False.
"""
assert BasePlasma._registry.get(IsNotDataSource)
assert not BasePlasma._registry[IsNotDataSource]()
del BasePlasma._registry[IsNotDataSource]
def test_subclasses():
assert issubclass(GenericPlasma, BasePlasma)
|
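The registrar behaviour exercised by these tests can be pictured with a short sketch: ``BasePlasma`` keeps a ``_registry`` dict and records any subclass that defines ``is_datasource_for``. The hook used below (``__init_subclass__``) is an assumption for illustration, not necessarily how plasmapy implements it:
class BasePlasma:
    _registry = {}

    def __init_subclass__(cls, **kwargs):
        super().__init_subclass__(**kwargs)
        # Subclasses without is_datasource_for (like NoDataSource above)
        # are deliberately left out of the registry.
        if hasattr(cls, 'is_datasource_for'):
            BasePlasma._registry[cls] = cls.is_datasource_for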
2d9d3e5a0a904a52e8b97bdb64e59f455d15b6e8
|
migrations/versions/1815829d365_.py
|
migrations/versions/1815829d365_.py
|
"""empty message
Revision ID: 1815829d365
Revises: 3fcddd64a72
Create Date: 2016-02-09 17:58:47.362133
"""
# revision identifiers, used by Alembic.
revision = '1815829d365'
down_revision = '3fcddd64a72'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
# create new unique index to include geo app ref
op.execute("DROP INDEX title_abr_idx")
op.execute("CREATE UNIQUE INDEX title_abr_geo_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'), (record->'data'->>'geometry_application_reference'))")
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.execute("DROP INDEX title_abr_geo_idx")
op.execute("CREATE UNIQUE INDEX title_abr_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'))")
### end Alembic commands ###
|
"""empty message
Revision ID: 1815829d365
Revises: 3fcddd64a72
Create Date: 2016-02-09 17:58:47.362133
"""
# revision identifiers, used by Alembic.
revision = '1815829d365'
down_revision = '3fcddd64a72'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
# create new unique index to include geometry_application_ref
op.execute("DROP INDEX title_abr_idx")
op.execute("CREATE UNIQUE INDEX title_abr_geo_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'), (record->'data'->>'geometry_application_reference'))")
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.execute("DROP INDEX title_abr_geo_idx")
op.execute("CREATE UNIQUE INDEX title_abr_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'))")
### end Alembic commands ###
|
Add geometry_application_reference to new unique index.
|
Add geometry_application_reference to new unique index.
|
Python
|
mit
|
LandRegistry/system-of-record,LandRegistry/system-of-record
|
"""empty message
Revision ID: 1815829d365
Revises: 3fcddd64a72
Create Date: 2016-02-09 17:58:47.362133
"""
# revision identifiers, used by Alembic.
revision = '1815829d365'
down_revision = '3fcddd64a72'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
# create new unique index to include geo app ref
op.execute("DROP INDEX title_abr_idx")
op.execute("CREATE UNIQUE INDEX title_abr_geo_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'), (record->'data'->>'geometry_application_reference'))")
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.execute("DROP INDEX title_abr_geo_idx")
op.execute("CREATE UNIQUE INDEX title_abr_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'))")
### end Alembic commands ###
Add geometry_application_reference to new unique index.
|
"""empty message
Revision ID: 1815829d365
Revises: 3fcddd64a72
Create Date: 2016-02-09 17:58:47.362133
"""
# revision identifiers, used by Alembic.
revision = '1815829d365'
down_revision = '3fcddd64a72'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
# create new unique index to include geometry_application_ref
op.execute("DROP INDEX title_abr_idx")
op.execute("CREATE UNIQUE INDEX title_abr_geo_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'), (record->'data'->>'geometry_application_reference'))")
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.execute("DROP INDEX title_abr_geo_idx")
op.execute("CREATE UNIQUE INDEX title_abr_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'))")
### end Alembic commands ###
|
<commit_before>"""empty message
Revision ID: 1815829d365
Revises: 3fcddd64a72
Create Date: 2016-02-09 17:58:47.362133
"""
# revision identifiers, used by Alembic.
revision = '1815829d365'
down_revision = '3fcddd64a72'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
# create new unique index to include geo app ref
op.execute("DROP INDEX title_abr_idx")
op.execute("CREATE UNIQUE INDEX title_abr_geo_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'), (record->'data'->>'geometry_application_reference'))")
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.execute("DROP INDEX title_abr_geo_idx")
op.execute("CREATE UNIQUE INDEX title_abr_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'))")
### end Alembic commands ###
<commit_msg>Add geometry_application_reference to new unique index.<commit_after>
|
"""empty message
Revision ID: 1815829d365
Revises: 3fcddd64a72
Create Date: 2016-02-09 17:58:47.362133
"""
# revision identifiers, used by Alembic.
revision = '1815829d365'
down_revision = '3fcddd64a72'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
# create new unique index to include geometry_application_ref
op.execute("DROP INDEX title_abr_idx")
op.execute("CREATE UNIQUE INDEX title_abr_geo_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'), (record->'data'->>'geometry_application_reference'))")
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.execute("DROP INDEX title_abr_geo_idx")
op.execute("CREATE UNIQUE INDEX title_abr_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'))")
### end Alembic commands ###
|
"""empty message
Revision ID: 1815829d365
Revises: 3fcddd64a72
Create Date: 2016-02-09 17:58:47.362133
"""
# revision identifiers, used by Alembic.
revision = '1815829d365'
down_revision = '3fcddd64a72'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
# create new unique index to include geo app ref
op.execute("DROP INDEX title_abr_idx")
op.execute("CREATE UNIQUE INDEX title_abr_geo_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'), (record->'data'->>'geometry_application_reference'))")
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.execute("DROP INDEX title_abr_geo_idx")
op.execute("CREATE UNIQUE INDEX title_abr_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'))")
### end Alembic commands ###
Add geometry_application_reference to new unique index.
"""empty message
Revision ID: 1815829d365
Revises: 3fcddd64a72
Create Date: 2016-02-09 17:58:47.362133
"""
# revision identifiers, used by Alembic.
revision = '1815829d365'
down_revision = '3fcddd64a72'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
# create new unique index to include geometry_application_ref
op.execute("DROP INDEX title_abr_idx")
op.execute("CREATE UNIQUE INDEX title_abr_geo_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'), (record->'data'->>'geometry_application_reference'))")
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.execute("DROP INDEX title_abr_geo_idx")
op.execute("CREATE UNIQUE INDEX title_abr_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'))")
### end Alembic commands ###
|
<commit_before>"""empty message
Revision ID: 1815829d365
Revises: 3fcddd64a72
Create Date: 2016-02-09 17:58:47.362133
"""
# revision identifiers, used by Alembic.
revision = '1815829d365'
down_revision = '3fcddd64a72'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
# create new unique index to include geo app ref
op.execute("DROP INDEX title_abr_idx")
op.execute("CREATE UNIQUE INDEX title_abr_geo_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'), (record->'data'->>'geometry_application_reference'))")
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.execute("DROP INDEX title_abr_geo_idx")
op.execute("CREATE UNIQUE INDEX title_abr_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'))")
### end Alembic commands ###
<commit_msg>Add geometry_application_reference to new unique index.<commit_after>"""empty message
Revision ID: 1815829d365
Revises: 3fcddd64a72
Create Date: 2016-02-09 17:58:47.362133
"""
# revision identifiers, used by Alembic.
revision = '1815829d365'
down_revision = '3fcddd64a72'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
# create new unique index to include geometry_application_ref
op.execute("DROP INDEX title_abr_idx")
op.execute("CREATE UNIQUE INDEX title_abr_geo_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'), (record->'data'->>'geometry_application_reference'))")
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.execute("DROP INDEX title_abr_geo_idx")
op.execute("CREATE UNIQUE INDEX title_abr_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'))")
### end Alembic commands ###
|
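On recent Alembic versions the same migration can be expressed with the ``op.create_index``/``op.drop_index`` helpers and textual column expressions instead of raw SQL — a hedged sketch, not the project's actual style:
import sqlalchemy as sa
from alembic import op

def upgrade():
    op.drop_index('title_abr_idx', table_name='records')
    # Functional unique index over three JSON fields of the record column.
    op.create_index(
        'title_abr_geo_idx', 'records',
        [sa.text("(record->'data'->>'title_number')"),
         sa.text("(record->'data'->>'application_reference')"),
         sa.text("(record->'data'->>'geometry_application_reference')")],
        unique=True,
    )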
3ed14bcd364d1843e35cd4a6d1bd48e06379c223
|
linter.py
|
linter.py
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Hardy Jones
# Copyright (c) 2013
#
# License: MIT
#
"""This module exports the Hlint plugin class."""
from SublimeLinter.lint import Linter
class Hlint(Linter):
"""Provides an interface to hlint."""
defaults = {
'selector': 'source.haskell'
}
cmd = 'hlint'
regex = (
r'^.+:(?P<line>\d+):'
'(?P<col>\d+):\s*'
'(?:(?P<error>Error)|(?P<warning>Warning)):\s*'
'(?P<message>.+)$'
)
multiline = True
tempfile_suffix = 'hs'
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Hardy Jones
# Copyright (c) 2013
#
# License: MIT
#
"""This module exports the Hlint plugin class."""
import json
from SublimeLinter.lint import Linter, LintMatch
class Hlint(Linter):
"""Provides an interface to hlint."""
cmd = 'hlint ${args} --json -'
defaults = {
'selector': 'source.haskell'
}
def find_errors(self, output):
# type: (str) -> Iterator[LintMatch]
errors = json.loads(output)
for error in errors:
message = "{hint}. Found: {from}".format(**error)
if error['to']:
message += " Perhaps: {to}".format(**error)
yield LintMatch(
error_type=error['severity'].lower(),
line=error['startLine'] - 1,
col=error['startColumn'] - 1,
message=message
)
|
Use JSON to parse hlint output
|
Use JSON to parse hlint output
|
Python
|
mit
|
SublimeLinter/SublimeLinter-hlint
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Hardy Jones
# Copyright (c) 2013
#
# License: MIT
#
"""This module exports the Hlint plugin class."""
from SublimeLinter.lint import Linter
class Hlint(Linter):
"""Provides an interface to hlint."""
defaults = {
'selector': 'source.haskell'
}
cmd = 'hlint'
regex = (
r'^.+:(?P<line>\d+):'
'(?P<col>\d+):\s*'
'(?:(?P<error>Error)|(?P<warning>Warning)):\s*'
'(?P<message>.+)$'
)
multiline = True
tempfile_suffix = 'hs'
Use JSON to parse hlint output
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Hardy Jones
# Copyright (c) 2013
#
# License: MIT
#
"""This module exports the Hlint plugin class."""
import json
from SublimeLinter.lint import Linter, LintMatch
class Hlint(Linter):
"""Provides an interface to hlint."""
cmd = 'hlint ${args} --json -'
defaults = {
'selector': 'source.haskell'
}
def find_errors(self, output):
# type: (str) -> Iterator[LintMatch]
errors = json.loads(output)
for error in errors:
message = "{hint}. Found: {from}".format(**error)
if error['to']:
message += " Perhaps: {to}".format(**error)
yield LintMatch(
error_type=error['severity'].lower(),
line=error['startLine'] - 1,
col=error['startColumn'] - 1,
message=message
)
|
<commit_before>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Hardy Jones
# Copyright (c) 2013
#
# License: MIT
#
"""This module exports the Hlint plugin class."""
from SublimeLinter.lint import Linter
class Hlint(Linter):
"""Provides an interface to hlint."""
defaults = {
'selector': 'source.haskell'
}
cmd = 'hlint'
regex = (
r'^.+:(?P<line>\d+):'
'(?P<col>\d+):\s*'
'(?:(?P<error>Error)|(?P<warning>Warning)):\s*'
'(?P<message>.+)$'
)
multiline = True
tempfile_suffix = 'hs'
<commit_msg>Use JSON to parse hlint output<commit_after>
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Hardy Jones
# Copyright (c) 2013
#
# License: MIT
#
"""This module exports the Hlint plugin class."""
import json
from SublimeLinter.lint import Linter, LintMatch
class Hlint(Linter):
"""Provides an interface to hlint."""
cmd = 'hlint ${args} --json -'
defaults = {
'selector': 'source.haskell'
}
def find_errors(self, output):
# type: (str) -> Iterator[LintMatch]
errors = json.loads(output)
for error in errors:
message = "{hint}. Found: {from}".format(**error)
if error['to']:
message += " Perhaps: {to}".format(**error)
yield LintMatch(
error_type=error['severity'].lower(),
line=error['startLine'] - 1,
col=error['startColumn'] - 1,
message=message
)
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Hardy Jones
# Copyright (c) 2013
#
# License: MIT
#
"""This module exports the Hlint plugin class."""
from SublimeLinter.lint import Linter
class Hlint(Linter):
"""Provides an interface to hlint."""
defaults = {
'selector': 'source.haskell'
}
cmd = 'hlint'
regex = (
r'^.+:(?P<line>\d+):'
'(?P<col>\d+):\s*'
'(?:(?P<error>Error)|(?P<warning>Warning)):\s*'
'(?P<message>.+)$'
)
multiline = True
tempfile_suffix = 'hs'
Use JSON to parse hlint output
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Hardy Jones
# Copyright (c) 2013
#
# License: MIT
#
"""This module exports the Hlint plugin class."""
import json
from SublimeLinter.lint import Linter, LintMatch
class Hlint(Linter):
"""Provides an interface to hlint."""
cmd = 'hlint ${args} --json -'
defaults = {
'selector': 'source.haskell'
}
def find_errors(self, output):
# type: (str) -> Iterator[LintMatch]
errors = json.loads(output)
for error in errors:
message = "{hint}. Found: {from}".format(**error)
if error['to']:
message += " Perhaps: {to}".format(**error)
yield LintMatch(
error_type=error['severity'].lower(),
line=error['startLine'] - 1,
col=error['startColumn'] - 1,
message=message
)
|
<commit_before>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Hardy Jones
# Copyright (c) 2013
#
# License: MIT
#
"""This module exports the Hlint plugin class."""
from SublimeLinter.lint import Linter
class Hlint(Linter):
"""Provides an interface to hlint."""
defaults = {
'selector': 'source.haskell'
}
cmd = 'hlint'
regex = (
r'^.+:(?P<line>\d+):'
'(?P<col>\d+):\s*'
'(?:(?P<error>Error)|(?P<warning>Warning)):\s*'
'(?P<message>.+)$'
)
multiline = True
tempfile_suffix = 'hs'
<commit_msg>Use JSON to parse hlint output<commit_after>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Hardy Jones
# Copyright (c) 2013
#
# License: MIT
#
"""This module exports the Hlint plugin class."""
import json
from SublimeLinter.lint import Linter, LintMatch
class Hlint(Linter):
"""Provides an interface to hlint."""
cmd = 'hlint ${args} --json -'
defaults = {
'selector': 'source.haskell'
}
def find_errors(self, output):
# type: (str) -> Iterator[LintMatch]
errors = json.loads(output)
for error in errors:
message = "{hint}. Found: {from}".format(**error)
if error['to']:
message += " Perhaps: {to}".format(**error)
yield LintMatch(
error_type=error['severity'].lower(),
line=error['startLine'] - 1,
col=error['startColumn'] - 1,
message=message
)
|
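``find_errors`` above assumes hlint's ``--json`` output is a list of objects carrying ``hint``, ``from``, ``to``, ``severity``, ``startLine`` and ``startColumn``. A self-contained check against a hand-written payload (the sample values are invented) shows the parsing end to end:
import json

sample = json.dumps([{
    'hint': 'Redundant bracket',
    'from': '(x)',
    'to': 'x',
    'severity': 'Warning',
    'startLine': 3,
    'startColumn': 7,
}])

for error in json.loads(sample):
    message = '{hint}. Found: {from}'.format(**error)
    if error['to']:
        message += ' Perhaps: {to}'.format(**error)
    # SublimeLinter positions are zero-based, hence the -1 offsets.
    print(error['severity'].lower(), error['startLine'] - 1,
          error['startColumn'] - 1, message)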
b5776f5223b5f648d166c7608abe79c7fb566bb2
|
templatetags/views.py
|
templatetags/views.py
|
from django.views.decorators.csrf import csrf_exempt
from django.views.generic import View
from django.utils.decorators import method_decorator
from django.shortcuts import render
# Create your views here.
class MarkdownPreview(View):
template_name = "markdown_preview.html"
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
return render(request, self.template_name, {'body': request.POST['md'].strip()})
# Create your views here.
class MarkdownPreviewSafe(View):
template_name = "markdown_preview_safe.html"
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
return render(request, self.template_name, {'body': request.POST['md'].strip()})
class MarkdownPreviewNewsletter(View):
template_name = "markdown_preview_newsletter.html"
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
return render(request, self.template_name, {'body': request.POST['md'].strip()})
class MarkdownPreviewText(View):
template_name = "markdown_preview_newsletter.html"
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
return render(request, self.template_name, {'body': request.POST['md'].strip()})
|
from django.views.decorators.csrf import csrf_exempt
from django.views.generic import View
from django.utils.decorators import method_decorator
from django.shortcuts import render
# Create your views here.
class MarkdownPreview(View):
template_name = "markdown_preview.html"
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
return render(request, self.template_name, {'body': request.POST['md'].strip()})
# Create your views here.
class MarkdownPreviewSafe(View):
template_name = "markdown_preview_safe.html"
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
return render(request, self.template_name, {'body': request.POST['md'].strip()})
class MarkdownPreviewNewsletter(View):
template_name = "markdown_preview_newsletter.html"
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
return render(request, self.template_name, {'body': request.POST['md'].strip()})
class MarkdownPreviewText(View):
template_name = "markdown_preview_text.html"
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
return render(request, self.template_name, {'body': request.POST['md'].strip()})
|
Fix preview still being slightly different.
|
Fix preview still being slightly different.
|
Python
|
isc
|
ashbc/tgrsite,ashbc/tgrsite,ashbc/tgrsite
|
from django.views.decorators.csrf import csrf_exempt
from django.views.generic import View
from django.utils.decorators import method_decorator
from django.shortcuts import render
# Create your views here.
class MarkdownPreview(View):
template_name = "markdown_preview.html"
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
return render(request, self.template_name, {'body': request.POST['md'].strip()})
# Create your views here.
class MarkdownPreviewSafe(View):
template_name = "markdown_preview_safe.html"
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
return render(request, self.template_name, {'body': request.POST['md'].strip()})
class MarkdownPreviewNewsletter(View):
template_name = "markdown_preview_newsletter.html"
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
return render(request, self.template_name, {'body': request.POST['md'].strip()})
class MarkdownPreviewText(View):
template_name = "markdown_preview_newsletter.html"
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
return render(request, self.template_name, {'body': request.POST['md'].strip()})
Fix preview still being slightly different.
|
from django.views.decorators.csrf import csrf_exempt
from django.views.generic import View
from django.utils.decorators import method_decorator
from django.shortcuts import render
# Create your views here.
class MarkdownPreview(View):
template_name = "markdown_preview.html"
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
return render(request, self.template_name, {'body': request.POST['md'].strip()})
# Create your views here.
class MarkdownPreviewSafe(View):
template_name = "markdown_preview_safe.html"
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
return render(request, self.template_name, {'body': request.POST['md'].strip()})
class MarkdownPreviewNewsletter(View):
template_name = "markdown_preview_newsletter.html"
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
return render(request, self.template_name, {'body': request.POST['md'].strip()})
class MarkdownPreviewText(View):
template_name = "markdown_preview_text.html"
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
return render(request, self.template_name, {'body': request.POST['md'].strip()})
|
<commit_before>from django.views.decorators.csrf import csrf_exempt
from django.views.generic import View
from django.utils.decorators import method_decorator
from django.shortcuts import render
# Create your views here.
class MarkdownPreview(View):
template_name = "markdown_preview.html"
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
return render(request, self.template_name, {'body': request.POST['md'].strip()})
# Create your views here.
class MarkdownPreviewSafe(View):
template_name = "markdown_preview_safe.html"
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
return render(request, self.template_name, {'body': request.POST['md'].strip()})
class MarkdownPreviewNewsletter(View):
template_name = "markdown_preview_newsletter.html"
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
return render(request, self.template_name, {'body': request.POST['md'].strip()})
class MarkdownPreviewText(View):
template_name = "markdown_preview_newsletter.html"
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
return render(request, self.template_name, {'body': request.POST['md'].strip()})
<commit_msg>Fix preview still being slightly different.<commit_after>
|
from django.views.decorators.csrf import csrf_exempt
from django.views.generic import View
from django.utils.decorators import method_decorator
from django.shortcuts import render
# Create your views here.
class MarkdownPreview(View):
template_name = "markdown_preview.html"
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
return render(request, self.template_name, {'body': request.POST['md'].strip()})
# Create your views here.
class MarkdownPreviewSafe(View):
template_name = "markdown_preview_safe.html"
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
return render(request, self.template_name, {'body': request.POST['md'].strip()})
class MarkdownPreviewNewsletter(View):
template_name = "markdown_preview_newsletter.html"
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
return render(request, self.template_name, {'body': request.POST['md'].strip()})
class MarkdownPreviewText(View):
template_name = "markdown_preview_text.html"
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
return render(request, self.template_name, {'body': request.POST['md'].strip()})
|
from django.views.decorators.csrf import csrf_exempt
from django.views.generic import View
from django.utils.decorators import method_decorator
from django.shortcuts import render
# Create your views here.
class MarkdownPreview(View):
template_name = "markdown_preview.html"
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
return render(request, self.template_name, {'body': request.POST['md'].strip()})
# Create your views here.
class MarkdownPreviewSafe(View):
template_name = "markdown_preview_safe.html"
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
return render(request, self.template_name, {'body': request.POST['md'].strip()})
class MarkdownPreviewNewsletter(View):
template_name = "markdown_preview_newsletter.html"
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
return render(request, self.template_name, {'body': request.POST['md'].strip()})
class MarkdownPreviewText(View):
template_name = "markdown_preview_newsletter.html"
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
return render(request, self.template_name, {'body': request.POST['md'].strip()})
Fix preview still being slightly different.
from django.views.decorators.csrf import csrf_exempt
from django.views.generic import View
from django.utils.decorators import method_decorator
from django.shortcuts import render
# Create your views here.
class MarkdownPreview(View):
template_name = "markdown_preview.html"
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
return render(request, self.template_name, {'body': request.POST['md'].strip()})
# Create your views here.
class MarkdownPreviewSafe(View):
template_name = "markdown_preview_safe.html"
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
return render(request, self.template_name, {'body': request.POST['md'].strip()})
class MarkdownPreviewNewsletter(View):
template_name = "markdown_preview_newsletter.html"
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
return render(request, self.template_name, {'body': request.POST['md'].strip()})
class MarkdownPreviewText(View):
template_name = "markdown_preview_text.html"
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
return render(request, self.template_name, {'body': request.POST['md'].strip()})
|
<commit_before>from django.views.decorators.csrf import csrf_exempt
from django.views.generic import View
from django.utils.decorators import method_decorator
from django.shortcuts import render
# Create your views here.
class MarkdownPreview(View):
template_name = "markdown_preview.html"
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
return render(request, self.template_name, {'body': request.POST['md'].strip()})
# Create your views here.
class MarkdownPreviewSafe(View):
template_name = "markdown_preview_safe.html"
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
return render(request, self.template_name, {'body': request.POST['md'].strip()})
class MarkdownPreviewNewsletter(View):
template_name = "markdown_preview_newsletter.html"
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
return render(request, self.template_name, {'body': request.POST['md'].strip()})
class MarkdownPreviewText(View):
template_name = "markdown_preview_newsletter.html"
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
return render(request, self.template_name, {'body': request.POST['md'].strip()})
<commit_msg>Fix preview still being slightly different.<commit_after>from django.views.decorators.csrf import csrf_exempt
from django.views.generic import View
from django.utils.decorators import method_decorator
from django.shortcuts import render
# Create your views here.
class MarkdownPreview(View):
template_name = "markdown_preview.html"
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
return render(request, self.template_name, {'body': request.POST['md'].strip()})
# Create your views here.
class MarkdownPreviewSafe(View):
template_name = "markdown_preview_safe.html"
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
return render(request, self.template_name, {'body': request.POST['md'].strip()})
class MarkdownPreviewNewsletter(View):
template_name = "markdown_preview_newsletter.html"
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
return render(request, self.template_name, {'body': request.POST['md'].strip()})
class MarkdownPreviewText(View):
template_name = "markdown_preview_text.html"
@method_decorator(csrf_exempt)
def post(self, request, *args, **kwargs):
return render(request, self.template_name, {'body': request.POST['md'].strip()})
|
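For context, a class-based view like ``MarkdownPreviewText`` is exposed through a URLconf; the route and name below are assumptions for illustration, not taken from the tgrsite project:
from django.urls import path

from templatetags.views import MarkdownPreviewText

urlpatterns = [
    # POST form data with an 'md' field to receive the rendered preview.
    path('preview/text/', MarkdownPreviewText.as_view(),
         name='markdown-preview-text'),
]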
0683645a2fb2323a9534d985005d843aada66040
|
anypytools/__init__.py
|
anypytools/__init__.py
|
# -*- coding: utf-8 -*-
"""AnyPyTools library."""
import sys
import platform
import logging
from anypytools.abcutils import AnyPyProcess
from anypytools.macroutils import AnyMacro
from anypytools import macro_commands
logger = logging.getLogger('abt.anypytools')
logger.addHandler(logging.NullHandler())
__all__ = [
'datautils', 'h5py_wrapper', 'AnyPyProcess', 'AnyMacro', 'macro_commands',
'print_versions',
]
__version__ = '0.10.2'
def print_versions():
"""Print all the versions of software that AnyPyTools relies on."""
import numpy as np
import scipy as sp
print("-=" * 38)
print("AnyPyTools version: %s" % __version__)
print("NumPy version: %s" % np.__version__)
print("SciPy version: %s" % sp.__version__)
print("Python version: %s" % sys.version)
(sysname, nodename, release, version, machine, processor) = \
platform.uname()
print("Platform: %s-%s-%s (%s)" % (sysname, release, machine, version))
if sysname == "Linux":
print("Linux dist: %s" % " ".join(platform.linux_distribution()[:-1]))
if not processor:
processor = "not recognized"
print("Processor: %s" % processor)
print("Byte-ordering: %s" % sys.byteorder)
print("-=" * 38)
|
# -*- coding: utf-8 -*-
"""AnyPyTools library."""
import sys
import platform
import logging
from anypytools.abcutils import AnyPyProcess, execute_anybodycon
from anypytools.macroutils import AnyMacro
from anypytools import macro_commands
logger = logging.getLogger('abt.anypytools')
logger.addHandler(logging.NullHandler())
__all__ = [
'datautils', 'h5py_wrapper', 'AnyPyProcess', 'AnyMacro', 'macro_commands',
'print_versions', 'execute_anybodycon',
]
__version__ = '0.10.2'
def print_versions():
"""Print all the versions of software that AnyPyTools relies on."""
import numpy as np
import scipy as sp
print("-=" * 38)
print("AnyPyTools version: %s" % __version__)
print("NumPy version: %s" % np.__version__)
print("SciPy version: %s" % sp.__version__)
print("Python version: %s" % sys.version)
(sysname, nodename, release, version, machine, processor) = \
platform.uname()
print("Platform: %s-%s-%s (%s)" % (sysname, release, machine, version))
if sysname == "Linux":
print("Linux dist: %s" % " ".join(platform.linux_distribution()[:-1]))
if not processor:
processor = "not recognized"
print("Processor: %s" % processor)
print("Byte-ordering: %s" % sys.byteorder)
print("-=" * 38)
|
Add execute_anybodycon to toplevel package
|
Add execute_anybodycon to toplevel package
|
Python
|
mit
|
AnyBody-Research-Group/AnyPyTools
|
# -*- coding: utf-8 -*-
"""AnyPyTools library."""
import sys
import platform
import logging
from anypytools.abcutils import AnyPyProcess
from anypytools.macroutils import AnyMacro
from anypytools import macro_commands
logger = logging.getLogger('abt.anypytools')
logger.addHandler(logging.NullHandler())
__all__ = [
'datautils', 'h5py_wrapper', 'AnyPyProcess', 'AnyMacro', 'macro_commands',
'print_versions',
]
__version__ = '0.10.2'
def print_versions():
"""Print all the versions of software that AnyPyTools relies on."""
import numpy as np
import scipy as sp
print("-=" * 38)
print("AnyPyTools version: %s" % __version__)
print("NumPy version: %s" % np.__version__)
print("SciPy version: %s" % sp.__version__)
print("Python version: %s" % sys.version)
(sysname, nodename, release, version, machine, processor) = \
platform.uname()
print("Platform: %s-%s-%s (%s)" % (sysname, release, machine, version))
if sysname == "Linux":
print("Linux dist: %s" % " ".join(platform.linux_distribution()[:-1]))
if not processor:
processor = "not recognized"
print("Processor: %s" % processor)
print("Byte-ordering: %s" % sys.byteorder)
print("-=" * 38)
Add execute_anybodycon to toplevel package
|
# -*- coding: utf-8 -*-
"""AnyPyTools library."""
import sys
import platform
import logging
from anypytools.abcutils import AnyPyProcess, execute_anybodycon
from anypytools.macroutils import AnyMacro
from anypytools import macro_commands
logger = logging.getLogger('abt.anypytools')
logger.addHandler(logging.NullHandler())
__all__ = [
'datautils', 'h5py_wrapper', 'AnyPyProcess', 'AnyMacro', 'macro_commands',
'print_versions', 'execute_anybodycon',
]
__version__ = '0.10.2'
def print_versions():
"""Print all the versions of software that AnyPyTools relies on."""
import numpy as np
import scipy as sp
print("-=" * 38)
print("AnyPyTools version: %s" % __version__)
print("NumPy version: %s" % np.__version__)
print("SciPy version: %s" % sp.__version__)
print("Python version: %s" % sys.version)
(sysname, nodename, release, version, machine, processor) = \
platform.uname()
print("Platform: %s-%s-%s (%s)" % (sysname, release, machine, version))
if sysname == "Linux":
print("Linux dist: %s" % " ".join(platform.linux_distribution()[:-1]))
if not processor:
processor = "not recognized"
print("Processor: %s" % processor)
print("Byte-ordering: %s" % sys.byteorder)
print("-=" * 38)
|
<commit_before># -*- coding: utf-8 -*-
"""AnyPyTools library."""
import sys
import platform
import logging
from anypytools.abcutils import AnyPyProcess
from anypytools.macroutils import AnyMacro
from anypytools import macro_commands
logger = logging.getLogger('abt.anypytools')
logger.addHandler(logging.NullHandler())
__all__ = [
'datautils', 'h5py_wrapper', 'AnyPyProcess', 'AnyMacro', 'macro_commands',
'print_versions',
]
__version__ = '0.10.2'
def print_versions():
"""Print all the versions of software that AnyPyTools relies on."""
import numpy as np
import scipy as sp
print("-=" * 38)
print("AnyPyTools version: %s" % __version__)
print("NumPy version: %s" % np.__version__)
print("SciPy version: %s" % sp.__version__)
print("Python version: %s" % sys.version)
(sysname, nodename, release, version, machine, processor) = \
platform.uname()
print("Platform: %s-%s-%s (%s)" % (sysname, release, machine, version))
if sysname == "Linux":
print("Linux dist: %s" % " ".join(platform.linux_distribution()[:-1]))
if not processor:
processor = "not recognized"
print("Processor: %s" % processor)
print("Byte-ordering: %s" % sys.byteorder)
print("-=" * 38)
<commit_msg>Add execute_anybodycon to toplevel package<commit_after>
|
# -*- coding: utf-8 -*-
"""AnyPyTools library."""
import sys
import platform
import logging
from anypytools.abcutils import AnyPyProcess, execute_anybodycon
from anypytools.macroutils import AnyMacro
from anypytools import macro_commands
logger = logging.getLogger('abt.anypytools')
logger.addHandler(logging.NullHandler())
__all__ = [
'datautils', 'h5py_wrapper', 'AnyPyProcess', 'AnyMacro', 'macro_commands',
'print_versions', 'execute_anybodycon',
]
__version__ = '0.10.2'
def print_versions():
"""Print all the versions of software that AnyPyTools relies on."""
import numpy as np
import scipy as sp
print("-=" * 38)
print("AnyPyTools version: %s" % __version__)
print("NumPy version: %s" % np.__version__)
print("SciPy version: %s" % sp.__version__)
print("Python version: %s" % sys.version)
(sysname, nodename, release, version, machine, processor) = \
platform.uname()
print("Platform: %s-%s-%s (%s)" % (sysname, release, machine, version))
if sysname == "Linux":
print("Linux dist: %s" % " ".join(platform.linux_distribution()[:-1]))
if not processor:
processor = "not recognized"
print("Processor: %s" % processor)
print("Byte-ordering: %s" % sys.byteorder)
print("-=" * 38)
|
# -*- coding: utf-8 -*-
"""AnyPyTools library."""
import sys
import platform
import logging
from anypytools.abcutils import AnyPyProcess
from anypytools.macroutils import AnyMacro
from anypytools import macro_commands
logger = logging.getLogger('abt.anypytools')
logger.addHandler(logging.NullHandler())
__all__ = [
'datautils', 'h5py_wrapper', 'AnyPyProcess', 'AnyMacro', 'macro_commands',
'print_versions',
]
__version__ = '0.10.2'
def print_versions():
"""Print all the versions of software that AnyPyTools relies on."""
import numpy as np
import scipy as sp
print("-=" * 38)
print("AnyPyTools version: %s" % __version__)
print("NumPy version: %s" % np.__version__)
print("SciPy version: %s" % sp.__version__)
print("Python version: %s" % sys.version)
(sysname, nodename, release, version, machine, processor) = \
platform.uname()
print("Platform: %s-%s-%s (%s)" % (sysname, release, machine, version))
if sysname == "Linux":
print("Linux dist: %s" % " ".join(platform.linux_distribution()[:-1]))
if not processor:
processor = "not recognized"
print("Processor: %s" % processor)
print("Byte-ordering: %s" % sys.byteorder)
print("-=" * 38)
Add execute_anybodycon to toplevel package
# -*- coding: utf-8 -*-
"""AnyPyTools library."""
import sys
import platform
import logging
from anypytools.abcutils import AnyPyProcess, execute_anybodycon
from anypytools.macroutils import AnyMacro
from anypytools import macro_commands
logger = logging.getLogger('abt.anypytools')
logger.addHandler(logging.NullHandler())
__all__ = [
'datautils', 'h5py_wrapper', 'AnyPyProcess', 'AnyMacro', 'macro_commands',
'print_versions', 'execute_anybodycon',
]
__version__ = '0.10.2'
def print_versions():
"""Print all the versions of software that AnyPyTools relies on."""
import numpy as np
import scipy as sp
print("-=" * 38)
print("AnyPyTools version: %s" % __version__)
print("NumPy version: %s" % np.__version__)
print("SciPy version: %s" % sp.__version__)
print("Python version: %s" % sys.version)
(sysname, nodename, release, version, machine, processor) = \
platform.uname()
print("Platform: %s-%s-%s (%s)" % (sysname, release, machine, version))
if sysname == "Linux":
print("Linux dist: %s" % " ".join(platform.linux_distribution()[:-1]))
if not processor:
processor = "not recognized"
print("Processor: %s" % processor)
print("Byte-ordering: %s" % sys.byteorder)
print("-=" * 38)
|
<commit_before># -*- coding: utf-8 -*-
"""AnyPyTools library."""
import sys
import platform
import logging
from anypytools.abcutils import AnyPyProcess
from anypytools.macroutils import AnyMacro
from anypytools import macro_commands
logger = logging.getLogger('abt.anypytools')
logger.addHandler(logging.NullHandler())
__all__ = [
'datautils', 'h5py_wrapper', 'AnyPyProcess', 'AnyMacro', 'macro_commands',
'print_versions',
]
__version__ = '0.10.2'
def print_versions():
"""Print all the versions of software that AnyPyTools relies on."""
import numpy as np
import scipy as sp
print("-=" * 38)
print("AnyPyTools version: %s" % __version__)
print("NumPy version: %s" % np.__version__)
print("SciPy version: %s" % sp.__version__)
print("Python version: %s" % sys.version)
(sysname, nodename, release, version, machine, processor) = \
platform.uname()
print("Platform: %s-%s-%s (%s)" % (sysname, release, machine, version))
if sysname == "Linux":
print("Linux dist: %s" % " ".join(platform.linux_distribution()[:-1]))
if not processor:
processor = "not recognized"
print("Processor: %s" % processor)
print("Byte-ordering: %s" % sys.byteorder)
print("-=" * 38)
<commit_msg>Add execute_anybodycon to toplevel package<commit_after># -*- coding: utf-8 -*-
"""AnyPyTools library."""
import sys
import platform
import logging
from anypytools.abcutils import AnyPyProcess, execute_anybodycon
from anypytools.macroutils import AnyMacro
from anypytools import macro_commands
logger = logging.getLogger('abt.anypytools')
logger.addHandler(logging.NullHandler())
__all__ = [
'datautils', 'h5py_wrapper', 'AnyPyProcess', 'AnyMacro', 'macro_commands',
'print_versions', 'execute_anybodycon',
]
__version__ = '0.10.2'
def print_versions():
"""Print all the versions of software that AnyPyTools relies on."""
import numpy as np
import scipy as sp
print("-=" * 38)
print("AnyPyTools version: %s" % __version__)
print("NumPy version: %s" % np.__version__)
print("SciPy version: %s" % sp.__version__)
print("Python version: %s" % sys.version)
(sysname, nodename, release, version, machine, processor) = \
platform.uname()
print("Platform: %s-%s-%s (%s)" % (sysname, release, machine, version))
if sysname == "Linux":
print("Linux dist: %s" % " ".join(platform.linux_distribution()[:-1]))
if not processor:
processor = "not recognized"
print("Processor: %s" % processor)
print("Byte-ordering: %s" % sys.byteorder)
print("-=" * 38)
|
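The practical effect of the re-export is that callers no longer need to reach into the submodule; both imports below now resolve to the same function:
# After this commit:
from anypytools import execute_anybodycon

# Equivalent, and the only option before the commit:
from anypytools.abcutils import execute_anybodycon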
818de1d8ef32ef853d37e753cc0dc701d76d04ea
|
app/apis/search_api.py
|
app/apis/search_api.py
|
from flask import Blueprint, jsonify, request
from importlib import import_module
import re
blueprint = Blueprint('search_api', __name__, url_prefix='/search')
@blueprint.route('/<string:model>')
def api(model):
global Model
class_name = model.title() + 'Search'
model_name = model + '_search'
Model = getattr(import_module('app.models.' + model_name), class_name)
query_string = request.args.get('query')
query_string = re.sub('[^0-9a-zA-Z ]+', '*', query_string).lower()
if not query_string:
return 'Query is missing', 400
query = Model.query.filter(Model.search.contains(query_string))
return jsonify(data=[q.serialize() for q in query.all()])
|
# -*- coding: utf-8 -*-
import sys
from flask import Blueprint, jsonify, request
from importlib import import_module
from unicodedata import normalize
reload(sys)
sys.setdefaultencoding('utf8')
def remove_accents(txt):
return normalize('NFKD', txt.decode('utf-8')).encode('ASCII','ignore')
blueprint = Blueprint('search_api', __name__, url_prefix='/search')
@blueprint.route('/<string:model>')
def api(model):
global Model
class_name = model.title() + 'Search'
model_name = model + '_search'
Model = getattr(import_module('app.models.' + model_name), class_name)
query_string = request.args.get('query')
query_string = remove_accents(query_string).lower()
if not query_string:
return 'Query is missing', 400
query = Model.query
for word in query_string.split(' '):
query = query.filter(Model.search.contains(word))
return jsonify(data=[q.serialize() for q in query.all()])
|
Add support to search for word in search api
|
Add support to search for word in search api
|
Python
|
mit
|
daniel1409/dataviva-api,DataViva/dataviva-api
|
from flask import Blueprint, jsonify, request
from importlib import import_module
import re
blueprint = Blueprint('search_api', __name__, url_prefix='/search')
@blueprint.route('/<string:model>')
def api(model):
global Model
class_name = model.title() + 'Search'
model_name = model + '_search'
Model = getattr(import_module('app.models.' + model_name), class_name)
query_string = request.args.get('query')
query_string = re.sub('[^0-9a-zA-Z ]+', '*', query_string).lower()
if not query_string:
return 'Query is missing', 400
query = Model.query.filter(Model.search.contains(query_string))
return jsonify(data=[q.serialize() for q in query.all()])
Add support to search for word in search api
|
# -*- coding: utf-8 -*-
import sys
from flask import Blueprint, jsonify, request
from importlib import import_module
from unicodedata import normalize
reload(sys)
sys.setdefaultencoding('utf8')
def remove_accents(txt):
return normalize('NFKD', txt.decode('utf-8')).encode('ASCII','ignore')
blueprint = Blueprint('search_api', __name__, url_prefix='/search')
@blueprint.route('/<string:model>')
def api(model):
global Model
class_name = model.title() + 'Search'
model_name = model + '_search'
Model = getattr(import_module('app.models.' + model_name), class_name)
query_string = request.args.get('query')
query_string = remove_accents(query_string).lower()
if not query_string:
return 'Query is missing', 400
query = Model.query
for word in query_string.split(' '):
query = query.filter(Model.search.contains(word))
return jsonify(data=[q.serialize() for q in query.all()])
|
<commit_before>from flask import Blueprint, jsonify, request
from importlib import import_module
import re
blueprint = Blueprint('search_api', __name__, url_prefix='/search')
@blueprint.route('/<string:model>')
def api(model):
global Model
class_name = model.title() + 'Search'
model_name = model + '_search'
Model = getattr(import_module('app.models.' + model_name), class_name)
query_string = request.args.get('query')
query_string = re.sub('[^0-9a-zA-Z ]+', '*', query_string).lower()
if not query_string:
return 'Query is missing', 400
query = Model.query.filter(Model.search.contains(query_string))
return jsonify(data=[q.serialize() for q in query.all()])
<commit_msg>Add support to search for word in search api<commit_after>
|
# -*- coding: utf-8 -*-
import sys
from flask import Blueprint, jsonify, request
from importlib import import_module
from unicodedata import normalize
reload(sys)
sys.setdefaultencoding('utf8')
def remove_accents(txt):
return normalize('NFKD', txt.decode('utf-8')).encode('ASCII','ignore')
blueprint = Blueprint('search_api', __name__, url_prefix='/search')
@blueprint.route('/<string:model>')
def api(model):
global Model
class_name = model.title() + 'Search'
model_name = model + '_search'
Model = getattr(import_module('app.models.' + model_name), class_name)
query_string = request.args.get('query')
query_string = remove_accents(query_string).lower()
if not query_string:
return 'Query is missing', 400
query = Model.query
for word in query_string.split(' '):
query = query.filter(Model.search.contains(word))
return jsonify(data=[q.serialize() for q in query.all()])
|
from flask import Blueprint, jsonify, request
from importlib import import_module
import re
blueprint = Blueprint('search_api', __name__, url_prefix='/search')
@blueprint.route('/<string:model>')
def api(model):
global Model
class_name = model.title() + 'Search'
model_name = model + '_search'
Model = getattr(import_module('app.models.' + model_name), class_name)
query_string = request.args.get('query')
query_string = re.sub('[^0-9a-zA-Z ]+', '*', query_string).lower()
if not query_string:
return 'Query is missing', 400
query = Model.query.filter(Model.search.contains(query_string))
return jsonify(data=[q.serialize() for q in query.all()])
Add support to search for word in search api
# -*- coding: utf-8 -*-
import sys
from flask import Blueprint, jsonify, request
from importlib import import_module
from unicodedata import normalize
reload(sys)
sys.setdefaultencoding('utf8')
def remove_accents(txt):
return normalize('NFKD', txt.decode('utf-8')).encode('ASCII','ignore')
blueprint = Blueprint('search_api', __name__, url_prefix='/search')
@blueprint.route('/<string:model>')
def api(model):
global Model
class_name = model.title() + 'Search'
model_name = model + '_search'
Model = getattr(import_module('app.models.' + model_name), class_name)
query_string = request.args.get('query')
query_string = remove_accents(query_string).lower()
if not query_string:
return 'Query is missing', 400
query = Model.query
for word in query_string.split(' '):
query = query.filter(Model.search.contains(word))
return jsonify(data=[q.serialize() for q in query.all()])
|
<commit_before>from flask import Blueprint, jsonify, request
from importlib import import_module
import re
blueprint = Blueprint('search_api', __name__, url_prefix='/search')
@blueprint.route('/<string:model>')
def api(model):
global Model
class_name = model.title() + 'Search'
model_name = model + '_search'
Model = getattr(import_module('app.models.' + model_name), class_name)
query_string = request.args.get('query')
query_string = re.sub('[^0-9a-zA-Z ]+', '*', query_string).lower()
if not query_string:
return 'Query is missing', 400
query = Model.query.filter(Model.search.contains(query_string))
return jsonify(data=[q.serialize() for q in query.all()])
<commit_msg>Add support to search for word in search api<commit_after># -*- coding: utf-8 -*-
import sys
from flask import Blueprint, jsonify, request
from importlib import import_module
from unicodedata import normalize
reload(sys)
sys.setdefaultencoding('utf8')
def remove_accents(txt):
return normalize('NFKD', txt.decode('utf-8')).encode('ASCII','ignore')
blueprint = Blueprint('search_api', __name__, url_prefix='/search')
@blueprint.route('/<string:model>')
def api(model):
global Model
class_name = model.title() + 'Search'
model_name = model + '_search'
Model = getattr(import_module('app.models.' + model_name), class_name)
query_string = request.args.get('query')
query_string = remove_accents(query_string).lower()
if not query_string:
return 'Query is missing', 400
query = Model.query
for word in query_string.split(' '):
query = query.filter(Model.search.contains(word))
return jsonify(data=[q.serialize() for q in query.all()])
|
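The key helper is ``remove_accents``: NFKD normalization splits each accented character into a base character plus a combining mark, and the ASCII encode with ``'ignore'`` then drops the marks. The original targets Python 2 (hence the ``decode`` and ``setdefaultencoding`` calls); the same idea on Python 3 looks like this:
from unicodedata import normalize

def remove_accents(txt):
    # 'São Paulo' -> NFKD -> 'Sa' + combining tilde + 'o Paulo' -> 'Sao Paulo'
    return normalize('NFKD', txt).encode('ASCII', 'ignore').decode('ASCII')

assert remove_accents('São Paulo') == 'Sao Paulo'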
2f7ead81f6820f0c4f47a3334ed6bf418c02fe9d
|
simpleseo/templatetags/seo.py
|
simpleseo/templatetags/seo.py
|
from django.template import Library
from simpleseo import settings
from simpleseo.models import SeoMetadata
register = Library()
@register.filter
def single_quotes(description):
return description.replace('\"', '\'')
@register.inclusion_tag('simpleseo/metadata.html', takes_context=True)
def get_seo(context):
request = context['request']
lang_code = request.LANGUAGE_CODE
path = request.path
try:
metadata = SeoMetadata.objects.get(path=path, lang_code=lang_code)
except SeoMetadata.DoesNotExist:
metadata = None
if metadata is None:
return {'title': settings.FALLBACK_TITLE, 'description': settings.FALLBACK_DESCRIPTION}
return {'title': metadata.title, 'description': metadata.description}
|
from django.template import Library
from django.utils.translation import get_language
from simpleseo import settings
from simpleseo.models import SeoMetadata
register = Library()
@register.filter
def single_quotes(description):
return description.replace('\"', '\'')
@register.inclusion_tag('simpleseo/metadata.html', takes_context=True)
def get_seo(context):
lang_code = get_language()[:2]
path = context['request'].path
try:
metadata = SeoMetadata.objects.get(path=path, lang_code=lang_code)
except SeoMetadata.DoesNotExist:
metadata = None
if metadata is None:
return {'title': settings.FALLBACK_TITLE,
'description': settings.FALLBACK_DESCRIPTION}
return {'title': metadata.title, 'description': metadata.description}
@register.simple_tag(takes_context=True)
def get_seo_title(context):
return get_seo(context)['title']
@register.simple_tag(takes_context=True)
def get_seo_description(context):
return get_seo(context)['description']
|
Add simple tags for title and description
|
Add simple tags for title and description
|
Python
|
bsd-3-clause
|
AMongeMoreno/django-painless-seo,AMongeMoreno/django-painless-seo,Glamping-Hub/django-painless-seo,Glamping-Hub/django-painless-seo
|
from django.template import Library
from simpleseo import settings
from simpleseo.models import SeoMetadata
register = Library()
@register.filter
def single_quotes(description):
return description.replace('\"', '\'')
@register.inclusion_tag('simpleseo/metadata.html', takes_context=True)
def get_seo(context):
request = context['request']
lang_code = request.LANGUAGE_CODE
path = request.path
try:
metadata = SeoMetadata.objects.get(path=path, lang_code=lang_code)
except SeoMetadata.DoesNotExist:
metadata = None
if metadata is None:
return {'title': settings.FALLBACK_TITLE, 'description': settings.FALLBACK_DESCRIPTION}
return {'title': metadata.title, 'description': metadata.description}
Add simple tags for title and description
|
from django.template import Library
from django.utils.translation import get_language
from simpleseo import settings
from simpleseo.models import SeoMetadata
register = Library()
@register.filter
def single_quotes(description):
return description.replace('\"', '\'')
@register.inclusion_tag('simpleseo/metadata.html', takes_context=True)
def get_seo(context):
lang_code = get_language()[:2]
path = context['request'].path
try:
metadata = SeoMetadata.objects.get(path=path, lang_code=lang_code)
except SeoMetadata.DoesNotExist:
metadata = None
if metadata is None:
return {'title': settings.FALLBACK_TITLE,
'description': settings.FALLBACK_DESCRIPTION}
return {'title': metadata.title, 'description': metadata.description}
@register.simple_tag(takes_context=True)
def get_seo_title(context):
return get_seo(context)['title']
@register.simple_tag(takes_context=True)
def get_seo_description(context):
return get_seo(context)['description']
|
<commit_before>from django.template import Library
from simpleseo import settings
from simpleseo.models import SeoMetadata
register = Library()
@register.filter
def single_quotes(description):
return description.replace('\"', '\'')
@register.inclusion_tag('simpleseo/metadata.html', takes_context=True)
def get_seo(context):
request = context['request']
lang_code = request.LANGUAGE_CODE
path = request.path
try:
metadata = SeoMetadata.objects.get(path=path, lang_code=lang_code)
except SeoMetadata.DoesNotExist:
metadata = None
if metadata is None:
return {'title': settings.FALLBACK_TITLE, 'description': settings.FALLBACK_DESCRIPTION}
return {'title': metadata.title, 'description': metadata.description}
<commit_msg>Add simple tags for title and description<commit_after>
|
from django.template import Library
from django.utils.translation import get_language
from simpleseo import settings
from simpleseo.models import SeoMetadata
register = Library()
@register.filter
def single_quotes(description):
return description.replace('\"', '\'')
@register.inclusion_tag('simpleseo/metadata.html', takes_context=True)
def get_seo(context):
lang_code = get_language()[:2]
path = context['request'].path
try:
metadata = SeoMetadata.objects.get(path=path, lang_code=lang_code)
except SeoMetadata.DoesNotExist:
metadata = None
if metadata is None:
return {'title': settings.FALLBACK_TITLE,
'description': settings.FALLBACK_DESCRIPTION}
return {'title': metadata.title, 'description': metadata.description}
@register.simple_tag(takes_context=True)
def get_seo_title(context):
return get_seo(context)['title']
@register.simple_tag(takes_context=True)
def get_seo_description(context):
return get_seo(context)['description']
|
from django.template import Library
from simpleseo import settings
from simpleseo.models import SeoMetadata
register = Library()
@register.filter
def single_quotes(description):
return description.replace('\"', '\'')
@register.inclusion_tag('simpleseo/metadata.html', takes_context=True)
def get_seo(context):
request = context['request']
lang_code = request.LANGUAGE_CODE
path = request.path
try:
metadata = SeoMetadata.objects.get(path=path, lang_code=lang_code)
except SeoMetadata.DoesNotExist:
metadata = None
if metadata is None:
return {'title': settings.FALLBACK_TITLE, 'description': settings.FALLBACK_DESCRIPTION}
return {'title': metadata.title, 'description': metadata.description}
Add simple tags for title and description
from django.template import Library
from django.utils.translation import get_language
from simpleseo import settings
from simpleseo.models import SeoMetadata
register = Library()
@register.filter
def single_quotes(description):
return description.replace('\"', '\'')
@register.inclusion_tag('simpleseo/metadata.html', takes_context=True)
def get_seo(context):
lang_code = get_language()[:2]
path = context['request'].path
try:
metadata = SeoMetadata.objects.get(path=path, lang_code=lang_code)
except SeoMetadata.DoesNotExist:
metadata = None
if metadata is None:
return {'title': settings.FALLBACK_TITLE,
'description': settings.FALLBACK_DESCRIPTION}
return {'title': metadata.title, 'description': metadata.description}
@register.simple_tag(takes_context=True)
def get_seo_title(context):
return get_seo(context)['title']
@register.simple_tag(takes_context=True)
def get_seo_description(context):
return get_seo(context)['description']
|
<commit_before>from django.template import Library
from simpleseo import settings
from simpleseo.models import SeoMetadata
register = Library()
@register.filter
def single_quotes(description):
return description.replace('\"', '\'')
@register.inclusion_tag('simpleseo/metadata.html', takes_context=True)
def get_seo(context):
request = context['request']
lang_code = request.LANGUAGE_CODE
path = request.path
try:
metadata = SeoMetadata.objects.get(path=path, lang_code=lang_code)
except SeoMetadata.DoesNotExist:
metadata = None
if metadata is None:
return {'title': settings.FALLBACK_TITLE, 'description': settings.FALLBACK_DESCRIPTION}
return {'title': metadata.title, 'description': metadata.description}
<commit_msg>Add simple tags for title and description<commit_after>from django.template import Library
from django.utils.translation import get_language
from simpleseo import settings
from simpleseo.models import SeoMetadata
register = Library()
@register.filter
def single_quotes(description):
return description.replace('\"', '\'')
@register.inclusion_tag('simpleseo/metadata.html', takes_context=True)
def get_seo(context):
lang_code = get_language()[:2]
path = context['request'].path
try:
metadata = SeoMetadata.objects.get(path=path, lang_code=lang_code)
except SeoMetadata.DoesNotExist:
metadata = None
if metadata is None:
return {'title': settings.FALLBACK_TITLE,
'description': settings.FALLBACK_DESCRIPTION}
return {'title': metadata.title, 'description': metadata.description}
@register.simple_tag(takes_context=True)
def get_seo_title(context):
return get_seo(context)['title']
@register.simple_tag(takes_context=True)
def get_seo_description(context):
return get_seo(context)['description']
|
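The tag pair added above just projects single values out of `get_seo`'s lookup-with-fallback. A standalone sketch of that pattern, with invented dicts standing in for `SeoMetadata` and the settings fallbacks:
FALLBACK = {'title': 'My site', 'description': 'Default description'}
METADATA = {('/about/', 'en'): {'title': 'About us',
                                'description': 'Who we are'}}

def get_seo(path, lang_code):
    # SeoMetadata.objects.get(...) plus the DoesNotExist fallback, as a dict lookup
    return METADATA.get((path, lang_code), FALLBACK)

def get_seo_title(path, lang_code):
    return get_seo(path, lang_code)['title']

assert get_seo_title('/about/', 'en') == 'About us'
assert get_seo_title('/missing/', 'en') == 'My site'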
4f4522bfa969a823a240a6ce16bcec395da06cf2
|
src/poliastro/twobody/decorators.py
|
src/poliastro/twobody/decorators.py
|
"""Decorators.
"""
from functools import wraps
from astropy import units as u
from poliastro.bodies import Body
from poliastro.twobody.rv import RVState
u.kms = u.km / u.s
u.km3s2 = u.km ** 3 / u.s ** 2
def state_from_vector(func):
@wraps(func)
def wrapper(t, u_, k, *args, **kwargs):
r, v = u_[:3], u_[3:]
ss = RVState(Body(k * u.km3s2), r * u.km, v * u.kms)
return func(t, ss, r, v, *args, **kwargs)
return wrapper
|
"""Decorators.
"""
from functools import wraps
from astropy import units as u
from poliastro.bodies import Body
from poliastro.twobody.rv import RVState
u.kms = u.km / u.s
u.km3s2 = u.km ** 3 / u.s ** 2
def state_from_vector(func):
@wraps(func)
def wrapper(t, u_, k, *args, **kwargs):
r, v = u_[:3], u_[3:]
ss = RVState(Body(k * u.km3s2), r * u.km, v * u.kms)
return func(t, ss, *args, **kwargs)
return wrapper
|
Remove extra arguments from decorated function
|
Remove extra arguments from decorated function
|
Python
|
mit
|
anhiga/poliastro,poliastro/poliastro,Juanlu001/poliastro,newlawrence/poliastro,newlawrence/poliastro,anhiga/poliastro,newlawrence/poliastro,Juanlu001/poliastro,anhiga/poliastro,Juanlu001/poliastro
|
"""Decorators.
"""
from functools import wraps
from astropy import units as u
from poliastro.bodies import Body
from poliastro.twobody.rv import RVState
u.kms = u.km / u.s
u.km3s2 = u.km ** 3 / u.s ** 2
def state_from_vector(func):
@wraps(func)
def wrapper(t, u_, k, *args, **kwargs):
r, v = u_[:3], u_[3:]
ss = RVState(Body(k * u.km3s2), r * u.km, v * u.kms)
return func(t, ss, r, v, *args, **kwargs)
return wrapper
Remove extra arguments from decorated function
|
"""Decorators.
"""
from functools import wraps
from astropy import units as u
from poliastro.bodies import Body
from poliastro.twobody.rv import RVState
u.kms = u.km / u.s
u.km3s2 = u.km ** 3 / u.s ** 2
def state_from_vector(func):
@wraps(func)
def wrapper(t, u_, k, *args, **kwargs):
r, v = u_[:3], u_[3:]
ss = RVState(Body(k * u.km3s2), r * u.km, v * u.kms)
return func(t, ss, *args, **kwargs)
return wrapper
|
<commit_before>"""Decorators.
"""
from functools import wraps
from astropy import units as u
from poliastro.bodies import Body
from poliastro.twobody.rv import RVState
u.kms = u.km / u.s
u.km3s2 = u.km ** 3 / u.s ** 2
def state_from_vector(func):
@wraps(func)
def wrapper(t, u_, k, *args, **kwargs):
r, v = u_[:3], u_[3:]
ss = RVState(Body(k * u.km3s2), r * u.km, v * u.kms)
return func(t, ss, r, v, *args, **kwargs)
return wrapper
<commit_msg>Remove extra arguments from decorated function<commit_after>
|
"""Decorators.
"""
from functools import wraps
from astropy import units as u
from poliastro.bodies import Body
from poliastro.twobody.rv import RVState
u.kms = u.km / u.s
u.km3s2 = u.km ** 3 / u.s ** 2
def state_from_vector(func):
@wraps(func)
def wrapper(t, u_, k, *args, **kwargs):
r, v = u_[:3], u_[3:]
ss = RVState(Body(k * u.km3s2), r * u.km, v * u.kms)
return func(t, ss, *args, **kwargs)
return wrapper
|
"""Decorators.
"""
from functools import wraps
from astropy import units as u
from poliastro.bodies import Body
from poliastro.twobody.rv import RVState
u.kms = u.km / u.s
u.km3s2 = u.km ** 3 / u.s ** 2
def state_from_vector(func):
@wraps(func)
def wrapper(t, u_, k, *args, **kwargs):
r, v = u_[:3], u_[3:]
ss = RVState(Body(k * u.km3s2), r * u.km, v * u.kms)
return func(t, ss, r, v, *args, **kwargs)
return wrapper
Remove extra arguments from decorated function
"""Decorators.
"""
from functools import wraps
from astropy import units as u
from poliastro.bodies import Body
from poliastro.twobody.rv import RVState
u.kms = u.km / u.s
u.km3s2 = u.km ** 3 / u.s ** 2
def state_from_vector(func):
@wraps(func)
def wrapper(t, u_, k, *args, **kwargs):
r, v = u_[:3], u_[3:]
ss = RVState(Body(k * u.km3s2), r * u.km, v * u.kms)
return func(t, ss, *args, **kwargs)
return wrapper
|
<commit_before>"""Decorators.
"""
from functools import wraps
from astropy import units as u
from poliastro.bodies import Body
from poliastro.twobody.rv import RVState
u.kms = u.km / u.s
u.km3s2 = u.km ** 3 / u.s ** 2
def state_from_vector(func):
@wraps(func)
def wrapper(t, u_, k, *args, **kwargs):
r, v = u_[:3], u_[3:]
ss = RVState(Body(k * u.km3s2), r * u.km, v * u.kms)
return func(t, ss, r, v, *args, **kwargs)
return wrapper
<commit_msg>Remove extra arguments from decorated function<commit_after>"""Decorators.
"""
from functools import wraps
from astropy import units as u
from poliastro.bodies import Body
from poliastro.twobody.rv import RVState
u.kms = u.km / u.s
u.km3s2 = u.km ** 3 / u.s ** 2
def state_from_vector(func):
@wraps(func)
def wrapper(t, u_, k, *args, **kwargs):
r, v = u_[:3], u_[3:]
ss = RVState(Body(k * u.km3s2), r * u.km, v * u.kms)
return func(t, ss, *args, **kwargs)
return wrapper
|
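Under the new wrapper the decorated function sees only `(t, ss, *args)`; a hedged sketch of the call shape, assuming a poliastro version contemporary with this commit (the function body and numbers are invented):
from poliastro.twobody.decorators import state_from_vector

@state_from_vector
def print_radius(t, ss):
    # ss is the RVState built by the wrapper; no separate r/v arguments
    print(t, ss.r)

# u_ packs position (km) and velocity (km/s); k is Earth's mu in km^3/s^2
print_radius(0.0, [7000, 0, 0, 0, 7.5, 0], 398600.4418)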
1fca398ce977dbdcb0bcb8aec953c3e6bd7fd320
|
actions/aws_decrypt_password_data.py
|
actions/aws_decrypt_password_data.py
|
#!/usr/bin/env python
import base64
import rsa
import six
from st2common.runners.base_action import Action
class AwsDecryptPassworData(Action):
def run(self, keyfile, password_data):
# copied from:
# https://github.com/aws/aws-cli/blob/master/awscli/customizations/ec2/decryptpassword.py#L96-L122
self.logger.debug("Decrypting password data using: %s", keyfile)
value = password_data
if not value:
return ''
# Hack because somewhere in the Mistral parameter "publish" pipeline, we
# strip trailing and leading whitespace from a string which results in
# an invalid base64 string
if not value.startswith('\r\n'):
value = '\r\n' + value
if not value.endswith('\r\n'):
value = value + '\r\n'
self.logger.debug('Encrypted value: "%s"' % (value))
value = base64.b64decode(value)
try:
with open(keyfile) as pk_file:
pk_contents = pk_file.read()
private_key = rsa.PrivateKey.load_pkcs1(six.b(pk_contents))
value = rsa.decrypt(value, private_key)
return value.decode('utf-8')
except Exception:
msg = ('Unable to decrypt password data using '
'provided private key file: {}').format(keyfile)
self.logger.debug(msg, exc_info=True)
raise ValueError(msg)
|
#!/usr/bin/env python
import base64
import rsa
import six
from st2common.runners.base_action import Action
class AwsDecryptPassworData(Action):
def run(self, keyfile, password_data):
# copied from:
# https://github.com/aws/aws-cli/blob/master/awscli/customizations/ec2/decryptpassword.py#L96-L122
self.logger.debug("Decrypting password data using: %s", keyfile)
value = password_data
if not value:
return ''
# Note: Somewhere in the param transformation pipeline line break and
        # carriage return characters get messed up
value = value.strip('\\r').strip('\\n')
self.logger.debug('Encrypted value: "%s"' % (value))
value = base64.b64decode(value)
try:
with open(keyfile) as pk_file:
pk_contents = pk_file.read()
private_key = rsa.PrivateKey.load_pkcs1(six.b(pk_contents))
value = rsa.decrypt(value, private_key)
return value.decode('utf-8')
except Exception:
msg = ('Unable to decrypt password data using '
'provided private key file: {}').format(keyfile)
self.logger.debug(msg, exc_info=True)
raise ValueError(msg)
|
Remove broken leading and trailing characters.
|
Remove broken leading and trailing characters.
|
Python
|
apache-2.0
|
StackStorm/st2cd,StackStorm/st2cd
|
#!/usr/bin/env python
import base64
import rsa
import six
from st2common.runners.base_action import Action
class AwsDecryptPassworData(Action):
def run(self, keyfile, password_data):
# copied from:
# https://github.com/aws/aws-cli/blob/master/awscli/customizations/ec2/decryptpassword.py#L96-L122
self.logger.debug("Decrypting password data using: %s", keyfile)
value = password_data
if not value:
return ''
# Hack because somewhere in the Mistral parameter "publish" pipeline, we
# strip trailing and leading whitespace from a string which results in
# an invalid base64 string
if not value.startswith('\r\n'):
value = '\r\n' + value
if not value.endswith('\r\n'):
value = value + '\r\n'
self.logger.debug('Encrypted value: "%s"' % (value))
value = base64.b64decode(value)
try:
with open(keyfile) as pk_file:
pk_contents = pk_file.read()
private_key = rsa.PrivateKey.load_pkcs1(six.b(pk_contents))
value = rsa.decrypt(value, private_key)
return value.decode('utf-8')
except Exception:
msg = ('Unable to decrypt password data using '
'provided private key file: {}').format(keyfile)
self.logger.debug(msg, exc_info=True)
raise ValueError(msg)
Remove broken leading and trailing characters.
|
#!/usr/bin/env python
import base64
import rsa
import six
from st2common.runners.base_action import Action
class AwsDecryptPassworData(Action):
def run(self, keyfile, password_data):
# copied from:
# https://github.com/aws/aws-cli/blob/master/awscli/customizations/ec2/decryptpassword.py#L96-L122
self.logger.debug("Decrypting password data using: %s", keyfile)
value = password_data
if not value:
return ''
# Note: Somewhere in the param transformation pipeline line break and
        # carriage return characters get messed up
value = value.strip('\\r').strip('\\n')
self.logger.debug('Encrypted value: "%s"' % (value))
value = base64.b64decode(value)
try:
with open(keyfile) as pk_file:
pk_contents = pk_file.read()
private_key = rsa.PrivateKey.load_pkcs1(six.b(pk_contents))
value = rsa.decrypt(value, private_key)
return value.decode('utf-8')
except Exception:
msg = ('Unable to decrypt password data using '
'provided private key file: {}').format(keyfile)
self.logger.debug(msg, exc_info=True)
raise ValueError(msg)
|
<commit_before>#!/usr/bin/env python
import base64
import rsa
import six
from st2common.runners.base_action import Action
class AwsDecryptPassworData(Action):
def run(self, keyfile, password_data):
# copied from:
# https://github.com/aws/aws-cli/blob/master/awscli/customizations/ec2/decryptpassword.py#L96-L122
self.logger.debug("Decrypting password data using: %s", keyfile)
value = password_data
if not value:
return ''
# Hack because somewhere in the Mistral parameter "publish" pipeline, we
# strip trailing and leading whitespace from a string which results in
# an invalid base64 string
if not value.startswith('\r\n'):
value = '\r\n' + value
if not value.endswith('\r\n'):
value = value + '\r\n'
self.logger.debug('Encrypted value: "%s"' % (value))
value = base64.b64decode(value)
try:
with open(keyfile) as pk_file:
pk_contents = pk_file.read()
private_key = rsa.PrivateKey.load_pkcs1(six.b(pk_contents))
value = rsa.decrypt(value, private_key)
return value.decode('utf-8')
except Exception:
msg = ('Unable to decrypt password data using '
'provided private key file: {}').format(keyfile)
self.logger.debug(msg, exc_info=True)
raise ValueError(msg)
<commit_msg>Remove broken leading and trailing characters.<commit_after>
|
#!/usr/bin/env python
import base64
import rsa
import six
from st2common.runners.base_action import Action
class AwsDecryptPassworData(Action):
def run(self, keyfile, password_data):
# copied from:
# https://github.com/aws/aws-cli/blob/master/awscli/customizations/ec2/decryptpassword.py#L96-L122
self.logger.debug("Decrypting password data using: %s", keyfile)
value = password_data
if not value:
return ''
# Note: Somewhere in the param transformation pipeline line break and
        # carriage return characters get messed up
value = value.strip('\\r').strip('\\n')
self.logger.debug('Encrypted value: "%s"' % (value))
value = base64.b64decode(value)
try:
with open(keyfile) as pk_file:
pk_contents = pk_file.read()
private_key = rsa.PrivateKey.load_pkcs1(six.b(pk_contents))
value = rsa.decrypt(value, private_key)
return value.decode('utf-8')
except Exception:
msg = ('Unable to decrypt password data using '
'provided private key file: {}').format(keyfile)
self.logger.debug(msg, exc_info=True)
raise ValueError(msg)
|
#!/usr/bin/env python
import base64
import rsa
import six
from st2common.runners.base_action import Action
class AwsDecryptPassworData(Action):
def run(self, keyfile, password_data):
# copied from:
# https://github.com/aws/aws-cli/blob/master/awscli/customizations/ec2/decryptpassword.py#L96-L122
self.logger.debug("Decrypting password data using: %s", keyfile)
value = password_data
if not value:
return ''
# Hack because somewhere in the Mistral parameter "publish" pipeline, we
# strip trailing and leading whitespace from a string which results in
# an invalid base64 string
if not value.startswith('\r\n'):
value = '\r\n' + value
if not value.endswith('\r\n'):
value = value + '\r\n'
self.logger.debug('Encrypted value: "%s"' % (value))
value = base64.b64decode(value)
try:
with open(keyfile) as pk_file:
pk_contents = pk_file.read()
private_key = rsa.PrivateKey.load_pkcs1(six.b(pk_contents))
value = rsa.decrypt(value, private_key)
return value.decode('utf-8')
except Exception:
msg = ('Unable to decrypt password data using '
'provided private key file: {}').format(keyfile)
self.logger.debug(msg, exc_info=True)
raise ValueError(msg)
Remove broken leading and trailing characters.
#!/usr/bin/env python
import base64
import rsa
import six
from st2common.runners.base_action import Action
class AwsDecryptPassworData(Action):
def run(self, keyfile, password_data):
# copied from:
# https://github.com/aws/aws-cli/blob/master/awscli/customizations/ec2/decryptpassword.py#L96-L122
self.logger.debug("Decrypting password data using: %s", keyfile)
value = password_data
if not value:
return ''
# Note: Somewhere in the param transformation pipeline line break and
        # carriage return characters get messed up
value = value.strip('\\r').strip('\\n')
self.logger.debug('Encrypted value: "%s"' % (value))
value = base64.b64decode(value)
try:
with open(keyfile) as pk_file:
pk_contents = pk_file.read()
private_key = rsa.PrivateKey.load_pkcs1(six.b(pk_contents))
value = rsa.decrypt(value, private_key)
return value.decode('utf-8')
except Exception:
msg = ('Unable to decrypt password data using '
'provided private key file: {}').format(keyfile)
self.logger.debug(msg, exc_info=True)
raise ValueError(msg)
|
<commit_before>#!/usr/bin/env python
import base64
import rsa
import six
from st2common.runners.base_action import Action
class AwsDecryptPassworData(Action):
def run(self, keyfile, password_data):
# copied from:
# https://github.com/aws/aws-cli/blob/master/awscli/customizations/ec2/decryptpassword.py#L96-L122
self.logger.debug("Decrypting password data using: %s", keyfile)
value = password_data
if not value:
return ''
# Hack because somewhere in the Mistral parameter "publish" pipeline, we
# strip trailing and leading whitespace from a string which results in
# an invalid base64 string
if not value.startswith('\r\n'):
value = '\r\n' + value
if not value.endswith('\r\n'):
value = value + '\r\n'
self.logger.debug('Encrypted value: "%s"' % (value))
value = base64.b64decode(value)
try:
with open(keyfile) as pk_file:
pk_contents = pk_file.read()
private_key = rsa.PrivateKey.load_pkcs1(six.b(pk_contents))
value = rsa.decrypt(value, private_key)
return value.decode('utf-8')
except Exception:
msg = ('Unable to decrypt password data using '
'provided private key file: {}').format(keyfile)
self.logger.debug(msg, exc_info=True)
raise ValueError(msg)
<commit_msg>Remove broken leading and trailing characters.<commit_after>#!/usr/bin/env python
import base64
import rsa
import six
from st2common.runners.base_action import Action
class AwsDecryptPassworData(Action):
def run(self, keyfile, password_data):
# copied from:
# https://github.com/aws/aws-cli/blob/master/awscli/customizations/ec2/decryptpassword.py#L96-L122
self.logger.debug("Decrypting password data using: %s", keyfile)
value = password_data
if not value:
return ''
# Note: Somewhere in the param transformation pipeline line break and
        # carriage return characters get messed up
value = value.strip('\\r').strip('\\n')
self.logger.debug('Encrypted value: "%s"' % (value))
value = base64.b64decode(value)
try:
with open(keyfile) as pk_file:
pk_contents = pk_file.read()
private_key = rsa.PrivateKey.load_pkcs1(six.b(pk_contents))
value = rsa.decrypt(value, private_key)
return value.decode('utf-8')
except Exception:
msg = ('Unable to decrypt password data using '
'provided private key file: {}').format(keyfile)
self.logger.debug(msg, exc_info=True)
raise ValueError(msg)
|
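Worth noting: `'\\r'` in the new code is the two characters backslash and `r`, and `str.strip` treats its argument as a set of characters, so the chained calls peel literal `\r`/`\n` text off the ends of the payload. A standalone illustration with an invented payload:
import base64

# the pipeline leaves literal backslash-r / backslash-n text around the base64
mangled = '\\r\\nSGVsbG8='
cleaned = mangled.strip('\\r').strip('\\n')  # strips the chars \ and r, then \ and n
assert cleaned == 'SGVsbG8='
assert base64.b64decode(cleaned) == b'Hello'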
64c9d2c53f0dc4c9ae92b5675248a8f11c2b4e9e
|
pyqode/python/managers/file.py
|
pyqode/python/managers/file.py
|
"""
Contains the python specific FileManager.
"""
import ast
import re
from pyqode.core.managers import FileManager
class PyFileManager(FileManager):
"""
Extends file manager to override detect_encoding. With python, we can
detect encoding by reading the two first lines of a file and extracting its
encoding tag.
"""
def detect_encoding(self, path):
"""
For the implementation of encoding definitions in Python, look at:
- http://www.python.org/dev/peps/pep-0263/
.. note:: code taken and adapted from
```jedi.common.source_to_unicode.detect_encoding```
"""
with open(path, 'rb') as file:
source = file.read()
# take care of line encodings (not in jedi)
source = source.replace(b'\r', b'')
source_str = str(source).replace('\\n', '\n')
byte_mark = ast.literal_eval(r"b'\xef\xbb\xbf'")
if source.startswith(byte_mark):
# UTF-8 byte-order mark
return 'utf-8'
first_two_lines = re.match(r'(?:[^\n]*\n){0,2}', source_str).group(0)
possible_encoding = re.search(r"coding[=:]\s*([-\w.]+)",
first_two_lines)
if possible_encoding:
return possible_encoding.group(1)
else:
return super().detect_encoding(path)
|
"""
Contains the python specific FileManager.
"""
import ast
import re
from pyqode.core.managers import FileManager
class PyFileManager(FileManager):
"""
Extends file manager to override detect_encoding. With python, we can
detect encoding by reading the two first lines of a file and extracting its
encoding tag.
"""
def detect_encoding(self, path):
"""
For the implementation of encoding definitions in Python, look at:
- http://www.python.org/dev/peps/pep-0263/
.. note:: code taken and adapted from
```jedi.common.source_to_unicode.detect_encoding```
"""
with open(path, 'rb') as file:
source = file.read()
# take care of line encodings (not in jedi)
source = source.replace(b'\r', b'')
source_str = str(source).replace('\\n', '\n')
byte_mark = ast.literal_eval(r"b'\xef\xbb\xbf'")
if source.startswith(byte_mark):
# UTF-8 byte-order mark
return 'utf-8'
first_two_lines = re.match(r'(?:[^\n]*\n){0,2}', source_str).group(0)
possible_encoding = re.search(r"coding[=:]\s*([-\w.]+)",
first_two_lines)
if possible_encoding:
return possible_encoding.group(1)
def open(self, path, encoding=None, use_cached_encoding=True):
if encoding is None:
encoding = self.detect_encoding(path)
super().open(path, encoding=encoding,
use_cached_encoding=use_cached_encoding)
|
Fix encoding detection in python (shebang line was not parsed anymore)
|
Fix encoding detection in python (shebang line was not parsed anymore)
|
Python
|
mit
|
pyQode/pyqode.python,mmolero/pyqode.python,pyQode/pyqode.python,zwadar/pyqode.python
|
"""
Contains the python specific FileManager.
"""
import ast
import re
from pyqode.core.managers import FileManager
class PyFileManager(FileManager):
"""
Extends file manager to override detect_encoding. With python, we can
detect encoding by reading the two first lines of a file and extracting its
encoding tag.
"""
def detect_encoding(self, path):
"""
For the implementation of encoding definitions in Python, look at:
- http://www.python.org/dev/peps/pep-0263/
.. note:: code taken and adapted from
```jedi.common.source_to_unicode.detect_encoding```
"""
with open(path, 'rb') as file:
source = file.read()
# take care of line encodings (not in jedi)
source = source.replace(b'\r', b'')
source_str = str(source).replace('\\n', '\n')
byte_mark = ast.literal_eval(r"b'\xef\xbb\xbf'")
if source.startswith(byte_mark):
# UTF-8 byte-order mark
return 'utf-8'
first_two_lines = re.match(r'(?:[^\n]*\n){0,2}', source_str).group(0)
possible_encoding = re.search(r"coding[=:]\s*([-\w.]+)",
first_two_lines)
if possible_encoding:
return possible_encoding.group(1)
else:
return super().detect_encoding(path)
Fix encoding detection in python (shebang line was not parsed anymore)
|
"""
Contains the python specific FileManager.
"""
import ast
import re
from pyqode.core.managers import FileManager
class PyFileManager(FileManager):
"""
Extends file manager to override detect_encoding. With python, we can
detect encoding by reading the two first lines of a file and extracting its
encoding tag.
"""
def detect_encoding(self, path):
"""
For the implementation of encoding definitions in Python, look at:
- http://www.python.org/dev/peps/pep-0263/
.. note:: code taken and adapted from
```jedi.common.source_to_unicode.detect_encoding```
"""
with open(path, 'rb') as file:
source = file.read()
# take care of line encodings (not in jedi)
source = source.replace(b'\r', b'')
source_str = str(source).replace('\\n', '\n')
byte_mark = ast.literal_eval(r"b'\xef\xbb\xbf'")
if source.startswith(byte_mark):
# UTF-8 byte-order mark
return 'utf-8'
first_two_lines = re.match(r'(?:[^\n]*\n){0,2}', source_str).group(0)
possible_encoding = re.search(r"coding[=:]\s*([-\w.]+)",
first_two_lines)
if possible_encoding:
return possible_encoding.group(1)
def open(self, path, encoding=None, use_cached_encoding=True):
if encoding is None:
encoding = self.detect_encoding(path)
super().open(path, encoding=encoding,
use_cached_encoding=use_cached_encoding)
|
<commit_before>"""
Contains the python specific FileManager.
"""
import ast
import re
from pyqode.core.managers import FileManager
class PyFileManager(FileManager):
"""
Extends file manager to override detect_encoding. With python, we can
detect encoding by reading the two first lines of a file and extracting its
encoding tag.
"""
def detect_encoding(self, path):
"""
For the implementation of encoding definitions in Python, look at:
- http://www.python.org/dev/peps/pep-0263/
.. note:: code taken and adapted from
```jedi.common.source_to_unicode.detect_encoding```
"""
with open(path, 'rb') as file:
source = file.read()
# take care of line encodings (not in jedi)
source = source.replace(b'\r', b'')
source_str = str(source).replace('\\n', '\n')
byte_mark = ast.literal_eval(r"b'\xef\xbb\xbf'")
if source.startswith(byte_mark):
# UTF-8 byte-order mark
return 'utf-8'
first_two_lines = re.match(r'(?:[^\n]*\n){0,2}', source_str).group(0)
possible_encoding = re.search(r"coding[=:]\s*([-\w.]+)",
first_two_lines)
if possible_encoding:
return possible_encoding.group(1)
else:
return super().detect_encoding(path)
<commit_msg>Fix encoding detection in python (shebang line was not parsed anymore)<commit_after>
|
"""
Contains the python specific FileManager.
"""
import ast
import re
from pyqode.core.managers import FileManager
class PyFileManager(FileManager):
"""
Extends file manager to override detect_encoding. With python, we can
detect encoding by reading the two first lines of a file and extracting its
encoding tag.
"""
def detect_encoding(self, path):
"""
For the implementation of encoding definitions in Python, look at:
- http://www.python.org/dev/peps/pep-0263/
.. note:: code taken and adapted from
```jedi.common.source_to_unicode.detect_encoding```
"""
with open(path, 'rb') as file:
source = file.read()
# take care of line encodings (not in jedi)
source = source.replace(b'\r', b'')
source_str = str(source).replace('\\n', '\n')
byte_mark = ast.literal_eval(r"b'\xef\xbb\xbf'")
if source.startswith(byte_mark):
# UTF-8 byte-order mark
return 'utf-8'
first_two_lines = re.match(r'(?:[^\n]*\n){0,2}', source_str).group(0)
possible_encoding = re.search(r"coding[=:]\s*([-\w.]+)",
first_two_lines)
if possible_encoding:
return possible_encoding.group(1)
def open(self, path, encoding=None, use_cached_encoding=True):
if encoding is None:
encoding = self.detect_encoding(path)
super().open(path, encoding=encoding,
use_cached_encoding=use_cached_encoding)
|
"""
Contains the python specific FileManager.
"""
import ast
import re
from pyqode.core.managers import FileManager
class PyFileManager(FileManager):
"""
Extends file manager to override detect_encoding. With python, we can
detect encoding by reading the two first lines of a file and extracting its
encoding tag.
"""
def detect_encoding(self, path):
"""
For the implementation of encoding definitions in Python, look at:
- http://www.python.org/dev/peps/pep-0263/
.. note:: code taken and adapted from
```jedi.common.source_to_unicode.detect_encoding```
"""
with open(path, 'rb') as file:
source = file.read()
# take care of line encodings (not in jedi)
source = source.replace(b'\r', b'')
source_str = str(source).replace('\\n', '\n')
byte_mark = ast.literal_eval(r"b'\xef\xbb\xbf'")
if source.startswith(byte_mark):
# UTF-8 byte-order mark
return 'utf-8'
first_two_lines = re.match(r'(?:[^\n]*\n){0,2}', source_str).group(0)
possible_encoding = re.search(r"coding[=:]\s*([-\w.]+)",
first_two_lines)
if possible_encoding:
return possible_encoding.group(1)
else:
return super().detect_encoding(path)
Fix encoding detection in python (shebang line was not parsed anymore)
"""
Contains the python specific FileManager.
"""
import ast
import re
from pyqode.core.managers import FileManager
class PyFileManager(FileManager):
"""
Extends file manager to override detect_encoding. With python, we can
detect encoding by reading the two first lines of a file and extracting its
encoding tag.
"""
def detect_encoding(self, path):
"""
For the implementation of encoding definitions in Python, look at:
- http://www.python.org/dev/peps/pep-0263/
.. note:: code taken and adapted from
```jedi.common.source_to_unicode.detect_encoding```
"""
with open(path, 'rb') as file:
source = file.read()
# take care of line encodings (not in jedi)
source = source.replace(b'\r', b'')
source_str = str(source).replace('\\n', '\n')
byte_mark = ast.literal_eval(r"b'\xef\xbb\xbf'")
if source.startswith(byte_mark):
# UTF-8 byte-order mark
return 'utf-8'
first_two_lines = re.match(r'(?:[^\n]*\n){0,2}', source_str).group(0)
possible_encoding = re.search(r"coding[=:]\s*([-\w.]+)",
first_two_lines)
if possible_encoding:
return possible_encoding.group(1)
def open(self, path, encoding=None, use_cached_encoding=True):
if encoding is None:
encoding = self.detect_encoding(path)
super().open(path, encoding=encoding,
use_cached_encoding=use_cached_encoding)
|
<commit_before>"""
Contains the python specific FileManager.
"""
import ast
import re
from pyqode.core.managers import FileManager
class PyFileManager(FileManager):
"""
Extends file manager to override detect_encoding. With python, we can
detect encoding by reading the two first lines of a file and extracting its
encoding tag.
"""
def detect_encoding(self, path):
"""
For the implementation of encoding definitions in Python, look at:
- http://www.python.org/dev/peps/pep-0263/
.. note:: code taken and adapted from
```jedi.common.source_to_unicode.detect_encoding```
"""
with open(path, 'rb') as file:
source = file.read()
# take care of line encodings (not in jedi)
source = source.replace(b'\r', b'')
source_str = str(source).replace('\\n', '\n')
byte_mark = ast.literal_eval(r"b'\xef\xbb\xbf'")
if source.startswith(byte_mark):
# UTF-8 byte-order mark
return 'utf-8'
first_two_lines = re.match(r'(?:[^\n]*\n){0,2}', source_str).group(0)
possible_encoding = re.search(r"coding[=:]\s*([-\w.]+)",
first_two_lines)
if possible_encoding:
return possible_encoding.group(1)
else:
return super().detect_encoding(path)
<commit_msg>Fix encoding detection in python (shebang line was not parsed anymore)<commit_after>"""
Contains the python specific FileManager.
"""
import ast
import re
from pyqode.core.managers import FileManager
class PyFileManager(FileManager):
"""
Extends file manager to override detect_encoding. With python, we can
detect encoding by reading the two first lines of a file and extracting its
encoding tag.
"""
def detect_encoding(self, path):
"""
For the implementation of encoding definitions in Python, look at:
- http://www.python.org/dev/peps/pep-0263/
.. note:: code taken and adapted from
```jedi.common.source_to_unicode.detect_encoding```
"""
with open(path, 'rb') as file:
source = file.read()
# take care of line encodings (not in jedi)
source = source.replace(b'\r', b'')
source_str = str(source).replace('\\n', '\n')
byte_mark = ast.literal_eval(r"b'\xef\xbb\xbf'")
if source.startswith(byte_mark):
# UTF-8 byte-order mark
return 'utf-8'
first_two_lines = re.match(r'(?:[^\n]*\n){0,2}', source_str).group(0)
possible_encoding = re.search(r"coding[=:]\s*([-\w.]+)",
first_two_lines)
if possible_encoding:
return possible_encoding.group(1)
def open(self, path, encoding=None, use_cached_encoding=True):
if encoding is None:
encoding = self.detect_encoding(path)
super().open(path, encoding=encoding,
use_cached_encoding=use_cached_encoding)
|
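The PEP 263 lookup at the core of `detect_encoding` can be exercised on its own; a standalone sketch with an invented source string (the shebang on line one is why two lines are scanned):
import re

source = "#!/usr/bin/env python\n# -*- coding: latin-1 -*-\nprint('hi')\n"
first_two_lines = re.match(r'(?:[^\n]*\n){0,2}', source).group(0)
match = re.search(r"coding[=:]\s*([-\w.]+)", first_two_lines)
assert match.group(1) == 'latin-1'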
7c5048ec810b5a0d4eb4d7b08469b8baa67e685f
|
util/regression-tests/config.py
|
util/regression-tests/config.py
|
# Location of Apache Error Log
log_location_linux = '/var/log/httpd/error_log'
log_location_windows = 'C:\Apache24\logs\error.log'
# Regular expression to filter for timestamp in Apache Error Log
#
# Default timestamp format: (example: [Thu Nov 09 09:04:38.912314 2017])
log_date_regex = "\[([A-Z][a-z]{2} [A-z][a-z]{2} \d{1,2} \d{1,2}\:\d{1,2}\:\d{1,2}\.\d+? \d{4})\]"
#
# Reverse format: (example: [2017-11-09 08:25:03.002312])
#log_date_regex = "\[([0-9-]{10} [0-9:.]{15})\]"
# Date format matching the timestamp format used by Apache
# in order to generate matching timestamp ourself
#
# Default timestamp format: (example: see above)
log_date_format = "%a %b %d %H:%M:%S.%f %Y"
#
# Reverse format: (example: see above)
#log_date_format = "%Y-%m-%d %H:%M:%S.%f"
|
# Location of Apache Error Log
log_location_linux = '/var/log/apache2/error.log'
log_location_windows = 'C:\Apache24\logs\error.log'
# Regular expression to filter for timestamp in Apache Error Log
#
# Default timestamp format: (example: [Thu Nov 09 09:04:38.912314 2017])
log_date_regex = "\[([A-Z][a-z]{2} [A-z][a-z]{2} \d{1,2} \d{1,2}\:\d{1,2}\:\d{1,2}\.\d+? \d{4})\]"
#
# Reverse format: (example: [2017-11-09 08:25:03.002312])
#log_date_regex = "\[([0-9-]{10} [0-9:.]{15})\]"
# Date format matching the timestamp format used by Apache
# in order to generate matching timestamp ourself
#
# Default timestamp format: (example: see above)
log_date_format = "%a %b %d %H:%M:%S.%f %Y"
#
# Reverse format: (example: see above)
#log_date_format = "%Y-%m-%d %H:%M:%S.%f"
|
Update log location to reflect ubuntu
|
Update log location to reflect ubuntu
|
Python
|
apache-2.0
|
coreruleset/coreruleset,SpiderLabs/owasp-modsecurity-crs,coreruleset/coreruleset,coreruleset/coreruleset,SpiderLabs/owasp-modsecurity-crs,coreruleset/coreruleset,SpiderLabs/owasp-modsecurity-crs,SpiderLabs/owasp-modsecurity-crs,SpiderLabs/owasp-modsecurity-crs,coreruleset/coreruleset,coreruleset/coreruleset,SpiderLabs/owasp-modsecurity-crs,SpiderLabs/owasp-modsecurity-crs
|
# Location of Apache Error Log
log_location_linux = '/var/log/httpd/error_log'
log_location_windows = 'C:\Apache24\logs\error.log'
# Regular expression to filter for timestamp in Apache Error Log
#
# Default timestamp format: (example: [Thu Nov 09 09:04:38.912314 2017])
log_date_regex = "\[([A-Z][a-z]{2} [A-z][a-z]{2} \d{1,2} \d{1,2}\:\d{1,2}\:\d{1,2}\.\d+? \d{4})\]"
#
# Reverse format: (example: [2017-11-09 08:25:03.002312])
#log_date_regex = "\[([0-9-]{10} [0-9:.]{15})\]"
# Date format matching the timestamp format used by Apache
# in order to generate matching timestamp ourself
#
# Default timestamp format: (example: see above)
log_date_format = "%a %b %d %H:%M:%S.%f %Y"
#
# Reverse format: (example: see above)
#log_date_format = "%Y-%m-%d %H:%M:%S.%f"
Update log location to reflect ubuntu
|
# Location of Apache Error Log
log_location_linux = '/var/log/apache2/error.log'
log_location_windows = 'C:\Apache24\logs\error.log'
# Regular expression to filter for timestamp in Apache Error Log
#
# Default timestamp format: (example: [Thu Nov 09 09:04:38.912314 2017])
log_date_regex = "\[([A-Z][a-z]{2} [A-z][a-z]{2} \d{1,2} \d{1,2}\:\d{1,2}\:\d{1,2}\.\d+? \d{4})\]"
#
# Reverse format: (example: [2017-11-09 08:25:03.002312])
#log_date_regex = "\[([0-9-]{10} [0-9:.]{15})\]"
# Date format matching the timestamp format used by Apache
# in order to generate matching timestamp ourself
#
# Default timestamp format: (example: see above)
log_date_format = "%a %b %d %H:%M:%S.%f %Y"
#
# Reverse format: (example: see above)
#log_date_format = "%Y-%m-%d %H:%M:%S.%f"
|
<commit_before># Location of Apache Error Log
log_location_linux = '/var/log/httpd/error_log'
log_location_windows = 'C:\Apache24\logs\error.log'
# Regular expression to filter for timestamp in Apache Error Log
#
# Default timestamp format: (example: [Thu Nov 09 09:04:38.912314 2017])
log_date_regex = "\[([A-Z][a-z]{2} [A-z][a-z]{2} \d{1,2} \d{1,2}\:\d{1,2}\:\d{1,2}\.\d+? \d{4})\]"
#
# Reverse format: (example: [2017-11-09 08:25:03.002312])
#log_date_regex = "\[([0-9-]{10} [0-9:.]{15})\]"
# Date format matching the timestamp format used by Apache
# in order to generate matching timestamp ourself
#
# Default timestamp format: (example: see above)
log_date_format = "%a %b %d %H:%M:%S.%f %Y"
#
# Reverse format: (example: see above)
#log_date_format = "%Y-%m-%d %H:%M:%S.%f"<commit_msg>Update log location to reflect ubuntu<commit_after>
|
# Location of Apache Error Log
log_location_linux = '/var/log/apache2/error.log'
log_location_windows = 'C:\Apache24\logs\error.log'
# Regular expression to filter for timestamp in Apache Error Log
#
# Default timestamp format: (example: [Thu Nov 09 09:04:38.912314 2017])
log_date_regex = "\[([A-Z][a-z]{2} [A-z][a-z]{2} \d{1,2} \d{1,2}\:\d{1,2}\:\d{1,2}\.\d+? \d{4})\]"
#
# Reverse format: (example: [2017-11-09 08:25:03.002312])
#log_date_regex = "\[([0-9-]{10} [0-9:.]{15})\]"
# Date format matching the timestamp format used by Apache
# in order to generate matching timestamp ourself
#
# Default timestamp format: (example: see above)
log_date_format = "%a %b %d %H:%M:%S.%f %Y"
#
# Reverse format: (example: see above)
#log_date_format = "%Y-%m-%d %H:%M:%S.%f"
|
# Location of Apache Error Log
log_location_linux = '/var/log/httpd/error_log'
log_location_windows = 'C:\Apache24\logs\error.log'
# Regular expression to filter for timestamp in Apache Error Log
#
# Default timestamp format: (example: [Thu Nov 09 09:04:38.912314 2017])
log_date_regex = "\[([A-Z][a-z]{2} [A-z][a-z]{2} \d{1,2} \d{1,2}\:\d{1,2}\:\d{1,2}\.\d+? \d{4})\]"
#
# Reverse format: (example: [2017-11-09 08:25:03.002312])
#log_date_regex = "\[([0-9-]{10} [0-9:.]{15})\]"
# Date format matching the timestamp format used by Apache
# in order to generate matching timestamp ourself
#
# Default timestamp format: (example: see above)
log_date_format = "%a %b %d %H:%M:%S.%f %Y"
#
# Reverse format: (example: see above)
#log_date_format = "%Y-%m-%d %H:%M:%S.%f"
Update log location to reflect ubuntu
# Location of Apache Error Log
log_location_linux = '/var/log/apache2/error.log'
log_location_windows = 'C:\Apache24\logs\error.log'
# Regular expression to filter for timestamp in Apache Error Log
#
# Default timestamp format: (example: [Thu Nov 09 09:04:38.912314 2017])
log_date_regex = "\[([A-Z][a-z]{2} [A-z][a-z]{2} \d{1,2} \d{1,2}\:\d{1,2}\:\d{1,2}\.\d+? \d{4})\]"
#
# Reverse format: (example: [2017-11-09 08:25:03.002312])
#log_date_regex = "\[([0-9-]{10} [0-9:.]{15})\]"
# Date format matching the timestamp format used by Apache
# in order to generate matching timestamp ourself
#
# Default timestamp format: (example: see above)
log_date_format = "%a %b %d %H:%M:%S.%f %Y"
#
# Reverse format: (example: see above)
#log_date_format = "%Y-%m-%d %H:%M:%S.%f"
|
<commit_before># Location of Apache Error Log
log_location_linux = '/var/log/httpd/error_log'
log_location_windows = 'C:\Apache24\logs\error.log'
# Regular expression to filter for timestamp in Apache Error Log
#
# Default timestamp format: (example: [Thu Nov 09 09:04:38.912314 2017])
log_date_regex = "\[([A-Z][a-z]{2} [A-z][a-z]{2} \d{1,2} \d{1,2}\:\d{1,2}\:\d{1,2}\.\d+? \d{4})\]"
#
# Reverse format: (example: [2017-11-09 08:25:03.002312])
#log_date_regex = "\[([0-9-]{10} [0-9:.]{15})\]"
# Date format matching the timestamp format used by Apache
# in order to generate matching timestamp ourself
#
# Default timestamp format: (example: see above)
log_date_format = "%a %b %d %H:%M:%S.%f %Y"
#
# Reverse format: (example: see above)
#log_date_format = "%Y-%m-%d %H:%M:%S.%f"<commit_msg>Update log location to reflect ubuntu<commit_after># Location of Apache Error Log
log_location_linux = '/var/log/apache2/error.log'
log_location_windows = 'C:\Apache24\logs\error.log'
# Regular expression to filter for timestamp in Apache Error Log
#
# Default timestamp format: (example: [Thu Nov 09 09:04:38.912314 2017])
log_date_regex = "\[([A-Z][a-z]{2} [A-z][a-z]{2} \d{1,2} \d{1,2}\:\d{1,2}\:\d{1,2}\.\d+? \d{4})\]"
#
# Reverse format: (example: [2017-11-09 08:25:03.002312])
#log_date_regex = "\[([0-9-]{10} [0-9:.]{15})\]"
# Date format matching the timestamp format used by Apache
# in order to generate matching timestamp ourself
#
# Default timestamp format: (example: see above)
log_date_format = "%a %b %d %H:%M:%S.%f %Y"
#
# Reverse format: (example: see above)
#log_date_format = "%Y-%m-%d %H:%M:%S.%f"
|
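A hedged, standalone sketch of how the regex and strptime format above cooperate on a sample Apache error-log line (the log line itself is invented):
import re
from datetime import datetime

log_date_regex = r"\[([A-Z][a-z]{2} [A-z][a-z]{2} \d{1,2} \d{1,2}\:\d{1,2}\:\d{1,2}\.\d+? \d{4})\]"
log_date_format = "%a %b %d %H:%M:%S.%f %Y"

line = "[Thu Nov 09 09:04:38.912314 2017] [core:notice] [pid 1234] sample message"
stamp = re.search(log_date_regex, line).group(1)
print(datetime.strptime(stamp, log_date_format))  # 2017-11-09 09:04:38.912314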
225abbf06472fe7afd15252ca446456c4caed0bb
|
contact/test_settings.py
|
contact/test_settings.py
|
# Only used for running the tests
import os
CONTACT_EMAILS = ['charlie@example.com']
DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3'}}
INSTALLED_APPS = ['contact', 'django.contrib.staticfiles']
MIDDLEWARE_CLASSES = []
ROOT_URLCONF = 'contact.test_urls'
SECRET_KEY = 'whatever'
STATIC_URL = '/static/'
TEMPLATE_CONTEXT_PROCESSORS = ['django.core.context_processors.static']
TEMPLATE_DIRS = [os.path.join(os.path.dirname(__file__), 'test_templates')]
|
# Only used for running the tests
import os
CONTACT_EMAILS = ['charlie@example.com']
DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3'}}
INSTALLED_APPS = ['contact', 'django.contrib.staticfiles']
ROOT_URLCONF = 'contact.test_urls'
SECRET_KEY = 'whatever'
STATIC_URL = '/static/'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(os.path.dirname(__file__), 'test_templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.static',
],
},
},
]
|
Update test settings for Django >= 1.8.
|
Update test settings for Django >= 1.8.
|
Python
|
bsd-3-clause
|
aaugustin/myks-contact,aaugustin/myks-contact
|
# Only used for running the tests
import os
CONTACT_EMAILS = ['charlie@example.com']
DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3'}}
INSTALLED_APPS = ['contact', 'django.contrib.staticfiles']
MIDDLEWARE_CLASSES = []
ROOT_URLCONF = 'contact.test_urls'
SECRET_KEY = 'whatever'
STATIC_URL = '/static/'
TEMPLATE_CONTEXT_PROCESSORS = ['django.core.context_processors.static']
TEMPLATE_DIRS = [os.path.join(os.path.dirname(__file__), 'test_templates')]
Update test settings for Django >= 1.8.
|
# Only used for running the tests
import os
CONTACT_EMAILS = ['charlie@example.com']
DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3'}}
INSTALLED_APPS = ['contact', 'django.contrib.staticfiles']
ROOT_URLCONF = 'contact.test_urls'
SECRET_KEY = 'whatever'
STATIC_URL = '/static/'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(os.path.dirname(__file__), 'test_templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.static',
],
},
},
]
|
<commit_before># Only used for running the tests
import os
CONTACT_EMAILS = ['charlie@example.com']
DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3'}}
INSTALLED_APPS = ['contact', 'django.contrib.staticfiles']
MIDDLEWARE_CLASSES = []
ROOT_URLCONF = 'contact.test_urls'
SECRET_KEY = 'whatever'
STATIC_URL = '/static/'
TEMPLATE_CONTEXT_PROCESSORS = ['django.core.context_processors.static']
TEMPLATE_DIRS = [os.path.join(os.path.dirname(__file__), 'test_templates')]
<commit_msg>Update test settings for Django >= 1.8.<commit_after>
|
# Only used for running the tests
import os
CONTACT_EMAILS = ['charlie@example.com']
DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3'}}
INSTALLED_APPS = ['contact', 'django.contrib.staticfiles']
ROOT_URLCONF = 'contact.test_urls'
SECRET_KEY = 'whatever'
STATIC_URL = '/static/'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(os.path.dirname(__file__), 'test_templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.static',
],
},
},
]
|
# Only used for running the tests
import os
CONTACT_EMAILS = ['charlie@example.com']
DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3'}}
INSTALLED_APPS = ['contact', 'django.contrib.staticfiles']
MIDDLEWARE_CLASSES = []
ROOT_URLCONF = 'contact.test_urls'
SECRET_KEY = 'whatever'
STATIC_URL = '/static/'
TEMPLATE_CONTEXT_PROCESSORS = ['django.core.context_processors.static']
TEMPLATE_DIRS = [os.path.join(os.path.dirname(__file__), 'test_templates')]
Update test settings for Django >= 1.8.
# Only used for running the tests
import os
CONTACT_EMAILS = ['charlie@example.com']
DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3'}}
INSTALLED_APPS = ['contact', 'django.contrib.staticfiles']
ROOT_URLCONF = 'contact.test_urls'
SECRET_KEY = 'whatever'
STATIC_URL = '/static/'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(os.path.dirname(__file__), 'test_templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.static',
],
},
},
]
|
<commit_before># Only used for running the tests
import os
CONTACT_EMAILS = ['charlie@example.com']
DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3'}}
INSTALLED_APPS = ['contact', 'django.contrib.staticfiles']
MIDDLEWARE_CLASSES = []
ROOT_URLCONF = 'contact.test_urls'
SECRET_KEY = 'whatever'
STATIC_URL = '/static/'
TEMPLATE_CONTEXT_PROCESSORS = ['django.core.context_processors.static']
TEMPLATE_DIRS = [os.path.join(os.path.dirname(__file__), 'test_templates')]
<commit_msg>Update test settings for Django >= 1.8.<commit_after># Only used for running the tests
import os
CONTACT_EMAILS = ['charlie@example.com']
DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3'}}
INSTALLED_APPS = ['contact', 'django.contrib.staticfiles']
ROOT_URLCONF = 'contact.test_urls'
SECRET_KEY = 'whatever'
STATIC_URL = '/static/'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(os.path.dirname(__file__), 'test_templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.static',
],
},
},
]
|
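These settings are consumed by pointing DJANGO_SETTINGS_MODULE at the module before running the app's tests; a hedged sketch, assuming Django >= 1.8 is installed and the package is importable:
import os
import django
from django.conf import settings
from django.test.utils import get_runner

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'contact.test_settings')
django.setup()
TestRunner = get_runner(settings)
failures = TestRunner().run_tests(['contact'])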
ed865984bb620fa13418bc5b45b12c63ddada21a
|
datafilters/views.py
|
datafilters/views.py
|
from django.views.generic.list import MultipleObjectMixin
__all__ = ('FilterFormMixin',)
class FilterFormMixin(MultipleObjectMixin):
"""
Mixin that adds filtering behaviour for Class Based Views.
Changed in a way that can play nicely with other CBV simply by overriding the get_queryset(self) and
get_context_data(self, **kwargs) method.
"""
filter_form_cls = None
use_filter_chaining = False
context_filterform_name = 'filterform'
def get_filter(self):
return self.filter_form_cls(self.request.GET,
runtime_context=self.get_runtime_context(),
use_filter_chaining=self.use_filter_chaining)
def get_queryset(self):
qs = super(FilterFormMixin, self).get_queryset()
qs = self.get_filter().filter(qs).distinct()
return qs
def get_context_data(self, **kwargs):
context = super(FilterFormMixin, self).get_context_data(**kwargs)
context[self.context_filterform_name] = self.get_filter()
return context
def get_runtime_context(self):
return {'user': self.request.user}
|
from django.views.generic.list import MultipleObjectMixin
__all__ = ('FilterFormMixin',)
class FilterFormMixin(MultipleObjectMixin):
"""
Mixin that adds filtering behaviour for Class Based Views.
Changed in a way that can play nicely with other CBV simply by overriding the get_queryset(self) and
get_context_data(self, **kwargs) method.
"""
filter_form_cls = None
use_filter_chaining = False
context_filterform_name = 'filterform'
def get_filter(self):
return self.filter_form_cls(self.request.GET,
runtime_context=self.get_runtime_context(),
use_filter_chaining=self.use_filter_chaining)
def get_queryset(self):
qs = super(FilterFormMixin, self).get_queryset()
filter_form = self.get_filter()
if filter_form.is_valid():
qs = filter_form.filter(qs).distinct()
return qs
def get_context_data(self, **kwargs):
context = super(FilterFormMixin, self).get_context_data(**kwargs)
context[self.context_filterform_name] = self.get_filter()
return context
def get_runtime_context(self):
return {'user': self.request.user}
|
Check filterform validity before calling filter()
|
Check filterform validity before calling filter()
|
Python
|
mit
|
zorainc/django-datafilters,zorainc/django-datafilters,freevoid/django-datafilters
|
from django.views.generic.list import MultipleObjectMixin
__all__ = ('FilterFormMixin',)
class FilterFormMixin(MultipleObjectMixin):
"""
Mixin that adds filtering behaviour for Class Based Views.
Changed in a way that can play nicely with other CBV simply by overriding the get_queryset(self) and
get_context_data(self, **kwargs) method.
"""
filter_form_cls = None
use_filter_chaining = False
context_filterform_name = 'filterform'
def get_filter(self):
return self.filter_form_cls(self.request.GET,
runtime_context=self.get_runtime_context(),
use_filter_chaining=self.use_filter_chaining)
def get_queryset(self):
qs = super(FilterFormMixin, self).get_queryset()
qs = self.get_filter().filter(qs).distinct()
return qs
def get_context_data(self, **kwargs):
context = super(FilterFormMixin, self).get_context_data(**kwargs)
context[self.context_filterform_name] = self.get_filter()
return context
def get_runtime_context(self):
return {'user': self.request.user}
Check filterform validity before calling filter()
|
from django.views.generic.list import MultipleObjectMixin
__all__ = ('FilterFormMixin',)
class FilterFormMixin(MultipleObjectMixin):
"""
Mixin that adds filtering behaviour for Class Based Views.
Changed in a way that can play nicely with other CBV simply by overriding the get_queryset(self) and
get_context_data(self, **kwargs) method.
"""
filter_form_cls = None
use_filter_chaining = False
context_filterform_name = 'filterform'
def get_filter(self):
return self.filter_form_cls(self.request.GET,
runtime_context=self.get_runtime_context(),
use_filter_chaining=self.use_filter_chaining)
def get_queryset(self):
qs = super(FilterFormMixin, self).get_queryset()
filter_form = self.get_filter()
if filter_form.is_valid():
qs = filter_form.filter(qs).distinct()
return qs
def get_context_data(self, **kwargs):
context = super(FilterFormMixin, self).get_context_data(**kwargs)
context[self.context_filterform_name] = self.get_filter()
return context
def get_runtime_context(self):
return {'user': self.request.user}
|
<commit_before>from django.views.generic.list import MultipleObjectMixin
__all__ = ('FilterFormMixin',)
class FilterFormMixin(MultipleObjectMixin):
"""
Mixin that adds filtering behaviour for Class Based Views.
Changed in a way that can play nicely with other CBV simply by overriding the get_queryset(self) and
get_context_data(self, **kwargs) method.
"""
filter_form_cls = None
use_filter_chaining = False
context_filterform_name = 'filterform'
def get_filter(self):
return self.filter_form_cls(self.request.GET,
runtime_context=self.get_runtime_context(),
use_filter_chaining=self.use_filter_chaining)
def get_queryset(self):
qs = super(FilterFormMixin, self).get_queryset()
qs = self.get_filter().filter(qs).distinct()
return qs
def get_context_data(self, **kwargs):
context = super(FilterFormMixin, self).get_context_data(**kwargs)
context[self.context_filterform_name] = self.get_filter()
return context
def get_runtime_context(self):
return {'user': self.request.user}
<commit_msg>Check filterform validity before calling filter()<commit_after>
|
from django.views.generic.list import MultipleObjectMixin
__all__ = ('FilterFormMixin',)
class FilterFormMixin(MultipleObjectMixin):
"""
Mixin that adds filtering behaviour for Class Based Views.
Changed in a way that can play nicely with other CBV simply by overriding the get_queryset(self) and
get_context_data(self, **kwargs) method.
"""
filter_form_cls = None
use_filter_chaining = False
context_filterform_name = 'filterform'
def get_filter(self):
return self.filter_form_cls(self.request.GET,
runtime_context=self.get_runtime_context(),
use_filter_chaining=self.use_filter_chaining)
def get_queryset(self):
qs = super(FilterFormMixin, self).get_queryset()
filter_form = self.get_filter()
if filter_form.is_valid():
qs = filter_form.filter(qs).distinct()
return qs
def get_context_data(self, **kwargs):
context = super(FilterFormMixin, self).get_context_data(**kwargs)
context[self.context_filterform_name] = self.get_filter()
return context
def get_runtime_context(self):
return {'user': self.request.user}
|
from django.views.generic.list import MultipleObjectMixin
__all__ = ('FilterFormMixin',)
class FilterFormMixin(MultipleObjectMixin):
"""
Mixin that adds filtering behaviour for Class Based Views.
Changed in a way that can play nicely with other CBV simply by overriding the get_queryset(self) and
get_context_data(self, **kwargs) method.
"""
filter_form_cls = None
use_filter_chaining = False
context_filterform_name = 'filterform'
def get_filter(self):
return self.filter_form_cls(self.request.GET,
runtime_context=self.get_runtime_context(),
use_filter_chaining=self.use_filter_chaining)
def get_queryset(self):
qs = super(FilterFormMixin, self).get_queryset()
qs = self.get_filter().filter(qs).distinct()
return qs
def get_context_data(self, **kwargs):
context = super(FilterFormMixin, self).get_context_data(**kwargs)
context[self.context_filterform_name] = self.get_filter()
return context
def get_runtime_context(self):
return {'user': self.request.user}
Check filterform validity before calling filter()
from django.views.generic.list import MultipleObjectMixin
__all__ = ('FilterFormMixin',)
class FilterFormMixin(MultipleObjectMixin):
"""
Mixin that adds filtering behaviour for Class Based Views.
Changed in a way that can play nicely with other CBV simply by overriding the get_queryset(self) and
get_context_data(self, **kwargs) method.
"""
filter_form_cls = None
use_filter_chaining = False
context_filterform_name = 'filterform'
def get_filter(self):
return self.filter_form_cls(self.request.GET,
runtime_context=self.get_runtime_context(),
use_filter_chaining=self.use_filter_chaining)
def get_queryset(self):
qs = super(FilterFormMixin, self).get_queryset()
filter_form = self.get_filter()
if filter_form.is_valid():
qs = filter_form.filter(qs).distinct()
return qs
def get_context_data(self, **kwargs):
context = super(FilterFormMixin, self).get_context_data(**kwargs)
context[self.context_filterform_name] = self.get_filter()
return context
def get_runtime_context(self):
return {'user': self.request.user}
|
<commit_before>from django.views.generic.list import MultipleObjectMixin
__all__ = ('FilterFormMixin',)
class FilterFormMixin(MultipleObjectMixin):
"""
Mixin that adds filtering behaviour for Class Based Views.
Changed in a way that can play nicely with other CBV simply by overriding the get_queryset(self) and
get_context_data(self, **kwargs) method.
"""
filter_form_cls = None
use_filter_chaining = False
context_filterform_name = 'filterform'
def get_filter(self):
return self.filter_form_cls(self.request.GET,
runtime_context=self.get_runtime_context(),
use_filter_chaining=self.use_filter_chaining)
def get_queryset(self):
qs = super(FilterFormMixin, self).get_queryset()
qs = self.get_filter().filter(qs).distinct()
return qs
def get_context_data(self, **kwargs):
context = super(FilterFormMixin, self).get_context_data(**kwargs)
context[self.context_filterform_name] = self.get_filter()
return context
def get_runtime_context(self):
return {'user': self.request.user}
<commit_msg>Check filterform validity before calling filter()<commit_after>from django.views.generic.list import MultipleObjectMixin
__all__ = ('FilterFormMixin',)
class FilterFormMixin(MultipleObjectMixin):
"""
Mixin that adds filtering behaviour for Class Based Views.
Changed in a way that can play nicely with other CBV simply by overriding the get_queryset(self) and
get_context_data(self, **kwargs) method.
"""
filter_form_cls = None
use_filter_chaining = False
context_filterform_name = 'filterform'
def get_filter(self):
return self.filter_form_cls(self.request.GET,
runtime_context=self.get_runtime_context(),
use_filter_chaining=self.use_filter_chaining)
def get_queryset(self):
qs = super(FilterFormMixin, self).get_queryset()
filter_form = self.get_filter()
if filter_form.is_valid():
qs = filter_form.filter(qs).distinct()
return qs
def get_context_data(self, **kwargs):
context = super(FilterFormMixin, self).get_context_data(**kwargs)
context[self.context_filterform_name] = self.get_filter()
return context
def get_runtime_context(self):
return {'user': self.request.user}
|
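A minimal sketch of wiring the mixin into a list view, assuming django-datafilters is installed; `myapp`, `Book`, and `BookFilterForm` are hypothetical names, not from the record:
from django.views.generic import ListView
from datafilters.views import FilterFormMixin

from myapp.models import Book            # hypothetical model
from myapp.forms import BookFilterForm   # hypothetical FilterSpec form

class BookListView(FilterFormMixin, ListView):
    model = Book
    filter_form_cls = BookFilterForm

# With the is_valid() guard above, a malformed querystring now yields the
# unfiltered queryset instead of an exception from filter().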
e9f68b041b67d4489f3c6e553dc9c8683ed46f8c
|
python/kindergarten-garden/garden.py
|
python/kindergarten-garden/garden.py
|
class Garden:
DEFAULT_STUDENTS = ("Alice Bob Charlie David Eve Fred Ginny "
"Harriet Ileana Joseph Kincaid Larry").split()
PLANTS = {'G': 'Grass',
'C': 'Clover',
'R': 'Radishes',
'V': 'Violets'}
def __init__(self, diagram, students = DEFAULT_STUDENTS):
self.diagram = diagram
self.rows = [list(row) for row in diagram.split()]
self.plant_rows = [[self.PLANTS[c] for c in row] for row in self.rows]
self.students = sorted(students)
def plants(self, name):
return self.plants_for_index(self.students.index(name))
# Dislike how these are hardcoded indices
def plants_for_index(self, i):
return [self.plant_rows[0][i * 2],
self.plant_rows[0][i * 2 + 1],
self.plant_rows[1][i * 2],
self.plant_rows[1][i * 2 + 1]]
|
class Garden:
DEFAULT_STUDENTS = ("Alice Bob Charlie David Eve Fred Ginny "
"Harriet Ileana Joseph Kincaid Larry").split()
PLANTS = {'G': 'Grass',
'C': 'Clover',
'R': 'Radishes',
'V': 'Violets'}
def __init__(self, diagram, students=DEFAULT_STUDENTS):
self.diagram = diagram
self.rows = [list(row) for row in diagram.split()]
self.plant_rows = [[self.PLANTS[c] for c in row] for row in self.rows]
self.students = sorted(students)
def plants(self, name):
return self.plants_for_index(self.students.index(name))
# Dislike how these are hardcoded indices
def plants_for_index(self, i):
return [self.plant_rows[0][i * 2],
self.plant_rows[0][i * 2 + 1],
self.plant_rows[1][i * 2],
self.plant_rows[1][i * 2 + 1]]
|
Remove spaces around '=' for default param
|
Remove spaces around '=' for default param
|
Python
|
mit
|
rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism
|
class Garden:
DEFAULT_STUDENTS = ("Alice Bob Charlie David Eve Fred Ginny "
"Harriet Ileana Joseph Kincaid Larry").split()
PLANTS = {'G': 'Grass',
'C': 'Clover',
'R': 'Radishes',
'V': 'Violets'}
def __init__(self, diagram, students = DEFAULT_STUDENTS):
self.diagram = diagram
self.rows = [list(row) for row in diagram.split()]
self.plant_rows = [[self.PLANTS[c] for c in row] for row in self.rows]
self.students = sorted(students)
def plants(self, name):
return self.plants_for_index(self.students.index(name))
# Dislike how these are hardcoded indices
def plants_for_index(self, i):
return [self.plant_rows[0][i * 2],
self.plant_rows[0][i * 2 + 1],
self.plant_rows[1][i * 2],
self.plant_rows[1][i * 2 + 1]]
Remove spaces around '=' for default param
|
class Garden:
DEFAULT_STUDENTS = ("Alice Bob Charlie David Eve Fred Ginny "
"Harriet Ileana Joseph Kincaid Larry").split()
PLANTS = {'G': 'Grass',
'C': 'Clover',
'R': 'Radishes',
'V': 'Violets'}
def __init__(self, diagram, students=DEFAULT_STUDENTS):
self.diagram = diagram
self.rows = [list(row) for row in diagram.split()]
self.plant_rows = [[self.PLANTS[c] for c in row] for row in self.rows]
self.students = sorted(students)
def plants(self, name):
return self.plants_for_index(self.students.index(name))
# Dislike how these are hardcoded indices
def plants_for_index(self, i):
return [self.plant_rows[0][i * 2],
self.plant_rows[0][i * 2 + 1],
self.plant_rows[1][i * 2],
self.plant_rows[1][i * 2 + 1]]
|
<commit_before>class Garden:
DEFAULT_STUDENTS = ("Alice Bob Charlie David Eve Fred Ginny "
"Harriet Ileana Joseph Kincaid Larry").split()
PLANTS = {'G': 'Grass',
'C': 'Clover',
'R': 'Radishes',
'V': 'Violets'}
def __init__(self, diagram, students = DEFAULT_STUDENTS):
self.diagram = diagram
self.rows = [list(row) for row in diagram.split()]
self.plant_rows = [[self.PLANTS[c] for c in row] for row in self.rows]
self.students = sorted(students)
def plants(self, name):
return self.plants_for_index(self.students.index(name))
# Dislike how these are hardcoded indices
def plants_for_index(self, i):
return [self.plant_rows[0][i * 2],
self.plant_rows[0][i * 2 + 1],
self.plant_rows[1][i * 2],
self.plant_rows[1][i * 2 + 1]]
<commit_msg>Remove spaces around '=' for default param<commit_after>
|
class Garden:
DEFAULT_STUDENTS = ("Alice Bob Charlie David Eve Fred Ginny "
"Harriet Ileana Joseph Kincaid Larry").split()
PLANTS = {'G': 'Grass',
'C': 'Clover',
'R': 'Radishes',
'V': 'Violets'}
def __init__(self, diagram, students=DEFAULT_STUDENTS):
self.diagram = diagram
self.rows = [list(row) for row in diagram.split()]
self.plant_rows = [[self.PLANTS[c] for c in row] for row in self.rows]
self.students = sorted(students)
def plants(self, name):
return self.plants_for_index(self.students.index(name))
# Dislike how these are hardcoded indices
def plants_for_index(self, i):
return [self.plant_rows[0][i * 2],
self.plant_rows[0][i * 2 + 1],
self.plant_rows[1][i * 2],
self.plant_rows[1][i * 2 + 1]]
|
class Garden:
DEFAULT_STUDENTS = ("Alice Bob Charlie David Eve Fred Ginny "
"Harriet Ileana Joseph Kincaid Larry").split()
PLANTS = {'G': 'Grass',
'C': 'Clover',
'R': 'Radishes',
'V': 'Violets'}
def __init__(self, diagram, students = DEFAULT_STUDENTS):
self.diagram = diagram
self.rows = [list(row) for row in diagram.split()]
self.plant_rows = [[self.PLANTS[c] for c in row] for row in self.rows]
self.students = sorted(students)
def plants(self, name):
return self.plants_for_index(self.students.index(name))
# Dislike how these are hardcoded indices
def plants_for_index(self, i):
return [self.plant_rows[0][i * 2],
self.plant_rows[0][i * 2 + 1],
self.plant_rows[1][i * 2],
self.plant_rows[1][i * 2 + 1]]
Remove spaces around '=' for default param
class Garden:
DEFAULT_STUDENTS = ("Alice Bob Charlie David Eve Fred Ginny "
"Harriet Ileana Joseph Kincaid Larry").split()
PLANTS = {'G': 'Grass',
'C': 'Clover',
'R': 'Radishes',
'V': 'Violets'}
def __init__(self, diagram, students=DEFAULT_STUDENTS):
self.diagram = diagram
self.rows = [list(row) for row in diagram.split()]
self.plant_rows = [[self.PLANTS[c] for c in row] for row in self.rows]
self.students = sorted(students)
def plants(self, name):
return self.plants_for_index(self.students.index(name))
# Dislike how these are hardcoded indices
def plants_for_index(self, i):
return [self.plant_rows[0][i * 2],
self.plant_rows[0][i * 2 + 1],
self.plant_rows[1][i * 2],
self.plant_rows[1][i * 2 + 1]]
|
<commit_before>class Garden:
DEFAULT_STUDENTS = ("Alice Bob Charlie David Eve Fred Ginny "
"Harriet Ileana Joseph Kincaid Larry").split()
PLANTS = {'G': 'Grass',
'C': 'Clover',
'R': 'Radishes',
'V': 'Violets'}
def __init__(self, diagram, students = DEFAULT_STUDENTS):
self.diagram = diagram
self.rows = [list(row) for row in diagram.split()]
self.plant_rows = [[self.PLANTS[c] for c in row] for row in self.rows]
self.students = sorted(students)
def plants(self, name):
return self.plants_for_index(self.students.index(name))
# Dislike how these are hardcoded indices
def plants_for_index(self, i):
return [self.plant_rows[0][i * 2],
self.plant_rows[0][i * 2 + 1],
self.plant_rows[1][i * 2],
self.plant_rows[1][i * 2 + 1]]
<commit_msg>Remove spaces around '=' for default param<commit_after>class Garden:
DEFAULT_STUDENTS = ("Alice Bob Charlie David Eve Fred Ginny "
"Harriet Ileana Joseph Kincaid Larry").split()
PLANTS = {'G': 'Grass',
'C': 'Clover',
'R': 'Radishes',
'V': 'Violets'}
def __init__(self, diagram, students=DEFAULT_STUDENTS):
self.diagram = diagram
self.rows = [list(row) for row in diagram.split()]
self.plant_rows = [[self.PLANTS[c] for c in row] for row in self.rows]
self.students = sorted(students)
def plants(self, name):
return self.plants_for_index(self.students.index(name))
# Dislike how these are hardcoded indices
def plants_for_index(self, i):
return [self.plant_rows[0][i * 2],
self.plant_rows[0][i * 2 + 1],
self.plant_rows[1][i * 2],
self.plant_rows[1][i * 2 + 1]]
|
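The diff in the record above applies PEP 8's rule for defaults: no spaces around '=' when it marks a keyword argument or an unannotated default, but spaces when the parameter carries a type annotation. A quick contrast with illustrative names:

DEFAULT_STUDENTS = ['Alice', 'Bob']

def plants(diagram, students=DEFAULT_STUDENTS):  # unannotated default: no spaces
    return diagram, students

def plants_typed(diagram: str, students: list = DEFAULT_STUDENTS):  # annotated: spaces
    return diagram, students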
dc9070c14892114b9e05e84cc9195d0fb58f859d
|
api_bouncer/serializers.py
|
api_bouncer/serializers.py
|
import uuid
import jsonschema
from rest_framework import serializers
from .models import (
Api,
Consumer,
ConsumerKey,
Plugin,
)
from .schemas import plugins
class ApiSerializer(serializers.ModelSerializer):
class Meta:
model = Api
fields = '__all__'
class ConsumerSerializer(serializers.ModelSerializer):
class Meta:
model = Consumer
fields = '__all__'
class ConsumerKeySerializer(serializers.ModelSerializer):
class Meta:
model = ConsumerKey
fields = '__all__'
extra_kwargs = {
'key': {
'required': False,
'allow_null': True,
'allow_blank': True,
},
}
def validate_key(self, value):
"""Verify if no key is given and generate one"""
if not value:
value = str(uuid.uuid4()).replace('-', '')
return value
class PluginSerializer(serializers.ModelSerializer):
class Meta:
model = Plugin
fields = '__all__'
extra_kwargs = {
'config': {
'default': {},
}
}
def validate(self, data):
name = data.get('name')
if not name or name not in plugins:
raise serializers.ValidationError('Invalid plugin name')
plugin_schema = plugins[name]
try:
jsonschema.validate(data['config'], plugin_schema)
except jsonschema.ValidationError as e:
raise serializers.ValidationError({'config': e})
return data
|
import uuid
import jsonschema
from rest_framework import serializers
from .models import (
Api,
Consumer,
ConsumerKey,
Plugin,
)
from .schemas import plugins
class ConsumerSerializer(serializers.ModelSerializer):
class Meta:
model = Consumer
fields = '__all__'
class ConsumerKeySerializer(serializers.ModelSerializer):
consumer = serializers.SlugRelatedField(
many=False,
read_only=False,
slug_field='username',
queryset=Consumer.objects.all()
)
class Meta:
model = ConsumerKey
fields = '__all__'
extra_kwargs = {
'key': {
'required': False,
'allow_null': True,
'allow_blank': True,
},
}
def validate_key(self, value):
"""Verify if no key is given and generate one"""
if not value:
value = str(uuid.uuid4()).replace('-', '')
return value
class PluginSerializer(serializers.ModelSerializer):
api = serializers.SlugRelatedField(
many=False,
read_only=True,
slug_field='name'
)
class Meta:
model = Plugin
fields = '__all__'
extra_kwargs = {
'config': {
'default': {},
}
}
def validate(self, data):
name = data.get('name')
if not name or name not in plugins:
raise serializers.ValidationError('Invalid plugin name')
plugin_schema = plugins[name]
try:
jsonschema.validate(data['config'], plugin_schema)
except jsonschema.ValidationError as e:
raise serializers.ValidationError({'config': e})
return data
class ApiSerializer(serializers.ModelSerializer):
plugins = PluginSerializer(
many=True,
read_only=False,
)
class Meta:
model = Api
fields = '__all__'
|
Use SlugRelatedField for foreign keys for better readability
|
Use SlugRelatedField for foreign keys for better readability
|
Python
|
apache-2.0
|
menecio/django-api-bouncer
|
import uuid
import jsonschema
from rest_framework import serializers
from .models import (
Api,
Consumer,
ConsumerKey,
Plugin,
)
from .schemas import plugins
class ApiSerializer(serializers.ModelSerializer):
class Meta:
model = Api
fields = '__all__'
class ConsumerSerializer(serializers.ModelSerializer):
class Meta:
model = Consumer
fields = '__all__'
class ConsumerKeySerializer(serializers.ModelSerializer):
class Meta:
model = ConsumerKey
fields = '__all__'
extra_kwargs = {
'key': {
'required': False,
'allow_null': True,
'allow_blank': True,
},
}
def validate_key(self, value):
"""Verify if no key is given and generate one"""
if not value:
value = str(uuid.uuid4()).replace('-', '')
return value
class PluginSerializer(serializers.ModelSerializer):
class Meta:
model = Plugin
fields = '__all__'
extra_kwargs = {
'config': {
'default': {},
}
}
def validate(self, data):
name = data.get('name')
if not name or name not in plugins:
raise serializers.ValidationError('Invalid plugin name')
plugin_schema = plugins[name]
try:
jsonschema.validate(data['config'], plugin_schema)
except jsonschema.ValidationError as e:
raise serializers.ValidationError({'config': e})
return data
Use SlugRelatedField for foreign keys for better readability
|
import uuid
import jsonschema
from rest_framework import serializers
from .models import (
Api,
Consumer,
ConsumerKey,
Plugin,
)
from .schemas import plugins
class ConsumerSerializer(serializers.ModelSerializer):
class Meta:
model = Consumer
fields = '__all__'
class ConsumerKeySerializer(serializers.ModelSerializer):
consumer = serializers.SlugRelatedField(
many=False,
read_only=False,
slug_field='username',
queryset=Consumer.objects.all()
)
class Meta:
model = ConsumerKey
fields = '__all__'
extra_kwargs = {
'key': {
'required': False,
'allow_null': True,
'allow_blank': True,
},
}
def validate_key(self, value):
"""Verify if no key is given and generate one"""
if not value:
value = str(uuid.uuid4()).replace('-', '')
return value
class PluginSerializer(serializers.ModelSerializer):
api = serializers.SlugRelatedField(
many=False,
read_only=True,
slug_field='name'
)
class Meta:
model = Plugin
fields = '__all__'
extra_kwargs = {
'config': {
'default': {},
}
}
def validate(self, data):
name = data.get('name')
if not name or name not in plugins:
raise serializers.ValidationError('Invalid plugin name')
plugin_schema = plugins[name]
try:
jsonschema.validate(data['config'], plugin_schema)
except jsonschema.ValidationError as e:
raise serializers.ValidationError({'config': e})
return data
class ApiSerializer(serializers.ModelSerializer):
plugins = PluginSerializer(
many=True,
read_only=False,
)
class Meta:
model = Api
fields = '__all__'
|
<commit_before>import uuid
import jsonschema
from rest_framework import serializers
from .models import (
Api,
Consumer,
ConsumerKey,
Plugin,
)
from .schemas import plugins
class ApiSerializer(serializers.ModelSerializer):
class Meta:
model = Api
fields = '__all__'
class ConsumerSerializer(serializers.ModelSerializer):
class Meta:
model = Consumer
fields = '__all__'
class ConsumerKeySerializer(serializers.ModelSerializer):
class Meta:
model = ConsumerKey
fields = '__all__'
extra_kwargs = {
'key': {
'required': False,
'allow_null': True,
'allow_blank': True,
},
}
def validate_key(self, value):
"""Verify if no key is given and generate one"""
if not value:
value = str(uuid.uuid4()).replace('-', '')
return value
class PluginSerializer(serializers.ModelSerializer):
class Meta:
model = Plugin
fields = '__all__'
extra_kwargs = {
'config': {
'default': {},
}
}
def validate(self, data):
name = data.get('name')
if not name or name not in plugins:
raise serializers.ValidationError('Invalid plugin name')
plugin_schema = plugins[name]
try:
jsonschema.validate(data['config'], plugin_schema)
except jsonschema.ValidationError as e:
raise serializers.ValidationError({'config': e})
return data
<commit_msg>Use SlugRelatedField for foreign keys for better readability<commit_after>
|
import uuid
import jsonschema
from rest_framework import serializers
from .models import (
Api,
Consumer,
ConsumerKey,
Plugin,
)
from .schemas import plugins
class ConsumerSerializer(serializers.ModelSerializer):
class Meta:
model = Consumer
fields = '__all__'
class ConsumerKeySerializer(serializers.ModelSerializer):
consumer = serializers.SlugRelatedField(
many=False,
read_only=False,
slug_field='username',
queryset=Consumer.objects.all()
)
class Meta:
model = ConsumerKey
fields = '__all__'
extra_kwargs = {
'key': {
'required': False,
'allow_null': True,
'allow_blank': True,
},
}
def validate_key(self, value):
"""Verify if no key is given and generate one"""
if not value:
value = str(uuid.uuid4()).replace('-', '')
return value
class PluginSerializer(serializers.ModelSerializer):
api = serializers.SlugRelatedField(
many=False,
read_only=True,
slug_field='name'
)
class Meta:
model = Plugin
fields = '__all__'
extra_kwargs = {
'config': {
'default': {},
}
}
def validate(self, data):
name = data.get('name')
if not name or name not in plugins:
raise serializers.ValidationError('Invalid plugin name')
plugin_schema = plugins[name]
try:
jsonschema.validate(data['config'], plugin_schema)
except jsonschema.ValidationError as e:
raise serializers.ValidationError({'config': e})
return data
class ApiSerializer(serializers.ModelSerializer):
plugins = PluginSerializer(
many=True,
read_only=False,
)
class Meta:
model = Api
fields = '__all__'
|
import uuid
import jsonschema
from rest_framework import serializers
from .models import (
Api,
Consumer,
ConsumerKey,
Plugin,
)
from .schemas import plugins
class ApiSerializer(serializers.ModelSerializer):
class Meta:
model = Api
fields = '__all__'
class ConsumerSerializer(serializers.ModelSerializer):
class Meta:
model = Consumer
fields = '__all__'
class ConsumerKeySerializer(serializers.ModelSerializer):
class Meta:
model = ConsumerKey
fields = '__all__'
extra_kwargs = {
'key': {
'required': False,
'allow_null': True,
'allow_blank': True,
},
}
def validate_key(self, value):
"""Verify if no key is given and generate one"""
if not value:
value = str(uuid.uuid4()).replace('-', '')
return value
class PluginSerializer(serializers.ModelSerializer):
class Meta:
model = Plugin
fields = '__all__'
extra_kwargs = {
'config': {
'default': {},
}
}
def validate(self, data):
name = data.get('name')
if not name or name not in plugins:
raise serializers.ValidationError('Invalid plugin name')
plugin_schema = plugins[name]
try:
jsonschema.validate(data['config'], plugin_schema)
except jsonschema.ValidationError as e:
raise serializers.ValidationError({'config': e})
return data
Use SlugRelatedField for foreign keys for better readability
import uuid
import jsonschema
from rest_framework import serializers
from .models import (
Api,
Consumer,
ConsumerKey,
Plugin,
)
from .schemas import plugins
class ConsumerSerializer(serializers.ModelSerializer):
class Meta:
model = Consumer
fields = '__all__'
class ConsumerKeySerializer(serializers.ModelSerializer):
consumer = serializers.SlugRelatedField(
many=False,
read_only=False,
slug_field='username',
queryset=Consumer.objects.all()
)
class Meta:
model = ConsumerKey
fields = '__all__'
extra_kwargs = {
'key': {
'required': False,
'allow_null': True,
'allow_blank': True,
},
}
def validate_key(self, value):
"""Verify if no key is given and generate one"""
if not value:
value = str(uuid.uuid4()).replace('-', '')
return value
class PluginSerializer(serializers.ModelSerializer):
api = serializers.SlugRelatedField(
many=False,
read_only=True,
slug_field='name'
)
class Meta:
model = Plugin
fields = '__all__'
extra_kwargs = {
'config': {
'default': {},
}
}
def validate(self, data):
name = data.get('name')
if not name or name not in plugins:
raise serializers.ValidationError('Invalid plugin name')
plugin_schema = plugins[name]
try:
jsonschema.validate(data['config'], plugin_schema)
except jsonschema.ValidationError as e:
raise serializers.ValidationError({'config': e})
return data
class ApiSerializer(serializers.ModelSerializer):
plugins = PluginSerializer(
many=True,
read_only=False,
)
class Meta:
model = Api
fields = '__all__'
|
<commit_before>import uuid
import jsonschema
from rest_framework import serializers
from .models import (
Api,
Consumer,
ConsumerKey,
Plugin,
)
from .schemas import plugins
class ApiSerializer(serializers.ModelSerializer):
class Meta:
model = Api
fields = '__all__'
class ConsumerSerializer(serializers.ModelSerializer):
class Meta:
model = Consumer
fields = '__all__'
class ConsumerKeySerializer(serializers.ModelSerializer):
class Meta:
model = ConsumerKey
fields = '__all__'
extra_kwargs = {
'key': {
'required': False,
'allow_null': True,
'allow_blank': True,
},
}
def validate_key(self, value):
"""Verify if no key is given and generate one"""
if not value:
value = str(uuid.uuid4()).replace('-', '')
return value
class PluginSerializer(serializers.ModelSerializer):
class Meta:
model = Plugin
fields = '__all__'
extra_kwargs = {
'config': {
'default': {},
}
}
def validate(self, data):
name = data.get('name')
if not name or name not in plugins:
raise serializers.ValidationError('Invalid plugin name')
plugin_schema = plugins[name]
try:
jsonschema.validate(data['config'], plugin_schema)
except jsonschema.ValidationError as e:
raise serializers.ValidationError({'config': e})
return data
<commit_msg>Use SlugRelatedField for foreign keys for better readability<commit_after>import uuid
import jsonschema
from rest_framework import serializers
from .models import (
Api,
Consumer,
ConsumerKey,
Plugin,
)
from .schemas import plugins
class ConsumerSerializer(serializers.ModelSerializer):
class Meta:
model = Consumer
fields = '__all__'
class ConsumerKeySerializer(serializers.ModelSerializer):
consumer = serializers.SlugRelatedField(
many=False,
read_only=False,
slug_field='username',
queryset=Consumer.objects.all()
)
class Meta:
model = ConsumerKey
fields = '__all__'
extra_kwargs = {
'key': {
'required': False,
'allow_null': True,
'allow_blank': True,
},
}
def validate_key(self, value):
"""Verify if no key is given and generate one"""
if not value:
value = str(uuid.uuid4()).replace('-', '')
return value
class PluginSerializer(serializers.ModelSerializer):
api = serializers.SlugRelatedField(
many=False,
read_only=True,
slug_field='name'
)
class Meta:
model = Plugin
fields = '__all__'
extra_kwargs = {
'config': {
'default': {},
}
}
def validate(self, data):
name = data.get('name')
if not name or name not in plugins:
raise serializers.ValidationError('Invalid plugin name')
plugin_schema = plugins[name]
try:
jsonschema.validate(data['config'], plugin_schema)
except jsonschema.ValidationError as e:
raise serializers.ValidationError({'config': e})
return data
class ApiSerializer(serializers.ModelSerializer):
plugins = PluginSerializer(
many=True,
read_only=False,
)
class Meta:
model = Api
fields = '__all__'
|
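The serializer change in the record above hinges on one DRF rule: a read-only SlugRelatedField only serializes (instance to slug), so it needs no queryset, while a writable one must be given queryset= so an incoming slug can be resolved back to a model instance. A hedged sketch reusing the record's field names (Consumer is the model from the record's app):

from rest_framework import serializers
from api_bouncer.models import Consumer  # as defined in the record's app

# Writable: DRF runs Consumer.objects.get(username=<incoming slug>),
# so queryset= is mandatory.
consumer = serializers.SlugRelatedField(
    slug_field='username',
    queryset=Consumer.objects.all(),
)

# Read-only: serialization only, no reverse lookup, no queryset needed.
api = serializers.SlugRelatedField(slug_field='name', read_only=True)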
ab59466b0cce94106e7e48fd4480c33b2f17910b
|
pylinks/search/views.py
|
pylinks/search/views.py
|
from django.core.paginator import InvalidPage, Paginator
from django.http import Http404
from django.utils.translation import ugettext as _
from haystack.forms import FacetedSearchForm
from haystack.query import SearchQuerySet
from haystack.views import FacetedSearchView
class SearchView(FacetedSearchView):
template = 'search/search.htm'
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.form_class = FacetedSearchForm
self.searchqueryset = SearchQuerySet()
def build_page(self):
page = self.request.resolver_match.kwargs.get('page') or self.request.GET.get('page') or 1
try:
page_number = int(page)
except ValueError:
if page == 'last':
page_number = paginator.num_pages
else:
raise Http404(_("Page is not 'last', nor can it be converted to an int."))
paginator = Paginator(self.results, self.results_per_page)
try:
page = paginator.page(page_number)
except InvalidPage:
raise Http404(_('Invalid page (%(page_number)s)') % {
'page_number': page_number
})
return (paginator, page)
def extra_context(self):
context = super().extra_context()
context.update({'is_paginated': bool(self.query)})
return context
|
from django.core.paginator import InvalidPage, Paginator
from django.http import Http404
from django.utils.translation import gettext as _
from haystack.forms import FacetedSearchForm
from haystack.query import SearchQuerySet
from haystack.views import FacetedSearchView
class SearchView(FacetedSearchView):
template = 'search/search.htm'
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.form_class = FacetedSearchForm
self.searchqueryset = SearchQuerySet()
def build_page(self):
page = self.request.resolver_match.kwargs.get('page') or self.request.GET.get('page') or 1
try:
page_number = int(page)
except ValueError:
if page == 'last':
page_number = paginator.num_pages
else:
raise Http404(_("Page is not 'last', nor can it be converted to an int."))
paginator = Paginator(self.results, self.results_per_page)
try:
page = paginator.page(page_number)
except InvalidPage:
raise Http404(_('Invalid page (%(page_number)s)') % {
'page_number': page_number
})
return (paginator, page)
def extra_context(self):
context = super().extra_context()
context.update({'is_paginated': bool(self.query)})
return context
|
Switch import from ugettext to gettext
|
Switch import from ugettext to gettext
|
Python
|
mit
|
michaelmior/pylinks,michaelmior/pylinks,michaelmior/pylinks
|
from django.core.paginator import InvalidPage, Paginator
from django.http import Http404
from django.utils.translation import ugettext as _
from haystack.forms import FacetedSearchForm
from haystack.query import SearchQuerySet
from haystack.views import FacetedSearchView
class SearchView(FacetedSearchView):
template = 'search/search.htm'
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.form_class = FacetedSearchForm
self.searchqueryset = SearchQuerySet()
def build_page(self):
page = self.request.resolver_match.kwargs.get('page') or self.request.GET.get('page') or 1
try:
page_number = int(page)
except ValueError:
if page == 'last':
page_number = paginator.num_pages
else:
raise Http404(_("Page is not 'last', nor can it be converted to an int."))
paginator = Paginator(self.results, self.results_per_page)
try:
page = paginator.page(page_number)
except InvalidPage:
raise Http404(_('Invalid page (%(page_number)s)') % {
'page_number': page_number
})
return (paginator, page)
def extra_context(self):
context = super().extra_context()
context.update({'is_paginated': bool(self.query)})
return context
Switch import from ugettext to gettext
|
from django.core.paginator import InvalidPage, Paginator
from django.http import Http404
from django.utils.translation import gettext as _
from haystack.forms import FacetedSearchForm
from haystack.query import SearchQuerySet
from haystack.views import FacetedSearchView
class SearchView(FacetedSearchView):
template = 'search/search.htm'
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.form_class = FacetedSearchForm
self.searchqueryset = SearchQuerySet()
def build_page(self):
page = self.request.resolver_match.kwargs.get('page') or self.request.GET.get('page') or 1
try:
page_number = int(page)
except ValueError:
if page == 'last':
page_number = paginator.num_pages
else:
raise Http404(_("Page is not 'last', nor can it be converted to an int."))
paginator = Paginator(self.results, self.results_per_page)
try:
page = paginator.page(page_number)
except InvalidPage:
raise Http404(_('Invalid page (%(page_number)s)') % {
'page_number': page_number
})
return (paginator, page)
def extra_context(self):
context = super().extra_context()
context.update({'is_paginated': bool(self.query)})
return context
|
<commit_before>from django.core.paginator import InvalidPage, Paginator
from django.http import Http404
from django.utils.translation import ugettext as _
from haystack.forms import FacetedSearchForm
from haystack.query import SearchQuerySet
from haystack.views import FacetedSearchView
class SearchView(FacetedSearchView):
template = 'search/search.htm'
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.form_class = FacetedSearchForm
self.searchqueryset = SearchQuerySet()
def build_page(self):
page = self.request.resolver_match.kwargs.get('page') or self.request.GET.get('page') or 1
try:
page_number = int(page)
except ValueError:
if page == 'last':
page_number = paginator.num_pages
else:
raise Http404(_("Page is not 'last', nor can it be converted to an int."))
paginator = Paginator(self.results, self.results_per_page)
try:
page = paginator.page(page_number)
except InvalidPage:
raise Http404(_('Invalid page (%(page_number)s)') % {
'page_number': page_number
})
return (paginator, page)
def extra_context(self):
context = super().extra_context()
context.update({'is_paginated': bool(self.query)})
return context
<commit_msg>Switch import from ugettext to gettext<commit_after>
|
from django.core.paginator import InvalidPage, Paginator
from django.http import Http404
from django.utils.translation import gettext as _
from haystack.forms import FacetedSearchForm
from haystack.query import SearchQuerySet
from haystack.views import FacetedSearchView
class SearchView(FacetedSearchView):
template = 'search/search.htm'
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.form_class = FacetedSearchForm
self.searchqueryset = SearchQuerySet()
def build_page(self):
page = self.request.resolver_match.kwargs.get('page') or self.request.GET.get('page') or 1
try:
page_number = int(page)
except ValueError:
if page == 'last':
page_number = paginator.num_pages
else:
raise Http404(_("Page is not 'last', nor can it be converted to an int."))
paginator = Paginator(self.results, self.results_per_page)
try:
page = paginator.page(page_number)
except InvalidPage:
raise Http404(_('Invalid page (%(page_number)s)') % {
'page_number': page_number
})
return (paginator, page)
def extra_context(self):
context = super().extra_context()
context.update({'is_paginated': bool(self.query)})
return context
|
from django.core.paginator import InvalidPage, Paginator
from django.http import Http404
from django.utils.translation import ugettext as _
from haystack.forms import FacetedSearchForm
from haystack.query import SearchQuerySet
from haystack.views import FacetedSearchView
class SearchView(FacetedSearchView):
template = 'search/search.htm'
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.form_class = FacetedSearchForm
self.searchqueryset = SearchQuerySet()
def build_page(self):
page = self.request.resolver_match.kwargs.get('page') or self.request.GET.get('page') or 1
try:
page_number = int(page)
except ValueError:
if page == 'last':
page_number = paginator.num_pages
else:
raise Http404(_("Page is not 'last', nor can it be converted to an int."))
paginator = Paginator(self.results, self.results_per_page)
try:
page = paginator.page(page_number)
except InvalidPage:
raise Http404(_('Invalid page (%(page_number)s)') % {
'page_number': page_number
})
return (paginator, page)
def extra_context(self):
context = super().extra_context()
context.update({'is_paginated': bool(self.query)})
return context
Switch import from ugettext to gettext
from django.core.paginator import InvalidPage, Paginator
from django.http import Http404
from django.utils.translation import gettext as _
from haystack.forms import FacetedSearchForm
from haystack.query import SearchQuerySet
from haystack.views import FacetedSearchView
class SearchView(FacetedSearchView):
template = 'search/search.htm'
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.form_class = FacetedSearchForm
self.searchqueryset = SearchQuerySet()
def build_page(self):
page = self.request.resolver_match.kwargs.get('page') or self.request.GET.get('page') or 1
try:
page_number = int(page)
except ValueError:
if page == 'last':
page_number = paginator.num_pages
else:
raise Http404(_("Page is not 'last', nor can it be converted to an int."))
paginator = Paginator(self.results, self.results_per_page)
try:
page = paginator.page(page_number)
except InvalidPage:
raise Http404(_('Invalid page (%(page_number)s)') % {
'page_number': page_number
})
return (paginator, page)
def extra_context(self):
context = super().extra_context()
context.update({'is_paginated': bool(self.query)})
return context
|
<commit_before>from django.core.paginator import InvalidPage, Paginator
from django.http import Http404
from django.utils.translation import ugettext as _
from haystack.forms import FacetedSearchForm
from haystack.query import SearchQuerySet
from haystack.views import FacetedSearchView
class SearchView(FacetedSearchView):
template = 'search/search.htm'
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.form_class = FacetedSearchForm
self.searchqueryset = SearchQuerySet()
def build_page(self):
page = self.request.resolver_match.kwargs.get('page') or self.request.GET.get('page') or 1
try:
page_number = int(page)
except ValueError:
if page == 'last':
page_number = paginator.num_pages
else:
raise Http404(_("Page is not 'last', nor can it be converted to an int."))
paginator = Paginator(self.results, self.results_per_page)
try:
page = paginator.page(page_number)
except InvalidPage:
raise Http404(_('Invalid page (%(page_number)s)') % {
'page_number': page_number
})
return (paginator, page)
def extra_context(self):
context = super().extra_context()
context.update({'is_paginated': bool(self.query)})
return context
<commit_msg>Switch import from ugettext to gettext<commit_after>from django.core.paginator import InvalidPage, Paginator
from django.http import Http404
from django.utils.translation import gettext as _
from haystack.forms import FacetedSearchForm
from haystack.query import SearchQuerySet
from haystack.views import FacetedSearchView
class SearchView(FacetedSearchView):
template = 'search/search.htm'
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.form_class = FacetedSearchForm
self.searchqueryset = SearchQuerySet()
def build_page(self):
page = self.request.resolver_match.kwargs.get('page') or self.request.GET.get('page') or 1
try:
page_number = int(page)
except ValueError:
if page == 'last':
page_number = paginator.num_pages
else:
raise Http404(_("Page is not 'last', nor can it be converted to an int."))
paginator = Paginator(self.results, self.results_per_page)
try:
page = paginator.page(page_number)
except InvalidPage:
raise Http404(_('Invalid page (%(page_number)s)') % {
'page_number': page_number
})
return (paginator, page)
def extra_context(self):
context = super().extra_context()
context.update({'is_paginated': bool(self.query)})
return context
|
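For context on the one-line rename in the record above: Django 3.0 deprecated the u-prefixed translation aliases and Django 4.0 removed them, so the plain names are the only spellings that keep working:

# ugettext, ugettext_lazy, ungettext, ... are gone in Django >= 4.0.
from django.utils.translation import gettext as _
from django.utils.translation import gettext_lazy, ngettext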
0058a20aa01d1de15b9b98785309d8ca018f4485
|
sympy/utilities/source.py
|
sympy/utilities/source.py
|
"""
This module adds several functions for interactive source code inspection.
"""
import inspect
from sympy.core.compatibility import callable
def source(object):
"""
Prints the source code of a given object.
"""
print 'In file: %s' % inspect.getsourcefile(object)
print inspect.getsource(object)
def get_class(lookup_view):
"""
Convert a string version of a class name to the object.
For example, get_class('sympy.core.Basic') will return
class Basic located in module sympy.core
"""
if isinstance(lookup_view, str):
lookup_view = lookup_view
mod_name, func_name = get_mod_func(lookup_view)
if func_name != '':
lookup_view = getattr(__import__(mod_name, {}, {}, ['']), func_name)
if not callable(lookup_view):
raise AttributeError("'%s.%s' is not a callable." % (mod_name, func_name))
return lookup_view
def get_mod_func(callback):
"""
splits the string path to a class into a string path to the module
and the name of the class. For example:
>>> from sympy.utilities.source import get_mod_func
>>> get_mod_func('sympy.core.basic.Basic')
('sympy.core.basic', 'Basic')
"""
dot = callback.rfind('.')
if dot == -1:
return callback, ''
return callback[:dot], callback[dot+1:]
|
"""
This module adds several functions for interactive source code inspection.
"""
import inspect
from sympy.core.compatibility import callable
def source(object):
"""
Prints the source code of a given object.
"""
print 'In file: %s' % inspect.getsourcefile(object)
print inspect.getsource(object)
def get_class(lookup_view):
"""
Convert a string version of a class name to the object.
For example, get_class('sympy.core.Basic') will return
class Basic located in module sympy.core
"""
if isinstance(lookup_view, str):
lookup_view = lookup_view
mod_name, func_name = get_mod_func(lookup_view)
if func_name != '':
lookup_view = getattr(__import__(mod_name, {}, {}, ['*']), func_name)
if not callable(lookup_view):
raise AttributeError("'%s.%s' is not a callable." % (mod_name, func_name))
return lookup_view
def get_mod_func(callback):
"""
splits the string path to a class into a string path to the module
and the name of the class. For example:
>>> from sympy.utilities.source import get_mod_func
>>> get_mod_func('sympy.core.basic.Basic')
('sympy.core.basic', 'Basic')
"""
dot = callback.rfind('.')
if dot == -1:
return callback, ''
return callback[:dot], callback[dot+1:]
|
Fix test failures in Python 3.3b2
|
Fix test failures in Python 3.3b2
The fromlist argument of __import__ was being called as [''], which is
meaningless. Because we need fromlist to be non-empty to get the submodule
returned, this was changed to ['*'].
|
Python
|
bsd-3-clause
|
Shaswat27/sympy,VaibhavAgarwalVA/sympy,sunny94/temp,hargup/sympy,sampadsaha5/sympy,kumarkrishna/sympy,kaichogami/sympy,jerli/sympy,skidzo/sympy,VaibhavAgarwalVA/sympy,bukzor/sympy,madan96/sympy,farhaanbukhsh/sympy,MridulS/sympy,kaushik94/sympy,Curious72/sympy,mafiya69/sympy,Shaswat27/sympy,web/sympy,wanglongqi/sympy,cccfran/sympy,debugger22/sympy,lidavidm/sympy,kaushik94/sympy,kumarkrishna/sympy,Arafatk/sympy,MridulS/sympy,atsao72/sympy,farhaanbukhsh/sympy,ga7g08/sympy,vipulroxx/sympy,jaimahajan1997/sympy,Curious72/sympy,abloomston/sympy,asm666/sympy,pandeyadarsh/sympy,Titan-C/sympy,MechCoder/sympy,wyom/sympy,grevutiu-gabriel/sympy,wanglongqi/sympy,drufat/sympy,Arafatk/sympy,yashsharan/sympy,shipci/sympy,MechCoder/sympy,Arafatk/sympy,moble/sympy,AkademieOlympia/sympy,Davidjohnwilson/sympy,iamutkarshtiwari/sympy,skidzo/sympy,postvakje/sympy,cccfran/sympy,oliverlee/sympy,mcdaniel67/sympy,yashsharan/sympy,emon10005/sympy,jaimahajan1997/sympy,jamesblunt/sympy,kevalds51/sympy,abloomston/sympy,meghana1995/sympy,shikil/sympy
|
"""
This module adds several functions for interactive source code inspection.
"""
import inspect
from sympy.core.compatibility import callable
def source(object):
"""
Prints the source code of a given object.
"""
print 'In file: %s' % inspect.getsourcefile(object)
print inspect.getsource(object)
def get_class(lookup_view):
"""
Convert a string version of a class name to the object.
For example, get_class('sympy.core.Basic') will return
class Basic located in module sympy.core
"""
if isinstance(lookup_view, str):
lookup_view = lookup_view
mod_name, func_name = get_mod_func(lookup_view)
if func_name != '':
lookup_view = getattr(__import__(mod_name, {}, {}, ['']), func_name)
if not callable(lookup_view):
raise AttributeError("'%s.%s' is not a callable." % (mod_name, func_name))
return lookup_view
def get_mod_func(callback):
"""
splits the string path to a class into a string path to the module
and the name of the class. For example:
>>> from sympy.utilities.source import get_mod_func
>>> get_mod_func('sympy.core.basic.Basic')
('sympy.core.basic', 'Basic')
"""
dot = callback.rfind('.')
if dot == -1:
return callback, ''
return callback[:dot], callback[dot+1:]
Fix test failures in Python 3.3b2
The fromlist argument of __import__ was being called as [''], which is
meaningless. Because we need fromlist to be non-empty to get the submodule
returned, this was changed to ['*'].
|
"""
This module adds several functions for interactive source code inspection.
"""
import inspect
from sympy.core.compatibility import callable
def source(object):
"""
Prints the source code of a given object.
"""
print 'In file: %s' % inspect.getsourcefile(object)
print inspect.getsource(object)
def get_class(lookup_view):
"""
Convert a string version of a class name to the object.
For example, get_class('sympy.core.Basic') will return
class Basic located in module sympy.core
"""
if isinstance(lookup_view, str):
lookup_view = lookup_view
mod_name, func_name = get_mod_func(lookup_view)
if func_name != '':
lookup_view = getattr(__import__(mod_name, {}, {}, ['*']), func_name)
if not callable(lookup_view):
raise AttributeError("'%s.%s' is not a callable." % (mod_name, func_name))
return lookup_view
def get_mod_func(callback):
"""
splits the string path to a class into a string path to the module
and the name of the class. For example:
>>> from sympy.utilities.source import get_mod_func
>>> get_mod_func('sympy.core.basic.Basic')
('sympy.core.basic', 'Basic')
"""
dot = callback.rfind('.')
if dot == -1:
return callback, ''
return callback[:dot], callback[dot+1:]
|
<commit_before>"""
This module adds several functions for interactive source code inspection.
"""
import inspect
from sympy.core.compatibility import callable
def source(object):
"""
Prints the source code of a given object.
"""
print 'In file: %s' % inspect.getsourcefile(object)
print inspect.getsource(object)
def get_class(lookup_view):
"""
Convert a string version of a class name to the object.
For example, get_class('sympy.core.Basic') will return
class Basic located in module sympy.core
"""
if isinstance(lookup_view, str):
lookup_view = lookup_view
mod_name, func_name = get_mod_func(lookup_view)
if func_name != '':
lookup_view = getattr(__import__(mod_name, {}, {}, ['']), func_name)
if not callable(lookup_view):
raise AttributeError("'%s.%s' is not a callable." % (mod_name, func_name))
return lookup_view
def get_mod_func(callback):
"""
splits the string path to a class into a string path to the module
and the name of the class. For example:
>>> from sympy.utilities.source import get_mod_func
>>> get_mod_func('sympy.core.basic.Basic')
('sympy.core.basic', 'Basic')
"""
dot = callback.rfind('.')
if dot == -1:
return callback, ''
return callback[:dot], callback[dot+1:]
<commit_msg>Fix test failures in Python 3.3b2
The fromlist argument of __import__ was being called as [''], which is
meaningless. Because we need fromlist to be non-empty to get the submodule
returned, this was changed to ['*'].<commit_after>
|
"""
This module adds several functions for interactive source code inspection.
"""
import inspect
from sympy.core.compatibility import callable
def source(object):
"""
Prints the source code of a given object.
"""
print 'In file: %s' % inspect.getsourcefile(object)
print inspect.getsource(object)
def get_class(lookup_view):
"""
Convert a string version of a class name to the object.
For example, get_class('sympy.core.Basic') will return
class Basic located in module sympy.core
"""
if isinstance(lookup_view, str):
lookup_view = lookup_view
mod_name, func_name = get_mod_func(lookup_view)
if func_name != '':
lookup_view = getattr(__import__(mod_name, {}, {}, ['*']), func_name)
if not callable(lookup_view):
raise AttributeError("'%s.%s' is not a callable." % (mod_name, func_name))
return lookup_view
def get_mod_func(callback):
"""
splits the string path to a class into a string path to the module
and the name of the class. For example:
>>> from sympy.utilities.source import get_mod_func
>>> get_mod_func('sympy.core.basic.Basic')
('sympy.core.basic', 'Basic')
"""
dot = callback.rfind('.')
if dot == -1:
return callback, ''
return callback[:dot], callback[dot+1:]
|
"""
This module adds several functions for interactive source code inspection.
"""
import inspect
from sympy.core.compatibility import callable
def source(object):
"""
Prints the source code of a given object.
"""
print 'In file: %s' % inspect.getsourcefile(object)
print inspect.getsource(object)
def get_class(lookup_view):
"""
Convert a string version of a class name to the object.
For example, get_class('sympy.core.Basic') will return
class Basic located in module sympy.core
"""
if isinstance(lookup_view, str):
lookup_view = lookup_view
mod_name, func_name = get_mod_func(lookup_view)
if func_name != '':
lookup_view = getattr(__import__(mod_name, {}, {}, ['']), func_name)
if not callable(lookup_view):
raise AttributeError("'%s.%s' is not a callable." % (mod_name, func_name))
return lookup_view
def get_mod_func(callback):
"""
splits the string path to a class into a string path to the module
and the name of the class. For example:
>>> from sympy.utilities.source import get_mod_func
>>> get_mod_func('sympy.core.basic.Basic')
('sympy.core.basic', 'Basic')
"""
dot = callback.rfind('.')
if dot == -1:
return callback, ''
return callback[:dot], callback[dot+1:]
Fix test failures in Python 3.3b2
The fromlist argument of __import__ was being called as [''], which is
meaningless. Because we need fromlist to be non-empty to get the submodule
returned, this was changed to ['*'].
"""
This module adds several functions for interactive source code inspection.
"""
import inspect
from sympy.core.compatibility import callable
def source(object):
"""
Prints the source code of a given object.
"""
print 'In file: %s' % inspect.getsourcefile(object)
print inspect.getsource(object)
def get_class(lookup_view):
"""
Convert a string version of a class name to the object.
For example, get_class('sympy.core.Basic') will return
class Basic located in module sympy.core
"""
if isinstance(lookup_view, str):
lookup_view = lookup_view
mod_name, func_name = get_mod_func(lookup_view)
if func_name != '':
lookup_view = getattr(__import__(mod_name, {}, {}, ['*']), func_name)
if not callable(lookup_view):
raise AttributeError("'%s.%s' is not a callable." % (mod_name, func_name))
return lookup_view
def get_mod_func(callback):
"""
splits the string path to a class into a string path to the module
and the name of the class. For example:
>>> from sympy.utilities.source import get_mod_func
>>> get_mod_func('sympy.core.basic.Basic')
('sympy.core.basic', 'Basic')
"""
dot = callback.rfind('.')
if dot == -1:
return callback, ''
return callback[:dot], callback[dot+1:]
|
<commit_before>"""
This module adds several functions for interactive source code inspection.
"""
import inspect
from sympy.core.compatibility import callable
def source(object):
"""
Prints the source code of a given object.
"""
print 'In file: %s' % inspect.getsourcefile(object)
print inspect.getsource(object)
def get_class(lookup_view):
"""
Convert a string version of a class name to the object.
For example, get_class('sympy.core.Basic') will return
class Basic located in module sympy.core
"""
if isinstance(lookup_view, str):
lookup_view = lookup_view
mod_name, func_name = get_mod_func(lookup_view)
if func_name != '':
lookup_view = getattr(__import__(mod_name, {}, {}, ['']), func_name)
if not callable(lookup_view):
raise AttributeError("'%s.%s' is not a callable." % (mod_name, func_name))
return lookup_view
def get_mod_func(callback):
"""
splits the string path to a class into a string path to the module
and the name of the class. For example:
>>> from sympy.utilities.source import get_mod_func
>>> get_mod_func('sympy.core.basic.Basic')
('sympy.core.basic', 'Basic')
"""
dot = callback.rfind('.')
if dot == -1:
return callback, ''
return callback[:dot], callback[dot+1:]
<commit_msg>Fix test failures in Python 3.3b2
The fromlist argument of __import__ was being called as [''], which is
meaningless. Because we need fromlist to be non-empty to get the submodule
returned, this was changed to ['*'].<commit_after>"""
This module adds several functions for interactive source code inspection.
"""
import inspect
from sympy.core.compatibility import callable
def source(object):
"""
Prints the source code of a given object.
"""
print 'In file: %s' % inspect.getsourcefile(object)
print inspect.getsource(object)
def get_class(lookup_view):
"""
Convert a string version of a class name to the object.
For example, get_class('sympy.core.Basic') will return
class Basic located in module sympy.core
"""
if isinstance(lookup_view, str):
lookup_view = lookup_view
mod_name, func_name = get_mod_func(lookup_view)
if func_name != '':
lookup_view = getattr(__import__(mod_name, {}, {}, ['*']), func_name)
if not callable(lookup_view):
raise AttributeError("'%s.%s' is not a callable." % (mod_name, func_name))
return lookup_view
def get_mod_func(callback):
"""
splits the string path to a class into a string path to the module
and the name of the class. For example:
>>> from sympy.utilities.source import get_mod_func
>>> get_mod_func('sympy.core.basic.Basic')
('sympy.core.basic', 'Basic')
"""
dot = callback.rfind('.')
if dot == -1:
return callback, ''
return callback[:dot], callback[dot+1:]
|
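The behavior the fix in the record above relies on: __import__('pkg.mod') returns the top-level package unless fromlist is non-empty, in which case it returns the leaf module. A small demonstration using the standard library, plus the clearer importlib spelling:

# Empty fromlist: the top-level package comes back.
top = __import__('os.path', {}, {}, [])
assert top.__name__ == 'os'

# Non-empty fromlist (the commit's ['*']): the submodule itself comes back.
leaf = __import__('os.path', {}, {}, ['*'])
assert leaf.__name__ in ('posixpath', 'ntpath')

# Equivalent and far less surprising:
import importlib
leaf = importlib.import_module('os.path')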
8d217c9797f19d4276484fd070a4a5f3de623e84
|
tapioca_toggl/__init__.py
|
tapioca_toggl/__init__.py
|
# -*- coding: utf-8 -*-
"""
tapioca_toggl
-------------
Python wrapper for Toggl API v8
"""
__version__ = '0.1.0'
|
# -*- coding: utf-8 -*-
"""
tapioca_toggl
-------------
Python wrapper for Toggl API v8
"""
__version__ = '0.1.0'
from .tapioca_toggl import Toggl # noqa
|
Make api accessible from python package
|
Make api accessible from python package
|
Python
|
mit
|
hackebrot/tapioca-toggl
|
# -*- coding: utf-8 -*-
"""
tapioca_toggl
-------------
Python wrapper for Toggl API v8
"""
__version__ = '0.1.0'
Make api accessible from python package
|
# -*- coding: utf-8 -*-
"""
tapioca_toggl
-------------
Python wrapper for Toggl API v8
"""
__version__ = '0.1.0'
from .tapioca_toggl import Toggl # noqa
|
<commit_before># -*- coding: utf-8 -*-
"""
tapioca_toggl
-------------
Python wrapper for Toggl API v8
"""
__version__ = '0.1.0'
<commit_msg>Make api accessible from python package<commit_after>
|
# -*- coding: utf-8 -*-
"""
tapioca_toggl
-------------
Python wrapper for Toggl API v8
"""
__version__ = '0.1.0'
from .tapioca_toggl import Toggl # noqa
|
# -*- coding: utf-8 -*-
"""
tapioca_toggl
-------------
Python wrapper for Toggl API v8
"""
__version__ = '0.1.0'
Make api accessible from python package
# -*- coding: utf-8 -*-
"""
tapioca_toggl
-------------
Python wrapper for Toggl API v8
"""
__version__ = '0.1.0'
from .tapioca_toggl import Toggl # noqa
|
<commit_before># -*- coding: utf-8 -*-
"""
tapioca_toggl
-------------
Python wrapper for Toggl API v8
"""
__version__ = '0.1.0'
<commit_msg>Make api accessible from python package<commit_after># -*- coding: utf-8 -*-
"""
tapioca_toggl
-------------
Python wrapper for Toggl API v8
"""
__version__ = '0.1.0'
from .tapioca_toggl import Toggl # noqa
|
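The one-liner in the record above is the standard re-export pattern: pulling a name up into __init__.py so callers write from tapioca_toggl import Toggl instead of reaching into the submodule, with # noqa silencing the linter's "imported but unused" complaint. Sketched with a hypothetical package layout:

# mypkg/__init__.py  (hypothetical package, mirroring the record's change)
from .core import Client  # noqa: F401 -- intentional re-export

# A consumer can now write:
#     from mypkg import Client
# rather than:
#     from mypkg.core import Client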
d24e31dbebc776524e0a2cd4b971c726bfcbfda5
|
py_nist_beacon/nist_randomness_beacon.py
|
py_nist_beacon/nist_randomness_beacon.py
|
import requests
from requests.exceptions import RequestException
from py_nist_beacon.nist_randomness_beacon_value import (
NistRandomnessBeaconValue
)
class NistRandomnessBeacon(object):
NIST_BASE_URL = "https://beacon.nist.gov/rest/record"
@classmethod
def get_last_record(cls):
try:
r = requests.get("{}/last".format(cls.NIST_BASE_URL))
return NistRandomnessBeaconValue.from_xml(r.text)
except RequestException:
return None
|
import requests
from requests.exceptions import RequestException
from py_nist_beacon.nist_randomness_beacon_value import (
NistRandomnessBeaconValue
)
class NistRandomnessBeacon(object):
NIST_BASE_URL = "https://beacon.nist.gov/rest/record"
@classmethod
def get_last_record(cls):
try:
r = requests.get("{}/last".format(cls.NIST_BASE_URL))
if r.status_code is requests.codes.OK:
return NistRandomnessBeaconValue.from_xml(r.text)
else:
return None
except RequestException:
return None
|
Check status code before object
|
Check status code before object
|
Python
|
apache-2.0
|
urda/nistbeacon
|
import requests
from requests.exceptions import RequestException
from py_nist_beacon.nist_randomness_beacon_value import (
NistRandomnessBeaconValue
)
class NistRandomnessBeacon(object):
NIST_BASE_URL = "https://beacon.nist.gov/rest/record"
@classmethod
def get_last_record(cls):
try:
r = requests.get("{}/last".format(cls.NIST_BASE_URL))
return NistRandomnessBeaconValue.from_xml(r.text)
except RequestException:
return None
Check status code before object
|
import requests
from requests.exceptions import RequestException
from py_nist_beacon.nist_randomness_beacon_value import (
NistRandomnessBeaconValue
)
class NistRandomnessBeacon(object):
NIST_BASE_URL = "https://beacon.nist.gov/rest/record"
@classmethod
def get_last_record(cls):
try:
r = requests.get("{}/last".format(cls.NIST_BASE_URL))
if r.status_code is requests.codes.OK:
return NistRandomnessBeaconValue.from_xml(r.text)
else:
return None
except RequestException:
return None
|
<commit_before>import requests
from requests.exceptions import RequestException
from py_nist_beacon.nist_randomness_beacon_value import (
NistRandomnessBeaconValue
)
class NistRandomnessBeacon(object):
NIST_BASE_URL = "https://beacon.nist.gov/rest/record"
@classmethod
def get_last_record(cls):
try:
r = requests.get("{}/last".format(cls.NIST_BASE_URL))
return NistRandomnessBeaconValue.from_xml(r.text)
except RequestException:
return None
<commit_msg>Check status code before object<commit_after>
|
import requests
from requests.exceptions import RequestException
from py_nist_beacon.nist_randomness_beacon_value import (
NistRandomnessBeaconValue
)
class NistRandomnessBeacon(object):
NIST_BASE_URL = "https://beacon.nist.gov/rest/record"
@classmethod
def get_last_record(cls):
try:
r = requests.get("{}/last".format(cls.NIST_BASE_URL))
if r.status_code is requests.codes.OK:
return NistRandomnessBeaconValue.from_xml(r.text)
else:
return None
except RequestException:
return None
|
import requests
from requests.exceptions import RequestException
from py_nist_beacon.nist_randomness_beacon_value import (
NistRandomnessBeaconValue
)
class NistRandomnessBeacon(object):
NIST_BASE_URL = "https://beacon.nist.gov/rest/record"
@classmethod
def get_last_record(cls):
try:
r = requests.get("{}/last".format(cls.NIST_BASE_URL))
return NistRandomnessBeaconValue.from_xml(r.text)
except RequestException:
return None
Check status code before object
import requests
from requests.exceptions import RequestException
from py_nist_beacon.nist_randomness_beacon_value import (
NistRandomnessBeaconValue
)
class NistRandomnessBeacon(object):
NIST_BASE_URL = "https://beacon.nist.gov/rest/record"
@classmethod
def get_last_record(cls):
try:
r = requests.get("{}/last".format(cls.NIST_BASE_URL))
if r.status_code is requests.codes.OK:
return NistRandomnessBeaconValue.from_xml(r.text)
else:
return None
except RequestException:
return None
|
<commit_before>import requests
from requests.exceptions import RequestException
from py_nist_beacon.nist_randomness_beacon_value import (
NistRandomnessBeaconValue
)
class NistRandomnessBeacon(object):
NIST_BASE_URL = "https://beacon.nist.gov/rest/record"
@classmethod
def get_last_record(cls):
try:
r = requests.get("{}/last".format(cls.NIST_BASE_URL))
return NistRandomnessBeaconValue.from_xml(r.text)
except RequestException:
return None
<commit_msg>Check status code before object<commit_after>import requests
from requests.exceptions import RequestException
from py_nist_beacon.nist_randomness_beacon_value import (
NistRandomnessBeaconValue
)
class NistRandomnessBeacon(object):
NIST_BASE_URL = "https://beacon.nist.gov/rest/record"
@classmethod
def get_last_record(cls):
try:
r = requests.get("{}/last".format(cls.NIST_BASE_URL))
if r.status_code is requests.codes.OK:
return NistRandomnessBeaconValue.from_xml(r.text)
else:
return None
except RequestException:
return None
|
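One nit worth flagging in the guard above: r.status_code is requests.codes.OK compares by identity, which only works because CPython happens to cache small integers; the portable spellings are ==, Response.ok, or raise_for_status(). A hedged sketch of the same guard with equality:

import requests
from requests.exceptions import RequestException

def get_last_text(url):
    # Same control flow as the record, with == instead of 'is'.
    try:
        r = requests.get(url)
    except RequestException:
        return None
    return r.text if r.status_code == requests.codes.ok else None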
d6edbc05f1d6f06848b78f131c975b3373b1179a
|
cpgintegrate/__init__.py
|
cpgintegrate/__init__.py
|
import pandas
import traceback
import typing
def process_files(file_iterator: typing.Iterator[typing.IO], processor: typing.Callable) -> pandas.DataFrame:
def get_frames():
for file in file_iterator:
try:
df = processor(file)
except Exception:
df = pandas.DataFrame({"error": [traceback.format_exc()]})
yield (df
.assign(Source=getattr(file, 'name', None),
SubjectID=getattr(file, 'cpgintegrate_subject_id', None),
FileSubjectID=df.index if df.index.name else None))
return pandas.DataFrame(pandas.concat((frame for frame in get_frames()))).set_index("SubjectID")
|
import pandas
import traceback
import typing
def process_files(file_iterator: typing.Iterator[typing.IO], processor: typing.Callable) -> pandas.DataFrame:
def get_frames():
for file in file_iterator:
df = processor(file)
yield (df
.assign(Source=getattr(file, 'name', None),
SubjectID=getattr(file, 'cpgintegrate_subject_id', None),
FileSubjectID=df.index if df.index.name else None))
return pandas.DataFrame(pandas.concat((frame for frame in get_frames()))).set_index("SubjectID")
|
Raise exceptions rather than catching
|
Raise exceptions rather than catching
|
Python
|
agpl-3.0
|
PointyShinyBurning/cpgintegrate
|
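With exceptions now propagating out of process_files, a caller that preferred the old tolerate-and-record behaviour can opt back in by wrapping its processor; a minimal sketch (the tolerant wrapper is hypothetical, not part of cpgintegrate):

import traceback
import pandas

def tolerant(processor):
    # Any exception from the processor becomes a one-row DataFrame
    # carrying the traceback, exactly as the removed except-clause did.
    def wrapped(file):
        try:
            return processor(file)
        except Exception:
            return pandas.DataFrame({"error": [traceback.format_exc()]})
    return wrapped

process_files(files, tolerant(my_processor)) then behaves like the pre-change version while the library default stays strict.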
ad37b36b40b9e59b380049855012b30f1c5c1a28
|
scripts/master/optional_arguments.py
|
scripts/master/optional_arguments.py
|
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utility classes to enhance process.properties.Properties usefulness."""
from buildbot.process.properties import WithProperties
class ListProperties(WithProperties):
"""Act like a list but skip over items that are None.
This class doesn't use WithProperties methods but inherits from it since it is
used as a flag in Properties.render() to defer the actual work to
self.render()."""
compare_attrs = ('items')
def __init__(self, items):
"""items should be a list."""
# Dummy initialization.
WithProperties.__init__(self, '')
self.items = items
def render(self, pmap):
results = []
# For each optional item, look up the corresponding property in the
# PropertyMap.
for item in self.items:
if isinstance(item, WithProperties):
item = item.render(pmap)
# Skip over None items.
if item is not None and item != '':
results.append(item)
return results
|
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utility classes to enhance process.properties.Properties usefulness."""
from buildbot.process.properties import WithProperties
class ListProperties(WithProperties):
"""Act like a list but skip over items that are None.
This class doesn't use WithProperties methods but inherits from it since it is
used as a flag in Properties.render() to defer the actual work to
self.render()."""
compare_attrs = ('items')
def __init__(self, items):
"""items should be a list."""
# Dummy initialization.
WithProperties.__init__(self, '')
self.items = items
# For buildbot 8.4 and below.
def render(self, pmap):
results = []
# For each optional item, look up the corresponding property in the
# PropertyMap.
for item in self.items:
if isinstance(item, WithProperties):
item = item.render(pmap)
# Skip over None items.
if item is not None and item != '':
results.append(item)
return results
# For buildbot 8.4p1 and above.
def getRenderingFor(self, build):
results = []
# For each optional item, look up the corresponding property in the
# PropertyMap.
for item in self.items:
if isinstance(item, WithProperties):
item = item.getRenderingFor(build)
# Skip over None items.
if item is not None and item != '':
results.append(item)
return results
|
Fix ListProperties to be compatible with buildbot 0.8.4p1.
|
Fix ListProperties to be compatible with buildbot 0.8.4p1.
The duplicated code will be removed once 0.7.12 is removed.
Review URL: http://codereview.chromium.org/7193037
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@91477 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
eunchong/build,eunchong/build,eunchong/build,eunchong/build
|
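The duplication the message promises to remove later could already be folded into one helper; a hypothetical refactor of the two entry points as methods on ListProperties (_filter_items is an invented name, not buildbot API):

def _filter_items(self, render_one):
    # Shared body: render WithProperties items, drop None/empty results.
    results = []
    for item in self.items:
        if isinstance(item, WithProperties):
            item = render_one(item)
        if item is not None and item != '':
            results.append(item)
    return results

def render(self, pmap):  # buildbot 0.8.4 and below
    return self._filter_items(lambda item: item.render(pmap))

def getRenderingFor(self, build):  # buildbot 0.8.4p1 and above
    return self._filter_items(lambda item: item.getRenderingFor(build))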
9df966ba388d05e66e64d7692ab971e51cd9762a
|
csv-to-json.py
|
csv-to-json.py
|
import simplejson as json
import zmq
import sys
import base64
import zlib
from jsonsig import *
fieldnames = "buyPrice,sellPrice,demand,demandLevel,stationStock,stationStockLevel,categoryName,itemName,stationName,timestamp".split(',')
(pk, sk) = pysodium.crypto_sign_keypair()
context = zmq.Context()
socket = context.socket(zmq.SUB)
socket.connect("tcp://firehose.elite-market-data.net:9050")
socket.setsockopt(zmq.SUBSCRIBE, "")
publisher = context.socket(zmq.PUSH)
publisher.connect("tcp://localhost:8500")
while True:
data = socket.recv()
values = data.split(',')
message = dict(zip(fieldnames, values))
message['timestamp'] = message['timestamp']+"+00:00"
for field in ['buyPrice', 'sellPrice']:
message[field] = float(message[field])
for field in ['demand', 'demandLevel', 'stationStock', 'stationStockLevel']:
message[field] = int(message[field])
envelope = {'version': '0.1', 'type': 'marketquote', 'message': message}
envelope = sign_json(envelope, pk, sk)
jsonstring = json.dumps(envelope, separators=(',', ':'), sort_keys=True)
print jsonstring
publisher.send(zlib.compress(jsonstring))
sys.stdout.flush()
|
import simplejson as json
import zmq
import sys
import base64
import zlib
from jsonsig import *
fieldnames = "buyPrice,sellPrice,demand,demandLevel,stationStock,stationStockLevel,categoryName,itemName,stationName,timestamp".split(',')
(pk, sk) = pysodium.crypto_sign_keypair()
context = zmq.Context()
socket = context.socket(zmq.SUB)
socket.connect("tcp://firehose.elite-market-data.net:9050")
socket.setsockopt(zmq.SUBSCRIBE, "")
publisher = context.socket(zmq.PUSH)
publisher.connect("tcp://collector.elite-market-data.net:8500")
while True:
data = socket.recv()
values = data.split(',')
message = dict(zip(fieldnames, values))
message['timestamp'] = message['timestamp']+"+00:00"
for field in ['buyPrice', 'sellPrice']:
message[field] = float(message[field])
for field in ['demand', 'demandLevel', 'stationStock', 'stationStockLevel']:
message[field] = int(message[field])
envelope = {'version': '0.1', 'type': 'marketquote', 'message': message}
envelope = sign_json(envelope, pk, sk)
jsonstring = json.dumps(envelope, separators=(',', ':'), sort_keys=True)
print jsonstring
publisher.send(zlib.compress(jsonstring))
sys.stdout.flush()
|
Use official URL for collector
|
Use official URL for collector
|
Python
|
bsd-2-clause
|
andreas23/emdn
|
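Hardcoded endpoints invite exactly this kind of follow-up commit. One way to keep the official URL as the default while still allowing a local collector, sketched with an invented environment variable name (EMDN_COLLECTOR_URL is not part of the project):

import os
import zmq

context = zmq.Context()
publisher = context.socket(zmq.PUSH)
# Official collector by default; deployments may override via the environment.
publisher.connect(os.environ.get("EMDN_COLLECTOR_URL",
                                 "tcp://collector.elite-market-data.net:8500"))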
f5513d6fa736c8e1ffc8490c61f05c33ec42616c
|
config/main.py
|
config/main.py
|
# -*- coding: utf-8 -*-
PROJECT_NAME = "Stoppt das Überwachungspaket!"
IMPORTANT_REPS = ["05375", "02819", "51570", "02941", "08696", "35504"]
|
# -*- coding: utf-8 -*-
PROJECT_NAME = "Stoppt das Überwachungspaket!"
IMPORTANT_REPS = ["05375", "02819", "51570", "02941", "35504"]
|
Update representative importance, remove Reinhold Mitterlehner
|
Update representative importance, remove Reinhold Mitterlehner
|
Python
|
mit
|
AKVorrat/ueberwachungspaket.at,AKVorrat/ueberwachungspaket.at,AKVorrat/ueberwachungspaket.at
|
c360289fe00722ff2f85390e2d9c40c4e9338893
|
test/test_function.py
|
test/test_function.py
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com>
"""
from __future__ import print_function
from __future__ import unicode_literals
from pytablewriter._function import convert_idx_to_alphabet
import pytest
class Test_convert_idx_to_alphabet:
@pytest.mark.parametrize(["value", "expected"], [
[
range(30),
[
"A", "B", "C", "D", "E",
"F", "G", "H", "I", "J",
"K", "L", "M", "N", "O",
"P", "Q", "R", "S", "T",
"U", "V", "W", "X", "Y",
"Z", "AA", "AB", "AC", "AD",
]
],
[
range(0, 900, 30),
[
"A", "AE", "BI", "CM", "DQ",
"EU", "FY", "HC", "IG", "JK",
"KO", "LS", "MW", "OA", "PE",
"QI", "RM", "SQ", "TU", "UY",
"WC", "XG", "YK", "ZO", "AAS",
"ABW", "ADA", "AEE", "AFI", "AGM",
]
],
])
def test_normal(self, value, expected):
assert [convert_idx_to_alphabet(v) for v in value] == expected
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com>
"""
from __future__ import print_function
from __future__ import unicode_literals
from pytablewriter._function import convert_idx_to_alphabet
import pytest
class Test_convert_idx_to_alphabet(object):
@pytest.mark.parametrize(["value", "expected"], [
[
range(30),
[
"A", "B", "C", "D", "E",
"F", "G", "H", "I", "J",
"K", "L", "M", "N", "O",
"P", "Q", "R", "S", "T",
"U", "V", "W", "X", "Y",
"Z", "AA", "AB", "AC", "AD",
]
],
[
range(0, 900, 30),
[
"A", "AE", "BI", "CM", "DQ",
"EU", "FY", "HC", "IG", "JK",
"KO", "LS", "MW", "OA", "PE",
"QI", "RM", "SQ", "TU", "UY",
"WC", "XG", "YK", "ZO", "AAS",
"ABW", "ADA", "AEE", "AFI", "AGM",
]
],
])
def test_normal(self, value, expected):
assert [convert_idx_to_alphabet(v) for v in value] == expected
|
Change class definitions from old style to new style
|
Change class definitions from old style to new style
|
Python
|
mit
|
thombashi/pytablewriter
|
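For context on why this matters only under Python 2: a class there gets new-style semantics (consistent MRO, descriptors, cooperative super()) only by inheriting from object, while on Python 3 every class is new-style and the explicit base is redundant. A two-line illustration:

class OldStyle:            # old-style on Python 2, new-style on Python 3
    pass

class NewStyle(object):    # new-style everywhere; what this commit enforces
    pass

# On Python 2, type(OldStyle()) is the generic `instance` type, whereas
# type(NewStyle()) is NewStyle itself.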
391d5ef0d13c9f7401ee3576ff578515c07c5f77
|
spacy/tests/regression/test_issue1434.py
|
spacy/tests/regression/test_issue1434.py
|
from __future__ import unicode_literals
from spacy.tokens import Doc
from spacy.vocab import Vocab
from spacy.matcher import Matcher
from spacy.lang.lex_attrs import LEX_ATTRS
def test_issue1434():
'''Test matches occur when optional element at end of short doc'''
vocab = Vocab(lex_attr_getters=LEX_ATTRS)
hello_world = Doc(vocab, words=['Hello', 'World'])
hello = Doc(vocab, words=['Hello'])
matcher = Matcher(vocab)
matcher.add('MyMatcher', None,
[ {'ORTH': 'Hello' }, {'IS_ALPHA': True, 'OP': '?'} ])
matches = matcher(hello_world)
assert matches
matches = matcher(hello)
assert matches
|
from __future__ import unicode_literals
from ...vocab import Vocab
from ...lang.lex_attrs import LEX_ATTRS
from ...tokens import Doc
from ...matcher import Matcher
def test_issue1434():
'''Test matches occur when optional element at end of short doc'''
vocab = Vocab(lex_attr_getters=LEX_ATTRS)
hello_world = Doc(vocab, words=['Hello', 'World'])
hello = Doc(vocab, words=['Hello'])
matcher = Matcher(vocab)
matcher.add('MyMatcher', None,
[ {'ORTH': 'Hello' }, {'IS_ALPHA': True, 'OP': '?'} ])
matches = matcher(hello_world)
assert matches
matches = matcher(hello)
assert matches
|
Normalize imports in regression test
|
Normalize imports in regression test
|
Python
|
mit
|
honnibal/spaCy,recognai/spaCy,recognai/spaCy,honnibal/spaCy,explosion/spaCy,explosion/spaCy,spacy-io/spaCy,aikramer2/spaCy,explosion/spaCy,aikramer2/spaCy,aikramer2/spaCy,spacy-io/spaCy,aikramer2/spaCy,spacy-io/spaCy,honnibal/spaCy,recognai/spaCy,explosion/spaCy,spacy-io/spaCy,aikramer2/spaCy,honnibal/spaCy,explosion/spaCy,recognai/spaCy,recognai/spaCy,aikramer2/spaCy,spacy-io/spaCy,explosion/spaCy,spacy-io/spaCy,recognai/spaCy
|
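The point of the normalization: relative imports make the test exercise the package it lives in rather than whichever spacy is found first on sys.path. For reference, with the file at spacy/tests/regression/, each leading dot climbs one package level:

from ...vocab import Vocab   # three dots -> spacy.vocab
from ...tokens import Doc    # three dots -> spacy.tokens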
36b24c21124ce8756b122b197f1f930732caa61f
|
tornadowebapi/resource.py
|
tornadowebapi/resource.py
|
from tornadowebapi.traitlets import HasTraits
class Resource(HasTraits):
"""A model representing a resource in our system.
Must be reimplemented for the specific resource in our domain,
as well as specifying its properties with traitlets.
The following metadata in the specified traitlets are accepted:
- optional
bool, default False.
If True, the information can be omitted from the representation
when creating.
If False, the information must be present, or an error
BadRepresentation will be raised.
The resource is always identified via its collection name, and
its identifier. Both will end up in the URL, like so
/collection_name/identifier/
"""
def __init__(self, identifier, *args, **kwargs):
self.identifier = identifier
super(Resource, self).__init__(*args, **kwargs)
@classmethod
def collection_name(cls):
"""Identifies the name of the collection. By REST convention, it is
a plural form of the class name, so the default is the name of the
class, lowercase, and with an "s" added at the end.
Override this method to return a better pluralization.
"""
return cls.__name__.lower() + "s"
@property
def identifier(self):
return self._identifier
@identifier.setter
def identifier(self, value):
if not isinstance(value, str):
raise ValueError("Identifier must be a string. Got {}".format(
type(value)
))
self._identifier = value
|
from tornadowebapi.traitlets import HasTraits
class Resource(HasTraits):
"""A model representing a resource in our system.
Must be reimplemented for the specific resource in our domain,
as well as specifying its properties with traitlets.
The following metadata in the specified traitlets are accepted:
- optional
bool, default False.
If True, the information can be omitted from the representation
when creating.
If False, the information must be present, or an error
BadRepresentation will be raised.
The resource is always identified via its collection name, and
its identifier. Both will end up in the URL, like so
/collection_name/identifier/
"""
def __init__(self, identifier, *args, **kwargs):
self.identifier = identifier
super(Resource, self).__init__(*args, **kwargs)
@classmethod
def collection_name(cls):
"""Identifies the name of the collection. By REST convention, it is
a plural form of the class name, so the default is the name of the
class, lowercase, and with an "s" added at the end.
Override this method to return a better pluralization.
"""
return cls.__name__.lower() + "s"
@property
def identifier(self):
return self._identifier
@identifier.setter
def identifier(self, value):
if not (value is None or isinstance(value, str)):
raise ValueError("Identifier must be a string. Got {}".format(
type(value)
))
self._identifier = value
|
Allow back None as identifier
|
Allow back None as identifier
|
Python
|
bsd-3-clause
|
simphony/tornado-webapi
|
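A quick usage sketch of the relaxed setter, assuming the record's class is importable as shown and that HasTraits tolerates positional construction:

from tornadowebapi.resource import Resource

r = Resource(None)    # accepted again after this change
r.identifier = "42"   # strings remain valid
r.identifier = 42     # still raises ValueError

One nit the diff leaves behind: the ValueError text still claims the identifier "must be a string" even though None is now legal.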
from tornadowebapi.traitlets import HasTraits
class Resource(HasTraits):
"""A model representing a resource in our system.
Must be reimplemented for the specific resource in our domain,
as well as specifying its properties with traitlets.
The following metadata in the specified traitlets are accepted:
- optional
bool, default False.
If True, the information can be omitted from the representation
when creating.
If False, the information must be present, or an error
BadRepresentation will be raised.
The resource is always identified via its collection name, and
its identifier. Both will end up in the URL, like so
/collection_name/identifier/
"""
def __init__(self, identifier, *args, **kwargs):
self.identifier = identifier
super(Resource, self).__init__(*args, **kwargs)
@classmethod
def collection_name(cls):
"""Identifies the name of the collection. By REST convention, it is
a plural form of the class name, so the default is the name of the
class, lowercase, and with an "s" added at the end.
Override this method to return a better pluralization.
"""
return cls.__name__.lower() + "s"
@property
def identifier(self):
return self._identifier
@identifier.setter
def identifier(self, value):
if not isinstance(value, str):
raise ValueError("Identifier must be a string. Got {}".format(
type(value)
))
self._identifier = value
Allow back None as identifier
|
from tornadowebapi.traitlets import HasTraits
class Resource(HasTraits):
"""A model representing a resource in our system.
Must be reimplemented for the specific resource in our domain,
as well as specifying its properties with traitlets.
The following metadata in the specified traitlets are accepted:
- optional
bool, default False.
If True, the information can be omitted from the representation
when creating.
If False, the information must be present, or an error
BadRepresentation will be raised.
The resource is always identified via its collection name, and
its identifier. Both will end up in the URL, like so
/collection_name/identifier/
"""
def __init__(self, identifier, *args, **kwargs):
self.identifier = identifier
super(Resource, self).__init__(*args, **kwargs)
@classmethod
def collection_name(cls):
"""Identifies the name of the collection. By REST convention, it is
a plural form of the class name, so the default is the name of the
class, lowercase, and with an "s" added at the end.
Override this method to return a better pluralization.
"""
return cls.__name__.lower() + "s"
@property
def identifier(self):
return self._identifier
@identifier.setter
def identifier(self, value):
if not (value is None or isinstance(value, str)):
raise ValueError("Identifier must be a string. Got {}".format(
type(value)
))
self._identifier = value
|
<commit_before>from tornadowebapi.traitlets import HasTraits
class Resource(HasTraits):
"""A model representing a resource in our system.
Must be reimplemented for the specific resource in our domain,
as well as specifying its properties with traitlets.
The following metadata in the specified traitlets are accepted:
- optional
bool, default False.
If True, the information can be omitted from the representation
when creating.
If False, the information must be present, or an error
BadRepresentation will be raised.
The resource is always identified via its collection name, and
its identifier. Both will end up in the URL, like so
/collection_name/identifier/
"""
def __init__(self, identifier, *args, **kwargs):
self.identifier = identifier
super(Resource, self).__init__(*args, **kwargs)
@classmethod
def collection_name(cls):
"""Identifies the name of the collection. By REST convention, it is
a plural form of the class name, so the default is the name of the
class, lowercase, and with an "s" added at the end.
Override this method to return a better pluralization.
"""
return cls.__name__.lower() + "s"
@property
def identifier(self):
return self._identifier
@identifier.setter
def identifier(self, value):
if not isinstance(value, str):
raise ValueError("Identifier must be a string. Got {}".format(
type(value)
))
self._identifier = value
<commit_msg>Allow back None as identifier<commit_after>
|
from tornadowebapi.traitlets import HasTraits
class Resource(HasTraits):
"""A model representing a resource in our system.
Must be reimplemented for the specific resource in our domain,
as well as specifying its properties with traitlets.
The following metadata in the specified traitlets are accepted:
- optional
bool, default False.
If True, the information can be omitted from the representation
when creating.
If False, the information must be present, or an error
BadRepresentation will be raised.
The resource is always identified via its collection name, and
its identifier. Both will end up in the URL, like so
/collection_name/identifier/
"""
def __init__(self, identifier, *args, **kwargs):
self.identifier = identifier
super(Resource, self).__init__(*args, **kwargs)
@classmethod
def collection_name(cls):
"""Identifies the name of the collection. By REST convention, it is
a plural form of the class name, so the default is the name of the
class, lowercase, and with an "s" added at the end.
Override this method to return a better pluralization.
"""
return cls.__name__.lower() + "s"
@property
def identifier(self):
return self._identifier
@identifier.setter
def identifier(self, value):
if not (value is None or isinstance(value, str)):
raise ValueError("Identifier must be a string. Got {}".format(
type(value)
))
self._identifier = value
|
from tornadowebapi.traitlets import HasTraits
class Resource(HasTraits):
"""A model representing a resource in our system.
Must be reimplemented for the specific resource in our domain,
as well as specifying its properties with traitlets.
The following metadata in the specified traitlets are accepted:
- optional
bool, default False.
If True, the information can be omitted from the representation
when creating.
If False, the information must be present, or an error
BadRepresentation will be raised.
The resource is always identified via its collection name, and
its identifier. Both will end up in the URL, like so
/collection_name/identifier/
"""
def __init__(self, identifier, *args, **kwargs):
self.identifier = identifier
super(Resource, self).__init__(*args, **kwargs)
@classmethod
def collection_name(cls):
"""Identifies the name of the collection. By REST convention, it is
a plural form of the class name, so the default is the name of the
class, lowercase, and with an "s" added at the end.
Override this method to return a better pluralization.
"""
return cls.__name__.lower() + "s"
@property
def identifier(self):
return self._identifier
@identifier.setter
def identifier(self, value):
if not isinstance(value, str):
raise ValueError("Identifier must be a string. Got {}".format(
type(value)
))
self._identifier = value
Allow back None as identifierfrom tornadowebapi.traitlets import HasTraits
class Resource(HasTraits):
"""A model representing a resource in our system.
Must be reimplemented for the specific resource in our domain,
as well as specifying its properties with traitlets.
The following metadata in the specified traitlets are accepted:
- optional
bool, default False.
If True, the information can be omitted from the representation
when creating.
If False, the information must be present, or an error
BadRepresentation will be raised.
The resource is always identified via its collection name, and
its identifier. Both will end up in the URL, like so
/collection_name/identifier/
"""
def __init__(self, identifier, *args, **kwargs):
self.identifier = identifier
super(Resource, self).__init__(*args, **kwargs)
@classmethod
def collection_name(cls):
"""Identifies the name of the collection. By REST convention, it is
a plural form of the class name, so the default is the name of the
class, lowercase, and with an "s" added at the end.
Override this method to return a better pluralization.
"""
return cls.__name__.lower() + "s"
@property
def identifier(self):
return self._identifier
@identifier.setter
def identifier(self, value):
if not (value is None or isinstance(value, str)):
raise ValueError("Identifier must be a string. Got {}".format(
type(value)
))
self._identifier = value
|
<commit_before>from tornadowebapi.traitlets import HasTraits
class Resource(HasTraits):
"""A model representing a resource in our system.
Must be reimplemented for the specific resource in our domain,
as well as specifying its properties with traitlets.
The following metadata in the specified traitlets are accepted:
- optional
bool, default False.
If True, the information can be omitted from the representation
when creating.
If False, the information must be present, or an error
BadRepresentation will be raised.
The resource is always identified via its collection name, and
its identifier. Both will end up in the URL, like so
/collection_name/identifier/
"""
def __init__(self, identifier, *args, **kwargs):
self.identifier = identifier
super(Resource, self).__init__(*args, **kwargs)
@classmethod
def collection_name(cls):
"""Identifies the name of the collection. By REST convention, it is
a plural form of the class name, so the default is the name of the
class, lowercase, and with an "s" added at the end.
Override this method to return a better pluralization.
"""
return cls.__name__.lower() + "s"
@property
def identifier(self):
return self._identifier
@identifier.setter
def identifier(self, value):
if not isinstance(value, str):
raise ValueError("Identifier must be a string. Got {}".format(
type(value)
))
self._identifier = value
<commit_msg>Allow back None as identifier<commit_after>from tornadowebapi.traitlets import HasTraits
class Resource(HasTraits):
"""A model representing a resource in our system.
Must be reimplemented for the specific resource in our domain,
as well as specifying its properties with traitlets.
The following metadata in the specified traitlets are accepted:
- optional
bool, default False.
If True, the information can be omitted from the representation
when creating.
If False, the information must be present, or an error
BadRepresentation will be raised.
The resource is always identified via its collection name, and
its identifier. Both will end up in the URL, like so
/collection_name/identifier/
"""
def __init__(self, identifier, *args, **kwargs):
self.identifier = identifier
super(Resource, self).__init__(*args, **kwargs)
@classmethod
def collection_name(cls):
"""Identifies the name of the collection. By REST convention, it is
a plural form of the class name, so the default is the name of the
class, lowercase, and with an "s" added at the end.
Override this method to return a better pluralization.
"""
return cls.__name__.lower() + "s"
@property
def identifier(self):
return self._identifier
@identifier.setter
def identifier(self, value):
if not (value is None or isinstance(value, str)):
raise ValueError("Identifier must be a string. Got {}".format(
type(value)
))
self._identifier = value
|
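A usage sketch for the record above: with the patched setter, None round-trips through the identifier property again, while any other non-string still raises. This assumes the Resource class shown is in scope; Student is a hypothetical subclass.
class Student(Resource):
    pass  # domain traitlets would be declared here
s = Student("42")                 # string identifiers are accepted
s.identifier = None               # accepted again after this commit
print(Student.collection_name())  # -> "students" (default pluralization)
try:
    s.identifier = 42             # ints and everything else still raise
except ValueError as exc:
    print(exc)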
952ef8d596916b7e753c1179552a270430a21122
|
tests/test_lattice.py
|
tests/test_lattice.py
|
import rml.lattice
import rml.element
DUMMY_NAME = 'dummy'
def test_create_lattice():
l = rml.lattice.Lattice(DUMMY_NAME)
assert(len(l)) == 0
assert l.name == DUMMY_NAME
def test_non_negative_lattice():
l = rml.lattice.Lattice()
assert(len(l)) >= 0
def test_lattice_with_one_element():
l = rml.lattice.Lattice(DUMMY_NAME)
element_length = 1.5
e = rml.element.Element('dummy_element', element_length)
l.append_element(e)
# There is one element in the lattice.
assert(len(l) == 1)
# The total length of the lattice is the same as its one element.
assert l.length() == element_length
# Get all elements
assert l.get_elements() == [e]
|
import pytest
import rml.lattice
import rml.element
DUMMY_NAME = 'dummy'
@pytest.fixture
def simple_element():
element_length = 1.5
e = rml.element.Element('dummy_element', element_length)
return e
@pytest.fixture
def simple_element_and_lattice(simple_element):
l = rml.lattice.Lattice(DUMMY_NAME)
l.append_element(simple_element)
return simple_element, l
def test_create_lattice():
l = rml.lattice.Lattice(DUMMY_NAME)
assert(len(l)) == 0
assert l.name == DUMMY_NAME
def test_non_negative_lattice():
l = rml.lattice.Lattice()
assert(len(l)) >= 0
def test_lattice_with_one_element(simple_element_and_lattice):
element, lattice = simple_element_and_lattice
# There is one element in the lattice.
assert(len(lattice) == 1)
# The total length of the lattice is the same as its one element.
assert lattice.length() == element.length
# Get all elements
assert lattice.get_elements() == [element]
def test_lattice_get_element_with_family(simple_element_and_lattice):
element, lattice = simple_element_and_lattice
element.add_to_family('fam')
assert lattice.get_elements('fam') == [element]
assert lattice.get_elements('nofam') == []
|
Test getting elements with different family names.
|
Test getting elements with different family names.
|
Python
|
apache-2.0
|
razvanvasile/RML,willrogers/pml,willrogers/pml
|
import rml.lattice
import rml.element
DUMMY_NAME = 'dummy'
def test_create_lattice():
l = rml.lattice.Lattice(DUMMY_NAME)
assert(len(l)) == 0
assert l.name == DUMMY_NAME
def test_non_negative_lattice():
l = rml.lattice.Lattice()
assert(len(l)) >= 0
def test_lattice_with_one_element():
l = rml.lattice.Lattice(DUMMY_NAME)
element_length = 1.5
e = rml.element.Element('dummy_element', element_length)
l.append_element(e)
# There is one element in the lattice.
assert(len(l) == 1)
# The total length of the lattice is the same as its one element.
assert l.length() == element_length
# Get all elements
assert l.get_elements() == [e]
Test getting elements with different family names.
|
import pytest
import rml.lattice
import rml.element
DUMMY_NAME = 'dummy'
@pytest.fixture
def simple_element():
element_length = 1.5
e = rml.element.Element('dummy_element', element_length)
return e
@pytest.fixture
def simple_element_and_lattice(simple_element):
l = rml.lattice.Lattice(DUMMY_NAME)
l.append_element(simple_element)
return simple_element, l
def test_create_lattice():
l = rml.lattice.Lattice(DUMMY_NAME)
assert(len(l)) == 0
assert l.name == DUMMY_NAME
def test_non_negative_lattice():
l = rml.lattice.Lattice()
assert(len(l)) >= 0
def test_lattice_with_one_element(simple_element_and_lattice):
element, lattice = simple_element_and_lattice
# There is one element in the lattice.
assert(len(lattice) == 1)
# The total length of the lattice is the same as its one element.
assert lattice.length() == element.length
# Get all elements
assert lattice.get_elements() == [element]
def test_lattice_get_element_with_family(simple_element_and_lattice):
element, lattice = simple_element_and_lattice
element.add_to_family('fam')
assert lattice.get_elements('fam') == [element]
assert lattice.get_elements('nofam') == []
|
<commit_before>import rml.lattice
import rml.element
DUMMY_NAME = 'dummy'
def test_create_lattice():
l = rml.lattice.Lattice(DUMMY_NAME)
assert(len(l)) == 0
assert l.name == DUMMY_NAME
def test_non_negative_lattice():
l = rml.lattice.Lattice()
assert(len(l)) >= 0
def test_lattice_with_one_element():
l = rml.lattice.Lattice(DUMMY_NAME)
element_length = 1.5
e = rml.element.Element('dummy_element', element_length)
l.append_element(e)
# There is one element in the lattice.
assert(len(l) == 1)
# The total length of the lattice is the same as its one element.
assert l.length() == element_length
# Get all elements
assert l.get_elements() == [e]
<commit_msg>Test getting elements with different family names.<commit_after>
|
import pytest
import rml.lattice
import rml.element
DUMMY_NAME = 'dummy'
@pytest.fixture
def simple_element():
element_length = 1.5
e = rml.element.Element('dummy_element', element_length)
return e
@pytest.fixture
def simple_element_and_lattice(simple_element):
l = rml.lattice.Lattice(DUMMY_NAME)
l.append_element(simple_element)
return simple_element, l
def test_create_lattice():
l = rml.lattice.Lattice(DUMMY_NAME)
assert(len(l)) == 0
assert l.name == DUMMY_NAME
def test_non_negative_lattice():
l = rml.lattice.Lattice()
assert(len(l)) >= 0
def test_lattice_with_one_element(simple_element_and_lattice):
element, lattice = simple_element_and_lattice
# There is one element in the lattice.
assert(len(lattice) == 1)
# The total length of the lattice is the same as its one element.
assert lattice.length() == element.length
# Get all elements
assert lattice.get_elements() == [element]
def test_lattice_get_element_with_family(simple_element_and_lattice):
element, lattice = simple_element_and_lattice
element.add_to_family('fam')
assert lattice.get_elements('fam') == [element]
assert lattice.get_elements('nofam') == []
|
import rml.lattice
import rml.element
DUMMY_NAME = 'dummy'
def test_create_lattice():
l = rml.lattice.Lattice(DUMMY_NAME)
assert(len(l)) == 0
assert l.name == DUMMY_NAME
def test_non_negative_lattice():
l = rml.lattice.Lattice()
assert(len(l)) >= 0
def test_lattice_with_one_element():
l = rml.lattice.Lattice(DUMMY_NAME)
element_length = 1.5
e = rml.element.Element('dummy_element', element_length)
l.append_element(e)
# There is one element in the lattice.
assert(len(l) == 1)
# The total length of the lattice is the same as its one element.
assert l.length() == element_length
# Get all elements
assert l.get_elements() == [e]
Test getting elements with different family names.import pytest
import rml.lattice
import rml.element
DUMMY_NAME = 'dummy'
@pytest.fixture
def simple_element():
element_length = 1.5
e = rml.element.Element('dummy_element', element_length)
return e
@pytest.fixture
def simple_element_and_lattice(simple_element):
l = rml.lattice.Lattice(DUMMY_NAME)
l.append_element(simple_element)
return simple_element, l
def test_create_lattice():
l = rml.lattice.Lattice(DUMMY_NAME)
assert(len(l)) == 0
assert l.name == DUMMY_NAME
def test_non_negative_lattice():
l = rml.lattice.Lattice()
assert(len(l)) >= 0
def test_lattice_with_one_element(simple_element_and_lattice):
element, lattice = simple_element_and_lattice
# There is one element in the lattice.
assert(len(lattice) == 1)
# The total length of the lattice is the same as its one element.
assert lattice.length() == element.length
# Get all elements
assert lattice.get_elements() == [element]
def test_lattice_get_element_with_family(simple_element_and_lattice):
element, lattice = simple_element_and_lattice
element.add_to_family('fam')
assert lattice.get_elements('fam') == [element]
assert lattice.get_elements('nofam') == []
|
<commit_before>import rml.lattice
import rml.element
DUMMY_NAME = 'dummy'
def test_create_lattice():
l = rml.lattice.Lattice(DUMMY_NAME)
assert(len(l)) == 0
assert l.name == DUMMY_NAME
def test_non_negative_lattice():
l = rml.lattice.Lattice()
assert(len(l)) >= 0
def test_lattice_with_one_element():
l = rml.lattice.Lattice(DUMMY_NAME)
element_length = 1.5
e = rml.element.Element('dummy_element', element_length)
l.append_element(e)
# There is one element in the lattice.
assert(len(l) == 1)
# The total length of the lattice is the same as its one element.
assert l.length() == element_length
# Get all elements
assert l.get_elements() == [e]
<commit_msg>Test getting elements with different family names.<commit_after>import pytest
import rml.lattice
import rml.element
DUMMY_NAME = 'dummy'
@pytest.fixture
def simple_element():
element_length = 1.5
e = rml.element.Element('dummy_element', element_length)
return e
@pytest.fixture
def simple_element_and_lattice(simple_element):
l = rml.lattice.Lattice(DUMMY_NAME)
l.append_element(simple_element)
return simple_element, l
def test_create_lattice():
l = rml.lattice.Lattice(DUMMY_NAME)
assert(len(l)) == 0
assert l.name == DUMMY_NAME
def test_non_negative_lattice():
l = rml.lattice.Lattice()
assert(len(l)) >= 0
def test_lattice_with_one_element(simple_element_and_lattice):
element, lattice = simple_element_and_lattice
# There is one element in the lattice.
assert(len(lattice) == 1)
# The total length of the lattice is the same as its one element.
assert lattice.length() == element.length
# Get all elements
assert lattice.get_elements() == [element]
def test_lattice_get_element_with_family(simple_element_and_lattice):
element, lattice = simple_element_and_lattice
element.add_to_family('fam')
assert lattice.get_elements('fam') == [element]
assert lattice.get_elements('nofam') == []
|
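The fixtures above pin down a small API surface. Here is a minimal sketch of Element and Lattice that would satisfy these tests, under the assumption that each element tracks its families as a plain set (rml's real implementation may differ):
class Element(object):
    def __init__(self, name, length):
        self.name = name
        self.length = length
        self.families = set()
    def add_to_family(self, family):
        self.families.add(family)
class Lattice(object):
    def __init__(self, name=''):
        self.name = name
        self._elements = []
    def __len__(self):
        return len(self._elements)
    def append_element(self, element):
        self._elements.append(element)
    def length(self):
        # total lattice length is the sum of its elements' lengths
        return sum(e.length for e in self._elements)
    def get_elements(self, family=None):
        # no family given: return every element, in insertion order
        if family is None:
            return list(self._elements)
        return [e for e in self._elements if family in e.families]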
cde822bc87efa47cc3fae6fbb9462ae6a362afbc
|
fedmsg.d/endpoints.py
|
fedmsg.d/endpoints.py
|
# This file is part of fedmsg.
# Copyright (C) 2012 Red Hat, Inc.
#
# fedmsg is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# fedmsg is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with fedmsg; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Authors: Ralph Bean <rbean@redhat.com>
#
config = dict(
# This is a dict of possible addresses from which fedmsg can send
# messages. fedmsg.init(...) requires that a 'name' argument be passed
# to it which corresponds with one of the keys in this dict.
endpoints={
# These are here so your local box can listen to the upstream
# infrastructure's bus. Cool, right? :)
"fedora-infrastructure": [
"tcp://hub.fedoraproject.org:9940",
#"tcp://stg.fedoraproject.org:9940",
],
},
)
|
# This file is part of fedmsg.
# Copyright (C) 2012 Red Hat, Inc.
#
# fedmsg is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# fedmsg is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with fedmsg; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Authors: Ralph Bean <rbean@redhat.com>
#
config = dict(
# This is a dict of possible addresses from which fedmsg can send
# messages. fedmsg.init(...) requires that a 'name' argument be passed
# to it which corresponds with one of the keys in this dict.
endpoints={
# These are here so your local box can listen to the upstream
# infrastructure's bus. Cool, right? :)
"fedora-infrastructure": [
"tcp://hub.fedoraproject.org:9940",
#"tcp://stg.fedoraproject.org:9940",
],
#"debian-infrastructure": [
# "tcp://fedmsg.olasd.eu:9940",
#],
},
)
|
Add debian endpoint as comment to file.
|
Add debian endpoint as comment to file.
|
Python
|
lgpl-2.1
|
fedora-infra/fedmsg,vivekanand1101/fedmsg,cicku/fedmsg,cicku/fedmsg,pombredanne/fedmsg,chaiku/fedmsg,vivekanand1101/fedmsg,cicku/fedmsg,mathstuf/fedmsg,vivekanand1101/fedmsg,chaiku/fedmsg,fedora-infra/fedmsg,pombredanne/fedmsg,mathstuf/fedmsg,maxamillion/fedmsg,maxamillion/fedmsg,mathstuf/fedmsg,chaiku/fedmsg,pombredanne/fedmsg,fedora-infra/fedmsg,maxamillion/fedmsg
|
# This file is part of fedmsg.
# Copyright (C) 2012 Red Hat, Inc.
#
# fedmsg is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# fedmsg is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with fedmsg; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Authors: Ralph Bean <rbean@redhat.com>
#
config = dict(
# This is a dict of possible addresses from which fedmsg can send
# messages. fedmsg.init(...) requires that a 'name' argument be passed
# to it which corresponds with one of the keys in this dict.
endpoints={
# These are here so your local box can listen to the upstream
# infrastructure's bus. Cool, right? :)
"fedora-infrastructure": [
"tcp://hub.fedoraproject.org:9940",
#"tcp://stg.fedoraproject.org:9940",
],
},
)
Add debian endpoint as comment to file.
|
# This file is part of fedmsg.
# Copyright (C) 2012 Red Hat, Inc.
#
# fedmsg is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# fedmsg is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with fedmsg; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Authors: Ralph Bean <rbean@redhat.com>
#
config = dict(
# This is a dict of possible addresses from which fedmsg can send
# messages. fedmsg.init(...) requires that a 'name' argument be passed
# to it which corresponds with one of the keys in this dict.
endpoints={
# These are here so your local box can listen to the upstream
# infrastructure's bus. Cool, right? :)
"fedora-infrastructure": [
"tcp://hub.fedoraproject.org:9940",
#"tcp://stg.fedoraproject.org:9940",
],
#"debian-infrastructure": [
# "tcp://fedmsg.olasd.eu:9940",
#],
},
)
|
<commit_before># This file is part of fedmsg.
# Copyright (C) 2012 Red Hat, Inc.
#
# fedmsg is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# fedmsg is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with fedmsg; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Authors: Ralph Bean <rbean@redhat.com>
#
config = dict(
# This is a dict of possible addresses from which fedmsg can send
# messages. fedmsg.init(...) requires that a 'name' argument be passed
# to it which corresponds with one of the keys in this dict.
endpoints={
# These are here so your local box can listen to the upstream
# infrastructure's bus. Cool, right? :)
"fedora-infrastructure": [
"tcp://hub.fedoraproject.org:9940",
#"tcp://stg.fedoraproject.org:9940",
],
},
)
<commit_msg>Add debian endpoint as comment to file.<commit_after>
|
# This file is part of fedmsg.
# Copyright (C) 2012 Red Hat, Inc.
#
# fedmsg is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# fedmsg is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with fedmsg; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Authors: Ralph Bean <rbean@redhat.com>
#
config = dict(
# This is a dict of possible addresses from which fedmsg can send
# messages. fedmsg.init(...) requires that a 'name' argument be passed
# to it which corresponds with one of the keys in this dict.
endpoints={
# These are here so your local box can listen to the upstream
# infrastructure's bus. Cool, right? :)
"fedora-infrastructure": [
"tcp://hub.fedoraproject.org:9940",
#"tcp://stg.fedoraproject.org:9940",
],
#"debian-infrastructure": [
# "tcp://fedmsg.olasd.eu:9940",
#],
},
)
|
# This file is part of fedmsg.
# Copyright (C) 2012 Red Hat, Inc.
#
# fedmsg is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# fedmsg is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with fedmsg; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Authors: Ralph Bean <rbean@redhat.com>
#
config = dict(
# This is a dict of possible addresses from which fedmsg can send
# messages. fedmsg.init(...) requires that a 'name' argument be passed
# to it which corresponds with one of the keys in this dict.
endpoints={
# These are here so your local box can listen to the upstream
# infrastructure's bus. Cool, right? :)
"fedora-infrastructure": [
"tcp://hub.fedoraproject.org:9940",
#"tcp://stg.fedoraproject.org:9940",
],
},
)
Add debian endpoint as comment to file.# This file is part of fedmsg.
# Copyright (C) 2012 Red Hat, Inc.
#
# fedmsg is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# fedmsg is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with fedmsg; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Authors: Ralph Bean <rbean@redhat.com>
#
config = dict(
# This is a dict of possible addresses from which fedmsg can send
# messages. fedmsg.init(...) requires that a 'name' argument be passed
# to it which corresponds with one of the keys in this dict.
endpoints={
# These are here so your local box can listen to the upstream
# infrastructure's bus. Cool, right? :)
"fedora-infrastructure": [
"tcp://hub.fedoraproject.org:9940",
#"tcp://stg.fedoraproject.org:9940",
],
#"debian-infrastructure": [
# "tcp://fedmsg.olasd.eu:9940",
#],
},
)
|
<commit_before># This file is part of fedmsg.
# Copyright (C) 2012 Red Hat, Inc.
#
# fedmsg is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# fedmsg is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with fedmsg; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Authors: Ralph Bean <rbean@redhat.com>
#
config = dict(
# This is a dict of possible addresses from which fedmsg can send
# messages. fedmsg.init(...) requires that a 'name' argument be passed
# to it which corresponds with one of the keys in this dict.
endpoints={
# These are here so your local box can listen to the upstream
# infrastructure's bus. Cool, right? :)
"fedora-infrastructure": [
"tcp://hub.fedoraproject.org:9940",
#"tcp://stg.fedoraproject.org:9940",
],
},
)
<commit_msg>Add debian endpoint as comment to file.<commit_after># This file is part of fedmsg.
# Copyright (C) 2012 Red Hat, Inc.
#
# fedmsg is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# fedmsg is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with fedmsg; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Authors: Ralph Bean <rbean@redhat.com>
#
config = dict(
# This is a dict of possible addresses from which fedmsg can send
# messages. fedmsg.init(...) requires that a 'name' argument be passed
# to it which corresponds with one of the keys in this dict.
endpoints={
# These are here so your local box can listen to the upstream
# infrastructure's bus. Cool, right? :)
"fedora-infrastructure": [
"tcp://hub.fedoraproject.org:9940",
#"tcp://stg.fedoraproject.org:9940",
],
#"debian-infrastructure": [
# "tcp://fedmsg.olasd.eu:9940",
#],
},
)
|
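The endpoints in the record above are plain ZeroMQ PUB sockets, so a bare pyzmq subscriber is enough to sanity-check one before wiring up fedmsg proper. A sketch, assuming fedmsg's usual two-frame layout of (topic, JSON body):
import zmq
ctx = zmq.Context()
sock = ctx.socket(zmq.SUB)
sock.connect("tcp://hub.fedoraproject.org:9940")
sock.setsockopt(zmq.SUBSCRIBE, b"")      # empty prefix: subscribe to all topics
while True:
    topic, body = sock.recv_multipart()  # assumed frame layout
    print(topic, body[:120])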
45896c766f0bd34d00fa0c3d99b94f650b9f8cd7
|
ddsc_api/views.py
|
ddsc_api/views.py
|
# (c) Nelen & Schuurmans. MIT licensed, see LICENSE.rst.
from __future__ import print_function, unicode_literals
from __future__ import absolute_import, division
from rest_framework.reverse import reverse
from rest_framework.views import APIView
from rest_framework.response import Response
class Root(APIView):
def get(self, request, format=None):
response = {
'datasets': reverse('dataset-list', request=request),
'locations': reverse('location-list', request=request),
'timeseries': reverse('timeseries-list', request=request),
'parameters': reverse('parameter-list', request=request),
'layers': reverse('layers-list', request=request),
}
user = getattr(request, 'user', None)
if user is not None and user.is_superuser:
response.update({
'users': reverse('user-list', request=request),
'groups': reverse('usergroup-list', request=request),
'roles': reverse('role-list', request=request),
})
return Response(response)
|
# (c) Nelen & Schuurmans. MIT licensed, see LICENSE.rst.
from __future__ import print_function, unicode_literals
from __future__ import absolute_import, division
from rest_framework.reverse import reverse
from rest_framework.views import APIView
from rest_framework.response import Response
class Root(APIView):
def get(self, request, format=None):
response = {
'datasets': reverse('dataset-list', request=request),
'locations': reverse('location-list', request=request),
'timeseries': reverse('timeseries-list', request=request),
'parameters': reverse('parameter-list', request=request),
'layers': reverse('layer-list', request=request),
'collages': reverse('collage-list', request=request),
}
user = getattr(request, 'user', None)
if user is not None and user.is_superuser:
response.update({
'users': reverse('user-list', request=request),
'groups': reverse('usergroup-list', request=request),
'roles': reverse('role-list', request=request),
})
return Response(response)
|
Fix and add ddsc-site urls.
|
Fix and add ddsc-site urls.
|
Python
|
mit
|
ddsc/ddsc-api,ddsc/ddsc-api
|
# (c) Nelen & Schuurmans. MIT licensed, see LICENSE.rst.
from __future__ import print_function, unicode_literals
from __future__ import absolute_import, division
from rest_framework.reverse import reverse
from rest_framework.views import APIView
from rest_framework.response import Response
class Root(APIView):
def get(self, request, format=None):
response = {
'datasets': reverse('dataset-list', request=request),
'locations': reverse('location-list', request=request),
'timeseries': reverse('timeseries-list', request=request),
'parameters': reverse('parameter-list', request=request),
'layers': reverse('layers-list', request=request),
}
user = getattr(request, 'user', None)
if user is not None and user.is_superuser:
response.update({
'users': reverse('user-list', request=request),
'groups': reverse('usergroup-list', request=request),
'roles': reverse('role-list', request=request),
})
return Response(response)
Fix and add ddsc-site urls.
|
# (c) Nelen & Schuurmans. MIT licensed, see LICENSE.rst.
from __future__ import print_function, unicode_literals
from __future__ import absolute_import, division
from rest_framework.reverse import reverse
from rest_framework.views import APIView
from rest_framework.response import Response
class Root(APIView):
def get(self, request, format=None):
response = {
'datasets': reverse('dataset-list', request=request),
'locations': reverse('location-list', request=request),
'timeseries': reverse('timeseries-list', request=request),
'parameters': reverse('parameter-list', request=request),
'layers': reverse('layer-list', request=request),
'collages': reverse('collage-list', request=request),
}
user = getattr(request, 'user', None)
if user is not None and user.is_superuser:
response.update({
'users': reverse('user-list', request=request),
'groups': reverse('usergroup-list', request=request),
'roles': reverse('role-list', request=request),
})
return Response(response)
|
<commit_before># (c) Nelen & Schuurmans. MIT licensed, see LICENSE.rst.
from __future__ import print_function, unicode_literals
from __future__ import absolute_import, division
from rest_framework.reverse import reverse
from rest_framework.views import APIView
from rest_framework.response import Response
class Root(APIView):
def get(self, request, format=None):
response = {
'datasets': reverse('dataset-list', request=request),
'locations': reverse('location-list', request=request),
'timeseries': reverse('timeseries-list', request=request),
'parameters': reverse('parameter-list', request=request),
'layers': reverse('layers-list', request=request),
}
user = getattr(request, 'user', None)
if user is not None and user.is_superuser:
response.update({
'users': reverse('user-list', request=request),
'groups': reverse('usergroup-list', request=request),
'roles': reverse('role-list', request=request),
})
return Response(response)
<commit_msg>Fix and add ddsc-site urls.<commit_after>
|
# (c) Nelen & Schuurmans. MIT licensed, see LICENSE.rst.
from __future__ import print_function, unicode_literals
from __future__ import absolute_import, division
from rest_framework.reverse import reverse
from rest_framework.views import APIView
from rest_framework.response import Response
class Root(APIView):
def get(self, request, format=None):
response = {
'datasets': reverse('dataset-list', request=request),
'locations': reverse('location-list', request=request),
'timeseries': reverse('timeseries-list', request=request),
'parameters': reverse('parameter-list', request=request),
'layers': reverse('layer-list', request=request),
'collages': reverse('collage-list', request=request),
}
user = getattr(request, 'user', None)
if user is not None and user.is_superuser:
response.update({
'users': reverse('user-list', request=request),
'groups': reverse('usergroup-list', request=request),
'roles': reverse('role-list', request=request),
})
return Response(response)
|
# (c) Nelen & Schuurmans. MIT licensed, see LICENSE.rst.
from __future__ import print_function, unicode_literals
from __future__ import absolute_import, division
from rest_framework.reverse import reverse
from rest_framework.views import APIView
from rest_framework.response import Response
class Root(APIView):
def get(self, request, format=None):
response = {
'datasets': reverse('dataset-list', request=request),
'locations': reverse('location-list', request=request),
'timeseries': reverse('timeseries-list', request=request),
'parameters': reverse('parameter-list', request=request),
'layers': reverse('layers-list', request=request),
}
user = getattr(request, 'user', None)
if user is not None and user.is_superuser:
response.update({
'users': reverse('user-list', request=request),
'groups': reverse('usergroup-list', request=request),
'roles': reverse('role-list', request=request),
})
return Response(response)
Fix and add ddsc-site urls.# (c) Nelen & Schuurmans. MIT licensed, see LICENSE.rst.
from __future__ import print_function, unicode_literals
from __future__ import absolute_import, division
from rest_framework.reverse import reverse
from rest_framework.views import APIView
from rest_framework.response import Response
class Root(APIView):
def get(self, request, format=None):
response = {
'datasets': reverse('dataset-list', request=request),
'locations': reverse('location-list', request=request),
'timeseries': reverse('timeseries-list', request=request),
'parameters': reverse('parameter-list', request=request),
'layers': reverse('layer-list', request=request),
'collages': reverse('collage-list', request=request),
}
user = getattr(request, 'user', None)
if user is not None and user.is_superuser:
response.update({
'users': reverse('user-list', request=request),
'groups': reverse('usergroup-list', request=request),
'roles': reverse('role-list', request=request),
})
return Response(response)
|
<commit_before># (c) Nelen & Schuurmans. MIT licensed, see LICENSE.rst.
from __future__ import print_function, unicode_literals
from __future__ import absolute_import, division
from rest_framework.reverse import reverse
from rest_framework.views import APIView
from rest_framework.response import Response
class Root(APIView):
def get(self, request, format=None):
response = {
'datasets': reverse('dataset-list', request=request),
'locations': reverse('location-list', request=request),
'timeseries': reverse('timeseries-list', request=request),
'parameters': reverse('parameter-list', request=request),
'layers': reverse('layers-list', request=request),
}
user = getattr(request, 'user', None)
if user is not None and user.is_superuser:
response.update({
'users': reverse('user-list', request=request),
'groups': reverse('usergroup-list', request=request),
'roles': reverse('role-list', request=request),
})
return Response(response)
<commit_msg>Fix and add ddsc-site urls.<commit_after># (c) Nelen & Schuurmans. MIT licensed, see LICENSE.rst.
from __future__ import print_function, unicode_literals
from __future__ import absolute_import, division
from rest_framework.reverse import reverse
from rest_framework.views import APIView
from rest_framework.response import Response
class Root(APIView):
def get(self, request, format=None):
response = {
'datasets': reverse('dataset-list', request=request),
'locations': reverse('location-list', request=request),
'timeseries': reverse('timeseries-list', request=request),
'parameters': reverse('parameter-list', request=request),
'layers': reverse('layer-list', request=request),
'collages': reverse('collage-list', request=request),
}
user = getattr(request, 'user', None)
if user is not None and user.is_superuser:
response.update({
'users': reverse('user-list', request=request),
'groups': reverse('usergroup-list', request=request),
'roles': reverse('role-list', request=request),
})
return Response(response)
|
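The reverse() calls above only resolve if URL patterns named 'layer-list', 'collage-list', and so on exist, which is what made the 'layers-list' name break. With Django REST framework routers, the '<basename>-list' names fall out of registration. A hedged sketch (the viewset bodies are hypothetical stand-ins, and older DRF releases spell the keyword base_name):
from rest_framework import viewsets
from rest_framework.response import Response
from rest_framework.routers import DefaultRouter
class LayerViewSet(viewsets.ViewSet):    # hypothetical stand-in
    def list(self, request):
        return Response([])
class CollageViewSet(viewsets.ViewSet):  # hypothetical stand-in
    def list(self, request):
        return Response([])
router = DefaultRouter()
# register() generates URL names 'layer-list'/'layer-detail', etc.
router.register(r'layers', LayerViewSet, basename='layer')
router.register(r'collages', CollageViewSet, basename='collage')
urlpatterns = router.urls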
759e6b66ebd601fb1902f6bee2cbc980d61baab8
|
unitTestUtils/parseXML.py
|
unitTestUtils/parseXML.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import print_function
from xml.etree.ElementTree import ParseError
import xml.etree.ElementTree as ET
import glob
import sys
def eprint(*args, **kwargs):
print(*args, file=sys.stderr, **kwargs)
def parse():
for infile in glob.glob('*.xml'):
try:
tree = ET.parse(infile)
root = tree.getroot()
if root.findall('.//FatalError'):
eprint("Error detected")
sys.exit(1)
except ParseError:
eprint("The file xml isn't correct. There were some mistakes in the tests ")
sys.exit(1)
def main():
parse()
if __name__ == '__main__':
main()
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import print_function
from xml.etree.ElementTree import ParseError
import xml.etree.ElementTree as ET
import glob
import sys
def eprint(*args, **kwargs):
print(*args, file=sys.stderr, **kwargs)
def parse():
for infile in glob.glob('*.xml'):
try:
tree = ET.parse(infile)
root = tree.getroot()
if root.findall('.//FatalError'):
eprint("Error detected")
print(infile)
sys.exit(1)
except ParseError:
eprint("The file xml isn't correct. There were some mistakes in the tests ")
sys.exit(1)
def main():
parse()
if __name__ == '__main__':
main()
|
Add a print with file where mistake is
|
Add a print with file where mistake is
|
Python
|
apache-2.0
|
alexkernphysiker/j-pet-framework,JPETTomography/j-pet-framework,JPETTomography/j-pet-framework,alexkernphysiker/j-pet-framework,alexkernphysiker/j-pet-framework,alexkernphysiker/j-pet-framework,alexkernphysiker/j-pet-framework,JPETTomography/j-pet-framework
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import print_function
from xml.etree.ElementTree import ParseError
import xml.etree.ElementTree as ET
import glob
import sys
def eprint(*args, **kwargs):
print(*args, file=sys.stderr, **kwargs)
def parse():
for infile in glob.glob('*.xml'):
try:
tree = ET.parse(infile)
root = tree.getroot()
if root.findall('.//FatalError'):
eprint("Error detected")
sys.exit(1)
except ParseError:
eprint("The file xml isn't correct. There were some mistakes in the tests ")
sys.exit(1)
def main():
parse()
if __name__ == '__main__':
main()
Add a print with file where mistake is
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import print_function
from xml.etree.ElementTree import ParseError
import xml.etree.ElementTree as ET
import glob
import sys
def eprint(*args, **kwargs):
print(*args, file=sys.stderr, **kwargs)
def parse():
for infile in glob.glob('*.xml'):
try:
tree = ET.parse(infile)
root = tree.getroot()
if root.findall('.//FatalError'):
eprint("Error detected")
print(infile)
sys.exit(1)
except ParseError:
eprint("The file xml isn't correct. There were some mistakes in the tests ")
sys.exit(1)
def main():
parse()
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import print_function
from xml.etree.ElementTree import ParseError
import xml.etree.ElementTree as ET
import glob
import sys
def eprint(*args, **kwargs):
print(*args, file=sys.stderr, **kwargs)
def parse():
for infile in glob.glob('*.xml'):
try:
tree = ET.parse(infile)
root = tree.getroot()
if root.findall('.//FatalError'):
eprint("Error detected")
sys.exit(1)
except ParseError:
eprint("The file xml isn't correct. There were some mistakes in the tests ")
sys.exit(1)
def main():
parse()
if __name__ == '__main__':
main()
<commit_msg>Add a print with file where mistake is<commit_after>
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import print_function
from xml.etree.ElementTree import ParseError
import xml.etree.ElementTree as ET
import glob
import sys
def eprint(*args, **kwargs):
print(*args, file=sys.stderr, **kwargs)
def parse():
for infile in glob.glob('*.xml'):
try:
tree = ET.parse(infile)
root = tree.getroot()
if root.findall('.//FatalError'):
eprint("Error detected")
print(infile)
sys.exit(1)
except ParseError:
eprint("The file xml isn't correct. There were some mistakes in the tests ")
sys.exit(1)
def main():
parse()
if __name__ == '__main__':
main()
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import print_function
from xml.etree.ElementTree import ParseError
import xml.etree.ElementTree as ET
import glob
import sys
def eprint(*args, **kwargs):
print(*args, file=sys.stderr, **kwargs)
def parse():
for infile in glob.glob('*.xml'):
try:
tree = ET.parse(infile)
root = tree.getroot()
if root.findall('.//FatalError'):
eprint("Error detected")
sys.exit(1)
except ParseError:
eprint("The file xml isn't correct. There were some mistakes in the tests ")
sys.exit(1)
def main():
parse()
if __name__ == '__main__':
main()
Add a print with file where mistake is#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import print_function
from xml.etree.ElementTree import ParseError
import xml.etree.ElementTree as ET
import glob
import sys
def eprint(*args, **kwargs):
print(*args, file=sys.stderr, **kwargs)
def parse():
for infile in glob.glob('*.xml'):
try:
tree = ET.parse(infile)
root = tree.getroot()
if root.findall('.//FatalError'):
eprint("Error detected")
print(infile)
sys.exit(1)
except ParseError:
eprint("The file xml isn't correct. There were some mistakes in the tests ")
sys.exit(1)
def main():
parse()
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import print_function
from xml.etree.ElementTree import ParseError
import xml.etree.ElementTree as ET
import glob
import sys
def eprint(*args, **kwargs):
print(*args, file=sys.stderr, **kwargs)
def parse():
for infile in glob.glob('*.xml'):
try:
tree = ET.parse(infile)
root = tree.getroot()
if root.findall('.//FatalError'):
eprint("Error detected")
sys.exit(1)
except ParseError:
eprint("The file xml isn't correct. There were some mistakes in the tests ")
sys.exit(1)
def main():
parse()
if __name__ == '__main__':
main()
<commit_msg>Add a print with file where mistake is<commit_after>#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import print_function
from xml.etree.ElementTree import ParseError
import xml.etree.ElementTree as ET
import glob
import sys
def eprint(*args, **kwargs):
print(*args, file=sys.stderr, **kwargs)
def parse():
for infile in glob.glob('*.xml'):
try:
tree = ET.parse(infile)
root = tree.getroot()
if root.findall('.//FatalError'):
eprint("Error detected")
print(infile)
sys.exit(1)
except ParseError:
eprint("The file xml isn't correct. There were some mistakes in the tests ")
sys.exit(1)
def main():
parse()
if __name__ == '__main__':
main()
|
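For reference, the scanner in the record above reacts to a FatalError element anywhere in a report. A minimal document that would trip it (real test reports are larger, but the structure is what matters):
import xml.etree.ElementTree as ET
report = """<TestLog>
  <TestSuite name="suite1">
    <FatalError file="t.cpp" line="7">check failed</FatalError>
  </TestSuite>
</TestLog>"""
root = ET.fromstring(report)
print(bool(root.findall('.//FatalError')))  # True -> parse() exits with 1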
8d9b2bdbf47b51e3ada3b5e14fcc27bcaafce4fb
|
dbsync/logs.py
|
dbsync/logs.py
|
"""
.. module:: dbsync.logs
:synopsis: Logging facilities for the library.
"""
import logging
#: All the library loggers
loggers = set()
log_handler = None
def get_logger(name):
logger = logging.getLogger(name)
logger.setLevel(logging.WARNING)
loggers.add(logger)
if log_handler is not None:
logger.addHandler(log_handler)
return logger
def set_log_target(fo):
"Set a stream as target for dbsync's logging."
global log_handler
if log_handler is None:
log_handler = logging.StreamHandler(fo)
log_handler.setLevel(logging.WARNING)
log_handler.setFormatter(
logging.Formatter(
"%(asctime)s - %(name)s - %(levelname)s - %(message)s"))
for logger in loggers:
logger.addHandler(log_handler)
|
"""
.. module:: dbsync.logs
:synopsis: Logging facilities for the library.
"""
import logging
#: All the library loggers
loggers = set()
log_handler = None
def get_logger(name):
logger = logging.getLogger(name)
logger.setLevel(logging.WARNING)
loggers.add(logger)
if log_handler is not None:
logger.addHandler(log_handler)
return logger
def set_log_target(fo):
"""
Set a stream as target for dbsync's logging. If a string is given,
it will be considered to be a path to a file.
"""
global log_handler
if log_handler is None:
log_handler = logging.FileHandler(fo) if isinstance(fo, basestring) \
else logging.StreamHandler(fo)
log_handler.setLevel(logging.WARNING)
log_handler.setFormatter(
logging.Formatter(
"%(asctime)s - %(name)s - %(levelname)s - %(message)s"))
for logger in loggers:
logger.addHandler(log_handler)
|
Allow file paths to be given to set_log_target.
|
Allow file paths to be given to set_log_target.
|
Python
|
mit
|
bintlabs/python-sync-db
|
"""
.. module:: dbsync.logs
:synopsis: Logging facilities for the library.
"""
import logging
#: All the library loggers
loggers = set()
log_handler = None
def get_logger(name):
logger = logging.getLogger(name)
logger.setLevel(logging.WARNING)
loggers.add(logger)
if log_handler is not None:
logger.addHandler(log_handler)
return logger
def set_log_target(fo):
"Set a stream as target for dbsync's logging."
global log_handler
if log_handler is None:
log_handler = logging.StreamHandler(fo)
log_handler.setLevel(logging.WARNING)
log_handler.setFormatter(
logging.Formatter(
"%(asctime)s - %(name)s - %(levelname)s - %(message)s"))
for logger in loggers:
logger.addHandler(log_handler)
Allow file paths to be given to set_log_target.
|
"""
.. module:: dbsync.logs
:synopsis: Logging facilities for the library.
"""
import logging
#: All the library loggers
loggers = set()
log_handler = None
def get_logger(name):
logger = logging.getLogger(name)
logger.setLevel(logging.WARNING)
loggers.add(logger)
if log_handler is not None:
logger.addHandler(log_handler)
return logger
def set_log_target(fo):
"""
Set a stream as target for dbsync's logging. If a string is given,
it will be considered to be a path to a file.
"""
global log_handler
if log_handler is None:
log_handler = logging.FileHandler(fo) if isinstance(fo, basestring) \
else logging.StreamHandler(fo)
log_handler.setLevel(logging.WARNING)
log_handler.setFormatter(
logging.Formatter(
"%(asctime)s - %(name)s - %(levelname)s - %(message)s"))
for logger in loggers:
logger.addHandler(log_handler)
|
<commit_before>"""
.. module:: dbsync.logs
:synopsis: Logging facilities for the library.
"""
import logging
#: All the library loggers
loggers = set()
log_handler = None
def get_logger(name):
logger = logging.getLogger(name)
logger.setLevel(logging.WARNING)
loggers.add(logger)
if log_handler is not None:
logger.addHandler(log_handler)
return logger
def set_log_target(fo):
"Set a stream as target for dbsync's logging."
global log_handler
if log_handler is None:
log_handler = logging.StreamHandler(fo)
log_handler.setLevel(logging.WARNING)
log_handler.setFormatter(
logging.Formatter(
"%(asctime)s - %(name)s - %(levelname)s - %(message)s"))
for logger in loggers:
logger.addHandler(log_handler)
<commit_msg>Allow file paths to be given to set_log_target.<commit_after>
|
"""
.. module:: dbsync.logs
:synopsis: Logging facilities for the library.
"""
import logging
#: All the library loggers
loggers = set()
log_handler = None
def get_logger(name):
logger = logging.getLogger(name)
logger.setLevel(logging.WARNING)
loggers.add(logger)
if log_handler is not None:
logger.addHandler(log_handler)
return logger
def set_log_target(fo):
"""
Set a stream as target for dbsync's logging. If a string is given,
it will be considered to be a path to a file.
"""
global log_handler
if log_handler is None:
log_handler = logging.FileHandler(fo) if isinstance(fo, basestring) \
else logging.StreamHandler(fo)
log_handler.setLevel(logging.WARNING)
log_handler.setFormatter(
logging.Formatter(
"%(asctime)s - %(name)s - %(levelname)s - %(message)s"))
for logger in loggers:
logger.addHandler(log_handler)
|
"""
.. module:: dbsync.logs
:synopsis: Logging facilities for the library.
"""
import logging
#: All the library loggers
loggers = set()
log_handler = None
def get_logger(name):
logger = logging.getLogger(name)
logger.setLevel(logging.WARNING)
loggers.add(logger)
if log_handler is not None:
logger.addHandler(log_handler)
return logger
def set_log_target(fo):
"Set a stream as target for dbsync's logging."
global log_handler
if log_handler is None:
log_handler = logging.StreamHandler(fo)
log_handler.setLevel(logging.WARNING)
log_handler.setFormatter(
logging.Formatter(
"%(asctime)s - %(name)s - %(levelname)s - %(message)s"))
for logger in loggers:
logger.addHandler(log_handler)
Allow file paths to be given to set_log_target."""
.. module:: dbsync.logs
:synopsis: Logging facilities for the library.
"""
import logging
#: All the library loggers
loggers = set()
log_handler = None
def get_logger(name):
logger = logging.getLogger(name)
logger.setLevel(logging.WARNING)
loggers.add(logger)
if log_handler is not None:
logger.addHandler(log_handler)
return logger
def set_log_target(fo):
"""
Set a stream as target for dbsync's logging. If a string is given,
it will be considered to be a path to a file.
"""
global log_handler
if log_handler is None:
log_handler = logging.FileHandler(fo) if isinstance(fo, basestring) \
else logging.StreamHandler(fo)
log_handler.setLevel(logging.WARNING)
log_handler.setFormatter(
logging.Formatter(
"%(asctime)s - %(name)s - %(levelname)s - %(message)s"))
for logger in loggers:
logger.addHandler(log_handler)
|
<commit_before>"""
.. module:: dbsync.logs
:synopsis: Logging facilities for the library.
"""
import logging
#: All the library loggers
loggers = set()
log_handler = None
def get_logger(name):
logger = logging.getLogger(name)
logger.setLevel(logging.WARNING)
loggers.add(logger)
if log_handler is not None:
logger.addHandler(log_handler)
return logger
def set_log_target(fo):
"Set a stream as target for dbsync's logging."
global log_handler
if log_handler is None:
log_handler = logging.StreamHandler(fo)
log_handler.setLevel(logging.WARNING)
log_handler.setFormatter(
logging.Formatter(
"%(asctime)s - %(name)s - %(levelname)s - %(message)s"))
for logger in loggers:
logger.addHandler(log_handler)
<commit_msg>Allow file paths to be given to set_log_target.<commit_after>"""
.. module:: dbsync.logs
:synopsis: Logging facilities for the library.
"""
import logging
#: All the library loggers
loggers = set()
log_handler = None
def get_logger(name):
logger = logging.getLogger(name)
logger.setLevel(logging.WARNING)
loggers.add(logger)
if log_handler is not None:
logger.addHandler(log_handler)
return logger
def set_log_target(fo):
"""
Set a stream as target for dbsync's logging. If a string is given,
it will be considered to be a path to a file.
"""
global log_handler
if log_handler is None:
log_handler = logging.FileHandler(fo) if isinstance(fo, basestring) \
else logging.StreamHandler(fo)
log_handler.setLevel(logging.WARNING)
log_handler.setFormatter(
logging.Formatter(
"%(asctime)s - %(name)s - %(levelname)s - %(message)s"))
for logger in loggers:
logger.addHandler(log_handler)
|
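Usage after the change in the record above: a path and a stream both work, and only the first call installs a handler because log_handler is module-global. Note that basestring exists only on Python 2; a Python 3 port would test against str instead, as sketched in the trailing comment.
import sys
from dbsync.logs import set_log_target
set_log_target('dbsync.log')   # a string: routed to logging.FileHandler
# set_log_target(sys.stderr)   # a stream: routed to logging.StreamHandler
# Python 3 equivalent of the type check (basestring is gone there):
# log_handler = (logging.FileHandler(fo) if isinstance(fo, str)
#                else logging.StreamHandler(fo))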
55d10f77f963eb0cdbe29e04fe910f65c4edaec4
|
erpnext/buying/doctype/supplier/supplier_dashboard.py
|
erpnext/buying/doctype/supplier/supplier_dashboard.py
|
from __future__ import unicode_literals
from frappe import _
def get_data():
return {
'heatmap': True,
'heatmap_message': _('This is based on transactions against this Supplier. See timeline below for details'),
'fieldname': 'supplier',
'non_standard_fieldnames': {
'Payment Entry': 'party_name'
},
'transactions': [
{
'label': _('Procurement'),
'items': ['Request for Quotation', 'Supplier Quotation']
},
{
'label': _('Orders'),
'items': ['Purchase Order', 'Purchase Receipt', 'Purchase Invoice']
},
{
'label': _('Payments'),
'items': ['Payment Entry']
},
{
'label': _('Pricing'),
'items': ['Pricing Rule']
}
]
}
|
from __future__ import unicode_literals
from frappe import _
def get_data():
return {
'heatmap': True,
'heatmap_message': _('This is based on transactions against this Supplier. See timeline below for details'),
'fieldname': 'supplier',
'non_standard_fieldnames': {
'Payment Entry': 'party_name',
'Bank Account': 'party'
},
'transactions': [
{
'label': _('Procurement'),
'items': ['Request for Quotation', 'Supplier Quotation']
},
{
'label': _('Orders'),
'items': ['Purchase Order', 'Purchase Receipt', 'Purchase Invoice']
},
{
'label': _('Payments'),
'items': ['Payment Entry']
},
{
'label': _('Bank'),
'items': ['Bank Account']
},
{
'label': _('Pricing'),
'items': ['Pricing Rule']
}
]
}
|
Add linked bank accounts to supplier dashboard
|
fix: Add linked bank accounts to supplier dashboard
|
Python
|
agpl-3.0
|
gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext
|
from __future__ import unicode_literals
from frappe import _
def get_data():
return {
'heatmap': True,
'heatmap_message': _('This is based on transactions against this Supplier. See timeline below for details'),
'fieldname': 'supplier',
'non_standard_fieldnames': {
'Payment Entry': 'party_name'
},
'transactions': [
{
'label': _('Procurement'),
'items': ['Request for Quotation', 'Supplier Quotation']
},
{
'label': _('Orders'),
'items': ['Purchase Order', 'Purchase Receipt', 'Purchase Invoice']
},
{
'label': _('Payments'),
'items': ['Payment Entry']
},
{
'label': _('Pricing'),
'items': ['Pricing Rule']
}
]
}
fix: Add linked bank accounts to supplier dashboard
|
from __future__ import unicode_literals
from frappe import _
def get_data():
return {
'heatmap': True,
'heatmap_message': _('This is based on transactions against this Supplier. See timeline below for details'),
'fieldname': 'supplier',
'non_standard_fieldnames': {
'Payment Entry': 'party_name',
'Bank Account': 'party'
},
'transactions': [
{
'label': _('Procurement'),
'items': ['Request for Quotation', 'Supplier Quotation']
},
{
'label': _('Orders'),
'items': ['Purchase Order', 'Purchase Receipt', 'Purchase Invoice']
},
{
'label': _('Payments'),
'items': ['Payment Entry']
},
{
'label': _('Bank'),
'items': ['Bank Account']
},
{
'label': _('Pricing'),
'items': ['Pricing Rule']
}
]
}
|
<commit_before>from __future__ import unicode_literals
from frappe import _
def get_data():
return {
'heatmap': True,
'heatmap_message': _('This is based on transactions against this Supplier. See timeline below for details'),
'fieldname': 'supplier',
'non_standard_fieldnames': {
'Payment Entry': 'party_name'
},
'transactions': [
{
'label': _('Procurement'),
'items': ['Request for Quotation', 'Supplier Quotation']
},
{
'label': _('Orders'),
'items': ['Purchase Order', 'Purchase Receipt', 'Purchase Invoice']
},
{
'label': _('Payments'),
'items': ['Payment Entry']
},
{
'label': _('Pricing'),
'items': ['Pricing Rule']
}
]
}
<commit_msg>fix: Add linked bank accounts to supplier dashboard<commit_after>
|
from __future__ import unicode_literals
from frappe import _
def get_data():
return {
'heatmap': True,
'heatmap_message': _('This is based on transactions against this Supplier. See timeline below for details'),
'fieldname': 'supplier',
'non_standard_fieldnames': {
'Payment Entry': 'party_name',
'Bank Account': 'party'
},
'transactions': [
{
'label': _('Procurement'),
'items': ['Request for Quotation', 'Supplier Quotation']
},
{
'label': _('Orders'),
'items': ['Purchase Order', 'Purchase Receipt', 'Purchase Invoice']
},
{
'label': _('Payments'),
'items': ['Payment Entry']
},
{
'label': _('Bank'),
'items': ['Bank Account']
},
{
'label': _('Pricing'),
'items': ['Pricing Rule']
}
]
}
|
from __future__ import unicode_literals
from frappe import _
def get_data():
return {
'heatmap': True,
'heatmap_message': _('This is based on transactions against this Supplier. See timeline below for details'),
'fieldname': 'supplier',
'non_standard_fieldnames': {
'Payment Entry': 'party_name'
},
'transactions': [
{
'label': _('Procurement'),
'items': ['Request for Quotation', 'Supplier Quotation']
},
{
'label': _('Orders'),
'items': ['Purchase Order', 'Purchase Receipt', 'Purchase Invoice']
},
{
'label': _('Payments'),
'items': ['Payment Entry']
},
{
'label': _('Pricing'),
'items': ['Pricing Rule']
}
]
}
fix: Add linked bank accounts to supplier dashboard
from __future__ import unicode_literals
from frappe import _
def get_data():
return {
'heatmap': True,
'heatmap_message': _('This is based on transactions against this Supplier. See timeline below for details'),
'fieldname': 'supplier',
'non_standard_fieldnames': {
'Payment Entry': 'party_name',
'Bank Account': 'party'
},
'transactions': [
{
'label': _('Procurement'),
'items': ['Request for Quotation', 'Supplier Quotation']
},
{
'label': _('Orders'),
'items': ['Purchase Order', 'Purchase Receipt', 'Purchase Invoice']
},
{
'label': _('Payments'),
'items': ['Payment Entry']
},
{
'label': _('Bank'),
'items': ['Bank Account']
},
{
'label': _('Pricing'),
'items': ['Pricing Rule']
}
]
}
|
<commit_before>from __future__ import unicode_literals
from frappe import _
def get_data():
return {
'heatmap': True,
'heatmap_message': _('This is based on transactions against this Supplier. See timeline below for details'),
'fieldname': 'supplier',
'non_standard_fieldnames': {
'Payment Entry': 'party_name'
},
'transactions': [
{
'label': _('Procurement'),
'items': ['Request for Quotation', 'Supplier Quotation']
},
{
'label': _('Orders'),
'items': ['Purchase Order', 'Purchase Receipt', 'Purchase Invoice']
},
{
'label': _('Payments'),
'items': ['Payment Entry']
},
{
'label': _('Pricing'),
'items': ['Pricing Rule']
}
]
}
<commit_msg>fix: Add linked bank accounts to supplier dashboard<commit_after>from __future__ import unicode_literals
from frappe import _
def get_data():
return {
'heatmap': True,
'heatmap_message': _('This is based on transactions against this Supplier. See timeline below for details'),
'fieldname': 'supplier',
'non_standard_fieldnames': {
'Payment Entry': 'party_name',
'Bank Account': 'party'
},
'transactions': [
{
'label': _('Procurement'),
'items': ['Request for Quotation', 'Supplier Quotation']
},
{
'label': _('Orders'),
'items': ['Purchase Order', 'Purchase Receipt', 'Purchase Invoice']
},
{
'label': _('Payments'),
'items': ['Payment Entry']
},
{
'label': _('Bank'),
'items': ['Bank Account']
},
{
'label': _('Pricing'),
'items': ['Pricing Rule']
}
]
}
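A minimal sketch of how a consumer of this dashboard data could resolve the link fieldname for a given doctype; the helper below is hypothetical, not part of ERPNext's actual dashboard loader, and it assumes the get_data() defined above:

def link_fieldname(doctype, data):
    # Doctypes listed under non_standard_fieldnames override the default
    # fieldname ('supplier'); 'Bank Account' resolves to 'party' here.
    return data.get('non_standard_fieldnames', {}).get(doctype, data['fieldname'])

# link_fieldname('Bank Account', get_data())   -> 'party'
# link_fieldname('Purchase Order', get_data()) -> 'supplier'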
|
2f2eff3374372cabc6962cd7332aefbaa67bd7ec
|
examples/facebook-cli.py
|
examples/facebook-cli.py
|
from rauth.service import OAuth2Service
import re
import webbrowser
# Get a real consumer key & secret from:
# https://developers.facebook.com/apps
facebook = OAuth2Service(
name='facebook',
    authorize_url='https://graph.facebook.com/oauth/authorize',
    access_token_url='https://graph.facebook.com/oauth/access_token',
consumer_key='YOUR CONSUMER KEY',
consumer_secret='YOUR CONSUMER SECRET')
redirect_uri = 'https://www.facebook.com/connect/login_success.html'
authorize_url = facebook.get_authorize_url(redirect_uri=redirect_uri,
scope='read_stream',
response_type='token')
print 'Visit this URL in your browser: ' + authorize_url
webbrowser.open(authorize_url);
url_with_code = raw_input("Copy URL from your browser's address bar: ")
access_token = re.search('#access_token=([^&]*)', url_with_code).group(1)
user = facebook.get('https://graph.facebook.com/me',
params=dict(access_token=access_token)).content
print 'currently logged in as: ' + user['link']
|
from rauth.service import OAuth2Service
import re
import webbrowser
# Get a real consumer key & secret from:
# https://developers.facebook.com/apps
facebook = OAuth2Service(
name='facebook',
    authorize_url='https://graph.facebook.com/oauth/authorize',
    access_token_url='https://graph.facebook.com/oauth/access_token',
base_url='https://graph.facebook.com/',
consumer_key='YOUR CONSUMER KEY',
consumer_secret='YOUR CONSUMER SECRET')
redirect_uri = 'https://www.facebook.com/connect/login_success.html'
authorize_url = facebook.get_authorize_url(redirect_uri=redirect_uri,
scope='read_stream',
response_type='token')
print 'Visit this URL in your browser: ' + authorize_url
webbrowser.open(authorize_url);
url_with_code = raw_input("Copy URL from your browser's address bar: ")
facebook.access_token = re.search('#access_token=([^&]*)', url_with_code).group(1)
user = facebook.get('me').content
print 'currently logged in as: ' + user['link']
|
Update facebook.cli example to use short syntax
|
Update facebook.cli example to use short syntax
|
Python
|
mit
|
maxcountryman/rauth,arifgursel/rauth,isouzasoares/rauth,litl/rauth,arifgursel/rauth,isouzasoares/rauth,litl/rauth,arifgursel/rauth,maxcountryman/rauth
|
from rauth.service import OAuth2Service
import re
import webbrowser
# Get a real consumer key & secret from:
# https://developers.facebook.com/apps
facebook = OAuth2Service(
name='facebook',
    authorize_url='https://graph.facebook.com/oauth/authorize',
    access_token_url='https://graph.facebook.com/oauth/access_token',
consumer_key='YOUR CONSUMER KEY',
consumer_secret='YOUR CONSUMER SECRET')
redirect_uri = 'https://www.facebook.com/connect/login_success.html'
authorize_url = facebook.get_authorize_url(redirect_uri=redirect_uri,
scope='read_stream',
response_type='token')
print 'Visit this URL in your browser: ' + authorize_url
webbrowser.open(authorize_url);
url_with_code = raw_input("Copy URL from your browser's address bar: ")
access_token = re.search('#access_token=([^&]*)', url_with_code).group(1)
user = facebook.get('https://graph.facebook.com/me',
params=dict(access_token=access_token)).content
print 'currently logged in as: ' + user['link']
Update facebook.cli example to use short syntax
|
from rauth.service import OAuth2Service
import re
import webbrowser
# Get a real consumer key & secret from:
# https://developers.facebook.com/apps
facebook = OAuth2Service(
name='facebook',
    authorize_url='https://graph.facebook.com/oauth/authorize',
    access_token_url='https://graph.facebook.com/oauth/access_token',
base_url='https://graph.facebook.com/',
consumer_key='YOUR CONSUMER KEY',
consumer_secret='YOUR CONSUMER SECRET')
redirect_uri = 'https://www.facebook.com/connect/login_success.html'
authorize_url = facebook.get_authorize_url(redirect_uri=redirect_uri,
scope='read_stream',
response_type='token')
print 'Visit this URL in your browser: ' + authorize_url
webbrowser.open(authorize_url);
url_with_code = raw_input("Copy URL from your browser's address bar: ")
facebook.access_token = re.search('#access_token=([^&]*)', url_with_code).group(1)
user = facebook.get('me').content
print 'currently logged in as: ' + user['link']
|
<commit_before>from rauth.service import OAuth2Service
import re
import webbrowser
# Get a real consumer key & secret from:
# https://developers.facebook.com/apps
facebook = OAuth2Service(
name='facebook',
    authorize_url='https://graph.facebook.com/oauth/authorize',
    access_token_url='https://graph.facebook.com/oauth/access_token',
consumer_key='YOUR CONSUMER KEY',
consumer_secret='YOUR CONSUMER SECRET')
redirect_uri = 'https://www.facebook.com/connect/login_success.html'
authorize_url = facebook.get_authorize_url(redirect_uri=redirect_uri,
scope='read_stream',
response_type='token')
print 'Visit this URL in your browser: ' + authorize_url
webbrowser.open(authorize_url);
url_with_code = raw_input("Copy URL from your browser's address bar: ")
access_token = re.search('#access_token=([^&]*)', url_with_code).group(1)
user = facebook.get('https://graph.facebook.com/me',
params=dict(access_token=access_token)).content
print 'currently logged in as: ' + user['link']
<commit_msg>Update facebook.cli example to use short syntax<commit_after>
|
from rauth.service import OAuth2Service
import re
import webbrowser
# Get a real consumer key & secret from:
# https://developers.facebook.com/apps
facebook = OAuth2Service(
name='facebook',
    authorize_url='https://graph.facebook.com/oauth/authorize',
    access_token_url='https://graph.facebook.com/oauth/access_token',
base_url='https://graph.facebook.com/',
consumer_key='YOUR CONSUMER KEY',
consumer_secret='YOUR CONSUMER SECRET')
redirect_uri = 'https://www.facebook.com/connect/login_success.html'
authorize_url = facebook.get_authorize_url(redirect_uri=redirect_uri,
scope='read_stream',
response_type='token')
print 'Visit this URL in your browser: ' + authorize_url
webbrowser.open(authorize_url);
url_with_code = raw_input("Copy URL from your browser's address bar: ")
facebook.access_token = re.search('#access_token=([^&]*)', url_with_code).group(1)
user = facebook.get('me').content
print 'currently logged in as: ' + user['link']
|
from rauth.service import OAuth2Service
import re
import webbrowser
# Get a real consumer key & secret from:
# https://developers.facebook.com/apps
facebook = OAuth2Service(
name='facebook',
    authorize_url='https://graph.facebook.com/oauth/authorize',
    access_token_url='https://graph.facebook.com/oauth/access_token',
consumer_key='YOUR CONSUMER KEY',
consumer_secret='YOUR CONSUMER SECRET')
redirect_uri = 'https://www.facebook.com/connect/login_success.html'
authorize_url = facebook.get_authorize_url(redirect_uri=redirect_uri,
scope='read_stream',
response_type='token')
print 'Visit this URL in your browser: ' + authorize_url
webbrowser.open(authorize_url);
url_with_code = raw_input("Copy URL from your browser's address bar: ")
access_token = re.search('#access_token=([^&]*)', url_with_code).group(1)
user = facebook.get('https://graph.facebook.com/me',
params=dict(access_token=access_token)).content
print 'currently logged in as: ' + user['link']
Update facebook.cli example to use short syntax
from rauth.service import OAuth2Service
import re
import webbrowser
# Get a real consumer key & secret from:
# https://developers.facebook.com/apps
facebook = OAuth2Service(
name='facebook',
    authorize_url='https://graph.facebook.com/oauth/authorize',
    access_token_url='https://graph.facebook.com/oauth/access_token',
base_url='https://graph.facebook.com/',
consumer_key='YOUR CONSUMER KEY',
consumer_secret='YOUR CONSUMER SECRET')
redirect_uri = 'https://www.facebook.com/connect/login_success.html'
authorize_url = facebook.get_authorize_url(redirect_uri=redirect_uri,
scope='read_stream',
response_type='token')
print 'Visit this URL in your browser: ' + authorize_url
webbrowser.open(authorize_url);
url_with_code = raw_input("Copy URL from your browser's address bar: ")
facebook.access_token = re.search('#access_token=([^&]*)', url_with_code).group(1)
user = facebook.get('me').content
print 'currently logged in as: ' + user['link']
|
<commit_before>from rauth.service import OAuth2Service
import re
import webbrowser
# Get a real consumer key & secret from:
# https://developers.facebook.com/apps
facebook = OAuth2Service(
name='facebook',
    authorize_url='https://graph.facebook.com/oauth/authorize',
    access_token_url='https://graph.facebook.com/oauth/access_token',
consumer_key='YOUR CONSUMER KEY',
consumer_secret='YOUR CONSUMER SECRET')
redirect_uri = 'https://www.facebook.com/connect/login_success.html'
authorize_url = facebook.get_authorize_url(redirect_uri=redirect_uri,
scope='read_stream',
response_type='token')
print 'Visit this URL in your browser: ' + authorize_url
webbrowser.open(authorize_url);
url_with_code = raw_input("Copy URL from your browser's address bar: ")
access_token = re.search('#access_token=([^&]*)', url_with_code).group(1)
user = facebook.get('https://graph.facebook.com/me',
params=dict(access_token=access_token)).content
print 'currently logged in as: ' + user['link']
<commit_msg>Update facebook.cli example to use short syntax<commit_after>from rauth.service import OAuth2Service
import re
import webbrowser
# Get a real consumer key & secret from:
# https://developers.facebook.com/apps
facebook = OAuth2Service(
name='facebook',
    authorize_url='https://graph.facebook.com/oauth/authorize',
    access_token_url='https://graph.facebook.com/oauth/access_token',
base_url='https://graph.facebook.com/',
consumer_key='YOUR CONSUMER KEY',
consumer_secret='YOUR CONSUMER SECRET')
redirect_uri = 'https://www.facebook.com/connect/login_success.html'
authorize_url = facebook.get_authorize_url(redirect_uri=redirect_uri,
scope='read_stream',
response_type='token')
print 'Visit this URL in your browser: ' + authorize_url
webbrowser.open(authorize_url);
url_with_code = raw_input("Copy URL from your browser's address bar: ")
facebook.access_token = re.search('#access_token=([^&]*)', url_with_code).group(1)
user = facebook.get('me').content
print 'currently logged in as: ' + user['link']
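The short syntax relies on rauth resolving relative paths like 'me' against base_url; a standalone illustration of that kind of URL resolution (urljoin shows the general mechanism, not a claim about rauth's exact internals):

from urlparse import urljoin  # Python 2 stdlib, matching the example's print statements

# 'me' joined onto the service's base_url gives the absolute Graph URL:
print urljoin('https://graph.facebook.com/', 'me')
# -> https://graph.facebook.com/me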
|
9c21cdf5fc94cf16079465559c58bbe69feec6e8
|
fhir_io_hapi/__init__.py
|
fhir_io_hapi/__init__.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4
"""
django-fhir
FILE: __init__.py
Created: 1/6/16 5:07 PM
"""
__author__ = 'Mark Scrimshire:@ekivemark'
# Hello World is here to test the loading of the module from fhir.settings
# from .settings import *
from .views.get import hello_world
from .views.delete import delete
from .views.get import (read, vread, history)
default_app_config = 'fhir_io_hapi.apps.fhir_io_hapi_config'
from .views.search import find
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4
"""
django-fhir
FILE: __init__.py
Created: 1/6/16 5:07 PM
"""
__author__ = 'Mark Scrimshire:@ekivemark'
# Hello World is here to test the loading of the module from fhir.settings
# from .settings import *
from .views.get import hello_world
from .views.delete import delete
from .views.get import (read, vread, history)
from .views.search import find
# Used to load post_save signal for write to backend fhir server
default_app_config = 'fhir_io_hapi.apps.fhir_io_hapi_config'
|
Test post_save of AccessToken as step 1 in writing a fhir consent directive.
|
Test post_save of AccessToken as step 1 in writing a fhir consent directive.
|
Python
|
apache-2.0
|
ekivemark/BlueButtonFHIR_API,ekivemark/BlueButtonFHIR_API,ekivemark/BlueButtonFHIR_API,ekivemark/BlueButtonFHIR_API
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4
"""
django-fhir
FILE: __init__.py
Created: 1/6/16 5:07 PM
"""
__author__ = 'Mark Scrimshire:@ekivemark'
# Hello World is here to test the loading of the module from fhir.settings
# from .settings import *
from .views.get import hello_world
from .views.delete import delete
from .views.get import (read, vread, history)
default_app_config = 'fhir_io_hapi.apps.fhir_io_hapi_config'
from .views.search import find
Test post_save of AccessToken as step 1 in writing a fhir consent directive.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4
"""
django-fhir
FILE: __init__.py
Created: 1/6/16 5:07 PM
"""
__author__ = 'Mark Scrimshire:@ekivemark'
# Hello World is here to test the loading of the module from fhir.settings
# from .settings import *
from .views.get import hello_world
from .views.delete import delete
from .views.get import (read, vread, history)
from .views.search import find
# Used to load post_save signal for write to backend fhir server
default_app_config = 'fhir_io_hapi.apps.fhir_io_hapi_config'
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4
"""
django-fhir
FILE: __init__.py
Created: 1/6/16 5:07 PM
"""
__author__ = 'Mark Scrimshire:@ekivemark'
# Hello World is here to test the loading of the module from fhir.settings
# from .settings import *
from .views.get import hello_world
from .views.delete import delete
from .views.get import (read, vread, history)
default_app_config = 'fhir_io_hapi.apps.fhir_io_hapi_config'
from .views.search import find
<commit_msg>Test post_save of AccessToken as step 1 in writing a fhir consent directive.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4
"""
django-fhir
FILE: __init__.py
Created: 1/6/16 5:07 PM
"""
__author__ = 'Mark Scrimshire:@ekivemark'
# Hello World is here to test the loading of the module from fhir.settings
# from .settings import *
from .views.get import hello_world
from .views.delete import delete
from .views.get import (read, vread, history)
from .views.search import find
# Used to load post_save signal for write to backend fhir server
default_app_config = 'fhir_io_hapi.apps.fhir_io_hapi_config'
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4
"""
django-fhir
FILE: __init__.py
Created: 1/6/16 5:07 PM
"""
__author__ = 'Mark Scrimshire:@ekivemark'
# Hello World is here to test the loading of the module from fhir.settings
# from .settings import *
from .views.get import hello_world
from .views.delete import delete
from .views.get import (read, vread, history)
default_app_config = 'fhir_io_hapi.apps.fhir_io_hapi_config'
from .views.search import find
Test post_save of AccessToken as step 1 in writing a fhir consent directive.
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4
"""
django-fhir
FILE: __init__.py
Created: 1/6/16 5:07 PM
"""
__author__ = 'Mark Scrimshire:@ekivemark'
# Hello World is here to test the loading of the module from fhir.settings
# from .settings import *
from .views.get import hello_world
from .views.delete import delete
from .views.get import (read, vread, history)
from .views.search import find
# Used to load post_save signal for write to backend fhir server
default_app_config = 'fhir_io_hapi.apps.fhir_io_hapi_config'
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4
"""
django-fhir
FILE: __init__.py
Created: 1/6/16 5:07 PM
"""
__author__ = 'Mark Scrimshire:@ekivemark'
# Hello World is here to test the loading of the module from fhir.settings
# from .settings import *
from .views.get import hello_world
from .views.delete import delete
from .views.get import (read, vread, history)
default_app_config = 'fhir_io_hapi.apps.fhir_io_hapi_config'
from .views.search import find
<commit_msg>Test post_save of AccessToken as step 1 in writing a fhir consent directive.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4
"""
django-fhir
FILE: __init__.py
Created: 1/6/16 5:07 PM
"""
__author__ = 'Mark Scrimshire:@ekivemark'
# Hello World is here to test the loading of the module from fhir.settings
# from .settings import *
from .views.get import hello_world
from .views.delete import delete
from .views.get import (read, vread, history)
from .views.search import find
# Used to load post_save signal for write to backend fhir server
default_app_config = 'fhir_io_hapi.apps.fhir_io_hapi_config'
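A hedged sketch of the AppConfig that default_app_config points at; the real fhir_io_hapi/apps.py may differ, and the signal module name below is an assumption:

from django.apps import AppConfig

class FhirIoHapiConfig(AppConfig):
    name = 'fhir_io_hapi'

    def ready(self):
        # Importing the handlers once the app registry is ready is what
        # connects the post_save signal for AccessToken.
        from . import signal_handlers  # noqa: F401 (hypothetical module)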
|
fea95164d03950f0255b1e6567f36040c67da173
|
gnotty/bots/rss.py
|
gnotty/bots/rss.py
|
try:
from feedparser import parse
except ImportError:
parse = None
from gnotty.bots import events
class RSSMixin(object):
"""
Mixin for bots that consume RSS feeds and post them to the
channel. Feeds are defined by the ``feeds`` keyword arg to
``__init__``, and should contain a sequence of RSS feed URLs.
Requires the ``feedparser`` library to be installed.
"""
def __init__(self, *args, **kwargs):
if parse is None:
from warnings import warn
warn("RSSMixin requires feedparser installed")
self.feeds = kwargs.pop("feeds", [])
self.feed_items = set()
# Consume initial feed items without posting them.
self.parse_feeds(message_channel=False)
super(RSSMixin, self).__init__(*args, **kwargs)
@events.on("timer", seconds=60)
def parse_feeds(self, message_channel=True):
"""
Iterates through each of the feed URLs, parses their items, and
        sends any items to the channel that have not previously
        been parsed.
"""
if parse:
for feed in self.feeds:
for item in parse(feed).entries:
if item["id"] not in self.feed_items:
self.feed_items.add(item["id"])
if message_channel:
self.message_channel("%(title)s: %(id)s" % item)
return
|
try:
from feedparser import parse
except ImportError:
parse = None
from gnotty.bots import events
class RSSMixin(object):
"""
Mixin for bots that consume RSS feeds and post them to the
channel. Feeds are defined by the ``feeds`` keyword arg to
``__init__``, and should contain a sequence of RSS feed URLs.
Requires the ``feedparser`` library to be installed.
"""
def __init__(self, *args, **kwargs):
if parse is None:
from warnings import warn
warn("RSSMixin requires feedparser installed")
self.feeds = kwargs.pop("feeds", [])
self.feed_items = set()
# Consume initial feed items without posting them.
self.parse_feeds(message_channel=False)
super(RSSMixin, self).__init__(*args, **kwargs)
@events.on("timer", seconds=60)
def parse_feeds(self, message_channel=True):
"""
Iterates through each of the feed URLs, parses their items, and
        sends any items to the channel that have not previously
        been parsed.
"""
if parse:
for feed_url in self.feeds:
feed = parse(feed_url)
for item in feed.entries:
if item["id"] not in self.feed_items:
self.feed_items.add(item["id"])
if message_channel:
message = self.format_item_message(feed, item)
self.message_channel(message)
return
def format_item_message(self, feed, item):
item["feed_title"] = feed.feed.title or feed.url
return "%(title)s: %(id)s (via %(feed_title)s)" % item
|
Allow overridable message formatting in the RSS bot.
|
Allow overridable message formatting in the RSS bot.
|
Python
|
bsd-2-clause
|
spaceone/gnotty,stephenmcd/gnotty,spaceone/gnotty,stephenmcd/gnotty,spaceone/gnotty,stephenmcd/gnotty
|
try:
from feedparser import parse
except ImportError:
parse = None
from gnotty.bots import events
class RSSMixin(object):
"""
Mixin for bots that consume RSS feeds and post them to the
channel. Feeds are defined by the ``feeds`` keyword arg to
``__init__``, and should contain a sequence of RSS feed URLs.
Requires the ``feedparser`` library to be installed.
"""
def __init__(self, *args, **kwargs):
if parse is None:
from warnings import warn
warn("RSSMixin requires feedparser installed")
self.feeds = kwargs.pop("feeds", [])
self.feed_items = set()
# Consume initial feed items without posting them.
self.parse_feeds(message_channel=False)
super(RSSMixin, self).__init__(*args, **kwargs)
@events.on("timer", seconds=60)
def parse_feeds(self, message_channel=True):
"""
Iterates through each of the feed URLs, parses their items, and
        sends any items to the channel that have not previously
        been parsed.
"""
if parse:
for feed in self.feeds:
for item in parse(feed).entries:
if item["id"] not in self.feed_items:
self.feed_items.add(item["id"])
if message_channel:
self.message_channel("%(title)s: %(id)s" % item)
return
Allow overridable message formatting in the RSS bot.
|
try:
from feedparser import parse
except ImportError:
parse = None
from gnotty.bots import events
class RSSMixin(object):
"""
Mixin for bots that consume RSS feeds and post them to the
channel. Feeds are defined by the ``feeds`` keyword arg to
``__init__``, and should contain a sequence of RSS feed URLs.
Requires the ``feedparser`` library to be installed.
"""
def __init__(self, *args, **kwargs):
if parse is None:
from warnings import warn
warn("RSSMixin requires feedparser installed")
self.feeds = kwargs.pop("feeds", [])
self.feed_items = set()
# Consume initial feed items without posting them.
self.parse_feeds(message_channel=False)
super(RSSMixin, self).__init__(*args, **kwargs)
@events.on("timer", seconds=60)
def parse_feeds(self, message_channel=True):
"""
Iterates through each of the feed URLs, parses their items, and
        sends any items to the channel that have not previously
        been parsed.
"""
if parse:
for feed_url in self.feeds:
feed = parse(feed_url)
for item in feed.entries:
if item["id"] not in self.feed_items:
self.feed_items.add(item["id"])
if message_channel:
message = self.format_item_message(feed, item)
self.message_channel(message)
return
def format_item_message(self, feed, item):
item["feed_title"] = feed.feed.title or feed.url
return "%(title)s: %(id)s (via %(feed_title)s)" % item
|
<commit_before>
try:
from feedparser import parse
except ImportError:
parse = None
from gnotty.bots import events
class RSSMixin(object):
"""
Mixin for bots that consume RSS feeds and post them to the
channel. Feeds are defined by the ``feeds`` keyword arg to
``__init__``, and should contain a sequence of RSS feed URLs.
Requires the ``feedparser`` library to be installed.
"""
def __init__(self, *args, **kwargs):
if parse is None:
from warnings import warn
warn("RSSMixin requires feedparser installed")
self.feeds = kwargs.pop("feeds", [])
self.feed_items = set()
# Consume initial feed items without posting them.
self.parse_feeds(message_channel=False)
super(RSSMixin, self).__init__(*args, **kwargs)
@events.on("timer", seconds=60)
def parse_feeds(self, message_channel=True):
"""
Iterates through each of the feed URLs, parses their items, and
        sends any items to the channel that have not previously
        been parsed.
"""
if parse:
for feed in self.feeds:
for item in parse(feed).entries:
if item["id"] not in self.feed_items:
self.feed_items.add(item["id"])
if message_channel:
self.message_channel("%(title)s: %(id)s" % item)
return
<commit_msg>Allow overridable message formatting in the RSS bot.<commit_after>
|
try:
from feedparser import parse
except ImportError:
parse = None
from gnotty.bots import events
class RSSMixin(object):
"""
Mixin for bots that consume RSS feeds and post them to the
channel. Feeds are defined by the ``feeds`` keyword arg to
``__init__``, and should contain a sequence of RSS feed URLs.
Requires the ``feedparser`` library to be installed.
"""
def __init__(self, *args, **kwargs):
if parse is None:
from warnings import warn
warn("RSSMixin requires feedparser installed")
self.feeds = kwargs.pop("feeds", [])
self.feed_items = set()
# Consume initial feed items without posting them.
self.parse_feeds(message_channel=False)
super(RSSMixin, self).__init__(*args, **kwargs)
@events.on("timer", seconds=60)
def parse_feeds(self, message_channel=True):
"""
Iterates through each of the feed URLs, parses their items, and
        sends any items to the channel that have not previously
        been parsed.
"""
if parse:
for feed_url in self.feeds:
feed = parse(feed_url)
for item in feed.entries:
if item["id"] not in self.feed_items:
self.feed_items.add(item["id"])
if message_channel:
message = self.format_item_message(feed, item)
self.message_channel(message)
return
def format_item_message(self, feed, item):
item["feed_title"] = feed.feed.title or feed.url
return "%(title)s: %(id)s (via %(feed_title)s)" % item
|
try:
from feedparser import parse
except ImportError:
parse = None
from gnotty.bots import events
class RSSMixin(object):
"""
Mixin for bots that consume RSS feeds and post them to the
channel. Feeds are defined by the ``feeds`` keyword arg to
``__init__``, and should contain a sequence of RSS feed URLs.
Requires the ``feedparser`` library to be installed.
"""
def __init__(self, *args, **kwargs):
if parse is None:
from warnings import warn
warn("RSSMixin requires feedparser installed")
self.feeds = kwargs.pop("feeds", [])
self.feed_items = set()
# Consume initial feed items without posting them.
self.parse_feeds(message_channel=False)
super(RSSMixin, self).__init__(*args, **kwargs)
@events.on("timer", seconds=60)
def parse_feeds(self, message_channel=True):
"""
Iterates through each of the feed URLs, parses their items, and
        sends any items to the channel that have not previously
        been parsed.
"""
if parse:
for feed in self.feeds:
for item in parse(feed).entries:
if item["id"] not in self.feed_items:
self.feed_items.add(item["id"])
if message_channel:
self.message_channel("%(title)s: %(id)s" % item)
return
Allow overridable message formatting in the RSS bot.
try:
from feedparser import parse
except ImportError:
parse = None
from gnotty.bots import events
class RSSMixin(object):
"""
Mixin for bots that consume RSS feeds and post them to the
channel. Feeds are defined by the ``feeds`` keyword arg to
``__init__``, and should contain a sequence of RSS feed URLs.
Requires the ``feedparser`` library to be installed.
"""
def __init__(self, *args, **kwargs):
if parse is None:
from warnings import warn
warn("RSSMixin requires feedparser installed")
self.feeds = kwargs.pop("feeds", [])
self.feed_items = set()
# Consume initial feed items without posting them.
self.parse_feeds(message_channel=False)
super(RSSMixin, self).__init__(*args, **kwargs)
@events.on("timer", seconds=60)
def parse_feeds(self, message_channel=True):
"""
Iterates through each of the feed URLs, parses their items, and
        sends any items to the channel that have not previously
        been parsed.
"""
if parse:
for feed_url in self.feeds:
feed = parse(feed_url)
for item in feed.entries:
if item["id"] not in self.feed_items:
self.feed_items.add(item["id"])
if message_channel:
message = self.format_item_message(feed, item)
self.message_channel(message)
return
def format_item_message(self, feed, item):
item["feed_title"] = feed.feed.title or feed.url
return "%(title)s: %(id)s (via %(feed_title)s)" % item
|
<commit_before>
try:
from feedparser import parse
except ImportError:
parse = None
from gnotty.bots import events
class RSSMixin(object):
"""
Mixin for bots that consume RSS feeds and post them to the
channel. Feeds are defined by the ``feeds`` keyword arg to
``__init__``, and should contain a sequence of RSS feed URLs.
Requires the ``feedparser`` library to be installed.
"""
def __init__(self, *args, **kwargs):
if parse is None:
from warnings import warn
warn("RSSMixin requires feedparser installed")
self.feeds = kwargs.pop("feeds", [])
self.feed_items = set()
# Consume initial feed items without posting them.
self.parse_feeds(message_channel=False)
super(RSSMixin, self).__init__(*args, **kwargs)
@events.on("timer", seconds=60)
def parse_feeds(self, message_channel=True):
"""
Iterates through each of the feed URLs, parses their items, and
        sends any items to the channel that have not previously
        been parsed.
"""
if parse:
for feed in self.feeds:
for item in parse(feed).entries:
if item["id"] not in self.feed_items:
self.feed_items.add(item["id"])
if message_channel:
self.message_channel("%(title)s: %(id)s" % item)
return
<commit_msg>Allow overridable message formatting in the RSS bot.<commit_after>
try:
from feedparser import parse
except ImportError:
parse = None
from gnotty.bots import events
class RSSMixin(object):
"""
Mixin for bots that consume RSS feeds and post them to the
channel. Feeds are defined by the ``feeds`` keyword arg to
``__init__``, and should contain a sequence of RSS feed URLs.
Requires the ``feedparser`` library to be installed.
"""
def __init__(self, *args, **kwargs):
if parse is None:
from warnings import warn
warn("RSSMixin requires feedparser installed")
self.feeds = kwargs.pop("feeds", [])
self.feed_items = set()
# Consume initial feed items without posting them.
self.parse_feeds(message_channel=False)
super(RSSMixin, self).__init__(*args, **kwargs)
@events.on("timer", seconds=60)
def parse_feeds(self, message_channel=True):
"""
Iterates through each of the feed URLs, parses their items, and
        sends any items to the channel that have not previously
        been parsed.
"""
if parse:
for feed_url in self.feeds:
feed = parse(feed_url)
for item in feed.entries:
if item["id"] not in self.feed_items:
self.feed_items.add(item["id"])
if message_channel:
message = self.format_item_message(feed, item)
self.message_channel(message)
return
def format_item_message(self, feed, item):
item["feed_title"] = feed.feed.title or feed.url
return "%(title)s: %(id)s (via %(feed_title)s)" % item
|
b20db32f0f00cf0451e2602697e81b129a149801
|
scheduler_partner.py
|
scheduler_partner.py
|
"""
Scheduler Partner interface (v2 extension).
"""
from novaclient import base
from novaclient.openstack.common.gettextutils import _
from novaclient import utils
class SchedulerPartner(base.Resource):
def __repr__(self):
return "<SchedulerPartner: %s>" % self.name
class SchedulerPartnerManager(base.Manager):
resource_class = SchedulerPartner
def create(self, body):
return self._create('/os-scheduler-partner', body, 'scheduler_partner')
|
Add create action for os-scheduler-partner
|
Add create action for os-scheduler-partner
|
Python
|
mit
|
daitr-gu/scheduler-api-client
|
Add create action for os-scheduler-partner
|
"""
Scheduler Partner interface (v2 extension).
"""
from novaclient import base
from novaclient.openstack.common.gettextutils import _
from novaclient import utils
class SchedulerPartner(base.Resource):
def __repr__(self):
return "<SchedulerPartner: %s>" % self.name
class SchedulerPartnerManager(base.Manager):
resource_class = SchedulerPartner
def create(self, body):
return self._create('/os-scheduler-partner', body, 'scheduler_partner')
|
<commit_before><commit_msg>Add create action for os-scheduler-partner<commit_after>
|
"""
Scheduler Partner interface (v2 extension).
"""
from novaclient import base
from novaclient.openstack.common.gettextutils import _
from novaclient import utils
class SchedulerPartner(base.Resource):
def __repr__(self):
return "<SchedulerPartner: %s>" % self.name
class SchedulerPartnerManager(base.Manager):
resource_class = SchedulerPartner
def create(self, body):
return self._create('/os-scheduler-partner', body, 'scheduler_partner')
|
Add create action for os-scheduler-partner"""
Scheduler Partner interface (v2 extension).
"""
from novaclient import base
from novaclient.openstack.common.gettextutils import _
from novaclient import utils
class SchedulerPartner(base.Resource):
def __repr__(self):
return "<SchedulerPartner: %s>" % self.name
class SchedulerPartnerManager(base.Manager):
resource_class = SchedulerPartner
def create(self, body):
return self._create('/os-scheduler-partner', body, 'scheduler_partner')
|
<commit_before><commit_msg>Add create action for os-scheduler-partner<commit_after>"""
Scheduler Partner interface (v2 extension).
"""
from novaclient import base
from novaclient.openstack.common.gettextutils import _
from novaclient import utils
class SchedulerPartner(base.Resource):
def __repr__(self):
return "<SchedulerPartner: %s>" % self.name
class SchedulerPartnerManager(base.Manager):
resource_class = SchedulerPartner
def create(self, body):
return self._create('/os-scheduler-partner', body, 'scheduler_partner')
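For illustration, a self-contained stand-in for the POST-and-unwrap contract that base.Manager._create provides; this fake is not novaclient's real plumbing:

class FakeManager(object):
    def _create(self, url, body, response_key):
        # Pretend to POST the body, then unwrap the response under the key.
        print("POST %s %s" % (url, body))
        return body[response_key]

partner = FakeManager()._create('/os-scheduler-partner',
                                {'scheduler_partner': {'name': 'partner-a'}},
                                'scheduler_partner')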
|
|
abadd7880d690cebfea865f8afd81c6fc585884c
|
scripts/bedpe2bed.py
|
scripts/bedpe2bed.py
|
import sys
import os
import argparse
import fileinput
import subprocess
parser = argparse.ArgumentParser(description = "Convert BEDPE into a BED file of fragments.", formatter_class = argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("--maxFragmentLength", help = "Maximum fragment length between two pairs of reads.", default = "1000")
args = parser.parse_args()
for line in fileinput.input("-"):
line = line.rstrip()
fields = line.split("\t")
    # Check for chimeras
if fields[0] == fields[3]:
if fields[0] != ".":
fragment_length = int(fields[5]) - int(fields[1])
if fragment_length < int(args.maxFragmentLength):
out = "\t".join([fields[0], fields[1], fields[5], fields[6], str(fragment_length)])
print(out)
|
import sys
import os
import argparse
import fileinput
import subprocess
parser = argparse.ArgumentParser(description = "Convert BEDPE into a BED file of fragments.", formatter_class = argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("--maxFragmentLength", help = "Maximum fragment length between two pairs of reads.", default = "1000")
parser.add_argument("--minFragmentLength", help = "Minimum fragment length between two pairs of reads.", default = "30")
args = parser.parse_args()
for line in fileinput.input("-"):
line = line.rstrip()
fields = line.split("\t")
    # Check for chimeras
if fields[0] == fields[3]:
if fields[0] != ".":
fragment_length = int(fields[5]) - int(fields[1])
if (fragment_length < int(args.maxFragmentLength)) and (fragment_length > int(args.minFragmentLength)):
out = "\t".join([fields[0], fields[1], fields[5], fields[6], str(fragment_length)])
print(out)
|
Add minimum fragment length filtering.
|
Add minimum fragment length filtering.
|
Python
|
apache-2.0
|
kauralasoo/Blood_ATAC,kauralasoo/Blood_ATAC,kauralasoo/Blood_ATAC
|
import sys
import os
import argparse
import fileinput
import subprocess
parser = argparse.ArgumentParser(description = "Convert BEDPE into a BED file of fragments.", formatter_class = argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("--maxFragmentLength", help = "Maximum fragment length between two pairs of reads.", default = "1000")
args = parser.parse_args()
for line in fileinput.input("-"):
line = line.rstrip()
fields = line.split("\t")
    # Check for chimeras
if fields[0] == fields[3]:
if fields[0] != ".":
fragment_length = int(fields[5]) - int(fields[1])
if fragment_length < int(args.maxFragmentLength):
out = "\t".join([fields[0], fields[1], fields[5], fields[6], str(fragment_length)])
                print(out)
Add minimum fragment length filtering.
|
import sys
import os
import argparse
import fileinput
import subprocess
parser = argparse.ArgumentParser(description = "Convert BEDPE into a BED file of fragments.", formatter_class = argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("--maxFragmentLength", help = "Maximum fragment length between two pairs of reads.", default = "1000")
parser.add_argument("--minFragmentLength", help = "Minimum fragment length between two pairs of reads.", default = "30")
args = parser.parse_args()
for line in fileinput.input("-"):
line = line.rstrip()
fields = line.split("\t")
    # Check for chimeras
if fields[0] == fields[3]:
if fields[0] != ".":
fragment_length = int(fields[5]) - int(fields[1])
if (fragment_length < int(args.maxFragmentLength)) and (fragment_length > int(args.minFragmentLength)):
out = "\t".join([fields[0], fields[1], fields[5], fields[6], str(fragment_length)])
print(out)
|
<commit_before>import sys
import os
import argparse
import fileinput
import subprocess
parser = argparse.ArgumentParser(description = "Convert BEDPE into a BED file of fragments.", formatter_class = argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("--maxFragmentLength", help = "Maximum fragment length between two pairs of reads.", default = "1000")
args = parser.parse_args()
for line in fileinput.input("-"):
line = line.rstrip()
fields = line.split("\t")
    # Check for chimeras
if fields[0] == fields[3]:
if fields[0] != ".":
fragment_length = int(fields[5]) - int(fields[1])
if fragment_length < int(args.maxFragmentLength):
out = "\t".join([fields[0], fields[1], fields[5], fields[6], str(fragment_length)])
print(out)<commit_msg>Add minimum fragment length filtering.<commit_after>
|
import sys
import os
import argparse
import fileinput
import subprocess
parser = argparse.ArgumentParser(description = "Convert BEDPE into a BED file of fragments.", formatter_class = argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("--maxFragmentLength", help = "Maximum fragment length between two pairs of reads.", default = "1000")
parser.add_argument("--minFragmentLength", help = "Minimum fragment length between two pairs of reads.", default = "30")
args = parser.parse_args()
for line in fileinput.input("-"):
line = line.rstrip()
fields = line.split("\t")
    # Check for chimeras
if fields[0] == fields[3]:
if fields[0] != ".":
fragment_length = int(fields[5]) - int(fields[1])
if (fragment_length < int(args.maxFragmentLength)) and (fragment_length > int(args.minFragmentLength)):
out = "\t".join([fields[0], fields[1], fields[5], fields[6], str(fragment_length)])
print(out)
|
import sys
import os
import argparse
import fileinput
import subprocess
parser = argparse.ArgumentParser(description = "Convert BEDPE into a BED file of fragments.", formatter_class = argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("--maxFragmentLength", help = "Maximum fragment length between two pairs of reads.", default = "1000")
args = parser.parse_args()
for line in fileinput.input("-"):
line = line.rstrip()
fields = line.split("\t")
    # Check for chimeras
if fields[0] == fields[3]:
if fields[0] != ".":
fragment_length = int(fields[5]) - int(fields[1])
if fragment_length < int(args.maxFragmentLength):
out = "\t".join([fields[0], fields[1], fields[5], fields[6], str(fragment_length)])
                print(out)
Add minimum fragment length filtering.
import sys
import os
import argparse
import fileinput
import subprocess
parser = argparse.ArgumentParser(description = "Convert BEDPE into a BED file of fragments.", formatter_class = argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("--maxFragmentLength", help = "Maximum fragment length between two pairs of reads.", default = "1000")
parser.add_argument("--minFragmentLength", help = "Minimum fragment length between two pairs of reads.", default = "30")
args = parser.parse_args()
for line in fileinput.input("-"):
line = line.rstrip()
fields = line.split("\t")
    # Check for chimeras
if fields[0] == fields[3]:
if fields[0] != ".":
fragment_length = int(fields[5]) - int(fields[1])
if (fragment_length < int(args.maxFragmentLength)) and (fragment_length > int(args.minFragmentLength)):
out = "\t".join([fields[0], fields[1], fields[5], fields[6], str(fragment_length)])
print(out)
|
<commit_before>import sys
import os
import argparse
import fileinput
import subprocess
parser = argparse.ArgumentParser(description = "Convert BEDPE into a BED file of fragments.", formatter_class = argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("--maxFragmentLength", help = "Maximum fragment length between two pairs of reads.", default = "1000")
args = parser.parse_args()
for line in fileinput.input("-"):
line = line.rstrip()
fields = line.split("\t")
    # Check for chimeras
if fields[0] == fields[3]:
if fields[0] != ".":
fragment_length = int(fields[5]) - int(fields[1])
if fragment_length < int(args.maxFragmentLength):
out = "\t".join([fields[0], fields[1], fields[5], fields[6], str(fragment_length)])
print(out)<commit_msg>Add minimum fragment length filtering.<commit_after>import sys
import os
import argparse
import fileinput
import subprocess
parser = argparse.ArgumentParser(description = "Convert BEDPE into a BED file of fragments.", formatter_class = argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("--maxFragmentLength", help = "Maximum fragment length between two pairs of reads.", default = "1000")
parser.add_argument("--minFragmentLength", help = "Minimum fragment length between two pairs of reads.", default = "30")
args = parser.parse_args()
for line in fileinput.input("-"):
line = line.rstrip()
fields = line.split("\t")
    # Check for chimeras
if fields[0] == fields[3]:
if fields[0] != ".":
fragment_length = int(fields[5]) - int(fields[1])
if (fragment_length < int(args.maxFragmentLength)) and (fragment_length > int(args.minFragmentLength)):
out = "\t".join([fields[0], fields[1], fields[5], fields[6], str(fragment_length)])
print(out)
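A worked example of the filter on a single BEDPE record (fields 1 and 5 are the leftmost start and rightmost end of the read pair):

fields = ["chr1", "100", "250", "chr1", "450", "600", "frag1"]
fragment_length = int(fields[5]) - int(fields[1])  # 600 - 100 = 500
# 500 sits inside the default (30, 1000) window, so the fragment is kept.
assert 30 < fragment_length < 1000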
|
1d15a2463f0149531f8cb6913ed3093a0e2220b4
|
espresso/response.py
|
espresso/response.py
|
import logging
class Response(object):
"""The object sent back to the callback
Contains methods for calling senders and responders on Espresso
"""
def __init__(self, robot, msg, match):
self.robot = robot
self.msg = msg
self.match = match
def send(self, message, channel=None):
channel = self.msg.channel.name or channel
self.robot.send(message, channel)
def reply(self, user, message, channel=None):
channel = self.msg.channel.name or channel
logging.debug("message %s on channel #%s to user @%s" % (message, channel, user.name))
self.robot.send("@{}: {}".format(user.name, message), channel)
|
import logging
class Response(object):
"""The object sent back to the callback
Contains methods for calling senders and responders on Espresso
"""
def __init__(self, robot, msg, match):
self.robot = robot
self.msg = msg
self.match = match
def send(self, message, channel=None):
channel = self.msg.channel.name or channel
self.robot.send(message, channel)
def reply(self, user, message, channel=None):
channel = self.msg.channel.name or channel
logging.debug("message %s on channel #%s to user @%s", message, channel, user.name)
self.robot.send("@{}: {}".format(user.name, message), channel)
|
Fix templating in _another_ debug statement
|
Fix templating in _another_ debug statement
|
Python
|
bsd-3-clause
|
ratchetrobotics/espresso
|
import logging
class Response(object):
"""The object sent back to the callback
Contains methods for calling senders and responders on Espresso
"""
def __init__(self, robot, msg, match):
self.robot = robot
self.msg = msg
self.match = match
def send(self, message, channel=None):
channel = self.msg.channel.name or channel
self.robot.send(message, channel)
def reply(self, user, message, channel=None):
channel = self.msg.channel.name or channel
logging.debug("message %s on channel #%s to user @%s" % (message, channel, user.name))
self.robot.send("@{}: {}".format(user.name, message), channel)
Fix templating in _another_ debug statement
|
import logging
class Response(object):
"""The object sent back to the callback
Contains methods for calling senders and responders on Espresso
"""
def __init__(self, robot, msg, match):
self.robot = robot
self.msg = msg
self.match = match
def send(self, message, channel=None):
channel = self.msg.channel.name or channel
self.robot.send(message, channel)
def reply(self, user, message, channel=None):
channel = self.msg.channel.name or channel
logging.debug("message %s on channel #%s to user @%s", message, channel, user.name)
self.robot.send("@{}: {}".format(user.name, message), channel)
|
<commit_before>import logging
class Response(object):
"""The object sent back to the callback
Contains methods for calling senders and responders on Espresso
"""
def __init__(self, robot, msg, match):
self.robot = robot
self.msg = msg
self.match = match
def send(self, message, channel=None):
channel = self.msg.channel.name or channel
self.robot.send(message, channel)
def reply(self, user, message, channel=None):
channel = self.msg.channel.name or channel
logging.debug("message %s on channel #%s to user @%s" % (message, channel, user.name))
self.robot.send("@{}: {}".format(user.name, message), channel)
<commit_msg>Fix templating in _another_ debug statement<commit_after>
|
import logging
class Response(object):
"""The object sent back to the callback
Contains methods for calling senders and responders on Espresso
"""
def __init__(self, robot, msg, match):
self.robot = robot
self.msg = msg
self.match = match
def send(self, message, channel=None):
channel = self.msg.channel.name or channel
self.robot.send(message, channel)
def reply(self, user, message, channel=None):
channel = self.msg.channel.name or channel
logging.debug("message %s on channel #%s to user @%s", message, channel, user.name)
self.robot.send("@{}: {}".format(user.name, message), channel)
|
import logging
class Response(object):
"""The object sent back to the callback
Contains methods for calling senders and responders on Espresso
"""
def __init__(self, robot, msg, match):
self.robot = robot
self.msg = msg
self.match = match
def send(self, message, channel=None):
channel = self.msg.channel.name or channel
self.robot.send(message, channel)
def reply(self, user, message, channel=None):
channel = self.msg.channel.name or channel
logging.debug("message %s on channel #%s to user @%s" % (message, channel, user.name))
self.robot.send("@{}: {}".format(user.name, message), channel)
Fix templating in _another_ debug statement
import logging
class Response(object):
"""The object sent back to the callback
Contains methods for calling senders and responders on Espresso
"""
def __init__(self, robot, msg, match):
self.robot = robot
self.msg = msg
self.match = match
def send(self, message, channel=None):
channel = self.msg.channel.name or channel
self.robot.send(message, channel)
def reply(self, user, message, channel=None):
channel = self.msg.channel.name or channel
logging.debug("message %s on channel #%s to user @%s", message, channel, user.name)
self.robot.send("@{}: {}".format(user.name, message), channel)
|
<commit_before>import logging
class Response(object):
"""The object sent back to the callback
Contains methods for calling senders and responders on Espresso
"""
def __init__(self, robot, msg, match):
self.robot = robot
self.msg = msg
self.match = match
def send(self, message, channel=None):
channel = self.msg.channel.name or channel
self.robot.send(message, channel)
def reply(self, user, message, channel=None):
channel = self.msg.channel.name or channel
logging.debug("message %s on channel #%s to user @%s" % (message, channel, user.name))
self.robot.send("@{}: {}".format(user.name, message), channel)
<commit_msg>Fix templating in _another_ debug statement<commit_after>import logging
class Response(object):
"""The object sent back to the callback
Contains methods for calling senders and responders on Espresso
"""
def __init__(self, robot, msg, match):
self.robot = robot
self.msg = msg
self.match = match
def send(self, message, channel=None):
channel = self.msg.channel.name or channel
self.robot.send(message, channel)
def reply(self, user, message, channel=None):
channel = self.msg.channel.name or channel
logging.debug("message %s on channel #%s to user @%s", message, channel, user.name)
self.robot.send("@{}: {}".format(user.name, message), channel)
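A quick usage sketch with throwaway stand-ins for the robot and message objects; these fakes assume the Response class defined above and are not Espresso's real types:

class FakeRobot(object):
    def send(self, message, channel):
        print("#%s <- %s" % (channel, message))

class FakeUser(object):
    name = "alice"

class FakeMsg(object):
    channel = type("FakeChannel", (), {"name": "general"})()

resp = Response(FakeRobot(), FakeMsg(), None)
resp.reply(FakeUser(), "hello")  # prints: #general <- @alice: hello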
|
0f54780e142cb6bd15df2ed702bd4fa4b2d3fe79
|
keys.py
|
keys.py
|
#!/usr/bin/env python
#keys.py
keys = dict(
consumer_key = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
consumer_secret = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
access_key = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
access_secret = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
)
|
#!/usr/bin/env python
#keys.py
keys = dict(
consumer_key = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
consumer_secret = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
access_key = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
access_secret = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
)
|
Use spaces instead of tabs
|
Use spaces instead of tabs
|
Python
|
mit
|
bman4789/weatherBot,bman4789/weatherBot,BrianMitchL/weatherBot
|
#!/usr/bin/env python
#keys.py
keys = dict(
consumer_key = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
consumer_secret = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
access_key = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
access_secret = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
)
Use spaces instead of tabs
|
#!/usr/bin/env python
#keys.py
keys = dict(
consumer_key = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
consumer_secret = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
access_key = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
access_secret = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
)
|
<commit_before>#!/usr/bin/env python
#keys.py
keys = dict(
consumer_key = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
consumer_secret = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
access_key = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
access_secret = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
)<commit_msg>Use spaces instead of tabs<commit_after>
|
#!/usr/bin/env python
#keys.py
keys = dict(
consumer_key = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
consumer_secret = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
access_key = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
access_secret = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
)
|
#!/usr/bin/env python
#keys.py
keys = dict(
consumer_key = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
consumer_secret = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
access_key = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
access_secret = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
)
Use spaces instead of tabs
#!/usr/bin/env python
#keys.py
keys = dict(
consumer_key = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
consumer_secret = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
access_key = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
access_secret = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
)
|
<commit_before>#!/usr/bin/env python
#keys.py
keys = dict(
consumer_key = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
consumer_secret = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
access_key = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
access_secret = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
)<commit_msg>Use spaces instead of tabs<commit_after>#!/usr/bin/env python
#keys.py
keys = dict(
consumer_key = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
consumer_secret = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
access_key = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
access_secret = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
)
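A hypothetical guard a consumer of this template could run before connecting; it raises until the placeholder values are replaced with real credentials:

from keys import keys

for name, value in keys.items():
    # Each template value is a run of 'x' characters, so this assert
    # fails fast while any placeholder remains.
    assert not value.startswith('xxxxxxxxxx'), "%s still holds the placeholder" % name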
|
305b3a83999e7c9d5a80de5aa89e3162d4090d64
|
controllers/default.py
|
controllers/default.py
|
def index():
def GET():
return locals()
@request.restful()
def api():
response.view = 'generic.json'
def GET(resource,resource_id):
if not resource=='study': raise HTTP(400)
# return the correct nexson of study_id
return dict()
def POST(resource,resource_id):
if not resource=='study': raise HTTP(400)
# overwrite the nexson of study_id with the POSTed data
# 1) verify that it is valid json
# 2) Update local treenexus git repo at ../treenexus
# 3) See if the hash of the current value of the file matches the hash of the POSTed data. If so, do nothing and return successfully.
# 4) If not, overwrite the correct nexson file on disk
# 5) Make a git commit with the updated nexson (add as much automated metadata to the commit message as possible)
# 6) return successfully
return dict()
return locals()
|
def index():
def GET():
return locals()
@request.restful()
def api():
response.view = 'generic.json'
def GET(resource,resource_id):
if not resource=='study': raise HTTP(400)
# return the correct nexson of study_id
return _get_nexson(resource_id)
def POST(resource,resource_id):
if not resource=='study': raise HTTP(400)
# overwrite the nexson of study_id with the POSTed data
# 1) verify that it is valid json
# 2) Update local treenexus git submodule at ./treenexus
# 3) See if the hash of the current value of the file matches the hash of the POSTed data. If so, do nothing and return successfully.
# 4) If not, overwrite the correct nexson file on disk
# 5) Make a git commit with the updated nexson (add as much automated metadata to the commit message as possible)
# 6) return successfully
return dict()
return locals()
def _get_nexson(study_id):
# the internal file structure will change soon to study_id/study_id-N.json, where N=0,1,2,3...
    nexson_file = open("treenexus/study/0/" + study_id + ".json")
    return nexson_file.read()
|
Refactor treenexus logic into a function
|
Refactor treenexus logic into a function
|
Python
|
bsd-2-clause
|
OpenTreeOfLife/phylesystem-api,OpenTreeOfLife/phylesystem-api,leto/new_opentree_api,OpenTreeOfLife/phylesystem-api,leto/new_opentree_api
|
def index():
def GET():
return locals()
@request.restful()
def api():
response.view = 'generic.json'
def GET(resource,resource_id):
if not resource=='study': raise HTTP(400)
# return the correct nexson of study_id
return dict()
def POST(resource,resource_id):
if not resource=='study': raise HTTP(400)
# overwrite the nexson of study_id with the POSTed data
# 1) verify that it is valid json
# 2) Update local treenexus git repo at ../treenexus
# 3) See if the hash of the current value of the file matches the hash of the POSTed data. If so, do nothing and return successfully.
# 4) If not, overwrite the correct nexson file on disk
# 5) Make a git commit with the updated nexson (add as much automated metadata to the commit message as possible)
# 6) return successfully
return dict()
return locals()
Refactor treenexus logic into a function
|
def index():
def GET():
return locals()
@request.restful()
def api():
response.view = 'generic.json'
def GET(resource,resource_id):
if not resource=='study': raise HTTP(400)
# return the correct nexson of study_id
return _get_nexson(resource_id)
def POST(resource,resource_id):
if not resource=='study': raise HTTP(400)
# overwrite the nexson of study_id with the POSTed data
# 1) verify that it is valid json
# 2) Update local treenexus git submodule at ./treenexus
# 3) See if the hash of the current value of the file matches the hash of the POSTed data. If so, do nothing and return successfully.
# 4) If not, overwrite the correct nexson file on disk
# 5) Make a git commit with the updated nexson (add as much automated metadata to the commit message as possible)
# 6) return successfully
return dict()
return locals()
def _get_nexson(study_id):
# the internal file structure will change soon to study_id/study_id-N.json, where N=0,1,2,3...
    nexson_file = open("treenexus/study/0/" + study_id + ".json")
    return nexson_file.read()
|
<commit_before>def index():
def GET():
return locals()
@request.restful()
def api():
response.view = 'generic.json'
def GET(resource,resource_id):
if not resource=='study': raise HTTP(400)
# return the correct nexson of study_id
return dict()
def POST(resource,resource_id):
if not resource=='study': raise HTTP(400)
# overwrite the nexson of study_id with the POSTed data
# 1) verify that it is valid json
# 2) Update local treenexus git repo at ../treenexus
# 3) See if the hash of the current value of the file matches the hash of the POSTed data. If so, do nothing and return successfully.
# 4) If not, overwrite the correct nexson file on disk
# 5) Make a git commit with the updated nexson (add as much automated metadata to the commit message as possible)
# 6) return successfully
return dict()
return locals()
<commit_msg>Refactor treenexus logic into a function<commit_after>
|
def index():
def GET():
return locals()
@request.restful()
def api():
response.view = 'generic.json'
def GET(resource,resource_id):
if not resource=='study': raise HTTP(400)
# return the correct nexson of study_id
return _get_nexson(resource_id)
def POST(resource,resource_id):
if not resource=='study': raise HTTP(400)
# overwrite the nexson of study_id with the POSTed data
# 1) verify that it is valid json
# 2) Update local treenexus git submodule at ./treenexus
# 3) See if the hash of the current value of the file matches the hash of the POSTed data. If so, do nothing and return successfully.
# 4) If not, overwrite the correct nexson file on disk
# 5) Make a git commit with the updated nexson (add as much automated metadata to the commit message as possible)
# 6) return successfully
return dict()
return locals()
def _get_nexson(study_id):
# the internal file structure will change soon to study_id/study_id-N.json, where N=0,1,2,3...
nexson_file = file.open("treenexus/study/0/" + study_id + ".json")
return nexson_file.contents()
|
def index():
def GET():
return locals()
@request.restful()
def api():
response.view = 'generic.json'
def GET(resource,resource_id):
if not resource=='study': raise HTTP(400)
# return the correct nexson of study_id
return dict()
def POST(resource,resource_id):
if not resource=='study': raise HTTP(400)
# overwrite the nexson of study_id with the POSTed data
# 1) verify that it is valid json
# 2) Update local treenexus git repo at ../treenexus
# 3) See if the hash of the current value of the file matches the hash of the POSTed data. If so, do nothing and return successfully.
# 4) If not, overwrite the correct nexson file on disk
# 5) Make a git commit with the updated nexson (add as much automated metadata to the commit message as possible)
# 6) return successfully
return dict()
return locals()
Refactor treenexus logic into a functiondef index():
def GET():
return locals()
@request.restful()
def api():
response.view = 'generic.json'
def GET(resource,resource_id):
if not resource=='study': raise HTTP(400)
# return the correct nexson of study_id
return _get_nexson(resource_id)
def POST(resource,resource_id):
if not resource=='study': raise HTTP(400)
# overwrite the nexson of study_id with the POSTed data
# 1) verify that it is valid json
# 2) Update local treenexus git submodule at ./treenexus
# 3) See if the hash of the current value of the file matches the hash of the POSTed data. If so, do nothing and return successfully.
# 4) If not, overwrite the correct nexson file on disk
# 5) Make a git commit with the updated nexson (add as much automated metadata to the commit message as possible)
# 6) return successfully
return dict()
return locals()
def _get_nexson(study_id):
# the internal file structure will change soon to study_id/study_id-N.json, where N=0,1,2,3...
nexson_file = file.open("treenexus/study/0/" + study_id + ".json")
return nexson_file.contents()
|
<commit_before>def index():
def GET():
return locals()
@request.restful()
def api():
response.view = 'generic.json'
def GET(resource,resource_id):
if not resource=='study': raise HTTP(400)
# return the correct nexson of study_id
return dict()
def POST(resource,resource_id):
if not resource=='study': raise HTTP(400)
# overwrite the nexson of study_id with the POSTed data
# 1) verify that it is valid json
# 2) Update local treenexus git repo at ../treenexus
# 3) See if the hash of the current value of the file matches the hash of the POSTed data. If so, do nothing and return successfully.
# 4) If not, overwrite the correct nexson file on disk
# 5) Make a git commit with the updated nexson (add as much automated metadata to the commit message as possible)
# 6) return successfully
return dict()
return locals()
<commit_msg>Refactor treenexus logic into a function<commit_after>def index():
def GET():
return locals()
@request.restful()
def api():
response.view = 'generic.json'
def GET(resource,resource_id):
if not resource=='study': raise HTTP(400)
# return the correct nexson of study_id
return _get_nexson(resource_id)
def POST(resource,resource_id):
if not resource=='study': raise HTTP(400)
# overwrite the nexson of study_id with the POSTed data
# 1) verify that it is valid json
# 2) Update local treenexus git submodule at ./treenexus
# 3) See if the hash of the current value of the file matches the hash of the POSTed data. If so, do nothing and return successfully.
# 4) If not, overwrite the correct nexson file on disk
# 5) Make a git commit with the updated nexson (add as much automated metadata to the commit message as possible)
# 6) return successfully
return dict()
return locals()
def _get_nexson(study_id):
# the internal file structure will change soon to study_id/study_id-N.json, where N=0,1,2,3...
nexson_file = file.open("treenexus/study/0/" + study_id + ".json")
return nexson_file.contents()
|
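A minimal sketch of step 1 of the POST checklist above (verifying the payload is valid JSON). The json module usage is standard library; reusing web2py's HTTP helper for the error response is an assumption here, not something the record specifies:

import json

def _validate_nexson(raw_body):
    # Step 1: reject payloads that are not valid JSON before touching git.
    # json raises ValueError (JSONDecodeError in Python 3) on bad input.
    try:
        return json.loads(raw_body)
    except ValueError:
        raise HTTP(400, 'POST body is not valid JSON')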
15914cc8bd29392f204bec021b8cc34bf8507daa
|
saleor/integrations/management/commands/update_integrations.py
|
saleor/integrations/management/commands/update_integrations.py
|
from __future__ import unicode_literals
from django.core.management import CommandError, BaseCommand
from saleor.integrations.feeds import SaleorFeed
from saleor.integrations import utils
class Command(BaseCommand):
help = 'Updates integration feeds. '
feed_classes = {'saleor': SaleorFeed}
def add_arguments(self, parser):
parser.add_argument('feed_name', nargs='+', type=str)
def handle(self, *args, **options):
feed_names = options['feed_name'] or self.feed_classes.keys()
for feed_name in feed_names:
feed = self.feed_classes.get(feed_name)
if feed is None:
raise CommandError('Feed "%s" does not exist' % feed_name)
utils.update_feed(feed())
|
from __future__ import unicode_literals
from django.core.management import CommandError, BaseCommand
from ....integrations.feeds import SaleorFeed
from ....integrations import utils
class Command(BaseCommand):
help = ('Updates integration feeds. '
'If feed name not provided, updates all available feeds')
feed_classes = {'saleor': SaleorFeed}
def add_arguments(self, parser):
parser.add_argument('feed_name', nargs='*', type=str, default=None)
def handle(self, *args, **options):
feed_names = options.get('feed_name') or self.feed_classes.keys()
for feed_name in feed_names:
feed = self.feed_classes.get(feed_name)
if feed is None:
raise CommandError('Feed "%s" does not exist' % feed_name)
utils.update_feed(feed())
|
Fix imports style and make feed_name optional
|
Fix imports style and make feed_name optional
|
Python
|
bsd-3-clause
|
KenMutemi/saleor,UITools/saleor,jreigel/saleor,KenMutemi/saleor,HyperManTT/ECommerceSaleor,itbabu/saleor,jreigel/saleor,HyperManTT/ECommerceSaleor,tfroehlich82/saleor,tfroehlich82/saleor,maferelo/saleor,itbabu/saleor,KenMutemi/saleor,mociepka/saleor,UITools/saleor,car3oon/saleor,tfroehlich82/saleor,car3oon/saleor,UITools/saleor,maferelo/saleor,HyperManTT/ECommerceSaleor,mociepka/saleor,UITools/saleor,UITools/saleor,jreigel/saleor,maferelo/saleor,itbabu/saleor,car3oon/saleor,mociepka/saleor
|
from __future__ import unicode_literals
from django.core.management import CommandError, BaseCommand
from saleor.integrations.feeds import SaleorFeed
from saleor.integrations import utils
class Command(BaseCommand):
help = 'Updates integration feeds. '
feed_classes = {'saleor': SaleorFeed}
def add_arguments(self, parser):
parser.add_argument('feed_name', nargs='+', type=str)
def handle(self, *args, **options):
feed_names = options['feed_name'] or self.feed_classes.keys()
for feed_name in feed_names:
feed = self.feed_classes.get(feed_name)
if feed is None:
raise CommandError('Feed "%s" does not exist' % feed_name)
utils.update_feed(feed())
Fix imports style and make feed_name optional
|
from __future__ import unicode_literals
from django.core.management import CommandError, BaseCommand
from ....integrations.feeds import SaleorFeed
from ....integrations import utils
class Command(BaseCommand):
help = ('Updates integration feeds. '
'If feed name not provided, updates all available feeds')
feed_classes = {'saleor': SaleorFeed}
def add_arguments(self, parser):
parser.add_argument('feed_name', nargs='*', type=str, default=None)
def handle(self, *args, **options):
feed_names = options.get('feed_name') or self.feed_classes.keys()
for feed_name in feed_names:
feed = self.feed_classes.get(feed_name)
if feed is None:
raise CommandError('Feed "%s" does not exist' % feed_name)
utils.update_feed(feed())
|
<commit_before>from __future__ import unicode_literals
from django.core.management import CommandError, BaseCommand
from saleor.integrations.feeds import SaleorFeed
from saleor.integrations import utils
class Command(BaseCommand):
help = 'Updates integration feeds. '
feed_classes = {'saleor': SaleorFeed}
def add_arguments(self, parser):
parser.add_argument('feed_name', nargs='+', type=str)
def handle(self, *args, **options):
feed_names = options['feed_name'] or self.feed_classes.keys()
for feed_name in feed_names:
feed = self.feed_classes.get(feed_name)
if feed is None:
raise CommandError('Feed "%s" does not exist' % feed_name)
utils.update_feed(feed())
<commit_msg>Fix imports style and make feed_name optional<commit_after>
|
from __future__ import unicode_literals
from django.core.management import CommandError, BaseCommand
from ....integrations.feeds import SaleorFeed
from ....integrations import utils
class Command(BaseCommand):
help = ('Updates integration feeds. '
'If feed name not provided, updates all available feeds')
feed_classes = {'saleor': SaleorFeed}
def add_arguments(self, parser):
parser.add_argument('feed_name', nargs='*', type=str, default=None)
def handle(self, *args, **options):
feed_names = options.get('feed_name') or self.feed_classes.keys()
for feed_name in feed_names:
feed = self.feed_classes.get(feed_name)
if feed is None:
raise CommandError('Feed "%s" does not exist' % feed_name)
utils.update_feed(feed())
|
from __future__ import unicode_literals
from django.core.management import CommandError, BaseCommand
from saleor.integrations.feeds import SaleorFeed
from saleor.integrations import utils
class Command(BaseCommand):
help = 'Updates integration feeds. '
feed_classes = {'saleor': SaleorFeed}
def add_arguments(self, parser):
parser.add_argument('feed_name', nargs='+', type=str)
def handle(self, *args, **options):
feed_names = options['feed_name'] or self.feed_classes.keys()
for feed_name in feed_names:
feed = self.feed_classes.get(feed_name)
if feed is None:
raise CommandError('Feed "%s" does not exist' % feed_name)
utils.update_feed(feed())
Fix imports style and make feed_name optionalfrom __future__ import unicode_literals
from django.core.management import CommandError, BaseCommand
from ....integrations.feeds import SaleorFeed
from ....integrations import utils
class Command(BaseCommand):
help = ('Updates integration feeds. '
'If feed name not provided, updates all available feeds')
feed_classes = {'saleor': SaleorFeed}
def add_arguments(self, parser):
parser.add_argument('feed_name', nargs='*', type=str, default=None)
def handle(self, *args, **options):
feed_names = options.get('feed_name') or self.feed_classes.keys()
for feed_name in feed_names:
feed = self.feed_classes.get(feed_name)
if feed is None:
raise CommandError('Feed "%s" does not exist' % feed_name)
utils.update_feed(feed())
|
<commit_before>from __future__ import unicode_literals
from django.core.management import CommandError, BaseCommand
from saleor.integrations.feeds import SaleorFeed
from saleor.integrations import utils
class Command(BaseCommand):
help = 'Updates integration feeds. '
feed_classes = {'saleor': SaleorFeed}
def add_arguments(self, parser):
parser.add_argument('feed_name', nargs='+', type=str)
def handle(self, *args, **options):
feed_names = options['feed_name'] or self.feed_classes.keys()
for feed_name in feed_names:
feed = self.feed_classes.get(feed_name)
if feed is None:
raise CommandError('Feed "%s" does not exist' % feed_name)
utils.update_feed(feed())
<commit_msg>Fix imports style and make feed_name optional<commit_after>from __future__ import unicode_literals
from django.core.management import CommandError, BaseCommand
from ....integrations.feeds import SaleorFeed
from ....integrations import utils
class Command(BaseCommand):
help = ('Updates integration feeds. '
'If feed name not provided, updates all available feeds')
feed_classes = {'saleor': SaleorFeed}
def add_arguments(self, parser):
parser.add_argument('feed_name', nargs='*', type=str, default=None)
def handle(self, *args, **options):
feed_names = options.get('feed_name') or self.feed_classes.keys()
for feed_name in feed_names:
feed = self.feed_classes.get(feed_name)
if feed is None:
raise CommandError('Feed "%s" does not exist' % feed_name)
utils.update_feed(feed())
|
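The behavioral core of this record is the switch from nargs='+' to nargs='*': '+' demands at least one positional value, while '*' accepts zero or more, which is what makes feed_name optional. A standalone argparse sketch of the difference (hypothetical parser, independent of Django):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('feed_name', nargs='*', type=str, default=None)

# Zero values parse cleanly and leave a falsy result, so the command can
# fall back to all feeds; with nargs='+' this would be a usage error.
print(parser.parse_args([]).feed_name)
print(parser.parse_args(['saleor']).feed_name)  # ['saleor']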
1a804bba0ee553cd87d29599284c1b422ad28196
|
server/crashmanager/management/commands/cleanup_old_crashes.py
|
server/crashmanager/management/commands/cleanup_old_crashes.py
|
from django.core.management.base import NoArgsCommand
from crashmanager.models import CrashEntry, Bucket, Bug
from django.db.models.aggregates import Count
from datetime import datetime, timedelta
CLEANUP_CRASHES_AFTER_DAYS = 14
CLEANUP_FIXED_BUCKETS_AFTER_DAYS = 3
class Command(NoArgsCommand):
help = "Cleanup old crash entries."
def handle_noargs(self, **options):
# Select all buckets that have been closed for x days
expiryDate = datetime.now().date() - timedelta(days=CLEANUP_FIXED_BUCKETS_AFTER_DAYS)
bugs = Bug.objects.filter(closed__lt = expiryDate)
for bug in bugs:
# This delete causes buckets referring to this bug, as well as entries
# referring these buckets, to be deleted as well due to cascading delete.
bug.delete()
# Select all buckets that are empty and delete them
buckets = Bucket.objects.annotate(size=Count('crashentry')).filter(size=0)
for bucket in buckets:
bucket.delete()
# Select all entries that are older than x days
expiryDate = datetime.now().date() - timedelta(days=CLEANUP_CRASHES_AFTER_DAYS)
entries = CrashEntry.objects.filter(created__lt = expiryDate)
for entry in entries:
entry.delete()
|
from django.core.management.base import NoArgsCommand
from crashmanager.models import CrashEntry, Bucket, Bug
from django.db.models.aggregates import Count
from datetime import datetime, timedelta
from django.conf import settings
class Command(NoArgsCommand):
help = "Cleanup old crash entries."
def handle_noargs(self, **options):
cleanup_crashes_after_days = getattr(settings, 'CLEANUP_CRASHES_AFTER_DAYS', 14)
cleanup_fixed_buckets_after_days = getattr(settings, 'CLEANUP_FIXED_BUCKETS_AFTER_DAYS', 3)
# Select all buckets that have been closed for x days
expiryDate = datetime.now().date() - timedelta(days=cleanup_fixed_buckets_after_days)
bugs = Bug.objects.filter(closed__lt = expiryDate)
for bug in bugs:
# This delete causes buckets referring to this bug, as well as entries
# referring these buckets, to be deleted as well due to cascading delete.
bug.delete()
# Select all buckets that are empty and delete them
buckets = Bucket.objects.annotate(size=Count('crashentry')).filter(size=0)
for bucket in buckets:
bucket.delete()
# Select all entries that are older than x days
expiryDate = datetime.now().date() - timedelta(days=cleanup_crashes_after_days)
entries = CrashEntry.objects.filter(created__lt = expiryDate)
for entry in entries:
entry.delete()
|
Refactor cleanup command to use settings.py
|
Refactor cleanup command to use settings.py
|
Python
|
mpl-2.0
|
MozillaSecurity/FuzzManager,lazyparser/FuzzManager,cihatix/FuzzManager,cihatix/FuzzManager,lazyparser/FuzzManager,sigma-random/FuzzManager,cihatix/FuzzManager,cihatix/FuzzManager,MozillaSecurity/FuzzManager,sigma-random/FuzzManager,MozillaSecurity/FuzzManager,lazyparser/FuzzManager,sigma-random/FuzzManager,lazyparser/FuzzManager,MozillaSecurity/FuzzManager,sigma-random/FuzzManager
|
from django.core.management.base import NoArgsCommand
from crashmanager.models import CrashEntry, Bucket, Bug
from django.db.models.aggregates import Count
from datetime import datetime, timedelta
CLEANUP_CRASHES_AFTER_DAYS = 14
CLEANUP_FIXED_BUCKETS_AFTER_DAYS = 3
class Command(NoArgsCommand):
help = "Cleanup old crash entries."
def handle_noargs(self, **options):
# Select all buckets that have been closed for x days
expiryDate = datetime.now().date() - timedelta(days=CLEANUP_FIXED_BUCKETS_AFTER_DAYS)
bugs = Bug.objects.filter(closed__lt = expiryDate)
for bug in bugs:
# This delete causes buckets referring to this bug, as well as entries
# referring these buckets, to be deleted as well due to cascading delete.
bug.delete()
# Select all buckets that are empty and delete them
buckets = Bucket.objects.annotate(size=Count('crashentry')).filter(size=0)
for bucket in buckets:
bucket.delete()
# Select all entries that are older than x days
expiryDate = datetime.now().date() - timedelta(days=CLEANUP_CRASHES_AFTER_DAYS)
entries = CrashEntry.objects.filter(created__lt = expiryDate)
for entry in entries:
entry.delete()Refactor cleanup command to use settings.py
|
from django.core.management.base import NoArgsCommand
from crashmanager.models import CrashEntry, Bucket, Bug
from django.db.models.aggregates import Count
from datetime import datetime, timedelta
from django.conf import settings
class Command(NoArgsCommand):
help = "Cleanup old crash entries."
def handle_noargs(self, **options):
cleanup_crashes_after_days = getattr(settings, 'CLEANUP_CRASHES_AFTER_DAYS', 14)
cleanup_fixed_buckets_after_days = getattr(settings, 'CLEANUP_FIXED_BUCKETS_AFTER_DAYS', 3)
# Select all buckets that have been closed for x days
expiryDate = datetime.now().date() - timedelta(days=cleanup_fixed_buckets_after_days)
bugs = Bug.objects.filter(closed__lt = expiryDate)
for bug in bugs:
# This delete causes buckets referring to this bug, as well as entries
# referring these buckets, to be deleted as well due to cascading delete.
bug.delete()
# Select all buckets that are empty and delete them
buckets = Bucket.objects.annotate(size=Count('crashentry')).filter(size=0)
for bucket in buckets:
bucket.delete()
# Select all entries that are older than x days
expiryDate = datetime.now().date() - timedelta(days=cleanup_crashes_after_days)
entries = CrashEntry.objects.filter(created__lt = expiryDate)
for entry in entries:
entry.delete()
|
<commit_before>from django.core.management.base import NoArgsCommand
from crashmanager.models import CrashEntry, Bucket, Bug
from django.db.models.aggregates import Count
from datetime import datetime, timedelta
CLEANUP_CRASHES_AFTER_DAYS = 14
CLEANUP_FIXED_BUCKETS_AFTER_DAYS = 3
class Command(NoArgsCommand):
help = "Cleanup old crash entries."
def handle_noargs(self, **options):
# Select all buckets that have been closed for x days
expiryDate = datetime.now().date() - timedelta(days=CLEANUP_FIXED_BUCKETS_AFTER_DAYS)
bugs = Bug.objects.filter(closed__lt = expiryDate)
for bug in bugs:
# This delete causes buckets referring to this bug, as well as entries
# referring these buckets, to be deleted as well due to cascading delete.
bug.delete()
# Select all buckets that are empty and delete them
buckets = Bucket.objects.annotate(size=Count('crashentry')).filter(size=0)
for bucket in buckets:
bucket.delete()
# Select all entries that are older than x days
expiryDate = datetime.now().date() - timedelta(days=CLEANUP_CRASHES_AFTER_DAYS)
entries = CrashEntry.objects.filter(created__lt = expiryDate)
for entry in entries:
entry.delete()<commit_msg>Refactor cleanup command to use settings.py<commit_after>
|
from django.core.management.base import NoArgsCommand
from crashmanager.models import CrashEntry, Bucket, Bug
from django.db.models.aggregates import Count
from datetime import datetime, timedelta
from django.conf import settings
class Command(NoArgsCommand):
help = "Cleanup old crash entries."
def handle_noargs(self, **options):
cleanup_crashes_after_days = getattr(settings, 'CLEANUP_CRASHES_AFTER_DAYS', 14)
cleanup_fixed_buckets_after_days = getattr(settings, 'CLEANUP_FIXED_BUCKETS_AFTER_DAYS', 3)
# Select all buckets that have been closed for x days
expiryDate = datetime.now().date() - timedelta(days=cleanup_fixed_buckets_after_days)
bugs = Bug.objects.filter(closed__lt = expiryDate)
for bug in bugs:
# This delete causes buckets referring to this bug, as well as entries
# referring these buckets, to be deleted as well due to cascading delete.
bug.delete()
# Select all buckets that are empty and delete them
buckets = Bucket.objects.annotate(size=Count('crashentry')).filter(size=0)
for bucket in buckets:
bucket.delete()
# Select all entries that are older than x days
expiryDate = datetime.now().date() - timedelta(days=cleanup_crashes_after_days)
entries = CrashEntry.objects.filter(created__lt = expiryDate)
for entry in entries:
entry.delete()
|
from django.core.management.base import NoArgsCommand
from crashmanager.models import CrashEntry, Bucket, Bug
from django.db.models.aggregates import Count
from datetime import datetime, timedelta
CLEANUP_CRASHES_AFTER_DAYS = 14
CLEANUP_FIXED_BUCKETS_AFTER_DAYS = 3
class Command(NoArgsCommand):
help = "Cleanup old crash entries."
def handle_noargs(self, **options):
# Select all buckets that have been closed for x days
expiryDate = datetime.now().date() - timedelta(days=CLEANUP_FIXED_BUCKETS_AFTER_DAYS)
bugs = Bug.objects.filter(closed__lt = expiryDate)
for bug in bugs:
# This delete causes buckets referring to this bug, as well as entries
# referring these buckets, to be deleted as well due to cascading delete.
bug.delete()
# Select all buckets that are empty and delete them
buckets = Bucket.objects.annotate(size=Count('crashentry')).filter(size=0)
for bucket in buckets:
bucket.delete()
# Select all entries that are older than x days
expiryDate = datetime.now().date() - timedelta(days=CLEANUP_CRASHES_AFTER_DAYS)
entries = CrashEntry.objects.filter(created__lt = expiryDate)
for entry in entries:
entry.delete()Refactor cleanup command to use settings.pyfrom django.core.management.base import NoArgsCommand
from crashmanager.models import CrashEntry, Bucket, Bug
from django.db.models.aggregates import Count
from datetime import datetime, timedelta
from django.conf import settings
class Command(NoArgsCommand):
help = "Cleanup old crash entries."
def handle_noargs(self, **options):
cleanup_crashes_after_days = getattr(settings, 'CLEANUP_CRASHES_AFTER_DAYS', 14)
cleanup_fixed_buckets_after_days = getattr(settings, 'CLEANUP_FIXED_BUCKETS_AFTER_DAYS', 3)
# Select all buckets that have been closed for x days
expiryDate = datetime.now().date() - timedelta(days=cleanup_fixed_buckets_after_days)
bugs = Bug.objects.filter(closed__lt = expiryDate)
for bug in bugs:
# This delete causes buckets referring to this bug, as well as entries
# referring these buckets, to be deleted as well due to cascading delete.
bug.delete()
# Select all buckets that are empty and delete them
buckets = Bucket.objects.annotate(size=Count('crashentry')).filter(size=0)
for bucket in buckets:
bucket.delete()
# Select all entries that are older than x days
expiryDate = datetime.now().date() - timedelta(days=cleanup_crashes_after_days)
entries = CrashEntry.objects.filter(created__lt = expiryDate)
for entry in entries:
entry.delete()
|
<commit_before>from django.core.management.base import NoArgsCommand
from crashmanager.models import CrashEntry, Bucket, Bug
from django.db.models.aggregates import Count
from datetime import datetime, timedelta
CLEANUP_CRASHES_AFTER_DAYS = 14
CLEANUP_FIXED_BUCKETS_AFTER_DAYS = 3
class Command(NoArgsCommand):
help = "Cleanup old crash entries."
def handle_noargs(self, **options):
# Select all buckets that have been closed for x days
expiryDate = datetime.now().date() - timedelta(days=CLEANUP_FIXED_BUCKETS_AFTER_DAYS)
bugs = Bug.objects.filter(closed__lt = expiryDate)
for bug in bugs:
# This delete causes buckets referring to this bug, as well as entries
# referring these buckets, to be deleted as well due to cascading delete.
bug.delete()
# Select all buckets that are empty and delete them
buckets = Bucket.objects.annotate(size=Count('crashentry')).filter(size=0)
for bucket in buckets:
bucket.delete()
# Select all entries that are older than x days
expiryDate = datetime.now().date() - timedelta(days=CLEANUP_CRASHES_AFTER_DAYS)
entries = CrashEntry.objects.filter(created__lt = expiryDate)
for entry in entries:
entry.delete()<commit_msg>Refactor cleanup command to use settings.py<commit_after>from django.core.management.base import NoArgsCommand
from crashmanager.models import CrashEntry, Bucket, Bug
from django.db.models.aggregates import Count
from datetime import datetime, timedelta
from django.conf import settings
class Command(NoArgsCommand):
help = "Cleanup old crash entries."
def handle_noargs(self, **options):
cleanup_crashes_after_days = getattr(settings, 'CLEANUP_CRASHES_AFTER_DAYS', 14)
cleanup_fixed_buckets_after_days = getattr(settings, 'CLEANUP_FIXED_BUCKETS_AFTER_DAYS', 3)
# Select all buckets that have been closed for x days
expiryDate = datetime.now().date() - timedelta(days=cleanup_fixed_buckets_after_days)
bugs = Bug.objects.filter(closed__lt = expiryDate)
for bug in bugs:
# This delete causes buckets referring to this bug, as well as entries
# referring these buckets, to be deleted as well due to cascading delete.
bug.delete()
# Select all buckets that are empty and delete them
buckets = Bucket.objects.annotate(size=Count('crashentry')).filter(size=0)
for bucket in buckets:
bucket.delete()
# Select all entries that are older than x days
expiryDate = datetime.now().date() - timedelta(days=cleanup_crashes_after_days)
entries = CrashEntry.objects.filter(created__lt = expiryDate)
for entry in entries:
entry.delete()
|
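Because the handler now reads both thresholds with getattr(settings, NAME, default), each deployment can override them without code changes. A hypothetical settings.py fragment (names from the commit, values illustrative):

# settings.py -- both lines are optional; omitting one falls back to the
# in-code defaults (14 days for crashes, 3 days for fixed buckets).
CLEANUP_CRASHES_AFTER_DAYS = 30
CLEANUP_FIXED_BUCKETS_AFTER_DAYS = 7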
27c2878ab43ff1e38492e17971166e8fe3c8f1e1
|
tests/unit/test_test_setup.py
|
tests/unit/test_test_setup.py
|
"""Tests for correctly generated, working setup."""
from os import system
from sys import version_info
from . import pytest_generate_tests # noqa, pylint: disable=unused-import
# pylint: disable=too-few-public-methods
class TestTestSetup(object):
"""
Tests for verifying generated test setups of this cookiecutter,
executed several times with different values (test scenarios).
"""
scenarios = [
('django', {
'project_slug': 'django-project',
'framework': 'Django',
}),
# ('flask', {
# 'project_slug': 'flask-project',
# 'framework': 'Flask',
# }),
]
# pylint: disable=no-self-use
def test_test_setup(self, cookies, project_slug, framework):
"""
Generate a project and verify the test setup executes successfully.
"""
py_version = 'py%s%s' % version_info[:2]
result = cookies.bake(extra_context={
'project_slug': project_slug,
'framework': framework,
'tests': 'flake8,pylint,%s,behave' % py_version,
})
assert result.exit_code == 0
assert result.exception is None
tox_ini = result.project.join('tox.ini')
assert tox_ini.isfile()
exit_code = system('tox -c %s' % tox_ini)
assert exit_code == 0, 'Running tests in generated project fails.'
|
"""Tests for correctly generated, working setup."""
from os import system
from sys import version_info
from . import pytest_generate_tests # noqa, pylint: disable=unused-import
# pylint: disable=too-few-public-methods
class TestTestSetup(object):
"""
Tests for verifying generated test setups of this cookiecutter,
executed several times with different values (test scenarios).
"""
scenarios = [
('django', {
'project_slug': 'django-project',
'framework': 'Django',
}),
# ('flask', {
# 'project_slug': 'flask-project',
# 'framework': 'Flask',
# }),
]
# pylint: disable=no-self-use
def test_test_setup(self, cookies, project_slug, framework):
"""
Generate a project and verify the test setup executes successfully.
"""
major, minor = version_info[:2]
py_version = 'py%s%s' % (major, minor)
result = cookies.bake(extra_context={
'project_slug': project_slug,
'framework': framework,
'tests': 'flake8,pylint,%s,behave' % py_version,
})
assert result.exit_code == 0, \
'Cookiecutter exits with %(exit_code)s:' \
' %(exception)s' % result.__dict__
assert result.exception is None
tox_ini = result.project.join('tox.ini')
assert tox_ini.isfile()
exit_code = system('tox -c %s' % tox_ini)
assert exit_code == 0, 'Running tests in generated project fails.'
|
Make py_version and assertion more readable
|
Make py_version and assertion more readable
|
Python
|
apache-2.0
|
painless-software/painless-continuous-delivery,painless-software/painless-continuous-delivery,painless-software/painless-continuous-delivery,painless-software/painless-continuous-delivery
|
"""Tests for correctly generated, working setup."""
from os import system
from sys import version_info
from . import pytest_generate_tests # noqa, pylint: disable=unused-import
# pylint: disable=too-few-public-methods
class TestTestSetup(object):
"""
Tests for verifying generated test setups of this cookiecutter,
executed several times with different values (test scenarios).
"""
scenarios = [
('django', {
'project_slug': 'django-project',
'framework': 'Django',
}),
# ('flask', {
# 'project_slug': 'flask-project',
# 'framework': 'Flask',
# }),
]
# pylint: disable=no-self-use
def test_test_setup(self, cookies, project_slug, framework):
"""
Generate a project and verify the test setup executes successfully.
"""
py_version = 'py%s%s' % version_info[:2]
result = cookies.bake(extra_context={
'project_slug': project_slug,
'framework': framework,
'tests': 'flake8,pylint,%s,behave' % py_version,
})
assert result.exit_code == 0
assert result.exception is None
tox_ini = result.project.join('tox.ini')
assert tox_ini.isfile()
exit_code = system('tox -c %s' % tox_ini)
assert exit_code == 0, 'Running tests in generated project fails.'
Make py_version and assertion more readable
|
"""Tests for correctly generated, working setup."""
from os import system
from sys import version_info
from . import pytest_generate_tests # noqa, pylint: disable=unused-import
# pylint: disable=too-few-public-methods
class TestTestSetup(object):
"""
Tests for verifying generated test setups of this cookiecutter,
executed several times with different values (test scenarios).
"""
scenarios = [
('django', {
'project_slug': 'django-project',
'framework': 'Django',
}),
# ('flask', {
# 'project_slug': 'flask-project',
# 'framework': 'Flask',
# }),
]
# pylint: disable=no-self-use
def test_test_setup(self, cookies, project_slug, framework):
"""
Generate a project and verify the test setup executes successfully.
"""
major, minor = version_info[:2]
py_version = 'py%s%s' % (major, minor)
result = cookies.bake(extra_context={
'project_slug': project_slug,
'framework': framework,
'tests': 'flake8,pylint,%s,behave' % py_version,
})
assert result.exit_code == 0, \
'Cookiecutter exits with %(exit_code)s:' \
' %(exception)s' % result.__dict__
assert result.exception is None
tox_ini = result.project.join('tox.ini')
assert tox_ini.isfile()
exit_code = system('tox -c %s' % tox_ini)
assert exit_code == 0, 'Running tests in generated project fails.'
|
<commit_before>"""Tests for correctly generated, working setup."""
from os import system
from sys import version_info
from . import pytest_generate_tests # noqa, pylint: disable=unused-import
# pylint: disable=too-few-public-methods
class TestTestSetup(object):
"""
Tests for verifying generated test setups of this cookiecutter,
executed several times with different values (test scenarios).
"""
scenarios = [
('django', {
'project_slug': 'django-project',
'framework': 'Django',
}),
# ('flask', {
# 'project_slug': 'flask-project',
# 'framework': 'Flask',
# }),
]
# pylint: disable=no-self-use
def test_test_setup(self, cookies, project_slug, framework):
"""
Generate a project and verify the test setup executes successfully.
"""
py_version = 'py%s%s' % version_info[:2]
result = cookies.bake(extra_context={
'project_slug': project_slug,
'framework': framework,
'tests': 'flake8,pylint,%s,behave' % py_version,
})
assert result.exit_code == 0
assert result.exception is None
tox_ini = result.project.join('tox.ini')
assert tox_ini.isfile()
exit_code = system('tox -c %s' % tox_ini)
assert exit_code == 0, 'Running tests in generated project fails.'
<commit_msg>Make py_version and assertion more readable<commit_after>
|
"""Tests for correctly generated, working setup."""
from os import system
from sys import version_info
from . import pytest_generate_tests # noqa, pylint: disable=unused-import
# pylint: disable=too-few-public-methods
class TestTestSetup(object):
"""
Tests for verifying generated test setups of this cookiecutter,
executed several times with different values (test scenarios).
"""
scenarios = [
('django', {
'project_slug': 'django-project',
'framework': 'Django',
}),
# ('flask', {
# 'project_slug': 'flask-project',
# 'framework': 'Flask',
# }),
]
# pylint: disable=no-self-use
def test_test_setup(self, cookies, project_slug, framework):
"""
Generate a project and verify the test setup executes successfully.
"""
major, minor = version_info[:2]
py_version = 'py%s%s' % (major, minor)
result = cookies.bake(extra_context={
'project_slug': project_slug,
'framework': framework,
'tests': 'flake8,pylint,%s,behave' % py_version,
})
assert result.exit_code == 0, \
'Cookiecutter exits with %(exit_code)s:' \
' %(exception)s' % result.__dict__
assert result.exception is None
tox_ini = result.project.join('tox.ini')
assert tox_ini.isfile()
exit_code = system('tox -c %s' % tox_ini)
assert exit_code == 0, 'Running tests in generated project fails.'
|
"""Tests for correctly generated, working setup."""
from os import system
from sys import version_info
from . import pytest_generate_tests # noqa, pylint: disable=unused-import
# pylint: disable=too-few-public-methods
class TestTestSetup(object):
"""
Tests for verifying generated test setups of this cookiecutter,
executed several times with different values (test scenarios).
"""
scenarios = [
('django', {
'project_slug': 'django-project',
'framework': 'Django',
}),
# ('flask', {
# 'project_slug': 'flask-project',
# 'framework': 'Flask',
# }),
]
# pylint: disable=no-self-use
def test_test_setup(self, cookies, project_slug, framework):
"""
Generate a project and verify the test setup executes successfully.
"""
py_version = 'py%s%s' % version_info[:2]
result = cookies.bake(extra_context={
'project_slug': project_slug,
'framework': framework,
'tests': 'flake8,pylint,%s,behave' % py_version,
})
assert result.exit_code == 0
assert result.exception is None
tox_ini = result.project.join('tox.ini')
assert tox_ini.isfile()
exit_code = system('tox -c %s' % tox_ini)
assert exit_code == 0, 'Running tests in generated project fails.'
Make py_version and assertion more readable"""Tests for correctly generated, working setup."""
from os import system
from sys import version_info
from . import pytest_generate_tests # noqa, pylint: disable=unused-import
# pylint: disable=too-few-public-methods
class TestTestSetup(object):
"""
Tests for verifying generated test setups of this cookiecutter,
executed several times with different values (test scenarios).
"""
scenarios = [
('django', {
'project_slug': 'django-project',
'framework': 'Django',
}),
# ('flask', {
# 'project_slug': 'flask-project',
# 'framework': 'Flask',
# }),
]
# pylint: disable=no-self-use
def test_test_setup(self, cookies, project_slug, framework):
"""
Generate a project and verify the test setup executes successfully.
"""
major, minor = version_info[:2]
py_version = 'py%s%s' % (major, minor)
result = cookies.bake(extra_context={
'project_slug': project_slug,
'framework': framework,
'tests': 'flake8,pylint,%s,behave' % py_version,
})
assert result.exit_code == 0, \
'Cookiecutter exits with %(exit_code)s:' \
' %(exception)s' % result.__dict__
assert result.exception is None
tox_ini = result.project.join('tox.ini')
assert tox_ini.isfile()
exit_code = system('tox -c %s' % tox_ini)
assert exit_code == 0, 'Running tests in generated project fails.'
|
<commit_before>"""Tests for correctly generated, working setup."""
from os import system
from sys import version_info
from . import pytest_generate_tests # noqa, pylint: disable=unused-import
# pylint: disable=too-few-public-methods
class TestTestSetup(object):
"""
Tests for verifying generated test setups of this cookiecutter,
executed several times with different values (test scenarios).
"""
scenarios = [
('django', {
'project_slug': 'django-project',
'framework': 'Django',
}),
# ('flask', {
# 'project_slug': 'flask-project',
# 'framework': 'Flask',
# }),
]
# pylint: disable=no-self-use
def test_test_setup(self, cookies, project_slug, framework):
"""
Generate a project and verify the test setup executes successfully.
"""
py_version = 'py%s%s' % version_info[:2]
result = cookies.bake(extra_context={
'project_slug': project_slug,
'framework': framework,
'tests': 'flake8,pylint,%s,behave' % py_version,
})
assert result.exit_code == 0
assert result.exception is None
tox_ini = result.project.join('tox.ini')
assert tox_ini.isfile()
exit_code = system('tox -c %s' % tox_ini)
assert exit_code == 0, 'Running tests in generated project fails.'
<commit_msg>Make py_version and assertion more readable<commit_after>"""Tests for correctly generated, working setup."""
from os import system
from sys import version_info
from . import pytest_generate_tests # noqa, pylint: disable=unused-import
# pylint: disable=too-few-public-methods
class TestTestSetup(object):
"""
Tests for verifying generated test setups of this cookiecutter,
executed several times with different values (test scenarios).
"""
scenarios = [
('django', {
'project_slug': 'django-project',
'framework': 'Django',
}),
# ('flask', {
# 'project_slug': 'flask-project',
# 'framework': 'Flask',
# }),
]
# pylint: disable=no-self-use
def test_test_setup(self, cookies, project_slug, framework):
"""
Generate a project and verify the test setup executes successfully.
"""
major, minor = version_info[:2]
py_version = 'py%s%s' % (major, minor)
result = cookies.bake(extra_context={
'project_slug': project_slug,
'framework': framework,
'tests': 'flake8,pylint,%s,behave' % py_version,
})
assert result.exit_code == 0, \
'Cookiecutter exits with %(exit_code)s:' \
' %(exception)s' % result.__dict__
assert result.exception is None
tox_ini = result.project.join('tox.ini')
assert tox_ini.isfile()
exit_code = system('tox -c %s' % tox_ini)
assert exit_code == 0, 'Running tests in generated project fails.'
|
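Both spellings of py_version in this record produce the same tag, since version_info[:2] is already a 2-tuple matching the two %s placeholders; the rewrite is purely for readability. A standalone check of that equivalence:

from sys import version_info

major, minor = version_info[:2]
assert 'py%s%s' % (major, minor) == 'py%s%s' % version_info[:2]
print('py%s%s' % (major, minor))  # e.g. 'py36', depending on the interpreter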
a1cf304f9941b811b33e1b2d786b6f38bc514546
|
anafero/templatetags/anafero_tags.py
|
anafero/templatetags/anafero_tags.py
|
from django import template
from django.contrib.contenttypes.models import ContentType
from anafero.models import ReferralResponse, ACTION_DISPLAY
register = template.Library()
@register.inclusion_tag("anafero/_create_referral_form.html")
def create_referral(url, obj=None):
if obj:
return {"url": url, "obj": obj, "obj_ct": ContentType.objects.get_for_model(obj)}
else:
return {"url": url, "obj": "", "obj_ct": ""}
@register.assignment_tag
def referral_responses(user):
return ReferralResponse.objects.filter(
referral__user=user
).order_by("-created_at")
@register.filter
def action_display(value):
return ACTION_DISPLAY.get(value, value)
|
from django import template
from django.contrib.contenttypes.models import ContentType
from anafero.models import ReferralResponse, ACTION_DISPLAY
register = template.Library()
@register.inclusion_tag("anafero/_create_referral_form.html", takes_context=True)
def create_referral(context, url, obj=None):
if obj:
context.update(
{"url": url, "obj": obj, "obj_ct": ContentType.objects.get_for_model(obj)}
)
else:
context.update(
{"url": url, "obj": "", "obj_ct": ""}
)
return context
@register.assignment_tag
def referral_responses(user):
return ReferralResponse.objects.filter(
referral__user=user
).order_by("-created_at")
@register.filter
def action_display(value):
return ACTION_DISPLAY.get(value, value)
|
Add full context to the create_referral tag
|
Add full context to the create_referral tag
|
Python
|
mit
|
pinax/pinax-referrals,pinax/pinax-referrals
|
from django import template
from django.contrib.contenttypes.models import ContentType
from anafero.models import ReferralResponse, ACTION_DISPLAY
register = template.Library()
@register.inclusion_tag("anafero/_create_referral_form.html")
def create_referral(url, obj=None):
if obj:
return {"url": url, "obj": obj, "obj_ct": ContentType.objects.get_for_model(obj)}
else:
return {"url": url, "obj": "", "obj_ct": ""}
@register.assignment_tag
def referral_responses(user):
return ReferralResponse.objects.filter(
referral__user=user
).order_by("-created_at")
@register.filter
def action_display(value):
return ACTION_DISPLAY.get(value, value)
Add full context to the create_referral tag
|
from django import template
from django.contrib.contenttypes.models import ContentType
from anafero.models import ReferralResponse, ACTION_DISPLAY
register = template.Library()
@register.inclusion_tag("anafero/_create_referral_form.html", takes_context=True)
def create_referral(context, url, obj=None):
if obj:
context.update(
{"url": url, "obj": obj, "obj_ct": ContentType.objects.get_for_model(obj)}
)
else:
context.update(
{"url": url, "obj": "", "obj_ct": ""}
)
return context
@register.assignment_tag
def referral_responses(user):
return ReferralResponse.objects.filter(
referral__user=user
).order_by("-created_at")
@register.filter
def action_display(value):
return ACTION_DISPLAY.get(value, value)
|
<commit_before>from django import template
from django.contrib.contenttypes.models import ContentType
from anafero.models import ReferralResponse, ACTION_DISPLAY
register = template.Library()
@register.inclusion_tag("anafero/_create_referral_form.html")
def create_referral(url, obj=None):
if obj:
return {"url": url, "obj": obj, "obj_ct": ContentType.objects.get_for_model(obj)}
else:
return {"url": url, "obj": "", "obj_ct": ""}
@register.assignment_tag
def referral_responses(user):
return ReferralResponse.objects.filter(
referral__user=user
).order_by("-created_at")
@register.filter
def action_display(value):
return ACTION_DISPLAY.get(value, value)
<commit_msg>Add full context to the create_referral tag<commit_after>
|
from django import template
from django.contrib.contenttypes.models import ContentType
from anafero.models import ReferralResponse, ACTION_DISPLAY
register = template.Library()
@register.inclusion_tag("anafero/_create_referral_form.html", takes_context=True)
def create_referral(context, url, obj=None):
if obj:
context.update(
{"url": url, "obj": obj, "obj_ct": ContentType.objects.get_for_model(obj)}
)
else:
context.update(
{"url": url, "obj": "", "obj_ct": ""}
)
return context
@register.assignment_tag
def referral_responses(user):
return ReferralResponse.objects.filter(
referral__user=user
).order_by("-created_at")
@register.filter
def action_display(value):
return ACTION_DISPLAY.get(value, value)
|
from django import template
from django.contrib.contenttypes.models import ContentType
from anafero.models import ReferralResponse, ACTION_DISPLAY
register = template.Library()
@register.inclusion_tag("anafero/_create_referral_form.html")
def create_referral(url, obj=None):
if obj:
return {"url": url, "obj": obj, "obj_ct": ContentType.objects.get_for_model(obj)}
else:
return {"url": url, "obj": "", "obj_ct": ""}
@register.assignment_tag
def referral_responses(user):
return ReferralResponse.objects.filter(
referral__user=user
).order_by("-created_at")
@register.filter
def action_display(value):
return ACTION_DISPLAY.get(value, value)
Add full context to the create_referral tagfrom django import template
from django.contrib.contenttypes.models import ContentType
from anafero.models import ReferralResponse, ACTION_DISPLAY
register = template.Library()
@register.inclusion_tag("anafero/_create_referral_form.html", takes_context=True)
def create_referral(context, url, obj=None):
if obj:
context.update(
{"url": url, "obj": obj, "obj_ct": ContentType.objects.get_for_model(obj)}
)
else:
context.update(
{"url": url, "obj": "", "obj_ct": ""}
)
return context
@register.assignment_tag
def referral_responses(user):
return ReferralResponse.objects.filter(
referral__user=user
).order_by("-created_at")
@register.filter
def action_display(value):
return ACTION_DISPLAY.get(value, value)
|
<commit_before>from django import template
from django.contrib.contenttypes.models import ContentType
from anafero.models import ReferralResponse, ACTION_DISPLAY
register = template.Library()
@register.inclusion_tag("anafero/_create_referral_form.html")
def create_referral(url, obj=None):
if obj:
return {"url": url, "obj": obj, "obj_ct": ContentType.objects.get_for_model(obj)}
else:
return {"url": url, "obj": "", "obj_ct": ""}
@register.assignment_tag
def referral_responses(user):
return ReferralResponse.objects.filter(
referral__user=user
).order_by("-created_at")
@register.filter
def action_display(value):
return ACTION_DISPLAY.get(value, value)
<commit_msg>Add full context to the create_referral tag<commit_after>from django import template
from django.contrib.contenttypes.models import ContentType
from anafero.models import ReferralResponse, ACTION_DISPLAY
register = template.Library()
@register.inclusion_tag("anafero/_create_referral_form.html", takes_context=True)
def create_referral(context, url, obj=None):
if obj:
context.update(
{"url": url, "obj": obj, "obj_ct": ContentType.objects.get_for_model(obj)}
)
else:
context.update(
{"url": url, "obj": "", "obj_ct": ""}
)
return context
@register.assignment_tag
def referral_responses(user):
return ReferralResponse.objects.filter(
referral__user=user
).order_by("-created_at")
@register.filter
def action_display(value):
return ACTION_DISPLAY.get(value, value)
|
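Note that update() on a Django template context pushes a new layer onto the context stack rather than mutating a flat dict, so variables from the calling template remain resolvable next to the tag's additions. A minimal sketch (requires Django installed; values illustrative):

from django.template import Context

ctx = Context({'request': 'outer value'})
ctx.update({'url': '/r/abc', 'obj': '', 'obj_ct': ''})  # pushes a new layer
print(ctx['url'], ctx['request'])  # both resolve: /r/abc outer value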
e709ea42801c7555d683c5d3eda4d22b164938eb
|
TSatPy/tests/discrete_test.py
|
TSatPy/tests/discrete_test.py
|
import unittest
from TSatPy import discrete
class TestDerivative(unittest.TestCase):
def test_derivative(self):
print 'aoue'
d = discrete.Derivative()
return
d.update(4)
print d.val, d
self.assertTrue(True)
if __name__ == "__main__":
unittest.main()
|
import unittest
from mock import patch
from TSatPy import discrete
import time
class TestDerivative(unittest.TestCase):
@patch('time.time')
def test_derivative(self, mock_time, *args):
mock_time.return_value = 1234
d = discrete.Derivative()
self.assertEquals(None, d.last_time)
self.assertEquals(None, d.last_value)
self.assertEquals(None, d.val)
d.update(4)
self.assertEquals(1234, d.last_time)
self.assertEquals(4, d.last_value)
self.assertEquals(None, d.val)
d.update(6)
self.assertEquals(1234, d.last_time)
self.assertEquals(6, d.last_value)
self.assertEquals(None, d.val)
mock_time.return_value = 1237
d.update(10)
self.assertEquals(1237, d.last_time)
self.assertEquals(10, d.last_value)
self.assertEquals(4/3.0, d.val)
s = '<Derivative rate:1.33333333333>'
self.assertEquals(s, str(d))
if __name__ == "__main__":
unittest.main()
|
Test coverage for the discrete derivative class
|
Test coverage for the discrete derivative class
|
Python
|
mit
|
MathYourLife/TSatPy-thesis,MathYourLife/TSatPy-thesis,MathYourLife/TSatPy-thesis,MathYourLife/TSatPy-thesis,MathYourLife/TSatPy-thesis
|
import unittest
from TSatPy import discrete
class TestDerivative(unittest.TestCase):
def test_derivative(self):
print 'aoue'
d = discrete.Derivative()
return
d.update(4)
print d.val, d
self.assertTrue(True)
if __name__ == "__main__":
unittest.main()
Test coverage for the discrete derivative class
|
import unittest
from mock import patch
from TSatPy import discrete
import time
class TestDerivative(unittest.TestCase):
@patch('time.time')
def test_derivative(self, mock_time, *args):
mock_time.return_value = 1234
d = discrete.Derivative()
self.assertEquals(None, d.last_time)
self.assertEquals(None, d.last_value)
self.assertEquals(None, d.val)
d.update(4)
self.assertEquals(1234, d.last_time)
self.assertEquals(4, d.last_value)
self.assertEquals(None, d.val)
d.update(6)
self.assertEquals(1234, d.last_time)
self.assertEquals(6, d.last_value)
self.assertEquals(None, d.val)
mock_time.return_value = 1237
d.update(10)
self.assertEquals(1237, d.last_time)
self.assertEquals(10, d.last_value)
self.assertEquals(4/3.0, d.val)
s = '<Derivative rate:1.33333333333>'
self.assertEquals(s, str(d))
if __name__ == "__main__":
unittest.main()
|
<commit_before>import unittest
from TSatPy import discrete
class TestDerivative(unittest.TestCase):
def test_derivative(self):
print 'aoue'
d = discrete.Derivative()
return
d.update(4)
print d.val, d
self.assertTrue(True)
if __name__ == "__main__":
unittest.main()
<commit_msg>Test coverage for the discrete derivative class<commit_after>
|
import unittest
from mock import patch
from TSatPy import discrete
import time
class TestDerivative(unittest.TestCase):
@patch('time.time')
def test_derivative(self, mock_time, *args):
mock_time.return_value = 1234
d = discrete.Derivative()
self.assertEquals(None, d.last_time)
self.assertEquals(None, d.last_value)
self.assertEquals(None, d.val)
d.update(4)
self.assertEquals(1234, d.last_time)
self.assertEquals(4, d.last_value)
self.assertEquals(None, d.val)
d.update(6)
self.assertEquals(1234, d.last_time)
self.assertEquals(6, d.last_value)
self.assertEquals(None, d.val)
mock_time.return_value = 1237
d.update(10)
self.assertEquals(1237, d.last_time)
self.assertEquals(10, d.last_value)
self.assertEquals(4/3.0, d.val)
s = '<Derivative rate:1.33333333333>'
self.assertEquals(s, str(d))
if __name__ == "__main__":
unittest.main()
|
import unittest
from TSatPy import discrete
class TestDerivative(unittest.TestCase):
def test_derivative(self):
print 'aoue'
d = discrete.Derivative()
return
d.update(4)
print d.val, d
self.assertTrue(True)
if __name__ == "__main__":
unittest.main()
Test coverage for the discrete derivative classimport unittest
from mock import patch
from TSatPy import discrete
import time
class TestDerivative(unittest.TestCase):
@patch('time.time')
def test_derivative(self, mock_time, *args):
mock_time.return_value = 1234
d = discrete.Derivative()
self.assertEquals(None, d.last_time)
self.assertEquals(None, d.last_value)
self.assertEquals(None, d.val)
d.update(4)
self.assertEquals(1234, d.last_time)
self.assertEquals(4, d.last_value)
self.assertEquals(None, d.val)
d.update(6)
self.assertEquals(1234, d.last_time)
self.assertEquals(6, d.last_value)
self.assertEquals(None, d.val)
mock_time.return_value = 1237
d.update(10)
self.assertEquals(1237, d.last_time)
self.assertEquals(10, d.last_value)
self.assertEquals(4/3.0, d.val)
s = '<Derivative rate:1.33333333333>'
self.assertEquals(s, str(d))
if __name__ == "__main__":
unittest.main()
|
<commit_before>import unittest
from TSatPy import discrete
class TestDerivative(unittest.TestCase):
def test_derivative(self):
print 'aoue'
d = discrete.Derivative()
return
d.update(4)
print d.val, d
self.assertTrue(True)
if __name__ == "__main__":
unittest.main()
<commit_msg>Test coverage for the discrete derivative class<commit_after>import unittest
from mock import patch
from TSatPy import discrete
import time
class TestDerivative(unittest.TestCase):
@patch('time.time')
def test_derivative(self, mock_time, *args):
mock_time.return_value = 1234
d = discrete.Derivative()
self.assertEquals(None, d.last_time)
self.assertEquals(None, d.last_value)
self.assertEquals(None, d.val)
d.update(4)
self.assertEquals(1234, d.last_time)
self.assertEquals(4, d.last_value)
self.assertEquals(None, d.val)
d.update(6)
self.assertEquals(1234, d.last_time)
self.assertEquals(6, d.last_value)
self.assertEquals(None, d.val)
mock_time.return_value = 1237
d.update(10)
self.assertEquals(1237, d.last_time)
self.assertEquals(10, d.last_value)
self.assertEquals(4/3.0, d.val)
s = '<Derivative rate:1.33333333333>'
self.assertEquals(s, str(d))
if __name__ == "__main__":
unittest.main()
|
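The test pins time.time with mock.patch so the final rate is deterministic: (10 - 6) / (1237 - 1234) = 4/3. A self-contained sketch of the same patching idiom against a toy stand-in (not the real TSatPy class):

import time
from unittest import mock

class Derivative(object):
    # Toy stand-in mirroring the interface the test exercises.
    def __init__(self):
        self.last_time = self.last_value = self.val = None

    def update(self, value):
        now = time.time()
        if self.last_time is not None and now != self.last_time:
            self.val = (value - self.last_value) / float(now - self.last_time)
        self.last_time, self.last_value = now, value

with mock.patch('time.time') as mock_time:
    mock_time.return_value = 1234
    d = Derivative()
    d.update(4)
    d.update(6)   # same mocked timestamp: no rate yet
    mock_time.return_value = 1237
    d.update(10)
    print(d.val)  # 1.333... == 4/3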
c6d396e8ec29a3641ce1d994c386c9ebea353cd8
|
shipyard2/shipyard2/rules/capnps.py
|
shipyard2/shipyard2/rules/capnps.py
|
"""Helpers for writing rules that depends on //py/g1/third-party/capnp."""
__all__ = [
'make_global_options',
]
def make_global_options(ps):
return [
'compile_schemas',
*('--import-path=%s/codex' % path for path in ps['//bases:roots']),
]
|
"""Helpers for writing rules that depends on //py/g1/third-party/capnp."""
__all__ = [
'make_global_options',
]
def make_global_options(ps):
return [
'compile_schemas',
'--import-path=%s' %
':'.join(str(path / 'codex') for path in ps['//bases:roots']),
]
|
Fix joining of capnp import paths
|
Fix joining of capnp import paths
|
Python
|
mit
|
clchiou/garage,clchiou/garage,clchiou/garage,clchiou/garage
|
"""Helpers for writing rules that depends on //py/g1/third-party/capnp."""
__all__ = [
'make_global_options',
]
def make_global_options(ps):
return [
'compile_schemas',
*('--import-path=%s/codex' % path for path in ps['//bases:roots']),
]
Fix joining of capnp import paths
|
"""Helpers for writing rules that depends on //py/g1/third-party/capnp."""
__all__ = [
'make_global_options',
]
def make_global_options(ps):
return [
'compile_schemas',
'--import-path=%s' %
':'.join(str(path / 'codex') for path in ps['//bases:roots']),
]
|
<commit_before>"""Helpers for writing rules that depends on //py/g1/third-party/capnp."""
__all__ = [
'make_global_options',
]
def make_global_options(ps):
return [
'compile_schemas',
*('--import-path=%s/codex' % path for path in ps['//bases:roots']),
]
<commit_msg>Fix joining of capnp import paths<commit_after>
|
"""Helpers for writing rules that depends on //py/g1/third-party/capnp."""
__all__ = [
'make_global_options',
]
def make_global_options(ps):
return [
'compile_schemas',
'--import-path=%s' %
':'.join(str(path / 'codex') for path in ps['//bases:roots']),
]
|
"""Helpers for writing rules that depends on //py/g1/third-party/capnp."""
__all__ = [
'make_global_options',
]
def make_global_options(ps):
return [
'compile_schemas',
*('--import-path=%s/codex' % path for path in ps['//bases:roots']),
]
Fix joining of capnp import paths"""Helpers for writing rules that depend on //py/g1/third-party/capnp."""
__all__ = [
'make_global_options',
]
def make_global_options(ps):
return [
'compile_schemas',
'--import-path=%s' %
':'.join(str(path / 'codex') for path in ps['//bases:roots']),
]
|
<commit_before>"""Helpers for writing rules that depends on //py/g1/third-party/capnp."""
__all__ = [
'make_global_options',
]
def make_global_options(ps):
return [
'compile_schemas',
*('--import-path=%s/codex' % path for path in ps['//bases:roots']),
]
<commit_msg>Fix joining of capnp import paths<commit_after>"""Helpers for writing rules that depend on //py/g1/third-party/capnp."""
__all__ = [
'make_global_options',
]
def make_global_options(ps):
return [
'compile_schemas',
'--import-path=%s' %
':'.join(str(path / 'codex') for path in ps['//bases:roots']),
]
|
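A standalone sketch of the two joining strategies in this record (paths illustrative): the old form emitted one --import-path flag per root, while the fix emits a single flag whose value is colon-joined, which is evidently what the compile_schemas step expects, per the commit subject:

from pathlib import Path

roots = [Path('/src/a'), Path('/src/b')]

before = ['--import-path=%s/codex' % p for p in roots]
after = '--import-path=%s' % ':'.join(str(p / 'codex') for p in roots)

print(before)  # ['--import-path=/src/a/codex', '--import-path=/src/b/codex']
print(after)   # --import-path=/src/a/codex:/src/b/codex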
ff6f0204655439e93bab69dc23a9d1d7d0262cb9
|
dog/context.py
|
dog/context.py
|
__all__ = ['Context']
from lifesaver import bot
class Context(bot.Context):
@property
def pool(self):
return self.bot.pool
|
__all__ = ['Context']
from lifesaver import bot
class Context(bot.Context):
@property
def pool(self):
return self.bot.pool
def tick(self, variant: bool = True) -> str:
if variant:
return self.emoji('green_tick')
else:
return self.emoji('red_tick')
def emoji(self, name: str) -> str:
return self.bot.config.emoji[name]
|
Add emoji shortcuts to Context
|
Add emoji shortcuts to Context
|
Python
|
mit
|
slice/dogbot,sliceofcode/dogbot,sliceofcode/dogbot,slice/dogbot,slice/dogbot
|
__all__ = ['Context']
from lifesaver import bot
class Context(bot.Context):
@property
def pool(self):
return self.bot.pool
Add emoji shortcuts to Context
|
__all__ = ['Context']
from lifesaver import bot
class Context(bot.Context):
@property
def pool(self):
return self.bot.pool
def tick(self, variant: bool = True) -> str:
if variant:
return self.emoji('green_tick')
else:
return self.emoji('red_tick')
def emoji(self, name: str) -> str:
return self.bot.config.emoji[name]
|
<commit_before>__all__ = ['Context']
from lifesaver import bot
class Context(bot.Context):
@property
def pool(self):
return self.bot.pool
<commit_msg>Add emoji shortcuts to Context<commit_after>
|
__all__ = ['Context']
from lifesaver import bot
class Context(bot.Context):
@property
def pool(self):
return self.bot.pool
def tick(self, variant: bool = True) -> str:
if variant:
return self.emoji('green_tick')
else:
return self.emoji('red_tick')
def emoji(self, name: str) -> str:
return self.bot.config.emoji[name]
|
__all__ = ['Context']
from lifesaver import bot
class Context(bot.Context):
@property
def pool(self):
return self.bot.pool
Add emoji shortcuts to Context__all__ = ['Context']
from lifesaver import bot
class Context(bot.Context):
@property
def pool(self):
return self.bot.pool
def tick(self, variant: bool = True) -> str:
if variant:
return self.emoji('green_tick')
else:
return self.emoji('red_tick')
def emoji(self, name: str) -> str:
return self.bot.config.emoji[name]
|
<commit_before>__all__ = ['Context']
from lifesaver import bot
class Context(bot.Context):
@property
def pool(self):
return self.bot.pool
<commit_msg>Add emoji shortcuts to Context<commit_after>__all__ = ['Context']
from lifesaver import bot
class Context(bot.Context):
@property
def pool(self):
return self.bot.pool
def tick(self, variant: bool = True) -> str:
if variant:
return self.emoji('green_tick')
else:
return self.emoji('red_tick')
def emoji(self, name: str) -> str:
return self.bot.config.emoji[name]
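A quick usage sketch of the new shortcuts, with a stand-in bot replacing the lifesaver machinery (the emoji names and values below are hypothetical):

class FakeBot:
    class config:
        emoji = {'green_tick': '<:green_tick:1>', 'red_tick': '<:red_tick:2>'}

class DemoContext:
    # Mirrors the two shortcuts added to Context, minus the Discord plumbing.
    bot = FakeBot()

    def emoji(self, name: str) -> str:
        return self.bot.config.emoji[name]

    def tick(self, variant: bool = True) -> str:
        return self.emoji('green_tick' if variant else 'red_tick')

ctx = DemoContext()
assert ctx.tick() == '<:green_tick:1>'
assert ctx.tick(False) == '<:red_tick:2>'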
|
e4e4f92df9401858bf7ac527c3adcf08a7da7c5f
|
github_fork_repos.py
|
github_fork_repos.py
|
#!/bin/env python
"""
Fork github repos
"""
# technical debt:
# --------------
from github3 import login
from getpass import getuser
import os
import sys
import time
token = ''
debug = os.getenv("DM_SQUARE_DEBUG")
user = getuser()
if debug:
print user
# I have cut and pasted code
# I am a bad person
# I promise to make a module
file_credential = os.path.expanduser('~/.sq_github_token')
if not os.path.isfile(file_credential):
print "You don't have a token in {0} ".format(file_credential)
print "Have you run github_auth.py?"
sys.exit(1)
with open(file_credential, 'r') as fd:
token = fd.readline().strip()
gh = login(token=token)
# get the organization object
organization = gh.organization('LSST')
# list of all LSST repos
repos = [g for g in organization.iter_repos()]
if debug:
print repos
for repo in repos:
if debug:
print repo.name
forked_repo = repo.create_fork(user+'-shadow')
forked_name = forked_repo.name
# Trap previous fork with dm_ prefix
if not forked_name.startswith("dm_"):
newname = "dm_" + forked_name
forked_repo.edit(newname)
|
#!/bin/env python
"""
Fork github repos
"""
# technical debt:
# --------------
from github3 import login
from getpass import getuser
import os
import sys
import time
token = ''
debug = os.getenv("DM_SQUARE_DEBUG")
user = getuser()
if debug:
print user
# I have cut and pasted code
# I am a bad person
# I promise to make a module
file_credential = os.path.expanduser('~/.sq_github_token')
if not os.path.isfile(file_credential):
print "You don't have a token in {0} ".format(file_credential)
print "Have you run github_auth.py?"
sys.exit(1)
with open(file_credential, 'r') as fd:
token = fd.readline().strip()
gh = login(token=token)
# get the organization object
organization = gh.organization('lsst')
# list of all LSST repos
repos = [g for g in organization.iter_repos()]
if debug:
print repos
for repo in repos:
if debug:
print repo.name
forked_repo = repo.create_fork(user+'-shadow')
forked_name = forked_repo.name
# Trap previous fork with dm_ prefix
if not forked_name.startswith("dm_"):
newname = "dm_" + forked_name
forked_repo.edit(newname)
|
Change org-name to what is now lowercase
|
Change org-name to what is now lowercase
|
Python
|
mit
|
lsst-sqre/sqre-codekit,lsst-sqre/sqre-codekit
|
#!/bin/env python
"""
Fork github repos
"""
# technical debt:
# --------------
from github3 import login
from getpass import getuser
import os
import sys
import time
token = ''
debug = os.getenv("DM_SQUARE_DEBUG")
user = getuser()
if debug:
print user
# I have cut and pasted code
# I am a bad person
# I promise to make a module
file_credential = os.path.expanduser('~/.sq_github_token')
if not os.path.isfile(file_credential):
print "You don't have a token in {0} ".format(file_credential)
print "Have you run github_auth.py?"
sys.exit(1)
with open(file_credential, 'r') as fd:
token = fd.readline().strip()
gh = login(token=token)
# get the organization object
organization = gh.organization('LSST')
# list of all LSST repos
repos = [g for g in organization.iter_repos()]
if debug:
print repos
for repo in repos:
if debug:
print repo.name
forked_repo = repo.create_fork(user+'-shadow')
forked_name = forked_repo.name
# Trap previous fork with dm_ prefix
if not forked_name.startswith("dm_"):
newname = "dm_" + forked_name
forked_repo.edit(newname)
Change org-name to what is now lowercase
|
#!/bin/env python
"""
Fork github repos
"""
# technical debt:
# --------------
from github3 import login
from getpass import getuser
import os
import sys
import time
token = ''
debug = os.getenv("DM_SQUARE_DEBUG")
user = getuser()
if debug:
print user
# I have cut and pasted code
# I am a bad person
# I promise to make a module
file_credential = os.path.expanduser('~/.sq_github_token')
if not os.path.isfile(file_credential):
print "You don't have a token in {0} ".format(file_credential)
print "Have you run github_auth.py?"
sys.exit(1)
with open(file_credential, 'r') as fd:
token = fd.readline().strip()
gh = login(token=token)
# get the organization object
organization = gh.organization('lsst')
# list of all LSST repos
repos = [g for g in organization.iter_repos()]
if debug:
print repos
for repo in repos:
if debug:
print repo.name
forked_repo = repo.create_fork(user+'-shadow')
forked_name = forked_repo.name
# Trap previous fork with dm_ prefix
if not forked_name.startswith("dm_"):
newname = "dm_" + forked_name
forked_repo.edit(newname)
|
<commit_before>#!/bin/env python
"""
Fork github repos
"""
# technical debt:
# --------------
from github3 import login
from getpass import getuser
import os
import sys
import time
token = ''
debug = os.getenv("DM_SQUARE_DEBUG")
user = getuser()
if debug:
print user
# I have cut and pasted code
# I am a bad person
# I promise to make a module
file_credential = os.path.expanduser('~/.sq_github_token')
if not os.path.isfile(file_credential):
print "You don't have a token in {0} ".format(file_credential)
print "Have you run github_auth.py?"
sys.exit(1)
with open(file_credential, 'r') as fd:
token = fd.readline().strip()
gh = login(token=token)
# get the organization object
organization = gh.organization('LSST')
# list of all LSST repos
repos = [g for g in organization.iter_repos()]
if debug:
print repos
for repo in repos:
if debug:
print repo.name
forked_repo = repo.create_fork(user+'-shadow')
forked_name = forked_repo.name
# Trap previous fork with dm_ prefix
if not forked_name.startswith("dm_"):
newname = "dm_" + forked_name
forked_repo.edit(newname)
<commit_msg>Change org-name to what is now lowercase<commit_after>
|
#!/bin/env python
"""
Fork github repos
"""
# technical debt:
# --------------
from github3 import login
from getpass import getuser
import os
import sys
import time
token = ''
debug = os.getenv("DM_SQUARE_DEBUG")
user = getuser()
if debug:
print user
# I have cut and pasted code
# I am a bad person
# I promise to make a module
file_credential = os.path.expanduser('~/.sq_github_token')
if not os.path.isfile(file_credential):
print "You don't have a token in {0} ".format(file_credential)
print "Have you run github_auth.py?"
sys.exit(1)
with open(file_credential, 'r') as fd:
token = fd.readline().strip()
gh = login(token=token)
# get the organization object
organization = gh.organization('lsst')
# list of all LSST repos
repos = [g for g in organization.iter_repos()]
if debug:
print repos
for repo in repos:
if debug:
print repo.name
forked_repo = repo.create_fork(user+'-shadow')
forked_name = forked_repo.name
# Trap previous fork with dm_ prefix
if not forked_name.startswith("dm_"):
newname = "dm_" + forked_name
forked_repo.edit(newname)
|
#!/bin/env python
"""
Fork github repos
"""
# technical debt:
# --------------
from github3 import login
from getpass import getuser
import os
import sys
import time
token = ''
debug = os.getenv("DM_SQUARE_DEBUG")
user = getuser()
if debug:
print user
# I have cut and pasted code
# I am a bad person
# I promise to make a module
file_credential = os.path.expanduser('~/.sq_github_token')
if not os.path.isfile(file_credential):
print "You don't have a token in {0} ".format(file_credential)
print "Have you run github_auth.py?"
sys.exit(1)
with open(file_credential, 'r') as fd:
token = fd.readline().strip()
gh = login(token=token)
# get the organization object
organization = gh.organization('LSST')
# list of all LSST repos
repos = [g for g in organization.iter_repos()]
if debug:
print repos
for repo in repos:
if debug:
print repo.name
forked_repo = repo.create_fork(user+'-shadow')
forked_name = forked_repo.name
# Trap previous fork with dm_ prefix
if not forked_name.startswith("dm_"):
newname = "dm_" + forked_name
forked_repo.edit(newname)
Change org-name to what is now lowercase#!/bin/env python
"""
Fork github repos
"""
# technical debt:
# --------------
from github3 import login
from getpass import getuser
import os
import sys
import time
token = ''
debug = os.getenv("DM_SQUARE_DEBUG")
user = getuser()
if debug:
print user
# I have cut and pasted code
# I am a bad person
# I promise to make a module
file_credential = os.path.expanduser('~/.sq_github_token')
if not os.path.isfile(file_credential):
print "You don't have a token in {0} ".format(file_credential)
print "Have you run github_auth.py?"
sys.exit(1)
with open(file_credential, 'r') as fd:
token = fd.readline().strip()
gh = login(token=token)
# get the organization object
organization = gh.organization('lsst')
# list of all LSST repos
repos = [g for g in organization.iter_repos()]
if debug:
print repos
for repo in repos:
if debug:
print repo.name
forked_repo = repo.create_fork(user+'-shadow')
forked_name = forked_repo.name
# Trap previous fork with dm_ prefix
if not forked_name.startswith("dm_"):
newname = "dm_" + forked_name
forked_repo.edit(newname)
|
<commit_before>#!/bin/env python
"""
Fork github repos
"""
# technical debt:
# --------------
from github3 import login
from getpass import getuser
import os
import sys
import time
token = ''
debug = os.getenv("DM_SQUARE_DEBUG")
user = getuser()
if debug:
print user
# I have cut and pasted code
# I am a bad person
# I promise to make a module
file_credential = os.path.expanduser('~/.sq_github_token')
if not os.path.isfile(file_credential):
print "You don't have a token in {0} ".format(file_credential)
print "Have you run github_auth.py?"
sys.exit(1)
with open(file_credential, 'r') as fd:
token = fd.readline().strip()
gh = login(token=token)
# get the organization object
organization = gh.organization('LSST')
# list of all LSST repos
repos = [g for g in organization.iter_repos()]
if debug:
print repos
for repo in repos:
if debug:
print repo.name
forked_repo = repo.create_fork(user+'-shadow')
forked_name = forked_repo.name
# Trap previous fork with dm_ prefix
if not forked_name.startswith("dm_"):
newname = "dm_" + forked_name
forked_repo.edit(newname)
<commit_msg>Change org-name to what is now lowercase<commit_after>#!/bin/env python
"""
Fork github repos
"""
# technical debt:
# --------------
from github3 import login
from getpass import getuser
import os
import sys
import time
token = ''
debug = os.getenv("DM_SQUARE_DEBUG")
user = getuser()
if debug:
print user
# I have cut and pasted code
# I am a bad person
# I promise to make a module
file_credential = os.path.expanduser('~/.sq_github_token')
if not os.path.isfile(file_credential):
print "You don't have a token in {0} ".format(file_credential)
print "Have you run github_auth.py?"
sys.exit(1)
with open(file_credential, 'r') as fd:
token = fd.readline().strip()
gh = login(token=token)
# get the organization object
organization = gh.organization('lsst')
# list of all LSST repos
repos = [g for g in organization.iter_repos()]
if debug:
print repos
for repo in repos:
if debug:
print repo.name
forked_repo = repo.create_fork(user+'-shadow')
forked_name = forked_repo.name
# Trap previous fork with dm_ prefix
if not forked_name.startswith("dm_"):
newname = "dm_" + forked_name
forked_repo.edit(newname)
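The renaming rule at the bottom of the loop reads as a small pure function; this sketch (repo names hypothetical) shows it is idempotent, which is what the "Trap previous fork" comment relies on when the script is re-run:

def shadow_fork_name(name, prefix='dm_'):
    # Prefix a fork's name unless a previous run already did so.
    return name if name.startswith(prefix) else prefix + name

assert shadow_fork_name('afw') == 'dm_afw'
assert shadow_fork_name('dm_afw') == 'dm_afw'  # already-renamed forks are left alone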
|
393f813867dad176e0d5793f82a606c767145194
|
koushihime/main/utils.py
|
koushihime/main/utils.py
|
# -*- coding:utf-8 -*-
import datetime
from koushihime.main.models import PushRecord, WaitingQueue
def recent_have_pushed(title, hours=24):
limit_date = datetime.datetime.utcnow() - datetime.timedelta(hours=hours)
query = PushRecord.query.filter(PushRecord.title == title, PushRecord.pushed_time > limit_date).all()
if query:
return True
return False
def have_auto_catched(title):
query = WaitingQueue.query.filter_by(title=title).all()
if query:
return True
return False
|
# -*- coding:utf-8 -*-
import datetime
from koushihime.main.models import PushRecord, WaitingQueue
def recent_have_pushed(title, hours=72):
limit_date = datetime.datetime.utcnow() - datetime.timedelta(hours=hours)
query = PushRecord.query.filter(PushRecord.title == title, PushRecord.pushed_time > limit_date).all()
if query:
return True
return False
def have_auto_catched(title):
query = WaitingQueue.query.filter_by(title=title).all()
if query:
return True
return False
|
Change duplicate entry catching settings.
|
Change duplicate entry catching settings.
|
Python
|
artistic-2.0
|
ethe/KoushiHime,kafuuchino/MoegirlUpdater,ethe/KoushiHime,ethe/KoushiHime,kafuuchino/MoegirlUpdater,kafuuchino/MoegirlUpdater
|
# -*- coding:utf-8 -*-
import datetime
from koushihime.main.models import PushRecord, WaitingQueue
def recent_have_pushed(title, hours=24):
limit_date = datetime.datetime.utcnow() - datetime.timedelta(hours=hours)
query = PushRecord.query.filter(PushRecord.title == title, PushRecord.pushed_time > limit_date).all()
if query:
return True
return False
def have_auto_catched(title):
query = WaitingQueue.query.filter_by(title=title).all()
if query:
return True
return False
Change duplicate entry catching settings.
|
# -*- coding:utf-8 -*-
import datetime
from koushihime.main.models import PushRecord, WaitingQueue
def recent_have_pushed(title, hours=72):
limit_date = datetime.datetime.utcnow() - datetime.timedelta(hours=hours)
query = PushRecord.query.filter(PushRecord.title == title, PushRecord.pushed_time > limit_date).all()
if query:
return True
return False
def have_auto_catched(title):
query = WaitingQueue.query.filter_by(title=title).all()
if query:
return True
return False
|
<commit_before># -*- coding:utf-8 -*-
import datetime
from koushihime.main.models import PushRecord, WaitingQueue
def recent_have_pushed(title, hours=24):
limit_date = datetime.datetime.utcnow() - datetime.timedelta(hours=hours)
query = PushRecord.query.filter(PushRecord.title == title, PushRecord.pushed_time > limit_date).all()
if query:
return True
return False
def have_auto_catched(title):
query = WaitingQueue.query.filter_by(title=title).all()
if query:
return True
return False
<commit_msg>Change duplicate entry catching settings.<commit_after>
|
# -*- coding:utf-8 -*-
import datetime
from koushihime.main.models import PushRecord, WaitingQueue
def recent_have_pushed(title, hours=72):
limit_date = datetime.datetime.utcnow() - datetime.timedelta(hours=hours)
query = PushRecord.query.filter(PushRecord.title == title, PushRecord.pushed_time > limit_date).all()
if query:
return True
return False
def have_auto_catched(title):
query = WaitingQueue.query.filter_by(title=title).all()
if query:
return True
return False
|
# -*- coding:utf-8 -*-
import datetime
from koushihime.main.models import PushRecord, WaitingQueue
def recent_have_pushed(title, hours=24):
limit_date = datetime.datetime.utcnow() - datetime.timedelta(hours=hours)
query = PushRecord.query.filter(PushRecord.title == title, PushRecord.pushed_time > limit_date).all()
if query:
return True
return False
def have_auto_catched(title):
query = WaitingQueue.query.filter_by(title=title).all()
if query:
return True
return False
Change duplicate entry catching settings.# -*- coding:utf-8 -*-
import datetime
from koushihime.main.models import PushRecord, WaitingQueue
def recent_have_pushed(title, hours=72):
limit_date = datetime.datetime.utcnow() - datetime.timedelta(hours=hours)
query = PushRecord.query.filter(PushRecord.title == title, PushRecord.pushed_time > limit_date).all()
if query:
return True
return False
def have_auto_catched(title):
query = WaitingQueue.query.filter_by(title=title).all()
if query:
return True
return False
|
<commit_before># -*- coding:utf-8 -*-
import datetime
from koushihime.main.models import PushRecord, WaitingQueue
def recent_have_pushed(title, hours=24):
limit_date = datetime.datetime.utcnow() - datetime.timedelta(hours=hours)
query = PushRecord.query.filter(PushRecord.title == title, PushRecord.pushed_time > limit_date).all()
if query:
return True
return False
def have_auto_catched(title):
query = WaitingQueue.query.filter_by(title=title).all()
if query:
return True
return False
<commit_msg>Change duplicate entry catching settings.<commit_after># -*- coding:utf-8 -*-
import datetime
from koushihime.main.models import PushRecord, WaitingQueue
def recent_have_pushed(title, hours=72):
limit_date = datetime.datetime.utcnow() - datetime.timedelta(hours=hours)
query = PushRecord.query.filter(PushRecord.title == title, PushRecord.pushed_time > limit_date).all()
if query:
return True
return False
def have_auto_catched(title):
query = WaitingQueue.query.filter_by(title=title).all()
if query:
return True
return False
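The effect of widening the window from 24 to 72 hours, sketched with a hypothetical timestamp in place of a PushRecord row:

import datetime

pushed_time = datetime.datetime.utcnow() - datetime.timedelta(hours=48)  # pushed two days ago

old_limit = datetime.datetime.utcnow() - datetime.timedelta(hours=24)
new_limit = datetime.datetime.utcnow() - datetime.timedelta(hours=72)

assert not pushed_time > old_limit  # old window: no longer counted as recent
assert pushed_time > new_limit      # new window: still suppressed as a duplicate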
|
f3e39d2250a9c56a2beb6a1a9c1c4dafb97e8c7f
|
encoder/vgg.py
|
encoder/vgg.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow_fcn import fcn8_vgg
import tensorflow as tf
def inference(hypes, images, train=True):
"""Build the MNIST model up to where it may be used for inference.
Args:
images: Images placeholder, from inputs().
train: whether the network is used for training or inference
Returns:
softmax_linear: Output tensor with the computed logits.
"""
vgg_fcn = fcn8_vgg.FCN8VGG()
num_classes = hypes["fc_size"]
vgg_fcn.wd = hypes['wd']
vgg_fcn.build(images, train=train, num_classes=num_classes,
random_init_fc8=True)
vgg_dict = {'unpooled': vgg_fcn.conv5_3,
'deep_feat': vgg_fcn.pool5,
'deep_feat_channels': 512,
'early_feat': vgg_fcn.conv4_3,
'scored_feat': vgg_fcn.score_fr}
return vgg_dict
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from tensorflow_fcn import fcn8_vgg
import tensorflow as tf
def inference(hypes, images, train=True):
"""Build the MNIST model up to where it may be used for inference.
Args:
images: Images placeholder, from inputs().
train: whether the network is used for training or inference
Returns:
softmax_linear: Output tensor with the computed logits.
"""
vgg16_npy_path = os.path.join(hypes['dirs']['data_dir'], "vgg16.npy")
vgg_fcn = fcn8_vgg.FCN8VGG(vgg16_npy_path=vgg16_npy_path)
num_classes = hypes["fc_size"]
vgg_fcn.wd = hypes['wd']
vgg_fcn.build(images, train=train, num_classes=num_classes,
random_init_fc8=True)
vgg_dict = {'unpooled': vgg_fcn.conv5_3,
'deep_feat': vgg_fcn.pool5,
'deep_feat_channels': 512,
'early_feat': vgg_fcn.conv4_3,
'scored_feat': vgg_fcn.score_fr}
return vgg_dict
|
Update to VGG load from datadir
|
Update to VGG load from datadir
|
Python
|
mit
|
MarvinTeichmann/KittiBox,MarvinTeichmann/KittiBox
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow_fcn import fcn8_vgg
import tensorflow as tf
def inference(hypes, images, train=True):
"""Build the MNIST model up to where it may be used for inference.
Args:
images: Images placeholder, from inputs().
train: whether the network is used for training or inference
Returns:
softmax_linear: Output tensor with the computed logits.
"""
vgg_fcn = fcn8_vgg.FCN8VGG()
num_classes = hypes["fc_size"]
vgg_fcn.wd = hypes['wd']
vgg_fcn.build(images, train=train, num_classes=num_classes,
random_init_fc8=True)
vgg_dict = {'unpooled': vgg_fcn.conv5_3,
'deep_feat': vgg_fcn.pool5,
'deep_feat_channels': 512,
'early_feat': vgg_fcn.conv4_3,
'scored_feat': vgg_fcn.score_fr}
return vgg_dict
Update to VGG load from datadir
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from tensorflow_fcn import fcn8_vgg
import tensorflow as tf
def inference(hypes, images, train=True):
"""Build the MNIST model up to where it may be used for inference.
Args:
images: Images placeholder, from inputs().
train: whether the network is used for training or inference
Returns:
softmax_linear: Output tensor with the computed logits.
"""
vgg16_npy_path = os.path.join(hypes['dirs']['data_dir'], "vgg16.npy")
vgg_fcn = fcn8_vgg.FCN8VGG(vgg16_npy_path=vgg16_npy_path)
num_classes = hypes["fc_size"]
vgg_fcn.wd = hypes['wd']
vgg_fcn.build(images, train=train, num_classes=num_classes,
random_init_fc8=True)
vgg_dict = {'unpooled': vgg_fcn.conv5_3,
'deep_feat': vgg_fcn.pool5,
'deep_feat_channels': 512,
'early_feat': vgg_fcn.conv4_3,
'scored_feat': vgg_fcn.score_fr}
return vgg_dict
|
<commit_before>from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow_fcn import fcn8_vgg
import tensorflow as tf
def inference(hypes, images, train=True):
"""Build the MNIST model up to where it may be used for inference.
Args:
images: Images placeholder, from inputs().
train: whether the network is used for training or inference
Returns:
softmax_linear: Output tensor with the computed logits.
"""
vgg_fcn = fcn8_vgg.FCN8VGG()
num_classes = hypes["fc_size"]
vgg_fcn.wd = hypes['wd']
vgg_fcn.build(images, train=train, num_classes=num_classes,
random_init_fc8=True)
vgg_dict = {'unpooled': vgg_fcn.conv5_3,
'deep_feat': vgg_fcn.pool5,
'deep_feat_channels': 512,
'early_feat': vgg_fcn.conv4_3,
'scored_feat': vgg_fcn.score_fr}
return vgg_dict
<commit_msg>Update to VGG load from datadir<commit_after>
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from tensorflow_fcn import fcn8_vgg
import tensorflow as tf
def inference(hypes, images, train=True):
"""Build the MNIST model up to where it may be used for inference.
Args:
images: Images placeholder, from inputs().
train: whether the network is used for training or inference
Returns:
softmax_linear: Output tensor with the computed logits.
"""
vgg16_npy_path = os.path.join(hypes['dirs']['data_dir'], "vgg16.npy")
vgg_fcn = fcn8_vgg.FCN8VGG(vgg16_npy_path=vgg16_npy_path)
num_classes = hypes["fc_size"]
vgg_fcn.wd = hypes['wd']
vgg_fcn.build(images, train=train, num_classes=num_classes,
random_init_fc8=True)
vgg_dict = {'unpooled': vgg_fcn.conv5_3,
'deep_feat': vgg_fcn.pool5,
'deep_feat_channels': 512,
'early_feat': vgg_fcn.conv4_3,
'scored_feat': vgg_fcn.score_fr}
return vgg_dict
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow_fcn import fcn8_vgg
import tensorflow as tf
def inference(hypes, images, train=True):
"""Build the MNIST model up to where it may be used for inference.
Args:
images: Images placeholder, from inputs().
train: whether the network is used for training or inference
Returns:
softmax_linear: Output tensor with the computed logits.
"""
vgg_fcn = fcn8_vgg.FCN8VGG()
num_classes = hypes["fc_size"]
vgg_fcn.wd = hypes['wd']
vgg_fcn.build(images, train=train, num_classes=num_classes,
random_init_fc8=True)
vgg_dict = {'unpooled': vgg_fcn.conv5_3,
'deep_feat': vgg_fcn.pool5,
'deep_feat_channels': 512,
'early_feat': vgg_fcn.conv4_3,
'scored_feat': vgg_fcn.score_fr}
return vgg_dict
Update to VGG load from datadirfrom __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from tensorflow_fcn import fcn8_vgg
import tensorflow as tf
def inference(hypes, images, train=True):
"""Build the MNIST model up to where it may be used for inference.
Args:
images: Images placeholder, from inputs().
train: whether the network is used for training or inference
Returns:
softmax_linear: Output tensor with the computed logits.
"""
vgg16_npy_path = os.path.join(hypes['dirs']['data_dir'], "vgg16.npy")
vgg_fcn = fcn8_vgg.FCN8VGG(vgg16_npy_path=vgg16_npy_path)
num_classes = hypes["fc_size"]
vgg_fcn.wd = hypes['wd']
vgg_fcn.build(images, train=train, num_classes=num_classes,
random_init_fc8=True)
vgg_dict = {'unpooled': vgg_fcn.conv5_3,
'deep_feat': vgg_fcn.pool5,
'deep_feat_channels': 512,
'early_feat': vgg_fcn.conv4_3,
'scored_feat': vgg_fcn.score_fr}
return vgg_dict
|
<commit_before>from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow_fcn import fcn8_vgg
import tensorflow as tf
def inference(hypes, images, train=True):
"""Build the MNIST model up to where it may be used for inference.
Args:
images: Images placeholder, from inputs().
train: whether the network is used for training or inference
Returns:
softmax_linear: Output tensor with the computed logits.
"""
vgg_fcn = fcn8_vgg.FCN8VGG()
num_classes = hypes["fc_size"]
vgg_fcn.wd = hypes['wd']
vgg_fcn.build(images, train=train, num_classes=num_classes,
random_init_fc8=True)
vgg_dict = {'unpooled': vgg_fcn.conv5_3,
'deep_feat': vgg_fcn.pool5,
'deep_feat_channels': 512,
'early_feat': vgg_fcn.conv4_3,
'scored_feat': vgg_fcn.score_fr}
return vgg_dict
<commit_msg>Update to VGG load from datadir<commit_after>from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from tensorflow_fcn import fcn8_vgg
import tensorflow as tf
def inference(hypes, images, train=True):
"""Build the MNIST model up to where it may be used for inference.
Args:
images: Images placeholder, from inputs().
train: whether the network is used for training or inference
Returns:
softmax_linear: Output tensor with the computed logits.
"""
vgg16_npy_path = os.path.join(hypes['dirs']['data_dir'], "vgg16.npy")
vgg_fcn = fcn8_vgg.FCN8VGG(vgg16_npy_path=vgg16_npy_path)
num_classes = hypes["fc_size"]
vgg_fcn.wd = hypes['wd']
vgg_fcn.build(images, train=train, num_classes=num_classes,
random_init_fc8=True)
vgg_dict = {'unpooled': vgg_fcn.conv5_3,
'deep_feat': vgg_fcn.pool5,
'deep_feat_channels': 512,
'early_feat': vgg_fcn.conv4_3,
'scored_feat': vgg_fcn.score_fr}
return vgg_dict
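What the new path construction yields, with a hypothetical hypes dict standing in for the loaded hyperparameter file (POSIX separators assumed):

import os

hypes = {'dirs': {'data_dir': '/data/kittibox'}}  # hypothetical data directory

vgg16_npy_path = os.path.join(hypes['dirs']['data_dir'], 'vgg16.npy')
assert vgg16_npy_path == '/data/kittibox/vgg16.npy'
# FCN8VGG is then pointed at this explicit weights file instead of
# relying on the library's default search location.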
|
97c66e1cbbc6fd691c2fec4f4e72ba22892fa13c
|
base/components/accounts/backends.py
|
base/components/accounts/backends.py
|
from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
User = get_user_model()
class HelloBaseIDBackend(ModelBackend):
def authenticate(self, username=None):
try:
user = User.objects.filter(username=username)[0]
except IndexError:
return None
else:
return user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
|
from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
User = get_user_model()
class HelloBaseIDBackend(ModelBackend):
def authenticate(self, username=None):
try:
user = User.objects.filter(username=username)[0]
except IndexError:
return None
else:
return user
def get_user(self, user_id):
return User.objects.get(pk=user_id)
|
Remove the try/except clause from get_user().
|
Remove the try/except clause from get_user().
It doesn't seem like the code will -ever- hit the except clause as the
method that calls this has fallbacks of its own.
|
Python
|
apache-2.0
|
hello-base/web,hello-base/web,hello-base/web,hello-base/web
|
from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
User = get_user_model()
class HelloBaseIDBackend(ModelBackend):
def authenticate(self, username=None):
try:
user = User.objects.filter(username=username)[0]
except IndexError:
return None
else:
return user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
Remove the try/except clause from get_user().
It doesn't seem like the code will -ever- hit the except clause as the
method that calls this has fallbacks of its own.
|
from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
User = get_user_model()
class HelloBaseIDBackend(ModelBackend):
def authenticate(self, username=None):
try:
user = User.objects.filter(username=username)[0]
except IndexError:
return None
else:
return user
def get_user(self, user_id):
return User.objects.get(pk=user_id)
|
<commit_before>from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
User = get_user_model()
class HelloBaseIDBackend(ModelBackend):
def authenticate(self, username=None):
try:
user = User.objects.filter(username=username)[0]
except IndexError:
return None
else:
return user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
<commit_msg>Remove the try/except clause from get_user().
It doesn't seem like the code will -ever- hit the except clause as the
method that calls this has fallbacks of its own.<commit_after>
|
from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
User = get_user_model()
class HelloBaseIDBackend(ModelBackend):
def authenticate(self, username=None):
try:
user = User.objects.filter(username=username)[0]
except IndexError:
return None
else:
return user
def get_user(self, user_id):
return User.objects.get(pk=user_id)
|
from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
User = get_user_model()
class HelloBaseIDBackend(ModelBackend):
def authenticate(self, username=None):
try:
user = User.objects.filter(username=username)[0]
except IndexError:
return None
else:
return user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
Remove the try/except clause from get_user().
It doesn't seem like the code will -ever- hit the except clause as the
method that calls this has fallbacks of its own.from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
User = get_user_model()
class HelloBaseIDBackend(ModelBackend):
def authenticate(self, username=None):
try:
user = User.objects.filter(username=username)[0]
except IndexError:
return None
else:
return user
def get_user(self, user_id):
return User.objects.get(pk=user_id)
|
<commit_before>from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
User = get_user_model()
class HelloBaseIDBackend(ModelBackend):
def authenticate(self, username=None):
try:
user = User.objects.filter(username=username)[0]
except IndexError:
return None
else:
return user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
<commit_msg>Remove the try/except clause from get_user().
It doesn't seem like the code will -ever- hit the except clause as the
method that calls this has fallbacks of its own.<commit_after>from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
User = get_user_model()
class HelloBaseIDBackend(ModelBackend):
def authenticate(self, username=None):
try:
user = User.objects.filter(username=username)[0]
except IndexError:
return None
else:
return user
def get_user(self, user_id):
return User.objects.get(pk=user_id)
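The behavioral change, sketched with a tiny in-memory stand-in for User.objects (everything below is hypothetical): with the guard removed, a stale user id now raises DoesNotExist instead of quietly returning None, which the commit message asserts the calling code already copes with.

class FakeManager:
    class DoesNotExist(Exception):
        pass

    def __init__(self, users):
        self._users = users

    def get(self, pk):
        try:
            return self._users[pk]
        except KeyError:
            raise self.DoesNotExist(pk)

users = FakeManager({1: 'alice'})
assert users.get(1) == 'alice'
try:
    users.get(2)  # previously swallowed and turned into None
except FakeManager.DoesNotExist:
    pass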
|
f8271a1c244ac38ce787d98a3f953e417a30e2d0
|
3-koodin-refaktorointi/code-examples/test_dependency_updater.py
|
3-koodin-refaktorointi/code-examples/test_dependency_updater.py
|
import shutil
from os import getcwd
from os.path import join, exists, abspath
import dependency_updater
def destroy_testdata(target_path):
if exists(target_path): shutil.rmtree(target_path)
def create_testdata(target_path):
destroy_testdata(target_path)
shutil.copytree('dependency_updater_test_data', target_path)
def get_dep_lines(target_path):
return (line for line in open(join(target_path, "nonpnpapp.vcxproj"))
if '<AdditionalDependencies>' in line)
def test_testdata():
create_testdata('test1')
dep_lines = list(get_dep_lines('test1'))
assert len(dep_lines) == 4
for line in dep_lines: assert 'foo.obj' in line
destroy_testdata('test1')
#def test_changed_obj_update():
#create_testdata('test2')
#dependency_updater.updateProjectFile('nonpnpapp',
#abspath('test2'), abspath('test2/objfiles'), 'Debug')
#dep_lines = list(get_dep_lines('test2'))
#assert len(dep_lines) == 4
#assert 'quux.obj' in dep_lines[1]
#assert 'foo.obj' not in dep_lines[1]
#destroy_testdata('test2')
|
import shutil
from os import getcwd, chdir
from os.path import join, exists, abspath
from contextlib import contextmanager
import dependency_updater
def destroy_testdata(target_path):
if exists(target_path): shutil.rmtree(target_path)
def create_testdata(target_path):
destroy_testdata(target_path)
shutil.copytree('dependency_updater_test_data', target_path)
@contextmanager
def data_fixture(target_path):
save_cwd = getcwd()
test_dir = abspath(target_path)
create_testdata(test_dir)
yield test_dir
destroy_testdata(test_dir)
chdir(save_cwd)
def get_dep_lines(target_path):
return (line for line in open(join(target_path, "nonpnpapp.vcxproj"))
if '<AdditionalDependencies>' in line)
def test_testdata():
with data_fixture('test1') as d:
dep_lines = list(get_dep_lines('test1'))
assert len(dep_lines) == 4
for line in dep_lines: assert 'foo.obj' in line
#def test_changed_obj_update():
#create_testdata('test2')
#dependency_updater.updateProjectFile('nonpnpapp',
#abspath('test2'), abspath('test2/objfiles'), 'Debug')
#dep_lines = list(get_dep_lines('test2'))
#assert len(dep_lines) == 4
#assert 'quux.obj' in dep_lines[1]
#assert 'foo.obj' not in dep_lines[1]
#destroy_testdata('test2')
|
Use context management protocol for test data fixtures
|
Use context management protocol for test data fixtures
|
Python
|
bsd-2-clause
|
pkalliok/python-kurssi,pkalliok/python-kurssi
|
import shutil
from os import getcwd
from os.path import join, exists, abspath
import dependency_updater
def destroy_testdata(target_path):
if exists(target_path): shutil.rmtree(target_path)
def create_testdata(target_path):
destroy_testdata(target_path)
shutil.copytree('dependency_updater_test_data', target_path)
def get_dep_lines(target_path):
return (line for line in open(join(target_path, "nonpnpapp.vcxproj"))
if '<AdditionalDependencies>' in line)
def test_testdata():
create_testdata('test1')
dep_lines = list(get_dep_lines('test1'))
assert len(dep_lines) == 4
for line in dep_lines: assert 'foo.obj' in line
destroy_testdata('test1')
#def test_changed_obj_update():
#create_testdata('test2')
#dependency_updater.updateProjectFile('nonpnpapp',
#abspath('test2'), abspath('test2/objfiles'), 'Debug')
#dep_lines = list(get_dep_lines('test2'))
#assert len(dep_lines) == 4
#assert 'quux.obj' in dep_lines[1]
#assert 'foo.obj' not in dep_lines[1]
#destroy_testdata('test2')
Use context management protocol for test data fixtures
|
import shutil
from os import getcwd, chdir
from os.path import join, exists, abspath
from contextlib import contextmanager
import dependency_updater
def destroy_testdata(target_path):
if exists(target_path): shutil.rmtree(target_path)
def create_testdata(target_path):
destroy_testdata(target_path)
shutil.copytree('dependency_updater_test_data', target_path)
@contextmanager
def data_fixture(target_path):
save_cwd = getcwd()
test_dir = abspath(target_path)
create_testdata(test_dir)
yield test_dir
destroy_testdata(test_dir)
chdir(save_cwd)
def get_dep_lines(target_path):
return (line for line in open(join(target_path, "nonpnpapp.vcxproj"))
if '<AdditionalDependencies>' in line)
def test_testdata():
with data_fixture('test1') as d:
dep_lines = list(get_dep_lines('test1'))
assert len(dep_lines) == 4
for line in dep_lines: assert 'foo.obj' in line
#def test_changed_obj_update():
#create_testdata('test2')
#dependency_updater.updateProjectFile('nonpnpapp',
#abspath('test2'), abspath('test2/objfiles'), 'Debug')
#dep_lines = list(get_dep_lines('test2'))
#assert len(dep_lines) == 4
#assert 'quux.obj' in dep_lines[1]
#assert 'foo.obj' not in dep_lines[1]
#destroy_testdata('test2')
|
<commit_before>
import shutil
from os import getcwd
from os.path import join, exists, abspath
import dependency_updater
def destroy_testdata(target_path):
if exists(target_path): shutil.rmtree(target_path)
def create_testdata(target_path):
destroy_testdata(target_path)
shutil.copytree('dependency_updater_test_data', target_path)
def get_dep_lines(target_path):
return (line for line in open(join(target_path, "nonpnpapp.vcxproj"))
if '<AdditionalDependencies>' in line)
def test_testdata():
create_testdata('test1')
dep_lines = list(get_dep_lines('test1'))
assert len(dep_lines) == 4
for line in dep_lines: assert 'foo.obj' in line
destroy_testdata('test1')
#def test_changed_obj_update():
#create_testdata('test2')
#dependency_updater.updateProjectFile('nonpnpapp',
#abspath('test2'), abspath('test2/objfiles'), 'Debug')
#dep_lines = list(get_dep_lines('test2'))
#assert len(dep_lines) == 4
#assert 'quux.obj' in dep_lines[1]
#assert 'foo.obj' not in dep_lines[1]
#destroy_testdata('test2')
<commit_msg>Use context management protocol for test data fixtures<commit_after>
|
import shutil
from os import getcwd, chdir
from os.path import join, exists, abspath
from contextlib import contextmanager
import dependency_updater
def destroy_testdata(target_path):
if exists(target_path): shutil.rmtree(target_path)
def create_testdata(target_path):
destroy_testdata(target_path)
shutil.copytree('dependency_updater_test_data', target_path)
@contextmanager
def data_fixture(target_path):
save_cwd = getcwd()
test_dir = abspath(target_path)
create_testdata(test_dir)
yield test_dir
destroy_testdata(test_dir)
chdir(save_cwd)
def get_dep_lines(target_path):
return (line for line in open(join(target_path, "nonpnpapp.vcxproj"))
if '<AdditionalDependencies>' in line)
def test_testdata():
with data_fixture('test1') as d:
dep_lines = list(get_dep_lines('test1'))
assert len(dep_lines) == 4
for line in dep_lines: assert 'foo.obj' in line
#def test_changed_obj_update():
#create_testdata('test2')
#dependency_updater.updateProjectFile('nonpnpapp',
#abspath('test2'), abspath('test2/objfiles'), 'Debug')
#dep_lines = list(get_dep_lines('test2'))
#assert len(dep_lines) == 4
#assert 'quux.obj' in dep_lines[1]
#assert 'foo.obj' not in dep_lines[1]
#destroy_testdata('test2')
|
import shutil
from os import getcwd
from os.path import join, exists, abspath
import dependency_updater
def destroy_testdata(target_path):
if exists(target_path): shutil.rmtree(target_path)
def create_testdata(target_path):
destroy_testdata(target_path)
shutil.copytree('dependency_updater_test_data', target_path)
def get_dep_lines(target_path):
return (line for line in open(join(target_path, "nonpnpapp.vcxproj"))
if '<AdditionalDependencies>' in line)
def test_testdata():
create_testdata('test1')
dep_lines = list(get_dep_lines('test1'))
assert len(dep_lines) == 4
for line in dep_lines: assert 'foo.obj' in line
destroy_testdata('test1')
#def test_changed_obj_update():
#create_testdata('test2')
#dependency_updater.updateProjectFile('nonpnpapp',
#abspath('test2'), abspath('test2/objfiles'), 'Debug')
#dep_lines = list(get_dep_lines('test2'))
#assert len(dep_lines) == 4
#assert 'quux.obj' in dep_lines[1]
#assert 'foo.obj' not in dep_lines[1]
#destroy_testdata('test2')
Use context management protocol for test data fixtures
import shutil
from os import getcwd, chdir
from os.path import join, exists, abspath
from contextlib import contextmanager
import dependency_updater
def destroy_testdata(target_path):
if exists(target_path): shutil.rmtree(target_path)
def create_testdata(target_path):
destroy_testdata(target_path)
shutil.copytree('dependency_updater_test_data', target_path)
@contextmanager
def data_fixture(target_path):
save_cwd = getcwd()
test_dir = abspath(target_path)
create_testdata(test_dir)
yield test_dir
destroy_testdata(test_dir)
chdir(save_cwd)
def get_dep_lines(target_path):
return (line for line in open(join(target_path, "nonpnpapp.vcxproj"))
if '<AdditionalDependencies>' in line)
def test_testdata():
with data_fixture('test1') as d:
dep_lines = list(get_dep_lines('test1'))
assert len(dep_lines) == 4
for line in dep_lines: assert 'foo.obj' in line
#def test_changed_obj_update():
#create_testdata('test2')
#dependency_updater.updateProjectFile('nonpnpapp',
#abspath('test2'), abspath('test2/objfiles'), 'Debug')
#dep_lines = list(get_dep_lines('test2'))
#assert len(dep_lines) == 4
#assert 'quux.obj' in dep_lines[1]
#assert 'foo.obj' not in dep_lines[1]
#destroy_testdata('test2')
|
<commit_before>
import shutil
from os import getcwd
from os.path import join, exists, abspath
import dependency_updater
def destroy_testdata(target_path):
if exists(target_path): shutil.rmtree(target_path)
def create_testdata(target_path):
destroy_testdata(target_path)
shutil.copytree('dependency_updater_test_data', target_path)
def get_dep_lines(target_path):
return (line for line in open(join(target_path, "nonpnpapp.vcxproj"))
if '<AdditionalDependencies>' in line)
def test_testdata():
create_testdata('test1')
dep_lines = list(get_dep_lines('test1'))
assert len(dep_lines) == 4
for line in dep_lines: assert 'foo.obj' in line
destroy_testdata('test1')
#def test_changed_obj_update():
#create_testdata('test2')
#dependency_updater.updateProjectFile('nonpnpapp',
#abspath('test2'), abspath('test2/objfiles'), 'Debug')
#dep_lines = list(get_dep_lines('test2'))
#assert len(dep_lines) == 4
#assert 'quux.obj' in dep_lines[1]
#assert 'foo.obj' not in dep_lines[1]
#destroy_testdata('test2')
<commit_msg>Use context management protocol for test data fixtures<commit_after>
import shutil
from os import getcwd, chdir
from os.path import join, exists, abspath
from contextlib import contextmanager
import dependency_updater
def destroy_testdata(target_path):
if exists(target_path): shutil.rmtree(target_path)
def create_testdata(target_path):
destroy_testdata(target_path)
shutil.copytree('dependency_updater_test_data', target_path)
@contextmanager
def data_fixture(target_path):
save_cwd = getcwd()
test_dir = abspath(target_path)
create_testdata(test_dir)
yield test_dir
destroy_testdata(test_dir)
chdir(save_cwd)
def get_dep_lines(target_path):
return (line for line in open(join(target_path, "nonpnpapp.vcxproj"))
if '<AdditionalDependencies>' in line)
def test_testdata():
with data_fixture('test1') as d:
dep_lines = list(get_dep_lines('test1'))
assert len(dep_lines) == 4
for line in dep_lines: assert 'foo.obj' in line
#def test_changed_obj_update():
#create_testdata('test2')
#dependency_updater.updateProjectFile('nonpnpapp',
#abspath('test2'), abspath('test2/objfiles'), 'Debug')
#dep_lines = list(get_dep_lines('test2'))
#assert len(dep_lines) == 4
#assert 'quux.obj' in dep_lines[1]
#assert 'foo.obj' not in dep_lines[1]
#destroy_testdata('test2')
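One caveat the committed fixture leaves open: destroy_testdata and chdir only run if the with-block body finishes normally. A try/finally variant (a sketch, with create/destroy passed in so it runs standalone) guarantees cleanup even when an assertion fails:

from contextlib import contextmanager

@contextmanager
def data_fixture_safe(target_path, create, destroy):
    create(target_path)
    try:
        yield target_path
    finally:
        destroy(target_path)  # runs even if the test body raises

made, removed = [], []
try:
    with data_fixture_safe('test1', made.append, removed.append):
        raise AssertionError('simulated test failure')
except AssertionError:
    pass
assert made == ['test1'] and removed == ['test1']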
|
4d0f243ce2042a15c41df011f1ba90cf3b8445d2
|
gridfill/__init__.py
|
gridfill/__init__.py
|
"""Fill missing values in a grid."""
# Copyright (c) 2012-2014 Andrew Dawson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from .gridfill import fill, fill_cube
# Define a version string.
__version__ = '1.0.1'
# Define the objects to be imported by imports of the form:
# from gridfill import *
__all__ = ['fill', 'fill_cube']
|
"""Fill missing values in a grid."""
# Copyright (c) 2012-2014 Andrew Dawson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from .gridfill import fill, fill_cube
# Define a version string.
__version__ = '1.0.dev2'
# Define the objects to be imported by imports of the form:
# from gridfill import *
__all__ = ['fill', 'fill_cube']
|
Reset version number on v1.0.x maintenance branch
|
Reset version number on v1.0.x maintenance branch
|
Python
|
mit
|
ajdawson/gridfill
|
"""Fill missing values in a grid."""
# Copyright (c) 2012-2014 Andrew Dawson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from .gridfill import fill, fill_cube
# Define a version string.
__version__ = '1.0.1'
# Define the objects to be imported by imports of the form:
# from gridfill import *
__all__ = ['fill', 'fill_cube']
Reset version number on v1.0.x maintenance branch
|
"""Fill missing values in a grid."""
# Copyright (c) 2012-2014 Andrew Dawson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from .gridfill import fill, fill_cube
# Define a version string.
__version__ = '1.0.dev2'
# Define the objects to be imported by imports of the form:
# from gridfill import *
__all__ = ['fill', 'fill_cube']
|
<commit_before>"""Fill missing values in a grid."""
# Copyright (c) 2012-2014 Andrew Dawson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from .gridfill import fill, fill_cube
# Define a version string.
__version__ = '1.0.1'
# Define the objects to be imported by imports of the form:
# from gridfill import *
__all__ = ['fill', 'fill_cube']
<commit_msg>Reset version number on v1.0.x maintenance branch<commit_after>
|
"""Fill missing values in a grid."""
# Copyright (c) 2012-2014 Andrew Dawson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from .gridfill import fill, fill_cube
# Define a version string.
__version__ = '1.0.dev2'
# Define the objects to be imported by imports of the form:
# from gridfill import *
__all__ = ['fill', 'fill_cube']
|
"""Fill missing values in a grid."""
# Copyright (c) 2012-2014 Andrew Dawson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from .gridfill import fill, fill_cube
# Define a version string.
__version__ = '1.0.1'
# Define the objects to be imported by imports of the form:
# from gridfill import *
__all__ = ['fill', 'fill_cube']
Reset version number on v1.0.x maintenance branch"""Fill missing values in a grid."""
# Copyright (c) 2012-2014 Andrew Dawson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from .gridfill import fill, fill_cube
# Define a version string.
__version__ = '1.0.dev2'
# Define the objects to be imported by imports of the form:
# from gridfill import *
__all__ = ['fill', 'fill_cube']
|
<commit_before>"""Fill missing values in a grid."""
# Copyright (c) 2012-2014 Andrew Dawson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from .gridfill import fill, fill_cube
# Define a version string.
__version__ = '1.0.1'
# Define the objects to be imported by imports of the form:
# from gridfill import *
__all__ = ['fill', 'fill_cube']
<commit_msg>Reset version number on v1.0.x maintenance branch<commit_after>"""Fill missing values in a grid."""
# Copyright (c) 2012-2014 Andrew Dawson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from .gridfill import fill, fill_cube
# Define a version string.
__version__ = '1.0.dev2'
# Define the objects to be imported by imports of the form:
# from gridfill import *
__all__ = ['fill', 'fill_cube']
|
3983d7a7832fa6da6b19925cef0bce97a41c2f44
|
events/hook.py
|
events/hook.py
|
from events import Event, ACCEPTED
class Hook(Event):
contains = ('name', 'is_command', 'args')
requires = tuple()
requires_predicate = ('name',)
name = None
is_command = False
args = None
line = None
def setup(self):
if not self.name:
if self.line:
t = self.line.split(" ", 1)
self.name = t[0][1:]
self.is_command = True
if len(t) == 2:
self.args = t[1]
def consider(self, r_args):
d = {
'public': False,
'doc': None}
d.update(r_args)
if r_args['name'] != self.name:
return 0
if self.is_command and not r_args['public']:
return 0
return ACCEPTED
|
from events import Event, ACCEPTED
class Hook(Event):
contains = ('name', 'is_command', 'args')
requires = tuple()
requires_predicate = ('name',)
name = None
is_command = False
args = ""
line = None
def setup(self):
if not self.name:
if self.line:
t = self.line.split(" ", 1)
self.name = t[0][1:]
self.is_command = True
if len(t) == 2:
self.args = t[1]
def consider(self, r_args):
d = {
'public': False,
'doc': None}
d.update(r_args)
if r_args['name'] != self.name:
return 0
if self.is_command and not r_args['public']:
return 0
return ACCEPTED
|
Make Hook.args default to the empty string
|
Make Hook.args default to the empty string
|
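The new empty-string default matters because downstream code can call string methods on args unconditionally; with None that raises. A minimal, self-contained sketch of the failure mode and the fix (toy classes, not the mark2 codebase):
class HookOld:
    args = None  # old default
class HookNew:
    args = ""  # new default
# With None, any string operation on a hook that received no arguments fails:
try:
    HookOld().args.split()
except AttributeError as exc:
    print("old default:", exc)  # 'NoneType' object has no attribute 'split'
print("new default:", HookNew().args.split())  # [] -- safe when no args were given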
Python
|
mit
|
frostyfrog/mark2,frostyfrog/mark2,SupaHam/mark2,SupaHam/mark2
|
from events import Event, ACCEPTED
class Hook(Event):
contains = ('name', 'is_command', 'args')
requires = tuple()
requires_predicate = ('name',)
name = None
is_command = False
args = None
line = None
def setup(self):
if not self.name:
if self.line:
t = self.line.split(" ", 1)
self.name = t[0][1:]
self.is_command = True
if len(t) == 2:
self.args = t[1]
def consider(self, r_args):
d = {
'public': False,
'doc': None}
d.update(r_args)
if r_args['name'] != self.name:
return 0
if self.is_command and not r_args['public']:
return 0
return ACCEPTED
Make Hook.args default to the empty string
|
from events import Event, ACCEPTED
class Hook(Event):
contains = ('name', 'is_command', 'args')
requires = tuple()
requires_predicate = ('name',)
name = None
is_command = False
args = ""
line = None
def setup(self):
if not self.name:
if self.line:
t = self.line.split(" ", 1)
self.name = t[0][1:]
self.is_command = True
if len(t) == 2:
self.args = t[1]
def consider(self, r_args):
d = {
'public': False,
'doc': None}
d.update(r_args)
if r_args['name'] != self.name:
return 0
if self.is_command and not r_args['public']:
return 0
return ACCEPTED
|
<commit_before>from events import Event, ACCEPTED
class Hook(Event):
contains = ('name', 'is_command', 'args')
requires = tuple()
requires_predicate = ('name',)
name = None
is_command = False
args = None
line = None
def setup(self):
if not self.name:
if self.line:
t = self.line.split(" ", 1)
self.name = t[0][1:]
self.is_command = True
if len(t) == 2:
self.args = t[1]
def consider(self, r_args):
d = {
'public': False,
'doc': None}
d.update(r_args)
if r_args['name'] != self.name:
return 0
if self.is_command and not r_args['public']:
return 0
return ACCEPTED
<commit_msg>Make Hook.args default to the empty string<commit_after>
|
from events import Event, ACCEPTED
class Hook(Event):
contains = ('name', 'is_command', 'args')
requires = tuple()
requires_predicate = ('name',)
name = None
is_command = False
args = ""
line = None
def setup(self):
if not self.name:
if self.line:
t = self.line.split(" ", 1)
self.name = t[0][1:]
self.is_command = True
if len(t) == 2:
self.args = t[1]
def consider(self, r_args):
d = {
'public': False,
'doc': None}
d.update(r_args)
if r_args['name'] != self.name:
return 0
if self.is_command and not r_args['public']:
return 0
return ACCEPTED
|
from events import Event, ACCEPTED
class Hook(Event):
contains = ('name', 'is_command', 'args')
requires = tuple()
requires_predicate = ('name',)
name = None
is_command = False
args = None
line = None
def setup(self):
if not self.name:
if self.line:
t = self.line.split(" ", 1)
self.name = t[0][1:]
self.is_command = True
if len(t) == 2:
self.args = t[1]
def consider(self, r_args):
d = {
'public': False,
'doc': None}
d.update(r_args)
if r_args['name'] != self.name:
return 0
if self.is_command and not r_args['public']:
return 0
return ACCEPTED
Make Hook.args default to the empty stringfrom events import Event, ACCEPTED
class Hook(Event):
contains = ('name', 'is_command', 'args')
requires = tuple()
requires_predicate = ('name',)
name = None
is_command = False
args = ""
line = None
def setup(self):
if not self.name:
if self.line:
t = self.line.split(" ", 1)
self.name = t[0][1:]
self.is_command = True
if len(t) == 2:
self.args = t[1]
def consider(self, r_args):
d = {
'public': False,
'doc': None}
d.update(r_args)
if r_args['name'] != self.name:
return 0
if self.is_command and not r_args['public']:
return 0
return ACCEPTED
|
<commit_before>from events import Event, ACCEPTED
class Hook(Event):
contains = ('name', 'is_command', 'args')
requires = tuple()
requires_predicate = ('name',)
name = None
is_command = False
args = None
line = None
def setup(self):
if not self.name:
if self.line:
t = self.line.split(" ", 1)
self.name = t[0][1:]
self.is_command = True
if len(t) == 2:
self.args = t[1]
def consider(self, r_args):
d = {
'public': False,
'doc': None}
d.update(r_args)
if r_args['name'] != self.name:
return 0
if self.is_command and not r_args['public']:
return 0
return ACCEPTED
<commit_msg>Make Hook.args default to the empty string<commit_after>from events import Event, ACCEPTED
class Hook(Event):
contains = ('name', 'is_command', 'args')
requires = tuple()
requires_predicate = ('name',)
name = None
is_command = False
args = ""
line = None
def setup(self):
if not self.name:
if self.line:
t = self.line.split(" ", 1)
self.name = t[0][1:]
self.is_command = True
if len(t) == 2:
self.args = t[1]
def consider(self, r_args):
d = {
'public': False,
'doc': None}
d.update(r_args)
if r_args['name'] != self.name:
return 0
if self.is_command and not r_args['public']:
return 0
return ACCEPTED
|
fda9d6fd0a8f437b06fa4e34396ca52f4874d32c
|
modules/pipeurlbuilder.py
|
modules/pipeurlbuilder.py
|
# pipeurlbuilder.py
#
import urllib
from pipe2py import util
def pipe_urlbuilder(context, _INPUT, conf, **kwargs):
"""This source builds a url and yields it forever.
Keyword arguments:
context -- pipeline context
_INPUT -- not used
conf:
BASE -- base
PATH -- path elements
PARAM -- query parameters
Yields (_OUTPUT):
url
"""
for item in _INPUT:
#note: we could cache get_value results if item==True
url = util.get_value(conf['BASE'], item, **kwargs)
if not url.endswith('/'):
url += '/'
path = util.get_value(conf['PATH'], item, **kwargs)
if not isinstance(path, list):
path = [path]
url += "/".join(path)
url = url.rstrip("/")
params = dict([(util.get_value(p['key'], item, **kwargs), util.get_value(p['value'], item, **kwargs)) for p in conf['PARAM']])
if params:
url += "?" + urllib.urlencode(params)
yield url
|
# pipeurlbuilder.py
#
import urllib
from pipe2py import util
def pipe_urlbuilder(context, _INPUT, conf, **kwargs):
"""This source builds a url and yields it forever.
Keyword arguments:
context -- pipeline context
_INPUT -- not used
conf:
BASE -- base
PATH -- path elements
PARAM -- query parameters
Yields (_OUTPUT):
url
"""
for item in _INPUT:
#note: we could cache get_value results if item==True
url = util.get_value(conf['BASE'], item, **kwargs)
if not url.endswith('/'):
url += '/'
path = conf['PATH']
if not isinstance(path, list):
path = [path]
path = [util.get_value(p, item, **kwargs) for p in path]
url += "/".join(path)
url = url.rstrip("/")
params = dict([(util.get_value(p['key'], item, **kwargs), util.get_value(p['value'], item, **kwargs)) for p in conf['PARAM']])
if params:
url += "?" + urllib.urlencode(params)
yield url
|
Fix to handle multiple path segments
|
Fix to handle multiple path segments
|
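The fix above resolves each PATH element individually instead of handing the whole list to util.get_value, which only resolves a single conf entry. A minimal sketch of the per-element resolution (resolve below is a hypothetical stand-in for pipe2py's util.get_value, not the real helper):
def resolve(conf_item, item):
    # stand-in resolver: pull a literal 'value' key out of one conf entry
    return conf_item["value"]
item = {}
path_conf = [{"value": "feeds"}, {"value": "rss"}]  # multiple path segments
path = path_conf if isinstance(path_conf, list) else [path_conf]
segments = [resolve(p, item) for p in path]  # resolve per element, as in the fix
print("/".join(segments))  # feeds/rss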
Python
|
mit
|
nerevu/riko,nerevu/riko
|
# pipeurlbuilder.py
#
import urllib
from pipe2py import util
def pipe_urlbuilder(context, _INPUT, conf, **kwargs):
"""This source builds a url and yields it forever.
Keyword arguments:
context -- pipeline context
_INPUT -- not used
conf:
BASE -- base
PATH -- path elements
PARAM -- query parameters
Yields (_OUTPUT):
url
"""
for item in _INPUT:
#note: we could cache get_value results if item==True
url = util.get_value(conf['BASE'], item, **kwargs)
if not url.endswith('/'):
url += '/'
path = util.get_value(conf['PATH'], item, **kwargs)
if not isinstance(path, list):
path = [path]
url += "/".join(path)
url = url.rstrip("/")
params = dict([(util.get_value(p['key'], item, **kwargs), util.get_value(p['value'], item, **kwargs)) for p in conf['PARAM']])
if params:
url += "?" + urllib.urlencode(params)
yield url
Fix to handle multiple path segments
|
# pipeurlbuilder.py
#
import urllib
from pipe2py import util
def pipe_urlbuilder(context, _INPUT, conf, **kwargs):
"""This source builds a url and yields it forever.
Keyword arguments:
context -- pipeline context
_INPUT -- not used
conf:
BASE -- base
PATH -- path elements
PARAM -- query parameters
Yields (_OUTPUT):
url
"""
for item in _INPUT:
#note: we could cache get_value results if item==True
url = util.get_value(conf['BASE'], item, **kwargs)
if not url.endswith('/'):
url += '/'
path = conf['PATH']
if not isinstance(path, list):
path = [path]
path = [util.get_value(p, item, **kwargs) for p in path]
url += "/".join(path)
url = url.rstrip("/")
params = dict([(util.get_value(p['key'], item, **kwargs), util.get_value(p['value'], item, **kwargs)) for p in conf['PARAM']])
if params:
url += "?" + urllib.urlencode(params)
yield url
|
<commit_before># pipeurlbuilder.py
#
import urllib
from pipe2py import util
def pipe_urlbuilder(context, _INPUT, conf, **kwargs):
"""This source builds a url and yields it forever.
Keyword arguments:
context -- pipeline context
_INPUT -- not used
conf:
BASE -- base
PATH -- path elements
PARAM -- query parameters
Yields (_OUTPUT):
url
"""
for item in _INPUT:
#note: we could cache get_value results if item==True
url = util.get_value(conf['BASE'], item, **kwargs)
if not url.endswith('/'):
url += '/'
path = util.get_value(conf['PATH'], item, **kwargs)
if not isinstance(path, list):
path = [path]
url += "/".join(path)
url = url.rstrip("/")
params = dict([(util.get_value(p['key'], item, **kwargs), util.get_value(p['value'], item, **kwargs)) for p in conf['PARAM']])
if params:
url += "?" + urllib.urlencode(params)
yield url
<commit_msg>Fix to handle multiple path segments<commit_after>
|
# pipeurlbuilder.py
#
import urllib
from pipe2py import util
def pipe_urlbuilder(context, _INPUT, conf, **kwargs):
"""This source builds a url and yields it forever.
Keyword arguments:
context -- pipeline context
_INPUT -- not used
conf:
BASE -- base
PATH -- path elements
PARAM -- query parameters
Yields (_OUTPUT):
url
"""
for item in _INPUT:
#note: we could cache get_value results if item==True
url = util.get_value(conf['BASE'], item, **kwargs)
if not url.endswith('/'):
url += '/'
path = conf['PATH']
if not isinstance(path, list):
path = [path]
path = [util.get_value(p, item, **kwargs) for p in path]
url += "/".join(path)
url = url.rstrip("/")
params = dict([(util.get_value(p['key'], item, **kwargs), util.get_value(p['value'], item, **kwargs)) for p in conf['PARAM']])
if params:
url += "?" + urllib.urlencode(params)
yield url
|
# pipeurlbuilder.py
#
import urllib
from pipe2py import util
def pipe_urlbuilder(context, _INPUT, conf, **kwargs):
"""This source builds a url and yields it forever.
Keyword arguments:
context -- pipeline context
_INPUT -- not used
conf:
BASE -- base
PATH -- path elements
PARAM -- query parameters
Yields (_OUTPUT):
url
"""
for item in _INPUT:
#note: we could cache get_value results if item==True
url = util.get_value(conf['BASE'], item, **kwargs)
if not url.endswith('/'):
url += '/'
path = util.get_value(conf['PATH'], item, **kwargs)
if not isinstance(path, list):
path = [path]
url += "/".join(path)
url = url.rstrip("/")
params = dict([(util.get_value(p['key'], item, **kwargs), util.get_value(p['value'], item, **kwargs)) for p in conf['PARAM']])
if params:
url += "?" + urllib.urlencode(params)
yield url
Fix to handle multiple path segments# pipeurlbuilder.py
#
import urllib
from pipe2py import util
def pipe_urlbuilder(context, _INPUT, conf, **kwargs):
"""This source builds a url and yields it forever.
Keyword arguments:
context -- pipeline context
_INPUT -- not used
conf:
BASE -- base
PATH -- path elements
PARAM -- query parameters
Yields (_OUTPUT):
url
"""
for item in _INPUT:
#note: we could cache get_value results if item==True
url = util.get_value(conf['BASE'], item, **kwargs)
if not url.endswith('/'):
url += '/'
path = conf['PATH']
if not isinstance(path, list):
path = [path]
path = [util.get_value(p, item, **kwargs) for p in path]
url += "/".join(path)
url = url.rstrip("/")
params = dict([(util.get_value(p['key'], item, **kwargs), util.get_value(p['value'], item, **kwargs)) for p in conf['PARAM']])
if params:
url += "?" + urllib.urlencode(params)
yield url
|
<commit_before># pipeurlbuilder.py
#
import urllib
from pipe2py import util
def pipe_urlbuilder(context, _INPUT, conf, **kwargs):
"""This source builds a url and yields it forever.
Keyword arguments:
context -- pipeline context
_INPUT -- not used
conf:
BASE -- base
PATH -- path elements
PARAM -- query parameters
Yields (_OUTPUT):
url
"""
for item in _INPUT:
#note: we could cache get_value results if item==True
url = util.get_value(conf['BASE'], item, **kwargs)
if not url.endswith('/'):
url += '/'
path = util.get_value(conf['PATH'], item, **kwargs)
if not isinstance(path, list):
path = [path]
url += "/".join(path)
url = url.rstrip("/")
params = dict([(util.get_value(p['key'], item, **kwargs), util.get_value(p['value'], item, **kwargs)) for p in conf['PARAM']])
if params:
url += "?" + urllib.urlencode(params)
yield url
<commit_msg>Fix to handle multiple path segments<commit_after># pipeurlbuilder.py
#
import urllib
from pipe2py import util
def pipe_urlbuilder(context, _INPUT, conf, **kwargs):
"""This source builds a url and yields it forever.
Keyword arguments:
context -- pipeline context
_INPUT -- not used
conf:
BASE -- base
PATH -- path elements
PARAM -- query parameters
Yields (_OUTPUT):
url
"""
for item in _INPUT:
#note: we could cache get_value results if item==True
url = util.get_value(conf['BASE'], item, **kwargs)
if not url.endswith('/'):
url += '/'
path = conf['PATH']
if not isinstance(path, list):
path = [path]
path = [util.get_value(p, item, **kwargs) for p in path]
url += "/".join(path)
url = url.rstrip("/")
params = dict([(util.get_value(p['key'], item, **kwargs), util.get_value(p['value'], item, **kwargs)) for p in conf['PARAM']])
if params:
url += "?" + urllib.urlencode(params)
yield url
|
7302af8eb70d14360805910377241b974311d215
|
taiga/projects/validators.py
|
taiga/projects/validators.py
|
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
from . import models
class ProjectExistsValidator:
def validate_project_id(self, attrs, source):
value = attrs[source]
if not models.Project.objects.filter(pk=value).exists():
msg = _("There's no project with that id")
raise serializers.ValidationError(msg)
return attrs
class UserStoryStatusExistsValidator:
def validate_status_id(self, attrs, source):
value = attrs[source]
if not models.UserStoryStatus.objects.filter(pk=value).exists():
msg = _("There's no user story status with that id")
raise serializers.ValidationError(msg)
return attrs
class TaskStatusExistsValidator:
def validate_status_id(self, attrs, source):
value = attrs[source]
if not models.TaskStatus.objects.filter(pk=value).exists():
msg = _("There's no task status with that id")
raise serializers.ValidationError(msg)
return attrs
|
# Copyright (C) 2015 Andrey Antukh <niwi@niwi.be>
# Copyright (C) 2015 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2015 David Barragán <bameda@dbarragan.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
from . import models
class ProjectExistsValidator:
def validate_project_id(self, attrs, source):
value = attrs[source]
if not models.Project.objects.filter(pk=value).exists():
msg = _("There's no project with that id")
raise serializers.ValidationError(msg)
return attrs
class UserStoryStatusExistsValidator:
def validate_status_id(self, attrs, source):
value = attrs[source]
if not models.UserStoryStatus.objects.filter(pk=value).exists():
msg = _("There's no user story status with that id")
raise serializers.ValidationError(msg)
return attrs
class TaskStatusExistsValidator:
def validate_status_id(self, attrs, source):
value = attrs[source]
if not models.TaskStatus.objects.filter(pk=value).exists():
msg = _("There's no task status with that id")
raise serializers.ValidationError(msg)
return attrs
|
Add copyright and license terms
|
Add copyright and license terms
|
Python
|
agpl-3.0
|
xdevelsistemas/taiga-back-community,Rademade/taiga-back,dayatz/taiga-back,joshisa/taiga-back,jeffdwyatt/taiga-back,bdang2012/taiga-back-casting,Tigerwhit4/taiga-back,CMLL/taiga-back,xdevelsistemas/taiga-back-community,gam-phon/taiga-back,dayatz/taiga-back,astronaut1712/taiga-back,forging2012/taiga-back,joshisa/taiga-back,EvgeneOskin/taiga-back,frt-arch/taiga-back,obimod/taiga-back,coopsource/taiga-back,joshisa/taiga-back,forging2012/taiga-back,CMLL/taiga-back,gauravjns/taiga-back,dycodedev/taiga-back,astronaut1712/taiga-back,bdang2012/taiga-back-casting,bdang2012/taiga-back-casting,gauravjns/taiga-back,taigaio/taiga-back,gam-phon/taiga-back,CMLL/taiga-back,CoolCloud/taiga-back,rajiteh/taiga-back,CMLL/taiga-back,forging2012/taiga-back,rajiteh/taiga-back,crr0004/taiga-back,jeffdwyatt/taiga-back,WALR/taiga-back,gam-phon/taiga-back,dycodedev/taiga-back,astagi/taiga-back,crr0004/taiga-back,CoolCloud/taiga-back,taigaio/taiga-back,seanchen/taiga-back,WALR/taiga-back,Rademade/taiga-back,EvgeneOskin/taiga-back,gauravjns/taiga-back,WALR/taiga-back,joshisa/taiga-back,frt-arch/taiga-back,seanchen/taiga-back,gam-phon/taiga-back,dycodedev/taiga-back,bdang2012/taiga-back-casting,CoolCloud/taiga-back,coopsource/taiga-back,rajiteh/taiga-back,crr0004/taiga-back,seanchen/taiga-back,astagi/taiga-back,dycodedev/taiga-back,Rademade/taiga-back,Tigerwhit4/taiga-back,taigaio/taiga-back,xdevelsistemas/taiga-back-community,coopsource/taiga-back,astronaut1712/taiga-back,jeffdwyatt/taiga-back,Tigerwhit4/taiga-back,astronaut1712/taiga-back,frt-arch/taiga-back,Tigerwhit4/taiga-back,astagi/taiga-back,dayatz/taiga-back,Rademade/taiga-back,obimod/taiga-back,forging2012/taiga-back,rajiteh/taiga-back,jeffdwyatt/taiga-back,astagi/taiga-back,coopsource/taiga-back,WALR/taiga-back,EvgeneOskin/taiga-back,crr0004/taiga-back,obimod/taiga-back,EvgeneOskin/taiga-back,gauravjns/taiga-back,Rademade/taiga-back,obimod/taiga-back,CoolCloud/taiga-back,seanchen/taiga-back
|
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
from . import models
class ProjectExistsValidator:
def validate_project_id(self, attrs, source):
value = attrs[source]
if not models.Project.objects.filter(pk=value).exists():
msg = _("There's no project with that id")
raise serializers.ValidationError(msg)
return attrs
class UserStoryStatusExistsValidator:
def validate_status_id(self, attrs, source):
value = attrs[source]
if not models.UserStoryStatus.objects.filter(pk=value).exists():
msg = _("There's no user story status with that id")
raise serializers.ValidationError(msg)
return attrs
class TaskStatusExistsValidator:
def validate_status_id(self, attrs, source):
value = attrs[source]
if not models.TaskStatus.objects.filter(pk=value).exists():
msg = _("There's no task status with that id")
raise serializers.ValidationError(msg)
return attrs
Add copyright and license terms
|
# Copyright (C) 2015 Andrey Antukh <niwi@niwi.be>
# Copyright (C) 2015 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2015 David Barragán <bameda@dbarragan.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
from . import models
class ProjectExistsValidator:
def validate_project_id(self, attrs, source):
value = attrs[source]
if not models.Project.objects.filter(pk=value).exists():
msg = _("There's no project with that id")
raise serializers.ValidationError(msg)
return attrs
class UserStoryStatusExistsValidator:
def validate_status_id(self, attrs, source):
value = attrs[source]
if not models.UserStoryStatus.objects.filter(pk=value).exists():
msg = _("There's no user story status with that id")
raise serializers.ValidationError(msg)
return attrs
class TaskStatusExistsValidator:
def validate_status_id(self, attrs, source):
value = attrs[source]
if not models.TaskStatus.objects.filter(pk=value).exists():
msg = _("There's no task status with that id")
raise serializers.ValidationError(msg)
return attrs
|
<commit_before>from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
from . import models
class ProjectExistsValidator:
def validate_project_id(self, attrs, source):
value = attrs[source]
if not models.Project.objects.filter(pk=value).exists():
msg = _("There's no project with that id")
raise serializers.ValidationError(msg)
return attrs
class UserStoryStatusExistsValidator:
def validate_status_id(self, attrs, source):
value = attrs[source]
if not models.UserStoryStatus.objects.filter(pk=value).exists():
msg = _("There's no user story status with that id")
raise serializers.ValidationError(msg)
return attrs
class TaskStatusExistsValidator:
def validate_status_id(self, attrs, source):
value = attrs[source]
if not models.TaskStatus.objects.filter(pk=value).exists():
msg = _("There's no task status with that id")
raise serializers.ValidationError(msg)
return attrs
<commit_msg>Add copyright and license terms<commit_after>
|
# Copyright (C) 2015 Andrey Antukh <niwi@niwi.be>
# Copyright (C) 2015 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2015 David Barragán <bameda@dbarragan.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
from . import models
class ProjectExistsValidator:
def validate_project_id(self, attrs, source):
value = attrs[source]
if not models.Project.objects.filter(pk=value).exists():
msg = _("There's no project with that id")
raise serializers.ValidationError(msg)
return attrs
class UserStoryStatusExistsValidator:
def validate_status_id(self, attrs, source):
value = attrs[source]
if not models.UserStoryStatus.objects.filter(pk=value).exists():
msg = _("There's no user story status with that id")
raise serializers.ValidationError(msg)
return attrs
class TaskStatusExistsValidator:
def validate_status_id(self, attrs, source):
value = attrs[source]
if not models.TaskStatus.objects.filter(pk=value).exists():
msg = _("There's no task status with that id")
raise serializers.ValidationError(msg)
return attrs
|
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
from . import models
class ProjectExistsValidator:
def validate_project_id(self, attrs, source):
value = attrs[source]
if not models.Project.objects.filter(pk=value).exists():
msg = _("There's no project with that id")
raise serializers.ValidationError(msg)
return attrs
class UserStoryStatusExistsValidator:
def validate_status_id(self, attrs, source):
value = attrs[source]
if not models.UserStoryStatus.objects.filter(pk=value).exists():
msg = _("There's no user story status with that id")
raise serializers.ValidationError(msg)
return attrs
class TaskStatusExistsValidator:
def validate_status_id(self, attrs, source):
value = attrs[source]
if not models.TaskStatus.objects.filter(pk=value).exists():
msg = _("There's no task status with that id")
raise serializers.ValidationError(msg)
return attrs
Add copyright and license terms# Copyright (C) 2015 Andrey Antukh <niwi@niwi.be>
# Copyright (C) 2015 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2015 David Barragán <bameda@dbarragan.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
from . import models
class ProjectExistsValidator:
def validate_project_id(self, attrs, source):
value = attrs[source]
if not models.Project.objects.filter(pk=value).exists():
msg = _("There's no project with that id")
raise serializers.ValidationError(msg)
return attrs
class UserStoryStatusExistsValidator:
def validate_status_id(self, attrs, source):
value = attrs[source]
if not models.UserStoryStatus.objects.filter(pk=value).exists():
msg = _("There's no user story status with that id")
raise serializers.ValidationError(msg)
return attrs
class TaskStatusExistsValidator:
def validate_status_id(self, attrs, source):
value = attrs[source]
if not models.TaskStatus.objects.filter(pk=value).exists():
msg = _("There's no task status with that id")
raise serializers.ValidationError(msg)
return attrs
|
<commit_before>from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
from . import models
class ProjectExistsValidator:
def validate_project_id(self, attrs, source):
value = attrs[source]
if not models.Project.objects.filter(pk=value).exists():
msg = _("There's no project with that id")
raise serializers.ValidationError(msg)
return attrs
class UserStoryStatusExistsValidator:
def validate_status_id(self, attrs, source):
value = attrs[source]
if not models.UserStoryStatus.objects.filter(pk=value).exists():
msg = _("There's no user story status with that id")
raise serializers.ValidationError(msg)
return attrs
class TaskStatusExistsValidator:
def validate_status_id(self, attrs, source):
value = attrs[source]
if not models.TaskStatus.objects.filter(pk=value).exists():
msg = _("There's no task status with that id")
raise serializers.ValidationError(msg)
return attrs
<commit_msg>Add copyright and license terms<commit_after># Copyright (C) 2015 Andrey Antukh <niwi@niwi.be>
# Copyright (C) 2015 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2015 David Barragán <bameda@dbarragan.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
from . import models
class ProjectExistsValidator:
def validate_project_id(self, attrs, source):
value = attrs[source]
if not models.Project.objects.filter(pk=value).exists():
msg = _("There's no project with that id")
raise serializers.ValidationError(msg)
return attrs
class UserStoryStatusExistsValidator:
def validate_status_id(self, attrs, source):
value = attrs[source]
if not models.UserStoryStatus.objects.filter(pk=value).exists():
msg = _("There's no user story status with that id")
raise serializers.ValidationError(msg)
return attrs
class TaskStatusExistsValidator:
def validate_status_id(self, attrs, source):
value = attrs[source]
if not models.TaskStatus.objects.filter(pk=value).exists():
msg = _("There's no task status with that id")
raise serializers.ValidationError(msg)
return attrs
|
8eb3c6aa123cecec826c3c07f98b2d2b84c265af
|
scrapi/registry.py
|
scrapi/registry.py
|
import sys
class _Registry(dict):
# These must be defined so that doctest gathering doesn't make
# pytest crash when trying to figure out what/where scrapi.registry is
__file__ = __file__
__name__ = __name__
def __init__(self):
dict.__init__(self)
def __getitem__(self, key):
try:
return dict.__getitem__(self, key)
except KeyError:
raise KeyError('No harvester named "{}"'.format(key))
@property
def beat_schedule(self):
from celery.schedules import crontab
return {
'run_{}'.format(name): {
'args': [name],
'schedule': crontab(**inst.run_at),
'task': 'scrapi.tasks.run_harvester',
}
for name, inst
in self.items()
}
sys.modules[__name__] = _Registry()
|
import sys
class _Registry(dict):
# These must be defined so that doctest gathering doesn't make
# pytest crash when trying to figure out what/where scrapi.registry is
__file__ = __file__
__name__ = __name__
def __init__(self):
dict.__init__(self)
def __hash__(self):
return hash(self.freeze(self))
def __getitem__(self, key):
try:
return dict.__getitem__(self, key)
except KeyError:
raise KeyError('No harvester named "{}"'.format(key))
def freeze(self, o):
if isinstance(o, dict):
return frozenset({k: self.freeze(v) for k, v in o.items()}.items())
elif isinstance(o, list):
return tuple(map(self.freeze, o))
return o
@property
def beat_schedule(self):
from celery.schedules import crontab
return {
'run_{}'.format(name): {
'args': [name],
'schedule': crontab(**inst.run_at),
'task': 'scrapi.tasks.run_harvester',
}
for name, inst
in self.items()
}
sys.modules[__name__] = _Registry()
|
Make _Registry hashable so that django can import from scrapi
|
Make _Registry hashable so that django can import from scrapi
|
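The hashability comes from freeze converting every nested dict into a frozenset of (key, frozen value) pairs and every list into a tuple, recursively, so that hash() succeeds on the whole structure. A standalone sketch of the same idea (not the scrapi module itself):
def freeze(o):
    # Recursively replace unhashable containers with hashable equivalents.
    if isinstance(o, dict):
        return frozenset((k, freeze(v)) for k, v in o.items())
    if isinstance(o, list):
        return tuple(freeze(v) for v in o)
    return o
registry = {"harvester": {"run_at": {"hour": 2}, "tags": ["oai", "pmh"]}}
print(hash(freeze(registry)))  # nested dicts and lists are now hashable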
Python
|
apache-2.0
|
fabianvf/scrapi,felliott/scrapi,erinspace/scrapi,mehanig/scrapi,erinspace/scrapi,CenterForOpenScience/scrapi,fabianvf/scrapi,mehanig/scrapi,felliott/scrapi,CenterForOpenScience/scrapi
|
import sys
class _Registry(dict):
# These must be defined so that doctest gathering doesn't make
# pytest crash when trying to figure out what/where scrapi.registry is
__file__ = __file__
__name__ = __name__
def __init__(self):
dict.__init__(self)
def __getitem__(self, key):
try:
return dict.__getitem__(self, key)
except KeyError:
raise KeyError('No harvester named "{}"'.format(key))
@property
def beat_schedule(self):
from celery.schedules import crontab
return {
'run_{}'.format(name): {
'args': [name],
'schedule': crontab(**inst.run_at),
'task': 'scrapi.tasks.run_harvester',
}
for name, inst
in self.items()
}
sys.modules[__name__] = _Registry()
Make _Registry hashable so that django can import from scrapi
|
import sys
class _Registry(dict):
# These must be defined so that doctest gathering doesn't make
# pytest crash when trying to figure out what/where scrapi.registry is
__file__ = __file__
__name__ = __name__
def __init__(self):
dict.__init__(self)
def __hash__(self):
return hash(self.freeze(self))
def __getitem__(self, key):
try:
return dict.__getitem__(self, key)
except KeyError:
raise KeyError('No harvester named "{}"'.format(key))
def freeze(self, o):
if isinstance(o, dict):
return frozenset({k: self.freeze(v) for k, v in o.items()}.items())
elif isinstance(o, list):
return tuple(map(self.freeze, o))
return o
@property
def beat_schedule(self):
from celery.schedules import crontab
return {
'run_{}'.format(name): {
'args': [name],
'schedule': crontab(**inst.run_at),
'task': 'scrapi.tasks.run_harvester',
}
for name, inst
in self.items()
}
sys.modules[__name__] = _Registry()
|
<commit_before>import sys
class _Registry(dict):
# These must be defined so that doctest gathering doesn't make
# pytest crash when trying to figure out what/where scrapi.registry is
__file__ = __file__
__name__ = __name__
def __init__(self):
dict.__init__(self)
def __getitem__(self, key):
try:
return dict.__getitem__(self, key)
except KeyError:
raise KeyError('No harvester named "{}"'.format(key))
@property
def beat_schedule(self):
from celery.schedules import crontab
return {
'run_{}'.format(name): {
'args': [name],
'schedule': crontab(**inst.run_at),
'task': 'scrapi.tasks.run_harvester',
}
for name, inst
in self.items()
}
sys.modules[__name__] = _Registry()
<commit_msg>Make _Registry hashable so that django can import from scrapi<commit_after>
|
import sys
class _Registry(dict):
# These must be defined so that doctest gathering doesn't make
# pytest crash when trying to figure out what/where scrapi.registry is
__file__ = __file__
__name__ = __name__
def __init__(self):
dict.__init__(self)
def __hash__(self):
return hash(self.freeze(self))
def __getitem__(self, key):
try:
return dict.__getitem__(self, key)
except KeyError:
raise KeyError('No harvester named "{}"'.format(key))
def freeze(self, o):
if isinstance(o, dict):
return frozenset({k: self.freeze(v) for k, v in o.items()}.items())
elif isinstance(o, list):
return tuple(map(self.freeze, o))
return o
@property
def beat_schedule(self):
from celery.schedules import crontab
return {
'run_{}'.format(name): {
'args': [name],
'schedule': crontab(**inst.run_at),
'task': 'scrapi.tasks.run_harvester',
}
for name, inst
in self.items()
}
sys.modules[__name__] = _Registry()
|
import sys
class _Registry(dict):
# These must be defined so that doctest gathering doesn't make
# pytest crash when trying to figure out what/where scrapi.registry is
__file__ = __file__
__name__ = __name__
def __init__(self):
dict.__init__(self)
def __getitem__(self, key):
try:
return dict.__getitem__(self, key)
except KeyError:
raise KeyError('No harvester named "{}"'.format(key))
@property
def beat_schedule(self):
from celery.schedules import crontab
return {
'run_{}'.format(name): {
'args': [name],
'schedule': crontab(**inst.run_at),
'task': 'scrapi.tasks.run_harvester',
}
for name, inst
in self.items()
}
sys.modules[__name__] = _Registry()
Make _Registry hashable so that django can import from scrapiimport sys
class _Registry(dict):
# These must be defined so that doctest gathering doesn't make
# pytest crash when trying to figure out what/where scrapi.registry is
__file__ = __file__
__name__ = __name__
def __init__(self):
dict.__init__(self)
def __hash__(self):
return hash(self.freeze(self))
def __getitem__(self, key):
try:
return dict.__getitem__(self, key)
except KeyError:
raise KeyError('No harvester named "{}"'.format(key))
def freeze(self, o):
if isinstance(o, dict):
return frozenset({k: self.freeze(v) for k, v in o.items()}.items())
elif isinstance(o, list):
return tuple(map(self.freeze, o))
return o
@property
def beat_schedule(self):
from celery.schedules import crontab
return {
'run_{}'.format(name): {
'args': [name],
'schedule': crontab(**inst.run_at),
'task': 'scrapi.tasks.run_harvester',
}
for name, inst
in self.items()
}
sys.modules[__name__] = _Registry()
|
<commit_before>import sys
class _Registry(dict):
# These must be defined so that doctest gathering doesn't make
# pytest crash when trying to figure out what/where scrapi.registry is
__file__ = __file__
__name__ = __name__
def __init__(self):
dict.__init__(self)
def __getitem__(self, key):
try:
return dict.__getitem__(self, key)
except KeyError:
raise KeyError('No harvester named "{}"'.format(key))
@property
def beat_schedule(self):
from celery.schedules import crontab
return {
'run_{}'.format(name): {
'args': [name],
'schedule': crontab(**inst.run_at),
'task': 'scrapi.tasks.run_harvester',
}
for name, inst
in self.items()
}
sys.modules[__name__] = _Registry()
<commit_msg>Make _Registry hashable so that django can import from scrapi<commit_after>import sys
class _Registry(dict):
# These must be defined so that doctest gathering doesn't make
# pytest crash when trying to figure out what/where scrapi.registry is
__file__ = __file__
__name__ = __name__
def __init__(self):
dict.__init__(self)
def __hash__(self):
return hash(self.freeze(self))
def __getitem__(self, key):
try:
return dict.__getitem__(self, key)
except KeyError:
raise KeyError('No harvester named "{}"'.format(key))
def freeze(self, o):
if isinstance(o, dict):
return frozenset({k: self.freeze(v) for k, v in o.items()}.items())
elif isinstance(o, list):
return tuple(map(self.freeze, o))
return o
@property
def beat_schedule(self):
from celery.schedules import crontab
return {
'run_{}'.format(name): {
'args': [name],
'schedule': crontab(**inst.run_at),
'task': 'scrapi.tasks.run_harvester',
}
for name, inst
in self.items()
}
sys.modules[__name__] = _Registry()
|
96cc3b85a34b0047a9483b571aa358df52bcaed0
|
hitchdoc/recorder.py
|
hitchdoc/recorder.py
|
from hitchdoc.database import Database
from hitchdoc import exceptions
import pickle
import base64
class Recorder(object):
def __init__(self, story, sqlite_filename):
self._story = story
self._db = Database(sqlite_filename)
if self._db.Recording.filter(name=story.name).first() is not None:
self._db.Recording.filter(name=story.name).first().delete_instance(
recursive=True
)
self._model = self._db.Recording(
name=story.name,
filename=story.filename,
slug=story.slug,
properties=base64.b64encode(pickle.dumps(story.properties))
)
self._model.save(force_insert=True)
def step(self, name, **kwargs):
for key, value in kwargs.items():
assert type(key) is str
try:
pickle.dumps(value)
except TypeError:
raise exceptions.VarMustBePickleable(
"Can't use non-pickleable objects as vars."
)
new_step = self._db.Step(
recording=self._model,
name=name,
kwargs=base64.b64encode(pickle.dumps(kwargs))
)
new_step.save()
|
from hitchdoc.database import Database
from hitchdoc import exceptions
import pickle
import base64
class Recorder(object):
def __init__(self, story, sqlite_filename):
self._story = story
self._db = Database(sqlite_filename)
if self._db.Recording.filter(name=story.name).first() is not None:
self._db.Recording.filter(name=story.name).first().delete_instance(
recursive=True
)
self._model = self._db.Recording(
name=story.name,
filename=story.filename,
slug=story.slug,
properties=base64.b64encode(pickle.dumps(story.properties))
)
self._model.save(force_insert=True)
def step(self, step_name, **kwargs):
for key, value in kwargs.items():
assert type(key) is str
try:
pickle.dumps(value)
except TypeError:
raise exceptions.VarMustBePickleable(
"Can't use non-pickleable objects as vars."
)
new_step = self._db.Step(
recording=self._model,
step_name=step_name,
kwargs=base64.b64encode(pickle.dumps(kwargs))
)
new_step.save()
|
REFACTOR : Changed the name of step name from 'name' to 'step_name' to avoid clashing with a potential use of the word 'name' in kwargs.
|
REFACTOR : Changed the name of step name from 'name' to 'step_name' to avoid clashing with a potential use of the word 'name' in kwargs.
|
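The rename avoids a genuine collision: a positional parameter called name cannot coexist with a name key supplied through **kwargs. A minimal sketch of the clash and the fix (toy functions, not hitchdoc itself):
def step(name, **kwargs):
    return name, kwargs
try:
    step("login", name="alice")  # caller wants a recorded var called 'name'
except TypeError as exc:
    print(exc)  # step() got multiple values for argument 'name'
def step_fixed(step_name, **kwargs):
    return step_name, kwargs
print(step_fixed("login", name="alice"))  # ('login', {'name': 'alice'})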
Python
|
agpl-3.0
|
hitchtest/hitchdoc
|
from hitchdoc.database import Database
from hitchdoc import exceptions
import pickle
import base64
class Recorder(object):
def __init__(self, story, sqlite_filename):
self._story = story
self._db = Database(sqlite_filename)
if self._db.Recording.filter(name=story.name).first() is not None:
self._db.Recording.filter(name=story.name).first().delete_instance(
recursive=True
)
self._model = self._db.Recording(
name=story.name,
filename=story.filename,
slug=story.slug,
properties=base64.b64encode(pickle.dumps(story.properties))
)
self._model.save(force_insert=True)
def step(self, name, **kwargs):
for key, value in kwargs.items():
assert type(key) is str
try:
pickle.dumps(value)
except TypeError:
raise exceptions.VarMustBePickleable(
"Can't use non-pickleable objects as vars."
)
new_step = self._db.Step(
recording=self._model,
name=name,
kwargs=base64.b64encode(pickle.dumps(kwargs))
)
new_step.save()
REFACTOR : Changed the name of step name from 'name' to 'step_name' to avoid clashing with a potential use of the word 'name' in kwargs.
|
from hitchdoc.database import Database
from hitchdoc import exceptions
import pickle
import base64
class Recorder(object):
def __init__(self, story, sqlite_filename):
self._story = story
self._db = Database(sqlite_filename)
if self._db.Recording.filter(name=story.name).first() is not None:
self._db.Recording.filter(name=story.name).first().delete_instance(
recursive=True
)
self._model = self._db.Recording(
name=story.name,
filename=story.filename,
slug=story.slug,
properties=base64.b64encode(pickle.dumps(story.properties))
)
self._model.save(force_insert=True)
def step(self, step_name, **kwargs):
for key, value in kwargs.items():
assert type(key) is str
try:
pickle.dumps(value)
except TypeError:
raise exceptions.VarMustBePickleable(
"Can't use non-pickleable objects as vars."
)
new_step = self._db.Step(
recording=self._model,
step_name=step_name,
kwargs=base64.b64encode(pickle.dumps(kwargs))
)
new_step.save()
|
<commit_before>from hitchdoc.database import Database
from hitchdoc import exceptions
import pickle
import base64
class Recorder(object):
def __init__(self, story, sqlite_filename):
self._story = story
self._db = Database(sqlite_filename)
if self._db.Recording.filter(name=story.name).first() is not None:
self._db.Recording.filter(name=story.name).first().delete_instance(
recursive=True
)
self._model = self._db.Recording(
name=story.name,
filename=story.filename,
slug=story.slug,
properties=base64.b64encode(pickle.dumps(story.properties))
)
self._model.save(force_insert=True)
def step(self, name, **kwargs):
for key, value in kwargs.items():
assert type(key) is str
try:
pickle.dumps(value)
except TypeError:
raise exceptions.VarMustBePickleable(
"Can't use non-pickleable objects as vars."
)
new_step = self._db.Step(
recording=self._model,
name=name,
kwargs=base64.b64encode(pickle.dumps(kwargs))
)
new_step.save()
<commit_msg>REFACTOR : Changed the name of step name from 'name' to 'step_name' to avoid clashing with a potential use of the word 'name' in kwargs.<commit_after>
|
from hitchdoc.database import Database
from hitchdoc import exceptions
import pickle
import base64
class Recorder(object):
def __init__(self, story, sqlite_filename):
self._story = story
self._db = Database(sqlite_filename)
if self._db.Recording.filter(name=story.name).first() is not None:
self._db.Recording.filter(name=story.name).first().delete_instance(
recursive=True
)
self._model = self._db.Recording(
name=story.name,
filename=story.filename,
slug=story.slug,
properties=base64.b64encode(pickle.dumps(story.properties))
)
self._model.save(force_insert=True)
def step(self, step_name, **kwargs):
for key, value in kwargs.items():
assert type(key) is str
try:
pickle.dumps(value)
except TypeError:
raise exceptions.VarMustBePickleable(
"Can't use non-pickleable objects as vars."
)
new_step = self._db.Step(
recording=self._model,
step_name=step_name,
kwargs=base64.b64encode(pickle.dumps(kwargs))
)
new_step.save()
|
from hitchdoc.database import Database
from hitchdoc import exceptions
import pickle
import base64
class Recorder(object):
def __init__(self, story, sqlite_filename):
self._story = story
self._db = Database(sqlite_filename)
if self._db.Recording.filter(name=story.name).first() is not None:
self._db.Recording.filter(name=story.name).first().delete_instance(
recursive=True
)
self._model = self._db.Recording(
name=story.name,
filename=story.filename,
slug=story.slug,
properties=base64.b64encode(pickle.dumps(story.properties))
)
self._model.save(force_insert=True)
def step(self, name, **kwargs):
for key, value in kwargs.items():
assert type(key) is str
try:
pickle.dumps(value)
except TypeError:
raise exceptions.VarMustBePickleable(
"Can't use non-pickleable objects as vars."
)
new_step = self._db.Step(
recording=self._model,
name=name,
kwargs=base64.b64encode(pickle.dumps(kwargs))
)
new_step.save()
REFACTOR : Changed the name of step name from 'name' to 'step_name' to avoid clashing with a potential use of the word 'name' in kwargs.from hitchdoc.database import Database
from hitchdoc import exceptions
import pickle
import base64
class Recorder(object):
def __init__(self, story, sqlite_filename):
self._story = story
self._db = Database(sqlite_filename)
if self._db.Recording.filter(name=story.name).first() is not None:
self._db.Recording.filter(name=story.name).first().delete_instance(
recursive=True
)
self._model = self._db.Recording(
name=story.name,
filename=story.filename,
slug=story.slug,
properties=base64.b64encode(pickle.dumps(story.properties))
)
self._model.save(force_insert=True)
def step(self, step_name, **kwargs):
for key, value in kwargs.items():
assert type(key) is str
try:
pickle.dumps(value)
except TypeError:
raise exceptions.VarMustBePickleable(
"Can't use non-pickleable objects as vars."
)
new_step = self._db.Step(
recording=self._model,
step_name=step_name,
kwargs=base64.b64encode(pickle.dumps(kwargs))
)
new_step.save()
|
<commit_before>from hitchdoc.database import Database
from hitchdoc import exceptions
import pickle
import base64
class Recorder(object):
def __init__(self, story, sqlite_filename):
self._story = story
self._db = Database(sqlite_filename)
if self._db.Recording.filter(name=story.name).first() is not None:
self._db.Recording.filter(name=story.name).first().delete_instance(
recursive=True
)
self._model = self._db.Recording(
name=story.name,
filename=story.filename,
slug=story.slug,
properties=base64.b64encode(pickle.dumps(story.properties))
)
self._model.save(force_insert=True)
def step(self, name, **kwargs):
for key, value in kwargs.items():
assert type(key) is str
try:
pickle.dumps(value)
except TypeError:
raise exceptions.VarMustBePickleable(
"Can't use non-pickleable objects as vars."
)
new_step = self._db.Step(
recording=self._model,
name=name,
kwargs=base64.b64encode(pickle.dumps(kwargs))
)
new_step.save()
<commit_msg>REFACTOR : Changed the name of step name from 'name' to 'step_name' to avoid clashing with a potential use of the word 'name' in kwargs.<commit_after>from hitchdoc.database import Database
from hitchdoc import exceptions
import pickle
import base64
class Recorder(object):
def __init__(self, story, sqlite_filename):
self._story = story
self._db = Database(sqlite_filename)
if self._db.Recording.filter(name=story.name).first() is not None:
self._db.Recording.filter(name=story.name).first().delete_instance(
recursive=True
)
self._model = self._db.Recording(
name=story.name,
filename=story.filename,
slug=story.slug,
properties=base64.b64encode(pickle.dumps(story.properties))
)
self._model.save(force_insert=True)
def step(self, step_name, **kwargs):
for key, value in kwargs.items():
assert type(key) is str
try:
pickle.dumps(value)
except TypeError:
raise exceptions.VarMustBePickleable(
"Can't use non-pickleable objects as vars."
)
new_step = self._db.Step(
recording=self._model,
step_name=step_name,
kwargs=base64.b64encode(pickle.dumps(kwargs))
)
new_step.save()
|
9409b9da1392514b7da5db4d44a32b47d8452e67
|
play.py
|
play.py
|
import PyWXSB.XMLSchema as xs
import PyWXSB.Namespace as Namespace
from PyWXSB.generate import PythonGenerator as Generator
import sys
import traceback
from xml.dom import minidom
from xml.dom import Node
files = sys.argv[1:]
if 0 == len(files):
files = [ 'schemas/kml21.xsd' ]
Namespace.XMLSchema.modulePath('xs.datatypes')
for file in files:
try:
wxs = xs.schema().CreateFromDOM(minidom.parse(file))
ns = wxs.getTargetNamespace()
enum_prefix_map = [ ( 'colorModeEnum', 'CM' )
, ( 'styleStateEnum', 'SS' )
, ( 'itemIconStateEnum', 'IIS' )
, ( 'listItemTypeEnum', 'LIT' )
, ( 'unitsEnum', 'Units' )
]
for (std_name, enum_prefix) in enum_prefix_map:
cm = ns.lookupTypeDefinition(std_name)
if cm is not None:
facet = cm.facets().get(xs.facets.CF_enumeration, None)
if facet is not None:
facet.enumPrefix('%s_' % enum_prefix)
gen = Generator(ns, 'xs')
#print "\n".join(gen.generateDefinitions([ns.lookupTypeDefinition('formChoice')]))
print "\n".join(gen.generateDefinitions([ns.lookupTypeDefinition('viewRefreshModeEnum')]))
#print "\n".join(gen.generateDefinitions([ns.lookupTypeDefinition('NetworkLinkControlType')]))
#print "\n".join(gen.generateDefinitions(ns.typeDefinitions()))
except Exception, e:
sys.stderr.write("%s processing %s:\n" % (e.__class__, file))
traceback.print_exception(*sys.exc_info())
|
import PyWXSB.XMLSchema as xs
import PyWXSB.Namespace as Namespace
from PyWXSB.generate import PythonGenerator as Generator
import sys
import traceback
from xml.dom import minidom
from xml.dom import Node
files = sys.argv[1:]
if 0 == len(files):
files = [ 'schemas/kml21.xsd' ]
Namespace.XMLSchema.setModulePath('xs.datatypes')
for file in files:
try:
wxs = xs.schema().CreateFromDOM(minidom.parse(file))
print "\nComponents in the schema:"
for c in wxs.components():
cd = c.dependentComponents()
print 'Instance of %s depends on %d others' % (c.__class__.__name__, len(cd))
except Exception, e:
sys.stderr.write("%s processing %s:\n" % (e.__class__, file))
traceback.print_exception(*sys.exc_info())
|
Update to new namespace interface, walk components
|
Update to new namespace interface, walk components
|
Python
|
apache-2.0
|
jonfoster/pyxb2,jonfoster/pyxb-upstream-mirror,balanced/PyXB,pabigot/pyxb,balanced/PyXB,jonfoster/pyxb2,jonfoster/pyxb1,jonfoster/pyxb2,jonfoster/pyxb-upstream-mirror,CantemoInternal/pyxb,CantemoInternal/pyxb,jonfoster/pyxb-upstream-mirror,jonfoster/pyxb1,CantemoInternal/pyxb,pabigot/pyxb,balanced/PyXB
|
import PyWXSB.XMLSchema as xs
import PyWXSB.Namespace as Namespace
from PyWXSB.generate import PythonGenerator as Generator
import sys
import traceback
from xml.dom import minidom
from xml.dom import Node
files = sys.argv[1:]
if 0 == len(files):
files = [ 'schemas/kml21.xsd' ]
Namespace.XMLSchema.modulePath('xs.datatypes')
for file in files:
try:
wxs = xs.schema().CreateFromDOM(minidom.parse(file))
ns = wxs.getTargetNamespace()
enum_prefix_map = [ ( 'colorModeEnum', 'CM' )
, ( 'styleStateEnum', 'SS' )
, ( 'itemIconStateEnum', 'IIS' )
, ( 'listItemTypeEnum', 'LIT' )
, ( 'unitsEnum', 'Units' )
]
for (std_name, enum_prefix) in enum_prefix_map:
cm = ns.lookupTypeDefinition(std_name)
if cm is not None:
facet = cm.facets().get(xs.facets.CF_enumeration, None)
if facet is not None:
facet.enumPrefix('%s_' % enum_prefix)
gen = Generator(ns, 'xs')
#print "\n".join(gen.generateDefinitions([ns.lookupTypeDefinition('formChoice')]))
print "\n".join(gen.generateDefinitions([ns.lookupTypeDefinition('viewRefreshModeEnum')]))
#print "\n".join(gen.generateDefinitions([ns.lookupTypeDefinition('NetworkLinkControlType')]))
#print "\n".join(gen.generateDefinitions(ns.typeDefinitions()))
except Exception, e:
sys.stderr.write("%s processing %s:\n" % (e.__class__, file))
traceback.print_exception(*sys.exc_info())
Update to new namespace interface, walk components
|
import PyWXSB.XMLSchema as xs
import PyWXSB.Namespace as Namespace
from PyWXSB.generate import PythonGenerator as Generator
import sys
import traceback
from xml.dom import minidom
from xml.dom import Node
files = sys.argv[1:]
if 0 == len(files):
files = [ 'schemas/kml21.xsd' ]
Namespace.XMLSchema.setModulePath('xs.datatypes')
for file in files:
try:
wxs = xs.schema().CreateFromDOM(minidom.parse(file))
print "\nComponents in the schema:"
for c in wxs.components():
cd = c.dependentComponents()
print 'Instance of %s depends on %d others' % (c.__class__.__name__, len(cd))
except Exception, e:
sys.stderr.write("%s processing %s:\n" % (e.__class__, file))
traceback.print_exception(*sys.exc_info())
|
<commit_before>import PyWXSB.XMLSchema as xs
import PyWXSB.Namespace as Namespace
from PyWXSB.generate import PythonGenerator as Generator
import sys
import traceback
from xml.dom import minidom
from xml.dom import Node
files = sys.argv[1:]
if 0 == len(files):
files = [ 'schemas/kml21.xsd' ]
Namespace.XMLSchema.modulePath('xs.datatypes')
for file in files:
try:
wxs = xs.schema().CreateFromDOM(minidom.parse(file))
ns = wxs.getTargetNamespace()
enum_prefix_map = [ ( 'colorModeEnum', 'CM' )
, ( 'styleStateEnum', 'SS' )
, ( 'itemIconStateEnum', 'IIS' )
, ( 'listItemTypeEnum', 'LIT' )
, ( 'unitsEnum', 'Units' )
]
for (std_name, enum_prefix) in enum_prefix_map:
cm = ns.lookupTypeDefinition(std_name)
if cm is not None:
facet = cm.facets().get(xs.facets.CF_enumeration, None)
if facet is not None:
facet.enumPrefix('%s_' % enum_prefix)
gen = Generator(ns, 'xs')
#print "\n".join(gen.generateDefinitions([ns.lookupTypeDefinition('formChoice')]))
print "\n".join(gen.generateDefinitions([ns.lookupTypeDefinition('viewRefreshModeEnum')]))
#print "\n".join(gen.generateDefinitions([ns.lookupTypeDefinition('NetworkLinkControlType')]))
#print "\n".join(gen.generateDefinitions(ns.typeDefinitions()))
except Exception, e:
sys.stderr.write("%s processing %s:\n" % (e.__class__, file))
traceback.print_exception(*sys.exc_info())
<commit_msg>Update to new namespace interface, walk components<commit_after>
|
import PyWXSB.XMLSchema as xs
import PyWXSB.Namespace as Namespace
from PyWXSB.generate import PythonGenerator as Generator
import sys
import traceback
from xml.dom import minidom
from xml.dom import Node
files = sys.argv[1:]
if 0 == len(files):
files = [ 'schemas/kml21.xsd' ]
Namespace.XMLSchema.setModulePath('xs.datatypes')
for file in files:
try:
wxs = xs.schema().CreateFromDOM(minidom.parse(file))
print "\nComponents in the schema:"
for c in wxs.components():
cd = c.dependentComponents()
print 'Instance of %s depends on %d others' % (c.__class__.__name__, len(cd))
except Exception, e:
sys.stderr.write("%s processing %s:\n" % (e.__class__, file))
traceback.print_exception(*sys.exc_info())
|
import PyWXSB.XMLSchema as xs
import PyWXSB.Namespace as Namespace
from PyWXSB.generate import PythonGenerator as Generator
import sys
import traceback
from xml.dom import minidom
from xml.dom import Node
files = sys.argv[1:]
if 0 == len(files):
files = [ 'schemas/kml21.xsd' ]
Namespace.XMLSchema.modulePath('xs.datatypes')
for file in files:
try:
wxs = xs.schema().CreateFromDOM(minidom.parse(file))
ns = wxs.getTargetNamespace()
enum_prefix_map = [ ( 'colorModeEnum', 'CM' )
, ( 'styleStateEnum', 'SS' )
, ( 'itemIconStateEnum', 'IIS' )
, ( 'listItemTypeEnum', 'LIT' )
, ( 'unitsEnum', 'Units' )
]
for (std_name, enum_prefix) in enum_prefix_map:
cm = ns.lookupTypeDefinition(std_name)
if cm is not None:
facet = cm.facets().get(xs.facets.CF_enumeration, None)
if facet is not None:
facet.enumPrefix('%s_' % enum_prefix)
gen = Generator(ns, 'xs')
#print "\n".join(gen.generateDefinitions([ns.lookupTypeDefinition('formChoice')]))
print "\n".join(gen.generateDefinitions([ns.lookupTypeDefinition('viewRefreshModeEnum')]))
#print "\n".join(gen.generateDefinitions([ns.lookupTypeDefinition('NetworkLinkControlType')]))
#print "\n".join(gen.generateDefinitions(ns.typeDefinitions()))
except Exception, e:
sys.stderr.write("%s processing %s:\n" % (e.__class__, file))
traceback.print_exception(*sys.exc_info())
Update to new namespace interface, walk componentsimport PyWXSB.XMLSchema as xs
import PyWXSB.Namespace as Namespace
from PyWXSB.generate import PythonGenerator as Generator
import sys
import traceback
from xml.dom import minidom
from xml.dom import Node
files = sys.argv[1:]
if 0 == len(files):
files = [ 'schemas/kml21.xsd' ]
Namespace.XMLSchema.setModulePath('xs.datatypes')
for file in files:
try:
wxs = xs.schema().CreateFromDOM(minidom.parse(file))
print "\nComponents in the schema:"
for c in wxs.components():
cd = c.dependentComponents()
print 'Instance of %s depends on %d others' % (c.__class__.__name__, len(cd))
except Exception, e:
sys.stderr.write("%s processing %s:\n" % (e.__class__, file))
traceback.print_exception(*sys.exc_info())
|
<commit_before>import PyWXSB.XMLSchema as xs
import PyWXSB.Namespace as Namespace
from PyWXSB.generate import PythonGenerator as Generator
import sys
import traceback
from xml.dom import minidom
from xml.dom import Node
files = sys.argv[1:]
if 0 == len(files):
files = [ 'schemas/kml21.xsd' ]
Namespace.XMLSchema.modulePath('xs.datatypes')
for file in files:
try:
wxs = xs.schema().CreateFromDOM(minidom.parse(file))
ns = wxs.getTargetNamespace()
enum_prefix_map = [ ( 'colorModeEnum', 'CM' )
, ( 'styleStateEnum', 'SS' )
, ( 'itemIconStateEnum', 'IIS' )
, ( 'listItemTypeEnum', 'LIT' )
, ( 'unitsEnum', 'Units' )
]
for (std_name, enum_prefix) in enum_prefix_map:
cm = ns.lookupTypeDefinition(std_name)
if cm is not None:
facet = cm.facets().get(xs.facets.CF_enumeration, None)
if facet is not None:
facet.enumPrefix('%s_' % enum_prefix)
gen = Generator(ns, 'xs')
#print "\n".join(gen.generateDefinitions([ns.lookupTypeDefinition('formChoice')]))
print "\n".join(gen.generateDefinitions([ns.lookupTypeDefinition('viewRefreshModeEnum')]))
#print "\n".join(gen.generateDefinitions([ns.lookupTypeDefinition('NetworkLinkControlType')]))
#print "\n".join(gen.generateDefinitions(ns.typeDefinitions()))
except Exception, e:
sys.stderr.write("%s processing %s:\n" % (e.__class__, file))
traceback.print_exception(*sys.exc_info())
<commit_msg>Update to new namespace interface, walk components<commit_after>import PyWXSB.XMLSchema as xs
import PyWXSB.Namespace as Namespace
from PyWXSB.generate import PythonGenerator as Generator
import sys
import traceback
from xml.dom import minidom
from xml.dom import Node
files = sys.argv[1:]
if 0 == len(files):
files = [ 'schemas/kml21.xsd' ]
Namespace.XMLSchema.setModulePath('xs.datatypes')
for file in files:
try:
wxs = xs.schema().CreateFromDOM(minidom.parse(file))
print "\nComponents in the schema:"
for c in wxs.components():
cd = c.dependentComponents()
print 'Instance of %s depends on %d others' % (c.__class__.__name__, len(cd))
except Exception, e:
sys.stderr.write("%s processing %s:\n" % (e.__class__, file))
traceback.print_exception(*sys.exc_info())
|
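The record above replaces binding generation with a walk over every schema component, printing how many other components each one depends on. PyWXSB's real classes are not reproduced here; a hypothetical stand-in shows the same dependentComponents() counting pattern:
# Hypothetical stand-in for the component walk above; PyWXSB's schema
# classes are an assumption, only the dependency-count pattern is real.
class Component(object):
    def __init__(self, name, depends_on=()):
        self.name = name
        self._depends_on = list(depends_on)
    def dependentComponents(self):
        # Same accessor name as the record above calls on real components.
        return self._depends_on
a = Component('simpleTypeDefinition')
b = Component('complexTypeDefinition', depends_on=[a])
c = Component('elementDeclaration', depends_on=[a, b])
print("\nComponents in the schema:")
for comp in (a, b, c):
    cd = comp.dependentComponents()
    print('Instance of %s depends on %d others' % (comp.name, len(cd)))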
f870254cfed6f5ea0f88dae910f5c80b7f325e9a
|
freeze/urls.py
|
freeze/urls.py
|
# -*- coding: utf-8 -*-
from django.conf.urls import url
from freeze import views
urlpatterns = [
url(r'^download-static-site/$', views.download_static_site, name='freeze_download_static_site'),
url(r'^generate-static-site/$', views.generate_static_site, name='freeze_generate_static_site'),
]
|
# -*- coding: utf-8 -*-
import django
if django.VERSION < (2, 0):
from django.conf.urls import include, url as path
else:
from django.urls import include, path
from freeze import views
urlpatterns = [
path("download-static-site/", views.download_static_site, name="freeze_download_static_site"),
path("generate-static-site/", views.generate_static_site, name="freeze_generate_static_site"),
]
|
Support for newer versions of django
|
Support for newer versions of django
|
Python
|
mit
|
fabiocaccamo/django-freeze,fabiocaccamo/django-freeze,fabiocaccamo/django-freeze
|
# -*- coding: utf-8 -*-
from django.conf.urls import url
from freeze import views
urlpatterns = [
url(r'^download-static-site/$', views.download_static_site, name='freeze_download_static_site'),
url(r'^generate-static-site/$', views.generate_static_site, name='freeze_generate_static_site'),
]
Support for newer versions of django
|
# -*- coding: utf-8 -*-
import django
if django.VERSION < (2, 0):
from django.conf.urls import include, url as path
else:
from django.urls import include, path
from freeze import views
urlpatterns = [
path("download-static-site/", views.download_static_site, name="freeze_download_static_site"),
path("generate-static-site/", views.generate_static_site, name="freeze_generate_static_site"),
]
|
<commit_before># -*- coding: utf-8 -*-
from django.conf.urls import url
from freeze import views
urlpatterns = [
url(r'^download-static-site/$', views.download_static_site, name='freeze_download_static_site'),
url(r'^generate-static-site/$', views.generate_static_site, name='freeze_generate_static_site'),
]
<commit_msg>Support for newer versions of django<commit_after>
|
# -*- coding: utf-8 -*-
import django
if django.VERSION < (2, 0):
from django.conf.urls import include, url as path
else:
from django.urls import include, path
from freeze import views
urlpatterns = [
path("download-static-site/", views.download_static_site, name="freeze_download_static_site"),
path("generate-static-site/", views.generate_static_site, name="freeze_generate_static_site"),
]
|
# -*- coding: utf-8 -*-
from django.conf.urls import url
from freeze import views
urlpatterns = [
url(r'^download-static-site/$', views.download_static_site, name='freeze_download_static_site'),
url(r'^generate-static-site/$', views.generate_static_site, name='freeze_generate_static_site'),
]
Support for newer versions of django# -*- coding: utf-8 -*-
import django
if django.VERSION < (2, 0):
from django.conf.urls import include, url as path
else:
from django.urls import include, path
from freeze import views
urlpatterns = [
path("download-static-site/", views.download_static_site, name="freeze_download_static_site"),
path("generate-static-site/", views.generate_static_site, name="freeze_generate_static_site"),
]
|
<commit_before># -*- coding: utf-8 -*-
from django.conf.urls import url
from freeze import views
urlpatterns = [
url(r'^download-static-site/$', views.download_static_site, name='freeze_download_static_site'),
url(r'^generate-static-site/$', views.generate_static_site, name='freeze_generate_static_site'),
]
<commit_msg>Support for newer versions of django<commit_after># -*- coding: utf-8 -*-
import django
if django.VERSION < (2, 0):
from django.conf.urls import include, url as path
else:
from django.urls import include, path
from freeze import views
urlpatterns = [
path("download-static-site/", views.download_static_site, name="freeze_download_static_site"),
path("generate-static-site/", views.generate_static_site, name="freeze_generate_static_site"),
]
|
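The urls.py change above gates its imports on django.VERSION so a single urlpatterns block serves both the old regex-based url() API and the new path() API. A minimal standalone sketch of that gate, with a placeholder view standing in for the freeze views:
# Minimal sketch of the version gate used above. On Django >= 2.0 the real
# path() is imported; on older releases url() is aliased to the same name so
# the urlpatterns below stay identical. dummy_view is a placeholder, not
# django-freeze's real view.
import django
if django.VERSION < (2, 0):
    from django.conf.urls import url as path
else:
    from django.urls import path
def dummy_view(request):  # illustration only
    from django.http import HttpResponse
    return HttpResponse("ok")
urlpatterns = [
    path("download-static-site/", dummy_view, name="freeze_download_static_site"),
]
One caveat worth noting: on the url() side the plain route string is still compiled as an unanchored regex, so matching is looser than path()'s literal segments.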
0348ac3e341cbdba75eed29828c8b7c0a25a9a4a
|
services/flickr.py
|
services/flickr.py
|
import foauth.providers
class Flickr(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://www.flickr.com/'
docs_url = 'http://www.flickr.com/services/api/'
category = 'Pictures'
# URLs to interact with the API
request_token_url = 'http://www.flickr.com/services/oauth/request_token'
authorize_url = 'http://www.flickr.com/services/oauth/authorize'
access_token_url = 'http://www.flickr.com/services/oauth/access_token'
api_domain = 'secure.flickr.com'
available_permissions = [
(None, 'access your public and private photos'),
('write', 'upload, edit and replace your photos'),
('delete', 'upload, edit, replace and delete your photos'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Flickr, self).get_authorize_params(redirect_uri, scopes)
params['perms'] = scopes[0] or 'read'
return params
def get_user_id(self, key):
url = u'/services/rest/?method=flickr.people.getLimits'
url += u'&format=json&nojsoncallback=1'
r = self.api(key, self.api_domain, url)
return r.json()[u'person'][u'nsid']
|
import foauth.providers
class Flickr(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://www.flickr.com/'
docs_url = 'http://www.flickr.com/services/api/'
category = 'Pictures'
# URLs to interact with the API
request_token_url = 'http://www.flickr.com/services/oauth/request_token'
authorize_url = 'http://www.flickr.com/services/oauth/authorize'
access_token_url = 'http://www.flickr.com/services/oauth/access_token'
api_domain = 'api.flickr.com'
available_permissions = [
(None, 'access your public and private photos'),
('write', 'upload, edit and replace your photos'),
('delete', 'upload, edit, replace and delete your photos'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Flickr, self).get_authorize_params(redirect_uri, scopes)
params['perms'] = scopes[0] or 'read'
return params
def get_user_id(self, key):
url = u'/services/rest/?method=flickr.people.getLimits'
url += u'&format=json&nojsoncallback=1'
r = self.api(key, self.api_domain, url)
return r.json()[u'person'][u'nsid']
|
Move Flickr over to its newly-secured API domain
|
Move Flickr over to its newly-secured API domain
|
Python
|
bsd-3-clause
|
foauth/foauth.org,foauth/foauth.org,foauth/foauth.org
|
import foauth.providers
class Flickr(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://www.flickr.com/'
docs_url = 'http://www.flickr.com/services/api/'
category = 'Pictures'
# URLs to interact with the API
request_token_url = 'http://www.flickr.com/services/oauth/request_token'
authorize_url = 'http://www.flickr.com/services/oauth/authorize'
access_token_url = 'http://www.flickr.com/services/oauth/access_token'
api_domain = 'secure.flickr.com'
available_permissions = [
(None, 'access your public and private photos'),
('write', 'upload, edit and replace your photos'),
('delete', 'upload, edit, replace and delete your photos'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Flickr, self).get_authorize_params(redirect_uri, scopes)
params['perms'] = scopes[0] or 'read'
return params
def get_user_id(self, key):
url = u'/services/rest/?method=flickr.people.getLimits'
url += u'&format=json&nojsoncallback=1'
r = self.api(key, self.api_domain, url)
return r.json()[u'person'][u'nsid']
Move Flickr over to its newly-secured API domain
|
import foauth.providers
class Flickr(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://www.flickr.com/'
docs_url = 'http://www.flickr.com/services/api/'
category = 'Pictures'
# URLs to interact with the API
request_token_url = 'http://www.flickr.com/services/oauth/request_token'
authorize_url = 'http://www.flickr.com/services/oauth/authorize'
access_token_url = 'http://www.flickr.com/services/oauth/access_token'
api_domain = 'api.flickr.com'
available_permissions = [
(None, 'access your public and private photos'),
('write', 'upload, edit and replace your photos'),
('delete', 'upload, edit, replace and delete your photos'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Flickr, self).get_authorize_params(redirect_uri, scopes)
params['perms'] = scopes[0] or 'read'
return params
def get_user_id(self, key):
url = u'/services/rest/?method=flickr.people.getLimits'
url += u'&format=json&nojsoncallback=1'
r = self.api(key, self.api_domain, url)
return r.json()[u'person'][u'nsid']
|
<commit_before>import foauth.providers
class Flickr(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://www.flickr.com/'
docs_url = 'http://www.flickr.com/services/api/'
category = 'Pictures'
# URLs to interact with the API
request_token_url = 'http://www.flickr.com/services/oauth/request_token'
authorize_url = 'http://www.flickr.com/services/oauth/authorize'
access_token_url = 'http://www.flickr.com/services/oauth/access_token'
api_domain = 'secure.flickr.com'
available_permissions = [
(None, 'access your public and private photos'),
('write', 'upload, edit and replace your photos'),
('delete', 'upload, edit, replace and delete your photos'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Flickr, self).get_authorize_params(redirect_uri, scopes)
params['perms'] = scopes[0] or 'read'
return params
def get_user_id(self, key):
url = u'/services/rest/?method=flickr.people.getLimits'
url += u'&format=json&nojsoncallback=1'
r = self.api(key, self.api_domain, url)
return r.json()[u'person'][u'nsid']
<commit_msg>Move Flickr over to its newly-secured API domain<commit_after>
|
import foauth.providers
class Flickr(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://www.flickr.com/'
docs_url = 'http://www.flickr.com/services/api/'
category = 'Pictures'
# URLs to interact with the API
request_token_url = 'http://www.flickr.com/services/oauth/request_token'
authorize_url = 'http://www.flickr.com/services/oauth/authorize'
access_token_url = 'http://www.flickr.com/services/oauth/access_token'
api_domain = 'api.flickr.com'
available_permissions = [
(None, 'access your public and private photos'),
('write', 'upload, edit and replace your photos'),
('delete', 'upload, edit, replace and delete your photos'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Flickr, self).get_authorize_params(redirect_uri, scopes)
params['perms'] = scopes[0] or 'read'
return params
def get_user_id(self, key):
url = u'/services/rest/?method=flickr.people.getLimits'
url += u'&format=json&nojsoncallback=1'
r = self.api(key, self.api_domain, url)
return r.json()[u'person'][u'nsid']
|
import foauth.providers
class Flickr(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://www.flickr.com/'
docs_url = 'http://www.flickr.com/services/api/'
category = 'Pictures'
# URLs to interact with the API
request_token_url = 'http://www.flickr.com/services/oauth/request_token'
authorize_url = 'http://www.flickr.com/services/oauth/authorize'
access_token_url = 'http://www.flickr.com/services/oauth/access_token'
api_domain = 'secure.flickr.com'
available_permissions = [
(None, 'access your public and private photos'),
('write', 'upload, edit and replace your photos'),
('delete', 'upload, edit, replace and delete your photos'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Flickr, self).get_authorize_params(redirect_uri, scopes)
params['perms'] = scopes[0] or 'read'
return params
def get_user_id(self, key):
url = u'/services/rest/?method=flickr.people.getLimits'
url += u'&format=json&nojsoncallback=1'
r = self.api(key, self.api_domain, url)
return r.json()[u'person'][u'nsid']
Move Flickr over to its newly-secured API domainimport foauth.providers
class Flickr(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://www.flickr.com/'
docs_url = 'http://www.flickr.com/services/api/'
category = 'Pictures'
# URLs to interact with the API
request_token_url = 'http://www.flickr.com/services/oauth/request_token'
authorize_url = 'http://www.flickr.com/services/oauth/authorize'
access_token_url = 'http://www.flickr.com/services/oauth/access_token'
api_domain = 'api.flickr.com'
available_permissions = [
(None, 'access your public and private photos'),
('write', 'upload, edit and replace your photos'),
('delete', 'upload, edit, replace and delete your photos'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Flickr, self).get_authorize_params(redirect_uri, scopes)
params['perms'] = scopes[0] or 'read'
return params
def get_user_id(self, key):
url = u'/services/rest/?method=flickr.people.getLimits'
url += u'&format=json&nojsoncallback=1'
r = self.api(key, self.api_domain, url)
return r.json()[u'person'][u'nsid']
|
<commit_before>import foauth.providers
class Flickr(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://www.flickr.com/'
docs_url = 'http://www.flickr.com/services/api/'
category = 'Pictures'
# URLs to interact with the API
request_token_url = 'http://www.flickr.com/services/oauth/request_token'
authorize_url = 'http://www.flickr.com/services/oauth/authorize'
access_token_url = 'http://www.flickr.com/services/oauth/access_token'
api_domain = 'secure.flickr.com'
available_permissions = [
(None, 'access your public and private photos'),
('write', 'upload, edit and replace your photos'),
('delete', 'upload, edit, replace and delete your photos'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Flickr, self).get_authorize_params(redirect_uri, scopes)
params['perms'] = scopes[0] or 'read'
return params
def get_user_id(self, key):
url = u'/services/rest/?method=flickr.people.getLimits'
url += u'&format=json&nojsoncallback=1'
r = self.api(key, self.api_domain, url)
return r.json()[u'person'][u'nsid']
<commit_msg>Move Flickr over to its newly-secured API domain<commit_after>import foauth.providers
class Flickr(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'http://www.flickr.com/'
docs_url = 'http://www.flickr.com/services/api/'
category = 'Pictures'
# URLs to interact with the API
request_token_url = 'http://www.flickr.com/services/oauth/request_token'
authorize_url = 'http://www.flickr.com/services/oauth/authorize'
access_token_url = 'http://www.flickr.com/services/oauth/access_token'
api_domain = 'api.flickr.com'
available_permissions = [
(None, 'access your public and private photos'),
('write', 'upload, edit and replace your photos'),
('delete', 'upload, edit, replace and delete your photos'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Flickr, self).get_authorize_params(redirect_uri, scopes)
params['perms'] = scopes[0] or 'read'
return params
def get_user_id(self, key):
url = u'/services/rest/?method=flickr.people.getLimits'
url += u'&format=json&nojsoncallback=1'
r = self.api(key, self.api_domain, url)
return r.json()[u'person'][u'nsid']
|
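get_user_id() above ends by indexing the JSON body as r.json()[u'person'][u'nsid']. A tiny demo of that extraction against a canned payload; only the person/nsid shape comes from the record, the extra key is made up:
# Demo of the user-id lookup above using a canned response rather than a
# live Flickr call; the payload shape mirrors what get_user_id() indexes.
fake_response = {u'person': {u'nsid': u'12345678@N00', u'photos': {}}}
def extract_user_id(payload):
    # Same lookup as get_user_id() in the record above.
    return payload[u'person'][u'nsid']
print(extract_user_id(fake_response))  # 12345678@N00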
5a4cf095a3eda5127ca54f8d293162740b836158
|
services/heroku.py
|
services/heroku.py
|
import foauth.providers
class Heroku(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'https://heroku.com/'
docs_url = 'https://devcenter.heroku.com/articles/platform-api-reference'
category = 'Code'
# URLs to interact with the API
authorize_url = 'https://id.heroku.com/oauth/authorize'
access_token_url = 'https://id.heroku.com/oauth/token'
api_domain = 'api.heroku.com'
available_permissions = [
('identity', 'read your account information'),
('read', 'read all of your apps and resources, excluding configuration values'),
('write', 'write to all of your apps and resources, excluding configuration values'),
('read-protected', 'read all of your apps and resources, including configuration values'),
('write-protected', 'write to all of your apps and resources, including configuration values'),
('global', 'read and write to all of your account, apps and resources'),
]
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/account')
return unicode(r.json()[u'id'])
|
import foauth.providers
class Heroku(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'https://heroku.com/'
docs_url = 'https://devcenter.heroku.com/articles/platform-api-reference'
category = 'Code'
# URLs to interact with the API
authorize_url = 'https://id.heroku.com/oauth/authorize'
access_token_url = 'https://id.heroku.com/oauth/token'
api_domain = 'api.heroku.com'
available_permissions = [
(None, 'read your account information'),
('read', 'read all of your apps and resources, excluding configuration values'),
('write', 'write to all of your apps and resources, excluding configuration values'),
('read-protected', 'read all of your apps and resources, including configuration values'),
('write-protected', 'write to all of your apps and resources, including configuration values'),
('global', 'read and write to all of your account, apps and resources'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Heroku, self).get_authorize_params(redirect_uri, scopes)
params['scope'] = scopes[0] or 'identity'
return params
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/account')
return unicode(r.json()[u'id'])
|
Rewrite Heroku's scope handling a bit to better match reality
|
Rewrite Heroku's scope handling a bit to better match reality
|
Python
|
bsd-3-clause
|
foauth/foauth.org,foauth/foauth.org,foauth/foauth.org
|
import foauth.providers
class Heroku(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'https://heroku.com/'
docs_url = 'https://devcenter.heroku.com/articles/platform-api-reference'
category = 'Code'
# URLs to interact with the API
authorize_url = 'https://id.heroku.com/oauth/authorize'
access_token_url = 'https://id.heroku.com/oauth/token'
api_domain = 'api.heroku.com'
available_permissions = [
('identity', 'read your account information'),
('read', 'read all of your apps and resources, excluding configuration values'),
('write', 'write to all of your apps and resources, excluding configuration values'),
('read-protected', 'read all of your apps and resources, including configuration values'),
('write-protected', 'write to all of your apps and resources, including configuration values'),
('global', 'read and write to all of your account, apps and resources'),
]
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/account')
return unicode(r.json()[u'id'])
Rewrite Heroku's scope handling a bit to better match reality
|
import foauth.providers
class Heroku(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'https://heroku.com/'
docs_url = 'https://devcenter.heroku.com/articles/platform-api-reference'
category = 'Code'
# URLs to interact with the API
authorize_url = 'https://id.heroku.com/oauth/authorize'
access_token_url = 'https://id.heroku.com/oauth/token'
api_domain = 'api.heroku.com'
available_permissions = [
(None, 'read your account information'),
('read', 'read all of your apps and resources, excluding configuration values'),
('write', 'write to all of your apps and resources, excluding configuration values'),
('read-protected', 'read all of your apps and resources, including configuration values'),
('write-protected', 'write to all of your apps and resources, including configuration values'),
('global', 'read and write to all of your account, apps and resources'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Heroku, self).get_authorize_params(redirect_uri, scopes)
params['scope'] = scopes[0] or 'identity'
return params
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/account')
return unicode(r.json()[u'id'])
|
<commit_before>import foauth.providers
class Heroku(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'https://heroku.com/'
docs_url = 'https://devcenter.heroku.com/articles/platform-api-reference'
category = 'Code'
# URLs to interact with the API
authorize_url = 'https://id.heroku.com/oauth/authorize'
access_token_url = 'https://id.heroku.com/oauth/token'
api_domain = 'api.heroku.com'
available_permissions = [
('identity', 'read your account information'),
('read', 'read all of your apps and resources, excluding configuration values'),
('write', 'write to all of your apps and resources, excluding configuration values'),
('read-protected', 'read all of your apps and resources, including configuration values'),
('write-protected', 'write to all of your apps and resources, including configuration values'),
('global', 'read and write to all of your account, apps and resources'),
]
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/account')
return unicode(r.json()[u'id'])
<commit_msg>Rewrite Heroku's scope handling a bit to better match reality<commit_after>
|
import foauth.providers
class Heroku(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'https://heroku.com/'
docs_url = 'https://devcenter.heroku.com/articles/platform-api-reference'
category = 'Code'
# URLs to interact with the API
authorize_url = 'https://id.heroku.com/oauth/authorize'
access_token_url = 'https://id.heroku.com/oauth/token'
api_domain = 'api.heroku.com'
available_permissions = [
(None, 'read your account information'),
('read', 'read all of your apps and resources, excluding configuration values'),
('write', 'write to all of your apps and resources, excluding configuration values'),
('read-protected', 'read all of your apps and resources, including configuration values'),
('write-protected', 'write to all of your apps and resources, including configuration values'),
('global', 'read and write to all of your account, apps and resources'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Heroku, self).get_authorize_params(redirect_uri, scopes)
params['scope'] = scopes[0] or 'identity'
return params
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/account')
return unicode(r.json()[u'id'])
|
import foauth.providers
class Heroku(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'https://heroku.com/'
docs_url = 'https://devcenter.heroku.com/articles/platform-api-reference'
category = 'Code'
# URLs to interact with the API
authorize_url = 'https://id.heroku.com/oauth/authorize'
access_token_url = 'https://id.heroku.com/oauth/token'
api_domain = 'api.heroku.com'
available_permissions = [
('identity', 'read your account information'),
('read', 'read all of your apps and resources, excluding configuration values'),
('write', 'write to all of your apps and resources, excluding configuration values'),
('read-protected', 'read all of your apps and resources, including configuration values'),
('write-protected', 'write to all of your apps and resources, including configuration values'),
('global', 'read and write to all of your account, apps and resources'),
]
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/account')
return unicode(r.json()[u'id'])
Rewrite Heroku's scope handling a bit to better match realityimport foauth.providers
class Heroku(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'https://heroku.com/'
docs_url = 'https://devcenter.heroku.com/articles/platform-api-reference'
category = 'Code'
# URLs to interact with the API
authorize_url = 'https://id.heroku.com/oauth/authorize'
access_token_url = 'https://id.heroku.com/oauth/token'
api_domain = 'api.heroku.com'
available_permissions = [
(None, 'read your account information'),
('read', 'read all of your apps and resources, excluding configuration values'),
('write', 'write to all of your apps and resources, excluding configuration values'),
('read-protected', 'read all of your apps and resources, including configuration values'),
('write-protected', 'write to all of your apps and resources, including configuration values'),
('global', 'read and write to all of your account, apps and resources'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Heroku, self).get_authorize_params(redirect_uri, scopes)
params['scope'] = scopes[0] or 'identity'
return params
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/account')
return unicode(r.json()[u'id'])
|
<commit_before>import foauth.providers
class Heroku(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'https://heroku.com/'
docs_url = 'https://devcenter.heroku.com/articles/platform-api-reference'
category = 'Code'
# URLs to interact with the API
authorize_url = 'https://id.heroku.com/oauth/authorize'
access_token_url = 'https://id.heroku.com/oauth/token'
api_domain = 'api.heroku.com'
available_permissions = [
('identity', 'read your account information'),
('read', 'read all of your apps and resources, excluding configuration values'),
('write', 'write to all of your apps and resources, excluding configuration values'),
('read-protected', 'read all of your apps and resources, including configuration values'),
('write-protected', 'write to all of your apps and resources, including configuration values'),
('global', 'read and write to all of your account, apps and resources'),
]
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/account')
return unicode(r.json()[u'id'])
<commit_msg>Rewrite Heroku's scope handling a bit to better match reality<commit_after>import foauth.providers
class Heroku(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'https://heroku.com/'
docs_url = 'https://devcenter.heroku.com/articles/platform-api-reference'
category = 'Code'
# URLs to interact with the API
authorize_url = 'https://id.heroku.com/oauth/authorize'
access_token_url = 'https://id.heroku.com/oauth/token'
api_domain = 'api.heroku.com'
available_permissions = [
(None, 'read your account information'),
('read', 'read all of your apps and resources, excluding configuration values'),
('write', 'write to all of your apps and resources, excluding configuration values'),
('read-protected', 'read all of your apps and resources, including configuration values'),
('write-protected', 'write to all of your apps and resources, including configuration values'),
('global', 'read and write to all of your account, apps and resources'),
]
permissions_widget = 'radio'
def get_authorize_params(self, redirect_uri, scopes):
params = super(Heroku, self).get_authorize_params(redirect_uri, scopes)
params['scope'] = scopes[0] or 'identity'
return params
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/account')
return unicode(r.json()[u'id'])
|
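The Heroku rewrite above moves the implicit 'identity' scope behind a None radio option and restores it at authorize time via scopes[0] or 'identity'. The fallback in isolation, with the OAuth2 superclass params replaced by a plain dict:
# Standalone sketch of the scope fallback above: a None scope from the radio
# widget is translated to Heroku's implicit 'identity' scope when building
# the authorize params. base_params stands in for the superclass call.
def get_authorize_params(scopes, base_params=None):
    params = dict(base_params or {})
    # scopes[0] is None when the user picked the default radio option.
    params['scope'] = scopes[0] or 'identity'
    return params
print(get_authorize_params([None]))      # {'scope': 'identity'}
print(get_authorize_params(['global']))  # {'scope': 'global'}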
93d33a8b3c618cb809640cfa010a4a34a43bf91f
|
api/scripts/add_adaptive_tests.py
|
api/scripts/add_adaptive_tests.py
|
import json, sqlite3, csv, sys
connection = sqlite3.connect('./db/dev.sqlite3')
filename = sys.argv[1]
if filename.endswith('json'):
with open(filename) as f:
scenarios = json.load(f)['scenarios']
else:
with open(filename) as csvfile:
reader = csv.reader(csvfile)
scenarios = []
for line in reader:
scenarios.append(line)
c = connection.cursor()
c.executemany('INSERT INTO scenarios VALUES (NULL, ?, ?, ?, datetime("now"), datetime("now"))', scenarios)
connection.commit()
connection.close()
|
import json, sqlite3, csv, sys
connection = sqlite3.connect('../db/dev.sqlite3')
filename = sys.argv[1]
if filename.endswith('json'):
with open(filename) as f:
scenarios = json.load(f)['scenarios']
else:
with open(filename) as csvfile:
reader = csv.reader(csvfile)
scenarios = []
for line in reader:
scenarios.append(line)
c = connection.cursor()
c.executemany('INSERT INTO scenarios VALUES (NULL, ?, ?, ?, datetime("now"), datetime("now"))', scenarios)
connection.commit()
connection.close()
|
Fix adaptive tests loading script
|
Fix adaptive tests loading script
|
Python
|
agpl-3.0
|
sgmap/pix,sgmap/pix,sgmap/pix,sgmap/pix
|
import json, sqlite3, csv, sys
connection = sqlite3.connect('./db/dev.sqlite3')
filename = sys.argv[1]
if filename.endswith('json'):
with open(filename) as f:
scenarios = json.load(f)['scenarios']
else:
with open(filename) as csvfile:
reader = csv.reader(csvfile)
scenarios = []
for line in reader:
scenarios.append(line)
c = connection.cursor()
c.executemany('INSERT INTO scenarios VALUES (NULL, ?, ?, ?, datetime("now"), datetime("now"))', scenarios)
connection.commit()
connection.close()
Fix adaptive tests loading script
|
import json, sqlite3, csv, sys
connection = sqlite3.connect('../db/dev.sqlite3')
filename = sys.argv[1]
if filename.endswith('json'):
with open(filename) as f:
scenarios = json.load(f)['scenarios']
else:
with open(filename) as csvfile:
reader = csv.reader(csvfile)
scenarios = []
for line in reader:
scenarios.append(line)
c = connection.cursor()
c.executemany('INSERT INTO scenarios VALUES (NULL, ?, ?, ?, datetime("now"), datetime("now"))', scenarios)
connection.commit()
connection.close()
|
<commit_before>import json, sqlite3, csv, sys
connection = sqlite3.connect('./db/dev.sqlite3')
filename = sys.argv[1]
if filename.endswith('json'):
with open(filename) as f:
scenarios = json.load(f)['scenarios']
else:
with open(filename) as csvfile:
reader = csv.reader(csvfile)
scenarios = []
for line in reader:
scenarios.append(line)
c = connection.cursor()
c.executemany('INSERT INTO scenarios VALUES (NULL, ?, ?, ?, datetime("now"), datetime("now"))', scenarios)
connection.commit()
connection.close()
<commit_msg>Fix adaptive tests loading script<commit_after>
|
import json, sqlite3, csv, sys
connection = sqlite3.connect('../db/dev.sqlite3')
filename = sys.argv[1]
if filename.endswith('json'):
with open(filename) as f:
scenarios = json.load(f)['scenarios']
else:
with open(filename) as csvfile:
reader = csv.reader(csvfile)
scenarios = []
for line in reader:
scenarios.append(line)
c = connection.cursor()
c.executemany('INSERT INTO scenarios VALUES (NULL, ?, ?, ?, datetime("now"), datetime("now"))', scenarios)
connection.commit()
connection.close()
|
import json, sqlite3, csv, sys
connection = sqlite3.connect('./db/dev.sqlite3')
filename = sys.argv[1]
if filename.endswith('json'):
with open(filename) as f:
scenarios = json.load(f)['scenarios']
else:
with open(filename) as csvfile:
reader = csv.reader(csvfile)
scenarios = []
for line in reader:
scenarios.append(line)
c = connection.cursor()
c.executemany('INSERT INTO scenarios VALUES (NULL, ?, ?, ?, datetime("now"), datetime("now"))', scenarios)
connection.commit()
connection.close()
Fix adaptive tests loading scriptimport json, sqlite3, csv, sys
connection = sqlite3.connect('../db/dev.sqlite3')
filename = sys.argv[1]
if filename.endswith('json'):
with open(filename) as f:
scenarios = json.load(f)['scenarios']
else:
with open(filename) as csvfile:
reader = csv.reader(csvfile)
scenarios = []
for line in reader:
scenarios.append(line)
c = connection.cursor()
c.executemany('INSERT INTO scenarios VALUES (NULL, ?, ?, ?, datetime("now"), datetime("now"))', scenarios)
connection.commit()
connection.close()
|
<commit_before>import json, sqlite3, csv, sys
connection = sqlite3.connect('./db/dev.sqlite3')
filename = sys.argv[1]
if filename.endswith('json'):
with open(filename) as f:
scenarios = json.load(f)['scenarios']
else:
with open(filename) as csvfile:
reader = csv.reader(csvfile)
scenarios = []
for line in reader:
scenarios.append(line)
c = connection.cursor()
c.executemany('INSERT INTO scenarios VALUES (NULL, ?, ?, ?, datetime("now"), datetime("now"))', scenarios)
connection.commit()
connection.close()
<commit_msg>Fix adaptive tests loading script<commit_after>import json, sqlite3, csv, sys
connection = sqlite3.connect('../db/dev.sqlite3')
filename = sys.argv[1]
if filename.endswith('json'):
with open(filename) as f:
scenarios = json.load(f)['scenarios']
else:
with open(filename) as csvfile:
reader = csv.reader(csvfile)
scenarios = []
for line in reader:
scenarios.append(line)
c = connection.cursor()
c.executemany('INSERT INTO scenarios VALUES (NULL, ?, ?, ?, datetime("now"), datetime("now"))', scenarios)
connection.commit()
connection.close()
|
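The loader above feeds three-column rows, taken from either the JSON 'scenarios' list or raw CSV lines, into a single executemany() call and lets SQLite fill the id and timestamps. An in-memory demo of the same insert shape; the scenarios schema here is an assumption chosen to fit the placeholders:
# In-memory demo of the executemany pattern above; the table layout is an
# assumption matching the NULL id, three ? values and two timestamps.
import sqlite3
connection = sqlite3.connect(':memory:')
c = connection.cursor()
c.execute('CREATE TABLE scenarios '
          '(id INTEGER PRIMARY KEY, name TEXT, body TEXT, level TEXT, '
          'created_at TEXT, updated_at TEXT)')
scenarios = [('intro', 'first scenario', 'easy'),
             ('follow-up', 'second scenario', 'hard')]
c.executemany('INSERT INTO scenarios VALUES '
              '(NULL, ?, ?, ?, datetime("now"), datetime("now"))', scenarios)
connection.commit()
print(c.execute('SELECT COUNT(*) FROM scenarios').fetchone()[0])  # 2
connection.close()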
33facaa6e656ecc30233d831ca8c8d1f2abc6d03
|
src/tmserver/extensions/__init__.py
|
src/tmserver/extensions/__init__.py
|
from auth import jwt
from spark import Spark
spark = Spark()
from gc3pie import GC3Pie
gc3pie = GC3Pie()
from flask.ext.uwsgi_websocket import GeventWebSocket
websocket = GeventWebSocket()
from flask.ext.redis import FlaskRedis
redis_store = FlaskRedis()
|
from auth import jwt
from spark import Spark
spark = Spark()
from gc3pie import GC3Pie
gc3pie = GC3Pie()
from flask_uwsgi_websocket import GeventWebSocket
websocket = GeventWebSocket()
from flask_redis import FlaskRedis
redis_store = FlaskRedis()
|
Update deprecated flask extension code
|
Update deprecated flask extension code
|
Python
|
agpl-3.0
|
TissueMAPS/TmServer
|
from auth import jwt
from spark import Spark
spark = Spark()
from gc3pie import GC3Pie
gc3pie = GC3Pie()
from flask.ext.uwsgi_websocket import GeventWebSocket
websocket = GeventWebSocket()
from flask.ext.redis import FlaskRedis
redis_store = FlaskRedis()
Update deprecated flask extension code
|
from auth import jwt
from spark import Spark
spark = Spark()
from gc3pie import GC3Pie
gc3pie = GC3Pie()
from flask_uwsgi_websocket import GeventWebSocket
websocket = GeventWebSocket()
from flask_redis import FlaskRedis
redis_store = FlaskRedis()
|
<commit_before>from auth import jwt
from spark import Spark
spark = Spark()
from gc3pie import GC3Pie
gc3pie = GC3Pie()
from flask.ext.uwsgi_websocket import GeventWebSocket
websocket = GeventWebSocket()
from flask.ext.redis import FlaskRedis
redis_store = FlaskRedis()
<commit_msg>Update deprecated flask extension code<commit_after>
|
from auth import jwt
from spark import Spark
spark = Spark()
from gc3pie import GC3Pie
gc3pie = GC3Pie()
from flask_uwsgi_websocket import GeventWebSocket
websocket = GeventWebSocket()
from flask_redis import FlaskRedis
redis_store = FlaskRedis()
|
from auth import jwt
from spark import Spark
spark = Spark()
from gc3pie import GC3Pie
gc3pie = GC3Pie()
from flask.ext.uwsgi_websocket import GeventWebSocket
websocket = GeventWebSocket()
from flask.ext.redis import FlaskRedis
redis_store = FlaskRedis()
Update deprecated flask extension codefrom auth import jwt
from spark import Spark
spark = Spark()
from gc3pie import GC3Pie
gc3pie = GC3Pie()
from flask_uwsgi_websocket import GeventWebSocket
websocket = GeventWebSocket()
from flask_redis import FlaskRedis
redis_store = FlaskRedis()
|
<commit_before>from auth import jwt
from spark import Spark
spark = Spark()
from gc3pie import GC3Pie
gc3pie = GC3Pie()
from flask.ext.uwsgi_websocket import GeventWebSocket
websocket = GeventWebSocket()
from flask.ext.redis import FlaskRedis
redis_store = FlaskRedis()
<commit_msg>Update deprecated flask extension code<commit_after>from auth import jwt
from spark import Spark
spark = Spark()
from gc3pie import GC3Pie
gc3pie = GC3Pie()
from flask_uwsgi_websocket import GeventWebSocket
websocket = GeventWebSocket()
from flask_redis import FlaskRedis
redis_store = FlaskRedis()
|
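The extension change above drops the flask.ext.* namespace, which Flask deprecated in 0.11 and removed in 1.0, in favour of each package's canonical module name. Where both spellings must be tolerated during a migration, a hedged import shim works:
# Hedged compatibility shim for the flask.ext removal above: prefer the
# canonical module name and fall back to the legacy namespace on pre-1.0
# Flask installs. flask_redis is a real package; having it installed is
# assumed.
try:
    from flask_redis import FlaskRedis
except ImportError:
    from flask.ext.redis import FlaskRedis  # pre-1.0 Flask only
redis_store = FlaskRedis()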
5e54e38ec6fc06aac08f3b900fe728b353b6a052
|
gpioCleanup.py
|
gpioCleanup.py
|
import RPi.GPIO as GPIO
GPIO.setup(16, GPIO.IN)
GPIO.setup(20, GPIO.IN)
GPIO.setup(23, GPIO.IN)
GPIO.setup(18, GPIO.IN)
GPIO.setup(17, GPIO.IN)
GPIO.setup(27, GPIO.IN)
GPIO.setup(5, GPIO.IN)
GPIO.cleanup()
|
import RPi.GPIO as GPIO
GPIO.setmode(GPIO.BCM)
GPIO.setup(16, GPIO.IN)
GPIO.setup(20, GPIO.IN)
GPIO.setup(23, GPIO.IN)
GPIO.setup(18, GPIO.IN)
GPIO.setup(17, GPIO.IN)
GPIO.setup(27, GPIO.IN)
GPIO.setup(5, GPIO.IN)
GPIO.cleanup()
|
Add gpio clean up tool
|
Add gpio clean up tool
|
Python
|
mit
|
azmiik/tweetBooth
|
import RPi.GPIO as GPIO
GPIO.setup(16, GPIO.IN)
GPIO.setup(20, GPIO.IN)
GPIO.setup(23, GPIO.IN)
GPIO.setup(18, GPIO.IN)
GPIO.setup(17, GPIO.IN)
GPIO.setup(27, GPIO.IN)
GPIO.setup(5, GPIO.IN)
GPIO.cleanup()
Add gpio clean up tool
|
import RPi.GPIO as GPIO
GPIO.setmode(GPIO.BCM)
GPIO.setup(16, GPIO.IN)
GPIO.setup(20, GPIO.IN)
GPIO.setup(23, GPIO.IN)
GPIO.setup(18, GPIO.IN)
GPIO.setup(17, GPIO.IN)
GPIO.setup(27, GPIO.IN)
GPIO.setup(5, GPIO.IN)
GPIO.cleanup()
|
<commit_before>import RPi.GPIO as GPIO
GPIO.setup(16, GPIO.IN)
GPIO.setup(20, GPIO.IN)
GPIO.setup(23, GPIO.IN)
GPIO.setup(18, GPIO.IN)
GPIO.setup(17, GPIO.IN)
GPIO.setup(27, GPIO.IN)
GPIO.setup(5, GPIO.IN)
GPIO.cleanup()
<commit_msg>Add gpio clean up tool<commit_after>
|
import RPi.GPIO as GPIO
GPIO.setmode(GPIO.BCM)
GPIO.setup(16, GPIO.IN)
GPIO.setup(20, GPIO.IN)
GPIO.setup(23, GPIO.IN)
GPIO.setup(18, GPIO.IN)
GPIO.setup(17, GPIO.IN)
GPIO.setup(27, GPIO.IN)
GPIO.setup(5, GPIO.IN)
GPIO.cleanup()
|
import RPi.GPIO as GPIO
GPIO.setup(16, GPIO.IN)
GPIO.setup(20, GPIO.IN)
GPIO.setup(23, GPIO.IN)
GPIO.setup(18, GPIO.IN)
GPIO.setup(17, GPIO.IN)
GPIO.setup(27, GPIO.IN)
GPIO.setup(5, GPIO.IN)
GPIO.cleanup()
Add gpio clean up toolimport RPi.GPIO as GPIO
GPIO.setmode(GPIO.BCM)
GPIO.setup(16, GPIO.IN)
GPIO.setup(20, GPIO.IN)
GPIO.setup(23, GPIO.IN)
GPIO.setup(18, GPIO.IN)
GPIO.setup(17, GPIO.IN)
GPIO.setup(27, GPIO.IN)
GPIO.setup(5, GPIO.IN)
GPIO.cleanup()
|
<commit_before>import RPi.GPIO as GPIO
GPIO.setup(16, GPIO.IN)
GPIO.setup(20, GPIO.IN)
GPIO.setup(23, GPIO.IN)
GPIO.setup(18, GPIO.IN)
GPIO.setup(17, GPIO.IN)
GPIO.setup(27, GPIO.IN)
GPIO.setup(5, GPIO.IN)
GPIO.cleanup()
<commit_msg>Add gpio clean up tool<commit_after>import RPi.GPIO as GPIO
GPIO.setmode(GPIO.BCM)
GPIO.setup(16, GPIO.IN)
GPIO.setup(20, GPIO.IN)
GPIO.setup(23, GPIO.IN)
GPIO.setup(18, GPIO.IN)
GPIO.setup(17, GPIO.IN)
GPIO.setup(27, GPIO.IN)
GPIO.setup(5, GPIO.IN)
GPIO.cleanup()
|
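The fix above matters because RPi.GPIO refuses setup() calls until a pin-numbering mode is chosen, so setmode(GPIO.BCM) must come first. A hedged variant of the same tool that drives the pins from a list and guarantees cleanup even if a setup() call fails:
# Hedged rewrite of the cleanup tool above: same BCM pins, driven from a
# list and wrapped in try/finally so GPIO.cleanup() always runs. Requires a
# Raspberry Pi with RPi.GPIO installed.
import RPi.GPIO as GPIO
PINS = [16, 20, 23, 18, 17, 27, 5]
GPIO.setmode(GPIO.BCM)  # must precede any GPIO.setup() call
try:
    for pin in PINS:
        GPIO.setup(pin, GPIO.IN)
finally:
    GPIO.cleanup()  # release every channel this script touched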
923c994fe9a7b02e1939b83ebeefc296cd16b607
|
lib/proc_query.py
|
lib/proc_query.py
|
import re
import snmpy
class proc_query(snmpy.plugin):
def create(self):
for key, val in sorted(self.conf['objects'].items()):
extra = {
'run': self.gather,
'start': val.get('start', 0),
'regex': re.compile(val['regex']),
}
self.data['1.%s' % key] = 'string', val['label']
self.data['2.%s' % key] = val['type'], val.get('start', 0), extra
def gather(self, obj):
text = open('/proc/%s' % self.conf['proc_entry'])
find = self.data[obj:'regex'].findall(text)
if find:
if self.data[obj:'regex'].groups == 0:
self.data[obj] = len(find)
else:
self.data[obj] = find[0].strip()
else:
self.data[obj] = self.data[obj:'start']
|
import re
import snmpy
class proc_query(snmpy.plugin):
def create(self):
for key, val in sorted(self.conf['objects'].items()):
extra = {
'run': self.gather,
'start': val.get('start', 0),
'regex': re.compile(val['regex']),
}
self.data['1.%s' % key] = 'string', val['label']
self.data['2.%s' % key] = val['type'], val.get('start', 0), extra
def gather(self, obj):
text = open('/proc/%s' % self.conf['object']).read()
find = self.data[obj:'regex'].findall(text)
if find:
if self.data[obj:'regex'].groups == 0:
self.data[obj] = len(find)
else:
self.data[obj] = find[0].strip()
else:
self.data[obj] = self.data[obj:'start']
|
Rename proc object config key from proc_entry to simply object.
|
Rename proc object config key from proc_entry to simply object.
|
Python
|
mit
|
mk23/snmpy,mk23/snmpy
|
import re
import snmpy
class proc_query(snmpy.plugin):
def create(self):
for key, val in sorted(self.conf['objects'].items()):
extra = {
'run': self.gather,
'start': val.get('start', 0),
'regex': re.compile(val['regex']),
}
self.data['1.%s' % key] = 'string', val['label']
self.data['2.%s' % key] = val['type'], val.get('start', 0), extra
def gather(self, obj):
text = open('/proc/%s' % self.conf['proc_entry'])
find = self.data[obj:'regex'].findall(text)
if find:
if self.data[obj:'regex'].groups == 0:
self.data[obj] = len(find)
else:
self.data[obj] = find[0].strip()
else:
self.data[obj] = self.data[obj:'start']
Rename proc object config key from proc_entry to simply object.
|
import re
import snmpy
class proc_query(snmpy.plugin):
def create(self):
for key, val in sorted(self.conf['objects'].items()):
extra = {
'run': self.gather,
'start': val.get('start', 0),
'regex': re.compile(val['regex']),
}
self.data['1.%s' % key] = 'string', val['label']
self.data['2.%s' % key] = val['type'], val.get('start', 0), extra
def gather(self, obj):
text = open('/proc/%s' % self.conf['object']).read()
find = self.data[obj:'regex'].findall(text)
if find:
if self.data[obj:'regex'].groups == 0:
self.data[obj] = len(find)
else:
self.data[obj] = find[0].strip()
else:
self.data[obj] = self.data[obj:'start']
|
<commit_before>import re
import snmpy
class proc_query(snmpy.plugin):
def create(self):
for key, val in sorted(self.conf['objects'].items()):
extra = {
'run': self.gather,
'start': val.get('start', 0),
'regex': re.compile(val['regex']),
}
self.data['1.%s' % key] = 'string', val['label']
self.data['2.%s' % key] = val['type'], val.get('start', 0), extra
def gather(self, obj):
text = open('/proc/%s' % self.conf['proc_entry'])
find = self.data[obj:'regex'].findall(text)
if find:
if self.data[obj:'regex'].groups == 0:
self.data[obj] = len(find)
else:
self.data[obj] = find[0].strip()
else:
self.data[obj] = self.data[obj:'start']
<commit_msg>Rename proc object config key from proc_entry to simply object.<commit_after>
|
import re
import snmpy
class proc_query(snmpy.plugin):
def create(self):
for key, val in sorted(self.conf['objects'].items()):
extra = {
'run': self.gather,
'start': val.get('start', 0),
'regex': re.compile(val['regex']),
}
self.data['1.%s' % key] = 'string', val['label']
self.data['2.%s' % key] = val['type'], val.get('start', 0), extra
def gather(self, obj):
text = open('/proc/%s' % self.conf['object']).read()
find = self.data[obj:'regex'].findall(text)
if find:
if self.data[obj:'regex'].groups == 0:
self.data[obj] = len(find)
else:
self.data[obj] = find[0].strip()
else:
self.data[obj] = self.data[obj:'start']
|
import re
import snmpy
class proc_query(snmpy.plugin):
def create(self):
for key, val in sorted(self.conf['objects'].items()):
extra = {
'run': self.gather,
'start': val.get('start', 0),
'regex': re.compile(val['regex']),
}
self.data['1.%s' % key] = 'string', val['label']
self.data['2.%s' % key] = val['type'], val.get('start', 0), extra
def gather(self, obj):
text = open('/proc/%s' % self.conf['proc_entry'])
find = self.data[obj:'regex'].findall(text)
if find:
if self.data[obj:'regex'].groups == 0:
self.data[obj] = len(find)
else:
self.data[obj] = find[0].strip()
else:
self.data[obj] = self.data[obj:'start']
Rename proc object config key from proc_entry to simply object.import re
import snmpy
class proc_query(snmpy.plugin):
def create(self):
for key, val in sorted(self.conf['objects'].items()):
extra = {
'run': self.gather,
'start': val.get('start', 0),
'regex': re.compile(val['regex']),
}
self.data['1.%s' % key] = 'string', val['label']
self.data['2.%s' % key] = val['type'], val.get('start', 0), extra
def gather(self, obj):
text = open('/proc/%s' % self.conf['object']).read()
find = self.data[obj:'regex'].findall(text)
if find:
if self.data[obj:'regex'].groups == 0:
self.data[obj] = len(find)
else:
self.data[obj] = find[0].strip()
else:
self.data[obj] = self.data[obj:'start']
|
<commit_before>import re
import snmpy
class proc_query(snmpy.plugin):
def create(self):
for key, val in sorted(self.conf['objects'].items()):
extra = {
'run': self.gather,
'start': val.get('start', 0),
'regex': re.compile(val['regex']),
}
self.data['1.%s' % key] = 'string', val['label']
self.data['2.%s' % key] = val['type'], val.get('start', 0), extra
def gather(self, obj):
text = open('/proc/%s' % self.conf['proc_entry'])
find = self.data[obj:'regex'].findall(text)
if find:
if self.data[obj:'regex'].groups == 0:
self.data[obj] = len(find)
else:
self.data[obj] = find[0].strip()
else:
self.data[obj] = self.data[obj:'start']
<commit_msg>Rename proc object config key from proc_entry to simply object.<commit_after>import re
import snmpy
class proc_query(snmpy.plugin):
def create(self):
for key, val in sorted(self.conf['objects'].items()):
extra = {
'run': self.gather,
'start': val.get('start', 0),
'regex': re.compile(val['regex']),
}
self.data['1.%s' % key] = 'string', val['label']
self.data['2.%s' % key] = val['type'], val.get('start', 0), extra
def gather(self, obj):
text = open('/proc/%s' % self.conf['object']).read()
find = self.data[obj:'regex'].findall(text)
if find:
if self.data[obj:'regex'].groups == 0:
self.data[obj] = len(find)
else:
self.data[obj] = find[0].strip()
else:
self.data[obj] = self.data[obj:'start']
|
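gather() above leans on a subtle re detail: findall() on a pattern with no capture groups returns the matched strings, so len() counts occurrences, while a pattern with one group returns the captured values, so find[0] extracts a single reading. A small demo against a made-up /proc fragment:
# Demo of the findall() behavior that gather() above relies on; the sample
# text is a fabricated /proc-style fragment, not real kernel output.
import re
text = "processor : 0\nprocessor : 1\nMemTotal: 2048 kB\n"
count_pat = re.compile(r'processor')          # no groups -> count matches
value_pat = re.compile(r'MemTotal:\s*(\d+)')  # one group -> extract value
print(len(count_pat.findall(text)))        # 2
print(value_pat.findall(text)[0])          # '2048'
print(count_pat.groups, value_pat.groups)  # 0 1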
ec9c671bc4140590c17b00277c424f93e20a5a5e
|
hvac/api/secrets_engines/__init__.py
|
hvac/api/secrets_engines/__init__.py
|
"""Vault secrets engines endpoints"""
from hvac.api.secrets_engines.aws import Aws
from hvac.api.secrets_engines.azure import Azure
from hvac.api.secrets_engines.gcp import Gcp
from hvac.api.secrets_engines.identity import Identity
from hvac.api.secrets_engines.kv import Kv
from hvac.api.secrets_engines.pki import Pki
from hvac.api.secrets_engines.kv_v1 import KvV1
from hvac.api.secrets_engines.kv_v2 import KvV2
from hvac.api.secrets_engines.transit import Transit
from hvac.api.secrets_engines.database import Database
from hvac.api.vault_api_category import VaultApiCategory
__all__ = (
'Aws',
'Azure',
'Gcp',
'Identity',
'Kv',
'KvV1',
'KvV2',
'Pki',
'Transit',
'SecretsEngines',
'Database'
)
class SecretsEngines(VaultApiCategory):
"""Secrets Engines."""
implemented_classes = [
Aws,
Azure,
Gcp,
Identity,
Kv,
Pki,
Transit,
Database,
]
unimplemented_classes = [
'Ad',
'AliCloud',
'Azure',
'Consul',
'GcpKms',
'Nomad',
'RabbitMq',
'Ssh',
'TOTP',
'Cassandra',
'MongoDb',
'Mssql',
'MySql',
'PostgreSql',
]
|
"""Vault secrets engines endpoints"""
from hvac.api.secrets_engines.aws import Aws
from hvac.api.secrets_engines.azure import Azure
from hvac.api.secrets_engines.gcp import Gcp
from hvac.api.secrets_engines.identity import Identity
from hvac.api.secrets_engines.kv import Kv
from hvac.api.secrets_engines.pki import Pki
from hvac.api.secrets_engines.kv_v1 import KvV1
from hvac.api.secrets_engines.kv_v2 import KvV2
from hvac.api.secrets_engines.transit import Transit
from hvac.api.secrets_engines.database import Database
from hvac.api.secrets_engines.consul import Consul
from hvac.api.vault_api_category import VaultApiCategory
__all__ = (
'Aws',
'Azure',
'Gcp',
'Identity',
'Kv',
'KvV1',
'KvV2',
'Pki',
'Transit',
'SecretsEngines',
'Database'
)
class SecretsEngines(VaultApiCategory):
"""Secrets Engines."""
implemented_classes = [
Aws,
Azure,
Gcp,
Identity,
Kv,
Pki,
Transit,
Database,
Consul,
]
unimplemented_classes = [
'Ad',
'AliCloud',
'Azure',
'GcpKms',
'Nomad',
'RabbitMq',
'Ssh',
'TOTP',
'Cassandra',
'MongoDb',
'Mssql',
'MySql',
'PostgreSql',
]
|
Enable the consul secret engine
|
Enable the consul secret engine
|
Python
|
apache-2.0
|
ianunruh/hvac,ianunruh/hvac
|
"""Vault secrets engines endpoints"""
from hvac.api.secrets_engines.aws import Aws
from hvac.api.secrets_engines.azure import Azure
from hvac.api.secrets_engines.gcp import Gcp
from hvac.api.secrets_engines.identity import Identity
from hvac.api.secrets_engines.kv import Kv
from hvac.api.secrets_engines.pki import Pki
from hvac.api.secrets_engines.kv_v1 import KvV1
from hvac.api.secrets_engines.kv_v2 import KvV2
from hvac.api.secrets_engines.transit import Transit
from hvac.api.secrets_engines.database import Database
from hvac.api.vault_api_category import VaultApiCategory
__all__ = (
'Aws',
'Azure',
'Gcp',
'Identity',
'Kv',
'KvV1',
'KvV2',
'Pki',
'Transit',
'SecretsEngines',
'Database'
)
class SecretsEngines(VaultApiCategory):
"""Secrets Engines."""
implemented_classes = [
Aws,
Azure,
Gcp,
Identity,
Kv,
Pki,
Transit,
Database,
]
unimplemented_classes = [
'Ad',
'AliCloud',
'Azure',
'Consul',
'GcpKms',
'Nomad',
'RabbitMq',
'Ssh',
'TOTP',
'Cassandra',
'MongoDb',
'Mssql',
'MySql',
'PostgreSql',
]
Enable the consul secret engine
|
"""Vault secrets engines endpoints"""
from hvac.api.secrets_engines.aws import Aws
from hvac.api.secrets_engines.azure import Azure
from hvac.api.secrets_engines.gcp import Gcp
from hvac.api.secrets_engines.identity import Identity
from hvac.api.secrets_engines.kv import Kv
from hvac.api.secrets_engines.pki import Pki
from hvac.api.secrets_engines.kv_v1 import KvV1
from hvac.api.secrets_engines.kv_v2 import KvV2
from hvac.api.secrets_engines.transit import Transit
from hvac.api.secrets_engines.database import Database
from hvac.api.secrets_engines.consul import Consul
from hvac.api.vault_api_category import VaultApiCategory
__all__ = (
'Aws',
'Azure',
'Gcp',
'Identity',
'Kv',
'KvV1',
'KvV2',
'Pki',
'Transit',
'SecretsEngines',
'Database'
)
class SecretsEngines(VaultApiCategory):
"""Secrets Engines."""
implemented_classes = [
Aws,
Azure,
Gcp,
Identity,
Kv,
Pki,
Transit,
Database,
Consul,
]
unimplemented_classes = [
'Ad',
'AliCloud',
'Azure',
'GcpKms',
'Nomad',
'RabbitMq',
'Ssh',
'TOTP',
'Cassandra',
'MongoDb',
'Mssql',
'MySql',
'PostgreSql',
]
|
<commit_before>"""Vault secrets engines endpoints"""
from hvac.api.secrets_engines.aws import Aws
from hvac.api.secrets_engines.azure import Azure
from hvac.api.secrets_engines.gcp import Gcp
from hvac.api.secrets_engines.identity import Identity
from hvac.api.secrets_engines.kv import Kv
from hvac.api.secrets_engines.pki import Pki
from hvac.api.secrets_engines.kv_v1 import KvV1
from hvac.api.secrets_engines.kv_v2 import KvV2
from hvac.api.secrets_engines.transit import Transit
from hvac.api.secrets_engines.database import Database
from hvac.api.vault_api_category import VaultApiCategory
__all__ = (
'Aws',
'Azure',
'Gcp',
'Identity',
'Kv',
'KvV1',
'KvV2',
'Pki',
'Transit',
'SecretsEngines',
'Database'
)
class SecretsEngines(VaultApiCategory):
"""Secrets Engines."""
implemented_classes = [
Aws,
Azure,
Gcp,
Identity,
Kv,
Pki,
Transit,
Database,
]
unimplemented_classes = [
'Ad',
'AliCloud',
'Azure',
'Consul',
'GcpKms',
'Nomad',
'RabbitMq',
'Ssh',
'TOTP',
'Cassandra',
'MongoDb',
'Mssql',
'MySql',
'PostgreSql',
]
<commit_msg>Enable the consul secret engine<commit_after>
|
"""Vault secrets engines endpoints"""
from hvac.api.secrets_engines.aws import Aws
from hvac.api.secrets_engines.azure import Azure
from hvac.api.secrets_engines.gcp import Gcp
from hvac.api.secrets_engines.identity import Identity
from hvac.api.secrets_engines.kv import Kv
from hvac.api.secrets_engines.pki import Pki
from hvac.api.secrets_engines.kv_v1 import KvV1
from hvac.api.secrets_engines.kv_v2 import KvV2
from hvac.api.secrets_engines.transit import Transit
from hvac.api.secrets_engines.database import Database
from hvac.api.secrets_engines.consul import Consul
from hvac.api.vault_api_category import VaultApiCategory
__all__ = (
'Aws',
'Azure',
'Gcp',
'Identity',
'Kv',
'KvV1',
'KvV2',
'Pki',
'Transit',
'SecretsEngines',
'Database'
)
class SecretsEngines(VaultApiCategory):
"""Secrets Engines."""
implemented_classes = [
Aws,
Azure,
Gcp,
Identity,
Kv,
Pki,
Transit,
Database,
Consul,
]
unimplemented_classes = [
'Ad',
'AliCloud',
'Azure',
'GcpKms',
'Nomad',
'RabbitMq',
'Ssh',
'TOTP',
'Cassandra',
'MongoDb',
'Mssql',
'MySql',
'PostgreSql',
]
|
"""Vault secrets engines endpoints"""
from hvac.api.secrets_engines.aws import Aws
from hvac.api.secrets_engines.azure import Azure
from hvac.api.secrets_engines.gcp import Gcp
from hvac.api.secrets_engines.identity import Identity
from hvac.api.secrets_engines.kv import Kv
from hvac.api.secrets_engines.pki import Pki
from hvac.api.secrets_engines.kv_v1 import KvV1
from hvac.api.secrets_engines.kv_v2 import KvV2
from hvac.api.secrets_engines.transit import Transit
from hvac.api.secrets_engines.database import Database
from hvac.api.vault_api_category import VaultApiCategory
__all__ = (
'Aws',
'Azure',
'Gcp',
'Identity',
'Kv',
'KvV1',
'KvV2',
'Pki',
'Transit',
'SecretsEngines',
'Database'
)
class SecretsEngines(VaultApiCategory):
"""Secrets Engines."""
implemented_classes = [
Aws,
Azure,
Gcp,
Identity,
Kv,
Pki,
Transit,
Database,
]
unimplemented_classes = [
'Ad',
'AliCloud',
'Azure',
'Consul',
'GcpKms',
'Nomad',
'RabbitMq',
'Ssh',
'TOTP',
'Cassandra',
'MongoDb',
'Mssql',
'MySql',
'PostgreSql',
]
Enable the consul secret engine"""Vault secrets engines endpoints"""
from hvac.api.secrets_engines.aws import Aws
from hvac.api.secrets_engines.azure import Azure
from hvac.api.secrets_engines.gcp import Gcp
from hvac.api.secrets_engines.identity import Identity
from hvac.api.secrets_engines.kv import Kv
from hvac.api.secrets_engines.pki import Pki
from hvac.api.secrets_engines.kv_v1 import KvV1
from hvac.api.secrets_engines.kv_v2 import KvV2
from hvac.api.secrets_engines.transit import Transit
from hvac.api.secrets_engines.database import Database
from hvac.api.secrets_engines.consul import Consul
from hvac.api.vault_api_category import VaultApiCategory
__all__ = (
'Aws',
'Azure',
'Gcp',
'Identity',
'Kv',
'KvV1',
'KvV2',
'Pki',
'Transit',
'SecretsEngines',
'Database'
)
class SecretsEngines(VaultApiCategory):
"""Secrets Engines."""
implemented_classes = [
Aws,
Azure,
Gcp,
Identity,
Kv,
Pki,
Transit,
Database,
Consul,
]
unimplemented_classes = [
'Ad',
'AliCloud',
'Azure',
'GcpKms',
'Nomad',
'RabbitMq',
'Ssh',
'TOTP',
'Cassandra',
'MongoDb',
'Mssql',
'MySql',
'PostgreSql',
]
|
<commit_before>"""Vault secrets engines endpoints"""
from hvac.api.secrets_engines.aws import Aws
from hvac.api.secrets_engines.azure import Azure
from hvac.api.secrets_engines.gcp import Gcp
from hvac.api.secrets_engines.identity import Identity
from hvac.api.secrets_engines.kv import Kv
from hvac.api.secrets_engines.pki import Pki
from hvac.api.secrets_engines.kv_v1 import KvV1
from hvac.api.secrets_engines.kv_v2 import KvV2
from hvac.api.secrets_engines.transit import Transit
from hvac.api.secrets_engines.database import Database
from hvac.api.vault_api_category import VaultApiCategory
__all__ = (
'Aws',
'Azure',
'Gcp',
'Identity',
'Kv',
'KvV1',
'KvV2',
'Pki',
'Transit',
'SecretsEngines',
'Database'
)
class SecretsEngines(VaultApiCategory):
"""Secrets Engines."""
implemented_classes = [
Aws,
Azure,
Gcp,
Identity,
Kv,
Pki,
Transit,
Database,
]
unimplemented_classes = [
'Ad',
'AliCloud',
'Azure',
'Consul',
'GcpKms',
'Nomad',
'RabbitMq',
'Ssh',
'TOTP',
'Cassandra',
'MongoDb',
'Mssql',
'MySql',
'PostgreSql',
]
<commit_msg>Enable the consul secret engine<commit_after>"""Vault secrets engines endpoints"""
from hvac.api.secrets_engines.aws import Aws
from hvac.api.secrets_engines.azure import Azure
from hvac.api.secrets_engines.gcp import Gcp
from hvac.api.secrets_engines.identity import Identity
from hvac.api.secrets_engines.kv import Kv
from hvac.api.secrets_engines.pki import Pki
from hvac.api.secrets_engines.kv_v1 import KvV1
from hvac.api.secrets_engines.kv_v2 import KvV2
from hvac.api.secrets_engines.transit import Transit
from hvac.api.secrets_engines.database import Database
from hvac.api.secrets_engines.consul import Consul
from hvac.api.vault_api_category import VaultApiCategory
__all__ = (
'Aws',
'Azure',
'Gcp',
'Identity',
'Kv',
'KvV1',
'KvV2',
'Pki',
'Transit',
'SecretsEngines',
'Database'
)
class SecretsEngines(VaultApiCategory):
"""Secrets Engines."""
implemented_classes = [
Aws,
Azure,
Gcp,
Identity,
Kv,
Pki,
Transit,
Database,
Consul,
]
unimplemented_classes = [
'Ad',
'AliCloud',
'Azure',
'GcpKms',
'Nomad',
'RabbitMq',
'Ssh',
'TOTP',
'Cassandra',
'MongoDb',
'Mssql',
'MySql',
'PostgreSql',
]
|
9d15915784a94056283845a4ec0fd08ac8849d13
|
jobs/test_settings.py
|
jobs/test_settings.py
|
from decouple import config
from jobs.settings import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'test_db',
'USER': 'postgres',
'PASSWORD': 'pass1234',
'HOST': 'db',
'PORT': 5432,
}
}
#DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
# 'NAME': 'sqlite3.db', # Or path to database file if using sqlite3.
# 'USER': '', # Not used with sqlite3.
# 'PASSWORD': '', # Not used with sqlite3.
# 'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
# 'PORT': '', # Set to empty string for default. Not used with sqlite3.
# }
#}
|
from decouple import config
from jobs.settings import *
try:
host = config('DB_HOST')
except:
host = 'db'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'test_db',
'USER': 'postgres',
'PASSWORD': 'pass1234',
'HOST': host,
'PORT': 5432,
}
}
#DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
# 'NAME': 'sqlite3.db', # Or path to database file if using sqlite3.
# 'USER': '', # Not used with sqlite3.
# 'PASSWORD': '', # Not used with sqlite3.
# 'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
# 'PORT': '', # Set to empty string for default. Not used with sqlite3.
# }
#}
|
Add logic for db host during runtime
|
Add logic for db host during runtime
|
Python
|
mit
|
misachi/job_match,misachi/job_match,misachi/job_match
|
from decouple import config
from jobs.settings import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'test_db',
'USER': 'postgres',
'PASSWORD': 'pass1234',
'HOST': 'db',
'PORT': 5432,
}
}
#DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
# 'NAME': 'sqlite3.db', # Or path to database file if using sqlite3.
# 'USER': '', # Not used with sqlite3.
# 'PASSWORD': '', # Not used with sqlite3.
# 'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
# 'PORT': '', # Set to empty string for default. Not used with sqlite3.
# }
#}
Add logic for db host during runtime
|
from decouple import config
from jobs.settings import *
try:
host = config('DB_HOST')
except:
host = 'db'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'test_db',
'USER': 'postgres',
'PASSWORD': 'pass1234',
'HOST': host,
'PORT': 5432,
}
}
#DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
# 'NAME': 'sqlite3.db', # Or path to database file if using sqlite3.
# 'USER': '', # Not used with sqlite3.
# 'PASSWORD': '', # Not used with sqlite3.
# 'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
# 'PORT': '', # Set to empty string for default. Not used with sqlite3.
# }
#}
|
<commit_before>from decouple import config
from jobs.settings import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'test_db',
'USER': 'postgres',
'PASSWORD': 'pass1234',
'HOST': 'db',
'PORT': 5432,
}
}
#DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
# 'NAME': 'sqlite3.db', # Or path to database file if using sqlite3.
# 'USER': '', # Not used with sqlite3.
# 'PASSWORD': '', # Not used with sqlite3.
# 'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
# 'PORT': '', # Set to empty string for default. Not used with sqlite3.
# }
#}
<commit_msg>Add logic for db host during runtime<commit_after>
|
from decouple import config
from jobs.settings import *
try:
host = config('DB_HOST')
except:
host = 'db'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'test_db',
'USER': 'postgres',
'PASSWORD': 'pass1234',
'HOST': host,
'PORT': 5432,
}
}
#DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
# 'NAME': 'sqlite3.db', # Or path to database file if using sqlite3.
# 'USER': '', # Not used with sqlite3.
# 'PASSWORD': '', # Not used with sqlite3.
# 'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
# 'PORT': '', # Set to empty string for default. Not used with sqlite3.
# }
#}
|
from decouple import config
from jobs.settings import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'test_db',
'USER': 'postgres',
'PASSWORD': 'pass1234',
'HOST': 'db',
'PORT': 5432,
}
}
#DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
# 'NAME': 'sqlite3.db', # Or path to database file if using sqlite3.
# 'USER': '', # Not used with sqlite3.
# 'PASSWORD': '', # Not used with sqlite3.
# 'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
# 'PORT': '', # Set to empty string for default. Not used with sqlite3.
# }
#}
Add logic for db host during runtimefrom decouple import config
from jobs.settings import *
try:
host = config('DB_HOST')
except:
host = 'db'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'test_db',
'USER': 'postgres',
'PASSWORD': 'pass1234',
'HOST': host,
'PORT': 5432,
}
}
#DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
# 'NAME': 'sqlite3.db', # Or path to database file if using sqlite3.
# 'USER': '', # Not used with sqlite3.
# 'PASSWORD': '', # Not used with sqlite3.
# 'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
# 'PORT': '', # Set to empty string for default. Not used with sqlite3.
# }
#}
|
<commit_before>from decouple import config
from jobs.settings import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'test_db',
'USER': 'postgres',
'PASSWORD': 'pass1234',
'HOST': 'db',
'PORT': 5432,
}
}
#DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
# 'NAME': 'sqlite3.db', # Or path to database file if using sqlite3.
# 'USER': '', # Not used with sqlite3.
# 'PASSWORD': '', # Not used with sqlite3.
# 'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
# 'PORT': '', # Set to empty string for default. Not used with sqlite3.
# }
#}
<commit_msg>Add logic for db host during runtime<commit_after>from decouple import config
from jobs.settings import *
try:
host = config('DB_HOST')
except:
host = 'db'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'test_db',
'USER': 'postgres',
'PASSWORD': 'pass1234',
'HOST': host,
'PORT': 5432,
}
}
#DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
# 'NAME': 'sqlite3.db', # Or path to database file if using sqlite3.
# 'USER': '', # Not used with sqlite3.
# 'PASSWORD': '', # Not used with sqlite3.
# 'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
# 'PORT': '', # Set to empty string for default. Not used with sqlite3.
# }
#}
|
f6a2ee7af08ac69be539b4d364bc2692918633e0
|
hello_world.py
|
hello_world.py
|
#!/usr/bin/env python
"""Prints a ``Hello World`` statement."""
print "Hello Derrick Naminda!"
|
#!/usr/bin/env python
"""Prints a ``Hello World`` statement."""
print "Hello derricknaminda!"
|
Change hello world to include my github username
|
Change hello world to include my github username
|
Python
|
mpl-2.0
|
derricknaminda/lesson_01
|
#!/usr/bin/env python
"""Prints a ``Hello World`` statement."""
print "Hello Derrick Naminda!"
Change hello world to include my github username
|
#!/usr/bin/env python
"""Prints a ``Hello World`` statement."""
print "Hello derricknaminda!"
|
<commit_before>#!/usr/bin/env python
"""Prints a ``Hello World`` statement."""
print "Hello Derrick Naminda!"
<commit_msg>Change hello world to include my github username<commit_after>
|
#!/usr/bin/env python
"""Prints a ``Hello World`` statement."""
print "Hello derricknaminda!"
|
#!/usr/bin/env python
"""Prints a ``Hello World`` statement."""
print "Hello Derrick Naminda!"
Change hello world to include my github username#!/usr/bin/env python
"""Prints a ``Hello World`` statement."""
print "Hello derricknaminda!"
|
<commit_before>#!/usr/bin/env python
"""Prints a ``Hello World`` statement."""
print "Hello Derrick Naminda!"
<commit_msg>Change hello world to include my github username<commit_after>#!/usr/bin/env python
"""Prints a ``Hello World`` statement."""
print "Hello derricknaminda!"
|
6fedddf54200d4fcd9a5fac4946311be0abb80f1
|
hutmap/urls.py
|
hutmap/urls.py
|
from django.conf import settings
from django.conf.urls import patterns, url, include
from django.conf.urls.static import static
from django.contrib.gis import admin
from huts.urls import hut_patterns, api_patterns
admin.autodiscover()
# main site
urlpatterns = patterns('',
url(r'', include((hut_patterns, 'huts', 'huts'))),
url(r'^api/', include((api_patterns, 'huts_api', 'huts_api'))),
)
# admin
urlpatterns += patterns('',
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
)
# serve static and media files during development
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
from django.conf import settings
from django.conf.urls import patterns, url, include
from django.conf.urls.static import static
from django.contrib.gis import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from huts.urls import hut_patterns, api_patterns
admin.autodiscover()
# main site
urlpatterns = patterns('',
url(r'', include((hut_patterns, 'huts', 'huts'))),
url(r'^api/', include((api_patterns, 'huts_api', 'huts_api'))),
)
# admin
urlpatterns += patterns('',
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
)
# serve static and media files during development
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
Use proper setting for static files in development
|
Use proper setting for static files in development
|
Python
|
mit
|
muescha/hutmap,dylanfprice/hutmap,muescha/hutmap,dylanfprice/hutmap,dylanfprice/hutmap,dylanfprice/hutmap,muescha/hutmap,muescha/hutmap
|
from django.conf import settings
from django.conf.urls import patterns, url, include
from django.conf.urls.static import static
from django.contrib.gis import admin
from huts.urls import hut_patterns, api_patterns
admin.autodiscover()
# main site
urlpatterns = patterns('',
url(r'', include((hut_patterns, 'huts', 'huts'))),
url(r'^api/', include((api_patterns, 'huts_api', 'huts_api'))),
)
# admin
urlpatterns += patterns('',
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
)
# serve static and media files during development
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
Use proper setting for static files in development
|
from django.conf import settings
from django.conf.urls import patterns, url, include
from django.conf.urls.static import static
from django.contrib.gis import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from huts.urls import hut_patterns, api_patterns
admin.autodiscover()
# main site
urlpatterns = patterns('',
url(r'', include((hut_patterns, 'huts', 'huts'))),
url(r'^api/', include((api_patterns, 'huts_api', 'huts_api'))),
)
# admin
urlpatterns += patterns('',
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
)
# serve static and media files during development
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
<commit_before>from django.conf import settings
from django.conf.urls import patterns, url, include
from django.conf.urls.static import static
from django.contrib.gis import admin
from huts.urls import hut_patterns, api_patterns
admin.autodiscover()
# main site
urlpatterns = patterns('',
url(r'', include((hut_patterns, 'huts', 'huts'))),
url(r'^api/', include((api_patterns, 'huts_api', 'huts_api'))),
)
# admin
urlpatterns += patterns('',
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
)
# serve static and media files during development
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
<commit_msg>Use proper setting for static files in development<commit_after>
|
from django.conf import settings
from django.conf.urls import patterns, url, include
from django.conf.urls.static import static
from django.contrib.gis import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from huts.urls import hut_patterns, api_patterns
admin.autodiscover()
# main site
urlpatterns = patterns('',
url(r'', include((hut_patterns, 'huts', 'huts'))),
url(r'^api/', include((api_patterns, 'huts_api', 'huts_api'))),
)
# admin
urlpatterns += patterns('',
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
)
# serve static and media files during development
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
from django.conf import settings
from django.conf.urls import patterns, url, include
from django.conf.urls.static import static
from django.contrib.gis import admin
from huts.urls import hut_patterns, api_patterns
admin.autodiscover()
# main site
urlpatterns = patterns('',
url(r'', include((hut_patterns, 'huts', 'huts'))),
url(r'^api/', include((api_patterns, 'huts_api', 'huts_api'))),
)
# admin
urlpatterns += patterns('',
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
)
# serve static and media files during development
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
Use proper setting for static files in developmentfrom django.conf import settings
from django.conf.urls import patterns, url, include
from django.conf.urls.static import static
from django.contrib.gis import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from huts.urls import hut_patterns, api_patterns
admin.autodiscover()
# main site
urlpatterns = patterns('',
url(r'', include((hut_patterns, 'huts', 'huts'))),
url(r'^api/', include((api_patterns, 'huts_api', 'huts_api'))),
)
# admin
urlpatterns += patterns('',
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
)
# serve static and media files during development
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
<commit_before>from django.conf import settings
from django.conf.urls import patterns, url, include
from django.conf.urls.static import static
from django.contrib.gis import admin
from huts.urls import hut_patterns, api_patterns
admin.autodiscover()
# main site
urlpatterns = patterns('',
url(r'', include((hut_patterns, 'huts', 'huts'))),
url(r'^api/', include((api_patterns, 'huts_api', 'huts_api'))),
)
# admin
urlpatterns += patterns('',
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
)
# serve static and media files during development
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
<commit_msg>Use proper setting for static files in development<commit_after>from django.conf import settings
from django.conf.urls import patterns, url, include
from django.conf.urls.static import static
from django.contrib.gis import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from huts.urls import hut_patterns, api_patterns
admin.autodiscover()
# main site
urlpatterns = patterns('',
url(r'', include((hut_patterns, 'huts', 'huts'))),
url(r'^api/', include((api_patterns, 'huts_api', 'huts_api'))),
)
# admin
urlpatterns += patterns('',
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
)
# serve static and media files during development
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
bc6c744d56ca451aa47b0e66558de844d154f32c
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name = 'brunnhilde',
version = '1.5.3',
url = 'https://github.com/timothyryanwalsh/brunnhilde',
author = 'Tim Walsh',
author_email = 'timothyryanwalsh@gmail.com',
py_modules = ['brunnhilde'],
scripts = ['brunnhilde.py'],
description = 'A Siegfried-based digital archives reporting tool for directories and disk images',
keywords = 'archives reporting characterization identification diskimages',
platforms = ['POSIX'],
test_suite='test',
classifiers = [
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: MacOS',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX :: Linux',
'Topic :: Communications :: File Sharing',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Database',
'Topic :: System :: Archiving',
'Topic :: System :: Filesystems',
'Topic :: Utilities'
],
)
|
from setuptools import setup
setup(
name = 'brunnhilde',
version = '1.6.0',
url = 'https://github.com/timothyryanwalsh/brunnhilde',
author = 'Tim Walsh',
author_email = 'timothyryanwalsh@gmail.com',
py_modules = ['brunnhilde'],
scripts = ['brunnhilde.py'],
description = 'A Siegfried-based digital archives reporting tool for directories and disk images',
keywords = 'archives reporting characterization identification diskimages',
platforms = ['POSIX'],
test_suite='test',
classifiers = [
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: MacOS',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX :: Linux',
'Topic :: Communications :: File Sharing',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Database',
'Topic :: System :: Archiving',
'Topic :: System :: Filesystems',
'Topic :: Utilities'
],
)
|
Update for 1.6.0 - TODO: add Windows
|
Update for 1.6.0 - TODO: add Windows
|
Python
|
mit
|
timothyryanwalsh/brunnhilde
|
from setuptools import setup
setup(
name = 'brunnhilde',
version = '1.5.3',
url = 'https://github.com/timothyryanwalsh/brunnhilde',
author = 'Tim Walsh',
author_email = 'timothyryanwalsh@gmail.com',
py_modules = ['brunnhilde'],
scripts = ['brunnhilde.py'],
description = 'A Siegfried-based digital archives reporting tool for directories and disk images',
keywords = 'archives reporting characterization identification diskimages',
platforms = ['POSIX'],
test_suite='test',
classifiers = [
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: MacOS',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX :: Linux',
'Topic :: Communications :: File Sharing',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Database',
'Topic :: System :: Archiving',
'Topic :: System :: Filesystems',
'Topic :: Utilities'
],
)
Update for 1.6.0 - TODO: add Windows
|
from setuptools import setup
setup(
name = 'brunnhilde',
version = '1.6.0',
url = 'https://github.com/timothyryanwalsh/brunnhilde',
author = 'Tim Walsh',
author_email = 'timothyryanwalsh@gmail.com',
py_modules = ['brunnhilde'],
scripts = ['brunnhilde.py'],
description = 'A Siegfried-based digital archives reporting tool for directories and disk images',
keywords = 'archives reporting characterization identification diskimages',
platforms = ['POSIX'],
test_suite='test',
classifiers = [
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: MacOS',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX :: Linux',
'Topic :: Communications :: File Sharing',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Database',
'Topic :: System :: Archiving',
'Topic :: System :: Filesystems',
'Topic :: Utilities'
],
)
|
<commit_before>from setuptools import setup
setup(
name = 'brunnhilde',
version = '1.5.3',
url = 'https://github.com/timothyryanwalsh/brunnhilde',
author = 'Tim Walsh',
author_email = 'timothyryanwalsh@gmail.com',
py_modules = ['brunnhilde'],
scripts = ['brunnhilde.py'],
description = 'A Siegfried-based digital archives reporting tool for directories and disk images',
keywords = 'archives reporting characterization identification diskimages',
platforms = ['POSIX'],
test_suite='test',
classifiers = [
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: MacOS',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX :: Linux',
'Topic :: Communications :: File Sharing',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Database',
'Topic :: System :: Archiving',
'Topic :: System :: Filesystems',
'Topic :: Utilities'
],
)
<commit_msg>Update for 1.6.0 - TODO: add Windows<commit_after>
|
from setuptools import setup
setup(
name = 'brunnhilde',
version = '1.6.0',
url = 'https://github.com/timothyryanwalsh/brunnhilde',
author = 'Tim Walsh',
author_email = 'timothyryanwalsh@gmail.com',
py_modules = ['brunnhilde'],
scripts = ['brunnhilde.py'],
description = 'A Siegfried-based digital archives reporting tool for directories and disk images',
keywords = 'archives reporting characterization identification diskimages',
platforms = ['POSIX'],
test_suite='test',
classifiers = [
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: MacOS',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX :: Linux',
'Topic :: Communications :: File Sharing',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Database',
'Topic :: System :: Archiving',
'Topic :: System :: Filesystems',
'Topic :: Utilities'
],
)
|
from setuptools import setup
setup(
name = 'brunnhilde',
version = '1.5.3',
url = 'https://github.com/timothyryanwalsh/brunnhilde',
author = 'Tim Walsh',
author_email = 'timothyryanwalsh@gmail.com',
py_modules = ['brunnhilde'],
scripts = ['brunnhilde.py'],
description = 'A Siegfried-based digital archives reporting tool for directories and disk images',
keywords = 'archives reporting characterization identification diskimages',
platforms = ['POSIX'],
test_suite='test',
classifiers = [
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: MacOS',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX :: Linux',
'Topic :: Communications :: File Sharing',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Database',
'Topic :: System :: Archiving',
'Topic :: System :: Filesystems',
'Topic :: Utilities'
],
)
Update for 1.6.0 - TODO: add Windowsfrom setuptools import setup
setup(
name = 'brunnhilde',
version = '1.6.0',
url = 'https://github.com/timothyryanwalsh/brunnhilde',
author = 'Tim Walsh',
author_email = 'timothyryanwalsh@gmail.com',
py_modules = ['brunnhilde'],
scripts = ['brunnhilde.py'],
description = 'A Siegfried-based digital archives reporting tool for directories and disk images',
keywords = 'archives reporting characterization identification diskimages',
platforms = ['POSIX'],
test_suite='test',
classifiers = [
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: MacOS',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX :: Linux',
'Topic :: Communications :: File Sharing',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Database',
'Topic :: System :: Archiving',
'Topic :: System :: Filesystems',
'Topic :: Utilities'
],
)
|
<commit_before>from setuptools import setup
setup(
name = 'brunnhilde',
version = '1.5.3',
url = 'https://github.com/timothyryanwalsh/brunnhilde',
author = 'Tim Walsh',
author_email = 'timothyryanwalsh@gmail.com',
py_modules = ['brunnhilde'],
scripts = ['brunnhilde.py'],
description = 'A Siegfried-based digital archives reporting tool for directories and disk images',
keywords = 'archives reporting characterization identification diskimages',
platforms = ['POSIX'],
test_suite='test',
classifiers = [
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: MacOS',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX :: Linux',
'Topic :: Communications :: File Sharing',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Database',
'Topic :: System :: Archiving',
'Topic :: System :: Filesystems',
'Topic :: Utilities'
],
)
<commit_msg>Update for 1.6.0 - TODO: add Windows<commit_after>from setuptools import setup
setup(
name = 'brunnhilde',
version = '1.6.0',
url = 'https://github.com/timothyryanwalsh/brunnhilde',
author = 'Tim Walsh',
author_email = 'timothyryanwalsh@gmail.com',
py_modules = ['brunnhilde'],
scripts = ['brunnhilde.py'],
description = 'A Siegfried-based digital archives reporting tool for directories and disk images',
keywords = 'archives reporting characterization identification diskimages',
platforms = ['POSIX'],
test_suite='test',
classifiers = [
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: MacOS',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX :: Linux',
'Topic :: Communications :: File Sharing',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Database',
'Topic :: System :: Archiving',
'Topic :: System :: Filesystems',
'Topic :: Utilities'
],
)
|
d92405ac96b104b0f2ae005fe9c1ae1d10d9f66c
|
backdrop/write/config/development.py
|
backdrop/write/config/development.py
|
DATABASE_NAME = "backdrop"
MONGO_HOSTS = ['localhost']
MONGO_PORT = 27017
LOG_LEVEL = "DEBUG"
DATA_SET_AUTO_ID_KEYS = {
"lpa_volumes": ("key", "start_at", "end_at")
}
STAGECRAFT_COLLECTION_ENDPOINT_TOKEN = 'dev-create-endpoint-token'
try:
from development_environment import *
except ImportError:
from development_environment_sample import *
STAGECRAFT_URL = 'http://localhost:3204'
STAGECRAFT_DATA_SET_QUERY_TOKEN = 'dev-data-set-query-token'
SIGNON_API_USER_TOKEN = 'development-oauth-access-token'
|
DATABASE_NAME = "backdrop"
MONGO_HOSTS = ['localhost']
MONGO_PORT = 27017
LOG_LEVEL = "DEBUG"
DATA_SET_AUTO_ID_KEYS = {
"lpa_volumes": ("key", "start_at", "end_at")
}
STAGECRAFT_COLLECTION_ENDPOINT_TOKEN = 'dev-create-endpoint-token'
try:
from development_environment import *
except ImportError:
from development_environment_sample import *
STAGECRAFT_URL = 'http://localhost:3204'
STAGECRAFT_DATA_SET_QUERY_TOKEN = 'dev-data-set-query-token'
SIGNON_API_USER_TOKEN = 'development-oauth-access-token'
TRANSFORMER_AMQP_URL = 'amqp://transformer:notarealpw@localhost:5672/%2Ftransformations'
|
Add dev credentials for transformer rabbitmq user
|
Add dev credentials for transformer rabbitmq user
|
Python
|
mit
|
alphagov/backdrop,alphagov/backdrop,alphagov/backdrop
|
DATABASE_NAME = "backdrop"
MONGO_HOSTS = ['localhost']
MONGO_PORT = 27017
LOG_LEVEL = "DEBUG"
DATA_SET_AUTO_ID_KEYS = {
"lpa_volumes": ("key", "start_at", "end_at")
}
STAGECRAFT_COLLECTION_ENDPOINT_TOKEN = 'dev-create-endpoint-token'
try:
from development_environment import *
except ImportError:
from development_environment_sample import *
STAGECRAFT_URL = 'http://localhost:3204'
STAGECRAFT_DATA_SET_QUERY_TOKEN = 'dev-data-set-query-token'
SIGNON_API_USER_TOKEN = 'development-oauth-access-token'
Add dev credentials for transformer rabbitmq user
|
DATABASE_NAME = "backdrop"
MONGO_HOSTS = ['localhost']
MONGO_PORT = 27017
LOG_LEVEL = "DEBUG"
DATA_SET_AUTO_ID_KEYS = {
"lpa_volumes": ("key", "start_at", "end_at")
}
STAGECRAFT_COLLECTION_ENDPOINT_TOKEN = 'dev-create-endpoint-token'
try:
from development_environment import *
except ImportError:
from development_environment_sample import *
STAGECRAFT_URL = 'http://localhost:3204'
STAGECRAFT_DATA_SET_QUERY_TOKEN = 'dev-data-set-query-token'
SIGNON_API_USER_TOKEN = 'development-oauth-access-token'
TRANSFORMER_AMQP_URL = 'amqp://transformer:notarealpw@localhost:5672/%2Ftransformations'
|
<commit_before>DATABASE_NAME = "backdrop"
MONGO_HOSTS = ['localhost']
MONGO_PORT = 27017
LOG_LEVEL = "DEBUG"
DATA_SET_AUTO_ID_KEYS = {
"lpa_volumes": ("key", "start_at", "end_at")
}
STAGECRAFT_COLLECTION_ENDPOINT_TOKEN = 'dev-create-endpoint-token'
try:
from development_environment import *
except ImportError:
from development_environment_sample import *
STAGECRAFT_URL = 'http://localhost:3204'
STAGECRAFT_DATA_SET_QUERY_TOKEN = 'dev-data-set-query-token'
SIGNON_API_USER_TOKEN = 'development-oauth-access-token'
<commit_msg>Add dev credentials for transformer rabbitmq user<commit_after>
|
DATABASE_NAME = "backdrop"
MONGO_HOSTS = ['localhost']
MONGO_PORT = 27017
LOG_LEVEL = "DEBUG"
DATA_SET_AUTO_ID_KEYS = {
"lpa_volumes": ("key", "start_at", "end_at")
}
STAGECRAFT_COLLECTION_ENDPOINT_TOKEN = 'dev-create-endpoint-token'
try:
from development_environment import *
except ImportError:
from development_environment_sample import *
STAGECRAFT_URL = 'http://localhost:3204'
STAGECRAFT_DATA_SET_QUERY_TOKEN = 'dev-data-set-query-token'
SIGNON_API_USER_TOKEN = 'development-oauth-access-token'
TRANSFORMER_AMQP_URL = 'amqp://transformer:notarealpw@localhost:5672/%2Ftransformations'
|
DATABASE_NAME = "backdrop"
MONGO_HOSTS = ['localhost']
MONGO_PORT = 27017
LOG_LEVEL = "DEBUG"
DATA_SET_AUTO_ID_KEYS = {
"lpa_volumes": ("key", "start_at", "end_at")
}
STAGECRAFT_COLLECTION_ENDPOINT_TOKEN = 'dev-create-endpoint-token'
try:
from development_environment import *
except ImportError:
from development_environment_sample import *
STAGECRAFT_URL = 'http://localhost:3204'
STAGECRAFT_DATA_SET_QUERY_TOKEN = 'dev-data-set-query-token'
SIGNON_API_USER_TOKEN = 'development-oauth-access-token'
Add dev credentials for transformer rabbitmq userDATABASE_NAME = "backdrop"
MONGO_HOSTS = ['localhost']
MONGO_PORT = 27017
LOG_LEVEL = "DEBUG"
DATA_SET_AUTO_ID_KEYS = {
"lpa_volumes": ("key", "start_at", "end_at")
}
STAGECRAFT_COLLECTION_ENDPOINT_TOKEN = 'dev-create-endpoint-token'
try:
from development_environment import *
except ImportError:
from development_environment_sample import *
STAGECRAFT_URL = 'http://localhost:3204'
STAGECRAFT_DATA_SET_QUERY_TOKEN = 'dev-data-set-query-token'
SIGNON_API_USER_TOKEN = 'development-oauth-access-token'
TRANSFORMER_AMQP_URL = 'amqp://transformer:notarealpw@localhost:5672/%2Ftransformations'
|
<commit_before>DATABASE_NAME = "backdrop"
MONGO_HOSTS = ['localhost']
MONGO_PORT = 27017
LOG_LEVEL = "DEBUG"
DATA_SET_AUTO_ID_KEYS = {
"lpa_volumes": ("key", "start_at", "end_at")
}
STAGECRAFT_COLLECTION_ENDPOINT_TOKEN = 'dev-create-endpoint-token'
try:
from development_environment import *
except ImportError:
from development_environment_sample import *
STAGECRAFT_URL = 'http://localhost:3204'
STAGECRAFT_DATA_SET_QUERY_TOKEN = 'dev-data-set-query-token'
SIGNON_API_USER_TOKEN = 'development-oauth-access-token'
<commit_msg>Add dev credentials for transformer rabbitmq user<commit_after>DATABASE_NAME = "backdrop"
MONGO_HOSTS = ['localhost']
MONGO_PORT = 27017
LOG_LEVEL = "DEBUG"
DATA_SET_AUTO_ID_KEYS = {
"lpa_volumes": ("key", "start_at", "end_at")
}
STAGECRAFT_COLLECTION_ENDPOINT_TOKEN = 'dev-create-endpoint-token'
try:
from development_environment import *
except ImportError:
from development_environment_sample import *
STAGECRAFT_URL = 'http://localhost:3204'
STAGECRAFT_DATA_SET_QUERY_TOKEN = 'dev-data-set-query-token'
SIGNON_API_USER_TOKEN = 'development-oauth-access-token'
TRANSFORMER_AMQP_URL = 'amqp://transformer:notarealpw@localhost:5672/%2Ftransformations'
|
05dd1182574c1f95a92c4523d18686e0482e6a68
|
kboard/board/urls.py
|
kboard/board/urls.py
|
# Created by JHJ on 2016. 10. 5.
from django.conf.urls import url
from . import views
app_name = 'board'
urlpatterns = [
url(r'^$', views.board_list, name='board_list'),
url(r'^(?P<board_slug>[-a-z]+)/$', views.post_list, name='post_list'),
url(r'^(?P<board_slug>[-a-z]+)/new/$', views.new_post, name='new_post'),
url(r'^(?P<post_id>\d+)/delete/$', views.delete_post, name='delete_post'),
url(r'^(?P<post_id>\d+)/$', views.view_post, name='view_post'),
url(r'^(?P<board_slug>[-a-z]+)/(?P<post_id>\d+)/comment/new/$', views.new_comment, name='new_comment'),
url(r'^(?P<post_id>\d+)/comment/delete/$', views.delete_comment, name='delete_comment'),
]
|
# Created by JHJ on 2016. 10. 5.
from django.conf.urls import url
from . import views
app_name = 'board'
urlpatterns = [
url(r'^$', views.board_list, name='board_list'),
url(r'^(?P<board_slug>[-a-z]+)/$', views.post_list, name='post_list'),
url(r'^(?P<board_slug>[-a-z]+)/new/$', views.new_post, name='new_post'),
url(r'^(?P<post_id>\d+)/delete/$', views.delete_post, name='delete_post'),
url(r'^(?P<post_id>\d+)/$', views.view_post, name='view_post'),
url(r'^(?P<post_id>\d+)/comment/new/$', views.new_comment, name='new_comment'),
url(r'^(?P<post_id>\d+)/comment/delete/$', views.delete_comment, name='delete_comment'),
]
|
Delete 'borad_slug' parameter on 'new_comment' url
|
Delete 'borad_slug' parameter on 'new_comment' url
|
Python
|
mit
|
guswnsxodlf/k-board,kboard/kboard,hyesun03/k-board,darjeeling/k-board,cjh5414/kboard,cjh5414/kboard,hyesun03/k-board,kboard/kboard,cjh5414/kboard,kboard/kboard,guswnsxodlf/k-board,hyesun03/k-board,guswnsxodlf/k-board
|
# Created by JHJ on 2016. 10. 5.
from django.conf.urls import url
from . import views
app_name = 'board'
urlpatterns = [
url(r'^$', views.board_list, name='board_list'),
url(r'^(?P<board_slug>[-a-z]+)/$', views.post_list, name='post_list'),
url(r'^(?P<board_slug>[-a-z]+)/new/$', views.new_post, name='new_post'),
url(r'^(?P<post_id>\d+)/delete/$', views.delete_post, name='delete_post'),
url(r'^(?P<post_id>\d+)/$', views.view_post, name='view_post'),
url(r'^(?P<board_slug>[-a-z]+)/(?P<post_id>\d+)/comment/new/$', views.new_comment, name='new_comment'),
url(r'^(?P<post_id>\d+)/comment/delete/$', views.delete_comment, name='delete_comment'),
]
Delete 'borad_slug' parameter on 'new_comment' url
|
# Created by JHJ on 2016. 10. 5.
from django.conf.urls import url
from . import views
app_name = 'board'
urlpatterns = [
url(r'^$', views.board_list, name='board_list'),
url(r'^(?P<board_slug>[-a-z]+)/$', views.post_list, name='post_list'),
url(r'^(?P<board_slug>[-a-z]+)/new/$', views.new_post, name='new_post'),
url(r'^(?P<post_id>\d+)/delete/$', views.delete_post, name='delete_post'),
url(r'^(?P<post_id>\d+)/$', views.view_post, name='view_post'),
url(r'^(?P<post_id>\d+)/comment/new/$', views.new_comment, name='new_comment'),
url(r'^(?P<post_id>\d+)/comment/delete/$', views.delete_comment, name='delete_comment'),
]
|
<commit_before># Created by JHJ on 2016. 10. 5.
from django.conf.urls import url
from . import views
app_name = 'board'
urlpatterns = [
url(r'^$', views.board_list, name='board_list'),
url(r'^(?P<board_slug>[-a-z]+)/$', views.post_list, name='post_list'),
url(r'^(?P<board_slug>[-a-z]+)/new/$', views.new_post, name='new_post'),
url(r'^(?P<post_id>\d+)/delete/$', views.delete_post, name='delete_post'),
url(r'^(?P<post_id>\d+)/$', views.view_post, name='view_post'),
url(r'^(?P<board_slug>[-a-z]+)/(?P<post_id>\d+)/comment/new/$', views.new_comment, name='new_comment'),
url(r'^(?P<post_id>\d+)/comment/delete/$', views.delete_comment, name='delete_comment'),
]
<commit_msg>Delete 'borad_slug' parameter on 'new_comment' url<commit_after>
|
# Created by JHJ on 2016. 10. 5.
from django.conf.urls import url
from . import views
app_name = 'board'
urlpatterns = [
url(r'^$', views.board_list, name='board_list'),
url(r'^(?P<board_slug>[-a-z]+)/$', views.post_list, name='post_list'),
url(r'^(?P<board_slug>[-a-z]+)/new/$', views.new_post, name='new_post'),
url(r'^(?P<post_id>\d+)/delete/$', views.delete_post, name='delete_post'),
url(r'^(?P<post_id>\d+)/$', views.view_post, name='view_post'),
url(r'^(?P<post_id>\d+)/comment/new/$', views.new_comment, name='new_comment'),
url(r'^(?P<post_id>\d+)/comment/delete/$', views.delete_comment, name='delete_comment'),
]
|
# Created by JHJ on 2016. 10. 5.
from django.conf.urls import url
from . import views
app_name = 'board'
urlpatterns = [
url(r'^$', views.board_list, name='board_list'),
url(r'^(?P<board_slug>[-a-z]+)/$', views.post_list, name='post_list'),
url(r'^(?P<board_slug>[-a-z]+)/new/$', views.new_post, name='new_post'),
url(r'^(?P<post_id>\d+)/delete/$', views.delete_post, name='delete_post'),
url(r'^(?P<post_id>\d+)/$', views.view_post, name='view_post'),
url(r'^(?P<board_slug>[-a-z]+)/(?P<post_id>\d+)/comment/new/$', views.new_comment, name='new_comment'),
url(r'^(?P<post_id>\d+)/comment/delete/$', views.delete_comment, name='delete_comment'),
]
Delete 'borad_slug' parameter on 'new_comment' url# Created by JHJ on 2016. 10. 5.
from django.conf.urls import url
from . import views
app_name = 'board'
urlpatterns = [
url(r'^$', views.board_list, name='board_list'),
url(r'^(?P<board_slug>[-a-z]+)/$', views.post_list, name='post_list'),
url(r'^(?P<board_slug>[-a-z]+)/new/$', views.new_post, name='new_post'),
url(r'^(?P<post_id>\d+)/delete/$', views.delete_post, name='delete_post'),
url(r'^(?P<post_id>\d+)/$', views.view_post, name='view_post'),
url(r'^(?P<post_id>\d+)/comment/new/$', views.new_comment, name='new_comment'),
url(r'^(?P<post_id>\d+)/comment/delete/$', views.delete_comment, name='delete_comment'),
]
|
<commit_before># Created by JHJ on 2016. 10. 5.
from django.conf.urls import url
from . import views
app_name = 'board'
urlpatterns = [
url(r'^$', views.board_list, name='board_list'),
url(r'^(?P<board_slug>[-a-z]+)/$', views.post_list, name='post_list'),
url(r'^(?P<board_slug>[-a-z]+)/new/$', views.new_post, name='new_post'),
url(r'^(?P<post_id>\d+)/delete/$', views.delete_post, name='delete_post'),
url(r'^(?P<post_id>\d+)/$', views.view_post, name='view_post'),
url(r'^(?P<board_slug>[-a-z]+)/(?P<post_id>\d+)/comment/new/$', views.new_comment, name='new_comment'),
url(r'^(?P<post_id>\d+)/comment/delete/$', views.delete_comment, name='delete_comment'),
]
<commit_msg>Delete 'borad_slug' parameter on 'new_comment' url<commit_after># Created by JHJ on 2016. 10. 5.
from django.conf.urls import url
from . import views
app_name = 'board'
urlpatterns = [
url(r'^$', views.board_list, name='board_list'),
url(r'^(?P<board_slug>[-a-z]+)/$', views.post_list, name='post_list'),
url(r'^(?P<board_slug>[-a-z]+)/new/$', views.new_post, name='new_post'),
url(r'^(?P<post_id>\d+)/delete/$', views.delete_post, name='delete_post'),
url(r'^(?P<post_id>\d+)/$', views.view_post, name='view_post'),
url(r'^(?P<post_id>\d+)/comment/new/$', views.new_comment, name='new_comment'),
url(r'^(?P<post_id>\d+)/comment/delete/$', views.delete_comment, name='delete_comment'),
]
|
9f2ac5bf89c7a50281f6409f43bae5e513334e03
|
sslify/middleware.py
|
sslify/middleware.py
|
from django.conf import settings
from django.core import mail
from django.http import HttpResponsePermanentRedirect
class SSLifyMiddleware(object):
"""Force all requests to use HTTPs. If we get an HTTP request, we'll just
force a redirect to HTTPs.
.. note::
This will only take effect if ``settings.DEBUG`` is False.
.. note::
You can also disable this middleware when testing by setting
``settings.SSLIFY_DISABLE`` to True
"""
def process_request(self, request):
# disabled for test mode?
if getattr(settings, 'SSLIFY_DISABLE', False) and \
hasattr(mail, 'outbox'):
return None
# proceed as normal
if not any((settings.DEBUG, request.is_secure())):
url = request.build_absolute_uri(request.get_full_path())
secure_url = url.replace('http://', 'https://')
return HttpResponsePermanentRedirect(secure_url)
|
from django.conf import settings
from django.http import HttpResponsePermanentRedirect
class SSLifyMiddleware(object):
"""Force all requests to use HTTPs. If we get an HTTP request, we'll just
force a redirect to HTTPs.
.. note::
This will only take effect if ``settings.DEBUG`` is False.
.. note::
You can also disable this middleware when testing by setting
``settings.SSLIFY_DISABLE`` to True
"""
def process_request(self, request):
# disabled for test mode?
if getattr(settings, 'SSLIFY_DISABLE', False):
return None
# proceed as normal
if not any((settings.DEBUG, request.is_secure())):
url = request.build_absolute_uri(request.get_full_path())
secure_url = url.replace('http://', 'https://')
return HttpResponsePermanentRedirect(secure_url)
|
Remove strange outbox requirement for SSLIFY_DISABLE
|
Remove strange outbox requirement for SSLIFY_DISABLE
I don't understand why that was a condition of the SSLIFY_DISABLE flag, so I removed it.
|
Python
|
unlicense
|
rdegges/django-sslify
|
from django.conf import settings
from django.core import mail
from django.http import HttpResponsePermanentRedirect
class SSLifyMiddleware(object):
"""Force all requests to use HTTPs. If we get an HTTP request, we'll just
force a redirect to HTTPs.
.. note::
This will only take effect if ``settings.DEBUG`` is False.
.. note::
You can also disable this middleware when testing by setting
``settings.SSLIFY_DISABLE`` to True
"""
def process_request(self, request):
# disabled for test mode?
if getattr(settings, 'SSLIFY_DISABLE', False) and \
hasattr(mail, 'outbox'):
return None
# proceed as normal
if not any((settings.DEBUG, request.is_secure())):
url = request.build_absolute_uri(request.get_full_path())
secure_url = url.replace('http://', 'https://')
return HttpResponsePermanentRedirect(secure_url)
Remove strange outbox requirement for SSLIFY_DISABLE
I don't understand why that was a condition of the SSLIFY_DISABLE flag, so I removed it.
|
from django.conf import settings
from django.http import HttpResponsePermanentRedirect
class SSLifyMiddleware(object):
"""Force all requests to use HTTPs. If we get an HTTP request, we'll just
force a redirect to HTTPs.
.. note::
This will only take effect if ``settings.DEBUG`` is False.
.. note::
You can also disable this middleware when testing by setting
``settings.SSLIFY_DISABLE`` to True
"""
def process_request(self, request):
# disabled for test mode?
if getattr(settings, 'SSLIFY_DISABLE', False):
return None
# proceed as normal
if not any((settings.DEBUG, request.is_secure())):
url = request.build_absolute_uri(request.get_full_path())
secure_url = url.replace('http://', 'https://')
return HttpResponsePermanentRedirect(secure_url)
|
<commit_before>from django.conf import settings
from django.core import mail
from django.http import HttpResponsePermanentRedirect
class SSLifyMiddleware(object):
"""Force all requests to use HTTPs. If we get an HTTP request, we'll just
force a redirect to HTTPs.
.. note::
This will only take effect if ``settings.DEBUG`` is False.
.. note::
You can also disable this middleware when testing by setting
``settings.SSLIFY_DISABLE`` to True
"""
def process_request(self, request):
# disabled for test mode?
if getattr(settings, 'SSLIFY_DISABLE', False) and \
hasattr(mail, 'outbox'):
return None
# proceed as normal
if not any((settings.DEBUG, request.is_secure())):
url = request.build_absolute_uri(request.get_full_path())
secure_url = url.replace('http://', 'https://')
return HttpResponsePermanentRedirect(secure_url)
<commit_msg>Remove strange outbox requirement for SSLIFY_DISABLE
I don't understand why that was a condition of the SSLIFY_DISABLE flag, so I removed it.<commit_after>
|
from django.conf import settings
from django.http import HttpResponsePermanentRedirect
class SSLifyMiddleware(object):
"""Force all requests to use HTTPs. If we get an HTTP request, we'll just
force a redirect to HTTPs.
.. note::
This will only take effect if ``settings.DEBUG`` is False.
.. note::
You can also disable this middleware when testing by setting
``settings.SSLIFY_DISABLE`` to True
"""
def process_request(self, request):
# disabled for test mode?
if getattr(settings, 'SSLIFY_DISABLE', False):
return None
# proceed as normal
if not any((settings.DEBUG, request.is_secure())):
url = request.build_absolute_uri(request.get_full_path())
secure_url = url.replace('http://', 'https://')
return HttpResponsePermanentRedirect(secure_url)
|
from django.conf import settings
from django.core import mail
from django.http import HttpResponsePermanentRedirect
class SSLifyMiddleware(object):
"""Force all requests to use HTTPs. If we get an HTTP request, we'll just
force a redirect to HTTPs.
.. note::
This will only take effect if ``settings.DEBUG`` is False.
.. note::
You can also disable this middleware when testing by setting
``settings.SSLIFY_DISABLE`` to True
"""
def process_request(self, request):
# disabled for test mode?
if getattr(settings, 'SSLIFY_DISABLE', False) and \
hasattr(mail, 'outbox'):
return None
# proceed as normal
if not any((settings.DEBUG, request.is_secure())):
url = request.build_absolute_uri(request.get_full_path())
secure_url = url.replace('http://', 'https://')
return HttpResponsePermanentRedirect(secure_url)
Remove strange outbox requirement for SSLIFY_DISABLE
I don't understand why that was a condition of the SSLIFY_DISABLE flag, so I removed it.from django.conf import settings
from django.http import HttpResponsePermanentRedirect
class SSLifyMiddleware(object):
"""Force all requests to use HTTPs. If we get an HTTP request, we'll just
force a redirect to HTTPs.
.. note::
This will only take effect if ``settings.DEBUG`` is False.
.. note::
You can also disable this middleware when testing by setting
``settings.SSLIFY_DISABLE`` to True
"""
def process_request(self, request):
# disabled for test mode?
if getattr(settings, 'SSLIFY_DISABLE', False):
return None
# proceed as normal
if not any((settings.DEBUG, request.is_secure())):
url = request.build_absolute_uri(request.get_full_path())
secure_url = url.replace('http://', 'https://')
return HttpResponsePermanentRedirect(secure_url)
|
<commit_before>from django.conf import settings
from django.core import mail
from django.http import HttpResponsePermanentRedirect
class SSLifyMiddleware(object):
"""Force all requests to use HTTPs. If we get an HTTP request, we'll just
force a redirect to HTTPs.
.. note::
This will only take effect if ``settings.DEBUG`` is False.
.. note::
You can also disable this middleware when testing by setting
``settings.SSLIFY_DISABLE`` to True
"""
def process_request(self, request):
# disabled for test mode?
if getattr(settings, 'SSLIFY_DISABLE', False) and \
hasattr(mail, 'outbox'):
return None
# proceed as normal
if not any((settings.DEBUG, request.is_secure())):
url = request.build_absolute_uri(request.get_full_path())
secure_url = url.replace('http://', 'https://')
return HttpResponsePermanentRedirect(secure_url)
<commit_msg>Remove strange outbox requirement for SSLIFY_DISABLE
I don't understand why that was a condition of the SSLIFY_DISABLE flag, so I removed it.<commit_after>from django.conf import settings
from django.http import HttpResponsePermanentRedirect
class SSLifyMiddleware(object):
"""Force all requests to use HTTPs. If we get an HTTP request, we'll just
force a redirect to HTTPs.
.. note::
This will only take effect if ``settings.DEBUG`` is False.
.. note::
You can also disable this middleware when testing by setting
``settings.SSLIFY_DISABLE`` to True
"""
def process_request(self, request):
# disabled for test mode?
if getattr(settings, 'SSLIFY_DISABLE', False):
return None
# proceed as normal
if not any((settings.DEBUG, request.is_secure())):
url = request.build_absolute_uri(request.get_full_path())
secure_url = url.replace('http://', 'https://')
return HttpResponsePermanentRedirect(secure_url)
|
895f82d1ad3840b56f8b2c629e822b143494c990
|
manoseimas/mps_v2/management/commands/precompute_word_counts.py
|
manoseimas/mps_v2/management/commands/precompute_word_counts.py
|
import tqdm
from django.core.management.base import BaseCommand
import manoseimas.common.utils.words as words_utils
import manoseimas.mps_v2.models as mpsv2_models
class Command(BaseCommand):
help = 'Procompute word counts for stenogram statements'
def handle(self, **options):
total = mpsv2_models.StenogramStatement.objects.count()
statements = mpsv2_models.StenogramStatement.objects.all()
for statement in tqdm.tqdm(statements):
statement.word_count = words_utils.get_word_count(statement.text)
statement.save()
self.stdout.write(
'Successfully updated word counts for %d statements.' % total
)
|
import tqdm
from django.core.management.base import BaseCommand
import manoseimas.common.utils.words as words_utils
import manoseimas.mps_v2.models as mpsv2_models
class Command(BaseCommand):
help = 'Precompute word counts for stenogram statements'
def handle(self, **options):
total = mpsv2_models.StenogramStatement.objects.count()
statements = mpsv2_models.StenogramStatement.objects.all()
for statement in tqdm.tqdm(statements):
statement.word_count = words_utils.get_word_count(statement.text)
statement.save()
self.stdout.write(
'Successfully updated word counts for %d statements.' % total
)
|
Fix a typo in precompute_word_count command help text.
|
Fix a typo in precompute_word_count command help text.
|
Python
|
agpl-3.0
|
ManoSeimas/manoseimas.lt,ManoSeimas/manoseimas.lt,ManoSeimas/manoseimas.lt,ManoSeimas/manoseimas.lt
|
import tqdm
from django.core.management.base import BaseCommand
import manoseimas.common.utils.words as words_utils
import manoseimas.mps_v2.models as mpsv2_models
class Command(BaseCommand):
help = 'Procompute word counts for stenogram statements'
def handle(self, **options):
total = mpsv2_models.StenogramStatement.objects.count()
statements = mpsv2_models.StenogramStatement.objects.all()
for statement in tqdm.tqdm(statements):
statement.word_count = words_utils.get_word_count(statement.text)
statement.save()
self.stdout.write(
'Successfully updated word counts for %d statements.' % total
)
Fix a typo in precompute_word_count command help text.
|
import tqdm
from django.core.management.base import BaseCommand
import manoseimas.common.utils.words as words_utils
import manoseimas.mps_v2.models as mpsv2_models
class Command(BaseCommand):
help = 'Precompute word counts for stenogram statements'
def handle(self, **options):
total = mpsv2_models.StenogramStatement.objects.count()
statements = mpsv2_models.StenogramStatement.objects.all()
for statement in tqdm.tqdm(statements):
statement.word_count = words_utils.get_word_count(statement.text)
statement.save()
self.stdout.write(
'Successfully updated word counts for %d statements.' % total
)
|
<commit_before>import tqdm
from django.core.management.base import BaseCommand
import manoseimas.common.utils.words as words_utils
import manoseimas.mps_v2.models as mpsv2_models
class Command(BaseCommand):
help = 'Procompute word counts for stenogram statements'
def handle(self, **options):
total = mpsv2_models.StenogramStatement.objects.count()
statements = mpsv2_models.StenogramStatement.objects.all()
for statement in tqdm.tqdm(statements):
statement.word_count = words_utils.get_word_count(statement.text)
statement.save()
self.stdout.write(
'Successfully updated word counts for %d statements.' % total
)
<commit_msg>Fix a typo in precompute_word_count command help text.<commit_after>
|
import tqdm
from django.core.management.base import BaseCommand
import manoseimas.common.utils.words as words_utils
import manoseimas.mps_v2.models as mpsv2_models
class Command(BaseCommand):
help = 'Precompute word counts for stenogram statements'
def handle(self, **options):
total = mpsv2_models.StenogramStatement.objects.count()
statements = mpsv2_models.StenogramStatement.objects.all()
for statement in tqdm.tqdm(statements):
statement.word_count = words_utils.get_word_count(statement.text)
statement.save()
self.stdout.write(
'Successfully updated word counts for %d statements.' % total
)
|
import tqdm
from django.core.management.base import BaseCommand
import manoseimas.common.utils.words as words_utils
import manoseimas.mps_v2.models as mpsv2_models
class Command(BaseCommand):
help = 'Procompute word counts for stenogram statements'
def handle(self, **options):
total = mpsv2_models.StenogramStatement.objects.count()
statements = mpsv2_models.StenogramStatement.objects.all()
for statement in tqdm.tqdm(statements):
statement.word_count = words_utils.get_word_count(statement.text)
statement.save()
self.stdout.write(
'Successfully updated word counts for %d statements.' % total
)
Fix a typo in precompute_word_count command help text.import tqdm
from django.core.management.base import BaseCommand
import manoseimas.common.utils.words as words_utils
import manoseimas.mps_v2.models as mpsv2_models
class Command(BaseCommand):
help = 'Precompute word counts for stenogram statements'
def handle(self, **options):
total = mpsv2_models.StenogramStatement.objects.count()
statements = mpsv2_models.StenogramStatement.objects.all()
for statement in tqdm.tqdm(statements):
statement.word_count = words_utils.get_word_count(statement.text)
statement.save()
self.stdout.write(
'Successfully updated word counts for %d statements.' % total
)
|
<commit_before>import tqdm
from django.core.management.base import BaseCommand
import manoseimas.common.utils.words as words_utils
import manoseimas.mps_v2.models as mpsv2_models
class Command(BaseCommand):
help = 'Procompute word counts for stenogram statements'
def handle(self, **options):
total = mpsv2_models.StenogramStatement.objects.count()
statements = mpsv2_models.StenogramStatement.objects.all()
for statement in tqdm.tqdm(statements):
statement.word_count = words_utils.get_word_count(statement.text)
statement.save()
self.stdout.write(
'Successfully updated word counts for %d statements.' % total
)
<commit_msg>Fix a typo in precompute_word_count command help text.<commit_after>import tqdm
from django.core.management.base import BaseCommand
import manoseimas.common.utils.words as words_utils
import manoseimas.mps_v2.models as mpsv2_models
class Command(BaseCommand):
help = 'Precompute word counts for stenogram statements'
def handle(self, **options):
total = mpsv2_models.StenogramStatement.objects.count()
statements = mpsv2_models.StenogramStatement.objects.all()
for statement in tqdm.tqdm(statements):
statement.word_count = words_utils.get_word_count(statement.text)
statement.save()
self.stdout.write(
'Successfully updated word counts for %d statements.' % total
)
|