commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
184b64da2a4fa0d8827d839501e12317f9adfa46
|
simple-cipher/simple_cipher.py
|
simple-cipher/simple_cipher.py
|
import math
import secrets
from string import ascii_lowercase
class Cipher(object):
def __init__(self, key=None):
if not key:
key = Cipher._random_key()
if not key.isalpha() or not key.islower():
raise ValueError("Key must consist only of lowercase letters")
self.key = key
self._key = [ord(k)-97 for k in key]
def encode(self, s):
key = self._key * math.ceil(len(s)/len(self._key))
chars = [c for c in s.lower() if c in ascii_lowercase]
return "".join(Cipher._shift(c, k) for c, k in zip(chars, key))
def decode(self, s):
key = self._key * math.ceil(len(s)/len(self._key))
chars = [c for c in s.lower() if c in ascii_lowercase]
return "".join(Cipher._shift(c, -k) for c, k in zip(chars, key))
@staticmethod
def _shift(char, key):
return chr(97 + ((ord(char) - 97 + key) % 26))
@staticmethod
def _random_key(length=256):
return "".join(secrets.choice(ascii_lowercase) for _ in range(length))
class Caesar(Cipher):
def __init__(self):
Cipher.__init__(self, "d")
|
import math
import secrets
from string import ascii_lowercase
class Cipher(object):
def __init__(self, key=None):
if not key:
key = self._random_key()
if not key.isalpha() or not key.islower():
raise ValueError("Key must consist only of lowercase letters")
self.key = key
self._key = [ord(k)-97 for k in key]
def encode(self, s):
key = self._key * math.ceil(len(s)/len(self._key))
chars = [c for c in s.lower() if c in ascii_lowercase]
return "".join(self._shift(c, k) for c, k in zip(chars, key))
def decode(self, s):
key = self._key * math.ceil(len(s)/len(self._key))
chars = [c for c in s.lower() if c in ascii_lowercase]
return "".join(self._shift(c, -k) for c, k in zip(chars, key))
@staticmethod
def _shift(char, key):
return chr(97 + ((ord(char) - 97 + key) % 26))
@staticmethod
def _random_key(length=256):
return "".join(secrets.choice(ascii_lowercase) for _ in range(length))
class Caesar(Cipher):
def __init__(self):
super().__init__("d")
|
Use super() and self within the Cipher and Caesar classes
|
Use super() and self within the Cipher and Caesar classes
|
Python
|
agpl-3.0
|
CubicComet/exercism-python-solutions
|
import math
import secrets
from string import ascii_lowercase
class Cipher(object):
def __init__(self, key=None):
if not key:
key = Cipher._random_key()
if not key.isalpha() or not key.islower():
raise ValueError("Key must consist only of lowercase letters")
self.key = key
self._key = [ord(k)-97 for k in key]
def encode(self, s):
key = self._key * math.ceil(len(s)/len(self._key))
chars = [c for c in s.lower() if c in ascii_lowercase]
return "".join(Cipher._shift(c, k) for c, k in zip(chars, key))
def decode(self, s):
key = self._key * math.ceil(len(s)/len(self._key))
chars = [c for c in s.lower() if c in ascii_lowercase]
return "".join(Cipher._shift(c, -k) for c, k in zip(chars, key))
@staticmethod
def _shift(char, key):
return chr(97 + ((ord(char) - 97 + key) % 26))
@staticmethod
def _random_key(length=256):
return "".join(secrets.choice(ascii_lowercase) for _ in range(length))
class Caesar(Cipher):
def __init__(self):
Cipher.__init__(self, "d")
Use super() and self within the Cipher and Caesar classes
|
import math
import secrets
from string import ascii_lowercase
class Cipher(object):
def __init__(self, key=None):
if not key:
key = self._random_key()
if not key.isalpha() or not key.islower():
raise ValueError("Key must consist only of lowercase letters")
self.key = key
self._key = [ord(k)-97 for k in key]
def encode(self, s):
key = self._key * math.ceil(len(s)/len(self._key))
chars = [c for c in s.lower() if c in ascii_lowercase]
return "".join(self._shift(c, k) for c, k in zip(chars, key))
def decode(self, s):
key = self._key * math.ceil(len(s)/len(self._key))
chars = [c for c in s.lower() if c in ascii_lowercase]
return "".join(self._shift(c, -k) for c, k in zip(chars, key))
@staticmethod
def _shift(char, key):
return chr(97 + ((ord(char) - 97 + key) % 26))
@staticmethod
def _random_key(length=256):
return "".join(secrets.choice(ascii_lowercase) for _ in range(length))
class Caesar(Cipher):
def __init__(self):
super().__init__("d")
|
<commit_before>import math
import secrets
from string import ascii_lowercase
class Cipher(object):
def __init__(self, key=None):
if not key:
key = Cipher._random_key()
if not key.isalpha() or not key.islower():
raise ValueError("Key must consist only of lowercase letters")
self.key = key
self._key = [ord(k)-97 for k in key]
def encode(self, s):
key = self._key * math.ceil(len(s)/len(self._key))
chars = [c for c in s.lower() if c in ascii_lowercase]
return "".join(Cipher._shift(c, k) for c, k in zip(chars, key))
def decode(self, s):
key = self._key * math.ceil(len(s)/len(self._key))
chars = [c for c in s.lower() if c in ascii_lowercase]
return "".join(Cipher._shift(c, -k) for c, k in zip(chars, key))
@staticmethod
def _shift(char, key):
return chr(97 + ((ord(char) - 97 + key) % 26))
@staticmethod
def _random_key(length=256):
return "".join(secrets.choice(ascii_lowercase) for _ in range(length))
class Caesar(Cipher):
def __init__(self):
Cipher.__init__(self, "d")
<commit_msg>Use super() and self within the Cipher and Caesar classes<commit_after>
|
import math
import secrets
from string import ascii_lowercase
class Cipher(object):
def __init__(self, key=None):
if not key:
key = self._random_key()
if not key.isalpha() or not key.islower():
raise ValueError("Key must consist only of lowercase letters")
self.key = key
self._key = [ord(k)-97 for k in key]
def encode(self, s):
key = self._key * math.ceil(len(s)/len(self._key))
chars = [c for c in s.lower() if c in ascii_lowercase]
return "".join(self._shift(c, k) for c, k in zip(chars, key))
def decode(self, s):
key = self._key * math.ceil(len(s)/len(self._key))
chars = [c for c in s.lower() if c in ascii_lowercase]
return "".join(self._shift(c, -k) for c, k in zip(chars, key))
@staticmethod
def _shift(char, key):
return chr(97 + ((ord(char) - 97 + key) % 26))
@staticmethod
def _random_key(length=256):
return "".join(secrets.choice(ascii_lowercase) for _ in range(length))
class Caesar(Cipher):
def __init__(self):
super().__init__("d")
|
import math
import secrets
from string import ascii_lowercase
class Cipher(object):
def __init__(self, key=None):
if not key:
key = Cipher._random_key()
if not key.isalpha() or not key.islower():
raise ValueError("Key must consist only of lowercase letters")
self.key = key
self._key = [ord(k)-97 for k in key]
def encode(self, s):
key = self._key * math.ceil(len(s)/len(self._key))
chars = [c for c in s.lower() if c in ascii_lowercase]
return "".join(Cipher._shift(c, k) for c, k in zip(chars, key))
def decode(self, s):
key = self._key * math.ceil(len(s)/len(self._key))
chars = [c for c in s.lower() if c in ascii_lowercase]
return "".join(Cipher._shift(c, -k) for c, k in zip(chars, key))
@staticmethod
def _shift(char, key):
return chr(97 + ((ord(char) - 97 + key) % 26))
@staticmethod
def _random_key(length=256):
return "".join(secrets.choice(ascii_lowercase) for _ in range(length))
class Caesar(Cipher):
def __init__(self):
Cipher.__init__(self, "d")
Use super() and self within the Cipher and Caesar classesimport math
import secrets
from string import ascii_lowercase
class Cipher(object):
def __init__(self, key=None):
if not key:
key = self._random_key()
if not key.isalpha() or not key.islower():
raise ValueError("Key must consist only of lowercase letters")
self.key = key
self._key = [ord(k)-97 for k in key]
def encode(self, s):
key = self._key * math.ceil(len(s)/len(self._key))
chars = [c for c in s.lower() if c in ascii_lowercase]
return "".join(self._shift(c, k) for c, k in zip(chars, key))
def decode(self, s):
key = self._key * math.ceil(len(s)/len(self._key))
chars = [c for c in s.lower() if c in ascii_lowercase]
return "".join(self._shift(c, -k) for c, k in zip(chars, key))
@staticmethod
def _shift(char, key):
return chr(97 + ((ord(char) - 97 + key) % 26))
@staticmethod
def _random_key(length=256):
return "".join(secrets.choice(ascii_lowercase) for _ in range(length))
class Caesar(Cipher):
def __init__(self):
super().__init__("d")
|
<commit_before>import math
import secrets
from string import ascii_lowercase
class Cipher(object):
def __init__(self, key=None):
if not key:
key = Cipher._random_key()
if not key.isalpha() or not key.islower():
raise ValueError("Key must consist only of lowercase letters")
self.key = key
self._key = [ord(k)-97 for k in key]
def encode(self, s):
key = self._key * math.ceil(len(s)/len(self._key))
chars = [c for c in s.lower() if c in ascii_lowercase]
return "".join(Cipher._shift(c, k) for c, k in zip(chars, key))
def decode(self, s):
key = self._key * math.ceil(len(s)/len(self._key))
chars = [c for c in s.lower() if c in ascii_lowercase]
return "".join(Cipher._shift(c, -k) for c, k in zip(chars, key))
@staticmethod
def _shift(char, key):
return chr(97 + ((ord(char) - 97 + key) % 26))
@staticmethod
def _random_key(length=256):
return "".join(secrets.choice(ascii_lowercase) for _ in range(length))
class Caesar(Cipher):
def __init__(self):
Cipher.__init__(self, "d")
<commit_msg>Use super() and self within the Cipher and Caesar classes<commit_after>import math
import secrets
from string import ascii_lowercase
class Cipher(object):
def __init__(self, key=None):
if not key:
key = self._random_key()
if not key.isalpha() or not key.islower():
raise ValueError("Key must consist only of lowercase letters")
self.key = key
self._key = [ord(k)-97 for k in key]
def encode(self, s):
key = self._key * math.ceil(len(s)/len(self._key))
chars = [c for c in s.lower() if c in ascii_lowercase]
return "".join(self._shift(c, k) for c, k in zip(chars, key))
def decode(self, s):
key = self._key * math.ceil(len(s)/len(self._key))
chars = [c for c in s.lower() if c in ascii_lowercase]
return "".join(self._shift(c, -k) for c, k in zip(chars, key))
@staticmethod
def _shift(char, key):
return chr(97 + ((ord(char) - 97 + key) % 26))
@staticmethod
def _random_key(length=256):
return "".join(secrets.choice(ascii_lowercase) for _ in range(length))
class Caesar(Cipher):
def __init__(self):
super().__init__("d")
|
b0011541a21927c4f9ee378b77b11fd4b0dfbcff
|
tests/test_cookiecutter_substitution.py
|
tests/test_cookiecutter_substitution.py
|
import re
import sh
from .base import DjangoCookieTestCase
class TestCookiecutterSubstitution(DjangoCookieTestCase):
"""Test that all cookiecutter instances are substituted"""
def test_all_cookiecutter_instances_are_substituted(self):
# Build a list containing absolute paths to the generated files
paths = self.generate_project()
# Construct the cookiecutter search pattern
pattern = "{{(\s?cookiecutter)[.](.*?)}}"
re_obj = re.compile(pattern)
# Assert that no match is found in any of the files
for path in paths:
for line in open(path, 'r'):
match = re_obj.search(line)
self.assertIsNone(
match,
"cookiecutter variable not replaced in {}".format(path))
def test_flake8_complaince(self):
"""generated project should pass flake8"""
self.generate_project()
try:
sh.flake8(self.destpath)
except sh.ErrorReturnCode as e:
raise AssertionError(e)
|
import re
import sh
from .base import DjangoCookieTestCase
class TestCookiecutterSubstitution(DjangoCookieTestCase):
"""Test that all cookiecutter instances are substituted"""
def test_all_cookiecutter_instances_are_substituted(self):
# Build a list containing absolute paths to the generated files
paths = self.generate_project()
# Construct the cookiecutter search pattern
pattern = "{{(\s?cookiecutter)[.](.*?)}}"
re_obj = re.compile(pattern)
# Assert that no match is found in any of the files
for path in paths:
for line in open(path, 'r', encoding='utf-8', errors='ignore'):
match = re_obj.search(line)
self.assertIsNone(
match,
"cookiecutter variable not replaced in {}".format(path))
def test_flake8_complaince(self):
"""generated project should pass flake8"""
self.generate_project()
try:
sh.flake8(self.destpath)
except sh.ErrorReturnCode as e:
raise AssertionError(e)
|
Fix tests for python 3
|
Fix tests for python 3
|
Python
|
bsd-3-clause
|
wldcordeiro/cookiecutter-django-essentials,wldcordeiro/cookiecutter-django-essentials,wldcordeiro/cookiecutter-django-essentials
|
import re
import sh
from .base import DjangoCookieTestCase
class TestCookiecutterSubstitution(DjangoCookieTestCase):
"""Test that all cookiecutter instances are substituted"""
def test_all_cookiecutter_instances_are_substituted(self):
# Build a list containing absolute paths to the generated files
paths = self.generate_project()
# Construct the cookiecutter search pattern
pattern = "{{(\s?cookiecutter)[.](.*?)}}"
re_obj = re.compile(pattern)
# Assert that no match is found in any of the files
for path in paths:
for line in open(path, 'r'):
match = re_obj.search(line)
self.assertIsNone(
match,
"cookiecutter variable not replaced in {}".format(path))
def test_flake8_complaince(self):
"""generated project should pass flake8"""
self.generate_project()
try:
sh.flake8(self.destpath)
except sh.ErrorReturnCode as e:
raise AssertionError(e)
Fix tests for python 3
|
import re
import sh
from .base import DjangoCookieTestCase
class TestCookiecutterSubstitution(DjangoCookieTestCase):
"""Test that all cookiecutter instances are substituted"""
def test_all_cookiecutter_instances_are_substituted(self):
# Build a list containing absolute paths to the generated files
paths = self.generate_project()
# Construct the cookiecutter search pattern
pattern = "{{(\s?cookiecutter)[.](.*?)}}"
re_obj = re.compile(pattern)
# Assert that no match is found in any of the files
for path in paths:
for line in open(path, 'r', encoding='utf-8', errors='ignore'):
match = re_obj.search(line)
self.assertIsNone(
match,
"cookiecutter variable not replaced in {}".format(path))
def test_flake8_complaince(self):
"""generated project should pass flake8"""
self.generate_project()
try:
sh.flake8(self.destpath)
except sh.ErrorReturnCode as e:
raise AssertionError(e)
|
<commit_before>import re
import sh
from .base import DjangoCookieTestCase
class TestCookiecutterSubstitution(DjangoCookieTestCase):
"""Test that all cookiecutter instances are substituted"""
def test_all_cookiecutter_instances_are_substituted(self):
# Build a list containing absolute paths to the generated files
paths = self.generate_project()
# Construct the cookiecutter search pattern
pattern = "{{(\s?cookiecutter)[.](.*?)}}"
re_obj = re.compile(pattern)
# Assert that no match is found in any of the files
for path in paths:
for line in open(path, 'r'):
match = re_obj.search(line)
self.assertIsNone(
match,
"cookiecutter variable not replaced in {}".format(path))
def test_flake8_complaince(self):
"""generated project should pass flake8"""
self.generate_project()
try:
sh.flake8(self.destpath)
except sh.ErrorReturnCode as e:
raise AssertionError(e)
<commit_msg>Fix tests for python 3<commit_after>
|
import re
import sh
from .base import DjangoCookieTestCase
class TestCookiecutterSubstitution(DjangoCookieTestCase):
"""Test that all cookiecutter instances are substituted"""
def test_all_cookiecutter_instances_are_substituted(self):
# Build a list containing absolute paths to the generated files
paths = self.generate_project()
# Construct the cookiecutter search pattern
pattern = "{{(\s?cookiecutter)[.](.*?)}}"
re_obj = re.compile(pattern)
# Assert that no match is found in any of the files
for path in paths:
for line in open(path, 'r', encoding='utf-8', errors='ignore'):
match = re_obj.search(line)
self.assertIsNone(
match,
"cookiecutter variable not replaced in {}".format(path))
def test_flake8_complaince(self):
"""generated project should pass flake8"""
self.generate_project()
try:
sh.flake8(self.destpath)
except sh.ErrorReturnCode as e:
raise AssertionError(e)
|
import re
import sh
from .base import DjangoCookieTestCase
class TestCookiecutterSubstitution(DjangoCookieTestCase):
"""Test that all cookiecutter instances are substituted"""
def test_all_cookiecutter_instances_are_substituted(self):
# Build a list containing absolute paths to the generated files
paths = self.generate_project()
# Construct the cookiecutter search pattern
pattern = "{{(\s?cookiecutter)[.](.*?)}}"
re_obj = re.compile(pattern)
# Assert that no match is found in any of the files
for path in paths:
for line in open(path, 'r'):
match = re_obj.search(line)
self.assertIsNone(
match,
"cookiecutter variable not replaced in {}".format(path))
def test_flake8_complaince(self):
"""generated project should pass flake8"""
self.generate_project()
try:
sh.flake8(self.destpath)
except sh.ErrorReturnCode as e:
raise AssertionError(e)
Fix tests for python 3import re
import sh
from .base import DjangoCookieTestCase
class TestCookiecutterSubstitution(DjangoCookieTestCase):
"""Test that all cookiecutter instances are substituted"""
def test_all_cookiecutter_instances_are_substituted(self):
# Build a list containing absolute paths to the generated files
paths = self.generate_project()
# Construct the cookiecutter search pattern
pattern = "{{(\s?cookiecutter)[.](.*?)}}"
re_obj = re.compile(pattern)
# Assert that no match is found in any of the files
for path in paths:
for line in open(path, 'r', encoding='utf-8', errors='ignore'):
match = re_obj.search(line)
self.assertIsNone(
match,
"cookiecutter variable not replaced in {}".format(path))
def test_flake8_complaince(self):
"""generated project should pass flake8"""
self.generate_project()
try:
sh.flake8(self.destpath)
except sh.ErrorReturnCode as e:
raise AssertionError(e)
|
<commit_before>import re
import sh
from .base import DjangoCookieTestCase
class TestCookiecutterSubstitution(DjangoCookieTestCase):
"""Test that all cookiecutter instances are substituted"""
def test_all_cookiecutter_instances_are_substituted(self):
# Build a list containing absolute paths to the generated files
paths = self.generate_project()
# Construct the cookiecutter search pattern
pattern = "{{(\s?cookiecutter)[.](.*?)}}"
re_obj = re.compile(pattern)
# Assert that no match is found in any of the files
for path in paths:
for line in open(path, 'r'):
match = re_obj.search(line)
self.assertIsNone(
match,
"cookiecutter variable not replaced in {}".format(path))
def test_flake8_complaince(self):
"""generated project should pass flake8"""
self.generate_project()
try:
sh.flake8(self.destpath)
except sh.ErrorReturnCode as e:
raise AssertionError(e)
<commit_msg>Fix tests for python 3<commit_after>import re
import sh
from .base import DjangoCookieTestCase
class TestCookiecutterSubstitution(DjangoCookieTestCase):
"""Test that all cookiecutter instances are substituted"""
def test_all_cookiecutter_instances_are_substituted(self):
# Build a list containing absolute paths to the generated files
paths = self.generate_project()
# Construct the cookiecutter search pattern
pattern = "{{(\s?cookiecutter)[.](.*?)}}"
re_obj = re.compile(pattern)
# Assert that no match is found in any of the files
for path in paths:
for line in open(path, 'r', encoding='utf-8', errors='ignore'):
match = re_obj.search(line)
self.assertIsNone(
match,
"cookiecutter variable not replaced in {}".format(path))
def test_flake8_complaince(self):
"""generated project should pass flake8"""
self.generate_project()
try:
sh.flake8(self.destpath)
except sh.ErrorReturnCode as e:
raise AssertionError(e)
|
35fa85aa850dbdf6c81e0952911a40755aca5774
|
dags/main_summary.py
|
dags/main_summary.py
|
from airflow import DAG
from datetime import datetime, timedelta
from operators.emr_spark_operator import EMRSparkOperator
from airflow.operators import BashOperator
default_args = {
'owner': 'mreid@mozilla.com',
'depends_on_past': False,
'start_date': datetime(2016, 6, 27),
'email': ['telemetry-alerts@mozilla.com', 'mreid@mozilla.com'],
'email_on_failure': True,
'email_on_retry': True,
'retries': 2,
'retry_delay': timedelta(minutes=30),
}
dag = DAG('main_summary', default_args=default_args, schedule_interval='@daily')
# Make sure all the data for the given day has arrived before running.
t0 = BashOperator(task_id="delayed_start",
bash_command="sleep 1800",
dag=dag)
t1 = EMRSparkOperator(task_id="main_summary",
job_name="Main Summary View",
execution_timeout=timedelta(hours=6),
instance_count=10,
env = {"date": "{{ ds_nodash }}", "bucket": "{{ task.__class__.airflow_bucket }}"},
uri="https://raw.githubusercontent.com/mozilla/telemetry-airflow/master/jobs/main_summary_view.sh",
dag=dag)
# Wait a little while after midnight to start for a given day.
t1.set_upstream(t0)
|
from airflow import DAG
from datetime import datetime, timedelta
from operators.emr_spark_operator import EMRSparkOperator
from airflow.operators import BashOperator
default_args = {
'owner': 'mreid@mozilla.com',
'depends_on_past': False,
'start_date': datetime(2016, 6, 27),
'email': ['telemetry-alerts@mozilla.com', 'mreid@mozilla.com'],
'email_on_failure': True,
'email_on_retry': True,
'retries': 2,
'retry_delay': timedelta(minutes=30),
}
dag = DAG('main_summary', default_args=default_args, schedule_interval='@daily')
# Make sure all the data for the given day has arrived before running.
t0 = BashOperator(task_id="delayed_start",
bash_command="sleep 1800",
dag=dag)
t1 = EMRSparkOperator(task_id="main_summary",
job_name="Main Summary View",
execution_timeout=timedelta(hours=10),
instance_count=10,
env = {"date": "{{ ds_nodash }}", "bucket": "{{ task.__class__.airflow_bucket }}"},
uri="https://raw.githubusercontent.com/mozilla/telemetry-airflow/master/jobs/main_summary_view.sh",
dag=dag)
# Wait a little while after midnight to start for a given day.
t1.set_upstream(t0)
|
Increase timeout to 10 hours (temporarily).
|
Increase timeout to 10 hours (temporarily).
The job is expected to take 3-4 hours, but was intermittently
timing out at 6 hours. Increase to 10 hours while profiling and
debugging the job.
|
Python
|
mpl-2.0
|
opentrials/opentrials-airflow,opentrials/opentrials-airflow
|
from airflow import DAG
from datetime import datetime, timedelta
from operators.emr_spark_operator import EMRSparkOperator
from airflow.operators import BashOperator
default_args = {
'owner': 'mreid@mozilla.com',
'depends_on_past': False,
'start_date': datetime(2016, 6, 27),
'email': ['telemetry-alerts@mozilla.com', 'mreid@mozilla.com'],
'email_on_failure': True,
'email_on_retry': True,
'retries': 2,
'retry_delay': timedelta(minutes=30),
}
dag = DAG('main_summary', default_args=default_args, schedule_interval='@daily')
# Make sure all the data for the given day has arrived before running.
t0 = BashOperator(task_id="delayed_start",
bash_command="sleep 1800",
dag=dag)
t1 = EMRSparkOperator(task_id="main_summary",
job_name="Main Summary View",
execution_timeout=timedelta(hours=6),
instance_count=10,
env = {"date": "{{ ds_nodash }}", "bucket": "{{ task.__class__.airflow_bucket }}"},
uri="https://raw.githubusercontent.com/mozilla/telemetry-airflow/master/jobs/main_summary_view.sh",
dag=dag)
# Wait a little while after midnight to start for a given day.
t1.set_upstream(t0)
Increase timeout to 10 hours (temporarily).
The job is expected to take 3-4 hours, but was intermittently
timing out at 6 hours. Increase to 10 hours while profiling and
debugging the job.
|
from airflow import DAG
from datetime import datetime, timedelta
from operators.emr_spark_operator import EMRSparkOperator
from airflow.operators import BashOperator
default_args = {
'owner': 'mreid@mozilla.com',
'depends_on_past': False,
'start_date': datetime(2016, 6, 27),
'email': ['telemetry-alerts@mozilla.com', 'mreid@mozilla.com'],
'email_on_failure': True,
'email_on_retry': True,
'retries': 2,
'retry_delay': timedelta(minutes=30),
}
dag = DAG('main_summary', default_args=default_args, schedule_interval='@daily')
# Make sure all the data for the given day has arrived before running.
t0 = BashOperator(task_id="delayed_start",
bash_command="sleep 1800",
dag=dag)
t1 = EMRSparkOperator(task_id="main_summary",
job_name="Main Summary View",
execution_timeout=timedelta(hours=10),
instance_count=10,
env = {"date": "{{ ds_nodash }}", "bucket": "{{ task.__class__.airflow_bucket }}"},
uri="https://raw.githubusercontent.com/mozilla/telemetry-airflow/master/jobs/main_summary_view.sh",
dag=dag)
# Wait a little while after midnight to start for a given day.
t1.set_upstream(t0)
|
<commit_before>from airflow import DAG
from datetime import datetime, timedelta
from operators.emr_spark_operator import EMRSparkOperator
from airflow.operators import BashOperator
default_args = {
'owner': 'mreid@mozilla.com',
'depends_on_past': False,
'start_date': datetime(2016, 6, 27),
'email': ['telemetry-alerts@mozilla.com', 'mreid@mozilla.com'],
'email_on_failure': True,
'email_on_retry': True,
'retries': 2,
'retry_delay': timedelta(minutes=30),
}
dag = DAG('main_summary', default_args=default_args, schedule_interval='@daily')
# Make sure all the data for the given day has arrived before running.
t0 = BashOperator(task_id="delayed_start",
bash_command="sleep 1800",
dag=dag)
t1 = EMRSparkOperator(task_id="main_summary",
job_name="Main Summary View",
execution_timeout=timedelta(hours=6),
instance_count=10,
env = {"date": "{{ ds_nodash }}", "bucket": "{{ task.__class__.airflow_bucket }}"},
uri="https://raw.githubusercontent.com/mozilla/telemetry-airflow/master/jobs/main_summary_view.sh",
dag=dag)
# Wait a little while after midnight to start for a given day.
t1.set_upstream(t0)
<commit_msg>Increase timeout to 10 hours (temporarily).
The job is expected to take 3-4 hours, but was intermittently
timing out at 6 hours. Increase to 10 hours while profiling and
debugging the job.<commit_after>
|
from airflow import DAG
from datetime import datetime, timedelta
from operators.emr_spark_operator import EMRSparkOperator
from airflow.operators import BashOperator
default_args = {
'owner': 'mreid@mozilla.com',
'depends_on_past': False,
'start_date': datetime(2016, 6, 27),
'email': ['telemetry-alerts@mozilla.com', 'mreid@mozilla.com'],
'email_on_failure': True,
'email_on_retry': True,
'retries': 2,
'retry_delay': timedelta(minutes=30),
}
dag = DAG('main_summary', default_args=default_args, schedule_interval='@daily')
# Make sure all the data for the given day has arrived before running.
t0 = BashOperator(task_id="delayed_start",
bash_command="sleep 1800",
dag=dag)
t1 = EMRSparkOperator(task_id="main_summary",
job_name="Main Summary View",
execution_timeout=timedelta(hours=10),
instance_count=10,
env = {"date": "{{ ds_nodash }}", "bucket": "{{ task.__class__.airflow_bucket }}"},
uri="https://raw.githubusercontent.com/mozilla/telemetry-airflow/master/jobs/main_summary_view.sh",
dag=dag)
# Wait a little while after midnight to start for a given day.
t1.set_upstream(t0)
|
from airflow import DAG
from datetime import datetime, timedelta
from operators.emr_spark_operator import EMRSparkOperator
from airflow.operators import BashOperator
default_args = {
'owner': 'mreid@mozilla.com',
'depends_on_past': False,
'start_date': datetime(2016, 6, 27),
'email': ['telemetry-alerts@mozilla.com', 'mreid@mozilla.com'],
'email_on_failure': True,
'email_on_retry': True,
'retries': 2,
'retry_delay': timedelta(minutes=30),
}
dag = DAG('main_summary', default_args=default_args, schedule_interval='@daily')
# Make sure all the data for the given day has arrived before running.
t0 = BashOperator(task_id="delayed_start",
bash_command="sleep 1800",
dag=dag)
t1 = EMRSparkOperator(task_id="main_summary",
job_name="Main Summary View",
execution_timeout=timedelta(hours=6),
instance_count=10,
env = {"date": "{{ ds_nodash }}", "bucket": "{{ task.__class__.airflow_bucket }}"},
uri="https://raw.githubusercontent.com/mozilla/telemetry-airflow/master/jobs/main_summary_view.sh",
dag=dag)
# Wait a little while after midnight to start for a given day.
t1.set_upstream(t0)
Increase timeout to 10 hours (temporarily).
The job is expected to take 3-4 hours, but was intermittently
timing out at 6 hours. Increase to 10 hours while profiling and
debugging the job.from airflow import DAG
from datetime import datetime, timedelta
from operators.emr_spark_operator import EMRSparkOperator
from airflow.operators import BashOperator
default_args = {
'owner': 'mreid@mozilla.com',
'depends_on_past': False,
'start_date': datetime(2016, 6, 27),
'email': ['telemetry-alerts@mozilla.com', 'mreid@mozilla.com'],
'email_on_failure': True,
'email_on_retry': True,
'retries': 2,
'retry_delay': timedelta(minutes=30),
}
dag = DAG('main_summary', default_args=default_args, schedule_interval='@daily')
# Make sure all the data for the given day has arrived before running.
t0 = BashOperator(task_id="delayed_start",
bash_command="sleep 1800",
dag=dag)
t1 = EMRSparkOperator(task_id="main_summary",
job_name="Main Summary View",
execution_timeout=timedelta(hours=10),
instance_count=10,
env = {"date": "{{ ds_nodash }}", "bucket": "{{ task.__class__.airflow_bucket }}"},
uri="https://raw.githubusercontent.com/mozilla/telemetry-airflow/master/jobs/main_summary_view.sh",
dag=dag)
# Wait a little while after midnight to start for a given day.
t1.set_upstream(t0)
|
<commit_before>from airflow import DAG
from datetime import datetime, timedelta
from operators.emr_spark_operator import EMRSparkOperator
from airflow.operators import BashOperator
default_args = {
'owner': 'mreid@mozilla.com',
'depends_on_past': False,
'start_date': datetime(2016, 6, 27),
'email': ['telemetry-alerts@mozilla.com', 'mreid@mozilla.com'],
'email_on_failure': True,
'email_on_retry': True,
'retries': 2,
'retry_delay': timedelta(minutes=30),
}
dag = DAG('main_summary', default_args=default_args, schedule_interval='@daily')
# Make sure all the data for the given day has arrived before running.
t0 = BashOperator(task_id="delayed_start",
bash_command="sleep 1800",
dag=dag)
t1 = EMRSparkOperator(task_id="main_summary",
job_name="Main Summary View",
execution_timeout=timedelta(hours=6),
instance_count=10,
env = {"date": "{{ ds_nodash }}", "bucket": "{{ task.__class__.airflow_bucket }}"},
uri="https://raw.githubusercontent.com/mozilla/telemetry-airflow/master/jobs/main_summary_view.sh",
dag=dag)
# Wait a little while after midnight to start for a given day.
t1.set_upstream(t0)
<commit_msg>Increase timeout to 10 hours (temporarily).
The job is expected to take 3-4 hours, but was intermittently
timing out at 6 hours. Increase to 10 hours while profiling and
debugging the job.<commit_after>from airflow import DAG
from datetime import datetime, timedelta
from operators.emr_spark_operator import EMRSparkOperator
from airflow.operators import BashOperator
default_args = {
'owner': 'mreid@mozilla.com',
'depends_on_past': False,
'start_date': datetime(2016, 6, 27),
'email': ['telemetry-alerts@mozilla.com', 'mreid@mozilla.com'],
'email_on_failure': True,
'email_on_retry': True,
'retries': 2,
'retry_delay': timedelta(minutes=30),
}
dag = DAG('main_summary', default_args=default_args, schedule_interval='@daily')
# Make sure all the data for the given day has arrived before running.
t0 = BashOperator(task_id="delayed_start",
bash_command="sleep 1800",
dag=dag)
t1 = EMRSparkOperator(task_id="main_summary",
job_name="Main Summary View",
execution_timeout=timedelta(hours=10),
instance_count=10,
env = {"date": "{{ ds_nodash }}", "bucket": "{{ task.__class__.airflow_bucket }}"},
uri="https://raw.githubusercontent.com/mozilla/telemetry-airflow/master/jobs/main_summary_view.sh",
dag=dag)
# Wait a little while after midnight to start for a given day.
t1.set_upstream(t0)
|
055966bb3cf16d3d0ef5e03c5da85f6479ae6a0e
|
examples/annotation.py
|
examples/annotation.py
|
import os
from os.path import join as pjoin
from surfer import io
from surfer import viz
subj_dir = os.environ["SUBJECTS_DIR"]
subject_id = 'fsaverage'
sub = 'fsaverage'
hemi = 'lh'
surf = 'inflated'
data_path = pjoin(subj_dir, subject_id)
annot_path = pjoin(data_path, "label", "%s.aparc.annot" % "lh")
brain = viz.Brain(sub, hemi, surf)
brain.add_annotation(annot_path, contour=True)
# show all views
brain.show_view('lateral')
|
import os
from os.path import join as pjoin
from surfer import io
from surfer import viz
subj_dir = os.environ["SUBJECTS_DIR"]
subject_id = 'fsaverage'
sub = 'fsaverage'
hemi = 'lh'
surf = 'inflated'
data_path = pjoin(subj_dir, subject_id)
annot_path = pjoin(data_path, "label", "%s.aparc.annot" % "lh")
brain = viz.Brain(sub, hemi, surf)
brain.add_annotation(annot_path, borders=True)
# show all views
brain.show_view('lateral')
|
FIX : kwarg pb with annot contour in example
|
FIX : kwarg pb with annot contour in example
|
Python
|
bsd-3-clause
|
effigies/PySurfer,nipy/PySurfer,mwaskom/PySurfer,haribharadwaj/PySurfer,diego0020/PySurfer,Eric89GXL/PySurfer,bpinsard/PySurfer
|
import os
from os.path import join as pjoin
from surfer import io
from surfer import viz
subj_dir = os.environ["SUBJECTS_DIR"]
subject_id = 'fsaverage'
sub = 'fsaverage'
hemi = 'lh'
surf = 'inflated'
data_path = pjoin(subj_dir, subject_id)
annot_path = pjoin(data_path, "label", "%s.aparc.annot" % "lh")
brain = viz.Brain(sub, hemi, surf)
brain.add_annotation(annot_path, contour=True)
# show all views
brain.show_view('lateral')
FIX : kwarg pb with annot contour in example
|
import os
from os.path import join as pjoin
from surfer import io
from surfer import viz
subj_dir = os.environ["SUBJECTS_DIR"]
subject_id = 'fsaverage'
sub = 'fsaverage'
hemi = 'lh'
surf = 'inflated'
data_path = pjoin(subj_dir, subject_id)
annot_path = pjoin(data_path, "label", "%s.aparc.annot" % "lh")
brain = viz.Brain(sub, hemi, surf)
brain.add_annotation(annot_path, borders=True)
# show all views
brain.show_view('lateral')
|
<commit_before>import os
from os.path import join as pjoin
from surfer import io
from surfer import viz
subj_dir = os.environ["SUBJECTS_DIR"]
subject_id = 'fsaverage'
sub = 'fsaverage'
hemi = 'lh'
surf = 'inflated'
data_path = pjoin(subj_dir, subject_id)
annot_path = pjoin(data_path, "label", "%s.aparc.annot" % "lh")
brain = viz.Brain(sub, hemi, surf)
brain.add_annotation(annot_path, contour=True)
# show all views
brain.show_view('lateral')
<commit_msg>FIX : kwarg pb with annot contour in example<commit_after>
|
import os
from os.path import join as pjoin
from surfer import io
from surfer import viz
subj_dir = os.environ["SUBJECTS_DIR"]
subject_id = 'fsaverage'
sub = 'fsaverage'
hemi = 'lh'
surf = 'inflated'
data_path = pjoin(subj_dir, subject_id)
annot_path = pjoin(data_path, "label", "%s.aparc.annot" % "lh")
brain = viz.Brain(sub, hemi, surf)
brain.add_annotation(annot_path, borders=True)
# show all views
brain.show_view('lateral')
|
import os
from os.path import join as pjoin
from surfer import io
from surfer import viz
subj_dir = os.environ["SUBJECTS_DIR"]
subject_id = 'fsaverage'
sub = 'fsaverage'
hemi = 'lh'
surf = 'inflated'
data_path = pjoin(subj_dir, subject_id)
annot_path = pjoin(data_path, "label", "%s.aparc.annot" % "lh")
brain = viz.Brain(sub, hemi, surf)
brain.add_annotation(annot_path, contour=True)
# show all views
brain.show_view('lateral')
FIX : kwarg pb with annot contour in exampleimport os
from os.path import join as pjoin
from surfer import io
from surfer import viz
subj_dir = os.environ["SUBJECTS_DIR"]
subject_id = 'fsaverage'
sub = 'fsaverage'
hemi = 'lh'
surf = 'inflated'
data_path = pjoin(subj_dir, subject_id)
annot_path = pjoin(data_path, "label", "%s.aparc.annot" % "lh")
brain = viz.Brain(sub, hemi, surf)
brain.add_annotation(annot_path, borders=True)
# show all views
brain.show_view('lateral')
|
<commit_before>import os
from os.path import join as pjoin
from surfer import io
from surfer import viz
subj_dir = os.environ["SUBJECTS_DIR"]
subject_id = 'fsaverage'
sub = 'fsaverage'
hemi = 'lh'
surf = 'inflated'
data_path = pjoin(subj_dir, subject_id)
annot_path = pjoin(data_path, "label", "%s.aparc.annot" % "lh")
brain = viz.Brain(sub, hemi, surf)
brain.add_annotation(annot_path, contour=True)
# show all views
brain.show_view('lateral')
<commit_msg>FIX : kwarg pb with annot contour in example<commit_after>import os
from os.path import join as pjoin
from surfer import io
from surfer import viz
subj_dir = os.environ["SUBJECTS_DIR"]
subject_id = 'fsaverage'
sub = 'fsaverage'
hemi = 'lh'
surf = 'inflated'
data_path = pjoin(subj_dir, subject_id)
annot_path = pjoin(data_path, "label", "%s.aparc.annot" % "lh")
brain = viz.Brain(sub, hemi, surf)
brain.add_annotation(annot_path, borders=True)
# show all views
brain.show_view('lateral')
|
29aa5847feba9d5efceec2014d3524867b4284e1
|
go/testsettings.py
|
go/testsettings.py
|
import os
from settings import *
# This needs to point at the test riak buckets.
VUMI_API_CONFIG['riak_manager'] = {'bucket_prefix': 'test.'}
VUMI_API_CONFIG['redis_manager'] = {
'key_prefix': 'test',
'FAKE_REDIS': 'sure',
}
if os.environ.get('VUMIGO_FAST_TESTS'):
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
NOSE_ARGS = ['-evumitools', '-evumi_app', '-ehandlers', '-m^test']
MESSAGE_STORAGE = 'django.contrib.messages.storage.cookie.CookieStorage'
STATICFILES_STORAGE = 'pipeline.storage.NonPackagingPipelineStorage'
# disable console logging to avoid log messages messing up test output
LOGGING['loggers']['go']['handlers'].remove('console')
|
import os
from settings import *
SECRET_KEY = "test_secret"
# This needs to point at the test riak buckets.
VUMI_API_CONFIG['riak_manager'] = {'bucket_prefix': 'test.'}
VUMI_API_CONFIG['redis_manager'] = {
'key_prefix': 'test',
'FAKE_REDIS': 'sure',
}
if os.environ.get('VUMIGO_FAST_TESTS'):
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
NOSE_ARGS = ['-evumitools', '-evumi_app', '-ehandlers', '-m^test']
MESSAGE_STORAGE = 'django.contrib.messages.storage.cookie.CookieStorage'
STATICFILES_STORAGE = 'pipeline.storage.NonPackagingPipelineStorage'
# disable console logging to avoid log messages messing up test output
LOGGING['loggers']['go']['handlers'].remove('console')
|
Add SECRET_KEY which is required by Django 1.5.
|
Add SECRET_KEY which is required by Django 1.5.
|
Python
|
bsd-3-clause
|
praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go
|
import os
from settings import *
# This needs to point at the test riak buckets.
VUMI_API_CONFIG['riak_manager'] = {'bucket_prefix': 'test.'}
VUMI_API_CONFIG['redis_manager'] = {
'key_prefix': 'test',
'FAKE_REDIS': 'sure',
}
if os.environ.get('VUMIGO_FAST_TESTS'):
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
NOSE_ARGS = ['-evumitools', '-evumi_app', '-ehandlers', '-m^test']
MESSAGE_STORAGE = 'django.contrib.messages.storage.cookie.CookieStorage'
STATICFILES_STORAGE = 'pipeline.storage.NonPackagingPipelineStorage'
# disable console logging to avoid log messages messing up test output
LOGGING['loggers']['go']['handlers'].remove('console')
Add SECRET_KEY which is required by Django 1.5.
|
import os
from settings import *
SECRET_KEY = "test_secret"
# This needs to point at the test riak buckets.
VUMI_API_CONFIG['riak_manager'] = {'bucket_prefix': 'test.'}
VUMI_API_CONFIG['redis_manager'] = {
'key_prefix': 'test',
'FAKE_REDIS': 'sure',
}
if os.environ.get('VUMIGO_FAST_TESTS'):
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
NOSE_ARGS = ['-evumitools', '-evumi_app', '-ehandlers', '-m^test']
MESSAGE_STORAGE = 'django.contrib.messages.storage.cookie.CookieStorage'
STATICFILES_STORAGE = 'pipeline.storage.NonPackagingPipelineStorage'
# disable console logging to avoid log messages messing up test output
LOGGING['loggers']['go']['handlers'].remove('console')
|
<commit_before>import os
from settings import *
# This needs to point at the test riak buckets.
VUMI_API_CONFIG['riak_manager'] = {'bucket_prefix': 'test.'}
VUMI_API_CONFIG['redis_manager'] = {
'key_prefix': 'test',
'FAKE_REDIS': 'sure',
}
if os.environ.get('VUMIGO_FAST_TESTS'):
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
NOSE_ARGS = ['-evumitools', '-evumi_app', '-ehandlers', '-m^test']
MESSAGE_STORAGE = 'django.contrib.messages.storage.cookie.CookieStorage'
STATICFILES_STORAGE = 'pipeline.storage.NonPackagingPipelineStorage'
# disable console logging to avoid log messages messing up test output
LOGGING['loggers']['go']['handlers'].remove('console')
<commit_msg>Add SECRET_KEY which is required by Django 1.5.<commit_after>
|
import os
from settings import *
SECRET_KEY = "test_secret"
# This needs to point at the test riak buckets.
VUMI_API_CONFIG['riak_manager'] = {'bucket_prefix': 'test.'}
VUMI_API_CONFIG['redis_manager'] = {
'key_prefix': 'test',
'FAKE_REDIS': 'sure',
}
if os.environ.get('VUMIGO_FAST_TESTS'):
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
NOSE_ARGS = ['-evumitools', '-evumi_app', '-ehandlers', '-m^test']
MESSAGE_STORAGE = 'django.contrib.messages.storage.cookie.CookieStorage'
STATICFILES_STORAGE = 'pipeline.storage.NonPackagingPipelineStorage'
# disable console logging to avoid log messages messing up test output
LOGGING['loggers']['go']['handlers'].remove('console')
|
import os
from settings import *
# This needs to point at the test riak buckets.
VUMI_API_CONFIG['riak_manager'] = {'bucket_prefix': 'test.'}
VUMI_API_CONFIG['redis_manager'] = {
'key_prefix': 'test',
'FAKE_REDIS': 'sure',
}
if os.environ.get('VUMIGO_FAST_TESTS'):
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
NOSE_ARGS = ['-evumitools', '-evumi_app', '-ehandlers', '-m^test']
MESSAGE_STORAGE = 'django.contrib.messages.storage.cookie.CookieStorage'
STATICFILES_STORAGE = 'pipeline.storage.NonPackagingPipelineStorage'
# disable console logging to avoid log messages messing up test output
LOGGING['loggers']['go']['handlers'].remove('console')
Add SECRET_KEY which is required by Django 1.5.import os
from settings import *
SECRET_KEY = "test_secret"
# This needs to point at the test riak buckets.
VUMI_API_CONFIG['riak_manager'] = {'bucket_prefix': 'test.'}
VUMI_API_CONFIG['redis_manager'] = {
'key_prefix': 'test',
'FAKE_REDIS': 'sure',
}
if os.environ.get('VUMIGO_FAST_TESTS'):
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
NOSE_ARGS = ['-evumitools', '-evumi_app', '-ehandlers', '-m^test']
MESSAGE_STORAGE = 'django.contrib.messages.storage.cookie.CookieStorage'
STATICFILES_STORAGE = 'pipeline.storage.NonPackagingPipelineStorage'
# disable console logging to avoid log messages messing up test output
LOGGING['loggers']['go']['handlers'].remove('console')
|
<commit_before>import os
from settings import *
# This needs to point at the test riak buckets.
VUMI_API_CONFIG['riak_manager'] = {'bucket_prefix': 'test.'}
VUMI_API_CONFIG['redis_manager'] = {
'key_prefix': 'test',
'FAKE_REDIS': 'sure',
}
if os.environ.get('VUMIGO_FAST_TESTS'):
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
NOSE_ARGS = ['-evumitools', '-evumi_app', '-ehandlers', '-m^test']
MESSAGE_STORAGE = 'django.contrib.messages.storage.cookie.CookieStorage'
STATICFILES_STORAGE = 'pipeline.storage.NonPackagingPipelineStorage'
# disable console logging to avoid log messages messing up test output
LOGGING['loggers']['go']['handlers'].remove('console')
<commit_msg>Add SECRET_KEY which is required by Django 1.5.<commit_after>import os
from settings import *
SECRET_KEY = "test_secret"
# This needs to point at the test riak buckets.
VUMI_API_CONFIG['riak_manager'] = {'bucket_prefix': 'test.'}
VUMI_API_CONFIG['redis_manager'] = {
'key_prefix': 'test',
'FAKE_REDIS': 'sure',
}
if os.environ.get('VUMIGO_FAST_TESTS'):
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
NOSE_ARGS = ['-evumitools', '-evumi_app', '-ehandlers', '-m^test']
MESSAGE_STORAGE = 'django.contrib.messages.storage.cookie.CookieStorage'
STATICFILES_STORAGE = 'pipeline.storage.NonPackagingPipelineStorage'
# disable console logging to avoid log messages messing up test output
LOGGING['loggers']['go']['handlers'].remove('console')
|
a6b14d2f80355e556c466b52b518dc808c90c54a
|
polling_stations/settings/static_files.py
|
polling_stations/settings/static_files.py
|
from dc_theme.settings import get_pipeline_settings
from dc_theme.settings import STATICFILES_FINDERS
STATICFILES_STORAGE = 'pipeline.storage.PipelineStorage'
PIPELINE = get_pipeline_settings(
extra_css=[
'custom_css/style.scss',
'font-awesome/css/font-awesome.min.css',
],
extra_js=[],
)
PIPELINE['JS_COMPRESSOR'] = 'pipeline.compressors.uglifyjs.UglifyJSCompressor'
PIPELINE['UGLIFYJS_BINARY'] = 'node_modules/uglify-js/bin/uglifyjs'
PIPELINE['STYLESHEETS']['map'] = {
'source_filenames': [
'leaflet/dist/leaflet.css',
'custom_css/map.css',
'leaflet-extra-markers/dist/css/leaflet.extra-markers.min.css',
],
'output_filename': 'css/map.css',
}
PIPELINE['JAVASCRIPT']['map'] = {
'source_filenames': [
'leaflet/dist/leaflet.js',
'leaflet-extra-markers/dist/js/leaflet.extra-markers.min.js',
'@mapbox/polyline/src/polyline.js',
'custom_js/polyline_global.js',
'custom_js/map.js',
],
'output_filename': 'js/map.js',
}
COMPRESS_CSS_FILTERS = ['compressor.filters.css_default.CssAbsoluteFilter']
|
from dc_theme.settings import get_pipeline_settings
from dc_theme.settings import STATICFILES_FINDERS, STATICFILES_STORAGE
PIPELINE = get_pipeline_settings(
extra_css=[
'custom_css/style.scss',
'font-awesome/css/font-awesome.min.css',
],
extra_js=[],
)
PIPELINE['STYLESHEETS']['map'] = {
'source_filenames': [
'leaflet/dist/leaflet.css',
'custom_css/map.css',
'leaflet-extra-markers/dist/css/leaflet.extra-markers.min.css',
],
'output_filename': 'css/map.css',
}
PIPELINE['JAVASCRIPT']['map'] = {
'source_filenames': [
'leaflet/dist/leaflet.js',
'leaflet-extra-markers/dist/js/leaflet.extra-markers.min.js',
'@mapbox/polyline/src/polyline.js',
'custom_js/polyline_global.js',
'custom_js/map.js',
],
'output_filename': 'js/map.js',
}
|
Remove old pipeline and compressor settings
|
Remove old pipeline and compressor settings
libsass has a compresssion mode that's enabled by default in the 0.3
dc base theme. This removes the need for uglifyjs
|
Python
|
bsd-3-clause
|
DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations
|
from dc_theme.settings import get_pipeline_settings
from dc_theme.settings import STATICFILES_FINDERS
STATICFILES_STORAGE = 'pipeline.storage.PipelineStorage'
PIPELINE = get_pipeline_settings(
extra_css=[
'custom_css/style.scss',
'font-awesome/css/font-awesome.min.css',
],
extra_js=[],
)
PIPELINE['JS_COMPRESSOR'] = 'pipeline.compressors.uglifyjs.UglifyJSCompressor'
PIPELINE['UGLIFYJS_BINARY'] = 'node_modules/uglify-js/bin/uglifyjs'
PIPELINE['STYLESHEETS']['map'] = {
'source_filenames': [
'leaflet/dist/leaflet.css',
'custom_css/map.css',
'leaflet-extra-markers/dist/css/leaflet.extra-markers.min.css',
],
'output_filename': 'css/map.css',
}
PIPELINE['JAVASCRIPT']['map'] = {
'source_filenames': [
'leaflet/dist/leaflet.js',
'leaflet-extra-markers/dist/js/leaflet.extra-markers.min.js',
'@mapbox/polyline/src/polyline.js',
'custom_js/polyline_global.js',
'custom_js/map.js',
],
'output_filename': 'js/map.js',
}
COMPRESS_CSS_FILTERS = ['compressor.filters.css_default.CssAbsoluteFilter']
Remove old pipeline and compressor settings
libsass has a compresssion mode that's enabled by default in the 0.3
dc base theme. This removes the need for uglifyjs
|
from dc_theme.settings import get_pipeline_settings
from dc_theme.settings import STATICFILES_FINDERS, STATICFILES_STORAGE
PIPELINE = get_pipeline_settings(
extra_css=[
'custom_css/style.scss',
'font-awesome/css/font-awesome.min.css',
],
extra_js=[],
)
PIPELINE['STYLESHEETS']['map'] = {
'source_filenames': [
'leaflet/dist/leaflet.css',
'custom_css/map.css',
'leaflet-extra-markers/dist/css/leaflet.extra-markers.min.css',
],
'output_filename': 'css/map.css',
}
PIPELINE['JAVASCRIPT']['map'] = {
'source_filenames': [
'leaflet/dist/leaflet.js',
'leaflet-extra-markers/dist/js/leaflet.extra-markers.min.js',
'@mapbox/polyline/src/polyline.js',
'custom_js/polyline_global.js',
'custom_js/map.js',
],
'output_filename': 'js/map.js',
}
|
<commit_before>from dc_theme.settings import get_pipeline_settings
from dc_theme.settings import STATICFILES_FINDERS
STATICFILES_STORAGE = 'pipeline.storage.PipelineStorage'
PIPELINE = get_pipeline_settings(
extra_css=[
'custom_css/style.scss',
'font-awesome/css/font-awesome.min.css',
],
extra_js=[],
)
PIPELINE['JS_COMPRESSOR'] = 'pipeline.compressors.uglifyjs.UglifyJSCompressor'
PIPELINE['UGLIFYJS_BINARY'] = 'node_modules/uglify-js/bin/uglifyjs'
PIPELINE['STYLESHEETS']['map'] = {
'source_filenames': [
'leaflet/dist/leaflet.css',
'custom_css/map.css',
'leaflet-extra-markers/dist/css/leaflet.extra-markers.min.css',
],
'output_filename': 'css/map.css',
}
PIPELINE['JAVASCRIPT']['map'] = {
'source_filenames': [
'leaflet/dist/leaflet.js',
'leaflet-extra-markers/dist/js/leaflet.extra-markers.min.js',
'@mapbox/polyline/src/polyline.js',
'custom_js/polyline_global.js',
'custom_js/map.js',
],
'output_filename': 'js/map.js',
}
COMPRESS_CSS_FILTERS = ['compressor.filters.css_default.CssAbsoluteFilter']
<commit_msg>Remove old pipeline and compressor settings
libsass has a compresssion mode that's enabled by default in the 0.3
dc base theme. This removes the need for uglifyjs<commit_after>
|
from dc_theme.settings import get_pipeline_settings
from dc_theme.settings import STATICFILES_FINDERS, STATICFILES_STORAGE
PIPELINE = get_pipeline_settings(
extra_css=[
'custom_css/style.scss',
'font-awesome/css/font-awesome.min.css',
],
extra_js=[],
)
PIPELINE['STYLESHEETS']['map'] = {
'source_filenames': [
'leaflet/dist/leaflet.css',
'custom_css/map.css',
'leaflet-extra-markers/dist/css/leaflet.extra-markers.min.css',
],
'output_filename': 'css/map.css',
}
PIPELINE['JAVASCRIPT']['map'] = {
'source_filenames': [
'leaflet/dist/leaflet.js',
'leaflet-extra-markers/dist/js/leaflet.extra-markers.min.js',
'@mapbox/polyline/src/polyline.js',
'custom_js/polyline_global.js',
'custom_js/map.js',
],
'output_filename': 'js/map.js',
}
|
from dc_theme.settings import get_pipeline_settings
from dc_theme.settings import STATICFILES_FINDERS
STATICFILES_STORAGE = 'pipeline.storage.PipelineStorage'
PIPELINE = get_pipeline_settings(
extra_css=[
'custom_css/style.scss',
'font-awesome/css/font-awesome.min.css',
],
extra_js=[],
)
PIPELINE['JS_COMPRESSOR'] = 'pipeline.compressors.uglifyjs.UglifyJSCompressor'
PIPELINE['UGLIFYJS_BINARY'] = 'node_modules/uglify-js/bin/uglifyjs'
PIPELINE['STYLESHEETS']['map'] = {
'source_filenames': [
'leaflet/dist/leaflet.css',
'custom_css/map.css',
'leaflet-extra-markers/dist/css/leaflet.extra-markers.min.css',
],
'output_filename': 'css/map.css',
}
PIPELINE['JAVASCRIPT']['map'] = {
'source_filenames': [
'leaflet/dist/leaflet.js',
'leaflet-extra-markers/dist/js/leaflet.extra-markers.min.js',
'@mapbox/polyline/src/polyline.js',
'custom_js/polyline_global.js',
'custom_js/map.js',
],
'output_filename': 'js/map.js',
}
COMPRESS_CSS_FILTERS = ['compressor.filters.css_default.CssAbsoluteFilter']
Remove old pipeline and compressor settings
libsass has a compresssion mode that's enabled by default in the 0.3
dc base theme. This removes the need for uglifyjsfrom dc_theme.settings import get_pipeline_settings
from dc_theme.settings import STATICFILES_FINDERS, STATICFILES_STORAGE
PIPELINE = get_pipeline_settings(
extra_css=[
'custom_css/style.scss',
'font-awesome/css/font-awesome.min.css',
],
extra_js=[],
)
PIPELINE['STYLESHEETS']['map'] = {
'source_filenames': [
'leaflet/dist/leaflet.css',
'custom_css/map.css',
'leaflet-extra-markers/dist/css/leaflet.extra-markers.min.css',
],
'output_filename': 'css/map.css',
}
PIPELINE['JAVASCRIPT']['map'] = {
'source_filenames': [
'leaflet/dist/leaflet.js',
'leaflet-extra-markers/dist/js/leaflet.extra-markers.min.js',
'@mapbox/polyline/src/polyline.js',
'custom_js/polyline_global.js',
'custom_js/map.js',
],
'output_filename': 'js/map.js',
}
|
<commit_before>from dc_theme.settings import get_pipeline_settings
from dc_theme.settings import STATICFILES_FINDERS
STATICFILES_STORAGE = 'pipeline.storage.PipelineStorage'
PIPELINE = get_pipeline_settings(
extra_css=[
'custom_css/style.scss',
'font-awesome/css/font-awesome.min.css',
],
extra_js=[],
)
PIPELINE['JS_COMPRESSOR'] = 'pipeline.compressors.uglifyjs.UglifyJSCompressor'
PIPELINE['UGLIFYJS_BINARY'] = 'node_modules/uglify-js/bin/uglifyjs'
PIPELINE['STYLESHEETS']['map'] = {
'source_filenames': [
'leaflet/dist/leaflet.css',
'custom_css/map.css',
'leaflet-extra-markers/dist/css/leaflet.extra-markers.min.css',
],
'output_filename': 'css/map.css',
}
PIPELINE['JAVASCRIPT']['map'] = {
'source_filenames': [
'leaflet/dist/leaflet.js',
'leaflet-extra-markers/dist/js/leaflet.extra-markers.min.js',
'@mapbox/polyline/src/polyline.js',
'custom_js/polyline_global.js',
'custom_js/map.js',
],
'output_filename': 'js/map.js',
}
COMPRESS_CSS_FILTERS = ['compressor.filters.css_default.CssAbsoluteFilter']
<commit_msg>Remove old pipeline and compressor settings
libsass has a compresssion mode that's enabled by default in the 0.3
dc base theme. This removes the need for uglifyjs<commit_after>from dc_theme.settings import get_pipeline_settings
from dc_theme.settings import STATICFILES_FINDERS, STATICFILES_STORAGE
PIPELINE = get_pipeline_settings(
extra_css=[
'custom_css/style.scss',
'font-awesome/css/font-awesome.min.css',
],
extra_js=[],
)
PIPELINE['STYLESHEETS']['map'] = {
'source_filenames': [
'leaflet/dist/leaflet.css',
'custom_css/map.css',
'leaflet-extra-markers/dist/css/leaflet.extra-markers.min.css',
],
'output_filename': 'css/map.css',
}
PIPELINE['JAVASCRIPT']['map'] = {
'source_filenames': [
'leaflet/dist/leaflet.js',
'leaflet-extra-markers/dist/js/leaflet.extra-markers.min.js',
'@mapbox/polyline/src/polyline.js',
'custom_js/polyline_global.js',
'custom_js/map.js',
],
'output_filename': 'js/map.js',
}
|
63e5bd0e7c771e4a2efe2ff203b75e4a56024af5
|
app.py
|
app.py
|
"""
Main entry point of the logup-factory
"""
from flask import Flask
from flask.ext.mongoengine import MongoEngine
from flask import request
from flask import render_template
app = Flask(__name__)
app.config.from_pyfile('flask-conf.cfg')
db = MongoEngine(app)
@app.route('/')
def index():
return render_template('index.html')
@app.route('/signup', methods=['GET', 'POST'])
def signup():
if request.method == 'GET':
return render_template('signup.html')
else:
return 'ok'
@app.route('/login', methods=['GET', 'POST'])
def login():
if request.method == 'GET':
return render_template('login.html')
else:
return 'ok'
@app.route('/forgot-password', methods=['GET', 'POST'])
def forgot_password():
if request.method == 'GET':
return render_template('forgot-password.html')
else:
return 'ok'
@app.route('/password-reset/<token>', methods=['GET', 'POST'])
def password_reset(token):
if request.method == 'GET':
return render_template('password-reset.html')
else:
return 'ok'
if __name__ == '__main__':
app.run()
|
"""
Main entry point of the logup-factory
"""
from flask import Flask
from flask.ext.mongoengine import MongoEngine
from flask import request
from flask import render_template
app = Flask(__name__, static_url_path='', static_folder='frontend/dist')
app.config.from_pyfile('flask-conf.cfg')
# db = MongoEngine(app)
@app.route('/')
def index():
return render_template('index.html')
@app.route('/signup', methods=['GET', 'POST'])
def signup():
if request.method == 'GET':
return render_template('signup.html')
else:
return 'ok'
@app.route('/login', methods=['GET', 'POST'])
def login():
if request.method == 'GET':
return render_template('login.html')
else:
return 'ok'
@app.route('/forgot-password', methods=['GET', 'POST'])
def forgot_password():
if request.method == 'GET':
return render_template('forgot-password.html')
else:
return 'ok'
@app.route('/password-reset/<token>', methods=['GET', 'POST'])
def password_reset(token):
if request.method == 'GET':
return render_template('password-reset.html')
else:
return 'ok'
if __name__ == '__main__':
app.run()
|
Change static directory, commented mongo dev stuff
|
Change static directory, commented mongo dev stuff
|
Python
|
mit
|
cogniteev/logup-factory,cogniteev/logup-factory
|
"""
Main entry point of the logup-factory
"""
from flask import Flask
from flask.ext.mongoengine import MongoEngine
from flask import request
from flask import render_template
app = Flask(__name__)
app.config.from_pyfile('flask-conf.cfg')
db = MongoEngine(app)
@app.route('/')
def index():
return render_template('index.html')
@app.route('/signup', methods=['GET', 'POST'])
def signup():
if request.method == 'GET':
return render_template('signup.html')
else:
return 'ok'
@app.route('/login', methods=['GET', 'POST'])
def login():
if request.method == 'GET':
return render_template('login.html')
else:
return 'ok'
@app.route('/forgot-password', methods=['GET', 'POST'])
def forgot_password():
if request.method == 'GET':
return render_template('forgot-password.html')
else:
return 'ok'
@app.route('/password-reset/<token>', methods=['GET', 'POST'])
def password_reset(token):
if request.method == 'GET':
return render_template('password-reset.html')
else:
return 'ok'
if __name__ == '__main__':
app.run()
Change static directory, commented mongo dev stuff
|
"""
Main entry point of the logup-factory
"""
from flask import Flask
from flask.ext.mongoengine import MongoEngine
from flask import request
from flask import render_template
app = Flask(__name__, static_url_path='', static_folder='frontend/dist')
app.config.from_pyfile('flask-conf.cfg')
# db = MongoEngine(app)
@app.route('/')
def index():
return render_template('index.html')
@app.route('/signup', methods=['GET', 'POST'])
def signup():
if request.method == 'GET':
return render_template('signup.html')
else:
return 'ok'
@app.route('/login', methods=['GET', 'POST'])
def login():
if request.method == 'GET':
return render_template('login.html')
else:
return 'ok'
@app.route('/forgot-password', methods=['GET', 'POST'])
def forgot_password():
if request.method == 'GET':
return render_template('forgot-password.html')
else:
return 'ok'
@app.route('/password-reset/<token>', methods=['GET', 'POST'])
def password_reset(token):
if request.method == 'GET':
return render_template('password-reset.html')
else:
return 'ok'
if __name__ == '__main__':
app.run()
|
<commit_before>"""
Main entry point of the logup-factory
"""
from flask import Flask
from flask.ext.mongoengine import MongoEngine
from flask import request
from flask import render_template
app = Flask(__name__)
app.config.from_pyfile('flask-conf.cfg')
db = MongoEngine(app)
@app.route('/')
def index():
return render_template('index.html')
@app.route('/signup', methods=['GET', 'POST'])
def signup():
if request.method == 'GET':
return render_template('signup.html')
else:
return 'ok'
@app.route('/login', methods=['GET', 'POST'])
def login():
if request.method == 'GET':
return render_template('login.html')
else:
return 'ok'
@app.route('/forgot-password', methods=['GET', 'POST'])
def forgot_password():
if request.method == 'GET':
return render_template('forgot-password.html')
else:
return 'ok'
@app.route('/password-reset/<token>', methods=['GET', 'POST'])
def password_reset(token):
if request.method == 'GET':
return render_template('password-reset.html')
else:
return 'ok'
if __name__ == '__main__':
app.run()
<commit_msg>Change static directory, commented mongo dev stuff<commit_after>
|
"""
Main entry point of the logup-factory
"""
from flask import Flask
from flask.ext.mongoengine import MongoEngine
from flask import request
from flask import render_template
app = Flask(__name__, static_url_path='', static_folder='frontend/dist')
app.config.from_pyfile('flask-conf.cfg')
# db = MongoEngine(app)
@app.route('/')
def index():
return render_template('index.html')
@app.route('/signup', methods=['GET', 'POST'])
def signup():
if request.method == 'GET':
return render_template('signup.html')
else:
return 'ok'
@app.route('/login', methods=['GET', 'POST'])
def login():
if request.method == 'GET':
return render_template('login.html')
else:
return 'ok'
@app.route('/forgot-password', methods=['GET', 'POST'])
def forgot_password():
if request.method == 'GET':
return render_template('forgot-password.html')
else:
return 'ok'
@app.route('/password-reset/<token>', methods=['GET', 'POST'])
def password_reset(token):
if request.method == 'GET':
return render_template('password-reset.html')
else:
return 'ok'
if __name__ == '__main__':
app.run()
|
"""
Main entry point of the logup-factory
"""
from flask import Flask
from flask.ext.mongoengine import MongoEngine
from flask import request
from flask import render_template
app = Flask(__name__)
app.config.from_pyfile('flask-conf.cfg')
db = MongoEngine(app)
@app.route('/')
def index():
return render_template('index.html')
@app.route('/signup', methods=['GET', 'POST'])
def signup():
if request.method == 'GET':
return render_template('signup.html')
else:
return 'ok'
@app.route('/login', methods=['GET', 'POST'])
def login():
if request.method == 'GET':
return render_template('login.html')
else:
return 'ok'
@app.route('/forgot-password', methods=['GET', 'POST'])
def forgot_password():
if request.method == 'GET':
return render_template('forgot-password.html')
else:
return 'ok'
@app.route('/password-reset/<token>', methods=['GET', 'POST'])
def password_reset(token):
if request.method == 'GET':
return render_template('password-reset.html')
else:
return 'ok'
if __name__ == '__main__':
app.run()
Change static directory, commented mongo dev stuff"""
Main entry point of the logup-factory
"""
from flask import Flask
from flask.ext.mongoengine import MongoEngine
from flask import request
from flask import render_template
app = Flask(__name__, static_url_path='', static_folder='frontend/dist')
app.config.from_pyfile('flask-conf.cfg')
# db = MongoEngine(app)
@app.route('/')
def index():
return render_template('index.html')
@app.route('/signup', methods=['GET', 'POST'])
def signup():
if request.method == 'GET':
return render_template('signup.html')
else:
return 'ok'
@app.route('/login', methods=['GET', 'POST'])
def login():
if request.method == 'GET':
return render_template('login.html')
else:
return 'ok'
@app.route('/forgot-password', methods=['GET', 'POST'])
def forgot_password():
if request.method == 'GET':
return render_template('forgot-password.html')
else:
return 'ok'
@app.route('/password-reset/<token>', methods=['GET', 'POST'])
def password_reset(token):
if request.method == 'GET':
return render_template('password-reset.html')
else:
return 'ok'
if __name__ == '__main__':
app.run()
|
<commit_before>"""
Main entry point of the logup-factory
"""
from flask import Flask
from flask.ext.mongoengine import MongoEngine
from flask import request
from flask import render_template
app = Flask(__name__)
app.config.from_pyfile('flask-conf.cfg')
db = MongoEngine(app)
@app.route('/')
def index():
return render_template('index.html')
@app.route('/signup', methods=['GET', 'POST'])
def signup():
if request.method == 'GET':
return render_template('signup.html')
else:
return 'ok'
@app.route('/login', methods=['GET', 'POST'])
def login():
if request.method == 'GET':
return render_template('login.html')
else:
return 'ok'
@app.route('/forgot-password', methods=['GET', 'POST'])
def forgot_password():
if request.method == 'GET':
return render_template('forgot-password.html')
else:
return 'ok'
@app.route('/password-reset/<token>', methods=['GET', 'POST'])
def password_reset(token):
if request.method == 'GET':
return render_template('password-reset.html')
else:
return 'ok'
if __name__ == '__main__':
app.run()
<commit_msg>Change static directory, commented mongo dev stuff<commit_after>"""
Main entry point of the logup-factory
"""
from flask import Flask
from flask.ext.mongoengine import MongoEngine
from flask import request
from flask import render_template
app = Flask(__name__, static_url_path='', static_folder='frontend/dist')
app.config.from_pyfile('flask-conf.cfg')
# db = MongoEngine(app)
@app.route('/')
def index():
return render_template('index.html')
@app.route('/signup', methods=['GET', 'POST'])
def signup():
if request.method == 'GET':
return render_template('signup.html')
else:
return 'ok'
@app.route('/login', methods=['GET', 'POST'])
def login():
if request.method == 'GET':
return render_template('login.html')
else:
return 'ok'
@app.route('/forgot-password', methods=['GET', 'POST'])
def forgot_password():
if request.method == 'GET':
return render_template('forgot-password.html')
else:
return 'ok'
@app.route('/password-reset/<token>', methods=['GET', 'POST'])
def password_reset(token):
if request.method == 'GET':
return render_template('password-reset.html')
else:
return 'ok'
if __name__ == '__main__':
app.run()
|
58823e20e3891cea7198be15b7c85395521086e1
|
extension_course/tests/conftest.py
|
extension_course/tests/conftest.py
|
import pytest
from django.conf import settings
from django.core.management import call_command
from events.tests.conftest import (administrative_division, administrative_division_type, data_source, event, # noqa
location_id, minimal_event_dict, municipality, organization, place, user,
user_api_client, django_db_modify_db_settings)
# Django test harness tries to serialize DB in order to support transactions
# within tests. (It restores the snapshot after such tests).
# This fails with modeltranslate, as the serialization is done before
# sync_translation_fields has a chance to run. Thus the fields are missing
# and serialization fails horribly.
#@pytest.fixture(scope='session')
#def django_db_modify_db_settings(django_db_modify_db_settings_xdist_suffix):
# settings.DATABASES['default']['TEST']['SERIALIZE'] = False
@pytest.fixture(scope='session')
def django_db_setup(django_db_setup, django_db_blocker):
with django_db_blocker.unblock():
call_command('sync_translation_fields', '--noinput')
@pytest.fixture(autouse=True)
def auto_enable(settings):
settings.AUTO_ENABLED_EXTENSIONS = ['course']
|
import pytest
from events.tests.conftest import (administrative_division, administrative_division_type, data_source, event, # noqa
location_id, minimal_event_dict, municipality, organization, place, user,
user_api_client, django_db_modify_db_settings, django_db_setup)
@pytest.fixture(autouse=True)
def auto_enable(settings):
settings.AUTO_ENABLED_EXTENSIONS = ['course']
|
Remove some needless code from course extension tests
|
Remove some needless code from course extension tests
|
Python
|
mit
|
City-of-Helsinki/linkedevents,City-of-Helsinki/linkedevents,City-of-Helsinki/linkedevents
|
import pytest
from django.conf import settings
from django.core.management import call_command
from events.tests.conftest import (administrative_division, administrative_division_type, data_source, event, # noqa
location_id, minimal_event_dict, municipality, organization, place, user,
user_api_client, django_db_modify_db_settings)
# Django test harness tries to serialize DB in order to support transactions
# within tests. (It restores the snapshot after such tests).
# This fails with modeltranslate, as the serialization is done before
# sync_translation_fields has a chance to run. Thus the fields are missing
# and serialization fails horribly.
#@pytest.fixture(scope='session')
#def django_db_modify_db_settings(django_db_modify_db_settings_xdist_suffix):
# settings.DATABASES['default']['TEST']['SERIALIZE'] = False
@pytest.fixture(scope='session')
def django_db_setup(django_db_setup, django_db_blocker):
with django_db_blocker.unblock():
call_command('sync_translation_fields', '--noinput')
@pytest.fixture(autouse=True)
def auto_enable(settings):
settings.AUTO_ENABLED_EXTENSIONS = ['course']
Remove some needless code from course extension tests
|
import pytest
from events.tests.conftest import (administrative_division, administrative_division_type, data_source, event, # noqa
location_id, minimal_event_dict, municipality, organization, place, user,
user_api_client, django_db_modify_db_settings, django_db_setup)
@pytest.fixture(autouse=True)
def auto_enable(settings):
settings.AUTO_ENABLED_EXTENSIONS = ['course']
|
<commit_before>import pytest
from django.conf import settings
from django.core.management import call_command
from events.tests.conftest import (administrative_division, administrative_division_type, data_source, event, # noqa
location_id, minimal_event_dict, municipality, organization, place, user,
user_api_client, django_db_modify_db_settings)
# Django test harness tries to serialize DB in order to support transactions
# within tests. (It restores the snapshot after such tests).
# This fails with modeltranslate, as the serialization is done before
# sync_translation_fields has a chance to run. Thus the fields are missing
# and serialization fails horribly.
#@pytest.fixture(scope='session')
#def django_db_modify_db_settings(django_db_modify_db_settings_xdist_suffix):
# settings.DATABASES['default']['TEST']['SERIALIZE'] = False
@pytest.fixture(scope='session')
def django_db_setup(django_db_setup, django_db_blocker):
with django_db_blocker.unblock():
call_command('sync_translation_fields', '--noinput')
@pytest.fixture(autouse=True)
def auto_enable(settings):
settings.AUTO_ENABLED_EXTENSIONS = ['course']
<commit_msg>Remove some needless code from course extension tests<commit_after>
|
import pytest
from events.tests.conftest import (administrative_division, administrative_division_type, data_source, event, # noqa
location_id, minimal_event_dict, municipality, organization, place, user,
user_api_client, django_db_modify_db_settings, django_db_setup)
@pytest.fixture(autouse=True)
def auto_enable(settings):
settings.AUTO_ENABLED_EXTENSIONS = ['course']
|
import pytest
from django.conf import settings
from django.core.management import call_command
from events.tests.conftest import (administrative_division, administrative_division_type, data_source, event, # noqa
location_id, minimal_event_dict, municipality, organization, place, user,
user_api_client, django_db_modify_db_settings)
# Django test harness tries to serialize DB in order to support transactions
# within tests. (It restores the snapshot after such tests).
# This fails with modeltranslate, as the serialization is done before
# sync_translation_fields has a chance to run. Thus the fields are missing
# and serialization fails horribly.
#@pytest.fixture(scope='session')
#def django_db_modify_db_settings(django_db_modify_db_settings_xdist_suffix):
# settings.DATABASES['default']['TEST']['SERIALIZE'] = False
@pytest.fixture(scope='session')
def django_db_setup(django_db_setup, django_db_blocker):
with django_db_blocker.unblock():
call_command('sync_translation_fields', '--noinput')
@pytest.fixture(autouse=True)
def auto_enable(settings):
settings.AUTO_ENABLED_EXTENSIONS = ['course']
Remove some needless code from course extension testsimport pytest
from events.tests.conftest import (administrative_division, administrative_division_type, data_source, event, # noqa
location_id, minimal_event_dict, municipality, organization, place, user,
user_api_client, django_db_modify_db_settings, django_db_setup)
@pytest.fixture(autouse=True)
def auto_enable(settings):
settings.AUTO_ENABLED_EXTENSIONS = ['course']
|
<commit_before>import pytest
from django.conf import settings
from django.core.management import call_command
from events.tests.conftest import (administrative_division, administrative_division_type, data_source, event, # noqa
location_id, minimal_event_dict, municipality, organization, place, user,
user_api_client, django_db_modify_db_settings)
# Django test harness tries to serialize DB in order to support transactions
# within tests. (It restores the snapshot after such tests).
# This fails with modeltranslate, as the serialization is done before
# sync_translation_fields has a chance to run. Thus the fields are missing
# and serialization fails horribly.
#@pytest.fixture(scope='session')
#def django_db_modify_db_settings(django_db_modify_db_settings_xdist_suffix):
# settings.DATABASES['default']['TEST']['SERIALIZE'] = False
@pytest.fixture(scope='session')
def django_db_setup(django_db_setup, django_db_blocker):
with django_db_blocker.unblock():
call_command('sync_translation_fields', '--noinput')
@pytest.fixture(autouse=True)
def auto_enable(settings):
settings.AUTO_ENABLED_EXTENSIONS = ['course']
<commit_msg>Remove some needless code from course extension tests<commit_after>import pytest
from events.tests.conftest import (administrative_division, administrative_division_type, data_source, event, # noqa
location_id, minimal_event_dict, municipality, organization, place, user,
user_api_client, django_db_modify_db_settings, django_db_setup)
@pytest.fixture(autouse=True)
def auto_enable(settings):
settings.AUTO_ENABLED_EXTENSIONS = ['course']
|
f999b821fc00216d13759eb028f0fbd57352fa35
|
flocker/docs/version_code_block.py
|
flocker/docs/version_code_block.py
|
# Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Sphinx extension to add a ``version-code-block`` directive
This directive allows Flocker's release version to be inserted into code
blocks.
.. version-code-block:: rest
$ brew install flocker-|RELEASE|
"""
from sphinx.directives.code import CodeBlock
from flocker import __version__ as version
class VersionCodeBlock(CodeBlock):
"""
Similar to CodeBlock but replaces |RELEASE| with the latest release
version.
"""
def run(self):
# Use the WIP get_doc_version to get the latest release version
# from https://github.com/ClusterHQ/flocker/pull/1092/
self.content = [item.replace(u'|RELEASE|', version) for item in
self.content]
block = CodeBlock(self.name, self.arguments, self.options,
self.content, self.lineno, self.content_offset,
self.block_text, self.state, self.state_machine)
return block.run()
def setup(app):
app.add_directive('version-code-block', VersionCodeBlock)
|
# Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Sphinx extension to add a ``version-code-block`` directive
This directive allows Flocker's release version to be inserted into code
blocks.
.. version-code-block:: console
$ brew install flocker-|RELEASE|
"""
from sphinx.directives.code import CodeBlock
from flocker import __version__ as version
class VersionCodeBlock(CodeBlock):
"""
Similar to CodeBlock but replaces |RELEASE| with the latest release
version.
"""
def run(self):
# Use the WIP get_doc_version to get the latest release version
# from https://github.com/ClusterHQ/flocker/pull/1092/
self.content = [item.replace(u'|RELEASE|', version) for item in
self.content]
block = CodeBlock(self.name, self.arguments, self.options,
self.content, self.lineno, self.content_offset,
self.block_text, self.state, self.state_machine)
return block.run()
def setup(app):
app.add_directive('version-code-block', VersionCodeBlock)
|
Use console highlighting in example
|
Use console highlighting in example
|
Python
|
apache-2.0
|
jml/flocker,Azulinho/flocker,jml/flocker,w4ngyi/flocker,jml/flocker,AndyHuu/flocker,mbrukman/flocker,LaynePeng/flocker,AndyHuu/flocker,runcom/flocker,runcom/flocker,agonzalezro/flocker,adamtheturtle/flocker,agonzalezro/flocker,1d4Nf6/flocker,LaynePeng/flocker,mbrukman/flocker,w4ngyi/flocker,moypray/flocker,lukemarsden/flocker,moypray/flocker,achanda/flocker,w4ngyi/flocker,Azulinho/flocker,Azulinho/flocker,adamtheturtle/flocker,hackday-profilers/flocker,lukemarsden/flocker,moypray/flocker,wallnerryan/flocker-profiles,wallnerryan/flocker-profiles,lukemarsden/flocker,1d4Nf6/flocker,runcom/flocker,achanda/flocker,hackday-profilers/flocker,adamtheturtle/flocker,hackday-profilers/flocker,mbrukman/flocker,LaynePeng/flocker,achanda/flocker,agonzalezro/flocker,AndyHuu/flocker,1d4Nf6/flocker,wallnerryan/flocker-profiles
|
# Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Sphinx extension to add a ``version-code-block`` directive
This directive allows Flocker's release version to be inserted into code
blocks.
.. version-code-block:: rest
$ brew install flocker-|RELEASE|
"""
from sphinx.directives.code import CodeBlock
from flocker import __version__ as version
class VersionCodeBlock(CodeBlock):
"""
Similar to CodeBlock but replaces |RELEASE| with the latest release
version.
"""
def run(self):
# Use the WIP get_doc_version to get the latest release version
# from https://github.com/ClusterHQ/flocker/pull/1092/
self.content = [item.replace(u'|RELEASE|', version) for item in
self.content]
block = CodeBlock(self.name, self.arguments, self.options,
self.content, self.lineno, self.content_offset,
self.block_text, self.state, self.state_machine)
return block.run()
def setup(app):
app.add_directive('version-code-block', VersionCodeBlock)
Use console highlighting in example
|
# Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Sphinx extension to add a ``version-code-block`` directive
This directive allows Flocker's release version to be inserted into code
blocks.
.. version-code-block:: console
$ brew install flocker-|RELEASE|
"""
from sphinx.directives.code import CodeBlock
from flocker import __version__ as version
class VersionCodeBlock(CodeBlock):
"""
Similar to CodeBlock but replaces |RELEASE| with the latest release
version.
"""
def run(self):
# Use the WIP get_doc_version to get the latest release version
# from https://github.com/ClusterHQ/flocker/pull/1092/
self.content = [item.replace(u'|RELEASE|', version) for item in
self.content]
block = CodeBlock(self.name, self.arguments, self.options,
self.content, self.lineno, self.content_offset,
self.block_text, self.state, self.state_machine)
return block.run()
def setup(app):
app.add_directive('version-code-block', VersionCodeBlock)
|
<commit_before># Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Sphinx extension to add a ``version-code-block`` directive
This directive allows Flocker's release version to be inserted into code
blocks.
.. version-code-block:: rest
$ brew install flocker-|RELEASE|
"""
from sphinx.directives.code import CodeBlock
from flocker import __version__ as version
class VersionCodeBlock(CodeBlock):
"""
Similar to CodeBlock but replaces |RELEASE| with the latest release
version.
"""
def run(self):
# Use the WIP get_doc_version to get the latest release version
# from https://github.com/ClusterHQ/flocker/pull/1092/
self.content = [item.replace(u'|RELEASE|', version) for item in
self.content]
block = CodeBlock(self.name, self.arguments, self.options,
self.content, self.lineno, self.content_offset,
self.block_text, self.state, self.state_machine)
return block.run()
def setup(app):
app.add_directive('version-code-block', VersionCodeBlock)
<commit_msg>Use console highlighting in example<commit_after>
|
# Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Sphinx extension to add a ``version-code-block`` directive
This directive allows Flocker's release version to be inserted into code
blocks.
.. version-code-block:: console
$ brew install flocker-|RELEASE|
"""
from sphinx.directives.code import CodeBlock
from flocker import __version__ as version
class VersionCodeBlock(CodeBlock):
"""
Similar to CodeBlock but replaces |RELEASE| with the latest release
version.
"""
def run(self):
# Use the WIP get_doc_version to get the latest release version
# from https://github.com/ClusterHQ/flocker/pull/1092/
self.content = [item.replace(u'|RELEASE|', version) for item in
self.content]
block = CodeBlock(self.name, self.arguments, self.options,
self.content, self.lineno, self.content_offset,
self.block_text, self.state, self.state_machine)
return block.run()
def setup(app):
app.add_directive('version-code-block', VersionCodeBlock)
|
# Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Sphinx extension to add a ``version-code-block`` directive
This directive allows Flocker's release version to be inserted into code
blocks.
.. version-code-block:: rest
$ brew install flocker-|RELEASE|
"""
from sphinx.directives.code import CodeBlock
from flocker import __version__ as version
class VersionCodeBlock(CodeBlock):
"""
Similar to CodeBlock but replaces |RELEASE| with the latest release
version.
"""
def run(self):
# Use the WIP get_doc_version to get the latest release version
# from https://github.com/ClusterHQ/flocker/pull/1092/
self.content = [item.replace(u'|RELEASE|', version) for item in
self.content]
block = CodeBlock(self.name, self.arguments, self.options,
self.content, self.lineno, self.content_offset,
self.block_text, self.state, self.state_machine)
return block.run()
def setup(app):
app.add_directive('version-code-block', VersionCodeBlock)
Use console highlighting in example# Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Sphinx extension to add a ``version-code-block`` directive
This directive allows Flocker's release version to be inserted into code
blocks.
.. version-code-block:: console
$ brew install flocker-|RELEASE|
"""
from sphinx.directives.code import CodeBlock
from flocker import __version__ as version
class VersionCodeBlock(CodeBlock):
"""
Similar to CodeBlock but replaces |RELEASE| with the latest release
version.
"""
def run(self):
# Use the WIP get_doc_version to get the latest release version
# from https://github.com/ClusterHQ/flocker/pull/1092/
self.content = [item.replace(u'|RELEASE|', version) for item in
self.content]
block = CodeBlock(self.name, self.arguments, self.options,
self.content, self.lineno, self.content_offset,
self.block_text, self.state, self.state_machine)
return block.run()
def setup(app):
app.add_directive('version-code-block', VersionCodeBlock)
|
<commit_before># Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Sphinx extension to add a ``version-code-block`` directive
This directive allows Flocker's release version to be inserted into code
blocks.
.. version-code-block:: rest
$ brew install flocker-|RELEASE|
"""
from sphinx.directives.code import CodeBlock
from flocker import __version__ as version
class VersionCodeBlock(CodeBlock):
"""
Similar to CodeBlock but replaces |RELEASE| with the latest release
version.
"""
def run(self):
# Use the WIP get_doc_version to get the latest release version
# from https://github.com/ClusterHQ/flocker/pull/1092/
self.content = [item.replace(u'|RELEASE|', version) for item in
self.content]
block = CodeBlock(self.name, self.arguments, self.options,
self.content, self.lineno, self.content_offset,
self.block_text, self.state, self.state_machine)
return block.run()
def setup(app):
app.add_directive('version-code-block', VersionCodeBlock)
<commit_msg>Use console highlighting in example<commit_after># Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Sphinx extension to add a ``version-code-block`` directive
This directive allows Flocker's release version to be inserted into code
blocks.
.. version-code-block:: console
$ brew install flocker-|RELEASE|
"""
from sphinx.directives.code import CodeBlock
from flocker import __version__ as version
class VersionCodeBlock(CodeBlock):
"""
Similar to CodeBlock but replaces |RELEASE| with the latest release
version.
"""
def run(self):
# Use the WIP get_doc_version to get the latest release version
# from https://github.com/ClusterHQ/flocker/pull/1092/
self.content = [item.replace(u'|RELEASE|', version) for item in
self.content]
block = CodeBlock(self.name, self.arguments, self.options,
self.content, self.lineno, self.content_offset,
self.block_text, self.state, self.state_machine)
return block.run()
def setup(app):
app.add_directive('version-code-block', VersionCodeBlock)
|
65ed7106126effc922df2bf7252a3c840d9bc768
|
hasjob/__init__.py
|
hasjob/__init__.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from os import environ
from flask import Flask
from flask.ext.mail import Mail
from flask.ext.assets import Environment, Bundle
from coaster import configureapp
# First, make an app and config it
app = Flask(__name__, instance_relative_config=True)
configureapp(app, 'HASJOB_ENV')
mail = Mail()
mail.init_app(app)
assets = Environment(app)
from uploads import configure
from search import configure
uploads.configure()
search.configure()
# Second, setup assets
assets = Environment(app)
js = Bundle('js/libs/jquery-1.5.1.min.js',
'js/libs/jquery.textarea-expander.js',
'js/libs/tiny_mce/jquery.tinymce.js',
'js/libs/jquery.form.js',
'js/scripts.js',
filters='jsmin', output='js/packed.js')
assets.register('js_all', js)
# Third, after config, import the models and views
import hasjob.models
import hasjob.views
from uploads import configure
from search import configure
uploads.configure()
search.configure()
if environ.get('HASJOB_ENV') == 'prod':
import hasjob.loghandler
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from os import environ
from flask import Flask
from flask.ext.mail import Mail
from flask.ext.assets import Environment, Bundle
from coaster import configureapp
# First, make an app and config it
app = Flask(__name__, instance_relative_config=True)
configureapp(app, 'HASJOB_ENV')
mail = Mail()
mail.init_app(app)
assets = Environment(app)
# Imported here to prevent circular imports
from uploads import configure
from search import configure
uploads.configure()
search.configure()
# Second, setup assets
assets = Environment(app)
js = Bundle('js/libs/jquery-1.5.1.min.js',
'js/libs/jquery.textarea-expander.js',
'js/libs/tiny_mce/jquery.tinymce.js',
'js/libs/jquery.form.js',
'js/scripts.js',
filters='jsmin', output='js/packed.js')
assets.register('js_all', js)
# Third, after config, import the models and views
import hasjob.models
import hasjob.views
if environ.get('HASJOB_ENV') == 'prod':
import hasjob.loghandler
|
Remove duplicate code, leave comment about circular imports
|
Remove duplicate code, leave comment about circular imports
|
Python
|
agpl-3.0
|
hasgeek/hasjob,ashwin01/hasjob,nhannv/hasjob,qitianchan/hasjob,ashwin01/hasjob,hasgeek/hasjob,qitianchan/hasjob,sindhus/hasjob,sindhus/hasjob,hasgeek/hasjob,sindhus/hasjob,ashwin01/hasjob,ashwin01/hasjob,sindhus/hasjob,qitianchan/hasjob,nhannv/hasjob,qitianchan/hasjob,sindhus/hasjob,qitianchan/hasjob,nhannv/hasjob,hasgeek/hasjob,nhannv/hasjob,ashwin01/hasjob,nhannv/hasjob
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from os import environ
from flask import Flask
from flask.ext.mail import Mail
from flask.ext.assets import Environment, Bundle
from coaster import configureapp
# First, make an app and config it
app = Flask(__name__, instance_relative_config=True)
configureapp(app, 'HASJOB_ENV')
mail = Mail()
mail.init_app(app)
assets = Environment(app)
from uploads import configure
from search import configure
uploads.configure()
search.configure()
# Second, setup assets
assets = Environment(app)
js = Bundle('js/libs/jquery-1.5.1.min.js',
'js/libs/jquery.textarea-expander.js',
'js/libs/tiny_mce/jquery.tinymce.js',
'js/libs/jquery.form.js',
'js/scripts.js',
filters='jsmin', output='js/packed.js')
assets.register('js_all', js)
# Third, after config, import the models and views
import hasjob.models
import hasjob.views
from uploads import configure
from search import configure
uploads.configure()
search.configure()
if environ.get('HASJOB_ENV') == 'prod':
import hasjob.loghandler
Remove duplicate code, leave comment about circular imports
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from os import environ
from flask import Flask
from flask.ext.mail import Mail
from flask.ext.assets import Environment, Bundle
from coaster import configureapp
# First, make an app and config it
app = Flask(__name__, instance_relative_config=True)
configureapp(app, 'HASJOB_ENV')
mail = Mail()
mail.init_app(app)
assets = Environment(app)
# Imported here to prevent circular imports
from uploads import configure
from search import configure
uploads.configure()
search.configure()
# Second, setup assets
assets = Environment(app)
js = Bundle('js/libs/jquery-1.5.1.min.js',
'js/libs/jquery.textarea-expander.js',
'js/libs/tiny_mce/jquery.tinymce.js',
'js/libs/jquery.form.js',
'js/scripts.js',
filters='jsmin', output='js/packed.js')
assets.register('js_all', js)
# Third, after config, import the models and views
import hasjob.models
import hasjob.views
if environ.get('HASJOB_ENV') == 'prod':
import hasjob.loghandler
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from os import environ
from flask import Flask
from flask.ext.mail import Mail
from flask.ext.assets import Environment, Bundle
from coaster import configureapp
# First, make an app and config it
app = Flask(__name__, instance_relative_config=True)
configureapp(app, 'HASJOB_ENV')
mail = Mail()
mail.init_app(app)
assets = Environment(app)
from uploads import configure
from search import configure
uploads.configure()
search.configure()
# Second, setup assets
assets = Environment(app)
js = Bundle('js/libs/jquery-1.5.1.min.js',
'js/libs/jquery.textarea-expander.js',
'js/libs/tiny_mce/jquery.tinymce.js',
'js/libs/jquery.form.js',
'js/scripts.js',
filters='jsmin', output='js/packed.js')
assets.register('js_all', js)
# Third, after config, import the models and views
import hasjob.models
import hasjob.views
from uploads import configure
from search import configure
uploads.configure()
search.configure()
if environ.get('HASJOB_ENV') == 'prod':
import hasjob.loghandler
<commit_msg>Remove duplicate code, leave comment about circular imports<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from os import environ
from flask import Flask
from flask.ext.mail import Mail
from flask.ext.assets import Environment, Bundle
from coaster import configureapp
# First, make an app and config it
app = Flask(__name__, instance_relative_config=True)
configureapp(app, 'HASJOB_ENV')
mail = Mail()
mail.init_app(app)
assets = Environment(app)
# Imported here to prevent circular imports
from uploads import configure
from search import configure
uploads.configure()
search.configure()
# Second, setup assets
assets = Environment(app)
js = Bundle('js/libs/jquery-1.5.1.min.js',
'js/libs/jquery.textarea-expander.js',
'js/libs/tiny_mce/jquery.tinymce.js',
'js/libs/jquery.form.js',
'js/scripts.js',
filters='jsmin', output='js/packed.js')
assets.register('js_all', js)
# Third, after config, import the models and views
import hasjob.models
import hasjob.views
if environ.get('HASJOB_ENV') == 'prod':
import hasjob.loghandler
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from os import environ
from flask import Flask
from flask.ext.mail import Mail
from flask.ext.assets import Environment, Bundle
from coaster import configureapp
# First, make an app and config it
app = Flask(__name__, instance_relative_config=True)
configureapp(app, 'HASJOB_ENV')
mail = Mail()
mail.init_app(app)
assets = Environment(app)
from uploads import configure
from search import configure
uploads.configure()
search.configure()
# Second, setup assets
assets = Environment(app)
js = Bundle('js/libs/jquery-1.5.1.min.js',
'js/libs/jquery.textarea-expander.js',
'js/libs/tiny_mce/jquery.tinymce.js',
'js/libs/jquery.form.js',
'js/scripts.js',
filters='jsmin', output='js/packed.js')
assets.register('js_all', js)
# Third, after config, import the models and views
import hasjob.models
import hasjob.views
from uploads import configure
from search import configure
uploads.configure()
search.configure()
if environ.get('HASJOB_ENV') == 'prod':
import hasjob.loghandler
Remove duplicate code, leave comment about circular imports#!/usr/bin/env python
# -*- coding: utf-8 -*-
from os import environ
from flask import Flask
from flask.ext.mail import Mail
from flask.ext.assets import Environment, Bundle
from coaster import configureapp
# First, make an app and config it
app = Flask(__name__, instance_relative_config=True)
configureapp(app, 'HASJOB_ENV')
mail = Mail()
mail.init_app(app)
assets = Environment(app)
# Imported here to prevent circular imports
from uploads import configure
from search import configure
uploads.configure()
search.configure()
# Second, setup assets
assets = Environment(app)
js = Bundle('js/libs/jquery-1.5.1.min.js',
'js/libs/jquery.textarea-expander.js',
'js/libs/tiny_mce/jquery.tinymce.js',
'js/libs/jquery.form.js',
'js/scripts.js',
filters='jsmin', output='js/packed.js')
assets.register('js_all', js)
# Third, after config, import the models and views
import hasjob.models
import hasjob.views
if environ.get('HASJOB_ENV') == 'prod':
import hasjob.loghandler
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from os import environ
from flask import Flask
from flask.ext.mail import Mail
from flask.ext.assets import Environment, Bundle
from coaster import configureapp
# First, make an app and config it
app = Flask(__name__, instance_relative_config=True)
configureapp(app, 'HASJOB_ENV')
mail = Mail()
mail.init_app(app)
assets = Environment(app)
from uploads import configure
from search import configure
uploads.configure()
search.configure()
# Second, setup assets
assets = Environment(app)
js = Bundle('js/libs/jquery-1.5.1.min.js',
'js/libs/jquery.textarea-expander.js',
'js/libs/tiny_mce/jquery.tinymce.js',
'js/libs/jquery.form.js',
'js/scripts.js',
filters='jsmin', output='js/packed.js')
assets.register('js_all', js)
# Third, after config, import the models and views
import hasjob.models
import hasjob.views
from uploads import configure
from search import configure
uploads.configure()
search.configure()
if environ.get('HASJOB_ENV') == 'prod':
import hasjob.loghandler
<commit_msg>Remove duplicate code, leave comment about circular imports<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from os import environ
from flask import Flask
from flask.ext.mail import Mail
from flask.ext.assets import Environment, Bundle
from coaster import configureapp
# First, make an app and config it
app = Flask(__name__, instance_relative_config=True)
configureapp(app, 'HASJOB_ENV')
mail = Mail()
mail.init_app(app)
assets = Environment(app)
# Imported here to prevent circular imports
from uploads import configure
from search import configure
uploads.configure()
search.configure()
# Second, setup assets
assets = Environment(app)
js = Bundle('js/libs/jquery-1.5.1.min.js',
'js/libs/jquery.textarea-expander.js',
'js/libs/tiny_mce/jquery.tinymce.js',
'js/libs/jquery.form.js',
'js/scripts.js',
filters='jsmin', output='js/packed.js')
assets.register('js_all', js)
# Third, after config, import the models and views
import hasjob.models
import hasjob.views
if environ.get('HASJOB_ENV') == 'prod':
import hasjob.loghandler
|
d6a8b995f2a1b069729f07ef43b966b2f15fd3b3
|
linter.py
|
linter.py
|
from SublimeLinter.lint import NodeLinter
class XO(NodeLinter):
npm_name = 'xo'
cmd = ('xo', '--stdin', '--reporter', 'compact', '--filename', '@')
regex = (
r'^.+?: line (?P<line>\d+), col (?P<col>\d+), '
r'(?:(?P<error>Error)|(?P<warning>Warning)) - '
r'(?P<message>.+)'
)
defaults = {
'selector': 'source.js - meta.attribute-with-value',
'disable_if_not_dependency': True
}
|
from SublimeLinter.lint import NodeLinter
class XO(NodeLinter):
npm_name = 'xo'
cmd = ('xo', '--stdin', '--reporter', 'compact', '--filename', '${file}')
regex = (
r'^.+?: line (?P<line>\d+), col (?P<col>\d+), '
r'(?:(?P<error>Error)|(?P<warning>Warning)) - '
r'(?P<message>.+)'
r' \((?P<code>.+)\)$'
)
defaults = {
'selector': 'source.js - meta.attribute-with-value',
'disable_if_not_dependency': True
}
|
Support the new SublimeLinter `code` property
|
Support the new SublimeLinter `code` property
|
Python
|
mit
|
sindresorhus/SublimeLinter-contrib-xo,sindresorhus/SublimeLinter-contrib-xo
|
from SublimeLinter.lint import NodeLinter
class XO(NodeLinter):
npm_name = 'xo'
cmd = ('xo', '--stdin', '--reporter', 'compact', '--filename', '@')
regex = (
r'^.+?: line (?P<line>\d+), col (?P<col>\d+), '
r'(?:(?P<error>Error)|(?P<warning>Warning)) - '
r'(?P<message>.+)'
)
defaults = {
'selector': 'source.js - meta.attribute-with-value',
'disable_if_not_dependency': True
}
Support the new SublimeLinter `code` property
|
from SublimeLinter.lint import NodeLinter
class XO(NodeLinter):
npm_name = 'xo'
cmd = ('xo', '--stdin', '--reporter', 'compact', '--filename', '${file}')
regex = (
r'^.+?: line (?P<line>\d+), col (?P<col>\d+), '
r'(?:(?P<error>Error)|(?P<warning>Warning)) - '
r'(?P<message>.+)'
r' \((?P<code>.+)\)$'
)
defaults = {
'selector': 'source.js - meta.attribute-with-value',
'disable_if_not_dependency': True
}
|
<commit_before>from SublimeLinter.lint import NodeLinter
class XO(NodeLinter):
npm_name = 'xo'
cmd = ('xo', '--stdin', '--reporter', 'compact', '--filename', '@')
regex = (
r'^.+?: line (?P<line>\d+), col (?P<col>\d+), '
r'(?:(?P<error>Error)|(?P<warning>Warning)) - '
r'(?P<message>.+)'
)
defaults = {
'selector': 'source.js - meta.attribute-with-value',
'disable_if_not_dependency': True
}
<commit_msg>Support the new SublimeLinter `code` property<commit_after>
|
from SublimeLinter.lint import NodeLinter
class XO(NodeLinter):
npm_name = 'xo'
cmd = ('xo', '--stdin', '--reporter', 'compact', '--filename', '${file}')
regex = (
r'^.+?: line (?P<line>\d+), col (?P<col>\d+), '
r'(?:(?P<error>Error)|(?P<warning>Warning)) - '
r'(?P<message>.+)'
r' \((?P<code>.+)\)$'
)
defaults = {
'selector': 'source.js - meta.attribute-with-value',
'disable_if_not_dependency': True
}
|
from SublimeLinter.lint import NodeLinter
class XO(NodeLinter):
npm_name = 'xo'
cmd = ('xo', '--stdin', '--reporter', 'compact', '--filename', '@')
regex = (
r'^.+?: line (?P<line>\d+), col (?P<col>\d+), '
r'(?:(?P<error>Error)|(?P<warning>Warning)) - '
r'(?P<message>.+)'
)
defaults = {
'selector': 'source.js - meta.attribute-with-value',
'disable_if_not_dependency': True
}
Support the new SublimeLinter `code` propertyfrom SublimeLinter.lint import NodeLinter
class XO(NodeLinter):
npm_name = 'xo'
cmd = ('xo', '--stdin', '--reporter', 'compact', '--filename', '${file}')
regex = (
r'^.+?: line (?P<line>\d+), col (?P<col>\d+), '
r'(?:(?P<error>Error)|(?P<warning>Warning)) - '
r'(?P<message>.+)'
r' \((?P<code>.+)\)$'
)
defaults = {
'selector': 'source.js - meta.attribute-with-value',
'disable_if_not_dependency': True
}
|
<commit_before>from SublimeLinter.lint import NodeLinter
class XO(NodeLinter):
npm_name = 'xo'
cmd = ('xo', '--stdin', '--reporter', 'compact', '--filename', '@')
regex = (
r'^.+?: line (?P<line>\d+), col (?P<col>\d+), '
r'(?:(?P<error>Error)|(?P<warning>Warning)) - '
r'(?P<message>.+)'
)
defaults = {
'selector': 'source.js - meta.attribute-with-value',
'disable_if_not_dependency': True
}
<commit_msg>Support the new SublimeLinter `code` property<commit_after>from SublimeLinter.lint import NodeLinter
class XO(NodeLinter):
npm_name = 'xo'
cmd = ('xo', '--stdin', '--reporter', 'compact', '--filename', '${file}')
regex = (
r'^.+?: line (?P<line>\d+), col (?P<col>\d+), '
r'(?:(?P<error>Error)|(?P<warning>Warning)) - '
r'(?P<message>.+)'
r' \((?P<code>.+)\)$'
)
defaults = {
'selector': 'source.js - meta.attribute-with-value',
'disable_if_not_dependency': True
}
|
c5b2cb667a59cf6fa16c860744fd5978cd3c01a2
|
src/lexington/util/paths.py
|
src/lexington/util/paths.py
|
from urllib import parse
from werkzeug.wrappers import Request
from lexington.util.di import depends_on
@depends_on(['environ'])
def get_request(environ):
return Request(environ)
# TODO: clean up all of these...
@depends_on(['environ'])
def get_method(environ):
return environ['REQUEST_METHOD']
@depends_on(['environ'])
def get_path(environ):
return environ['PATH_INFO']
@depends_on(['environ'])
def get_query_string(environ):
return environ['QUERY_STRING']
@depends_on(['query_string'])
def get_query(query_string):
return parse.parse_qs(query_string)
def register_all(dependencies):
dependant_functions = {
'request': get_request,
'method': get_method,
'path': get_path,
'query_string': get_query_string,
'query': get_query,
}
for name, dependant in dependant_functions.items():
dependencies.register_dependant(name, dependant)
|
from werkzeug.wrappers import Request
from lexington.util.di import depends_on
@depends_on(['environ'])
def get_request(environ):
return Request(environ)
@depends_on(['request'])
def get_method(request):
return request.method
@depends_on(['request'])
def get_path(request):
return request.path
@depends_on(['request'])
def get_query_string(request):
return request.query_string
@depends_on(['request'])
def get_query(request):
return request.args
def register_all(dependencies):
dependant_functions = {
'request': get_request,
'method': get_method,
'path': get_path,
'query_string': get_query_string,
'query': get_query,
}
for name, dependant in dependant_functions.items():
dependencies.register_dependant(name, dependant)
|
Reduce direct dependencies on environ
|
Reduce direct dependencies on environ
|
Python
|
mit
|
jmikkola/Lexington
|
from urllib import parse
from werkzeug.wrappers import Request
from lexington.util.di import depends_on
@depends_on(['environ'])
def get_request(environ):
return Request(environ)
# TODO: clean up all of these...
@depends_on(['environ'])
def get_method(environ):
return environ['REQUEST_METHOD']
@depends_on(['environ'])
def get_path(environ):
return environ['PATH_INFO']
@depends_on(['environ'])
def get_query_string(environ):
return environ['QUERY_STRING']
@depends_on(['query_string'])
def get_query(query_string):
return parse.parse_qs(query_string)
def register_all(dependencies):
dependant_functions = {
'request': get_request,
'method': get_method,
'path': get_path,
'query_string': get_query_string,
'query': get_query,
}
for name, dependant in dependant_functions.items():
dependencies.register_dependant(name, dependant)
Reduce direct dependencies on environ
|
from werkzeug.wrappers import Request
from lexington.util.di import depends_on
@depends_on(['environ'])
def get_request(environ):
return Request(environ)
@depends_on(['request'])
def get_method(request):
return request.method
@depends_on(['request'])
def get_path(request):
return request.path
@depends_on(['request'])
def get_query_string(request):
return request.query_string
@depends_on(['request'])
def get_query(request):
return request.args
def register_all(dependencies):
dependant_functions = {
'request': get_request,
'method': get_method,
'path': get_path,
'query_string': get_query_string,
'query': get_query,
}
for name, dependant in dependant_functions.items():
dependencies.register_dependant(name, dependant)
|
<commit_before>from urllib import parse
from werkzeug.wrappers import Request
from lexington.util.di import depends_on
@depends_on(['environ'])
def get_request(environ):
return Request(environ)
# TODO: clean up all of these...
@depends_on(['environ'])
def get_method(environ):
return environ['REQUEST_METHOD']
@depends_on(['environ'])
def get_path(environ):
return environ['PATH_INFO']
@depends_on(['environ'])
def get_query_string(environ):
return environ['QUERY_STRING']
@depends_on(['query_string'])
def get_query(query_string):
return parse.parse_qs(query_string)
def register_all(dependencies):
dependant_functions = {
'request': get_request,
'method': get_method,
'path': get_path,
'query_string': get_query_string,
'query': get_query,
}
for name, dependant in dependant_functions.items():
dependencies.register_dependant(name, dependant)
<commit_msg>Reduce direct dependencies on environ<commit_after>
|
from werkzeug.wrappers import Request
from lexington.util.di import depends_on
@depends_on(['environ'])
def get_request(environ):
return Request(environ)
@depends_on(['request'])
def get_method(request):
return request.method
@depends_on(['request'])
def get_path(request):
return request.path
@depends_on(['request'])
def get_query_string(request):
return request.query_string
@depends_on(['request'])
def get_query(request):
return request.args
def register_all(dependencies):
dependant_functions = {
'request': get_request,
'method': get_method,
'path': get_path,
'query_string': get_query_string,
'query': get_query,
}
for name, dependant in dependant_functions.items():
dependencies.register_dependant(name, dependant)
|
from urllib import parse
from werkzeug.wrappers import Request
from lexington.util.di import depends_on
@depends_on(['environ'])
def get_request(environ):
return Request(environ)
# TODO: clean up all of these...
@depends_on(['environ'])
def get_method(environ):
return environ['REQUEST_METHOD']
@depends_on(['environ'])
def get_path(environ):
return environ['PATH_INFO']
@depends_on(['environ'])
def get_query_string(environ):
return environ['QUERY_STRING']
@depends_on(['query_string'])
def get_query(query_string):
return parse.parse_qs(query_string)
def register_all(dependencies):
dependant_functions = {
'request': get_request,
'method': get_method,
'path': get_path,
'query_string': get_query_string,
'query': get_query,
}
for name, dependant in dependant_functions.items():
dependencies.register_dependant(name, dependant)
Reduce direct dependencies on environfrom werkzeug.wrappers import Request
from lexington.util.di import depends_on
@depends_on(['environ'])
def get_request(environ):
return Request(environ)
@depends_on(['request'])
def get_method(request):
return request.method
@depends_on(['request'])
def get_path(request):
return request.path
@depends_on(['request'])
def get_query_string(request):
return request.query_string
@depends_on(['request'])
def get_query(request):
return request.args
def register_all(dependencies):
dependant_functions = {
'request': get_request,
'method': get_method,
'path': get_path,
'query_string': get_query_string,
'query': get_query,
}
for name, dependant in dependant_functions.items():
dependencies.register_dependant(name, dependant)
|
<commit_before>from urllib import parse
from werkzeug.wrappers import Request
from lexington.util.di import depends_on
@depends_on(['environ'])
def get_request(environ):
return Request(environ)
# TODO: clean up all of these...
@depends_on(['environ'])
def get_method(environ):
return environ['REQUEST_METHOD']
@depends_on(['environ'])
def get_path(environ):
return environ['PATH_INFO']
@depends_on(['environ'])
def get_query_string(environ):
return environ['QUERY_STRING']
@depends_on(['query_string'])
def get_query(query_string):
return parse.parse_qs(query_string)
def register_all(dependencies):
dependant_functions = {
'request': get_request,
'method': get_method,
'path': get_path,
'query_string': get_query_string,
'query': get_query,
}
for name, dependant in dependant_functions.items():
dependencies.register_dependant(name, dependant)
<commit_msg>Reduce direct dependencies on environ<commit_after>from werkzeug.wrappers import Request
from lexington.util.di import depends_on
@depends_on(['environ'])
def get_request(environ):
return Request(environ)
@depends_on(['request'])
def get_method(request):
return request.method
@depends_on(['request'])
def get_path(request):
return request.path
@depends_on(['request'])
def get_query_string(request):
return request.query_string
@depends_on(['request'])
def get_query(request):
return request.args
def register_all(dependencies):
dependant_functions = {
'request': get_request,
'method': get_method,
'path': get_path,
'query_string': get_query_string,
'query': get_query,
}
for name, dependant in dependant_functions.items():
dependencies.register_dependant(name, dependant)
|
2dc34a9952fda8a46a89aa43ea833c36998d891a
|
shop/models/fields.py
|
shop/models/fields.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from distutils.version import LooseVersion
import re
from django.db import connection
POSTGRES_FLAG = False
if str(connection.vendor) == 'postgresql':
POSTGRES_FLAG = True
try:
import psycopg2
version = re.search('([0-9.]+)', psycopg2.__version__ or "0").group(0)
# To be able to use the Django version of JSONField, it requires to have PostgreSQL ≥ 9.4 , otherwise some issues
# could be faced.
if POSTGRES_FLAG and (LooseVersion(version) >= LooseVersion('2.5.4')):
from django.contrib.postgres.fields import JSONField
else:
raise ImportError
except ImportError:
from jsonfield.fields import JSONField
class JSONFieldWrapper(JSONField):
def __init__(self, *args, **kwargs):
kwargs.update({'default': {}})
super(JSONFieldWrapper, self).__init__(*args, **kwargs)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from distutils.version import LooseVersion
import re
from django.db import connection
POSTGRES_FLAG = False
if str(connection.vendor) == 'postgresql':
POSTGRES_FLAG = True
try:
import psycopg2
version = re.search('([0-9.]+)', psycopg2.__version__ or "0").group(0)
# To be able to use the Django version of JSONField, it requires to have PostgreSQL ≥ 9.4 and psycopg2 ≥ 2.5.4,
# otherwise some issues could be faced.
if POSTGRES_FLAG and (LooseVersion(version) >= LooseVersion('2.5.4')):
from django.contrib.postgres.fields import JSONField
else:
raise ImportError
except ImportError:
from jsonfield.fields import JSONField
class JSONFieldWrapper(JSONField):
def __init__(self, *args, **kwargs):
kwargs.update({'default': {}})
super(JSONFieldWrapper, self).__init__(*args, **kwargs)
|
Add psycopg2's version to the comment
|
Add psycopg2's version to the comment
|
Python
|
bsd-3-clause
|
nimbis/django-shop,jrief/django-shop,khchine5/django-shop,divio/django-shop,divio/django-shop,divio/django-shop,khchine5/django-shop,awesto/django-shop,nimbis/django-shop,awesto/django-shop,nimbis/django-shop,jrief/django-shop,awesto/django-shop,jrief/django-shop,khchine5/django-shop,nimbis/django-shop,khchine5/django-shop,jrief/django-shop
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from distutils.version import LooseVersion
import re
from django.db import connection
POSTGRES_FLAG = False
if str(connection.vendor) == 'postgresql':
POSTGRES_FLAG = True
try:
import psycopg2
version = re.search('([0-9.]+)', psycopg2.__version__ or "0").group(0)
# To be able to use the Django version of JSONField, it requires to have PostgreSQL ≥ 9.4 , otherwise some issues
# could be faced.
if POSTGRES_FLAG and (LooseVersion(version) >= LooseVersion('2.5.4')):
from django.contrib.postgres.fields import JSONField
else:
raise ImportError
except ImportError:
from jsonfield.fields import JSONField
class JSONFieldWrapper(JSONField):
def __init__(self, *args, **kwargs):
kwargs.update({'default': {}})
super(JSONFieldWrapper, self).__init__(*args, **kwargs)
Add psycopg2's version to the comment
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from distutils.version import LooseVersion
import re
from django.db import connection
POSTGRES_FLAG = False
if str(connection.vendor) == 'postgresql':
POSTGRES_FLAG = True
try:
import psycopg2
version = re.search('([0-9.]+)', psycopg2.__version__ or "0").group(0)
# To be able to use the Django version of JSONField, it requires to have PostgreSQL ≥ 9.4 and psycopg2 ≥ 2.5.4,
# otherwise some issues could be faced.
if POSTGRES_FLAG and (LooseVersion(version) >= LooseVersion('2.5.4')):
from django.contrib.postgres.fields import JSONField
else:
raise ImportError
except ImportError:
from jsonfield.fields import JSONField
class JSONFieldWrapper(JSONField):
def __init__(self, *args, **kwargs):
kwargs.update({'default': {}})
super(JSONFieldWrapper, self).__init__(*args, **kwargs)
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from distutils.version import LooseVersion
import re
from django.db import connection
POSTGRES_FLAG = False
if str(connection.vendor) == 'postgresql':
POSTGRES_FLAG = True
try:
import psycopg2
version = re.search('([0-9.]+)', psycopg2.__version__ or "0").group(0)
# To be able to use the Django version of JSONField, it requires to have PostgreSQL ≥ 9.4 , otherwise some issues
# could be faced.
if POSTGRES_FLAG and (LooseVersion(version) >= LooseVersion('2.5.4')):
from django.contrib.postgres.fields import JSONField
else:
raise ImportError
except ImportError:
from jsonfield.fields import JSONField
class JSONFieldWrapper(JSONField):
def __init__(self, *args, **kwargs):
kwargs.update({'default': {}})
super(JSONFieldWrapper, self).__init__(*args, **kwargs)
<commit_msg>Add psycopg2's version to the comment<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from distutils.version import LooseVersion
import re
from django.db import connection
POSTGRES_FLAG = False
if str(connection.vendor) == 'postgresql':
POSTGRES_FLAG = True
try:
import psycopg2
version = re.search('([0-9.]+)', psycopg2.__version__ or "0").group(0)
# To be able to use the Django version of JSONField, it requires to have PostgreSQL ≥ 9.4 and psycopg2 ≥ 2.5.4,
# otherwise some issues could be faced.
if POSTGRES_FLAG and (LooseVersion(version) >= LooseVersion('2.5.4')):
from django.contrib.postgres.fields import JSONField
else:
raise ImportError
except ImportError:
from jsonfield.fields import JSONField
class JSONFieldWrapper(JSONField):
def __init__(self, *args, **kwargs):
kwargs.update({'default': {}})
super(JSONFieldWrapper, self).__init__(*args, **kwargs)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from distutils.version import LooseVersion
import re
from django.db import connection
POSTGRES_FLAG = False
if str(connection.vendor) == 'postgresql':
POSTGRES_FLAG = True
try:
import psycopg2
version = re.search('([0-9.]+)', psycopg2.__version__ or "0").group(0)
# To be able to use the Django version of JSONField, it requires to have PostgreSQL ≥ 9.4 , otherwise some issues
# could be faced.
if POSTGRES_FLAG and (LooseVersion(version) >= LooseVersion('2.5.4')):
from django.contrib.postgres.fields import JSONField
else:
raise ImportError
except ImportError:
from jsonfield.fields import JSONField
class JSONFieldWrapper(JSONField):
def __init__(self, *args, **kwargs):
kwargs.update({'default': {}})
super(JSONFieldWrapper, self).__init__(*args, **kwargs)
Add psycopg2's version to the comment# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from distutils.version import LooseVersion
import re
from django.db import connection
POSTGRES_FLAG = False
if str(connection.vendor) == 'postgresql':
POSTGRES_FLAG = True
try:
import psycopg2
version = re.search('([0-9.]+)', psycopg2.__version__ or "0").group(0)
# To be able to use the Django version of JSONField, it requires to have PostgreSQL ≥ 9.4 and psycopg2 ≥ 2.5.4,
# otherwise some issues could be faced.
if POSTGRES_FLAG and (LooseVersion(version) >= LooseVersion('2.5.4')):
from django.contrib.postgres.fields import JSONField
else:
raise ImportError
except ImportError:
from jsonfield.fields import JSONField
class JSONFieldWrapper(JSONField):
def __init__(self, *args, **kwargs):
kwargs.update({'default': {}})
super(JSONFieldWrapper, self).__init__(*args, **kwargs)
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from distutils.version import LooseVersion
import re
from django.db import connection
POSTGRES_FLAG = False
if str(connection.vendor) == 'postgresql':
POSTGRES_FLAG = True
try:
import psycopg2
version = re.search('([0-9.]+)', psycopg2.__version__ or "0").group(0)
# To be able to use the Django version of JSONField, it requires to have PostgreSQL ≥ 9.4 , otherwise some issues
# could be faced.
if POSTGRES_FLAG and (LooseVersion(version) >= LooseVersion('2.5.4')):
from django.contrib.postgres.fields import JSONField
else:
raise ImportError
except ImportError:
from jsonfield.fields import JSONField
class JSONFieldWrapper(JSONField):
def __init__(self, *args, **kwargs):
kwargs.update({'default': {}})
super(JSONFieldWrapper, self).__init__(*args, **kwargs)
<commit_msg>Add psycopg2's version to the comment<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from distutils.version import LooseVersion
import re
from django.db import connection
POSTGRES_FLAG = False
if str(connection.vendor) == 'postgresql':
POSTGRES_FLAG = True
try:
import psycopg2
version = re.search('([0-9.]+)', psycopg2.__version__ or "0").group(0)
# To be able to use the Django version of JSONField, it requires to have PostgreSQL ≥ 9.4 and psycopg2 ≥ 2.5.4,
# otherwise some issues could be faced.
if POSTGRES_FLAG and (LooseVersion(version) >= LooseVersion('2.5.4')):
from django.contrib.postgres.fields import JSONField
else:
raise ImportError
except ImportError:
from jsonfield.fields import JSONField
class JSONFieldWrapper(JSONField):
def __init__(self, *args, **kwargs):
kwargs.update({'default': {}})
super(JSONFieldWrapper, self).__init__(*args, **kwargs)
|
a928039c32e1991ec0892ec202d22c43d0add0c2
|
config/urls.py
|
config/urls.py
|
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from jobs_backend.views import APIRoot
api_urlpatterns = [
# All api endpoints should be included here
url(r'^users/', include('jobs_backend.users.urls.users', namespace='users')),
url(r'^account/', include('jobs_backend.users.urls.account', namespace='account')),
url(r'^vacancies/', include('jobs_backend.vacancies.urls', namespace='vacancies')),
]
urlpatterns = [
url(r'^api/', include(api_urlpatterns, namespace='api')),
url(settings.ADMIN_URL, admin.site.urls),
url(r'^api/api-auth/', include('rest_framework.urls', namespace='rest_framework')),
]
urlpatterns += [
url(r'^$', APIRoot.as_view(urlpatterns=urlpatterns, app_namespace='api_v1'), name='api_root')
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.DEBUG:
if 'debug_toolbar' in settings.INSTALLED_APPS:
import debug_toolbar
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
]
|
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from jobs_backend.views import APIRoot
api_urlpatterns = [
# All api endpoints should be included here
url(r'^users/', include('jobs_backend.users.urls.users', namespace='users')),
url(r'^account/', include('jobs_backend.users.urls.account', namespace='account')),
url(r'^vacancies/', include('jobs_backend.vacancies.urls', namespace='vacancies')),
]
urlpatterns = [
url(r'^api/', include(api_urlpatterns, namespace='api')),
url(settings.ADMIN_URL, admin.site.urls),
url(r'^api/api-auth/', include('rest_framework.urls', namespace='rest_framework')),
]
urlpatterns += [
url(r'^api/$', APIRoot.as_view(urlpatterns=urlpatterns, app_namespace='api_v1'), name='api_root')
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.DEBUG:
if 'debug_toolbar' in settings.INSTALLED_APPS:
import debug_toolbar
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
]
|
Fix browsable API index regexp
|
jobs-041: Fix browsable API index regexp
|
Python
|
mit
|
pyshopml/jobs-backend,pyshopml/jobs-backend
|
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from jobs_backend.views import APIRoot
api_urlpatterns = [
# All api endpoints should be included here
url(r'^users/', include('jobs_backend.users.urls.users', namespace='users')),
url(r'^account/', include('jobs_backend.users.urls.account', namespace='account')),
url(r'^vacancies/', include('jobs_backend.vacancies.urls', namespace='vacancies')),
]
urlpatterns = [
url(r'^api/', include(api_urlpatterns, namespace='api')),
url(settings.ADMIN_URL, admin.site.urls),
url(r'^api/api-auth/', include('rest_framework.urls', namespace='rest_framework')),
]
urlpatterns += [
url(r'^$', APIRoot.as_view(urlpatterns=urlpatterns, app_namespace='api_v1'), name='api_root')
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.DEBUG:
if 'debug_toolbar' in settings.INSTALLED_APPS:
import debug_toolbar
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
]
jobs-041: Fix browsable API index regexp
|
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from jobs_backend.views import APIRoot
api_urlpatterns = [
# All api endpoints should be included here
url(r'^users/', include('jobs_backend.users.urls.users', namespace='users')),
url(r'^account/', include('jobs_backend.users.urls.account', namespace='account')),
url(r'^vacancies/', include('jobs_backend.vacancies.urls', namespace='vacancies')),
]
urlpatterns = [
url(r'^api/', include(api_urlpatterns, namespace='api')),
url(settings.ADMIN_URL, admin.site.urls),
url(r'^api/api-auth/', include('rest_framework.urls', namespace='rest_framework')),
]
urlpatterns += [
url(r'^api/$', APIRoot.as_view(urlpatterns=urlpatterns, app_namespace='api_v1'), name='api_root')
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.DEBUG:
if 'debug_toolbar' in settings.INSTALLED_APPS:
import debug_toolbar
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
]
|
<commit_before>from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from jobs_backend.views import APIRoot
api_urlpatterns = [
# All api endpoints should be included here
url(r'^users/', include('jobs_backend.users.urls.users', namespace='users')),
url(r'^account/', include('jobs_backend.users.urls.account', namespace='account')),
url(r'^vacancies/', include('jobs_backend.vacancies.urls', namespace='vacancies')),
]
urlpatterns = [
url(r'^api/', include(api_urlpatterns, namespace='api')),
url(settings.ADMIN_URL, admin.site.urls),
url(r'^api/api-auth/', include('rest_framework.urls', namespace='rest_framework')),
]
urlpatterns += [
url(r'^$', APIRoot.as_view(urlpatterns=urlpatterns, app_namespace='api_v1'), name='api_root')
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.DEBUG:
if 'debug_toolbar' in settings.INSTALLED_APPS:
import debug_toolbar
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
]
<commit_msg>jobs-041: Fix browsable API index regexp<commit_after>
|
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from jobs_backend.views import APIRoot
api_urlpatterns = [
# All api endpoints should be included here
url(r'^users/', include('jobs_backend.users.urls.users', namespace='users')),
url(r'^account/', include('jobs_backend.users.urls.account', namespace='account')),
url(r'^vacancies/', include('jobs_backend.vacancies.urls', namespace='vacancies')),
]
urlpatterns = [
url(r'^api/', include(api_urlpatterns, namespace='api')),
url(settings.ADMIN_URL, admin.site.urls),
url(r'^api/api-auth/', include('rest_framework.urls', namespace='rest_framework')),
]
urlpatterns += [
url(r'^api/$', APIRoot.as_view(urlpatterns=urlpatterns, app_namespace='api_v1'), name='api_root')
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.DEBUG:
if 'debug_toolbar' in settings.INSTALLED_APPS:
import debug_toolbar
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
]
|
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from jobs_backend.views import APIRoot
api_urlpatterns = [
# All api endpoints should be included here
url(r'^users/', include('jobs_backend.users.urls.users', namespace='users')),
url(r'^account/', include('jobs_backend.users.urls.account', namespace='account')),
url(r'^vacancies/', include('jobs_backend.vacancies.urls', namespace='vacancies')),
]
urlpatterns = [
url(r'^api/', include(api_urlpatterns, namespace='api')),
url(settings.ADMIN_URL, admin.site.urls),
url(r'^api/api-auth/', include('rest_framework.urls', namespace='rest_framework')),
]
urlpatterns += [
url(r'^$', APIRoot.as_view(urlpatterns=urlpatterns, app_namespace='api_v1'), name='api_root')
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.DEBUG:
if 'debug_toolbar' in settings.INSTALLED_APPS:
import debug_toolbar
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
]
jobs-041: Fix browsable API index regexpfrom django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from jobs_backend.views import APIRoot
api_urlpatterns = [
# All api endpoints should be included here
url(r'^users/', include('jobs_backend.users.urls.users', namespace='users')),
url(r'^account/', include('jobs_backend.users.urls.account', namespace='account')),
url(r'^vacancies/', include('jobs_backend.vacancies.urls', namespace='vacancies')),
]
urlpatterns = [
url(r'^api/', include(api_urlpatterns, namespace='api')),
url(settings.ADMIN_URL, admin.site.urls),
url(r'^api/api-auth/', include('rest_framework.urls', namespace='rest_framework')),
]
urlpatterns += [
url(r'^api/$', APIRoot.as_view(urlpatterns=urlpatterns, app_namespace='api_v1'), name='api_root')
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.DEBUG:
if 'debug_toolbar' in settings.INSTALLED_APPS:
import debug_toolbar
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
]
|
<commit_before>from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from jobs_backend.views import APIRoot
api_urlpatterns = [
# All api endpoints should be included here
url(r'^users/', include('jobs_backend.users.urls.users', namespace='users')),
url(r'^account/', include('jobs_backend.users.urls.account', namespace='account')),
url(r'^vacancies/', include('jobs_backend.vacancies.urls', namespace='vacancies')),
]
urlpatterns = [
url(r'^api/', include(api_urlpatterns, namespace='api')),
url(settings.ADMIN_URL, admin.site.urls),
url(r'^api/api-auth/', include('rest_framework.urls', namespace='rest_framework')),
]
urlpatterns += [
url(r'^$', APIRoot.as_view(urlpatterns=urlpatterns, app_namespace='api_v1'), name='api_root')
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.DEBUG:
if 'debug_toolbar' in settings.INSTALLED_APPS:
import debug_toolbar
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
]
<commit_msg>jobs-041: Fix browsable API index regexp<commit_after>from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from jobs_backend.views import APIRoot
api_urlpatterns = [
# All api endpoints should be included here
url(r'^users/', include('jobs_backend.users.urls.users', namespace='users')),
url(r'^account/', include('jobs_backend.users.urls.account', namespace='account')),
url(r'^vacancies/', include('jobs_backend.vacancies.urls', namespace='vacancies')),
]
urlpatterns = [
url(r'^api/', include(api_urlpatterns, namespace='api')),
url(settings.ADMIN_URL, admin.site.urls),
url(r'^api/api-auth/', include('rest_framework.urls', namespace='rest_framework')),
]
urlpatterns += [
url(r'^api/$', APIRoot.as_view(urlpatterns=urlpatterns, app_namespace='api_v1'), name='api_root')
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.DEBUG:
if 'debug_toolbar' in settings.INSTALLED_APPS:
import debug_toolbar
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
]
|
cdac109151b3ed804ae889155c140a485653aa40
|
wex/__init__.py
|
wex/__init__.py
|
"""
Wextracto is a library for extracting data from web resources.
:copyright: (c) 2012-2016
"""
__version__ = '0.9.0' # pragma: no cover
|
"""
Wextracto is a library for extracting data from web resources.
:copyright: (c) 2012-2016
"""
__version__ = '0.9.1' # pragma: no cover
|
Bump version for nested cache fix
|
Bump version for nested cache fix
|
Python
|
bsd-3-clause
|
gilessbrown/wextracto,eBay/wextracto,eBay/wextracto,gilessbrown/wextracto
|
"""
Wextracto is a library for extracting data from web resources.
:copyright: (c) 2012-2016
"""
__version__ = '0.9.0' # pragma: no cover
Bump version for nested cache fix
|
"""
Wextracto is a library for extracting data from web resources.
:copyright: (c) 2012-2016
"""
__version__ = '0.9.1' # pragma: no cover
|
<commit_before>"""
Wextracto is a library for extracting data from web resources.
:copyright: (c) 2012-2016
"""
__version__ = '0.9.0' # pragma: no cover
<commit_msg>Bump version for nested cache fix<commit_after>
|
"""
Wextracto is a library for extracting data from web resources.
:copyright: (c) 2012-2016
"""
__version__ = '0.9.1' # pragma: no cover
|
"""
Wextracto is a library for extracting data from web resources.
:copyright: (c) 2012-2016
"""
__version__ = '0.9.0' # pragma: no cover
Bump version for nested cache fix"""
Wextracto is a library for extracting data from web resources.
:copyright: (c) 2012-2016
"""
__version__ = '0.9.1' # pragma: no cover
|
<commit_before>"""
Wextracto is a library for extracting data from web resources.
:copyright: (c) 2012-2016
"""
__version__ = '0.9.0' # pragma: no cover
<commit_msg>Bump version for nested cache fix<commit_after>"""
Wextracto is a library for extracting data from web resources.
:copyright: (c) 2012-2016
"""
__version__ = '0.9.1' # pragma: no cover
|
a0e0e70f2ae37bfb3c460324b4aea961f2ea0afb
|
dallinger/heroku/worker.py
|
dallinger/heroku/worker.py
|
"""Heroku web worker."""
import os
import redis
listen = ['high', 'default', 'low']
redis_url = os.getenv('REDIS_URL', 'redis://localhost:6379')
conn = redis.from_url(redis_url)
if __name__ == '__main__': # pragma: nocover
# These imports are inside the __main__ block
# to make sure that we only import from rq_gevent_worker
# (which has the side effect of applying gevent monkey patches)
# in the worker process. This way other processes can import the
# redis connection without that side effect.
from rq import (
Queue,
Connection
)
try:
from rq_gevent_worker import GeventWorker as Worker
except ImportError:
from rq import Worker
import logging
logging.basicConfig(format='%(asctime)s %(message)s', level=logging.INFO)
with Connection(conn):
worker = Worker(list(map(Queue, listen)))
worker.work()
|
"""Heroku web worker."""
import os
import redis
listen = ['high', 'default', 'low']
redis_url = os.getenv('REDIS_URL', 'redis://localhost:6379')
conn = redis.from_url(redis_url)
if __name__ == '__main__': # pragma: nocover
# These imports are inside the __main__ block
# to make sure that we only import from rq_gevent_worker
# (which has the side effect of applying gevent monkey patches)
# in the worker process. This way other processes can import the
# redis connection without that side effect.
from rq import (
Queue,
Connection
)
try:
from rq_gevent_worker import GeventWorker as Worker
except ImportError:
from rq import Worker
from dallinger.config import initialize_experiment_package
initialize_experiment_package(os.getcwd())
import logging
logging.basicConfig(format='%(asctime)s %(message)s', level=logging.INFO)
with Connection(conn):
worker = Worker(list(map(Queue, listen)))
worker.work()
|
Make sure bot jobs can be deserialized
|
Make sure bot jobs can be deserialized
|
Python
|
mit
|
Dallinger/Dallinger,jcpeterson/Dallinger,jcpeterson/Dallinger,Dallinger/Dallinger,jcpeterson/Dallinger,jcpeterson/Dallinger,Dallinger/Dallinger,Dallinger/Dallinger,Dallinger/Dallinger,jcpeterson/Dallinger
|
"""Heroku web worker."""
import os
import redis
listen = ['high', 'default', 'low']
redis_url = os.getenv('REDIS_URL', 'redis://localhost:6379')
conn = redis.from_url(redis_url)
if __name__ == '__main__': # pragma: nocover
# These imports are inside the __main__ block
# to make sure that we only import from rq_gevent_worker
# (which has the side effect of applying gevent monkey patches)
# in the worker process. This way other processes can import the
# redis connection without that side effect.
from rq import (
Queue,
Connection
)
try:
from rq_gevent_worker import GeventWorker as Worker
except ImportError:
from rq import Worker
import logging
logging.basicConfig(format='%(asctime)s %(message)s', level=logging.INFO)
with Connection(conn):
worker = Worker(list(map(Queue, listen)))
worker.work()
Make sure bot jobs can be deserialized
|
"""Heroku web worker."""
import os
import redis
listen = ['high', 'default', 'low']
redis_url = os.getenv('REDIS_URL', 'redis://localhost:6379')
conn = redis.from_url(redis_url)
if __name__ == '__main__': # pragma: nocover
# These imports are inside the __main__ block
# to make sure that we only import from rq_gevent_worker
# (which has the side effect of applying gevent monkey patches)
# in the worker process. This way other processes can import the
# redis connection without that side effect.
from rq import (
Queue,
Connection
)
try:
from rq_gevent_worker import GeventWorker as Worker
except ImportError:
from rq import Worker
from dallinger.config import initialize_experiment_package
initialize_experiment_package(os.getcwd())
import logging
logging.basicConfig(format='%(asctime)s %(message)s', level=logging.INFO)
with Connection(conn):
worker = Worker(list(map(Queue, listen)))
worker.work()
|
<commit_before>"""Heroku web worker."""
import os
import redis
listen = ['high', 'default', 'low']
redis_url = os.getenv('REDIS_URL', 'redis://localhost:6379')
conn = redis.from_url(redis_url)
if __name__ == '__main__': # pragma: nocover
# These imports are inside the __main__ block
# to make sure that we only import from rq_gevent_worker
# (which has the side effect of applying gevent monkey patches)
# in the worker process. This way other processes can import the
# redis connection without that side effect.
from rq import (
Queue,
Connection
)
try:
from rq_gevent_worker import GeventWorker as Worker
except ImportError:
from rq import Worker
import logging
logging.basicConfig(format='%(asctime)s %(message)s', level=logging.INFO)
with Connection(conn):
worker = Worker(list(map(Queue, listen)))
worker.work()
<commit_msg>Make sure bot jobs can be deserialized<commit_after>
|
"""Heroku web worker."""
import os
import redis
listen = ['high', 'default', 'low']
redis_url = os.getenv('REDIS_URL', 'redis://localhost:6379')
conn = redis.from_url(redis_url)
if __name__ == '__main__': # pragma: nocover
# These imports are inside the __main__ block
# to make sure that we only import from rq_gevent_worker
# (which has the side effect of applying gevent monkey patches)
# in the worker process. This way other processes can import the
# redis connection without that side effect.
from rq import (
Queue,
Connection
)
try:
from rq_gevent_worker import GeventWorker as Worker
except ImportError:
from rq import Worker
from dallinger.config import initialize_experiment_package
initialize_experiment_package(os.getcwd())
import logging
logging.basicConfig(format='%(asctime)s %(message)s', level=logging.INFO)
with Connection(conn):
worker = Worker(list(map(Queue, listen)))
worker.work()
|
"""Heroku web worker."""
import os
import redis
listen = ['high', 'default', 'low']
redis_url = os.getenv('REDIS_URL', 'redis://localhost:6379')
conn = redis.from_url(redis_url)
if __name__ == '__main__': # pragma: nocover
# These imports are inside the __main__ block
# to make sure that we only import from rq_gevent_worker
# (which has the side effect of applying gevent monkey patches)
# in the worker process. This way other processes can import the
# redis connection without that side effect.
from rq import (
Queue,
Connection
)
try:
from rq_gevent_worker import GeventWorker as Worker
except ImportError:
from rq import Worker
import logging
logging.basicConfig(format='%(asctime)s %(message)s', level=logging.INFO)
with Connection(conn):
worker = Worker(list(map(Queue, listen)))
worker.work()
Make sure bot jobs can be deserialized"""Heroku web worker."""
import os
import redis
listen = ['high', 'default', 'low']
redis_url = os.getenv('REDIS_URL', 'redis://localhost:6379')
conn = redis.from_url(redis_url)
if __name__ == '__main__': # pragma: nocover
# These imports are inside the __main__ block
# to make sure that we only import from rq_gevent_worker
# (which has the side effect of applying gevent monkey patches)
# in the worker process. This way other processes can import the
# redis connection without that side effect.
from rq import (
Queue,
Connection
)
try:
from rq_gevent_worker import GeventWorker as Worker
except ImportError:
from rq import Worker
from dallinger.config import initialize_experiment_package
initialize_experiment_package(os.getcwd())
import logging
logging.basicConfig(format='%(asctime)s %(message)s', level=logging.INFO)
with Connection(conn):
worker = Worker(list(map(Queue, listen)))
worker.work()
|
<commit_before>"""Heroku web worker."""
import os
import redis
listen = ['high', 'default', 'low']
redis_url = os.getenv('REDIS_URL', 'redis://localhost:6379')
conn = redis.from_url(redis_url)
if __name__ == '__main__': # pragma: nocover
# These imports are inside the __main__ block
# to make sure that we only import from rq_gevent_worker
# (which has the side effect of applying gevent monkey patches)
# in the worker process. This way other processes can import the
# redis connection without that side effect.
from rq import (
Queue,
Connection
)
try:
from rq_gevent_worker import GeventWorker as Worker
except ImportError:
from rq import Worker
import logging
logging.basicConfig(format='%(asctime)s %(message)s', level=logging.INFO)
with Connection(conn):
worker = Worker(list(map(Queue, listen)))
worker.work()
<commit_msg>Make sure bot jobs can be deserialized<commit_after>"""Heroku web worker."""
import os
import redis
listen = ['high', 'default', 'low']
redis_url = os.getenv('REDIS_URL', 'redis://localhost:6379')
conn = redis.from_url(redis_url)
if __name__ == '__main__': # pragma: nocover
# These imports are inside the __main__ block
# to make sure that we only import from rq_gevent_worker
# (which has the side effect of applying gevent monkey patches)
# in the worker process. This way other processes can import the
# redis connection without that side effect.
from rq import (
Queue,
Connection
)
try:
from rq_gevent_worker import GeventWorker as Worker
except ImportError:
from rq import Worker
from dallinger.config import initialize_experiment_package
initialize_experiment_package(os.getcwd())
import logging
logging.basicConfig(format='%(asctime)s %(message)s', level=logging.INFO)
with Connection(conn):
worker = Worker(list(map(Queue, listen)))
worker.work()
|
b812843f03fd0da920872c109132aee7fae82b3a
|
tests/instancing_tests/NonterminalsTest.py
|
tests/instancing_tests/NonterminalsTest.py
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 31.08.2017 11:55
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import *
from grammpy.exceptions import TreeDeletedException
class A(Nonterminal): pass
class B(Nonterminal): pass
class C(Nonterminal): pass
class From(Rule): rule = ([C], [A, B])
class To(Rule): rule = ([A], [B, C])
class NonterminalsTest(TestCase):
def test_correctChild(self):
a = A()
t = To()
a._set_to_rule(t)
self.assertEqual(a.to_rule, t)
def test_correctParent(self):
a = A()
f = From()
a._set_from_rule(f)
self.assertEqual(a.from_rule, f)
def test_deleteParent(self):
a = A()
f = From()
a._set_from_rule(f)
self.assertEqual(a.from_rule, f)
del f
with self.assertRaises(TreeDeletedException):
a.from_rule
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 31.08.2017 11:55
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import *
from grammpy.exceptions import TreeDeletedException
class A(Nonterminal): pass
class B(Nonterminal): pass
class C(Nonterminal): pass
class From(Rule): rule = ([C], [A, B])
class To(Rule): rule = ([A], [B, C])
class NonterminalsTest(TestCase):
def test_correctChild(self):
a = A()
t = To()
a._set_to_rule(t)
self.assertEqual(a.to_rule, t)
def test_correctParent(self):
a = A()
f = From()
a._set_from_rule(f)
self.assertEqual(a.from_rule, f)
def test_deleteParent(self):
a = A()
f = From()
a._set_from_rule(f)
self.assertEqual(a.from_rule, f)
del f
with self.assertRaises(TreeDeletedException):
a.from_rule
def test_shouldNotDeleteChild(self):
a = A()
t = To()
a._set_to_rule(t)
del t
a.to_rule
if __name__ == '__main__':
main()
|
Add test of deleteing child for nonterminal
|
Add test of deleteing child for nonterminal
|
Python
|
mit
|
PatrikValkovic/grammpy
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 31.08.2017 11:55
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import *
from grammpy.exceptions import TreeDeletedException
class A(Nonterminal): pass
class B(Nonterminal): pass
class C(Nonterminal): pass
class From(Rule): rule = ([C], [A, B])
class To(Rule): rule = ([A], [B, C])
class NonterminalsTest(TestCase):
def test_correctChild(self):
a = A()
t = To()
a._set_to_rule(t)
self.assertEqual(a.to_rule, t)
def test_correctParent(self):
a = A()
f = From()
a._set_from_rule(f)
self.assertEqual(a.from_rule, f)
def test_deleteParent(self):
a = A()
f = From()
a._set_from_rule(f)
self.assertEqual(a.from_rule, f)
del f
with self.assertRaises(TreeDeletedException):
a.from_rule
if __name__ == '__main__':
main()
Add test of deleteing child for nonterminal
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 31.08.2017 11:55
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import *
from grammpy.exceptions import TreeDeletedException
class A(Nonterminal): pass
class B(Nonterminal): pass
class C(Nonterminal): pass
class From(Rule): rule = ([C], [A, B])
class To(Rule): rule = ([A], [B, C])
class NonterminalsTest(TestCase):
def test_correctChild(self):
a = A()
t = To()
a._set_to_rule(t)
self.assertEqual(a.to_rule, t)
def test_correctParent(self):
a = A()
f = From()
a._set_from_rule(f)
self.assertEqual(a.from_rule, f)
def test_deleteParent(self):
a = A()
f = From()
a._set_from_rule(f)
self.assertEqual(a.from_rule, f)
del f
with self.assertRaises(TreeDeletedException):
a.from_rule
def test_shouldNotDeleteChild(self):
a = A()
t = To()
a._set_to_rule(t)
del t
a.to_rule
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 31.08.2017 11:55
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import *
from grammpy.exceptions import TreeDeletedException
class A(Nonterminal): pass
class B(Nonterminal): pass
class C(Nonterminal): pass
class From(Rule): rule = ([C], [A, B])
class To(Rule): rule = ([A], [B, C])
class NonterminalsTest(TestCase):
def test_correctChild(self):
a = A()
t = To()
a._set_to_rule(t)
self.assertEqual(a.to_rule, t)
def test_correctParent(self):
a = A()
f = From()
a._set_from_rule(f)
self.assertEqual(a.from_rule, f)
def test_deleteParent(self):
a = A()
f = From()
a._set_from_rule(f)
self.assertEqual(a.from_rule, f)
del f
with self.assertRaises(TreeDeletedException):
a.from_rule
if __name__ == '__main__':
main()
<commit_msg>Add test of deleteing child for nonterminal<commit_after>
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 31.08.2017 11:55
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import *
from grammpy.exceptions import TreeDeletedException
class A(Nonterminal): pass
class B(Nonterminal): pass
class C(Nonterminal): pass
class From(Rule): rule = ([C], [A, B])
class To(Rule): rule = ([A], [B, C])
class NonterminalsTest(TestCase):
def test_correctChild(self):
a = A()
t = To()
a._set_to_rule(t)
self.assertEqual(a.to_rule, t)
def test_correctParent(self):
a = A()
f = From()
a._set_from_rule(f)
self.assertEqual(a.from_rule, f)
def test_deleteParent(self):
a = A()
f = From()
a._set_from_rule(f)
self.assertEqual(a.from_rule, f)
del f
with self.assertRaises(TreeDeletedException):
a.from_rule
def test_shouldNotDeleteChild(self):
a = A()
t = To()
a._set_to_rule(t)
del t
a.to_rule
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 31.08.2017 11:55
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import *
from grammpy.exceptions import TreeDeletedException
class A(Nonterminal): pass
class B(Nonterminal): pass
class C(Nonterminal): pass
class From(Rule): rule = ([C], [A, B])
class To(Rule): rule = ([A], [B, C])
class NonterminalsTest(TestCase):
def test_correctChild(self):
a = A()
t = To()
a._set_to_rule(t)
self.assertEqual(a.to_rule, t)
def test_correctParent(self):
a = A()
f = From()
a._set_from_rule(f)
self.assertEqual(a.from_rule, f)
def test_deleteParent(self):
a = A()
f = From()
a._set_from_rule(f)
self.assertEqual(a.from_rule, f)
del f
with self.assertRaises(TreeDeletedException):
a.from_rule
if __name__ == '__main__':
main()
Add test of deleteing child for nonterminal#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 31.08.2017 11:55
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import *
from grammpy.exceptions import TreeDeletedException
class A(Nonterminal): pass
class B(Nonterminal): pass
class C(Nonterminal): pass
class From(Rule): rule = ([C], [A, B])
class To(Rule): rule = ([A], [B, C])
class NonterminalsTest(TestCase):
def test_correctChild(self):
a = A()
t = To()
a._set_to_rule(t)
self.assertEqual(a.to_rule, t)
def test_correctParent(self):
a = A()
f = From()
a._set_from_rule(f)
self.assertEqual(a.from_rule, f)
def test_deleteParent(self):
a = A()
f = From()
a._set_from_rule(f)
self.assertEqual(a.from_rule, f)
del f
with self.assertRaises(TreeDeletedException):
a.from_rule
def test_shouldNotDeleteChild(self):
a = A()
t = To()
a._set_to_rule(t)
del t
a.to_rule
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 31.08.2017 11:55
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import *
from grammpy.exceptions import TreeDeletedException
class A(Nonterminal): pass
class B(Nonterminal): pass
class C(Nonterminal): pass
class From(Rule): rule = ([C], [A, B])
class To(Rule): rule = ([A], [B, C])
class NonterminalsTest(TestCase):
def test_correctChild(self):
a = A()
t = To()
a._set_to_rule(t)
self.assertEqual(a.to_rule, t)
def test_correctParent(self):
a = A()
f = From()
a._set_from_rule(f)
self.assertEqual(a.from_rule, f)
def test_deleteParent(self):
a = A()
f = From()
a._set_from_rule(f)
self.assertEqual(a.from_rule, f)
del f
with self.assertRaises(TreeDeletedException):
a.from_rule
if __name__ == '__main__':
main()
<commit_msg>Add test of deleteing child for nonterminal<commit_after>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 31.08.2017 11:55
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import *
from grammpy.exceptions import TreeDeletedException
class A(Nonterminal): pass
class B(Nonterminal): pass
class C(Nonterminal): pass
class From(Rule): rule = ([C], [A, B])
class To(Rule): rule = ([A], [B, C])
class NonterminalsTest(TestCase):
def test_correctChild(self):
a = A()
t = To()
a._set_to_rule(t)
self.assertEqual(a.to_rule, t)
def test_correctParent(self):
a = A()
f = From()
a._set_from_rule(f)
self.assertEqual(a.from_rule, f)
def test_deleteParent(self):
a = A()
f = From()
a._set_from_rule(f)
self.assertEqual(a.from_rule, f)
del f
with self.assertRaises(TreeDeletedException):
a.from_rule
def test_shouldNotDeleteChild(self):
a = A()
t = To()
a._set_to_rule(t)
del t
a.to_rule
if __name__ == '__main__':
main()
|
1d321bb0bd6b8e5b6fc14704a6d1b29a365855d8
|
goatctf/core/models.py
|
goatctf/core/models.py
|
from django.contrib.auth.models import User
from django.db import models
import markdown
from core.settings import CHALLENGE_NAME_LENGTH, FLAG_LENGTH, TEAM_NAME_LENGTH
class Challenge(models.Model):
"""A challenge represents an individual problem to be solved."""
name = models.CharField(max_length=CHALLENGE_NAME_LENGTH)
points = models.IntegerField()
category = models.CharField(max_length=2)
flag = models.CharField(max_length=FLAG_LENGTH)
description_markdown = models.TextField()
description_html = models.TextField()
def save(self, *args, **kwargs):
self.description_html = markdown.markdown(self.description_markdown)
super(Challenge, self).save(*args, **kwargs)
class Team(models.Model):
"""A team is a collection of players."""
name = models.CharField(max_length=TEAM_NAME_LENGTH)
creator = models.ForeignKey("Player", related_name="created_teams")
class Player(User):
"""A player is a user with a team."""
team = models.ForeignKey("Team")
class Solution(models.Model):
"""A solution is a player's """
challenge = models.ForeignKey("Challenge")
solved_at = models.DateTimeField(auto_now_add=True)
solver = models.ForeignKey("Player")
|
from django.contrib.auth.models import User
from django.db import models
import markdown
from core.settings import CHALLENGE_NAME_LENGTH, FLAG_LENGTH, TEAM_NAME_LENGTH
class Challenge(models.Model):
"""A challenge represents an individual problem to be solved."""
CATEGORY_CHOICES = (
('be', 'Beer'),
('cr', 'Crypto'),
('ex', 'Exploitation'),
('fo', 'Forensics'),
('rn', 'Recon'),
('re', 'Reversing'),
('we', 'Web'),
('mi', 'Miscellaneous'),
)
name = models.CharField(max_length=CHALLENGE_NAME_LENGTH)
points = models.IntegerField()
category = models.CharField(max_length=2, choices=CATEGORY_CHOICES)
flag = models.CharField(max_length=FLAG_LENGTH)
description_markdown = models.TextField()
description_html = models.TextField()
def save(self, *args, **kwargs):
self.description_html = markdown.markdown(self.description_markdown)
super(Challenge, self).save(*args, **kwargs)
class Team(models.Model):
"""A team is a collection of players."""
name = models.CharField(max_length=TEAM_NAME_LENGTH)
creator = models.ForeignKey("Player", related_name="created_teams")
class Player(User):
"""A player is a user with a team."""
team = models.ForeignKey("Team")
class Solution(models.Model):
"""A solution is a player's """
challenge = models.ForeignKey("Challenge")
solved_at = models.DateTimeField(auto_now_add=True)
solver = models.ForeignKey("Player")
|
Add choices for challenge category
|
Add choices for challenge category
|
Python
|
mit
|
Without-Proper-Instructions/GoatCTF
|
from django.contrib.auth.models import User
from django.db import models
import markdown
from core.settings import CHALLENGE_NAME_LENGTH, FLAG_LENGTH, TEAM_NAME_LENGTH
class Challenge(models.Model):
"""A challenge represents an individual problem to be solved."""
name = models.CharField(max_length=CHALLENGE_NAME_LENGTH)
points = models.IntegerField()
category = models.CharField(max_length=2)
flag = models.CharField(max_length=FLAG_LENGTH)
description_markdown = models.TextField()
description_html = models.TextField()
def save(self, *args, **kwargs):
self.description_html = markdown.markdown(self.description_markdown)
super(Challenge, self).save(*args, **kwargs)
class Team(models.Model):
"""A team is a collection of players."""
name = models.CharField(max_length=TEAM_NAME_LENGTH)
creator = models.ForeignKey("Player", related_name="created_teams")
class Player(User):
"""A player is a user with a team."""
team = models.ForeignKey("Team")
class Solution(models.Model):
"""A solution is a player's """
challenge = models.ForeignKey("Challenge")
solved_at = models.DateTimeField(auto_now_add=True)
solver = models.ForeignKey("Player")
Add choices for challenge category
|
from django.contrib.auth.models import User
from django.db import models
import markdown
from core.settings import CHALLENGE_NAME_LENGTH, FLAG_LENGTH, TEAM_NAME_LENGTH
class Challenge(models.Model):
"""A challenge represents an individual problem to be solved."""
CATEGORY_CHOICES = (
('be', 'Beer'),
('cr', 'Crypto'),
('ex', 'Exploitation'),
('fo', 'Forensics'),
('rn', 'Recon'),
('re', 'Reversing'),
('we', 'Web'),
('mi', 'Miscellaneous'),
)
name = models.CharField(max_length=CHALLENGE_NAME_LENGTH)
points = models.IntegerField()
category = models.CharField(max_length=2, choices=CATEGORY_CHOICES)
flag = models.CharField(max_length=FLAG_LENGTH)
description_markdown = models.TextField()
description_html = models.TextField()
def save(self, *args, **kwargs):
self.description_html = markdown.markdown(self.description_markdown)
super(Challenge, self).save(*args, **kwargs)
class Team(models.Model):
"""A team is a collection of players."""
name = models.CharField(max_length=TEAM_NAME_LENGTH)
creator = models.ForeignKey("Player", related_name="created_teams")
class Player(User):
"""A player is a user with a team."""
team = models.ForeignKey("Team")
class Solution(models.Model):
"""A solution is a player's """
challenge = models.ForeignKey("Challenge")
solved_at = models.DateTimeField(auto_now_add=True)
solver = models.ForeignKey("Player")
|
<commit_before>from django.contrib.auth.models import User
from django.db import models
import markdown
from core.settings import CHALLENGE_NAME_LENGTH, FLAG_LENGTH, TEAM_NAME_LENGTH
class Challenge(models.Model):
"""A challenge represents an individual problem to be solved."""
name = models.CharField(max_length=CHALLENGE_NAME_LENGTH)
points = models.IntegerField()
category = models.CharField(max_length=2)
flag = models.CharField(max_length=FLAG_LENGTH)
description_markdown = models.TextField()
description_html = models.TextField()
def save(self, *args, **kwargs):
self.description_html = markdown.markdown(self.description_markdown)
super(Challenge, self).save(*args, **kwargs)
class Team(models.Model):
"""A team is a collection of players."""
name = models.CharField(max_length=TEAM_NAME_LENGTH)
creator = models.ForeignKey("Player", related_name="created_teams")
class Player(User):
"""A player is a user with a team."""
team = models.ForeignKey("Team")
class Solution(models.Model):
"""A solution is a player's """
challenge = models.ForeignKey("Challenge")
solved_at = models.DateTimeField(auto_now_add=True)
solver = models.ForeignKey("Player")
<commit_msg>Add choices for challenge category<commit_after>
|
from django.contrib.auth.models import User
from django.db import models
import markdown
from core.settings import CHALLENGE_NAME_LENGTH, FLAG_LENGTH, TEAM_NAME_LENGTH
class Challenge(models.Model):
"""A challenge represents an individual problem to be solved."""
CATEGORY_CHOICES = (
('be', 'Beer'),
('cr', 'Crypto'),
('ex', 'Exploitation'),
('fo', 'Forensics'),
('rn', 'Recon'),
('re', 'Reversing'),
('we', 'Web'),
('mi', 'Miscellaneous'),
)
name = models.CharField(max_length=CHALLENGE_NAME_LENGTH)
points = models.IntegerField()
category = models.CharField(max_length=2, choices=CATEGORY_CHOICES)
flag = models.CharField(max_length=FLAG_LENGTH)
description_markdown = models.TextField()
description_html = models.TextField()
def save(self, *args, **kwargs):
self.description_html = markdown.markdown(self.description_markdown)
super(Challenge, self).save(*args, **kwargs)
class Team(models.Model):
"""A team is a collection of players."""
name = models.CharField(max_length=TEAM_NAME_LENGTH)
creator = models.ForeignKey("Player", related_name="created_teams")
class Player(User):
"""A player is a user with a team."""
team = models.ForeignKey("Team")
class Solution(models.Model):
"""A solution is a player's """
challenge = models.ForeignKey("Challenge")
solved_at = models.DateTimeField(auto_now_add=True)
solver = models.ForeignKey("Player")
|
from django.contrib.auth.models import User
from django.db import models
import markdown
from core.settings import CHALLENGE_NAME_LENGTH, FLAG_LENGTH, TEAM_NAME_LENGTH
class Challenge(models.Model):
"""A challenge represents an individual problem to be solved."""
name = models.CharField(max_length=CHALLENGE_NAME_LENGTH)
points = models.IntegerField()
category = models.CharField(max_length=2)
flag = models.CharField(max_length=FLAG_LENGTH)
description_markdown = models.TextField()
description_html = models.TextField()
def save(self, *args, **kwargs):
self.description_html = markdown.markdown(self.description_markdown)
super(Challenge, self).save(*args, **kwargs)
class Team(models.Model):
"""A team is a collection of players."""
name = models.CharField(max_length=TEAM_NAME_LENGTH)
creator = models.ForeignKey("Player", related_name="created_teams")
class Player(User):
"""A player is a user with a team."""
team = models.ForeignKey("Team")
class Solution(models.Model):
"""A solution is a player's """
challenge = models.ForeignKey("Challenge")
solved_at = models.DateTimeField(auto_now_add=True)
solver = models.ForeignKey("Player")
Add choices for challenge categoryfrom django.contrib.auth.models import User
from django.db import models
import markdown
from core.settings import CHALLENGE_NAME_LENGTH, FLAG_LENGTH, TEAM_NAME_LENGTH
class Challenge(models.Model):
"""A challenge represents an individual problem to be solved."""
CATEGORY_CHOICES = (
('be', 'Beer'),
('cr', 'Crypto'),
('ex', 'Exploitation'),
('fo', 'Forensics'),
('rn', 'Recon'),
('re', 'Reversing'),
('we', 'Web'),
('mi', 'Miscellaneous'),
)
name = models.CharField(max_length=CHALLENGE_NAME_LENGTH)
points = models.IntegerField()
category = models.CharField(max_length=2, choices=CATEGORY_CHOICES)
flag = models.CharField(max_length=FLAG_LENGTH)
description_markdown = models.TextField()
description_html = models.TextField()
def save(self, *args, **kwargs):
self.description_html = markdown.markdown(self.description_markdown)
super(Challenge, self).save(*args, **kwargs)
class Team(models.Model):
"""A team is a collection of players."""
name = models.CharField(max_length=TEAM_NAME_LENGTH)
creator = models.ForeignKey("Player", related_name="created_teams")
class Player(User):
"""A player is a user with a team."""
team = models.ForeignKey("Team")
class Solution(models.Model):
"""A solution is a player's """
challenge = models.ForeignKey("Challenge")
solved_at = models.DateTimeField(auto_now_add=True)
solver = models.ForeignKey("Player")
|
<commit_before>from django.contrib.auth.models import User
from django.db import models
import markdown
from core.settings import CHALLENGE_NAME_LENGTH, FLAG_LENGTH, TEAM_NAME_LENGTH
class Challenge(models.Model):
"""A challenge represents an individual problem to be solved."""
name = models.CharField(max_length=CHALLENGE_NAME_LENGTH)
points = models.IntegerField()
category = models.CharField(max_length=2)
flag = models.CharField(max_length=FLAG_LENGTH)
description_markdown = models.TextField()
description_html = models.TextField()
def save(self, *args, **kwargs):
self.description_html = markdown.markdown(self.description_markdown)
super(Challenge, self).save(*args, **kwargs)
class Team(models.Model):
"""A team is a collection of players."""
name = models.CharField(max_length=TEAM_NAME_LENGTH)
creator = models.ForeignKey("Player", related_name="created_teams")
class Player(User):
"""A player is a user with a team."""
team = models.ForeignKey("Team")
class Solution(models.Model):
"""A solution is a player's """
challenge = models.ForeignKey("Challenge")
solved_at = models.DateTimeField(auto_now_add=True)
solver = models.ForeignKey("Player")
<commit_msg>Add choices for challenge category<commit_after>from django.contrib.auth.models import User
from django.db import models
import markdown
from core.settings import CHALLENGE_NAME_LENGTH, FLAG_LENGTH, TEAM_NAME_LENGTH
class Challenge(models.Model):
"""A challenge represents an individual problem to be solved."""
CATEGORY_CHOICES = (
('be', 'Beer'),
('cr', 'Crypto'),
('ex', 'Exploitation'),
('fo', 'Forensics'),
('rn', 'Recon'),
('re', 'Reversing'),
('we', 'Web'),
('mi', 'Miscellaneous'),
)
name = models.CharField(max_length=CHALLENGE_NAME_LENGTH)
points = models.IntegerField()
category = models.CharField(max_length=2, choices=CATEGORY_CHOICES)
flag = models.CharField(max_length=FLAG_LENGTH)
description_markdown = models.TextField()
description_html = models.TextField()
def save(self, *args, **kwargs):
self.description_html = markdown.markdown(self.description_markdown)
super(Challenge, self).save(*args, **kwargs)
class Team(models.Model):
"""A team is a collection of players."""
name = models.CharField(max_length=TEAM_NAME_LENGTH)
creator = models.ForeignKey("Player", related_name="created_teams")
class Player(User):
"""A player is a user with a team."""
team = models.ForeignKey("Team")
class Solution(models.Model):
"""A solution is a player's """
challenge = models.ForeignKey("Challenge")
solved_at = models.DateTimeField(auto_now_add=True)
solver = models.ForeignKey("Player")
|
6cbfee67047ac43d00bd1ff8fabb11dc33314aff
|
ledctl/ledctl.py
|
ledctl/ledctl.py
|
from flask import Flask, request
import pigpio
app = Flask(__name__)
#rgb 22, 27, 17
#base teal 40 97 15
GPIO_RED = 22
GPIO_GREEN = 27
GPIO_BLUE = 17
def to_PWM_dutycycle(string):
try:
i = int(string)
if i < 0:
i = 0
elif i > 255:
i = 255
return i
except ValueError:
return 0
@app.route("/")
def home():
return "Hello World!"
@app.route("/color")
def set_color():
args = request.args.to_dict()
r = to_PWM_dutycycle(args['r'])
g = to_PWM_dutycycle(args['g'])
b = to_PWM_dutycycle(args['b'])
pi = pigpio.pi()
pi.set_PWM_dutycycle(GPIO_RED, r)
pi.set_PWM_dutycycle(GPIO_GREEN, g)
pi.set_PWM_dutycycle(GPIO_BLUE, b)
return str(r) + ' ' + str(g) + ' ' + str(b)
if __name__ == "__main__":
#app.run(debug=True)
app.run(host='0.0.0.0', port=80, debug=True)
|
from flask import Flask, request
import pigpio
app = Flask(__name__)
#rgb 22, 27, 17
#base teal 40 97 15
GPIO_RED = 22
GPIO_GREEN = 27
GPIO_BLUE = 17
pi = pigpio.pi()
def to_PWM_dutycycle(string):
try:
i = int(string)
if i < 0:
i = 0
elif i > 255:
i = 255
return i
except ValueError:
return 0
@app.route("/")
def home():
return "Hello World!"
@app.route("/color")
def set_color():
args = request.args.to_dict()
r = to_PWM_dutycycle(args['r'])
g = to_PWM_dutycycle(args['g'])
b = to_PWM_dutycycle(args['b'])
pi.set_PWM_dutycycle(GPIO_RED, r)
pi.set_PWM_dutycycle(GPIO_GREEN, g)
pi.set_PWM_dutycycle(GPIO_BLUE, b)
return str(r) + ' ' + str(g) + ' ' + str(b)
if __name__ == "__main__":
#app.run(debug=True)
app.run(host='0.0.0.0', port=80, debug=True)
|
Create only one instance of pigpio
|
Create only one instance of pigpio
|
Python
|
mit
|
ayoy/ledctl
|
from flask import Flask, request
import pigpio
app = Flask(__name__)
#rgb 22, 27, 17
#base teal 40 97 15
GPIO_RED = 22
GPIO_GREEN = 27
GPIO_BLUE = 17
def to_PWM_dutycycle(string):
try:
i = int(string)
if i < 0:
i = 0
elif i > 255:
i = 255
return i
except ValueError:
return 0
@app.route("/")
def home():
return "Hello World!"
@app.route("/color")
def set_color():
args = request.args.to_dict()
r = to_PWM_dutycycle(args['r'])
g = to_PWM_dutycycle(args['g'])
b = to_PWM_dutycycle(args['b'])
pi = pigpio.pi()
pi.set_PWM_dutycycle(GPIO_RED, r)
pi.set_PWM_dutycycle(GPIO_GREEN, g)
pi.set_PWM_dutycycle(GPIO_BLUE, b)
return str(r) + ' ' + str(g) + ' ' + str(b)
if __name__ == "__main__":
#app.run(debug=True)
app.run(host='0.0.0.0', port=80, debug=True)
Create only one instance of pigpio
|
from flask import Flask, request
import pigpio
app = Flask(__name__)
#rgb 22, 27, 17
#base teal 40 97 15
GPIO_RED = 22
GPIO_GREEN = 27
GPIO_BLUE = 17
pi = pigpio.pi()
def to_PWM_dutycycle(string):
try:
i = int(string)
if i < 0:
i = 0
elif i > 255:
i = 255
return i
except ValueError:
return 0
@app.route("/")
def home():
return "Hello World!"
@app.route("/color")
def set_color():
args = request.args.to_dict()
r = to_PWM_dutycycle(args['r'])
g = to_PWM_dutycycle(args['g'])
b = to_PWM_dutycycle(args['b'])
pi.set_PWM_dutycycle(GPIO_RED, r)
pi.set_PWM_dutycycle(GPIO_GREEN, g)
pi.set_PWM_dutycycle(GPIO_BLUE, b)
return str(r) + ' ' + str(g) + ' ' + str(b)
if __name__ == "__main__":
#app.run(debug=True)
app.run(host='0.0.0.0', port=80, debug=True)
|
<commit_before>from flask import Flask, request
import pigpio
app = Flask(__name__)
#rgb 22, 27, 17
#base teal 40 97 15
GPIO_RED = 22
GPIO_GREEN = 27
GPIO_BLUE = 17
def to_PWM_dutycycle(string):
try:
i = int(string)
if i < 0:
i = 0
elif i > 255:
i = 255
return i
except ValueError:
return 0
@app.route("/")
def home():
return "Hello World!"
@app.route("/color")
def set_color():
args = request.args.to_dict()
r = to_PWM_dutycycle(args['r'])
g = to_PWM_dutycycle(args['g'])
b = to_PWM_dutycycle(args['b'])
pi = pigpio.pi()
pi.set_PWM_dutycycle(GPIO_RED, r)
pi.set_PWM_dutycycle(GPIO_GREEN, g)
pi.set_PWM_dutycycle(GPIO_BLUE, b)
return str(r) + ' ' + str(g) + ' ' + str(b)
if __name__ == "__main__":
#app.run(debug=True)
app.run(host='0.0.0.0', port=80, debug=True)
<commit_msg>Create only one instance of pigpio<commit_after>
|
from flask import Flask, request
import pigpio
app = Flask(__name__)
#rgb 22, 27, 17
#base teal 40 97 15
GPIO_RED = 22
GPIO_GREEN = 27
GPIO_BLUE = 17
pi = pigpio.pi()
def to_PWM_dutycycle(string):
try:
i = int(string)
if i < 0:
i = 0
elif i > 255:
i = 255
return i
except ValueError:
return 0
@app.route("/")
def home():
return "Hello World!"
@app.route("/color")
def set_color():
args = request.args.to_dict()
r = to_PWM_dutycycle(args['r'])
g = to_PWM_dutycycle(args['g'])
b = to_PWM_dutycycle(args['b'])
pi.set_PWM_dutycycle(GPIO_RED, r)
pi.set_PWM_dutycycle(GPIO_GREEN, g)
pi.set_PWM_dutycycle(GPIO_BLUE, b)
return str(r) + ' ' + str(g) + ' ' + str(b)
if __name__ == "__main__":
#app.run(debug=True)
app.run(host='0.0.0.0', port=80, debug=True)
|
from flask import Flask, request
import pigpio
app = Flask(__name__)
#rgb 22, 27, 17
#base teal 40 97 15
GPIO_RED = 22
GPIO_GREEN = 27
GPIO_BLUE = 17
def to_PWM_dutycycle(string):
try:
i = int(string)
if i < 0:
i = 0
elif i > 255:
i = 255
return i
except ValueError:
return 0
@app.route("/")
def home():
return "Hello World!"
@app.route("/color")
def set_color():
args = request.args.to_dict()
r = to_PWM_dutycycle(args['r'])
g = to_PWM_dutycycle(args['g'])
b = to_PWM_dutycycle(args['b'])
pi = pigpio.pi()
pi.set_PWM_dutycycle(GPIO_RED, r)
pi.set_PWM_dutycycle(GPIO_GREEN, g)
pi.set_PWM_dutycycle(GPIO_BLUE, b)
return str(r) + ' ' + str(g) + ' ' + str(b)
if __name__ == "__main__":
#app.run(debug=True)
app.run(host='0.0.0.0', port=80, debug=True)
Create only one instance of pigpiofrom flask import Flask, request
import pigpio
app = Flask(__name__)
#rgb 22, 27, 17
#base teal 40 97 15
GPIO_RED = 22
GPIO_GREEN = 27
GPIO_BLUE = 17
pi = pigpio.pi()
def to_PWM_dutycycle(string):
try:
i = int(string)
if i < 0:
i = 0
elif i > 255:
i = 255
return i
except ValueError:
return 0
@app.route("/")
def home():
return "Hello World!"
@app.route("/color")
def set_color():
args = request.args.to_dict()
r = to_PWM_dutycycle(args['r'])
g = to_PWM_dutycycle(args['g'])
b = to_PWM_dutycycle(args['b'])
pi.set_PWM_dutycycle(GPIO_RED, r)
pi.set_PWM_dutycycle(GPIO_GREEN, g)
pi.set_PWM_dutycycle(GPIO_BLUE, b)
return str(r) + ' ' + str(g) + ' ' + str(b)
if __name__ == "__main__":
#app.run(debug=True)
app.run(host='0.0.0.0', port=80, debug=True)
|
<commit_before>from flask import Flask, request
import pigpio
app = Flask(__name__)
#rgb 22, 27, 17
#base teal 40 97 15
GPIO_RED = 22
GPIO_GREEN = 27
GPIO_BLUE = 17
def to_PWM_dutycycle(string):
try:
i = int(string)
if i < 0:
i = 0
elif i > 255:
i = 255
return i
except ValueError:
return 0
@app.route("/")
def home():
return "Hello World!"
@app.route("/color")
def set_color():
args = request.args.to_dict()
r = to_PWM_dutycycle(args['r'])
g = to_PWM_dutycycle(args['g'])
b = to_PWM_dutycycle(args['b'])
pi = pigpio.pi()
pi.set_PWM_dutycycle(GPIO_RED, r)
pi.set_PWM_dutycycle(GPIO_GREEN, g)
pi.set_PWM_dutycycle(GPIO_BLUE, b)
return str(r) + ' ' + str(g) + ' ' + str(b)
if __name__ == "__main__":
#app.run(debug=True)
app.run(host='0.0.0.0', port=80, debug=True)
<commit_msg>Create only one instance of pigpio<commit_after>from flask import Flask, request
import pigpio
app = Flask(__name__)
#rgb 22, 27, 17
#base teal 40 97 15
GPIO_RED = 22
GPIO_GREEN = 27
GPIO_BLUE = 17
pi = pigpio.pi()
def to_PWM_dutycycle(string):
try:
i = int(string)
if i < 0:
i = 0
elif i > 255:
i = 255
return i
except ValueError:
return 0
@app.route("/")
def home():
return "Hello World!"
@app.route("/color")
def set_color():
args = request.args.to_dict()
r = to_PWM_dutycycle(args['r'])
g = to_PWM_dutycycle(args['g'])
b = to_PWM_dutycycle(args['b'])
pi.set_PWM_dutycycle(GPIO_RED, r)
pi.set_PWM_dutycycle(GPIO_GREEN, g)
pi.set_PWM_dutycycle(GPIO_BLUE, b)
return str(r) + ' ' + str(g) + ' ' + str(b)
if __name__ == "__main__":
#app.run(debug=True)
app.run(host='0.0.0.0', port=80, debug=True)
|
8b8fea0a212fb93118debc26306ed783196d96a0
|
models.py
|
models.py
|
from google.appengine.ext import db
from google.appengine.api.users import User
class Cfp(db.Model):
name = db.StringProperty()
fullname = db.StringProperty()
website = db.LinkProperty()
begin_conf_date = db.DateProperty()
end_conf_date = db.DateProperty()
submission_deadline = db.DateProperty()
notification_date = db.DateProperty()
country = db.StringProperty()
city = db.StringProperty()
rate = db.RatingProperty()
submitters = db.ListProperty(User)
category = db.StringProperty()
keywords = db.StringListProperty()
last_update = db.DateTimeProperty(auto_now=True)
def setWebsite(self, link):
self.website = db.Link(link)
def setAcceptanceRate(self, rate):
self.rate = db.Rating(rate)
def rfc3339_update():
return last_update.strftime('%Y-%m-%dT%H:%M:%SZ')
|
from google.appengine.ext import db
from google.appengine.api.users import User
class Cfp(db.Model):
name = db.StringProperty()
fullname = db.StringProperty()
website = db.LinkProperty()
begin_conf_date = db.DateProperty()
end_conf_date = db.DateProperty()
submission_deadline = db.DateProperty()
notification_date = db.DateProperty()
country = db.StringProperty()
city = db.StringProperty()
rate = db.RatingProperty()
submitters = db.ListProperty(User)
category = db.StringProperty()
keywords = db.StringListProperty()
last_update = db.DateTimeProperty(auto_now=True)
def setWebsite(self, link):
self.website = db.Link(link)
def setAcceptanceRate(self, rate):
self.rate = db.Rating(rate)
def rfc3339_update(self):
return self.last_update.strftime('%Y-%m-%dT%H:%M:%SZ')
|
Correct bug introduced in the previous commit (last update in feed entries).
|
Correct bug introduced in the previous commit (last update in feed entries).
|
Python
|
mit
|
CaptainPatate/ascfpmfsrt
|
from google.appengine.ext import db
from google.appengine.api.users import User
class Cfp(db.Model):
name = db.StringProperty()
fullname = db.StringProperty()
website = db.LinkProperty()
begin_conf_date = db.DateProperty()
end_conf_date = db.DateProperty()
submission_deadline = db.DateProperty()
notification_date = db.DateProperty()
country = db.StringProperty()
city = db.StringProperty()
rate = db.RatingProperty()
submitters = db.ListProperty(User)
category = db.StringProperty()
keywords = db.StringListProperty()
last_update = db.DateTimeProperty(auto_now=True)
def setWebsite(self, link):
self.website = db.Link(link)
def setAcceptanceRate(self, rate):
self.rate = db.Rating(rate)
def rfc3339_update():
return last_update.strftime('%Y-%m-%dT%H:%M:%SZ')
Correct bug introduced in the previous commit (last update in feed entries).
|
from google.appengine.ext import db
from google.appengine.api.users import User
class Cfp(db.Model):
name = db.StringProperty()
fullname = db.StringProperty()
website = db.LinkProperty()
begin_conf_date = db.DateProperty()
end_conf_date = db.DateProperty()
submission_deadline = db.DateProperty()
notification_date = db.DateProperty()
country = db.StringProperty()
city = db.StringProperty()
rate = db.RatingProperty()
submitters = db.ListProperty(User)
category = db.StringProperty()
keywords = db.StringListProperty()
last_update = db.DateTimeProperty(auto_now=True)
def setWebsite(self, link):
self.website = db.Link(link)
def setAcceptanceRate(self, rate):
self.rate = db.Rating(rate)
def rfc3339_update(self):
return self.last_update.strftime('%Y-%m-%dT%H:%M:%SZ')
|
<commit_before>from google.appengine.ext import db
from google.appengine.api.users import User
class Cfp(db.Model):
name = db.StringProperty()
fullname = db.StringProperty()
website = db.LinkProperty()
begin_conf_date = db.DateProperty()
end_conf_date = db.DateProperty()
submission_deadline = db.DateProperty()
notification_date = db.DateProperty()
country = db.StringProperty()
city = db.StringProperty()
rate = db.RatingProperty()
submitters = db.ListProperty(User)
category = db.StringProperty()
keywords = db.StringListProperty()
last_update = db.DateTimeProperty(auto_now=True)
def setWebsite(self, link):
self.website = db.Link(link)
def setAcceptanceRate(self, rate):
self.rate = db.Rating(rate)
def rfc3339_update():
return last_update.strftime('%Y-%m-%dT%H:%M:%SZ')
<commit_msg>Correct bug introduced in the previous commit (last update in feed entries).<commit_after>
|
from google.appengine.ext import db
from google.appengine.api.users import User
class Cfp(db.Model):
name = db.StringProperty()
fullname = db.StringProperty()
website = db.LinkProperty()
begin_conf_date = db.DateProperty()
end_conf_date = db.DateProperty()
submission_deadline = db.DateProperty()
notification_date = db.DateProperty()
country = db.StringProperty()
city = db.StringProperty()
rate = db.RatingProperty()
submitters = db.ListProperty(User)
category = db.StringProperty()
keywords = db.StringListProperty()
last_update = db.DateTimeProperty(auto_now=True)
def setWebsite(self, link):
self.website = db.Link(link)
def setAcceptanceRate(self, rate):
self.rate = db.Rating(rate)
def rfc3339_update(self):
return self.last_update.strftime('%Y-%m-%dT%H:%M:%SZ')
|
from google.appengine.ext import db
from google.appengine.api.users import User
class Cfp(db.Model):
name = db.StringProperty()
fullname = db.StringProperty()
website = db.LinkProperty()
begin_conf_date = db.DateProperty()
end_conf_date = db.DateProperty()
submission_deadline = db.DateProperty()
notification_date = db.DateProperty()
country = db.StringProperty()
city = db.StringProperty()
rate = db.RatingProperty()
submitters = db.ListProperty(User)
category = db.StringProperty()
keywords = db.StringListProperty()
last_update = db.DateTimeProperty(auto_now=True)
def setWebsite(self, link):
self.website = db.Link(link)
def setAcceptanceRate(self, rate):
self.rate = db.Rating(rate)
def rfc3339_update():
return last_update.strftime('%Y-%m-%dT%H:%M:%SZ')
Correct bug introduced in the previous commit (last update in feed entries).from google.appengine.ext import db
from google.appengine.api.users import User
class Cfp(db.Model):
name = db.StringProperty()
fullname = db.StringProperty()
website = db.LinkProperty()
begin_conf_date = db.DateProperty()
end_conf_date = db.DateProperty()
submission_deadline = db.DateProperty()
notification_date = db.DateProperty()
country = db.StringProperty()
city = db.StringProperty()
rate = db.RatingProperty()
submitters = db.ListProperty(User)
category = db.StringProperty()
keywords = db.StringListProperty()
last_update = db.DateTimeProperty(auto_now=True)
def setWebsite(self, link):
self.website = db.Link(link)
def setAcceptanceRate(self, rate):
self.rate = db.Rating(rate)
def rfc3339_update(self):
return self.last_update.strftime('%Y-%m-%dT%H:%M:%SZ')
|
<commit_before>from google.appengine.ext import db
from google.appengine.api.users import User
class Cfp(db.Model):
name = db.StringProperty()
fullname = db.StringProperty()
website = db.LinkProperty()
begin_conf_date = db.DateProperty()
end_conf_date = db.DateProperty()
submission_deadline = db.DateProperty()
notification_date = db.DateProperty()
country = db.StringProperty()
city = db.StringProperty()
rate = db.RatingProperty()
submitters = db.ListProperty(User)
category = db.StringProperty()
keywords = db.StringListProperty()
last_update = db.DateTimeProperty(auto_now=True)
def setWebsite(self, link):
self.website = db.Link(link)
def setAcceptanceRate(self, rate):
self.rate = db.Rating(rate)
def rfc3339_update():
return last_update.strftime('%Y-%m-%dT%H:%M:%SZ')
<commit_msg>Correct bug introduced in the previous commit (last update in feed entries).<commit_after>from google.appengine.ext import db
from google.appengine.api.users import User
class Cfp(db.Model):
name = db.StringProperty()
fullname = db.StringProperty()
website = db.LinkProperty()
begin_conf_date = db.DateProperty()
end_conf_date = db.DateProperty()
submission_deadline = db.DateProperty()
notification_date = db.DateProperty()
country = db.StringProperty()
city = db.StringProperty()
rate = db.RatingProperty()
submitters = db.ListProperty(User)
category = db.StringProperty()
keywords = db.StringListProperty()
last_update = db.DateTimeProperty(auto_now=True)
def setWebsite(self, link):
self.website = db.Link(link)
def setAcceptanceRate(self, rate):
self.rate = db.Rating(rate)
def rfc3339_update(self):
return self.last_update.strftime('%Y-%m-%dT%H:%M:%SZ')
|
d30358485b78a1257535f8d61611cac168584625
|
models.py
|
models.py
|
from scipy.io import loadmat
import numpy as np
import keras
from keras.preprocessing.image import load_img, img_to_array
from keras.models import Sequential
from keras.layers import Activation
from keras.layers import Dense, Dropout, Flatten
from keras.layers import Conv2D, MaxPooling2D
num_classes = 10
# input image dimensions
img_rows, img_cols = 124, 124
input_shape = (img_rows, img_cols, 1)
# input_shape = (1, img_rows, img_cols)
data_dir = 'data'
def create_model():
model = Sequential()
model.add(Conv2D(32, kernel_size=(3, 3),
activation='relu',
input_shape=input_shape))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Flatten())
model.add(Dense(128, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(num_classes, activation='softmax'))
model.compile(loss=keras.losses.categorical_crossentropy,
optimizer=keras.optimizers.Adadelta(),
metrics=['accuracy'])
# model.compile(loss='categorical_crossentropy',
# optimizer='adam',
# metrics=['accuracy'])
return model
def load_data(mat_file_dir):
x_img = []
y_gender = []
meta = loadmat(mat_file_dir)
img_paths = meta["full_path"]
genders = meta["gender"]
for i, path in enumerate(img_paths):
print('i:', i)
if i == 1:
continue
if i == 3:
continue
if i == 10:
continue
if i == 16:
continue
absPath = data_dir + '/' + path.strip()
print('loading:', absPath)
img = load_img(absPath, target_size=(img_rows, img_cols))
x_img.append( img_to_array(img) )
print('gender:', genders[0][i])
y_gender.append( genders[0][i] )
return np.array(x_img), np.array(y_gender)
# model = create_model()
# print(model)
# meta = loadmat('modified_wiki.mat')
x, y = load_data('modified_wiki.mat')
print('x:', x)
print('y:', y)
|
Add load image and create array data
|
Add load image and create array data
|
Python
|
mit
|
shioyang/model-comparator,shioyang/model-comparator,shioyang/model-comparator,shioyang/model-comparator
|
Add load image and create array data
|
from scipy.io import loadmat
import numpy as np
import keras
from keras.preprocessing.image import load_img, img_to_array
from keras.models import Sequential
from keras.layers import Activation
from keras.layers import Dense, Dropout, Flatten
from keras.layers import Conv2D, MaxPooling2D
num_classes = 10
# input image dimensions
img_rows, img_cols = 124, 124
input_shape = (img_rows, img_cols, 1)
# input_shape = (1, img_rows, img_cols)
data_dir = 'data'
def create_model():
model = Sequential()
model.add(Conv2D(32, kernel_size=(3, 3),
activation='relu',
input_shape=input_shape))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Flatten())
model.add(Dense(128, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(num_classes, activation='softmax'))
model.compile(loss=keras.losses.categorical_crossentropy,
optimizer=keras.optimizers.Adadelta(),
metrics=['accuracy'])
# model.compile(loss='categorical_crossentropy',
# optimizer='adam',
# metrics=['accuracy'])
return model
def load_data(mat_file_dir):
x_img = []
y_gender = []
meta = loadmat(mat_file_dir)
img_paths = meta["full_path"]
genders = meta["gender"]
for i, path in enumerate(img_paths):
print('i:', i)
if i == 1:
continue
if i == 3:
continue
if i == 10:
continue
if i == 16:
continue
absPath = data_dir + '/' + path.strip()
print('loading:', absPath)
img = load_img(absPath, target_size=(img_rows, img_cols))
x_img.append( img_to_array(img) )
print('gender:', genders[0][i])
y_gender.append( genders[0][i] )
return np.array(x_img), np.array(y_gender)
# model = create_model()
# print(model)
# meta = loadmat('modified_wiki.mat')
x, y = load_data('modified_wiki.mat')
print('x:', x)
print('y:', y)
|
<commit_before><commit_msg>Add load image and create array data<commit_after>
|
from scipy.io import loadmat
import numpy as np
import keras
from keras.preprocessing.image import load_img, img_to_array
from keras.models import Sequential
from keras.layers import Activation
from keras.layers import Dense, Dropout, Flatten
from keras.layers import Conv2D, MaxPooling2D
num_classes = 10
# input image dimensions
img_rows, img_cols = 124, 124
input_shape = (img_rows, img_cols, 1)
# input_shape = (1, img_rows, img_cols)
data_dir = 'data'
def create_model():
model = Sequential()
model.add(Conv2D(32, kernel_size=(3, 3),
activation='relu',
input_shape=input_shape))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Flatten())
model.add(Dense(128, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(num_classes, activation='softmax'))
model.compile(loss=keras.losses.categorical_crossentropy,
optimizer=keras.optimizers.Adadelta(),
metrics=['accuracy'])
# model.compile(loss='categorical_crossentropy',
# optimizer='adam',
# metrics=['accuracy'])
return model
def load_data(mat_file_dir):
x_img = []
y_gender = []
meta = loadmat(mat_file_dir)
img_paths = meta["full_path"]
genders = meta["gender"]
for i, path in enumerate(img_paths):
print('i:', i)
if i == 1:
continue
if i == 3:
continue
if i == 10:
continue
if i == 16:
continue
absPath = data_dir + '/' + path.strip()
print('loading:', absPath)
img = load_img(absPath, target_size=(img_rows, img_cols))
x_img.append( img_to_array(img) )
print('gender:', genders[0][i])
y_gender.append( genders[0][i] )
return np.array(x_img), np.array(y_gender)
# model = create_model()
# print(model)
# meta = loadmat('modified_wiki.mat')
x, y = load_data('modified_wiki.mat')
print('x:', x)
print('y:', y)
|
Add load image and create array datafrom scipy.io import loadmat
import numpy as np
import keras
from keras.preprocessing.image import load_img, img_to_array
from keras.models import Sequential
from keras.layers import Activation
from keras.layers import Dense, Dropout, Flatten
from keras.layers import Conv2D, MaxPooling2D
num_classes = 10
# input image dimensions
img_rows, img_cols = 124, 124
input_shape = (img_rows, img_cols, 1)
# input_shape = (1, img_rows, img_cols)
data_dir = 'data'
def create_model():
model = Sequential()
model.add(Conv2D(32, kernel_size=(3, 3),
activation='relu',
input_shape=input_shape))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Flatten())
model.add(Dense(128, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(num_classes, activation='softmax'))
model.compile(loss=keras.losses.categorical_crossentropy,
optimizer=keras.optimizers.Adadelta(),
metrics=['accuracy'])
# model.compile(loss='categorical_crossentropy',
# optimizer='adam',
# metrics=['accuracy'])
return model
def load_data(mat_file_dir):
x_img = []
y_gender = []
meta = loadmat(mat_file_dir)
img_paths = meta["full_path"]
genders = meta["gender"]
for i, path in enumerate(img_paths):
print('i:', i)
if i == 1:
continue
if i == 3:
continue
if i == 10:
continue
if i == 16:
continue
absPath = data_dir + '/' + path.strip()
print('loading:', absPath)
img = load_img(absPath, target_size=(img_rows, img_cols))
x_img.append( img_to_array(img) )
print('gender:', genders[0][i])
y_gender.append( genders[0][i] )
return np.array(x_img), np.array(y_gender)
# model = create_model()
# print(model)
# meta = loadmat('modified_wiki.mat')
x, y = load_data('modified_wiki.mat')
print('x:', x)
print('y:', y)
|
<commit_before><commit_msg>Add load image and create array data<commit_after>from scipy.io import loadmat
import numpy as np
import keras
from keras.preprocessing.image import load_img, img_to_array
from keras.models import Sequential
from keras.layers import Activation
from keras.layers import Dense, Dropout, Flatten
from keras.layers import Conv2D, MaxPooling2D
num_classes = 10
# input image dimensions
img_rows, img_cols = 124, 124
input_shape = (img_rows, img_cols, 1)
# input_shape = (1, img_rows, img_cols)
data_dir = 'data'
def create_model():
model = Sequential()
model.add(Conv2D(32, kernel_size=(3, 3),
activation='relu',
input_shape=input_shape))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Flatten())
model.add(Dense(128, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(num_classes, activation='softmax'))
model.compile(loss=keras.losses.categorical_crossentropy,
optimizer=keras.optimizers.Adadelta(),
metrics=['accuracy'])
# model.compile(loss='categorical_crossentropy',
# optimizer='adam',
# metrics=['accuracy'])
return model
def load_data(mat_file_dir):
x_img = []
y_gender = []
meta = loadmat(mat_file_dir)
img_paths = meta["full_path"]
genders = meta["gender"]
for i, path in enumerate(img_paths):
print('i:', i)
if i == 1:
continue
if i == 3:
continue
if i == 10:
continue
if i == 16:
continue
absPath = data_dir + '/' + path.strip()
print('loading:', absPath)
img = load_img(absPath, target_size=(img_rows, img_cols))
x_img.append( img_to_array(img) )
print('gender:', genders[0][i])
y_gender.append( genders[0][i] )
return np.array(x_img), np.array(y_gender)
# model = create_model()
# print(model)
# meta = loadmat('modified_wiki.mat')
x, y = load_data('modified_wiki.mat')
print('x:', x)
print('y:', y)
|
|
068675a641dc412416624c907fe7b1744d007a99
|
lib/js/Loader.py
|
lib/js/Loader.py
|
#
# JavaScript Tools
# Copyright 2010 Sebastian Werner
#
from js.core.Profiler import *
import logging
class Loader():
def __init__(self, classList):
self.__classList = classList
def generate(self, fileName=None, bootCode=None):
result = ["$LAB"]
pstart()
logging.info("Generating loader...")
for classObj in self.__classList:
if classObj == "WAIT":
result.append("wait()")
else:
result.append('script("%s")' % classObj.path)
if bootCode:
result.append("wait(function(){%s})" % bootCode)
result = "\n.".join(result)
pstop()
if fileName:
output = open(fileName, mode="w", encoding="utf-8")
output.write(result)
output.close()
else:
return result
|
#
# JavaScript Tools
# Copyright 2010 Sebastian Werner
#
from js.core.Profiler import *
import logging
class Loader():
def __init__(self, classList):
self.__classList = classList
def generate(self, fileName=None, bootCode=None):
result = ["$LAB"]
pstart()
logging.info("Generating loader...")
scripts = []
for classObj in self.__classList:
if classObj == "WAIT":
result.append('script(["%s"]).wait()' % '","'.join(scripts))
scripts = []
else:
scripts.append(classObj.path)
if bootCode:
result.append("wait(function(){%s})" % bootCode)
result = "\n.".join(result)
pstop()
if fileName:
output = open(fileName, mode="w", encoding="utf-8")
output.write(result)
output.close()
else:
return result
|
Make use of array support in script() of LABjs to fix recursion errors
|
Make use of array support in script() of LABjs to fix recursion errors
|
Python
|
mit
|
zynga/jasy,sebastian-software/jasy,zynga/jasy,sebastian-software/jasy
|
#
# JavaScript Tools
# Copyright 2010 Sebastian Werner
#
from js.core.Profiler import *
import logging
class Loader():
def __init__(self, classList):
self.__classList = classList
def generate(self, fileName=None, bootCode=None):
result = ["$LAB"]
pstart()
logging.info("Generating loader...")
for classObj in self.__classList:
if classObj == "WAIT":
result.append("wait()")
else:
result.append('script("%s")' % classObj.path)
if bootCode:
result.append("wait(function(){%s})" % bootCode)
result = "\n.".join(result)
pstop()
if fileName:
output = open(fileName, mode="w", encoding="utf-8")
output.write(result)
output.close()
else:
return result
Make use of array support in script() of LABjs to fix recursion errors
|
#
# JavaScript Tools
# Copyright 2010 Sebastian Werner
#
from js.core.Profiler import *
import logging
class Loader():
def __init__(self, classList):
self.__classList = classList
def generate(self, fileName=None, bootCode=None):
result = ["$LAB"]
pstart()
logging.info("Generating loader...")
scripts = []
for classObj in self.__classList:
if classObj == "WAIT":
result.append('script(["%s"]).wait()' % '","'.join(scripts))
scripts = []
else:
scripts.append(classObj.path)
if bootCode:
result.append("wait(function(){%s})" % bootCode)
result = "\n.".join(result)
pstop()
if fileName:
output = open(fileName, mode="w", encoding="utf-8")
output.write(result)
output.close()
else:
return result
|
<commit_before>#
# JavaScript Tools
# Copyright 2010 Sebastian Werner
#
from js.core.Profiler import *
import logging
class Loader():
def __init__(self, classList):
self.__classList = classList
def generate(self, fileName=None, bootCode=None):
result = ["$LAB"]
pstart()
logging.info("Generating loader...")
for classObj in self.__classList:
if classObj == "WAIT":
result.append("wait()")
else:
result.append('script("%s")' % classObj.path)
if bootCode:
result.append("wait(function(){%s})" % bootCode)
result = "\n.".join(result)
pstop()
if fileName:
output = open(fileName, mode="w", encoding="utf-8")
output.write(result)
output.close()
else:
return result
<commit_msg>Make use of array support in script() of LABjs to fix recursion errors<commit_after>
|
#
# JavaScript Tools
# Copyright 2010 Sebastian Werner
#
from js.core.Profiler import *
import logging
class Loader():
def __init__(self, classList):
self.__classList = classList
def generate(self, fileName=None, bootCode=None):
result = ["$LAB"]
pstart()
logging.info("Generating loader...")
scripts = []
for classObj in self.__classList:
if classObj == "WAIT":
result.append('script(["%s"]).wait()' % '","'.join(scripts))
scripts = []
else:
scripts.append(classObj.path)
if bootCode:
result.append("wait(function(){%s})" % bootCode)
result = "\n.".join(result)
pstop()
if fileName:
output = open(fileName, mode="w", encoding="utf-8")
output.write(result)
output.close()
else:
return result
|
#
# JavaScript Tools
# Copyright 2010 Sebastian Werner
#
from js.core.Profiler import *
import logging
class Loader():
def __init__(self, classList):
self.__classList = classList
def generate(self, fileName=None, bootCode=None):
result = ["$LAB"]
pstart()
logging.info("Generating loader...")
for classObj in self.__classList:
if classObj == "WAIT":
result.append("wait()")
else:
result.append('script("%s")' % classObj.path)
if bootCode:
result.append("wait(function(){%s})" % bootCode)
result = "\n.".join(result)
pstop()
if fileName:
output = open(fileName, mode="w", encoding="utf-8")
output.write(result)
output.close()
else:
return result
Make use of array support in script() of LABjs to fix recursion errors#
# JavaScript Tools
# Copyright 2010 Sebastian Werner
#
from js.core.Profiler import *
import logging
class Loader():
def __init__(self, classList):
self.__classList = classList
def generate(self, fileName=None, bootCode=None):
result = ["$LAB"]
pstart()
logging.info("Generating loader...")
scripts = []
for classObj in self.__classList:
if classObj == "WAIT":
result.append('script(["%s"]).wait()' % '","'.join(scripts))
scripts = []
else:
scripts.append(classObj.path)
if bootCode:
result.append("wait(function(){%s})" % bootCode)
result = "\n.".join(result)
pstop()
if fileName:
output = open(fileName, mode="w", encoding="utf-8")
output.write(result)
output.close()
else:
return result
|
<commit_before>#
# JavaScript Tools
# Copyright 2010 Sebastian Werner
#
from js.core.Profiler import *
import logging
class Loader():
def __init__(self, classList):
self.__classList = classList
def generate(self, fileName=None, bootCode=None):
result = ["$LAB"]
pstart()
logging.info("Generating loader...")
for classObj in self.__classList:
if classObj == "WAIT":
result.append("wait()")
else:
result.append('script("%s")' % classObj.path)
if bootCode:
result.append("wait(function(){%s})" % bootCode)
result = "\n.".join(result)
pstop()
if fileName:
output = open(fileName, mode="w", encoding="utf-8")
output.write(result)
output.close()
else:
return result
<commit_msg>Make use of array support in script() of LABjs to fix recursion errors<commit_after>#
# JavaScript Tools
# Copyright 2010 Sebastian Werner
#
from js.core.Profiler import *
import logging
class Loader():
def __init__(self, classList):
self.__classList = classList
def generate(self, fileName=None, bootCode=None):
result = ["$LAB"]
pstart()
logging.info("Generating loader...")
scripts = []
for classObj in self.__classList:
if classObj == "WAIT":
result.append('script(["%s"]).wait()' % '","'.join(scripts))
scripts = []
else:
scripts.append(classObj.path)
if bootCode:
result.append("wait(function(){%s})" % bootCode)
result = "\n.".join(result)
pstop()
if fileName:
output = open(fileName, mode="w", encoding="utf-8")
output.write(result)
output.close()
else:
return result
|
f8b35e2a0cf092441efe1350871814fd347d3627
|
tests/classifier/LinearSVC/LinearSVCJavaTest.py
|
tests/classifier/LinearSVC/LinearSVCJavaTest.py
|
# -*- coding: utf-8 -*-
from unittest import TestCase
from sklearn.svm.classes import LinearSVC
from ..Classifier import Classifier
from ...language.Java import Java
class LinearSVCJavaTest(Java, Classifier, TestCase):
def setUp(self):
super(LinearSVCJavaTest, self).setUp()
self.mdl = LinearSVC(C=1., random_state=0)
def tearDown(self):
super(LinearSVCJavaTest, self).tearDown()
|
# -*- coding: utf-8 -*-
from unittest import TestCase
from sklearn.datasets import load_digits
from sklearn.decomposition import PCA, NMF
from sklearn.feature_selection import SelectKBest
from sklearn.feature_selection import chi2
from sklearn.model_selection import GridSearchCV
from sklearn.pipeline import Pipeline
from sklearn.svm.classes import LinearSVC
from sklearn_porter import Porter
from ..Classifier import Classifier
from ...language.Java import Java
class LinearSVCJavaTest(Java, Classifier, TestCase):
def setUp(self):
super(LinearSVCJavaTest, self).setUp()
self.mdl = LinearSVC(C=1., random_state=0)
def tearDown(self):
super(LinearSVCJavaTest, self).tearDown()
def test_model_within_optimizer(self):
pipe = Pipeline([
('reduce_dim', PCA()),
('classify', LinearSVC())
])
n_features_options = [2, 4, 8]
c_options = [1, 10, 100, 1000]
param_grid = [
{
'reduce_dim': [PCA(iterated_power=7), NMF()],
'reduce_dim__n_components': n_features_options,
'classify__C': c_options
},
{
'reduce_dim': [SelectKBest(chi2)],
'reduce_dim__k': n_features_options,
'classify__C': c_options
},
]
grid = GridSearchCV(pipe, cv=3, n_jobs=1, param_grid=param_grid)
digits = load_digits()
grid.fit(digits.data, digits.target)
try:
Porter(grid, language='java')
except ValueError:
self.assertTrue(False)
else:
self.assertTrue(True)
|
Add test for using optimizers
|
Add test for using optimizers
|
Python
|
bsd-3-clause
|
nok/sklearn-porter
|
# -*- coding: utf-8 -*-
from unittest import TestCase
from sklearn.svm.classes import LinearSVC
from ..Classifier import Classifier
from ...language.Java import Java
class LinearSVCJavaTest(Java, Classifier, TestCase):
def setUp(self):
super(LinearSVCJavaTest, self).setUp()
self.mdl = LinearSVC(C=1., random_state=0)
def tearDown(self):
super(LinearSVCJavaTest, self).tearDown()
Add test for using optimizers
|
# -*- coding: utf-8 -*-
from unittest import TestCase
from sklearn.datasets import load_digits
from sklearn.decomposition import PCA, NMF
from sklearn.feature_selection import SelectKBest
from sklearn.feature_selection import chi2
from sklearn.model_selection import GridSearchCV
from sklearn.pipeline import Pipeline
from sklearn.svm.classes import LinearSVC
from sklearn_porter import Porter
from ..Classifier import Classifier
from ...language.Java import Java
class LinearSVCJavaTest(Java, Classifier, TestCase):
def setUp(self):
super(LinearSVCJavaTest, self).setUp()
self.mdl = LinearSVC(C=1., random_state=0)
def tearDown(self):
super(LinearSVCJavaTest, self).tearDown()
def test_model_within_optimizer(self):
pipe = Pipeline([
('reduce_dim', PCA()),
('classify', LinearSVC())
])
n_features_options = [2, 4, 8]
c_options = [1, 10, 100, 1000]
param_grid = [
{
'reduce_dim': [PCA(iterated_power=7), NMF()],
'reduce_dim__n_components': n_features_options,
'classify__C': c_options
},
{
'reduce_dim': [SelectKBest(chi2)],
'reduce_dim__k': n_features_options,
'classify__C': c_options
},
]
grid = GridSearchCV(pipe, cv=3, n_jobs=1, param_grid=param_grid)
digits = load_digits()
grid.fit(digits.data, digits.target)
try:
Porter(grid, language='java')
except ValueError:
self.assertTrue(False)
else:
self.assertTrue(True)
|
<commit_before># -*- coding: utf-8 -*-
from unittest import TestCase
from sklearn.svm.classes import LinearSVC
from ..Classifier import Classifier
from ...language.Java import Java
class LinearSVCJavaTest(Java, Classifier, TestCase):
def setUp(self):
super(LinearSVCJavaTest, self).setUp()
self.mdl = LinearSVC(C=1., random_state=0)
def tearDown(self):
super(LinearSVCJavaTest, self).tearDown()
<commit_msg>Add test for using optimizers<commit_after>
|
# -*- coding: utf-8 -*-
from unittest import TestCase
from sklearn.datasets import load_digits
from sklearn.decomposition import PCA, NMF
from sklearn.feature_selection import SelectKBest
from sklearn.feature_selection import chi2
from sklearn.model_selection import GridSearchCV
from sklearn.pipeline import Pipeline
from sklearn.svm.classes import LinearSVC
from sklearn_porter import Porter
from ..Classifier import Classifier
from ...language.Java import Java
class LinearSVCJavaTest(Java, Classifier, TestCase):
def setUp(self):
super(LinearSVCJavaTest, self).setUp()
self.mdl = LinearSVC(C=1., random_state=0)
def tearDown(self):
super(LinearSVCJavaTest, self).tearDown()
def test_model_within_optimizer(self):
pipe = Pipeline([
('reduce_dim', PCA()),
('classify', LinearSVC())
])
n_features_options = [2, 4, 8]
c_options = [1, 10, 100, 1000]
param_grid = [
{
'reduce_dim': [PCA(iterated_power=7), NMF()],
'reduce_dim__n_components': n_features_options,
'classify__C': c_options
},
{
'reduce_dim': [SelectKBest(chi2)],
'reduce_dim__k': n_features_options,
'classify__C': c_options
},
]
grid = GridSearchCV(pipe, cv=3, n_jobs=1, param_grid=param_grid)
digits = load_digits()
grid.fit(digits.data, digits.target)
try:
Porter(grid, language='java')
except ValueError:
self.assertTrue(False)
else:
self.assertTrue(True)
|
# -*- coding: utf-8 -*-
from unittest import TestCase
from sklearn.svm.classes import LinearSVC
from ..Classifier import Classifier
from ...language.Java import Java
class LinearSVCJavaTest(Java, Classifier, TestCase):
def setUp(self):
super(LinearSVCJavaTest, self).setUp()
self.mdl = LinearSVC(C=1., random_state=0)
def tearDown(self):
super(LinearSVCJavaTest, self).tearDown()
Add test for using optimizers# -*- coding: utf-8 -*-
from unittest import TestCase
from sklearn.datasets import load_digits
from sklearn.decomposition import PCA, NMF
from sklearn.feature_selection import SelectKBest
from sklearn.feature_selection import chi2
from sklearn.model_selection import GridSearchCV
from sklearn.pipeline import Pipeline
from sklearn.svm.classes import LinearSVC
from sklearn_porter import Porter
from ..Classifier import Classifier
from ...language.Java import Java
class LinearSVCJavaTest(Java, Classifier, TestCase):
def setUp(self):
super(LinearSVCJavaTest, self).setUp()
self.mdl = LinearSVC(C=1., random_state=0)
def tearDown(self):
super(LinearSVCJavaTest, self).tearDown()
def test_model_within_optimizer(self):
pipe = Pipeline([
('reduce_dim', PCA()),
('classify', LinearSVC())
])
n_features_options = [2, 4, 8]
c_options = [1, 10, 100, 1000]
param_grid = [
{
'reduce_dim': [PCA(iterated_power=7), NMF()],
'reduce_dim__n_components': n_features_options,
'classify__C': c_options
},
{
'reduce_dim': [SelectKBest(chi2)],
'reduce_dim__k': n_features_options,
'classify__C': c_options
},
]
grid = GridSearchCV(pipe, cv=3, n_jobs=1, param_grid=param_grid)
digits = load_digits()
grid.fit(digits.data, digits.target)
try:
Porter(grid, language='java')
except ValueError:
self.assertTrue(False)
else:
self.assertTrue(True)
|
<commit_before># -*- coding: utf-8 -*-
from unittest import TestCase
from sklearn.svm.classes import LinearSVC
from ..Classifier import Classifier
from ...language.Java import Java
class LinearSVCJavaTest(Java, Classifier, TestCase):
def setUp(self):
super(LinearSVCJavaTest, self).setUp()
self.mdl = LinearSVC(C=1., random_state=0)
def tearDown(self):
super(LinearSVCJavaTest, self).tearDown()
<commit_msg>Add test for using optimizers<commit_after># -*- coding: utf-8 -*-
from unittest import TestCase
from sklearn.datasets import load_digits
from sklearn.decomposition import PCA, NMF
from sklearn.feature_selection import SelectKBest
from sklearn.feature_selection import chi2
from sklearn.model_selection import GridSearchCV
from sklearn.pipeline import Pipeline
from sklearn.svm.classes import LinearSVC
from sklearn_porter import Porter
from ..Classifier import Classifier
from ...language.Java import Java
class LinearSVCJavaTest(Java, Classifier, TestCase):
def setUp(self):
super(LinearSVCJavaTest, self).setUp()
self.mdl = LinearSVC(C=1., random_state=0)
def tearDown(self):
super(LinearSVCJavaTest, self).tearDown()
def test_model_within_optimizer(self):
pipe = Pipeline([
('reduce_dim', PCA()),
('classify', LinearSVC())
])
n_features_options = [2, 4, 8]
c_options = [1, 10, 100, 1000]
param_grid = [
{
'reduce_dim': [PCA(iterated_power=7), NMF()],
'reduce_dim__n_components': n_features_options,
'classify__C': c_options
},
{
'reduce_dim': [SelectKBest(chi2)],
'reduce_dim__k': n_features_options,
'classify__C': c_options
},
]
grid = GridSearchCV(pipe, cv=3, n_jobs=1, param_grid=param_grid)
digits = load_digits()
grid.fit(digits.data, digits.target)
try:
Porter(grid, language='java')
except ValueError:
self.assertTrue(False)
else:
self.assertTrue(True)
|
237a66191295cce2cd52d78bcdb7cbe57e399e56
|
awx/main/management/commands/remove_instance.py
|
awx/main/management/commands/remove_instance.py
|
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved
from django.core.management.base import CommandError
from awx.main.management.commands._base_instance import BaseCommandInstance
from awx.main.models import Instance
instance_str = BaseCommandInstance.instance_str
class Command(BaseCommandInstance):
"""Internal tower command.
Remove an existing instance from the HA instance table.
This command is idempotent.
This command will error out in the following conditions:
* Attempting to remove a primary instance.
"""
def __init__(self):
super(Command, self).__init__()
self.include_option_hostname_uuid_find()
def handle(self, *args, **options):
super(Command, self).handle(*args, **options)
# Is there an existing record for this machine? If so, retrieve that record and look for issues.
try:
# Get the instance.
instance = Instance.objects.get(**self.get_unique_fields())
# Sanity check: Do not remove the primary instance.
if instance.primary:
raise CommandError('I cowardly refuse to remove the primary instance %s.' % instance_str(instance))
# Remove the instance.
instance.delete()
print('Successfully removed instance %s.' % instance_str(instance))
except Instance.DoesNotExist:
print('No matching instance found to remove.')
|
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved
from django.core.management.base import CommandError
from awx.main.management.commands._base_instance import BaseCommandInstance
from awx.main.models import Instance
instance_str = BaseCommandInstance.instance_str
class Command(BaseCommandInstance):
"""Internal tower command.
Remove an existing instance from the HA instance table.
This command is idempotent.
This command will error out in the following conditions:
* Attempting to remove a primary instance.
"""
def __init__(self):
super(Command, self).__init__()
self.include_option_hostname_uuid_find()
def handle(self, *args, **options):
super(Command, self).handle(*args, **options)
# Is there an existing record for this machine? If so, retrieve that record and look for issues.
try:
# Get the instance.
instance = Instance.objects.get(**self.get_unique_fields())
# Sanity check: Do not remove the primary instance.
if instance.primary:
raise CommandError('Can not remove primary instance %s. Another instance must be promoted to primary first.' % instance_str(instance))
# Remove the instance.
instance.delete()
print('Successfully removed instance %s.' % instance_str(instance))
except Instance.DoesNotExist:
print('No matching instance found to remove.')
|
Fix verbage around why we are disallowing removing a primary
|
Fix verbage around why we are disallowing removing a primary
|
Python
|
apache-2.0
|
wwitzel3/awx,wwitzel3/awx,snahelou/awx,snahelou/awx,snahelou/awx,wwitzel3/awx,wwitzel3/awx,snahelou/awx
|
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved
from django.core.management.base import CommandError
from awx.main.management.commands._base_instance import BaseCommandInstance
from awx.main.models import Instance
instance_str = BaseCommandInstance.instance_str
class Command(BaseCommandInstance):
"""Internal tower command.
Remove an existing instance from the HA instance table.
This command is idempotent.
This command will error out in the following conditions:
* Attempting to remove a primary instance.
"""
def __init__(self):
super(Command, self).__init__()
self.include_option_hostname_uuid_find()
def handle(self, *args, **options):
super(Command, self).handle(*args, **options)
# Is there an existing record for this machine? If so, retrieve that record and look for issues.
try:
# Get the instance.
instance = Instance.objects.get(**self.get_unique_fields())
# Sanity check: Do not remove the primary instance.
if instance.primary:
raise CommandError('I cowardly refuse to remove the primary instance %s.' % instance_str(instance))
# Remove the instance.
instance.delete()
print('Successfully removed instance %s.' % instance_str(instance))
except Instance.DoesNotExist:
print('No matching instance found to remove.')
Fix verbage around why we are disallowing removing a primary
|
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved
from django.core.management.base import CommandError
from awx.main.management.commands._base_instance import BaseCommandInstance
from awx.main.models import Instance
instance_str = BaseCommandInstance.instance_str
class Command(BaseCommandInstance):
"""Internal tower command.
Remove an existing instance from the HA instance table.
This command is idempotent.
This command will error out in the following conditions:
* Attempting to remove a primary instance.
"""
def __init__(self):
super(Command, self).__init__()
self.include_option_hostname_uuid_find()
def handle(self, *args, **options):
super(Command, self).handle(*args, **options)
# Is there an existing record for this machine? If so, retrieve that record and look for issues.
try:
# Get the instance.
instance = Instance.objects.get(**self.get_unique_fields())
# Sanity check: Do not remove the primary instance.
if instance.primary:
raise CommandError('Can not remove primary instance %s. Another instance must be promoted to primary first.' % instance_str(instance))
# Remove the instance.
instance.delete()
print('Successfully removed instance %s.' % instance_str(instance))
except Instance.DoesNotExist:
print('No matching instance found to remove.')
|
<commit_before># Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved
from django.core.management.base import CommandError
from awx.main.management.commands._base_instance import BaseCommandInstance
from awx.main.models import Instance
instance_str = BaseCommandInstance.instance_str
class Command(BaseCommandInstance):
"""Internal tower command.
Remove an existing instance from the HA instance table.
This command is idempotent.
This command will error out in the following conditions:
* Attempting to remove a primary instance.
"""
def __init__(self):
super(Command, self).__init__()
self.include_option_hostname_uuid_find()
def handle(self, *args, **options):
super(Command, self).handle(*args, **options)
# Is there an existing record for this machine? If so, retrieve that record and look for issues.
try:
# Get the instance.
instance = Instance.objects.get(**self.get_unique_fields())
# Sanity check: Do not remove the primary instance.
if instance.primary:
raise CommandError('I cowardly refuse to remove the primary instance %s.' % instance_str(instance))
# Remove the instance.
instance.delete()
print('Successfully removed instance %s.' % instance_str(instance))
except Instance.DoesNotExist:
print('No matching instance found to remove.')
<commit_msg>Fix verbage around why we are disallowing removing a primary<commit_after>
|
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved
from django.core.management.base import CommandError
from awx.main.management.commands._base_instance import BaseCommandInstance
from awx.main.models import Instance
instance_str = BaseCommandInstance.instance_str
class Command(BaseCommandInstance):
"""Internal tower command.
Remove an existing instance from the HA instance table.
This command is idempotent.
This command will error out in the following conditions:
* Attempting to remove a primary instance.
"""
def __init__(self):
super(Command, self).__init__()
self.include_option_hostname_uuid_find()
def handle(self, *args, **options):
super(Command, self).handle(*args, **options)
# Is there an existing record for this machine? If so, retrieve that record and look for issues.
try:
# Get the instance.
instance = Instance.objects.get(**self.get_unique_fields())
# Sanity check: Do not remove the primary instance.
if instance.primary:
raise CommandError('Can not remove primary instance %s. Another instance must be promoted to primary first.' % instance_str(instance))
# Remove the instance.
instance.delete()
print('Successfully removed instance %s.' % instance_str(instance))
except Instance.DoesNotExist:
print('No matching instance found to remove.')
|
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved
from django.core.management.base import CommandError
from awx.main.management.commands._base_instance import BaseCommandInstance
from awx.main.models import Instance
instance_str = BaseCommandInstance.instance_str
class Command(BaseCommandInstance):
"""Internal tower command.
Remove an existing instance from the HA instance table.
This command is idempotent.
This command will error out in the following conditions:
* Attempting to remove a primary instance.
"""
def __init__(self):
super(Command, self).__init__()
self.include_option_hostname_uuid_find()
def handle(self, *args, **options):
super(Command, self).handle(*args, **options)
# Is there an existing record for this machine? If so, retrieve that record and look for issues.
try:
# Get the instance.
instance = Instance.objects.get(**self.get_unique_fields())
# Sanity check: Do not remove the primary instance.
if instance.primary:
raise CommandError('I cowardly refuse to remove the primary instance %s.' % instance_str(instance))
# Remove the instance.
instance.delete()
print('Successfully removed instance %s.' % instance_str(instance))
except Instance.DoesNotExist:
print('No matching instance found to remove.')
Fix verbage around why we are disallowing removing a primary# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved
from django.core.management.base import CommandError
from awx.main.management.commands._base_instance import BaseCommandInstance
from awx.main.models import Instance
instance_str = BaseCommandInstance.instance_str
class Command(BaseCommandInstance):
"""Internal tower command.
Remove an existing instance from the HA instance table.
This command is idempotent.
This command will error out in the following conditions:
* Attempting to remove a primary instance.
"""
def __init__(self):
super(Command, self).__init__()
self.include_option_hostname_uuid_find()
def handle(self, *args, **options):
super(Command, self).handle(*args, **options)
# Is there an existing record for this machine? If so, retrieve that record and look for issues.
try:
# Get the instance.
instance = Instance.objects.get(**self.get_unique_fields())
# Sanity check: Do not remove the primary instance.
if instance.primary:
raise CommandError('Can not remove primary instance %s. Another instance must be promoted to primary first.' % instance_str(instance))
# Remove the instance.
instance.delete()
print('Successfully removed instance %s.' % instance_str(instance))
except Instance.DoesNotExist:
print('No matching instance found to remove.')
|
<commit_before># Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved
from django.core.management.base import CommandError
from awx.main.management.commands._base_instance import BaseCommandInstance
from awx.main.models import Instance
instance_str = BaseCommandInstance.instance_str
class Command(BaseCommandInstance):
"""Internal tower command.
Remove an existing instance from the HA instance table.
This command is idempotent.
This command will error out in the following conditions:
* Attempting to remove a primary instance.
"""
def __init__(self):
super(Command, self).__init__()
self.include_option_hostname_uuid_find()
def handle(self, *args, **options):
super(Command, self).handle(*args, **options)
# Is there an existing record for this machine? If so, retrieve that record and look for issues.
try:
# Get the instance.
instance = Instance.objects.get(**self.get_unique_fields())
# Sanity check: Do not remove the primary instance.
if instance.primary:
raise CommandError('I cowardly refuse to remove the primary instance %s.' % instance_str(instance))
# Remove the instance.
instance.delete()
print('Successfully removed instance %s.' % instance_str(instance))
except Instance.DoesNotExist:
print('No matching instance found to remove.')
<commit_msg>Fix verbage around why we are disallowing removing a primary<commit_after># Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved
from django.core.management.base import CommandError
from awx.main.management.commands._base_instance import BaseCommandInstance
from awx.main.models import Instance
instance_str = BaseCommandInstance.instance_str
class Command(BaseCommandInstance):
"""Internal tower command.
Remove an existing instance from the HA instance table.
This command is idempotent.
This command will error out in the following conditions:
* Attempting to remove a primary instance.
"""
def __init__(self):
super(Command, self).__init__()
self.include_option_hostname_uuid_find()
def handle(self, *args, **options):
super(Command, self).handle(*args, **options)
# Is there an existing record for this machine? If so, retrieve that record and look for issues.
try:
# Get the instance.
instance = Instance.objects.get(**self.get_unique_fields())
# Sanity check: Do not remove the primary instance.
if instance.primary:
raise CommandError('Can not remove primary instance %s. Another instance must be promoted to primary first.' % instance_str(instance))
# Remove the instance.
instance.delete()
print('Successfully removed instance %s.' % instance_str(instance))
except Instance.DoesNotExist:
print('No matching instance found to remove.')
|
726f4d016a6e0e3c4d6c053afc98bfcae445620c
|
test_setup.py
|
test_setup.py
|
"""Test setup.py."""
import os
import subprocess
import sys
def test_setup():
"""Run setup.py check."""
command = [sys.executable, 'setup.py', 'check', '--metadata', '--strict']
assert subprocess.run(command).returncode == 0
def test_console_scripts():
"""Ensure console scripts were installed correctly."""
assert any(
os.path.isfile(os.path.join(directory, 'keysmith'))
for directory in sys.path
)
|
"""Test setup.py."""
import os
import subprocess
import sys
def test_setup():
"""Run setup.py check."""
command = [sys.executable, 'setup.py', 'check', '--metadata', '--strict']
assert subprocess.run(command).returncode == 0
def test_console_scripts():
"""Ensure console scripts were installed correctly."""
assert any(
os.path.isfile(os.path.join(directory, 'keysmith'))
for directory in os.environ['PATH'].split(':')
)
|
Use os.environ['PATH'] instead of sys.path
|
Use os.environ['PATH'] instead of sys.path
|
Python
|
bsd-3-clause
|
dmtucker/keysmith
|
"""Test setup.py."""
import os
import subprocess
import sys
def test_setup():
"""Run setup.py check."""
command = [sys.executable, 'setup.py', 'check', '--metadata', '--strict']
assert subprocess.run(command).returncode == 0
def test_console_scripts():
"""Ensure console scripts were installed correctly."""
assert any(
os.path.isfile(os.path.join(directory, 'keysmith'))
for directory in sys.path
)
Use os.environ['PATH'] instead of sys.path
|
"""Test setup.py."""
import os
import subprocess
import sys
def test_setup():
"""Run setup.py check."""
command = [sys.executable, 'setup.py', 'check', '--metadata', '--strict']
assert subprocess.run(command).returncode == 0
def test_console_scripts():
"""Ensure console scripts were installed correctly."""
assert any(
os.path.isfile(os.path.join(directory, 'keysmith'))
for directory in os.environ['PATH'].split(':')
)
|
<commit_before>"""Test setup.py."""
import os
import subprocess
import sys
def test_setup():
"""Run setup.py check."""
command = [sys.executable, 'setup.py', 'check', '--metadata', '--strict']
assert subprocess.run(command).returncode == 0
def test_console_scripts():
"""Ensure console scripts were installed correctly."""
assert any(
os.path.isfile(os.path.join(directory, 'keysmith'))
for directory in sys.path
)
<commit_msg>Use os.environ['PATH'] instead of sys.path<commit_after>
|
"""Test setup.py."""
import os
import subprocess
import sys
def test_setup():
"""Run setup.py check."""
command = [sys.executable, 'setup.py', 'check', '--metadata', '--strict']
assert subprocess.run(command).returncode == 0
def test_console_scripts():
"""Ensure console scripts were installed correctly."""
assert any(
os.path.isfile(os.path.join(directory, 'keysmith'))
for directory in os.environ['PATH'].split(':')
)
|
"""Test setup.py."""
import os
import subprocess
import sys
def test_setup():
"""Run setup.py check."""
command = [sys.executable, 'setup.py', 'check', '--metadata', '--strict']
assert subprocess.run(command).returncode == 0
def test_console_scripts():
"""Ensure console scripts were installed correctly."""
assert any(
os.path.isfile(os.path.join(directory, 'keysmith'))
for directory in sys.path
)
Use os.environ['PATH'] instead of sys.path"""Test setup.py."""
import os
import subprocess
import sys
def test_setup():
"""Run setup.py check."""
command = [sys.executable, 'setup.py', 'check', '--metadata', '--strict']
assert subprocess.run(command).returncode == 0
def test_console_scripts():
"""Ensure console scripts were installed correctly."""
assert any(
os.path.isfile(os.path.join(directory, 'keysmith'))
for directory in os.environ['PATH'].split(':')
)
|
<commit_before>"""Test setup.py."""
import os
import subprocess
import sys
def test_setup():
"""Run setup.py check."""
command = [sys.executable, 'setup.py', 'check', '--metadata', '--strict']
assert subprocess.run(command).returncode == 0
def test_console_scripts():
"""Ensure console scripts were installed correctly."""
assert any(
os.path.isfile(os.path.join(directory, 'keysmith'))
for directory in sys.path
)
<commit_msg>Use os.environ['PATH'] instead of sys.path<commit_after>"""Test setup.py."""
import os
import subprocess
import sys
def test_setup():
"""Run setup.py check."""
command = [sys.executable, 'setup.py', 'check', '--metadata', '--strict']
assert subprocess.run(command).returncode == 0
def test_console_scripts():
"""Ensure console scripts were installed correctly."""
assert any(
os.path.isfile(os.path.join(directory, 'keysmith'))
for directory in os.environ['PATH'].split(':')
)
|
22f52f97db77e0127172721eacc98196d32a77d7
|
Lib/importlib/test/import_/util.py
|
Lib/importlib/test/import_/util.py
|
import functools
import importlib._bootstrap
using___import__ = False
def import_(*args, **kwargs):
"""Delegate to allow for injecting different implementations of import."""
if using___import__:
return __import__(*args, **kwargs)
else:
return importlib._bootstrap.__import__(*args, **kwargs)
def importlib_only(fxn):
"""Decorator to mark which tests are not supported by the current
implementation of __import__()."""
def inner(*args, **kwargs):
if using___import__:
return
else:
return fxn(*args, **kwargs)
functools.update_wrapper(inner, fxn)
return inner
def mock_path_hook(*entries, importer):
"""A mock sys.path_hooks entry."""
def hook(entry):
if entry not in entries:
raise ImportError
return importer
return hook
|
import functools
import importlib
import importlib._bootstrap
import unittest
using___import__ = False
def import_(*args, **kwargs):
"""Delegate to allow for injecting different implementations of import."""
if using___import__:
return __import__(*args, **kwargs)
else:
return importlib._bootstrap.__import__(*args, **kwargs)
importlib_only = unittest.skipIf(using___import__, "importlib-specific test")
def mock_path_hook(*entries, importer):
"""A mock sys.path_hooks entry."""
def hook(entry):
if entry not in entries:
raise ImportError
return importer
return hook
|
Move a test-skipping decorator over to unittest.skipIf.
|
Move a test-skipping decorator over to unittest.skipIf.
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
import functools
import importlib._bootstrap
using___import__ = False
def import_(*args, **kwargs):
"""Delegate to allow for injecting different implementations of import."""
if using___import__:
return __import__(*args, **kwargs)
else:
return importlib._bootstrap.__import__(*args, **kwargs)
def importlib_only(fxn):
"""Decorator to mark which tests are not supported by the current
implementation of __import__()."""
def inner(*args, **kwargs):
if using___import__:
return
else:
return fxn(*args, **kwargs)
functools.update_wrapper(inner, fxn)
return inner
def mock_path_hook(*entries, importer):
"""A mock sys.path_hooks entry."""
def hook(entry):
if entry not in entries:
raise ImportError
return importer
return hook
Move a test-skipping decorator over to unittest.skipIf.
|
import functools
import importlib
import importlib._bootstrap
import unittest
using___import__ = False
def import_(*args, **kwargs):
"""Delegate to allow for injecting different implementations of import."""
if using___import__:
return __import__(*args, **kwargs)
else:
return importlib._bootstrap.__import__(*args, **kwargs)
importlib_only = unittest.skipIf(using___import__, "importlib-specific test")
def mock_path_hook(*entries, importer):
"""A mock sys.path_hooks entry."""
def hook(entry):
if entry not in entries:
raise ImportError
return importer
return hook
|
<commit_before>import functools
import importlib._bootstrap
using___import__ = False
def import_(*args, **kwargs):
"""Delegate to allow for injecting different implementations of import."""
if using___import__:
return __import__(*args, **kwargs)
else:
return importlib._bootstrap.__import__(*args, **kwargs)
def importlib_only(fxn):
"""Decorator to mark which tests are not supported by the current
implementation of __import__()."""
def inner(*args, **kwargs):
if using___import__:
return
else:
return fxn(*args, **kwargs)
functools.update_wrapper(inner, fxn)
return inner
def mock_path_hook(*entries, importer):
"""A mock sys.path_hooks entry."""
def hook(entry):
if entry not in entries:
raise ImportError
return importer
return hook
<commit_msg>Move a test-skipping decorator over to unittest.skipIf.<commit_after>
|
import functools
import importlib
import importlib._bootstrap
import unittest
using___import__ = False
def import_(*args, **kwargs):
"""Delegate to allow for injecting different implementations of import."""
if using___import__:
return __import__(*args, **kwargs)
else:
return importlib._bootstrap.__import__(*args, **kwargs)
importlib_only = unittest.skipIf(using___import__, "importlib-specific test")
def mock_path_hook(*entries, importer):
"""A mock sys.path_hooks entry."""
def hook(entry):
if entry not in entries:
raise ImportError
return importer
return hook
|
import functools
import importlib._bootstrap
using___import__ = False
def import_(*args, **kwargs):
"""Delegate to allow for injecting different implementations of import."""
if using___import__:
return __import__(*args, **kwargs)
else:
return importlib._bootstrap.__import__(*args, **kwargs)
def importlib_only(fxn):
"""Decorator to mark which tests are not supported by the current
implementation of __import__()."""
def inner(*args, **kwargs):
if using___import__:
return
else:
return fxn(*args, **kwargs)
functools.update_wrapper(inner, fxn)
return inner
def mock_path_hook(*entries, importer):
"""A mock sys.path_hooks entry."""
def hook(entry):
if entry not in entries:
raise ImportError
return importer
return hook
Move a test-skipping decorator over to unittest.skipIf.import functools
import importlib
import importlib._bootstrap
import unittest
using___import__ = False
def import_(*args, **kwargs):
"""Delegate to allow for injecting different implementations of import."""
if using___import__:
return __import__(*args, **kwargs)
else:
return importlib._bootstrap.__import__(*args, **kwargs)
importlib_only = unittest.skipIf(using___import__, "importlib-specific test")
def mock_path_hook(*entries, importer):
"""A mock sys.path_hooks entry."""
def hook(entry):
if entry not in entries:
raise ImportError
return importer
return hook
|
<commit_before>import functools
import importlib._bootstrap
using___import__ = False
def import_(*args, **kwargs):
"""Delegate to allow for injecting different implementations of import."""
if using___import__:
return __import__(*args, **kwargs)
else:
return importlib._bootstrap.__import__(*args, **kwargs)
def importlib_only(fxn):
"""Decorator to mark which tests are not supported by the current
implementation of __import__()."""
def inner(*args, **kwargs):
if using___import__:
return
else:
return fxn(*args, **kwargs)
functools.update_wrapper(inner, fxn)
return inner
def mock_path_hook(*entries, importer):
"""A mock sys.path_hooks entry."""
def hook(entry):
if entry not in entries:
raise ImportError
return importer
return hook
<commit_msg>Move a test-skipping decorator over to unittest.skipIf.<commit_after>import functools
import importlib
import importlib._bootstrap
import unittest
using___import__ = False
def import_(*args, **kwargs):
"""Delegate to allow for injecting different implementations of import."""
if using___import__:
return __import__(*args, **kwargs)
else:
return importlib._bootstrap.__import__(*args, **kwargs)
importlib_only = unittest.skipIf(using___import__, "importlib-specific test")
def mock_path_hook(*entries, importer):
"""A mock sys.path_hooks entry."""
def hook(entry):
if entry not in entries:
raise ImportError
return importer
return hook
|
0869ee91df9e379ac538300cabd6ef2c9e771c7b
|
plugins/admin.py
|
plugins/admin.py
|
import binascii
import git
import sys
import os
import logging
logger = logging.getLogger('root')
__match__ = r"!update|!reload"
def on_message(bot, channel, user, message):
requires_reload = message == '!reload'
if message == '!update':
local = git.Repo(os.getcwd())
origin = git.remote.Remote(local, 'origin')
head = local.heads[0]
logger.info("Updating from origin repository")
for pull_info in origin.pull():
if head.commit == pull_info.commit:
bot.send_text(channel, "`{}` is already up-to-date!".format(
bot.name))
break
requires_reload = True
commit_hash = binascii.hexlify(pull_info.commit.binsha).decode()
commit_message = pull_info.commit.message.strip()
bot.send_text(channel, "*Fast-forwarding* to `{}`".format(
commit_hash))
logger.debug("Fast-forwarding to {}".format(commit_hash))
bot.send_text(channel, "*Latest commit*: `{}`".format(
commit_message))
logger.debug("Latest commit: {}".format(commit_message))
if requires_reload:
bot.send_text(channel, "_Reloading...see you on the other side!_")
python = sys.executable
os.execl(python, python, *sys.argv)
|
import binascii
import git
import sys
import os
import logging
logger = logging.getLogger('root')
__match__ = r"!update|!reload"
def on_message(bot, channel, user, message):
requires_reload = message == '!reload'
if message == '!update':
local = git.Repo(os.getcwd())
origin = git.remote.Remote(local, 'origin')
prev_commit = local.heads[0].commit
logger.info("Updating from origin repository")
for pull_info in origin.pull():
if prev_commit == pull_info.commit:
bot.send_text(channel, "`{}` is already up-to-date!".format(
bot.name))
break
requires_reload = True
commit_hash = binascii.hexlify(pull_info.commit.binsha).decode()
commit_message = pull_info.commit.message.strip()
bot.send_text(channel, "*Fast-forwarding* to `{}`".format(
commit_hash))
logger.debug("Fast-forwarding to {}".format(commit_hash))
bot.send_text(channel, "*Latest commit*: `{}`".format(
commit_message))
logger.debug("Latest commit: {}".format(commit_message))
if requires_reload:
bot.send_text(channel, "_Reloading...see you on the other side!_")
python = sys.executable
os.execl(python, python, *sys.argv)
|
Use previous commit as reference point for updates
|
Use previous commit as reference point for updates
|
Python
|
mit
|
kvchen/keffbot-py,kvchen/keffbot
|
import binascii
import git
import sys
import os
import logging
logger = logging.getLogger('root')
__match__ = r"!update|!reload"
def on_message(bot, channel, user, message):
requires_reload = message == '!reload'
if message == '!update':
local = git.Repo(os.getcwd())
origin = git.remote.Remote(local, 'origin')
head = local.heads[0]
logger.info("Updating from origin repository")
for pull_info in origin.pull():
if head.commit == pull_info.commit:
bot.send_text(channel, "`{}` is already up-to-date!".format(
bot.name))
break
requires_reload = True
commit_hash = binascii.hexlify(pull_info.commit.binsha).decode()
commit_message = pull_info.commit.message.strip()
bot.send_text(channel, "*Fast-forwarding* to `{}`".format(
commit_hash))
logger.debug("Fast-forwarding to {}".format(commit_hash))
bot.send_text(channel, "*Latest commit*: `{}`".format(
commit_message))
logger.debug("Latest commit: {}".format(commit_message))
if requires_reload:
bot.send_text(channel, "_Reloading...see you on the other side!_")
python = sys.executable
os.execl(python, python, *sys.argv)
Use previous commit as reference point for updates
|
import binascii
import git
import sys
import os
import logging
logger = logging.getLogger('root')
__match__ = r"!update|!reload"
def on_message(bot, channel, user, message):
requires_reload = message == '!reload'
if message == '!update':
local = git.Repo(os.getcwd())
origin = git.remote.Remote(local, 'origin')
prev_commit = local.heads[0].commit
logger.info("Updating from origin repository")
for pull_info in origin.pull():
if prev_commit == pull_info.commit:
bot.send_text(channel, "`{}` is already up-to-date!".format(
bot.name))
break
requires_reload = True
commit_hash = binascii.hexlify(pull_info.commit.binsha).decode()
commit_message = pull_info.commit.message.strip()
bot.send_text(channel, "*Fast-forwarding* to `{}`".format(
commit_hash))
logger.debug("Fast-forwarding to {}".format(commit_hash))
bot.send_text(channel, "*Latest commit*: `{}`".format(
commit_message))
logger.debug("Latest commit: {}".format(commit_message))
if requires_reload:
bot.send_text(channel, "_Reloading...see you on the other side!_")
python = sys.executable
os.execl(python, python, *sys.argv)
|
<commit_before>import binascii
import git
import sys
import os
import logging
logger = logging.getLogger('root')
__match__ = r"!update|!reload"
def on_message(bot, channel, user, message):
requires_reload = message == '!reload'
if message == '!update':
local = git.Repo(os.getcwd())
origin = git.remote.Remote(local, 'origin')
head = local.heads[0]
logger.info("Updating from origin repository")
for pull_info in origin.pull():
if head.commit == pull_info.commit:
bot.send_text(channel, "`{}` is already up-to-date!".format(
bot.name))
break
requires_reload = True
commit_hash = binascii.hexlify(pull_info.commit.binsha).decode()
commit_message = pull_info.commit.message.strip()
bot.send_text(channel, "*Fast-forwarding* to `{}`".format(
commit_hash))
logger.debug("Fast-forwarding to {}".format(commit_hash))
bot.send_text(channel, "*Latest commit*: `{}`".format(
commit_message))
logger.debug("Latest commit: {}".format(commit_message))
if requires_reload:
bot.send_text(channel, "_Reloading...see you on the other side!_")
python = sys.executable
os.execl(python, python, *sys.argv)
<commit_msg>Use previous commit as reference point for updates<commit_after>
|
import binascii
import git
import sys
import os
import logging
logger = logging.getLogger('root')
__match__ = r"!update|!reload"
def on_message(bot, channel, user, message):
requires_reload = message == '!reload'
if message == '!update':
local = git.Repo(os.getcwd())
origin = git.remote.Remote(local, 'origin')
prev_commit = local.heads[0].commit
logger.info("Updating from origin repository")
for pull_info in origin.pull():
if prev_commit == pull_info.commit:
bot.send_text(channel, "`{}` is already up-to-date!".format(
bot.name))
break
requires_reload = True
commit_hash = binascii.hexlify(pull_info.commit.binsha).decode()
commit_message = pull_info.commit.message.strip()
bot.send_text(channel, "*Fast-forwarding* to `{}`".format(
commit_hash))
logger.debug("Fast-forwarding to {}".format(commit_hash))
bot.send_text(channel, "*Latest commit*: `{}`".format(
commit_message))
logger.debug("Latest commit: {}".format(commit_message))
if requires_reload:
bot.send_text(channel, "_Reloading...see you on the other side!_")
python = sys.executable
os.execl(python, python, *sys.argv)
|
import binascii
import git
import sys
import os
import logging
logger = logging.getLogger('root')
__match__ = r"!update|!reload"
def on_message(bot, channel, user, message):
requires_reload = message == '!reload'
if message == '!update':
local = git.Repo(os.getcwd())
origin = git.remote.Remote(local, 'origin')
head = local.heads[0]
logger.info("Updating from origin repository")
for pull_info in origin.pull():
if head.commit == pull_info.commit:
bot.send_text(channel, "`{}` is already up-to-date!".format(
bot.name))
break
requires_reload = True
commit_hash = binascii.hexlify(pull_info.commit.binsha).decode()
commit_message = pull_info.commit.message.strip()
bot.send_text(channel, "*Fast-forwarding* to `{}`".format(
commit_hash))
logger.debug("Fast-forwarding to {}".format(commit_hash))
bot.send_text(channel, "*Latest commit*: `{}`".format(
commit_message))
logger.debug("Latest commit: {}".format(commit_message))
if requires_reload:
bot.send_text(channel, "_Reloading...see you on the other side!_")
python = sys.executable
os.execl(python, python, *sys.argv)
Use previous commit as reference point for updatesimport binascii
import git
import sys
import os
import logging
logger = logging.getLogger('root')
__match__ = r"!update|!reload"
def on_message(bot, channel, user, message):
requires_reload = message == '!reload'
if message == '!update':
local = git.Repo(os.getcwd())
origin = git.remote.Remote(local, 'origin')
prev_commit = local.heads[0].commit
logger.info("Updating from origin repository")
for pull_info in origin.pull():
if prev_commit == pull_info.commit:
bot.send_text(channel, "`{}` is already up-to-date!".format(
bot.name))
break
requires_reload = True
commit_hash = binascii.hexlify(pull_info.commit.binsha).decode()
commit_message = pull_info.commit.message.strip()
bot.send_text(channel, "*Fast-forwarding* to `{}`".format(
commit_hash))
logger.debug("Fast-forwarding to {}".format(commit_hash))
bot.send_text(channel, "*Latest commit*: `{}`".format(
commit_message))
logger.debug("Latest commit: {}".format(commit_message))
if requires_reload:
bot.send_text(channel, "_Reloading...see you on the other side!_")
python = sys.executable
os.execl(python, python, *sys.argv)
|
<commit_before>import binascii
import git
import sys
import os
import logging
logger = logging.getLogger('root')
__match__ = r"!update|!reload"
def on_message(bot, channel, user, message):
requires_reload = message == '!reload'
if message == '!update':
local = git.Repo(os.getcwd())
origin = git.remote.Remote(local, 'origin')
head = local.heads[0]
logger.info("Updating from origin repository")
for pull_info in origin.pull():
if head.commit == pull_info.commit:
bot.send_text(channel, "`{}` is already up-to-date!".format(
bot.name))
break
requires_reload = True
commit_hash = binascii.hexlify(pull_info.commit.binsha).decode()
commit_message = pull_info.commit.message.strip()
bot.send_text(channel, "*Fast-forwarding* to `{}`".format(
commit_hash))
logger.debug("Fast-forwarding to {}".format(commit_hash))
bot.send_text(channel, "*Latest commit*: `{}`".format(
commit_message))
logger.debug("Latest commit: {}".format(commit_message))
if requires_reload:
bot.send_text(channel, "_Reloading...see you on the other side!_")
python = sys.executable
os.execl(python, python, *sys.argv)
<commit_msg>Use previous commit as reference point for updates<commit_after>import binascii
import git
import sys
import os
import logging
logger = logging.getLogger('root')
__match__ = r"!update|!reload"
def on_message(bot, channel, user, message):
requires_reload = message == '!reload'
if message == '!update':
local = git.Repo(os.getcwd())
origin = git.remote.Remote(local, 'origin')
prev_commit = local.heads[0].commit
logger.info("Updating from origin repository")
for pull_info in origin.pull():
if prev_commit == pull_info.commit:
bot.send_text(channel, "`{}` is already up-to-date!".format(
bot.name))
break
requires_reload = True
commit_hash = binascii.hexlify(pull_info.commit.binsha).decode()
commit_message = pull_info.commit.message.strip()
bot.send_text(channel, "*Fast-forwarding* to `{}`".format(
commit_hash))
logger.debug("Fast-forwarding to {}".format(commit_hash))
bot.send_text(channel, "*Latest commit*: `{}`".format(
commit_message))
logger.debug("Latest commit: {}".format(commit_message))
if requires_reload:
bot.send_text(channel, "_Reloading...see you on the other side!_")
python = sys.executable
os.execl(python, python, *sys.argv)
|
f641be2a8b841eea71f9a0fb5709972debb99df3
|
stingray/bispectrum.py
|
stingray/bispectrum.py
|
from __future__ import division
import numpy as np
from stingray import lightcurve
class Bispectrum(object):
def __init__(self, lc, maxlag, scale=None):
self.lc = lc
# change to fs = 1/lc.dt
self.maxlag = maxlag
self.scale = scale
self.fs = None
# Outputs
self.bispec = None
self.freq = None
self.cum3 = None
self.lag = None
|
from __future__ import division
import numpy as np
from stingray import lightcurve
class Bispectrum(object):
def __init__(self, lc, maxlag, scale=None):
def __init__(self, lc, maxlag, scale=None):
self._make_bispetrum(lc, maxlag, scale)
def _make_bispetrum(self, lc, maxlag, scale):
if not isinstance(lc, lightcurve.Lightcurve):
raise TypeError('lc must be a lightcurve.ightcurve object')
self.lc = lc
self.fs = 1 / lc.dt
self.n = self.lc.n
if not isinstance(maxlag, int):
raise ValueError('maxlag must be an integer')
# if negative maxlag is entered, convert it to +ve
if maxlag < 0:
self.maxlag = -maxlag
else:
self.maxlag = maxlag
if isinstance(scale, str) is False:
raise TypeError("scale must be a string")
if scale.lower() not in ["biased", "unbiased"]:
raise ValueError("scale can only be either 'biased' or 'unbiased'.")
self.scale = scale.lower()
# Other Atributes
self.bispec = None
self.freq = None
self.cum3 = None
self.lags = None
# converting to a row vector to apply matrix operations
self.signal = np.reshape(lc, (1, len(self.lc.counts)))
# Mean subtraction before bispecrum calculation
self.signal = self.signal - np.mean(lc.counts)
|
Make Bispectrum object assign values to attributes
|
Make Bispectrum object assign values to attributes
|
Python
|
mit
|
pabell/stingray,StingraySoftware/stingray,evandromr/stingray,abigailStev/stingray
|
from __future__ import division
import numpy as np
from stingray import lightcurve
class Bispectrum(object):
def __init__(self, lc, maxlag, scale=None):
self.lc = lc
# change to fs = 1/lc.dt
self.maxlag = maxlag
self.scale = scale
self.fs = None
# Outputs
self.bispec = None
self.freq = None
self.cum3 = None
self.lag = None
Make Bispectrum object assign values to attributes
|
from __future__ import division
import numpy as np
from stingray import lightcurve
class Bispectrum(object):
def __init__(self, lc, maxlag, scale=None):
def __init__(self, lc, maxlag, scale=None):
self._make_bispetrum(lc, maxlag, scale)
def _make_bispetrum(self, lc, maxlag, scale):
if not isinstance(lc, lightcurve.Lightcurve):
raise TypeError('lc must be a lightcurve.ightcurve object')
self.lc = lc
self.fs = 1 / lc.dt
self.n = self.lc.n
if not isinstance(maxlag, int):
raise ValueError('maxlag must be an integer')
# if negative maxlag is entered, convert it to +ve
if maxlag < 0:
self.maxlag = -maxlag
else:
self.maxlag = maxlag
if isinstance(scale, str) is False:
raise TypeError("scale must be a string")
if scale.lower() not in ["biased", "unbiased"]:
raise ValueError("scale can only be either 'biased' or 'unbiased'.")
self.scale = scale.lower()
# Other Atributes
self.bispec = None
self.freq = None
self.cum3 = None
self.lags = None
# converting to a row vector to apply matrix operations
self.signal = np.reshape(lc, (1, len(self.lc.counts)))
# Mean subtraction before bispecrum calculation
self.signal = self.signal - np.mean(lc.counts)
|
<commit_before>from __future__ import division
import numpy as np
from stingray import lightcurve
class Bispectrum(object):
def __init__(self, lc, maxlag, scale=None):
self.lc = lc
# change to fs = 1/lc.dt
self.maxlag = maxlag
self.scale = scale
self.fs = None
# Outputs
self.bispec = None
self.freq = None
self.cum3 = None
self.lag = None
<commit_msg>Make Bispectrum object assign values to attributes<commit_after>
|
from __future__ import division
import numpy as np
from stingray import lightcurve
class Bispectrum(object):
def __init__(self, lc, maxlag, scale=None):
def __init__(self, lc, maxlag, scale=None):
self._make_bispetrum(lc, maxlag, scale)
def _make_bispetrum(self, lc, maxlag, scale):
if not isinstance(lc, lightcurve.Lightcurve):
raise TypeError('lc must be a lightcurve.ightcurve object')
self.lc = lc
self.fs = 1 / lc.dt
self.n = self.lc.n
if not isinstance(maxlag, int):
raise ValueError('maxlag must be an integer')
# if negative maxlag is entered, convert it to +ve
if maxlag < 0:
self.maxlag = -maxlag
else:
self.maxlag = maxlag
if isinstance(scale, str) is False:
raise TypeError("scale must be a string")
if scale.lower() not in ["biased", "unbiased"]:
raise ValueError("scale can only be either 'biased' or 'unbiased'.")
self.scale = scale.lower()
# Other Atributes
self.bispec = None
self.freq = None
self.cum3 = None
self.lags = None
# converting to a row vector to apply matrix operations
self.signal = np.reshape(lc, (1, len(self.lc.counts)))
# Mean subtraction before bispecrum calculation
self.signal = self.signal - np.mean(lc.counts)
|
from __future__ import division
import numpy as np
from stingray import lightcurve
class Bispectrum(object):
def __init__(self, lc, maxlag, scale=None):
self.lc = lc
# change to fs = 1/lc.dt
self.maxlag = maxlag
self.scale = scale
self.fs = None
# Outputs
self.bispec = None
self.freq = None
self.cum3 = None
self.lag = None
Make Bispectrum object assign values to attributesfrom __future__ import division
import numpy as np
from stingray import lightcurve
class Bispectrum(object):
def __init__(self, lc, maxlag, scale=None):
def __init__(self, lc, maxlag, scale=None):
self._make_bispetrum(lc, maxlag, scale)
def _make_bispetrum(self, lc, maxlag, scale):
if not isinstance(lc, lightcurve.Lightcurve):
raise TypeError('lc must be a lightcurve.ightcurve object')
self.lc = lc
self.fs = 1 / lc.dt
self.n = self.lc.n
if not isinstance(maxlag, int):
raise ValueError('maxlag must be an integer')
# if negative maxlag is entered, convert it to +ve
if maxlag < 0:
self.maxlag = -maxlag
else:
self.maxlag = maxlag
if isinstance(scale, str) is False:
raise TypeError("scale must be a string")
if scale.lower() not in ["biased", "unbiased"]:
raise ValueError("scale can only be either 'biased' or 'unbiased'.")
self.scale = scale.lower()
# Other Atributes
self.bispec = None
self.freq = None
self.cum3 = None
self.lags = None
# converting to a row vector to apply matrix operations
self.signal = np.reshape(lc, (1, len(self.lc.counts)))
# Mean subtraction before bispecrum calculation
self.signal = self.signal - np.mean(lc.counts)
|
<commit_before>from __future__ import division
import numpy as np
from stingray import lightcurve
class Bispectrum(object):
def __init__(self, lc, maxlag, scale=None):
self.lc = lc
# change to fs = 1/lc.dt
self.maxlag = maxlag
self.scale = scale
self.fs = None
# Outputs
self.bispec = None
self.freq = None
self.cum3 = None
self.lag = None
<commit_msg>Make Bispectrum object assign values to attributes<commit_after>from __future__ import division
import numpy as np
from stingray import lightcurve
class Bispectrum(object):
def __init__(self, lc, maxlag, scale=None):
def __init__(self, lc, maxlag, scale=None):
self._make_bispetrum(lc, maxlag, scale)
def _make_bispetrum(self, lc, maxlag, scale):
if not isinstance(lc, lightcurve.Lightcurve):
raise TypeError('lc must be a lightcurve.ightcurve object')
self.lc = lc
self.fs = 1 / lc.dt
self.n = self.lc.n
if not isinstance(maxlag, int):
raise ValueError('maxlag must be an integer')
# if negative maxlag is entered, convert it to +ve
if maxlag < 0:
self.maxlag = -maxlag
else:
self.maxlag = maxlag
if isinstance(scale, str) is False:
raise TypeError("scale must be a string")
if scale.lower() not in ["biased", "unbiased"]:
raise ValueError("scale can only be either 'biased' or 'unbiased'.")
self.scale = scale.lower()
# Other Atributes
self.bispec = None
self.freq = None
self.cum3 = None
self.lags = None
# converting to a row vector to apply matrix operations
self.signal = np.reshape(lc, (1, len(self.lc.counts)))
# Mean subtraction before bispecrum calculation
self.signal = self.signal - np.mean(lc.counts)
|
04de16d7287bad5023b34efc072e104d8b35c29a
|
test/test.py
|
test/test.py
|
from RPi import GPIO
GPIO.setmode(GPIO.BCM)
num_pins = 28
pins = range(num_pins)
for pin in pins:
GPIO.setup(pin, GPIO.IN, GPIO.PUD_UP)
pin_states = {pin: GPIO.input(pin) for pin in pins}
print()
for pin, state in pin_states.items():
print("%2d: %s" % (pin, state))
active = sum(pin_states.values())
inactive = num_pins - active
print()
print("Total active: %i" % inactive)
print("Total inactive: %i" % active)
|
from RPi import GPIO
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
num_pins = 28
pins = range(num_pins)
for pin in pins:
GPIO.setup(pin, GPIO.IN, GPIO.PUD_UP)
pin_states = {pin: GPIO.input(pin) for pin in pins}
print()
for pin, state in pin_states.items():
print("%2d: %s" % (pin, state))
active = [pin for pin, state in pin_states.items() if not state]
inactive = [pin for pin, state in pin_states.items() if state]
print()
print("Total active: %s" % len(active))
print("Total inactive: %s" % len(inactive))
print()
print("Active pins: %s" % str(active))
print("Inactive pins: %s" % str(inactive))
|
Add printing of active/inactive pins
|
Add printing of active/inactive pins
|
Python
|
bsd-3-clause
|
raspberrypilearning/dots,RPi-Distro/python-rpi-dots
|
from RPi import GPIO
GPIO.setmode(GPIO.BCM)
num_pins = 28
pins = range(num_pins)
for pin in pins:
GPIO.setup(pin, GPIO.IN, GPIO.PUD_UP)
pin_states = {pin: GPIO.input(pin) for pin in pins}
print()
for pin, state in pin_states.items():
print("%2d: %s" % (pin, state))
active = sum(pin_states.values())
inactive = num_pins - active
print()
print("Total active: %i" % inactive)
print("Total inactive: %i" % active)
Add printing of active/inactive pins
|
from RPi import GPIO
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
num_pins = 28
pins = range(num_pins)
for pin in pins:
GPIO.setup(pin, GPIO.IN, GPIO.PUD_UP)
pin_states = {pin: GPIO.input(pin) for pin in pins}
print()
for pin, state in pin_states.items():
print("%2d: %s" % (pin, state))
active = [pin for pin, state in pin_states.items() if not state]
inactive = [pin for pin, state in pin_states.items() if state]
print()
print("Total active: %s" % len(active))
print("Total inactive: %s" % len(inactive))
print()
print("Active pins: %s" % str(active))
print("Inactive pins: %s" % str(inactive))
|
<commit_before>from RPi import GPIO
GPIO.setmode(GPIO.BCM)
num_pins = 28
pins = range(num_pins)
for pin in pins:
GPIO.setup(pin, GPIO.IN, GPIO.PUD_UP)
pin_states = {pin: GPIO.input(pin) for pin in pins}
print()
for pin, state in pin_states.items():
print("%2d: %s" % (pin, state))
active = sum(pin_states.values())
inactive = num_pins - active
print()
print("Total active: %i" % inactive)
print("Total inactive: %i" % active)
<commit_msg>Add printing of active/inactive pins<commit_after>
|
from RPi import GPIO
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
num_pins = 28
pins = range(num_pins)
for pin in pins:
GPIO.setup(pin, GPIO.IN, GPIO.PUD_UP)
pin_states = {pin: GPIO.input(pin) for pin in pins}
print()
for pin, state in pin_states.items():
print("%2d: %s" % (pin, state))
active = [pin for pin, state in pin_states.items() if not state]
inactive = [pin for pin, state in pin_states.items() if state]
print()
print("Total active: %s" % len(active))
print("Total inactive: %s" % len(inactive))
print()
print("Active pins: %s" % str(active))
print("Inactive pins: %s" % str(inactive))
|
from RPi import GPIO
GPIO.setmode(GPIO.BCM)
num_pins = 28
pins = range(num_pins)
for pin in pins:
GPIO.setup(pin, GPIO.IN, GPIO.PUD_UP)
pin_states = {pin: GPIO.input(pin) for pin in pins}
print()
for pin, state in pin_states.items():
print("%2d: %s" % (pin, state))
active = sum(pin_states.values())
inactive = num_pins - active
print()
print("Total active: %i" % inactive)
print("Total inactive: %i" % active)
Add printing of active/inactive pinsfrom RPi import GPIO
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
num_pins = 28
pins = range(num_pins)
for pin in pins:
GPIO.setup(pin, GPIO.IN, GPIO.PUD_UP)
pin_states = {pin: GPIO.input(pin) for pin in pins}
print()
for pin, state in pin_states.items():
print("%2d: %s" % (pin, state))
active = [pin for pin, state in pin_states.items() if not state]
inactive = [pin for pin, state in pin_states.items() if state]
print()
print("Total active: %s" % len(active))
print("Total inactive: %s" % len(inactive))
print()
print("Active pins: %s" % str(active))
print("Inactive pins: %s" % str(inactive))
|
<commit_before>from RPi import GPIO
GPIO.setmode(GPIO.BCM)
num_pins = 28
pins = range(num_pins)
for pin in pins:
GPIO.setup(pin, GPIO.IN, GPIO.PUD_UP)
pin_states = {pin: GPIO.input(pin) for pin in pins}
print()
for pin, state in pin_states.items():
print("%2d: %s" % (pin, state))
active = sum(pin_states.values())
inactive = num_pins - active
print()
print("Total active: %i" % inactive)
print("Total inactive: %i" % active)
<commit_msg>Add printing of active/inactive pins<commit_after>from RPi import GPIO
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
num_pins = 28
pins = range(num_pins)
for pin in pins:
GPIO.setup(pin, GPIO.IN, GPIO.PUD_UP)
pin_states = {pin: GPIO.input(pin) for pin in pins}
print()
for pin, state in pin_states.items():
print("%2d: %s" % (pin, state))
active = [pin for pin, state in pin_states.items() if not state]
inactive = [pin for pin, state in pin_states.items() if state]
print()
print("Total active: %s" % len(active))
print("Total inactive: %s" % len(inactive))
print()
print("Active pins: %s" % str(active))
print("Inactive pins: %s" % str(inactive))
|
5b00010451f9ea58936f98b72737a646d77e1bd9
|
server/tests/forms/test_RegistrationForm.py
|
server/tests/forms/test_RegistrationForm.py
|
import wtforms_json
import pytest
from forms.RegistrationForm import RegistrationForm
wtforms_json.init()
class TestRegistrationForm:
def test_valid(self):
json = {
'username': 'someusername',
'password': 'password',
'confirm': 'confirm',
'email': 'someemail@email.com'
}
form = RegistrationForm.from_json(json)
assert (form.validate())
|
import pytest
from forms.RegistrationForm import RegistrationForm
class TestRegistrationForm:
def test_valid(self):
json = {
'username': 'someusername',
'password': 'password',
'confirm': 'password',
'email': 'someemail@email.com'
}
form = RegistrationForm.from_json(json)
assert (form.validate())
def test_missing_username(self):
json = {
'password': 'password',
'confirm': 'password',
'email': 'someemail@email.com'
}
form = RegistrationForm.from_json(json)
assert form.validate() is False
def test_missing_password(self):
json = {
'username': 'someusername',
'confirm': 'password',
'email': 'someemail@email.com'
}
form = RegistrationForm.from_json(json)
assert form.validate() is False
def test_missing_confirm(self):
json = {
'username': 'someusername',
'password': 'password',
'email': 'someemail@email.com'
}
form = RegistrationForm.from_json(json)
assert form.validate() is False
def test_missing_email(self):
json = {
'username': 'someusername',
'password': 'password',
'confirm': 'password',
}
form = RegistrationForm.from_json(json)
assert form.validate() is False
def test_invalid_password_combination(self):
json = {
'username': 'someusername',
'password': 'password',
'confirm': 'WRONG',
'email': 'someemail@email.com'
}
form = RegistrationForm.from_json(json)
assert form.validate() is False
|
Add tests for failing registration forms
|
Add tests for failing registration forms
|
Python
|
mit
|
ganemone/ontheside,ganemone/ontheside,ganemone/ontheside
|
import wtforms_json
import pytest
from forms.RegistrationForm import RegistrationForm
wtforms_json.init()
class TestRegistrationForm:
def test_valid(self):
json = {
'username': 'someusername',
'password': 'password',
'confirm': 'confirm',
'email': 'someemail@email.com'
}
form = RegistrationForm.from_json(json)
assert (form.validate())
Add tests for failing registration forms
|
import pytest
from forms.RegistrationForm import RegistrationForm
class TestRegistrationForm:
    """Validation tests for RegistrationForm built from JSON payloads."""

    @staticmethod
    def _build(payload):
        # Single place to turn a raw dict into a bound form instance.
        return RegistrationForm.from_json(payload)

    def test_valid(self):
        payload = {
            'username': 'someusername',
            'password': 'password',
            'confirm': 'password',
            'email': 'someemail@email.com',
        }
        assert self._build(payload).validate()

    def test_missing_username(self):
        payload = {
            'password': 'password',
            'confirm': 'password',
            'email': 'someemail@email.com',
        }
        assert self._build(payload).validate() is False

    def test_missing_password(self):
        payload = {
            'username': 'someusername',
            'confirm': 'password',
            'email': 'someemail@email.com',
        }
        assert self._build(payload).validate() is False

    def test_missing_confirm(self):
        payload = {
            'username': 'someusername',
            'password': 'password',
            'email': 'someemail@email.com',
        }
        assert self._build(payload).validate() is False

    def test_missing_email(self):
        payload = {
            'username': 'someusername',
            'password': 'password',
            'confirm': 'password',
        }
        assert self._build(payload).validate() is False

    def test_invalid_password_combination(self):
        # 'confirm' deliberately disagrees with 'password'.
        payload = {
            'username': 'someusername',
            'password': 'password',
            'confirm': 'WRONG',
            'email': 'someemail@email.com',
        }
        assert self._build(payload).validate() is False
|
<commit_before>import wtforms_json
import pytest
from forms.RegistrationForm import RegistrationForm
wtforms_json.init()
class TestRegistrationForm:
def test_valid(self):
json = {
'username': 'someusername',
'password': 'password',
'confirm': 'confirm',
'email': 'someemail@email.com'
}
form = RegistrationForm.from_json(json)
assert (form.validate())
<commit_msg>Add tests for failing registration forms<commit_after>
|
import pytest
from forms.RegistrationForm import RegistrationForm
class TestRegistrationForm:
def test_valid(self):
json = {
'username': 'someusername',
'password': 'password',
'confirm': 'password',
'email': 'someemail@email.com'
}
form = RegistrationForm.from_json(json)
assert (form.validate())
def test_missing_username(self):
json = {
'password': 'password',
'confirm': 'password',
'email': 'someemail@email.com'
}
form = RegistrationForm.from_json(json)
assert form.validate() is False
def test_missing_password(self):
json = {
'username': 'someusername',
'confirm': 'password',
'email': 'someemail@email.com'
}
form = RegistrationForm.from_json(json)
assert form.validate() is False
def test_missing_confirm(self):
json = {
'username': 'someusername',
'password': 'password',
'email': 'someemail@email.com'
}
form = RegistrationForm.from_json(json)
assert form.validate() is False
def test_missing_email(self):
json = {
'username': 'someusername',
'password': 'password',
'confirm': 'password',
}
form = RegistrationForm.from_json(json)
assert form.validate() is False
def test_invalid_password_combination(self):
json = {
'username': 'someusername',
'password': 'password',
'confirm': 'WRONG',
'email': 'someemail@email.com'
}
form = RegistrationForm.from_json(json)
assert form.validate() is False
|
import wtforms_json
import pytest
from forms.RegistrationForm import RegistrationForm
wtforms_json.init()
class TestRegistrationForm:
def test_valid(self):
json = {
'username': 'someusername',
'password': 'password',
'confirm': 'confirm',
'email': 'someemail@email.com'
}
form = RegistrationForm.from_json(json)
assert (form.validate())
Add tests for failing registration formsimport pytest
from forms.RegistrationForm import RegistrationForm
class TestRegistrationForm:
def test_valid(self):
json = {
'username': 'someusername',
'password': 'password',
'confirm': 'password',
'email': 'someemail@email.com'
}
form = RegistrationForm.from_json(json)
assert (form.validate())
def test_missing_username(self):
json = {
'password': 'password',
'confirm': 'password',
'email': 'someemail@email.com'
}
form = RegistrationForm.from_json(json)
assert form.validate() is False
def test_missing_password(self):
json = {
'username': 'someusername',
'confirm': 'password',
'email': 'someemail@email.com'
}
form = RegistrationForm.from_json(json)
assert form.validate() is False
def test_missing_confirm(self):
json = {
'username': 'someusername',
'password': 'password',
'email': 'someemail@email.com'
}
form = RegistrationForm.from_json(json)
assert form.validate() is False
def test_missing_email(self):
json = {
'username': 'someusername',
'password': 'password',
'confirm': 'password',
}
form = RegistrationForm.from_json(json)
assert form.validate() is False
def test_invalid_password_combination(self):
json = {
'username': 'someusername',
'password': 'password',
'confirm': 'WRONG',
'email': 'someemail@email.com'
}
form = RegistrationForm.from_json(json)
assert form.validate() is False
|
<commit_before>import wtforms_json
import pytest
from forms.RegistrationForm import RegistrationForm
wtforms_json.init()
class TestRegistrationForm:
def test_valid(self):
json = {
'username': 'someusername',
'password': 'password',
'confirm': 'confirm',
'email': 'someemail@email.com'
}
form = RegistrationForm.from_json(json)
assert (form.validate())
<commit_msg>Add tests for failing registration forms<commit_after>import pytest
from forms.RegistrationForm import RegistrationForm
class TestRegistrationForm:
def test_valid(self):
json = {
'username': 'someusername',
'password': 'password',
'confirm': 'password',
'email': 'someemail@email.com'
}
form = RegistrationForm.from_json(json)
assert (form.validate())
def test_missing_username(self):
json = {
'password': 'password',
'confirm': 'password',
'email': 'someemail@email.com'
}
form = RegistrationForm.from_json(json)
assert form.validate() is False
def test_missing_password(self):
json = {
'username': 'someusername',
'confirm': 'password',
'email': 'someemail@email.com'
}
form = RegistrationForm.from_json(json)
assert form.validate() is False
def test_missing_confirm(self):
json = {
'username': 'someusername',
'password': 'password',
'email': 'someemail@email.com'
}
form = RegistrationForm.from_json(json)
assert form.validate() is False
def test_missing_email(self):
json = {
'username': 'someusername',
'password': 'password',
'confirm': 'password',
}
form = RegistrationForm.from_json(json)
assert form.validate() is False
def test_invalid_password_combination(self):
json = {
'username': 'someusername',
'password': 'password',
'confirm': 'WRONG',
'email': 'someemail@email.com'
}
form = RegistrationForm.from_json(json)
assert form.validate() is False
|
c31bc6f1b0782a7d9c409e233a363be651594006
|
exporters/decompressors.py
|
exporters/decompressors.py
|
from exporters.pipeline.base_pipeline_item import BasePipelineItem
import logging
import zlib
__all__ = ['BaseDecompressor', 'ZLibDecompressor', 'NoDecompressor']
class BaseDecompressor(BasePipelineItem):
def decompress(self):
raise NotImplementedError()
def create_decompressor():
# create zlib decompressor enabling automatic header detection:
# See: http://stackoverflow.com/a/22310760/149872
AUTOMATIC_HEADER_DETECTION_MASK = 32
return zlib.decompressobj(AUTOMATIC_HEADER_DETECTION_MASK | zlib.MAX_WBITS)
class ZLibDecompressor(BaseDecompressor):
def decompress(self, stream):
try:
dec = create_decompressor()
for chunk in stream:
rv = dec.decompress(chunk)
if rv:
yield rv
if dec.unused_data:
stream.unshift(dec.unused_data)
dec = create_decompressor()
except zlib.error as e:
logging.error('Error decoding stream using ZlibDecompressor')
if str(e).startswith('Error -3 '):
logging.error("Use NoDecompressor if you're using uncompressed input")
raise
class NoDecompressor(BaseDecompressor):
def decompress(self, stream):
return stream # Input already uncompressed
|
from exporters.pipeline.base_pipeline_item import BasePipelineItem
import sys
import zlib
import six
__all__ = ['BaseDecompressor', 'ZLibDecompressor', 'NoDecompressor']
class BaseDecompressor(BasePipelineItem):
def decompress(self):
raise NotImplementedError()
def create_decompressor():
# create zlib decompressor enabling automatic header detection:
# See: http://stackoverflow.com/a/22310760/149872
AUTOMATIC_HEADER_DETECTION_MASK = 32
return zlib.decompressobj(AUTOMATIC_HEADER_DETECTION_MASK | zlib.MAX_WBITS)
class ZLibDecompressor(BaseDecompressor):
def decompress(self, stream):
try:
dec = create_decompressor()
for chunk in stream:
rv = dec.decompress(chunk)
if rv:
yield rv
if dec.unused_data:
stream.unshift(dec.unused_data)
dec = create_decompressor()
except zlib.error as e:
msg = str(e)
if msg.startswith('Error -3 '):
msg += ". Use NoDecompressor if you're using uncompressed input."
six.reraise(zlib.error, zlib.error(msg), sys.exc_info()[2])
class NoDecompressor(BaseDecompressor):
def decompress(self, stream):
return stream # Input already uncompressed
|
Append information to the zlib error
|
Append information to the zlib error
|
Python
|
bsd-3-clause
|
scrapinghub/exporters
|
from exporters.pipeline.base_pipeline_item import BasePipelineItem
import logging
import zlib
__all__ = ['BaseDecompressor', 'ZLibDecompressor', 'NoDecompressor']
class BaseDecompressor(BasePipelineItem):
def decompress(self):
raise NotImplementedError()
def create_decompressor():
# create zlib decompressor enabling automatic header detection:
# See: http://stackoverflow.com/a/22310760/149872
AUTOMATIC_HEADER_DETECTION_MASK = 32
return zlib.decompressobj(AUTOMATIC_HEADER_DETECTION_MASK | zlib.MAX_WBITS)
class ZLibDecompressor(BaseDecompressor):
def decompress(self, stream):
try:
dec = create_decompressor()
for chunk in stream:
rv = dec.decompress(chunk)
if rv:
yield rv
if dec.unused_data:
stream.unshift(dec.unused_data)
dec = create_decompressor()
except zlib.error as e:
logging.error('Error decoding stream using ZlibDecompressor')
if str(e).startswith('Error -3 '):
logging.error("Use NoDecompressor if you're using uncompressed input")
raise
class NoDecompressor(BaseDecompressor):
def decompress(self, stream):
return stream # Input already uncompressed
Append information to the zlib error
|
from exporters.pipeline.base_pipeline_item import BasePipelineItem
import sys
import zlib
import six
__all__ = ['BaseDecompressor', 'ZLibDecompressor', 'NoDecompressor']
class BaseDecompressor(BasePipelineItem):
def decompress(self):
raise NotImplementedError()
def create_decompressor():
# create zlib decompressor enabling automatic header detection:
# See: http://stackoverflow.com/a/22310760/149872
AUTOMATIC_HEADER_DETECTION_MASK = 32
return zlib.decompressobj(AUTOMATIC_HEADER_DETECTION_MASK | zlib.MAX_WBITS)
class ZLibDecompressor(BaseDecompressor):
def decompress(self, stream):
try:
dec = create_decompressor()
for chunk in stream:
rv = dec.decompress(chunk)
if rv:
yield rv
if dec.unused_data:
stream.unshift(dec.unused_data)
dec = create_decompressor()
except zlib.error as e:
msg = str(e)
if msg.startswith('Error -3 '):
msg += ". Use NoDecompressor if you're using uncompressed input."
six.reraise(zlib.error, zlib.error(msg), sys.exc_info()[2])
class NoDecompressor(BaseDecompressor):
def decompress(self, stream):
return stream # Input already uncompressed
|
<commit_before>from exporters.pipeline.base_pipeline_item import BasePipelineItem
import logging
import zlib
__all__ = ['BaseDecompressor', 'ZLibDecompressor', 'NoDecompressor']
class BaseDecompressor(BasePipelineItem):
def decompress(self):
raise NotImplementedError()
def create_decompressor():
# create zlib decompressor enabling automatic header detection:
# See: http://stackoverflow.com/a/22310760/149872
AUTOMATIC_HEADER_DETECTION_MASK = 32
return zlib.decompressobj(AUTOMATIC_HEADER_DETECTION_MASK | zlib.MAX_WBITS)
class ZLibDecompressor(BaseDecompressor):
def decompress(self, stream):
try:
dec = create_decompressor()
for chunk in stream:
rv = dec.decompress(chunk)
if rv:
yield rv
if dec.unused_data:
stream.unshift(dec.unused_data)
dec = create_decompressor()
except zlib.error as e:
logging.error('Error decoding stream using ZlibDecompressor')
if str(e).startswith('Error -3 '):
logging.error("Use NoDecompressor if you're using uncompressed input")
raise
class NoDecompressor(BaseDecompressor):
def decompress(self, stream):
return stream # Input already uncompressed
<commit_msg>Append information to the zlib error<commit_after>
|
from exporters.pipeline.base_pipeline_item import BasePipelineItem
import sys
import zlib
import six
__all__ = ['BaseDecompressor', 'ZLibDecompressor', 'NoDecompressor']
class BaseDecompressor(BasePipelineItem):
    """Interface for stream decompressors used in the export pipeline."""

    def decompress(self):
        """Subclasses must override this with the actual decompression logic."""
        raise NotImplementedError()
def create_decompressor():
    """Build a zlib decompressor that auto-detects zlib and gzip headers.

    OR-ing 32 into the window-bits argument enables automatic header
    detection; see http://stackoverflow.com/a/22310760/149872
    """
    auto_header_detection = 32
    return zlib.decompressobj(zlib.MAX_WBITS | auto_header_detection)
class ZLibDecompressor(BaseDecompressor):
    # Decompresses a chunked byte stream, transparently restarting on
    # leftover data so concatenated zlib/gzip members are all decoded.
    def decompress(self, stream):
        """Yield decompressed chunks read lazily from *stream*.

        NOTE(review): assumes *stream* is an iterable of byte chunks that
        also supports ``unshift`` to push bytes back — confirm against the
        project's stream type.
        """
        try:
            dec = create_decompressor()
            for chunk in stream:
                rv = dec.decompress(chunk)
                if rv:
                    yield rv
                if dec.unused_data:
                    # Bytes past the end of the current compressed member:
                    # push them back and start a fresh decompressor so the
                    # next member is handled too.
                    stream.unshift(dec.unused_data)
                    dec = create_decompressor()
        except zlib.error as e:
            msg = str(e)
            if msg.startswith('Error -3 '):
                # Error -3 typically means the input was not compressed.
                msg += ". Use NoDecompressor if you're using uncompressed input."
            # Re-raise with the augmented message while preserving the
            # original traceback (py2/py3 compatible via six).
            six.reraise(zlib.error, zlib.error(msg), sys.exc_info()[2])
class NoDecompressor(BaseDecompressor):
    """Pass-through decompressor for input that is already uncompressed."""

    def decompress(self, stream):
        # Nothing to decode: hand the stream back unchanged.
        return stream
|
from exporters.pipeline.base_pipeline_item import BasePipelineItem
import logging
import zlib
__all__ = ['BaseDecompressor', 'ZLibDecompressor', 'NoDecompressor']
class BaseDecompressor(BasePipelineItem):
def decompress(self):
raise NotImplementedError()
def create_decompressor():
# create zlib decompressor enabling automatic header detection:
# See: http://stackoverflow.com/a/22310760/149872
AUTOMATIC_HEADER_DETECTION_MASK = 32
return zlib.decompressobj(AUTOMATIC_HEADER_DETECTION_MASK | zlib.MAX_WBITS)
class ZLibDecompressor(BaseDecompressor):
def decompress(self, stream):
try:
dec = create_decompressor()
for chunk in stream:
rv = dec.decompress(chunk)
if rv:
yield rv
if dec.unused_data:
stream.unshift(dec.unused_data)
dec = create_decompressor()
except zlib.error as e:
logging.error('Error decoding stream using ZlibDecompressor')
if str(e).startswith('Error -3 '):
logging.error("Use NoDecompressor if you're using uncompressed input")
raise
class NoDecompressor(BaseDecompressor):
def decompress(self, stream):
return stream # Input already uncompressed
Append information to the zlib errorfrom exporters.pipeline.base_pipeline_item import BasePipelineItem
import sys
import zlib
import six
__all__ = ['BaseDecompressor', 'ZLibDecompressor', 'NoDecompressor']
class BaseDecompressor(BasePipelineItem):
def decompress(self):
raise NotImplementedError()
def create_decompressor():
# create zlib decompressor enabling automatic header detection:
# See: http://stackoverflow.com/a/22310760/149872
AUTOMATIC_HEADER_DETECTION_MASK = 32
return zlib.decompressobj(AUTOMATIC_HEADER_DETECTION_MASK | zlib.MAX_WBITS)
class ZLibDecompressor(BaseDecompressor):
def decompress(self, stream):
try:
dec = create_decompressor()
for chunk in stream:
rv = dec.decompress(chunk)
if rv:
yield rv
if dec.unused_data:
stream.unshift(dec.unused_data)
dec = create_decompressor()
except zlib.error as e:
msg = str(e)
if msg.startswith('Error -3 '):
msg += ". Use NoDecompressor if you're using uncompressed input."
six.reraise(zlib.error, zlib.error(msg), sys.exc_info()[2])
class NoDecompressor(BaseDecompressor):
def decompress(self, stream):
return stream # Input already uncompressed
|
<commit_before>from exporters.pipeline.base_pipeline_item import BasePipelineItem
import logging
import zlib
__all__ = ['BaseDecompressor', 'ZLibDecompressor', 'NoDecompressor']
class BaseDecompressor(BasePipelineItem):
def decompress(self):
raise NotImplementedError()
def create_decompressor():
# create zlib decompressor enabling automatic header detection:
# See: http://stackoverflow.com/a/22310760/149872
AUTOMATIC_HEADER_DETECTION_MASK = 32
return zlib.decompressobj(AUTOMATIC_HEADER_DETECTION_MASK | zlib.MAX_WBITS)
class ZLibDecompressor(BaseDecompressor):
def decompress(self, stream):
try:
dec = create_decompressor()
for chunk in stream:
rv = dec.decompress(chunk)
if rv:
yield rv
if dec.unused_data:
stream.unshift(dec.unused_data)
dec = create_decompressor()
except zlib.error as e:
logging.error('Error decoding stream using ZlibDecompressor')
if str(e).startswith('Error -3 '):
logging.error("Use NoDecompressor if you're using uncompressed input")
raise
class NoDecompressor(BaseDecompressor):
def decompress(self, stream):
return stream # Input already uncompressed
<commit_msg>Append information to the zlib error<commit_after>from exporters.pipeline.base_pipeline_item import BasePipelineItem
import sys
import zlib
import six
__all__ = ['BaseDecompressor', 'ZLibDecompressor', 'NoDecompressor']
class BaseDecompressor(BasePipelineItem):
def decompress(self):
raise NotImplementedError()
def create_decompressor():
# create zlib decompressor enabling automatic header detection:
# See: http://stackoverflow.com/a/22310760/149872
AUTOMATIC_HEADER_DETECTION_MASK = 32
return zlib.decompressobj(AUTOMATIC_HEADER_DETECTION_MASK | zlib.MAX_WBITS)
class ZLibDecompressor(BaseDecompressor):
def decompress(self, stream):
try:
dec = create_decompressor()
for chunk in stream:
rv = dec.decompress(chunk)
if rv:
yield rv
if dec.unused_data:
stream.unshift(dec.unused_data)
dec = create_decompressor()
except zlib.error as e:
msg = str(e)
if msg.startswith('Error -3 '):
msg += ". Use NoDecompressor if you're using uncompressed input."
six.reraise(zlib.error, zlib.error(msg), sys.exc_info()[2])
class NoDecompressor(BaseDecompressor):
def decompress(self, stream):
return stream # Input already uncompressed
|
761aff647d3e20fc25f1911efa5d2235fe4b21d8
|
modoboa/extensions/admin/forms/forward.py
|
modoboa/extensions/admin/forms/forward.py
|
from django import forms
from django.utils.translation import ugettext as _, ugettext_lazy
from modoboa.lib.exceptions import BadRequest, PermDeniedException
from modoboa.lib.emailutils import split_mailbox
from modoboa.extensions.admin.models import (
Domain
)
class ForwardForm(forms.Form):
dest = forms.CharField(
label=ugettext_lazy("Recipient(s)"),
widget=forms.Textarea,
required=False,
help_text=ugettext_lazy("Indicate one or more recipients separated by a ','")
)
keepcopies = forms.BooleanField(
label=ugettext_lazy("Keep local copies"),
required=False,
help_text=ugettext_lazy("Forward messages and store copies into your local mailbox")
)
def parse_dest(self):
self.dests = []
rawdata = self.cleaned_data["dest"].strip()
if rawdata == "":
return
for d in rawdata.split(","):
local_part, domname = split_mailbox(d)
if not local_part or not domname or not len(domname):
raise BadRequest("Invalid mailbox syntax for %s" % d)
try:
Domain.objects.get(name=domname)
except Domain.DoesNotExist:
self.dests += [d]
else:
raise PermDeniedException(
_("You can't define a forward to a local destination. "
"Please ask your administrator to create an alias "
"instead.")
)
|
from django import forms
from django.utils.translation import ugettext as _, ugettext_lazy
from modoboa.lib.exceptions import BadRequest, PermDeniedException
from modoboa.lib.emailutils import split_mailbox
from modoboa.extensions.admin.models import (
Domain
)
class ForwardForm(forms.Form):
dest = forms.CharField(
label=ugettext_lazy("Recipient(s)"),
widget=forms.Textarea(attrs={"class": "form-control"}),
required=False,
help_text=ugettext_lazy("Indicate one or more recipients separated by a ','")
)
keepcopies = forms.BooleanField(
label=ugettext_lazy("Keep local copies"),
required=False,
help_text=ugettext_lazy("Forward messages and store copies into your local mailbox")
)
def parse_dest(self):
self.dests = []
rawdata = self.cleaned_data["dest"].strip()
if rawdata == "":
return
for d in rawdata.split(","):
local_part, domname = split_mailbox(d)
if not local_part or not domname or not len(domname):
raise BadRequest("Invalid mailbox syntax for %s" % d)
try:
Domain.objects.get(name=domname)
except Domain.DoesNotExist:
self.dests += [d]
else:
raise PermDeniedException(
_("You can't define a forward to a local destination. "
"Please ask your administrator to create an alias "
"instead.")
)
|
Add "form-control" attribute to some textareas
|
Add "form-control" attribute to some textareas
|
Python
|
isc
|
modoboa/modoboa,RavenB/modoboa,mehulsbhatt/modoboa,bearstech/modoboa,modoboa/modoboa,RavenB/modoboa,tonioo/modoboa,bearstech/modoboa,mehulsbhatt/modoboa,bearstech/modoboa,carragom/modoboa,modoboa/modoboa,tonioo/modoboa,carragom/modoboa,modoboa/modoboa,bearstech/modoboa,RavenB/modoboa,carragom/modoboa,tonioo/modoboa,mehulsbhatt/modoboa
|
from django import forms
from django.utils.translation import ugettext as _, ugettext_lazy
from modoboa.lib.exceptions import BadRequest, PermDeniedException
from modoboa.lib.emailutils import split_mailbox
from modoboa.extensions.admin.models import (
Domain
)
class ForwardForm(forms.Form):
dest = forms.CharField(
label=ugettext_lazy("Recipient(s)"),
widget=forms.Textarea,
required=False,
help_text=ugettext_lazy("Indicate one or more recipients separated by a ','")
)
keepcopies = forms.BooleanField(
label=ugettext_lazy("Keep local copies"),
required=False,
help_text=ugettext_lazy("Forward messages and store copies into your local mailbox")
)
def parse_dest(self):
self.dests = []
rawdata = self.cleaned_data["dest"].strip()
if rawdata == "":
return
for d in rawdata.split(","):
local_part, domname = split_mailbox(d)
if not local_part or not domname or not len(domname):
raise BadRequest("Invalid mailbox syntax for %s" % d)
try:
Domain.objects.get(name=domname)
except Domain.DoesNotExist:
self.dests += [d]
else:
raise PermDeniedException(
_("You can't define a forward to a local destination. "
"Please ask your administrator to create an alias "
"instead.")
)
Add "form-control" attribute to some textareas
|
from django import forms
from django.utils.translation import ugettext as _, ugettext_lazy
from modoboa.lib.exceptions import BadRequest, PermDeniedException
from modoboa.lib.emailutils import split_mailbox
from modoboa.extensions.admin.models import (
Domain
)
class ForwardForm(forms.Form):
dest = forms.CharField(
label=ugettext_lazy("Recipient(s)"),
widget=forms.Textarea(attrs={"class": "form-control"}),
required=False,
help_text=ugettext_lazy("Indicate one or more recipients separated by a ','")
)
keepcopies = forms.BooleanField(
label=ugettext_lazy("Keep local copies"),
required=False,
help_text=ugettext_lazy("Forward messages and store copies into your local mailbox")
)
def parse_dest(self):
self.dests = []
rawdata = self.cleaned_data["dest"].strip()
if rawdata == "":
return
for d in rawdata.split(","):
local_part, domname = split_mailbox(d)
if not local_part or not domname or not len(domname):
raise BadRequest("Invalid mailbox syntax for %s" % d)
try:
Domain.objects.get(name=domname)
except Domain.DoesNotExist:
self.dests += [d]
else:
raise PermDeniedException(
_("You can't define a forward to a local destination. "
"Please ask your administrator to create an alias "
"instead.")
)
|
<commit_before>from django import forms
from django.utils.translation import ugettext as _, ugettext_lazy
from modoboa.lib.exceptions import BadRequest, PermDeniedException
from modoboa.lib.emailutils import split_mailbox
from modoboa.extensions.admin.models import (
Domain
)
class ForwardForm(forms.Form):
dest = forms.CharField(
label=ugettext_lazy("Recipient(s)"),
widget=forms.Textarea,
required=False,
help_text=ugettext_lazy("Indicate one or more recipients separated by a ','")
)
keepcopies = forms.BooleanField(
label=ugettext_lazy("Keep local copies"),
required=False,
help_text=ugettext_lazy("Forward messages and store copies into your local mailbox")
)
def parse_dest(self):
self.dests = []
rawdata = self.cleaned_data["dest"].strip()
if rawdata == "":
return
for d in rawdata.split(","):
local_part, domname = split_mailbox(d)
if not local_part or not domname or not len(domname):
raise BadRequest("Invalid mailbox syntax for %s" % d)
try:
Domain.objects.get(name=domname)
except Domain.DoesNotExist:
self.dests += [d]
else:
raise PermDeniedException(
_("You can't define a forward to a local destination. "
"Please ask your administrator to create an alias "
"instead.")
)
<commit_msg>Add "form-control" attribute to some textareas<commit_after>
|
from django import forms
from django.utils.translation import ugettext as _, ugettext_lazy
from modoboa.lib.exceptions import BadRequest, PermDeniedException
from modoboa.lib.emailutils import split_mailbox
from modoboa.extensions.admin.models import (
Domain
)
class ForwardForm(forms.Form):
dest = forms.CharField(
label=ugettext_lazy("Recipient(s)"),
widget=forms.Textarea(attrs={"class": "form-control"}),
required=False,
help_text=ugettext_lazy("Indicate one or more recipients separated by a ','")
)
keepcopies = forms.BooleanField(
label=ugettext_lazy("Keep local copies"),
required=False,
help_text=ugettext_lazy("Forward messages and store copies into your local mailbox")
)
def parse_dest(self):
self.dests = []
rawdata = self.cleaned_data["dest"].strip()
if rawdata == "":
return
for d in rawdata.split(","):
local_part, domname = split_mailbox(d)
if not local_part or not domname or not len(domname):
raise BadRequest("Invalid mailbox syntax for %s" % d)
try:
Domain.objects.get(name=domname)
except Domain.DoesNotExist:
self.dests += [d]
else:
raise PermDeniedException(
_("You can't define a forward to a local destination. "
"Please ask your administrator to create an alias "
"instead.")
)
|
from django import forms
from django.utils.translation import ugettext as _, ugettext_lazy
from modoboa.lib.exceptions import BadRequest, PermDeniedException
from modoboa.lib.emailutils import split_mailbox
from modoboa.extensions.admin.models import (
Domain
)
class ForwardForm(forms.Form):
dest = forms.CharField(
label=ugettext_lazy("Recipient(s)"),
widget=forms.Textarea,
required=False,
help_text=ugettext_lazy("Indicate one or more recipients separated by a ','")
)
keepcopies = forms.BooleanField(
label=ugettext_lazy("Keep local copies"),
required=False,
help_text=ugettext_lazy("Forward messages and store copies into your local mailbox")
)
def parse_dest(self):
self.dests = []
rawdata = self.cleaned_data["dest"].strip()
if rawdata == "":
return
for d in rawdata.split(","):
local_part, domname = split_mailbox(d)
if not local_part or not domname or not len(domname):
raise BadRequest("Invalid mailbox syntax for %s" % d)
try:
Domain.objects.get(name=domname)
except Domain.DoesNotExist:
self.dests += [d]
else:
raise PermDeniedException(
_("You can't define a forward to a local destination. "
"Please ask your administrator to create an alias "
"instead.")
)
Add "form-control" attribute to some textareasfrom django import forms
from django.utils.translation import ugettext as _, ugettext_lazy
from modoboa.lib.exceptions import BadRequest, PermDeniedException
from modoboa.lib.emailutils import split_mailbox
from modoboa.extensions.admin.models import (
Domain
)
class ForwardForm(forms.Form):
dest = forms.CharField(
label=ugettext_lazy("Recipient(s)"),
widget=forms.Textarea(attrs={"class": "form-control"}),
required=False,
help_text=ugettext_lazy("Indicate one or more recipients separated by a ','")
)
keepcopies = forms.BooleanField(
label=ugettext_lazy("Keep local copies"),
required=False,
help_text=ugettext_lazy("Forward messages and store copies into your local mailbox")
)
def parse_dest(self):
self.dests = []
rawdata = self.cleaned_data["dest"].strip()
if rawdata == "":
return
for d in rawdata.split(","):
local_part, domname = split_mailbox(d)
if not local_part or not domname or not len(domname):
raise BadRequest("Invalid mailbox syntax for %s" % d)
try:
Domain.objects.get(name=domname)
except Domain.DoesNotExist:
self.dests += [d]
else:
raise PermDeniedException(
_("You can't define a forward to a local destination. "
"Please ask your administrator to create an alias "
"instead.")
)
|
<commit_before>from django import forms
from django.utils.translation import ugettext as _, ugettext_lazy
from modoboa.lib.exceptions import BadRequest, PermDeniedException
from modoboa.lib.emailutils import split_mailbox
from modoboa.extensions.admin.models import (
Domain
)
class ForwardForm(forms.Form):
dest = forms.CharField(
label=ugettext_lazy("Recipient(s)"),
widget=forms.Textarea,
required=False,
help_text=ugettext_lazy("Indicate one or more recipients separated by a ','")
)
keepcopies = forms.BooleanField(
label=ugettext_lazy("Keep local copies"),
required=False,
help_text=ugettext_lazy("Forward messages and store copies into your local mailbox")
)
def parse_dest(self):
self.dests = []
rawdata = self.cleaned_data["dest"].strip()
if rawdata == "":
return
for d in rawdata.split(","):
local_part, domname = split_mailbox(d)
if not local_part or not domname or not len(domname):
raise BadRequest("Invalid mailbox syntax for %s" % d)
try:
Domain.objects.get(name=domname)
except Domain.DoesNotExist:
self.dests += [d]
else:
raise PermDeniedException(
_("You can't define a forward to a local destination. "
"Please ask your administrator to create an alias "
"instead.")
)
<commit_msg>Add "form-control" attribute to some textareas<commit_after>from django import forms
from django.utils.translation import ugettext as _, ugettext_lazy
from modoboa.lib.exceptions import BadRequest, PermDeniedException
from modoboa.lib.emailutils import split_mailbox
from modoboa.extensions.admin.models import (
Domain
)
class ForwardForm(forms.Form):
    """Form letting a user forward their messages to external recipients."""

    dest = forms.CharField(
        label=ugettext_lazy("Recipient(s)"),
        widget=forms.Textarea(attrs={"class": "form-control"}),
        required=False,
        help_text=ugettext_lazy("Indicate one or more recipients separated by a ','")
    )
    keepcopies = forms.BooleanField(
        label=ugettext_lazy("Keep local copies"),
        required=False,
        help_text=ugettext_lazy("Forward messages and store copies into your local mailbox")
    )

    def parse_dest(self):
        """Validate and collect forward destinations into ``self.dests``.

        Raises:
            BadRequest: a destination is not a valid ``local@domain`` address.
            PermDeniedException: a destination belongs to a locally hosted
                domain (an alias should be used instead).
        """
        self.dests = []
        rawdata = self.cleaned_data["dest"].strip()
        if rawdata == "":
            return
        for d in rawdata.split(","):
            local_part, domname = split_mailbox(d)
            # A falsy domname already covers the empty string, so the
            # former extra len() check was redundant and has been dropped.
            if not local_part or not domname:
                raise BadRequest("Invalid mailbox syntax for %s" % d)
            try:
                Domain.objects.get(name=domname)
            except Domain.DoesNotExist:
                # Unknown domain locally -> genuine external forward.
                self.dests.append(d)
            else:
                raise PermDeniedException(
                    _("You can't define a forward to a local destination. "
                      "Please ask your administrator to create an alias "
                      "instead.")
                )
|
a4163d9c1d1b2ce196b582eadc7befd545f804f1
|
corehq/ex-submodules/pillowtop/dao/interface.py
|
corehq/ex-submodules/pillowtop/dao/interface.py
|
from __future__ import unicode_literals
from abc import ABCMeta, abstractmethod
class DocumentStore(object):
"""
Very basic implementation of a document store.
"""
__metaclass__ = ABCMeta
@abstractmethod
def get_document(self, doc_id):
pass
@abstractmethod
def save_document(self, doc_id, document):
pass
@abstractmethod
def delete_document(self, doc_id):
pass
def iter_document_ids(self):
# todo: can convert to @abstractmethod once subclasses handle it
raise NotImplementedError('this function not yet implemented')
def iter_documents(self, ids):
# todo: can convert to @abstractmethod once subclasses handle it
raise NotImplementedError('this function not yet implemented')
class ReadOnlyDocumentStore(DocumentStore):
def save_document(self, doc_id, document):
raise NotImplementedError('This document store is read only!')
def delete_document(self, doc_id):
raise NotImplementedError('This document store is read only!')
|
from __future__ import unicode_literals
from abc import ABCMeta, abstractmethod
class DocumentStore(object):
"""
Very basic implementation of a document store.
"""
__metaclass__ = ABCMeta
@abstractmethod
def get_document(self, doc_id):
pass
@abstractmethod
def save_document(self, doc_id, document):
pass
@abstractmethod
def delete_document(self, doc_id):
pass
def iter_document_ids(self, last_id=None):
# todo: can convert to @abstractmethod once subclasses handle it
raise NotImplementedError('this function not yet implemented')
def iter_documents(self, ids):
# todo: can convert to @abstractmethod once subclasses handle it
raise NotImplementedError('this function not yet implemented')
class ReadOnlyDocumentStore(DocumentStore):
def save_document(self, doc_id, document):
raise NotImplementedError('This document store is read only!')
def delete_document(self, doc_id):
raise NotImplementedError('This document store is read only!')
|
Modify abstract parameter to match its children
|
Modify abstract parameter to match its children
All instances of this already accept the parameter, so we should codify
it as part of the spec.
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
from __future__ import unicode_literals
from abc import ABCMeta, abstractmethod
class DocumentStore(object):
"""
Very basic implementation of a document store.
"""
__metaclass__ = ABCMeta
@abstractmethod
def get_document(self, doc_id):
pass
@abstractmethod
def save_document(self, doc_id, document):
pass
@abstractmethod
def delete_document(self, doc_id):
pass
def iter_document_ids(self):
# todo: can convert to @abstractmethod once subclasses handle it
raise NotImplementedError('this function not yet implemented')
def iter_documents(self, ids):
# todo: can convert to @abstractmethod once subclasses handle it
raise NotImplementedError('this function not yet implemented')
class ReadOnlyDocumentStore(DocumentStore):
def save_document(self, doc_id, document):
raise NotImplementedError('This document store is read only!')
def delete_document(self, doc_id):
raise NotImplementedError('This document store is read only!')
Modify abstract parameter to match its children
All instances of this already accept the parameter, so we should codify
it as part of the spec.
|
from __future__ import unicode_literals
from abc import ABCMeta, abstractmethod
class DocumentStore(object):
"""
Very basic implementation of a document store.
"""
__metaclass__ = ABCMeta
@abstractmethod
def get_document(self, doc_id):
pass
@abstractmethod
def save_document(self, doc_id, document):
pass
@abstractmethod
def delete_document(self, doc_id):
pass
def iter_document_ids(self, last_id=None):
# todo: can convert to @abstractmethod once subclasses handle it
raise NotImplementedError('this function not yet implemented')
def iter_documents(self, ids):
# todo: can convert to @abstractmethod once subclasses handle it
raise NotImplementedError('this function not yet implemented')
class ReadOnlyDocumentStore(DocumentStore):
def save_document(self, doc_id, document):
raise NotImplementedError('This document store is read only!')
def delete_document(self, doc_id):
raise NotImplementedError('This document store is read only!')
|
<commit_before>from __future__ import unicode_literals
from abc import ABCMeta, abstractmethod
class DocumentStore(object):
"""
Very basic implementation of a document store.
"""
__metaclass__ = ABCMeta
@abstractmethod
def get_document(self, doc_id):
pass
@abstractmethod
def save_document(self, doc_id, document):
pass
@abstractmethod
def delete_document(self, doc_id):
pass
def iter_document_ids(self):
# todo: can convert to @abstractmethod once subclasses handle it
raise NotImplementedError('this function not yet implemented')
def iter_documents(self, ids):
# todo: can convert to @abstractmethod once subclasses handle it
raise NotImplementedError('this function not yet implemented')
class ReadOnlyDocumentStore(DocumentStore):
def save_document(self, doc_id, document):
raise NotImplementedError('This document store is read only!')
def delete_document(self, doc_id):
raise NotImplementedError('This document store is read only!')
<commit_msg>Modify abstract parameter to match its children
All instances of this already accept the parameter, so we should codify
it as part of the spec.<commit_after>
|
from __future__ import unicode_literals
from abc import ABCMeta, abstractmethod
class DocumentStore(object):
"""
Very basic implementation of a document store.
"""
__metaclass__ = ABCMeta
@abstractmethod
def get_document(self, doc_id):
pass
@abstractmethod
def save_document(self, doc_id, document):
pass
@abstractmethod
def delete_document(self, doc_id):
pass
def iter_document_ids(self, last_id=None):
# todo: can convert to @abstractmethod once subclasses handle it
raise NotImplementedError('this function not yet implemented')
def iter_documents(self, ids):
# todo: can convert to @abstractmethod once subclasses handle it
raise NotImplementedError('this function not yet implemented')
class ReadOnlyDocumentStore(DocumentStore):
def save_document(self, doc_id, document):
raise NotImplementedError('This document store is read only!')
def delete_document(self, doc_id):
raise NotImplementedError('This document store is read only!')
|
from __future__ import unicode_literals
from abc import ABCMeta, abstractmethod
class DocumentStore(object):
"""
Very basic implementation of a document store.
"""
__metaclass__ = ABCMeta
@abstractmethod
def get_document(self, doc_id):
pass
@abstractmethod
def save_document(self, doc_id, document):
pass
@abstractmethod
def delete_document(self, doc_id):
pass
def iter_document_ids(self):
# todo: can convert to @abstractmethod once subclasses handle it
raise NotImplementedError('this function not yet implemented')
def iter_documents(self, ids):
# todo: can convert to @abstractmethod once subclasses handle it
raise NotImplementedError('this function not yet implemented')
class ReadOnlyDocumentStore(DocumentStore):
def save_document(self, doc_id, document):
raise NotImplementedError('This document store is read only!')
def delete_document(self, doc_id):
raise NotImplementedError('This document store is read only!')
Modify abstract parameter to match its children
All instances of this already accept the parameter, so we should codify
it as part of the spec.from __future__ import unicode_literals
from abc import ABCMeta, abstractmethod
class DocumentStore(object):
"""
Very basic implementation of a document store.
"""
__metaclass__ = ABCMeta
@abstractmethod
def get_document(self, doc_id):
pass
@abstractmethod
def save_document(self, doc_id, document):
pass
@abstractmethod
def delete_document(self, doc_id):
pass
def iter_document_ids(self, last_id=None):
# todo: can convert to @abstractmethod once subclasses handle it
raise NotImplementedError('this function not yet implemented')
def iter_documents(self, ids):
# todo: can convert to @abstractmethod once subclasses handle it
raise NotImplementedError('this function not yet implemented')
class ReadOnlyDocumentStore(DocumentStore):
def save_document(self, doc_id, document):
raise NotImplementedError('This document store is read only!')
def delete_document(self, doc_id):
raise NotImplementedError('This document store is read only!')
|
<commit_before>from __future__ import unicode_literals
from abc import ABCMeta, abstractmethod
class DocumentStore(object):
"""
Very basic implementation of a document store.
"""
__metaclass__ = ABCMeta
@abstractmethod
def get_document(self, doc_id):
pass
@abstractmethod
def save_document(self, doc_id, document):
pass
@abstractmethod
def delete_document(self, doc_id):
pass
def iter_document_ids(self):
# todo: can convert to @abstractmethod once subclasses handle it
raise NotImplementedError('this function not yet implemented')
def iter_documents(self, ids):
# todo: can convert to @abstractmethod once subclasses handle it
raise NotImplementedError('this function not yet implemented')
class ReadOnlyDocumentStore(DocumentStore):
def save_document(self, doc_id, document):
raise NotImplementedError('This document store is read only!')
def delete_document(self, doc_id):
raise NotImplementedError('This document store is read only!')
<commit_msg>Modify abstract parameter to match its children
All instances of this already accept the parameter, so we should codify
it as part of the spec.<commit_after>from __future__ import unicode_literals
from abc import ABCMeta, abstractmethod
class DocumentStore(object):
"""
Very basic implementation of a document store.
"""
__metaclass__ = ABCMeta
@abstractmethod
def get_document(self, doc_id):
pass
@abstractmethod
def save_document(self, doc_id, document):
pass
@abstractmethod
def delete_document(self, doc_id):
pass
def iter_document_ids(self, last_id=None):
# todo: can convert to @abstractmethod once subclasses handle it
raise NotImplementedError('this function not yet implemented')
def iter_documents(self, ids):
# todo: can convert to @abstractmethod once subclasses handle it
raise NotImplementedError('this function not yet implemented')
class ReadOnlyDocumentStore(DocumentStore):
def save_document(self, doc_id, document):
raise NotImplementedError('This document store is read only!')
def delete_document(self, doc_id):
raise NotImplementedError('This document store is read only!')
|
8cfdda81d12845ad0e76f7a087995080a5420bfb
|
test/TestLineNumber.py
|
test/TestLineNumber.py
|
# Copyright (c) 2020 Albin Vass <albin.vass@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ansiblelint.rules.SudoRule import SudoRule
TEST_TASKLIST = """
- debug:
msg: test
- command: echo test
sudo: true
"""
def test_rule_linenumber(monkeypatch):
rule = SudoRule()
matches = rule.matchyaml(dict(path="", type='tasklist'), TEST_TASKLIST)
assert matches[0].linenumber == 5
|
# Copyright (c) 2020 Albin Vass <albin.vass@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ansiblelint.rules.SudoRule import SudoRule
TEST_TASKLIST = """
- debug:
msg: test
- command: echo test
sudo: true
"""
def test_rule_linenumber(monkeypatch):
"""Check that SudoRule offense contains a line number."""
rule = SudoRule()
matches = rule.matchyaml(dict(path="", type='tasklist'), TEST_TASKLIST)
assert matches[0].linenumber == 5
|
Add a docstring for `test_rule_linenumber`
|
Add a docstring for `test_rule_linenumber`
|
Python
|
mit
|
willthames/ansible-lint
|
# Copyright (c) 2020 Albin Vass <albin.vass@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ansiblelint.rules.SudoRule import SudoRule
TEST_TASKLIST = """
- debug:
msg: test
- command: echo test
sudo: true
"""
def test_rule_linenumber(monkeypatch):
rule = SudoRule()
matches = rule.matchyaml(dict(path="", type='tasklist'), TEST_TASKLIST)
assert matches[0].linenumber == 5
Add a docstring for `test_rule_linenumber`
|
# Copyright (c) 2020 Albin Vass <albin.vass@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ansiblelint.rules.SudoRule import SudoRule
TEST_TASKLIST = """
- debug:
msg: test
- command: echo test
sudo: true
"""
def test_rule_linenumber(monkeypatch):
"""Check that SudoRule offense contains a line number."""
rule = SudoRule()
matches = rule.matchyaml(dict(path="", type='tasklist'), TEST_TASKLIST)
assert matches[0].linenumber == 5
|
<commit_before># Copyright (c) 2020 Albin Vass <albin.vass@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ansiblelint.rules.SudoRule import SudoRule
TEST_TASKLIST = """
- debug:
msg: test
- command: echo test
sudo: true
"""
def test_rule_linenumber(monkeypatch):
rule = SudoRule()
matches = rule.matchyaml(dict(path="", type='tasklist'), TEST_TASKLIST)
assert matches[0].linenumber == 5
<commit_msg>Add a docstring for `test_rule_linenumber`<commit_after>
|
# Copyright (c) 2020 Albin Vass <albin.vass@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ansiblelint.rules.SudoRule import SudoRule
TEST_TASKLIST = """
- debug:
msg: test
- command: echo test
sudo: true
"""
def test_rule_linenumber(monkeypatch):
"""Check that SudoRule offense contains a line number."""
rule = SudoRule()
matches = rule.matchyaml(dict(path="", type='tasklist'), TEST_TASKLIST)
assert matches[0].linenumber == 5
|
# Copyright (c) 2020 Albin Vass <albin.vass@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ansiblelint.rules.SudoRule import SudoRule
TEST_TASKLIST = """
- debug:
msg: test
- command: echo test
sudo: true
"""
def test_rule_linenumber(monkeypatch):
rule = SudoRule()
matches = rule.matchyaml(dict(path="", type='tasklist'), TEST_TASKLIST)
assert matches[0].linenumber == 5
Add a docstring for `test_rule_linenumber`# Copyright (c) 2020 Albin Vass <albin.vass@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ansiblelint.rules.SudoRule import SudoRule
TEST_TASKLIST = """
- debug:
msg: test
- command: echo test
sudo: true
"""
def test_rule_linenumber(monkeypatch):
"""Check that SudoRule offense contains a line number."""
rule = SudoRule()
matches = rule.matchyaml(dict(path="", type='tasklist'), TEST_TASKLIST)
assert matches[0].linenumber == 5
|
<commit_before># Copyright (c) 2020 Albin Vass <albin.vass@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ansiblelint.rules.SudoRule import SudoRule
TEST_TASKLIST = """
- debug:
msg: test
- command: echo test
sudo: true
"""
def test_rule_linenumber(monkeypatch):
rule = SudoRule()
matches = rule.matchyaml(dict(path="", type='tasklist'), TEST_TASKLIST)
assert matches[0].linenumber == 5
<commit_msg>Add a docstring for `test_rule_linenumber`<commit_after># Copyright (c) 2020 Albin Vass <albin.vass@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ansiblelint.rules.SudoRule import SudoRule
TEST_TASKLIST = """
- debug:
msg: test
- command: echo test
sudo: true
"""
def test_rule_linenumber(monkeypatch):
"""Check that SudoRule offense contains a line number."""
rule = SudoRule()
matches = rule.matchyaml(dict(path="", type='tasklist'), TEST_TASKLIST)
assert matches[0].linenumber == 5
|
3e0e6971923bd6d753eca8bbb66c1f3c9efc2afb
|
piper/process.py
|
piper/process.py
|
import subprocess as sub
import logbook
from piper.logging import SEPARATOR
class Process(object):
"""
Helper class for running processes
"""
def __init__(self, ns, cmd, parent_key):
self.ns = ns
self.cmd = cmd
self.popen = None
self.success = None
self.log = logbook.Logger(parent_key + SEPARATOR + self.cmd)
def setup(self):
"""
Setup the Popen object used in execution
"""
self.log.debug('Spawning process handler')
self.popen = sub.Popen(
self.cmd.split(),
stdout=sub.PIPE,
stderr=sub.PIPE,
)
def run(self):
self.log.debug('Executing')
if self.ns.dry_run is True:
self.log.info('Not executing dry run.')
self.success = True
return
while not self.popen.poll():
# TODO: Gracefully handle stderr as well
line = self.popen.stdout.readline()
if not line:
break
self.log.info(line.decode('utf-8').rstrip())
exit = self.popen.wait()
self.log.debug('Exitcode {0}'.format(exit))
self.success = exit == 0
if not self.success:
self.log.error(self.popen.stderr.read())
|
import subprocess as sub
import logbook
from piper.logging import SEPARATOR
class Process(object):
"""
Helper class for running processes
"""
def __init__(self, ns, cmd, parent_key):
self.ns = ns
self.cmd = cmd
self.popen = None
self.success = None
self.log = logbook.Logger(parent_key + SEPARATOR + self.cmd)
def setup(self):
"""
Setup the Popen object used in execution
"""
self.log.debug('Spawning process handler')
self.popen = sub.Popen(
self.cmd.split(),
stdout=sub.PIPE,
stderr=sub.PIPE,
)
def run(self):
self.log.debug('Executing')
if self.ns.dry_run is True:
self.log.info('Not executing dry run.')
self.success = True
return
while not self.popen.poll():
# TODO: Gracefully handle stderr as well
line = self.popen.stdout.readline()
if not line:
break
self.log.info(line.decode('utf-8').rstrip())
exit = self.popen.wait()
self.log.debug('Exitcode {0}'.format(exit))
self.success = exit == 0
if not self.success:
self.log.error(self.popen.stderr.read().decode('utf-8'))
|
Fix decode bug in Process() failures
|
Fix decode bug in Process() failures
|
Python
|
mit
|
thiderman/piper
|
import subprocess as sub
import logbook
from piper.logging import SEPARATOR
class Process(object):
"""
Helper class for running processes
"""
def __init__(self, ns, cmd, parent_key):
self.ns = ns
self.cmd = cmd
self.popen = None
self.success = None
self.log = logbook.Logger(parent_key + SEPARATOR + self.cmd)
def setup(self):
"""
Setup the Popen object used in execution
"""
self.log.debug('Spawning process handler')
self.popen = sub.Popen(
self.cmd.split(),
stdout=sub.PIPE,
stderr=sub.PIPE,
)
def run(self):
self.log.debug('Executing')
if self.ns.dry_run is True:
self.log.info('Not executing dry run.')
self.success = True
return
while not self.popen.poll():
# TODO: Gracefully handle stderr as well
line = self.popen.stdout.readline()
if not line:
break
self.log.info(line.decode('utf-8').rstrip())
exit = self.popen.wait()
self.log.debug('Exitcode {0}'.format(exit))
self.success = exit == 0
if not self.success:
self.log.error(self.popen.stderr.read())
Fix decode bug in Process() failures
|
import subprocess as sub
import logbook
from piper.logging import SEPARATOR
class Process(object):
"""
Helper class for running processes
"""
def __init__(self, ns, cmd, parent_key):
self.ns = ns
self.cmd = cmd
self.popen = None
self.success = None
self.log = logbook.Logger(parent_key + SEPARATOR + self.cmd)
def setup(self):
"""
Setup the Popen object used in execution
"""
self.log.debug('Spawning process handler')
self.popen = sub.Popen(
self.cmd.split(),
stdout=sub.PIPE,
stderr=sub.PIPE,
)
def run(self):
self.log.debug('Executing')
if self.ns.dry_run is True:
self.log.info('Not executing dry run.')
self.success = True
return
while not self.popen.poll():
# TODO: Gracefully handle stderr as well
line = self.popen.stdout.readline()
if not line:
break
self.log.info(line.decode('utf-8').rstrip())
exit = self.popen.wait()
self.log.debug('Exitcode {0}'.format(exit))
self.success = exit == 0
if not self.success:
self.log.error(self.popen.stderr.read().decode('utf-8'))
|
<commit_before>import subprocess as sub
import logbook
from piper.logging import SEPARATOR
class Process(object):
"""
Helper class for running processes
"""
def __init__(self, ns, cmd, parent_key):
self.ns = ns
self.cmd = cmd
self.popen = None
self.success = None
self.log = logbook.Logger(parent_key + SEPARATOR + self.cmd)
def setup(self):
"""
Setup the Popen object used in execution
"""
self.log.debug('Spawning process handler')
self.popen = sub.Popen(
self.cmd.split(),
stdout=sub.PIPE,
stderr=sub.PIPE,
)
def run(self):
self.log.debug('Executing')
if self.ns.dry_run is True:
self.log.info('Not executing dry run.')
self.success = True
return
while not self.popen.poll():
# TODO: Gracefully handle stderr as well
line = self.popen.stdout.readline()
if not line:
break
self.log.info(line.decode('utf-8').rstrip())
exit = self.popen.wait()
self.log.debug('Exitcode {0}'.format(exit))
self.success = exit == 0
if not self.success:
self.log.error(self.popen.stderr.read())
<commit_msg>Fix decode bug in Process() failures<commit_after>
|
import subprocess as sub
import logbook
from piper.logging import SEPARATOR
class Process(object):
"""
Helper class for running processes
"""
def __init__(self, ns, cmd, parent_key):
self.ns = ns
self.cmd = cmd
self.popen = None
self.success = None
self.log = logbook.Logger(parent_key + SEPARATOR + self.cmd)
def setup(self):
"""
Setup the Popen object used in execution
"""
self.log.debug('Spawning process handler')
self.popen = sub.Popen(
self.cmd.split(),
stdout=sub.PIPE,
stderr=sub.PIPE,
)
def run(self):
self.log.debug('Executing')
if self.ns.dry_run is True:
self.log.info('Not executing dry run.')
self.success = True
return
while not self.popen.poll():
# TODO: Gracefully handle stderr as well
line = self.popen.stdout.readline()
if not line:
break
self.log.info(line.decode('utf-8').rstrip())
exit = self.popen.wait()
self.log.debug('Exitcode {0}'.format(exit))
self.success = exit == 0
if not self.success:
self.log.error(self.popen.stderr.read().decode('utf-8'))
|
import subprocess as sub
import logbook
from piper.logging import SEPARATOR
class Process(object):
"""
Helper class for running processes
"""
def __init__(self, ns, cmd, parent_key):
self.ns = ns
self.cmd = cmd
self.popen = None
self.success = None
self.log = logbook.Logger(parent_key + SEPARATOR + self.cmd)
def setup(self):
"""
Setup the Popen object used in execution
"""
self.log.debug('Spawning process handler')
self.popen = sub.Popen(
self.cmd.split(),
stdout=sub.PIPE,
stderr=sub.PIPE,
)
def run(self):
self.log.debug('Executing')
if self.ns.dry_run is True:
self.log.info('Not executing dry run.')
self.success = True
return
while not self.popen.poll():
# TODO: Gracefully handle stderr as well
line = self.popen.stdout.readline()
if not line:
break
self.log.info(line.decode('utf-8').rstrip())
exit = self.popen.wait()
self.log.debug('Exitcode {0}'.format(exit))
self.success = exit == 0
if not self.success:
self.log.error(self.popen.stderr.read())
Fix decode bug in Process() failuresimport subprocess as sub
import logbook
from piper.logging import SEPARATOR
class Process(object):
"""
Helper class for running processes
"""
def __init__(self, ns, cmd, parent_key):
self.ns = ns
self.cmd = cmd
self.popen = None
self.success = None
self.log = logbook.Logger(parent_key + SEPARATOR + self.cmd)
def setup(self):
"""
Setup the Popen object used in execution
"""
self.log.debug('Spawning process handler')
self.popen = sub.Popen(
self.cmd.split(),
stdout=sub.PIPE,
stderr=sub.PIPE,
)
def run(self):
self.log.debug('Executing')
if self.ns.dry_run is True:
self.log.info('Not executing dry run.')
self.success = True
return
while not self.popen.poll():
# TODO: Gracefully handle stderr as well
line = self.popen.stdout.readline()
if not line:
break
self.log.info(line.decode('utf-8').rstrip())
exit = self.popen.wait()
self.log.debug('Exitcode {0}'.format(exit))
self.success = exit == 0
if not self.success:
self.log.error(self.popen.stderr.read().decode('utf-8'))
|
<commit_before>import subprocess as sub
import logbook
from piper.logging import SEPARATOR
class Process(object):
"""
Helper class for running processes
"""
def __init__(self, ns, cmd, parent_key):
self.ns = ns
self.cmd = cmd
self.popen = None
self.success = None
self.log = logbook.Logger(parent_key + SEPARATOR + self.cmd)
def setup(self):
"""
Setup the Popen object used in execution
"""
self.log.debug('Spawning process handler')
self.popen = sub.Popen(
self.cmd.split(),
stdout=sub.PIPE,
stderr=sub.PIPE,
)
def run(self):
self.log.debug('Executing')
if self.ns.dry_run is True:
self.log.info('Not executing dry run.')
self.success = True
return
while not self.popen.poll():
# TODO: Gracefully handle stderr as well
line = self.popen.stdout.readline()
if not line:
break
self.log.info(line.decode('utf-8').rstrip())
exit = self.popen.wait()
self.log.debug('Exitcode {0}'.format(exit))
self.success = exit == 0
if not self.success:
self.log.error(self.popen.stderr.read())
<commit_msg>Fix decode bug in Process() failures<commit_after>import subprocess as sub
import logbook
from piper.logging import SEPARATOR
class Process(object):
    """
    Helper class for running processes
    """

    def __init__(self, ns, cmd, parent_key):
        self.ns = ns
        self.cmd = cmd
        self.popen = None
        self.success = None
        # One logger per command, namespaced under the parent key.
        log_name = parent_key + SEPARATOR + self.cmd
        self.log = logbook.Logger(log_name)

    def setup(self):
        """Create the Popen handle that run() will drive."""
        self.log.debug('Spawning process handler')
        argv = self.cmd.split()
        self.popen = sub.Popen(argv, stdout=sub.PIPE, stderr=sub.PIPE)

    def run(self):
        """Execute the command, streaming stdout into the log."""
        self.log.debug('Executing')
        if self.ns.dry_run is True:
            self.log.info('Not executing dry run.')
            self.success = True
            return

        while not self.popen.poll():
            # TODO: Gracefully handle stderr as well
            line = self.popen.stdout.readline()
            if not line:
                break
            self.log.info(line.decode('utf-8').rstrip())

        exit_code = self.popen.wait()
        self.log.debug('Exitcode {0}'.format(exit_code))
        self.success = exit_code == 0
        if not self.success:
            self.log.error(self.popen.stderr.read().decode('utf-8'))
self.log.error(self.popen.stderr.read().decode('utf-8'))
|
695304372ebe4ad76c5d6ce7dea7f39c28ffba07
|
libexec/wlint/punctuation-style.py
|
libexec/wlint/punctuation-style.py
|
#!/usr/bin/python3
import re
import wlint.common
import wlint.punctuation
class PunctuationStyle(wlint.common.Tool):
    """Check text files for common punctuation problems, reporting each hit."""

    def __init__(self, description):
        super().__init__(description)
        # Mapping of human-readable message -> rule-checking function.
        self.checks = wlint.punctuation.PunctuationRules().rules

    def setup(self, arguments):
        self.result = 0  # exit status: flipped to 1 on any hit

    def process(self, fileHandle):
        line_no = 0
        for text in fileHandle:
            line_no += 1
            for message, fn in self.checks.items():
                # The callback is invoked synchronously by fn for each match.
                reporter = lambda pos, n=line_no, m=message: print(
                    "{}-{}:{} {}".format(fileHandle.name, n, pos, m))
                if fn(text, reporter):
                    self.result = 1
# Build the tool, run it over the configured inputs, and exit with its
# accumulated result (0 = clean, 1 = punctuation issues found).
punctuationStyle = PunctuationStyle("Check for common punctuation issues")
punctuationStyle.execute()
exit(punctuationStyle.result)
|
#!/usr/bin/python3
import operator
import wlint.common
import wlint.punctuation
class PunctuationStyle(wlint.common.Tool):
    """Report punctuation problems, sorted by (line, column) rather than by
    the order in which the rules happen to be checked."""

    def __init__(self, description):
        super().__init__(description)
        # Mapping of human-readable message -> rule-checking function.
        self.checks = wlint.punctuation.PunctuationRules().rules

    def setup(self, arguments):
        self.result = 0  # exit status: flipped to 1 on any hit

    def process(self, fileHandle):
        lineNumber = 0
        hits = []
        for text in fileHandle:
            lineNumber += 1
            for message, fn in self.checks.items():
                # BUG FIX: append a single (line, col, message) tuple — the
                # original passed three positional args to list.append,
                # which raises TypeError on the first hit.
                if fn(text, lambda pos: hits.append((lineNumber, pos, message))):
                    self.result = 1
        # Tuples sort lexicographically, giving (line, then column) order.
        hits.sort()
        for (line, col, message) in hits:
            # BUG FIX: print the unpacked column ('col'); 'pos' was undefined
            # in this scope.
            print("{}-{}:{} {}".format(fileHandle.name, line, col, message))
# Build the tool, run it over the configured inputs, and exit with its
# accumulated result (0 = clean, 1 = punctuation issues found).
punctuationStyle = PunctuationStyle("Check for common punctuation issues")
punctuationStyle.execute()
exit(punctuationStyle.result)
|
Sort punctuation hits so output is based on line and column, not the order rules are checked
|
Sort punctuation hits so output is based on line and column, not the order rules are checked
|
Python
|
bsd-2-clause
|
snewell/wlint,snewell/wlint,snewell/writing-tools,snewell/writing-tools
|
#!/usr/bin/python3
import re
import wlint.common
import wlint.punctuation
class PunctuationStyle(wlint.common.Tool):
def __init__(self, description):
super().__init__(description)
self.checks = wlint.punctuation.PunctuationRules().rules
def setup(self, arguments):
self.result = 0
def process(self, fileHandle):
lineNumber = 0
for text in fileHandle:
lineNumber += 1
for message, fn in self.checks.items():
if fn(text, lambda pos: print(
"{}-{}:{} {}".format(fileHandle.name, lineNumber,
pos, message))):
self.result = 1
punctuationStyle = PunctuationStyle("Check for common punctuation issues")
punctuationStyle.execute()
exit(punctuationStyle.result)
Sort punctuation hits so output is based on line and column, not the order rules are checked
|
#!/usr/bin/python3
import operator
import wlint.common
import wlint.punctuation
class PunctuationStyle(wlint.common.Tool):
def __init__(self, description):
super().__init__(description)
self.checks = wlint.punctuation.PunctuationRules().rules
def setup(self, arguments):
self.result = 0
def process(self, fileHandle):
lineNumber = 0
hits = []
for text in fileHandle:
lineNumber += 1
for message, fn in self.checks.items():
if fn(text, lambda pos: hits.append(lineNumber, pos, message)):
self.result = 1
hits.sort()
for (line, col, message) in hits:
print("{}-{}:{} {}".format(fileHandle.name, line, pos, message))
punctuationStyle = PunctuationStyle("Check for common punctuation issues")
punctuationStyle.execute()
exit(punctuationStyle.result)
|
<commit_before>#!/usr/bin/python3
import re
import wlint.common
import wlint.punctuation
class PunctuationStyle(wlint.common.Tool):
def __init__(self, description):
super().__init__(description)
self.checks = wlint.punctuation.PunctuationRules().rules
def setup(self, arguments):
self.result = 0
def process(self, fileHandle):
lineNumber = 0
for text in fileHandle:
lineNumber += 1
for message, fn in self.checks.items():
if fn(text, lambda pos: print(
"{}-{}:{} {}".format(fileHandle.name, lineNumber,
pos, message))):
self.result = 1
punctuationStyle = PunctuationStyle("Check for common punctuation issues")
punctuationStyle.execute()
exit(punctuationStyle.result)
<commit_msg>Sort punctuation hits so output is based on line and column, not the order rules are checked<commit_after>
|
#!/usr/bin/python3
import operator
import wlint.common
import wlint.punctuation
class PunctuationStyle(wlint.common.Tool):
def __init__(self, description):
super().__init__(description)
self.checks = wlint.punctuation.PunctuationRules().rules
def setup(self, arguments):
self.result = 0
def process(self, fileHandle):
lineNumber = 0
hits = []
for text in fileHandle:
lineNumber += 1
for message, fn in self.checks.items():
if fn(text, lambda pos: hits.append(lineNumber, pos, message)):
self.result = 1
hits.sort()
for (line, col, message) in hits:
print("{}-{}:{} {}".format(fileHandle.name, line, pos, message))
punctuationStyle = PunctuationStyle("Check for common punctuation issues")
punctuationStyle.execute()
exit(punctuationStyle.result)
|
#!/usr/bin/python3
import re
import wlint.common
import wlint.punctuation
class PunctuationStyle(wlint.common.Tool):
def __init__(self, description):
super().__init__(description)
self.checks = wlint.punctuation.PunctuationRules().rules
def setup(self, arguments):
self.result = 0
def process(self, fileHandle):
lineNumber = 0
for text in fileHandle:
lineNumber += 1
for message, fn in self.checks.items():
if fn(text, lambda pos: print(
"{}-{}:{} {}".format(fileHandle.name, lineNumber,
pos, message))):
self.result = 1
punctuationStyle = PunctuationStyle("Check for common punctuation issues")
punctuationStyle.execute()
exit(punctuationStyle.result)
Sort punctuation hits so output is based on line and column, not the order rules are checked#!/usr/bin/python3
import operator
import wlint.common
import wlint.punctuation
class PunctuationStyle(wlint.common.Tool):
def __init__(self, description):
super().__init__(description)
self.checks = wlint.punctuation.PunctuationRules().rules
def setup(self, arguments):
self.result = 0
def process(self, fileHandle):
lineNumber = 0
hits = []
for text in fileHandle:
lineNumber += 1
for message, fn in self.checks.items():
if fn(text, lambda pos: hits.append(lineNumber, pos, message)):
self.result = 1
hits.sort()
for (line, col, message) in hits:
print("{}-{}:{} {}".format(fileHandle.name, line, pos, message))
punctuationStyle = PunctuationStyle("Check for common punctuation issues")
punctuationStyle.execute()
exit(punctuationStyle.result)
|
<commit_before>#!/usr/bin/python3
import re
import wlint.common
import wlint.punctuation
class PunctuationStyle(wlint.common.Tool):
def __init__(self, description):
super().__init__(description)
self.checks = wlint.punctuation.PunctuationRules().rules
def setup(self, arguments):
self.result = 0
def process(self, fileHandle):
lineNumber = 0
for text in fileHandle:
lineNumber += 1
for message, fn in self.checks.items():
if fn(text, lambda pos: print(
"{}-{}:{} {}".format(fileHandle.name, lineNumber,
pos, message))):
self.result = 1
punctuationStyle = PunctuationStyle("Check for common punctuation issues")
punctuationStyle.execute()
exit(punctuationStyle.result)
<commit_msg>Sort punctuation hits so output is based on line and column, not the order rules are checked<commit_after>#!/usr/bin/python3
import operator
import wlint.common
import wlint.punctuation
class PunctuationStyle(wlint.common.Tool):
def __init__(self, description):
super().__init__(description)
self.checks = wlint.punctuation.PunctuationRules().rules
def setup(self, arguments):
self.result = 0
def process(self, fileHandle):
lineNumber = 0
hits = []
for text in fileHandle:
lineNumber += 1
for message, fn in self.checks.items():
if fn(text, lambda pos: hits.append(lineNumber, pos, message)):
self.result = 1
hits.sort()
for (line, col, message) in hits:
print("{}-{}:{} {}".format(fileHandle.name, line, pos, message))
punctuationStyle = PunctuationStyle("Check for common punctuation issues")
punctuationStyle.execute()
exit(punctuationStyle.result)
|
8d85bfd34c291f01235eb630f458972cc11c58ad
|
embed_tweet.py
|
embed_tweet.py
|
"""
Embedded tweet plugin for Pelican
=================================
This plugin allows you to embed Twitter tweets into your articles.
And also provides a link for Twitter username.
i.e.
@username
will be replaced by a link to Twitter username page.
@username/status/tweetid
will be replaced by a `Embedded-tweet`_ API.
.. _Embedded-tweet: https://dev.twitter.com/docs/embedded-tweets
"""
from pelican import signals
import re
def embed_tweet(content):
    """Rewrite Twitter mentions in the rendered content body.

    @username/status/<id> references become embedded-tweet blockquotes,
    bare @username mentions become profile links, and the Twitter widgets
    script tag is appended once at the end.
    """
    # BUG FIX: some content objects carry an empty/None body (e.g. drafts);
    # without this guard the plugin would still append a stray script tag
    # (or fail outright on None).
    if not content._content:
        return
    content._content = re.sub(
        r'(^|[^@\w])@(\w{1,15})\b',
        '\\1<a href="https://twitter.com/\\2">@\\2</a>',
        re.sub(
            r'(^|[^@\w])@(\w{1,15})/status/(\d+)\b',
            '\\1<blockquote class="twitter-tweet" align="center"><a href="https://twitter.com/\\2/status/\\3">Tweet of \\2/\\3</a></blockquote>',
            content._content
        )
    ) + '<script src="//platform.twitter.com/widgets.js" charset="utf-8"></script>'
def register():
    # Pelican plugin entry point: rewrite tweets whenever a content object
    # is initialised.
    signals.content_object_init.connect(embed_tweet)
|
"""
Embedded tweet plugin for Pelican
=================================
This plugin allows you to embed Twitter tweets into your articles.
And also provides a link for Twitter username.
i.e.
@username
will be replaced by a link to Twitter username page.
@username/status/tweetid
will be replaced by a `Embedded-tweet`_ API.
.. _Embedded-tweet: https://dev.twitter.com/docs/embedded-tweets
"""
from pelican import signals
import re
def embed_tweet(content):
    """Replace Twitter @mentions in the content body with links/embeds."""
    if not content._content:
        # Nothing to rewrite (empty or missing body).
        return
    # First expand status references into embedded-tweet blockquotes...
    with_statuses = re.sub(
        r'(^|[^@\w])@(\w{1,15})/status/(\d+)\b',
        '\\1<blockquote class="twitter-tweet" align="center"><a href="https://twitter.com/\\2/status/\\3">Tweet of \\2/\\3</a></blockquote>',
        content._content
    )
    # ...then turn remaining bare usernames into profile links.
    with_links = re.sub(
        r'(^|[^@\w])@(\w{1,15})\b',
        '\\1<a href="https://twitter.com/\\2">@\\2</a>',
        with_statuses
    )
    content._content = with_links + '<script src="//platform.twitter.com/widgets.js" charset="utf-8"></script>'
def register():
    # Pelican plugin entry point: rewrite tweets whenever a content object
    # is initialised.
    signals.content_object_init.connect(embed_tweet)
|
Check content._content before using it to prevent errors
|
Check content._content before using it to prevent errors
|
Python
|
mit
|
lqez/pelican-embed-tweet
|
"""
Embedded tweet plugin for Pelican
=================================
This plugin allows you to embed Twitter tweets into your articles.
And also provides a link for Twitter username.
i.e.
@username
will be replaced by a link to Twitter username page.
@username/status/tweetid
will be replaced by a `Embedded-tweet`_ API.
.. _Embedded-tweet: https://dev.twitter.com/docs/embedded-tweets
"""
from pelican import signals
import re
def embed_tweet(content):
content._content = re.sub(
r'(^|[^@\w])@(\w{1,15})\b',
'\\1<a href="https://twitter.com/\\2">@\\2</a>',
re.sub(
r'(^|[^@\w])@(\w{1,15})/status/(\d+)\b',
'\\1<blockquote class="twitter-tweet" align="center"><a href="https://twitter.com/\\2/status/\\3">Tweet of \\2/\\3</a></blockquote>',
content._content
)
) + '<script src="//platform.twitter.com/widgets.js" charset="utf-8"></script>'
def register():
signals.content_object_init.connect(embed_tweet)
Check content._content before using it to prevent errors
|
"""
Embedded tweet plugin for Pelican
=================================
This plugin allows you to embed Twitter tweets into your articles.
And also provides a link for Twitter username.
i.e.
@username
will be replaced by a link to Twitter username page.
@username/status/tweetid
will be replaced by a `Embedded-tweet`_ API.
.. _Embedded-tweet: https://dev.twitter.com/docs/embedded-tweets
"""
from pelican import signals
import re
def embed_tweet(content):
if not content._content:
return
content._content = re.sub(
r'(^|[^@\w])@(\w{1,15})\b',
'\\1<a href="https://twitter.com/\\2">@\\2</a>',
re.sub(
r'(^|[^@\w])@(\w{1,15})/status/(\d+)\b',
'\\1<blockquote class="twitter-tweet" align="center"><a href="https://twitter.com/\\2/status/\\3">Tweet of \\2/\\3</a></blockquote>',
content._content
)
) + '<script src="//platform.twitter.com/widgets.js" charset="utf-8"></script>'
def register():
signals.content_object_init.connect(embed_tweet)
|
<commit_before>"""
Embedded tweet plugin for Pelican
=================================
This plugin allows you to embed Twitter tweets into your articles.
And also provides a link for Twitter username.
i.e.
@username
will be replaced by a link to Twitter username page.
@username/status/tweetid
will be replaced by a `Embedded-tweet`_ API.
.. _Embedded-tweet: https://dev.twitter.com/docs/embedded-tweets
"""
from pelican import signals
import re
def embed_tweet(content):
content._content = re.sub(
r'(^|[^@\w])@(\w{1,15})\b',
'\\1<a href="https://twitter.com/\\2">@\\2</a>',
re.sub(
r'(^|[^@\w])@(\w{1,15})/status/(\d+)\b',
'\\1<blockquote class="twitter-tweet" align="center"><a href="https://twitter.com/\\2/status/\\3">Tweet of \\2/\\3</a></blockquote>',
content._content
)
) + '<script src="//platform.twitter.com/widgets.js" charset="utf-8"></script>'
def register():
signals.content_object_init.connect(embed_tweet)
<commit_msg>Check content._content before using it to prevent errors<commit_after>
|
"""
Embedded tweet plugin for Pelican
=================================
This plugin allows you to embed Twitter tweets into your articles.
And also provides a link for Twitter username.
i.e.
@username
will be replaced by a link to Twitter username page.
@username/status/tweetid
will be replaced by a `Embedded-tweet`_ API.
.. _Embedded-tweet: https://dev.twitter.com/docs/embedded-tweets
"""
from pelican import signals
import re
def embed_tweet(content):
if not content._content:
return
content._content = re.sub(
r'(^|[^@\w])@(\w{1,15})\b',
'\\1<a href="https://twitter.com/\\2">@\\2</a>',
re.sub(
r'(^|[^@\w])@(\w{1,15})/status/(\d+)\b',
'\\1<blockquote class="twitter-tweet" align="center"><a href="https://twitter.com/\\2/status/\\3">Tweet of \\2/\\3</a></blockquote>',
content._content
)
) + '<script src="//platform.twitter.com/widgets.js" charset="utf-8"></script>'
def register():
signals.content_object_init.connect(embed_tweet)
|
"""
Embedded tweet plugin for Pelican
=================================
This plugin allows you to embed Twitter tweets into your articles.
And also provides a link for Twitter username.
i.e.
@username
will be replaced by a link to Twitter username page.
@username/status/tweetid
will be replaced by a `Embedded-tweet`_ API.
.. _Embedded-tweet: https://dev.twitter.com/docs/embedded-tweets
"""
from pelican import signals
import re
def embed_tweet(content):
content._content = re.sub(
r'(^|[^@\w])@(\w{1,15})\b',
'\\1<a href="https://twitter.com/\\2">@\\2</a>',
re.sub(
r'(^|[^@\w])@(\w{1,15})/status/(\d+)\b',
'\\1<blockquote class="twitter-tweet" align="center"><a href="https://twitter.com/\\2/status/\\3">Tweet of \\2/\\3</a></blockquote>',
content._content
)
) + '<script src="//platform.twitter.com/widgets.js" charset="utf-8"></script>'
def register():
signals.content_object_init.connect(embed_tweet)
Check content._content before using it to prevent errors"""
Embedded tweet plugin for Pelican
=================================
This plugin allows you to embed Twitter tweets into your articles.
And also provides a link for Twitter username.
i.e.
@username
will be replaced by a link to Twitter username page.
@username/status/tweetid
will be replaced by a `Embedded-tweet`_ API.
.. _Embedded-tweet: https://dev.twitter.com/docs/embedded-tweets
"""
from pelican import signals
import re
def embed_tweet(content):
if not content._content:
return
content._content = re.sub(
r'(^|[^@\w])@(\w{1,15})\b',
'\\1<a href="https://twitter.com/\\2">@\\2</a>',
re.sub(
r'(^|[^@\w])@(\w{1,15})/status/(\d+)\b',
'\\1<blockquote class="twitter-tweet" align="center"><a href="https://twitter.com/\\2/status/\\3">Tweet of \\2/\\3</a></blockquote>',
content._content
)
) + '<script src="//platform.twitter.com/widgets.js" charset="utf-8"></script>'
def register():
signals.content_object_init.connect(embed_tweet)
|
<commit_before>"""
Embedded tweet plugin for Pelican
=================================
This plugin allows you to embed Twitter tweets into your articles.
And also provides a link for Twitter username.
i.e.
@username
will be replaced by a link to Twitter username page.
@username/status/tweetid
will be replaced by a `Embedded-tweet`_ API.
.. _Embedded-tweet: https://dev.twitter.com/docs/embedded-tweets
"""
from pelican import signals
import re
def embed_tweet(content):
content._content = re.sub(
r'(^|[^@\w])@(\w{1,15})\b',
'\\1<a href="https://twitter.com/\\2">@\\2</a>',
re.sub(
r'(^|[^@\w])@(\w{1,15})/status/(\d+)\b',
'\\1<blockquote class="twitter-tweet" align="center"><a href="https://twitter.com/\\2/status/\\3">Tweet of \\2/\\3</a></blockquote>',
content._content
)
) + '<script src="//platform.twitter.com/widgets.js" charset="utf-8"></script>'
def register():
signals.content_object_init.connect(embed_tweet)
<commit_msg>Check content._content before using it to prevent errors<commit_after>"""
Embedded tweet plugin for Pelican
=================================
This plugin allows you to embed Twitter tweets into your articles.
And also provides a link for Twitter username.
i.e.
@username
will be replaced by a link to Twitter username page.
@username/status/tweetid
will be replaced by a `Embedded-tweet`_ API.
.. _Embedded-tweet: https://dev.twitter.com/docs/embedded-tweets
"""
from pelican import signals
import re
def embed_tweet(content):
if not content._content:
return
content._content = re.sub(
r'(^|[^@\w])@(\w{1,15})\b',
'\\1<a href="https://twitter.com/\\2">@\\2</a>',
re.sub(
r'(^|[^@\w])@(\w{1,15})/status/(\d+)\b',
'\\1<blockquote class="twitter-tweet" align="center"><a href="https://twitter.com/\\2/status/\\3">Tweet of \\2/\\3</a></blockquote>',
content._content
)
) + '<script src="//platform.twitter.com/widgets.js" charset="utf-8"></script>'
def register():
signals.content_object_init.connect(embed_tweet)
|
531c9e943748c576c963b809a7f1052d611346b9
|
hearthstone/stringsfile.py
|
hearthstone/stringsfile.py
|
"""
Hearthstone Strings file
File format: TSV. Lines starting with `#` are ignored.
Key is always `TAG`
"""
import csv
from typing import Dict
import hearthstone_data
# A single row of a strings file: column name -> localized value.
StringsRow = Dict[str, str]
# A whole strings file: TAG -> row (minus the TAG column itself).
StringsDict = Dict[str, StringsRow]

# Cache of parsed strings files keyed by path.
_cache: Dict[str, StringsDict] = {}


def load(fp) -> StringsDict:
    """Parse a Hearthstone TSV strings file into {TAG: {column: value}}.

    Blank lines and lines starting with '#' are skipped; empty cells are
    dropped from each row.
    """
    meaningful = (row for row in fp if row.strip() and not row.startswith("#"))
    reader = csv.DictReader(meaningful, delimiter="\t")
    result: StringsDict = {}
    for raw_row in reader:
        row = {key: value for key, value in raw_row.items() if value}
        result[row.pop("TAG")] = row
    return result
def load_globalstrings(locale="enUS") -> StringsDict:
    """Load (and cache) the GLOBAL.txt strings file for *locale*.

    Opened with the 'utf-8-sig' codec: newer strings files ship with a
    UTF-8 byte-order mark, which plain 'utf-8' would leave prepended to the
    first header cell and silently break the 'TAG' column lookup.
    """
    path: str = hearthstone_data.get_strings_file(locale, filename="GLOBAL.txt")
    if path not in _cache:
        with open(path, "r", encoding="utf-8-sig") as f:
            _cache[path] = load(f)
    return _cache[path]
if __name__ == "__main__":
    # CLI helper: dump each strings file given on the command line as JSON.
    import json
    import sys
    for path in sys.argv[1:]:
        with open(path, "r") as f:
            print(json.dumps(load(f)))
|
"""
Hearthstone Strings file
File format: TSV. Lines starting with `#` are ignored.
Key is always `TAG`
"""
import csv
from typing import Dict
import hearthstone_data
StringsRow = Dict[str, str]
StringsDict = Dict[str, StringsRow]
_cache: Dict[str, StringsDict] = {}
def load(fp) -> StringsDict:
reader = csv.DictReader(
filter(lambda row: row.strip() and not row.startswith("#"), fp),
delimiter="\t"
)
stripped_rows = [{k: v for k, v in row.items() if v} for row in reader]
return {stripped_row.pop("TAG"): stripped_row for stripped_row in stripped_rows}
def load_globalstrings(locale="enUS") -> StringsDict:
path: str = hearthstone_data.get_strings_file(locale, filename="GLOBAL.txt")
if path not in _cache:
with open(path, "r", encoding="utf-8-sig") as f:
_cache[path] = load(f)
return _cache[path]
if __name__ == "__main__":
import json
import sys
for path in sys.argv[1:]:
with open(path, "r") as f:
print(json.dumps(load(f)))
|
Fix BOM issue with latest strings
|
Fix BOM issue with latest strings
|
Python
|
mit
|
HearthSim/python-hearthstone
|
"""
Hearthstone Strings file
File format: TSV. Lines starting with `#` are ignored.
Key is always `TAG`
"""
import csv
from typing import Dict
import hearthstone_data
StringsRow = Dict[str, str]
StringsDict = Dict[str, StringsRow]
_cache: Dict[str, StringsDict] = {}
def load(fp) -> StringsDict:
reader = csv.DictReader(
filter(lambda row: row.strip() and not row.startswith("#"), fp),
delimiter="\t"
)
stripped_rows = [{k: v for k, v in row.items() if v} for row in reader]
return {stripped_row.pop("TAG"): stripped_row for stripped_row in stripped_rows}
def load_globalstrings(locale="enUS") -> StringsDict:
path: str = hearthstone_data.get_strings_file(locale, filename="GLOBAL.txt")
if path not in _cache:
with open(path, "r") as f:
_cache[path] = load(f)
return _cache[path]
if __name__ == "__main__":
import json
import sys
for path in sys.argv[1:]:
with open(path, "r") as f:
print(json.dumps(load(f)))
Fix BOM issue with latest strings
|
"""
Hearthstone Strings file
File format: TSV. Lines starting with `#` are ignored.
Key is always `TAG`
"""
import csv
from typing import Dict
import hearthstone_data
StringsRow = Dict[str, str]
StringsDict = Dict[str, StringsRow]
_cache: Dict[str, StringsDict] = {}
def load(fp) -> StringsDict:
reader = csv.DictReader(
filter(lambda row: row.strip() and not row.startswith("#"), fp),
delimiter="\t"
)
stripped_rows = [{k: v for k, v in row.items() if v} for row in reader]
return {stripped_row.pop("TAG"): stripped_row for stripped_row in stripped_rows}
def load_globalstrings(locale="enUS") -> StringsDict:
path: str = hearthstone_data.get_strings_file(locale, filename="GLOBAL.txt")
if path not in _cache:
with open(path, "r", encoding="utf-8-sig") as f:
_cache[path] = load(f)
return _cache[path]
if __name__ == "__main__":
import json
import sys
for path in sys.argv[1:]:
with open(path, "r") as f:
print(json.dumps(load(f)))
|
<commit_before>"""
Hearthstone Strings file
File format: TSV. Lines starting with `#` are ignored.
Key is always `TAG`
"""
import csv
from typing import Dict
import hearthstone_data
StringsRow = Dict[str, str]
StringsDict = Dict[str, StringsRow]
_cache: Dict[str, StringsDict] = {}
def load(fp) -> StringsDict:
reader = csv.DictReader(
filter(lambda row: row.strip() and not row.startswith("#"), fp),
delimiter="\t"
)
stripped_rows = [{k: v for k, v in row.items() if v} for row in reader]
return {stripped_row.pop("TAG"): stripped_row for stripped_row in stripped_rows}
def load_globalstrings(locale="enUS") -> StringsDict:
path: str = hearthstone_data.get_strings_file(locale, filename="GLOBAL.txt")
if path not in _cache:
with open(path, "r") as f:
_cache[path] = load(f)
return _cache[path]
if __name__ == "__main__":
import json
import sys
for path in sys.argv[1:]:
with open(path, "r") as f:
print(json.dumps(load(f)))
<commit_msg>Fix BOM issue with latest strings<commit_after>
|
"""
Hearthstone Strings file
File format: TSV. Lines starting with `#` are ignored.
Key is always `TAG`
"""
import csv
from typing import Dict
import hearthstone_data
StringsRow = Dict[str, str]
StringsDict = Dict[str, StringsRow]
_cache: Dict[str, StringsDict] = {}
def load(fp) -> StringsDict:
reader = csv.DictReader(
filter(lambda row: row.strip() and not row.startswith("#"), fp),
delimiter="\t"
)
stripped_rows = [{k: v for k, v in row.items() if v} for row in reader]
return {stripped_row.pop("TAG"): stripped_row for stripped_row in stripped_rows}
def load_globalstrings(locale="enUS") -> StringsDict:
path: str = hearthstone_data.get_strings_file(locale, filename="GLOBAL.txt")
if path not in _cache:
with open(path, "r", encoding="utf-8-sig") as f:
_cache[path] = load(f)
return _cache[path]
if __name__ == "__main__":
import json
import sys
for path in sys.argv[1:]:
with open(path, "r") as f:
print(json.dumps(load(f)))
|
"""
Hearthstone Strings file
File format: TSV. Lines starting with `#` are ignored.
Key is always `TAG`
"""
import csv
from typing import Dict
import hearthstone_data
StringsRow = Dict[str, str]
StringsDict = Dict[str, StringsRow]
_cache: Dict[str, StringsDict] = {}
def load(fp) -> StringsDict:
reader = csv.DictReader(
filter(lambda row: row.strip() and not row.startswith("#"), fp),
delimiter="\t"
)
stripped_rows = [{k: v for k, v in row.items() if v} for row in reader]
return {stripped_row.pop("TAG"): stripped_row for stripped_row in stripped_rows}
def load_globalstrings(locale="enUS") -> StringsDict:
path: str = hearthstone_data.get_strings_file(locale, filename="GLOBAL.txt")
if path not in _cache:
with open(path, "r") as f:
_cache[path] = load(f)
return _cache[path]
if __name__ == "__main__":
import json
import sys
for path in sys.argv[1:]:
with open(path, "r") as f:
print(json.dumps(load(f)))
Fix BOM issue with latest strings"""
Hearthstone Strings file
File format: TSV. Lines starting with `#` are ignored.
Key is always `TAG`
"""
import csv
from typing import Dict
import hearthstone_data
StringsRow = Dict[str, str]
StringsDict = Dict[str, StringsRow]
_cache: Dict[str, StringsDict] = {}
def load(fp) -> StringsDict:
reader = csv.DictReader(
filter(lambda row: row.strip() and not row.startswith("#"), fp),
delimiter="\t"
)
stripped_rows = [{k: v for k, v in row.items() if v} for row in reader]
return {stripped_row.pop("TAG"): stripped_row for stripped_row in stripped_rows}
def load_globalstrings(locale="enUS") -> StringsDict:
path: str = hearthstone_data.get_strings_file(locale, filename="GLOBAL.txt")
if path not in _cache:
with open(path, "r", encoding="utf-8-sig") as f:
_cache[path] = load(f)
return _cache[path]
if __name__ == "__main__":
import json
import sys
for path in sys.argv[1:]:
with open(path, "r") as f:
print(json.dumps(load(f)))
|
<commit_before>"""
Hearthstone Strings file
File format: TSV. Lines starting with `#` are ignored.
Key is always `TAG`
"""
import csv
from typing import Dict
import hearthstone_data
StringsRow = Dict[str, str]
StringsDict = Dict[str, StringsRow]
_cache: Dict[str, StringsDict] = {}
def load(fp) -> StringsDict:
reader = csv.DictReader(
filter(lambda row: row.strip() and not row.startswith("#"), fp),
delimiter="\t"
)
stripped_rows = [{k: v for k, v in row.items() if v} for row in reader]
return {stripped_row.pop("TAG"): stripped_row for stripped_row in stripped_rows}
def load_globalstrings(locale="enUS") -> StringsDict:
path: str = hearthstone_data.get_strings_file(locale, filename="GLOBAL.txt")
if path not in _cache:
with open(path, "r") as f:
_cache[path] = load(f)
return _cache[path]
if __name__ == "__main__":
import json
import sys
for path in sys.argv[1:]:
with open(path, "r") as f:
print(json.dumps(load(f)))
<commit_msg>Fix BOM issue with latest strings<commit_after>"""
Hearthstone Strings file
File format: TSV. Lines starting with `#` are ignored.
Key is always `TAG`
"""
import csv
from typing import Dict
import hearthstone_data
StringsRow = Dict[str, str]
StringsDict = Dict[str, StringsRow]
_cache: Dict[str, StringsDict] = {}
def load(fp) -> StringsDict:
reader = csv.DictReader(
filter(lambda row: row.strip() and not row.startswith("#"), fp),
delimiter="\t"
)
stripped_rows = [{k: v for k, v in row.items() if v} for row in reader]
return {stripped_row.pop("TAG"): stripped_row for stripped_row in stripped_rows}
def load_globalstrings(locale="enUS") -> StringsDict:
path: str = hearthstone_data.get_strings_file(locale, filename="GLOBAL.txt")
if path not in _cache:
with open(path, "r", encoding="utf-8-sig") as f:
_cache[path] = load(f)
return _cache[path]
if __name__ == "__main__":
import json
import sys
for path in sys.argv[1:]:
with open(path, "r") as f:
print(json.dumps(load(f)))
|
c85ff679ef7a4c88dc4c625f69faed76ef195111
|
frappe/website/page_renderers/not_found_page.py
|
frappe/website/page_renderers/not_found_page.py
|
import os
from urllib.parse import urlparse
import frappe
from frappe.website.page_renderers.template_page import TemplatePage
from frappe.website.utils import can_cache
HOMEPAGE_PATHS = ('/', '/index', 'index')
class NotFoundPage(TemplatePage):
    """Renderer for 404 pages; also records missing URLs in the website
    404 cache (except for custom home pages)."""

    def __init__(self, path, http_status_code=None):
        # BUG FIX: honour a caller-supplied status code instead of always
        # overwriting it with 404. The default stays 404, so existing
        # callers (and positional calls) behave exactly as before.
        self.request_path = path
        self.request_url = frappe.local.request.url if hasattr(frappe.local, 'request') else ''
        path = '404'  # always render the 404 template regardless of request path
        http_status_code = http_status_code or 404
        super().__init__(path=path, http_status_code=http_status_code)

    def can_render(self):
        # The 404 template itself can always be rendered.
        return True

    def render(self):
        if self.can_cache_404():
            frappe.cache().hset('website_404', self.request_url, True)
        return super().render()

    def can_cache_404(self):
        # do not cache 404 for custom homepages
        return can_cache() and self.request_url and not self.is_custom_home_page()

    def is_custom_home_page(self):
        url_parts = urlparse(self.request_url)
        request_url = os.path.splitext(url_parts.path)[0]
        request_path = os.path.splitext(self.request_path)[0]
        return request_url in HOMEPAGE_PATHS and request_path not in HOMEPAGE_PATHS
|
import os
from urllib.parse import urlparse
import frappe
from frappe.website.page_renderers.template_page import TemplatePage
from frappe.website.utils import can_cache
HOMEPAGE_PATHS = ('/', '/index', 'index')
class NotFoundPage(TemplatePage):
def __init__(self, path, http_status_code=None):
self.request_path = path
self.request_url = frappe.local.request.url if hasattr(frappe.local, 'request') else ''
path = '404'
http_status_code = http_status_code or 404
super().__init__(path=path, http_status_code=http_status_code)
def can_render(self):
return True
def render(self):
if self.can_cache_404():
frappe.cache().hset('website_404', self.request_url, True)
return super().render()
def can_cache_404(self):
# do not cache 404 for custom homepages
return can_cache() and self.request_url and not self.is_custom_home_page()
def is_custom_home_page(self):
url_parts = urlparse(self.request_url)
request_url = os.path.splitext(url_parts.path)[0]
request_path = os.path.splitext(self.request_path)[0]
return request_url in HOMEPAGE_PATHS and request_path not in HOMEPAGE_PATHS
|
Set default value for http_status_code
|
fix(NotFoundPage): Set default value for http_status_code
|
Python
|
mit
|
mhbu50/frappe,yashodhank/frappe,mhbu50/frappe,almeidapaulopt/frappe,StrellaGroup/frappe,mhbu50/frappe,almeidapaulopt/frappe,yashodhank/frappe,StrellaGroup/frappe,almeidapaulopt/frappe,frappe/frappe,frappe/frappe,yashodhank/frappe,yashodhank/frappe,almeidapaulopt/frappe,frappe/frappe,StrellaGroup/frappe,mhbu50/frappe
|
import os
from urllib.parse import urlparse
import frappe
from frappe.website.page_renderers.template_page import TemplatePage
from frappe.website.utils import can_cache
HOMEPAGE_PATHS = ('/', '/index', 'index')
class NotFoundPage(TemplatePage):
def __init__(self, path, http_status_code):
self.request_path = path
self.request_url = frappe.local.request.url if hasattr(frappe.local, 'request') else ''
path = '404'
http_status_code = 404
super().__init__(path=path, http_status_code=http_status_code)
def can_render(self):
return True
def render(self):
if self.can_cache_404():
frappe.cache().hset('website_404', self.request_url, True)
return super().render()
def can_cache_404(self):
# do not cache 404 for custom homepages
return can_cache() and self.request_url and not self.is_custom_home_page()
def is_custom_home_page(self):
url_parts = urlparse(self.request_url)
request_url = os.path.splitext(url_parts.path)[0]
request_path = os.path.splitext(self.request_path)[0]
return request_url in HOMEPAGE_PATHS and request_path not in HOMEPAGE_PATHS
fix(NotFoundPage): Set default value for http_status_code
|
import os
from urllib.parse import urlparse
import frappe
from frappe.website.page_renderers.template_page import TemplatePage
from frappe.website.utils import can_cache
HOMEPAGE_PATHS = ('/', '/index', 'index')
class NotFoundPage(TemplatePage):
    """Renderer for the site-wide 404 page.

    Renders the generic ``404`` template while remembering the URL the
    user actually requested, so repeated misses can be cached.
    """

    def __init__(self, path, http_status_code=None):
        # Keep what the user actually requested before redirecting
        # rendering to the generic '404' template below.
        self.request_path = path
        # frappe.local has no `request` attribute outside an HTTP context
        # (e.g. background jobs), hence the hasattr guard.
        self.request_url = frappe.local.request.url if hasattr(frappe.local, 'request') else ''
        path = '404'
        # Preserve a caller-supplied status code; default to 404.
        http_status_code = http_status_code or 404
        super().__init__(path=path, http_status_code=http_status_code)

    def can_render(self):
        # The 404 page itself can always be rendered.
        return True

    def render(self):
        if self.can_cache_404():
            # Record that this URL 404'd so later requests can short-circuit.
            frappe.cache().hset('website_404', self.request_url, True)
        return super().render()

    def can_cache_404(self):
        # do not cache 404 for custom homepages
        return can_cache() and self.request_url and not self.is_custom_home_page()

    def is_custom_home_page(self):
        """True when the requested URL is a homepage path but the resolved
        template path is not — i.e. a custom home page is configured."""
        url_parts = urlparse(self.request_url)
        request_url = os.path.splitext(url_parts.path)[0]
        request_path = os.path.splitext(self.request_path)[0]
        return request_url in HOMEPAGE_PATHS and request_path not in HOMEPAGE_PATHS
|
<commit_before>import os
from urllib.parse import urlparse
import frappe
from frappe.website.page_renderers.template_page import TemplatePage
from frappe.website.utils import can_cache
HOMEPAGE_PATHS = ('/', '/index', 'index')
class NotFoundPage(TemplatePage):
    """Renderer for the site-wide 404 page.

    Renders the generic ``404`` template while remembering the URL the
    user actually requested, so repeated misses can be cached.
    """

    def __init__(self, path, http_status_code=None):
        # Keep what the user actually requested before redirecting
        # rendering to the generic '404' template below.
        self.request_path = path
        # frappe.local has no `request` attribute outside an HTTP context
        # (e.g. background jobs), hence the hasattr guard.
        self.request_url = frappe.local.request.url if hasattr(frappe.local, 'request') else ''
        path = '404'
        # BUG FIX: the caller-supplied status code was unconditionally
        # overwritten with 404; preserve it and only default to 404.
        http_status_code = http_status_code or 404
        super().__init__(path=path, http_status_code=http_status_code)

    def can_render(self):
        """The 404 page itself can always be rendered."""
        return True

    def render(self):
        if self.can_cache_404():
            # Record that this URL 404'd so later requests can short-circuit.
            frappe.cache().hset('website_404', self.request_url, True)
        return super().render()

    def can_cache_404(self):
        # do not cache 404 for custom homepages
        return can_cache() and self.request_url and not self.is_custom_home_page()

    def is_custom_home_page(self):
        """True when the requested URL is a homepage path but the resolved
        template path is not — i.e. a custom home page is configured."""
        url_parts = urlparse(self.request_url)
        request_url = os.path.splitext(url_parts.path)[0]
        request_path = os.path.splitext(self.request_path)[0]
        return request_url in HOMEPAGE_PATHS and request_path not in HOMEPAGE_PATHS
<commit_msg>fix(NotFoundPage): Set default value for http_status_code<commit_after>
|
import os
from urllib.parse import urlparse
import frappe
from frappe.website.page_renderers.template_page import TemplatePage
from frappe.website.utils import can_cache
HOMEPAGE_PATHS = ('/', '/index', 'index')
class NotFoundPage(TemplatePage):
    """Renderer for the site-wide 404 page.

    Renders the generic ``404`` template while remembering the URL the
    user actually requested, so repeated misses can be cached.
    """

    def __init__(self, path, http_status_code=None):
        # Keep what the user actually requested before redirecting
        # rendering to the generic '404' template below.
        self.request_path = path
        # frappe.local has no `request` attribute outside an HTTP context
        # (e.g. background jobs), hence the hasattr guard.
        self.request_url = frappe.local.request.url if hasattr(frappe.local, 'request') else ''
        path = '404'
        # Preserve a caller-supplied status code; default to 404.
        http_status_code = http_status_code or 404
        super().__init__(path=path, http_status_code=http_status_code)

    def can_render(self):
        # The 404 page itself can always be rendered.
        return True

    def render(self):
        if self.can_cache_404():
            # Record that this URL 404'd so later requests can short-circuit.
            frappe.cache().hset('website_404', self.request_url, True)
        return super().render()

    def can_cache_404(self):
        # do not cache 404 for custom homepages
        return can_cache() and self.request_url and not self.is_custom_home_page()

    def is_custom_home_page(self):
        """True when the requested URL is a homepage path but the resolved
        template path is not — i.e. a custom home page is configured."""
        url_parts = urlparse(self.request_url)
        request_url = os.path.splitext(url_parts.path)[0]
        request_path = os.path.splitext(self.request_path)[0]
        return request_url in HOMEPAGE_PATHS and request_path not in HOMEPAGE_PATHS
|
import os
from urllib.parse import urlparse
import frappe
from frappe.website.page_renderers.template_page import TemplatePage
from frappe.website.utils import can_cache
HOMEPAGE_PATHS = ('/', '/index', 'index')
class NotFoundPage(TemplatePage):
    """Renderer for the site-wide 404 page.

    Renders the generic ``404`` template while remembering the URL the
    user actually requested, so repeated misses can be cached.
    """

    def __init__(self, path, http_status_code=None):
        # Keep what the user actually requested before redirecting
        # rendering to the generic '404' template below.
        self.request_path = path
        # frappe.local has no `request` attribute outside an HTTP context
        # (e.g. background jobs), hence the hasattr guard.
        self.request_url = frappe.local.request.url if hasattr(frappe.local, 'request') else ''
        path = '404'
        # BUG FIX: the caller-supplied status code was unconditionally
        # overwritten with 404; preserve it and only default to 404.
        http_status_code = http_status_code or 404
        super().__init__(path=path, http_status_code=http_status_code)

    def can_render(self):
        """The 404 page itself can always be rendered."""
        return True

    def render(self):
        if self.can_cache_404():
            # Record that this URL 404'd so later requests can short-circuit.
            frappe.cache().hset('website_404', self.request_url, True)
        return super().render()

    def can_cache_404(self):
        # do not cache 404 for custom homepages
        return can_cache() and self.request_url and not self.is_custom_home_page()

    def is_custom_home_page(self):
        """True when the requested URL is a homepage path but the resolved
        template path is not — i.e. a custom home page is configured."""
        url_parts = urlparse(self.request_url)
        request_url = os.path.splitext(url_parts.path)[0]
        request_path = os.path.splitext(self.request_path)[0]
        return request_url in HOMEPAGE_PATHS and request_path not in HOMEPAGE_PATHS
fix(NotFoundPage): Set default value for http_status_codeimport os
from urllib.parse import urlparse
import frappe
from frappe.website.page_renderers.template_page import TemplatePage
from frappe.website.utils import can_cache
HOMEPAGE_PATHS = ('/', '/index', 'index')
class NotFoundPage(TemplatePage):
    """Renderer for the site-wide 404 page.

    Renders the generic ``404`` template while remembering the URL the
    user actually requested, so repeated misses can be cached.
    """

    def __init__(self, path, http_status_code=None):
        # Keep what the user actually requested before redirecting
        # rendering to the generic '404' template below.
        self.request_path = path
        # frappe.local has no `request` attribute outside an HTTP context
        # (e.g. background jobs), hence the hasattr guard.
        self.request_url = frappe.local.request.url if hasattr(frappe.local, 'request') else ''
        path = '404'
        # Preserve a caller-supplied status code; default to 404.
        http_status_code = http_status_code or 404
        super().__init__(path=path, http_status_code=http_status_code)

    def can_render(self):
        # The 404 page itself can always be rendered.
        return True

    def render(self):
        if self.can_cache_404():
            # Record that this URL 404'd so later requests can short-circuit.
            frappe.cache().hset('website_404', self.request_url, True)
        return super().render()

    def can_cache_404(self):
        # do not cache 404 for custom homepages
        return can_cache() and self.request_url and not self.is_custom_home_page()

    def is_custom_home_page(self):
        """True when the requested URL is a homepage path but the resolved
        template path is not — i.e. a custom home page is configured."""
        url_parts = urlparse(self.request_url)
        request_url = os.path.splitext(url_parts.path)[0]
        request_path = os.path.splitext(self.request_path)[0]
        return request_url in HOMEPAGE_PATHS and request_path not in HOMEPAGE_PATHS
|
<commit_before>import os
from urllib.parse import urlparse
import frappe
from frappe.website.page_renderers.template_page import TemplatePage
from frappe.website.utils import can_cache
HOMEPAGE_PATHS = ('/', '/index', 'index')
class NotFoundPage(TemplatePage):
    """Renderer for the site-wide 404 page.

    Renders the generic ``404`` template while remembering the URL the
    user actually requested, so repeated misses can be cached.
    """

    def __init__(self, path, http_status_code=None):
        # Keep what the user actually requested before redirecting
        # rendering to the generic '404' template below.
        self.request_path = path
        # frappe.local has no `request` attribute outside an HTTP context
        # (e.g. background jobs), hence the hasattr guard.
        self.request_url = frappe.local.request.url if hasattr(frappe.local, 'request') else ''
        path = '404'
        # BUG FIX: the caller-supplied status code was unconditionally
        # overwritten with 404; preserve it and only default to 404.
        http_status_code = http_status_code or 404
        super().__init__(path=path, http_status_code=http_status_code)

    def can_render(self):
        """The 404 page itself can always be rendered."""
        return True

    def render(self):
        if self.can_cache_404():
            # Record that this URL 404'd so later requests can short-circuit.
            frappe.cache().hset('website_404', self.request_url, True)
        return super().render()

    def can_cache_404(self):
        # do not cache 404 for custom homepages
        return can_cache() and self.request_url and not self.is_custom_home_page()

    def is_custom_home_page(self):
        """True when the requested URL is a homepage path but the resolved
        template path is not — i.e. a custom home page is configured."""
        url_parts = urlparse(self.request_url)
        request_url = os.path.splitext(url_parts.path)[0]
        request_path = os.path.splitext(self.request_path)[0]
        return request_url in HOMEPAGE_PATHS and request_path not in HOMEPAGE_PATHS
<commit_msg>fix(NotFoundPage): Set default value for http_status_code<commit_after>import os
from urllib.parse import urlparse
import frappe
from frappe.website.page_renderers.template_page import TemplatePage
from frappe.website.utils import can_cache
HOMEPAGE_PATHS = ('/', '/index', 'index')
class NotFoundPage(TemplatePage):
    """Renderer for the site-wide 404 page.

    Renders the generic ``404`` template while remembering the URL the
    user actually requested, so repeated misses can be cached.
    """

    def __init__(self, path, http_status_code=None):
        # Keep what the user actually requested before redirecting
        # rendering to the generic '404' template below.
        self.request_path = path
        # frappe.local has no `request` attribute outside an HTTP context
        # (e.g. background jobs), hence the hasattr guard.
        self.request_url = frappe.local.request.url if hasattr(frappe.local, 'request') else ''
        path = '404'
        # Preserve a caller-supplied status code; default to 404.
        http_status_code = http_status_code or 404
        super().__init__(path=path, http_status_code=http_status_code)

    def can_render(self):
        # The 404 page itself can always be rendered.
        return True

    def render(self):
        if self.can_cache_404():
            # Record that this URL 404'd so later requests can short-circuit.
            frappe.cache().hset('website_404', self.request_url, True)
        return super().render()

    def can_cache_404(self):
        # do not cache 404 for custom homepages
        return can_cache() and self.request_url and not self.is_custom_home_page()

    def is_custom_home_page(self):
        """True when the requested URL is a homepage path but the resolved
        template path is not — i.e. a custom home page is configured."""
        url_parts = urlparse(self.request_url)
        request_url = os.path.splitext(url_parts.path)[0]
        request_path = os.path.splitext(self.request_path)[0]
        return request_url in HOMEPAGE_PATHS and request_path not in HOMEPAGE_PATHS
|
74800bf43f9b0f130a7b096afd20db373e7dad1e
|
web/blueprints/helpers/exception.py
|
web/blueprints/helpers/exception.py
|
import traceback
from flask import flash
from sqlalchemy.exc import InternalError
from pycroft.helpers import AutoNumber
from pycroft.model import session
from pycroft.lib.net import MacExistsException, SubnetFullException
from pycroft.model.host import MulticastFlagException
from pycroft.model.types import InvalidMACAddressException
def web_execute(function, success_message, *args, **kwargs):
    """Execute *function* and translate known exceptions into flash messages.

    Returns ``(result, True)`` on success (flashing *success_message* if
    given), or ``(None, False)`` after any handled failure.  The database
    session is rolled back on every handled failure path.
    """
    try:
        result = function(*args, **kwargs)
        if success_message:
            flash(success_message, 'success')
        return result, True
    except MacExistsException:
        # MAC address already in use.
        flash("Die MAC-Adresse ist bereits in Verwendung.", 'error')
        session.session.rollback()
    except SubnetFullException:
        # IP subnet is exhausted.
        flash("Das IP-Subnetz ist voll.", 'error')
        session.session.rollback()
    except MulticastFlagException:
        # MAC address has the multicast bit set.
        flash("Die MAC-Adresse enthält ein aktives Multicast-Bit.", 'error')
        session.session.rollback()
    except InvalidMACAddressException:
        # Malformed MAC address.
        flash("Die MAC-Adresse ist ungültig.", 'error')
        session.session.rollback()
    except InternalError as e:
        # Special case: Username already taken in Abe
        # Should be removed after migration
        if "Username already taken in Abe!" in str(e):
            flash("Dieser Benutzername wird bereits in Abe verwendet.", 'error')
            session.session.rollback()
        else:
            # Unknown database error: propagate to the caller.
            raise e
    except Exception as e:
        # Catch-all boundary: surface the error to the user, keep a trace.
        traceback.print_exc()
        flash("Es ist ein unerwarteter Fehler aufgetreten: {}".format(e), "error")
        session.session.rollback()
    return None, False
|
import traceback
from flask import flash
from pycroft.lib.net import MacExistsException, SubnetFullException
from pycroft.model import session
from pycroft.model.host import MulticastFlagException
from pycroft.model.types import InvalidMACAddressException
def web_execute(function, success_message, *args, **kwargs):
    """Execute *function* and translate known exceptions into flash messages.

    Returns ``(result, True)`` on success (flashing *success_message* if
    given), or ``(None, False)`` after any handled failure.  The database
    session is rolled back on every handled failure path.
    """
    try:
        result = function(*args, **kwargs)
        if success_message:
            flash(success_message, 'success')
        return result, True
    except MacExistsException:
        # MAC address already in use.
        flash("Die MAC-Adresse ist bereits in Verwendung.", 'error')
        session.session.rollback()
    except SubnetFullException:
        # IP subnet is exhausted.
        flash("Das IP-Subnetz ist voll.", 'error')
        session.session.rollback()
    except MulticastFlagException:
        # MAC address has the multicast bit set.
        flash("Die MAC-Adresse enthält ein aktives Multicast-Bit.", 'error')
        session.session.rollback()
    except InvalidMACAddressException:
        # Malformed MAC address.
        flash("Die MAC-Adresse ist ungültig.", 'error')
        session.session.rollback()
    except Exception as e:
        # Catch-all boundary: surface the error to the user, keep a trace.
        traceback.print_exc()
        flash("Es ist ein unerwarteter Fehler aufgetreten: {}".format(e), "error")
        session.session.rollback()
    return None, False
|
Remove “username taken in abe” handling
|
Remove “username taken in abe” handling
|
Python
|
apache-2.0
|
agdsn/pycroft,agdsn/pycroft,agdsn/pycroft,agdsn/pycroft,agdsn/pycroft
|
import traceback
from flask import flash
from sqlalchemy.exc import InternalError
from pycroft.helpers import AutoNumber
from pycroft.model import session
from pycroft.lib.net import MacExistsException, SubnetFullException
from pycroft.model.host import MulticastFlagException
from pycroft.model.types import InvalidMACAddressException
def web_execute(function, success_message, *args, **kwargs):
    """Execute *function* and translate known exceptions into flash messages.

    Returns ``(result, True)`` on success (flashing *success_message* if
    given), or ``(None, False)`` after any handled failure.  The database
    session is rolled back on every handled failure path.
    """
    try:
        result = function(*args, **kwargs)
        if success_message:
            flash(success_message, 'success')
        return result, True
    except MacExistsException:
        # MAC address already in use.
        flash("Die MAC-Adresse ist bereits in Verwendung.", 'error')
        session.session.rollback()
    except SubnetFullException:
        # IP subnet is exhausted.
        flash("Das IP-Subnetz ist voll.", 'error')
        session.session.rollback()
    except MulticastFlagException:
        # MAC address has the multicast bit set.
        flash("Die MAC-Adresse enthält ein aktives Multicast-Bit.", 'error')
        session.session.rollback()
    except InvalidMACAddressException:
        # Malformed MAC address.
        flash("Die MAC-Adresse ist ungültig.", 'error')
        session.session.rollback()
    except InternalError as e:
        # Special case: Username already taken in Abe
        # Should be removed after migration
        if "Username already taken in Abe!" in str(e):
            flash("Dieser Benutzername wird bereits in Abe verwendet.", 'error')
            session.session.rollback()
        else:
            # Unknown database error: propagate to the caller.
            raise e
    except Exception as e:
        # Catch-all boundary: surface the error to the user, keep a trace.
        traceback.print_exc()
        flash("Es ist ein unerwarteter Fehler aufgetreten: {}".format(e), "error")
        session.session.rollback()
    return None, False
Remove “username taken in abe” handling
|
import traceback
from flask import flash
from pycroft.lib.net import MacExistsException, SubnetFullException
from pycroft.model import session
from pycroft.model.host import MulticastFlagException
from pycroft.model.types import InvalidMACAddressException
def web_execute(function, success_message, *args, **kwargs):
    """Execute *function* and translate known exceptions into flash messages.

    Returns ``(result, True)`` on success (flashing *success_message* if
    given), or ``(None, False)`` after any handled failure.  The database
    session is rolled back on every handled failure path.
    """
    try:
        result = function(*args, **kwargs)
        if success_message:
            flash(success_message, 'success')
        return result, True
    except MacExistsException:
        # MAC address already in use.
        flash("Die MAC-Adresse ist bereits in Verwendung.", 'error')
        session.session.rollback()
    except SubnetFullException:
        # IP subnet is exhausted.
        flash("Das IP-Subnetz ist voll.", 'error')
        session.session.rollback()
    except MulticastFlagException:
        # MAC address has the multicast bit set.
        flash("Die MAC-Adresse enthält ein aktives Multicast-Bit.", 'error')
        session.session.rollback()
    except InvalidMACAddressException:
        # Malformed MAC address.
        flash("Die MAC-Adresse ist ungültig.", 'error')
        session.session.rollback()
    except Exception as e:
        # Catch-all boundary: surface the error to the user, keep a trace.
        traceback.print_exc()
        flash("Es ist ein unerwarteter Fehler aufgetreten: {}".format(e), "error")
        session.session.rollback()
    return None, False
|
<commit_before>import traceback
from flask import flash
from sqlalchemy.exc import InternalError
from pycroft.helpers import AutoNumber
from pycroft.model import session
from pycroft.lib.net import MacExistsException, SubnetFullException
from pycroft.model.host import MulticastFlagException
from pycroft.model.types import InvalidMACAddressException
def web_execute(function, success_message, *args, **kwargs):
    """Execute *function* and translate known exceptions into flash messages.

    Returns ``(result, True)`` on success (flashing *success_message* if
    given), or ``(None, False)`` after any handled failure.  The database
    session is rolled back on every handled failure path.
    """
    try:
        result = function(*args, **kwargs)
        if success_message:
            flash(success_message, 'success')
        return result, True
    except MacExistsException:
        # MAC address already in use.
        flash("Die MAC-Adresse ist bereits in Verwendung.", 'error')
        session.session.rollback()
    except SubnetFullException:
        # IP subnet is exhausted.
        flash("Das IP-Subnetz ist voll.", 'error')
        session.session.rollback()
    except MulticastFlagException:
        # MAC address has the multicast bit set.
        flash("Die MAC-Adresse enthält ein aktives Multicast-Bit.", 'error')
        session.session.rollback()
    except InvalidMACAddressException:
        # Malformed MAC address.
        flash("Die MAC-Adresse ist ungültig.", 'error')
        session.session.rollback()
    except InternalError as e:
        # Special case: Username already taken in Abe
        # Should be removed after migration
        if "Username already taken in Abe!" in str(e):
            flash("Dieser Benutzername wird bereits in Abe verwendet.", 'error')
            session.session.rollback()
        else:
            # Unknown database error: propagate to the caller.
            raise e
    except Exception as e:
        # Catch-all boundary: surface the error to the user, keep a trace.
        traceback.print_exc()
        flash("Es ist ein unerwarteter Fehler aufgetreten: {}".format(e), "error")
        session.session.rollback()
    return None, False
<commit_msg>Remove “username taken in abe” handling<commit_after>
|
import traceback
from flask import flash
from pycroft.lib.net import MacExistsException, SubnetFullException
from pycroft.model import session
from pycroft.model.host import MulticastFlagException
from pycroft.model.types import InvalidMACAddressException
def web_execute(function, success_message, *args, **kwargs):
    """Execute *function* and translate known exceptions into flash messages.

    Returns ``(result, True)`` on success (flashing *success_message* if
    given), or ``(None, False)`` after any handled failure.  The database
    session is rolled back on every handled failure path.
    """
    try:
        result = function(*args, **kwargs)
        if success_message:
            flash(success_message, 'success')
        return result, True
    except MacExistsException:
        # MAC address already in use.
        flash("Die MAC-Adresse ist bereits in Verwendung.", 'error')
        session.session.rollback()
    except SubnetFullException:
        # IP subnet is exhausted.
        flash("Das IP-Subnetz ist voll.", 'error')
        session.session.rollback()
    except MulticastFlagException:
        # MAC address has the multicast bit set.
        flash("Die MAC-Adresse enthält ein aktives Multicast-Bit.", 'error')
        session.session.rollback()
    except InvalidMACAddressException:
        # Malformed MAC address.
        flash("Die MAC-Adresse ist ungültig.", 'error')
        session.session.rollback()
    except Exception as e:
        # Catch-all boundary: surface the error to the user, keep a trace.
        traceback.print_exc()
        flash("Es ist ein unerwarteter Fehler aufgetreten: {}".format(e), "error")
        session.session.rollback()
    return None, False
|
import traceback
from flask import flash
from sqlalchemy.exc import InternalError
from pycroft.helpers import AutoNumber
from pycroft.model import session
from pycroft.lib.net import MacExistsException, SubnetFullException
from pycroft.model.host import MulticastFlagException
from pycroft.model.types import InvalidMACAddressException
def web_execute(function, success_message, *args, **kwargs):
    """Execute *function* and translate known exceptions into flash messages.

    Returns ``(result, True)`` on success (flashing *success_message* if
    given), or ``(None, False)`` after any handled failure.  The database
    session is rolled back on every handled failure path.
    """
    try:
        result = function(*args, **kwargs)
        if success_message:
            flash(success_message, 'success')
        return result, True
    except MacExistsException:
        # MAC address already in use.
        flash("Die MAC-Adresse ist bereits in Verwendung.", 'error')
        session.session.rollback()
    except SubnetFullException:
        # IP subnet is exhausted.
        flash("Das IP-Subnetz ist voll.", 'error')
        session.session.rollback()
    except MulticastFlagException:
        # MAC address has the multicast bit set.
        flash("Die MAC-Adresse enthält ein aktives Multicast-Bit.", 'error')
        session.session.rollback()
    except InvalidMACAddressException:
        # Malformed MAC address.
        flash("Die MAC-Adresse ist ungültig.", 'error')
        session.session.rollback()
    except InternalError as e:
        # Special case: Username already taken in Abe
        # Should be removed after migration
        if "Username already taken in Abe!" in str(e):
            flash("Dieser Benutzername wird bereits in Abe verwendet.", 'error')
            session.session.rollback()
        else:
            # Unknown database error: propagate to the caller.
            raise e
    except Exception as e:
        # Catch-all boundary: surface the error to the user, keep a trace.
        traceback.print_exc()
        flash("Es ist ein unerwarteter Fehler aufgetreten: {}".format(e), "error")
        session.session.rollback()
    return None, False
Remove “username taken in abe” handlingimport traceback
from flask import flash
from pycroft.lib.net import MacExistsException, SubnetFullException
from pycroft.model import session
from pycroft.model.host import MulticastFlagException
from pycroft.model.types import InvalidMACAddressException
def web_execute(function, success_message, *args, **kwargs):
    """Execute *function* and translate known exceptions into flash messages.

    Returns ``(result, True)`` on success (flashing *success_message* if
    given), or ``(None, False)`` after any handled failure.  The database
    session is rolled back on every handled failure path.
    """
    try:
        result = function(*args, **kwargs)
        if success_message:
            flash(success_message, 'success')
        return result, True
    except MacExistsException:
        # MAC address already in use.
        flash("Die MAC-Adresse ist bereits in Verwendung.", 'error')
        session.session.rollback()
    except SubnetFullException:
        # IP subnet is exhausted.
        flash("Das IP-Subnetz ist voll.", 'error')
        session.session.rollback()
    except MulticastFlagException:
        # MAC address has the multicast bit set.
        flash("Die MAC-Adresse enthält ein aktives Multicast-Bit.", 'error')
        session.session.rollback()
    except InvalidMACAddressException:
        # Malformed MAC address.
        flash("Die MAC-Adresse ist ungültig.", 'error')
        session.session.rollback()
    except Exception as e:
        # Catch-all boundary: surface the error to the user, keep a trace.
        traceback.print_exc()
        flash("Es ist ein unerwarteter Fehler aufgetreten: {}".format(e), "error")
        session.session.rollback()
    return None, False
|
<commit_before>import traceback
from flask import flash
from sqlalchemy.exc import InternalError
from pycroft.helpers import AutoNumber
from pycroft.model import session
from pycroft.lib.net import MacExistsException, SubnetFullException
from pycroft.model.host import MulticastFlagException
from pycroft.model.types import InvalidMACAddressException
def web_execute(function, success_message, *args, **kwargs):
    """Execute *function* and translate known exceptions into flash messages.

    Returns ``(result, True)`` on success (flashing *success_message* if
    given), or ``(None, False)`` after any handled failure.  The database
    session is rolled back on every handled failure path.
    """
    try:
        result = function(*args, **kwargs)
        if success_message:
            flash(success_message, 'success')
        return result, True
    except MacExistsException:
        # MAC address already in use.
        flash("Die MAC-Adresse ist bereits in Verwendung.", 'error')
        session.session.rollback()
    except SubnetFullException:
        # IP subnet is exhausted.
        flash("Das IP-Subnetz ist voll.", 'error')
        session.session.rollback()
    except MulticastFlagException:
        # MAC address has the multicast bit set.
        flash("Die MAC-Adresse enthält ein aktives Multicast-Bit.", 'error')
        session.session.rollback()
    except InvalidMACAddressException:
        # Malformed MAC address.
        flash("Die MAC-Adresse ist ungültig.", 'error')
        session.session.rollback()
    except InternalError as e:
        # Special case: Username already taken in Abe
        # Should be removed after migration
        if "Username already taken in Abe!" in str(e):
            flash("Dieser Benutzername wird bereits in Abe verwendet.", 'error')
            session.session.rollback()
        else:
            # Unknown database error: propagate to the caller.
            raise e
    except Exception as e:
        # Catch-all boundary: surface the error to the user, keep a trace.
        traceback.print_exc()
        flash("Es ist ein unerwarteter Fehler aufgetreten: {}".format(e), "error")
        session.session.rollback()
    return None, False
<commit_msg>Remove “username taken in abe” handling<commit_after>import traceback
from flask import flash
from pycroft.lib.net import MacExistsException, SubnetFullException
from pycroft.model import session
from pycroft.model.host import MulticastFlagException
from pycroft.model.types import InvalidMACAddressException
def web_execute(function, success_message, *args, **kwargs):
    """Execute *function* and translate known exceptions into flash messages.

    Returns ``(result, True)`` on success (flashing *success_message* if
    given), or ``(None, False)`` after any handled failure.  The database
    session is rolled back on every handled failure path.
    """
    try:
        result = function(*args, **kwargs)
        if success_message:
            flash(success_message, 'success')
        return result, True
    except MacExistsException:
        # MAC address already in use.
        flash("Die MAC-Adresse ist bereits in Verwendung.", 'error')
        session.session.rollback()
    except SubnetFullException:
        # IP subnet is exhausted.
        flash("Das IP-Subnetz ist voll.", 'error')
        session.session.rollback()
    except MulticastFlagException:
        # MAC address has the multicast bit set.
        flash("Die MAC-Adresse enthält ein aktives Multicast-Bit.", 'error')
        session.session.rollback()
    except InvalidMACAddressException:
        # Malformed MAC address.
        flash("Die MAC-Adresse ist ungültig.", 'error')
        session.session.rollback()
    except Exception as e:
        # Catch-all boundary: surface the error to the user, keep a trace.
        traceback.print_exc()
        flash("Es ist ein unerwarteter Fehler aufgetreten: {}".format(e), "error")
        session.session.rollback()
    return None, False
|
06a70ae323f0eb1fe50c1f01a31ef9548a24b00c
|
tests/test_favicons.py
|
tests/test_favicons.py
|
from django.test import TestCase
from mock import patch
from feedhq.feeds.models import Favicon, Feed
from .factories import FeedFactory
from . import responses
class FaviconTests(TestCase):
    """Tests for linking stored favicons to feeds."""

    @patch("requests.get")
    def test_existing_favicon_new_feed(self, get):
        """An existing Favicon entry is linked to a newly created feed."""
        # 304 means the favicon fetch is short-circuited: nothing downloaded.
        get.return_value = responses(304)
        FeedFactory.create(url='http://example.com/feed')
        # Freshly created feed starts with no favicon.
        self.assertEqual(Feed.objects.values_list('favicon', flat=True)[0], '')
        # Simulate a 1st call of update_favicon which creates a Favicon entry
        Favicon.objects.create(url='http://example.com/feed',
                               favicon='favicons/example.com.ico')
        Favicon.objects.update_favicon('http://example.com/feed')
        # The stored favicon is now associated with the feed.
        self.assertEqual(Feed.objects.values_list('favicon', flat=True)[0],
                         'favicons/example.com.ico')
|
from mock import patch
from feedhq.feeds.models import Favicon, Feed
from .factories import FeedFactory
from . import responses, TestCase
class FaviconTests(TestCase):
    """Tests for linking stored favicons to feeds."""

    @patch("requests.get")
    def test_existing_favicon_new_feed(self, get):
        """An existing Favicon entry is linked to a newly created feed."""
        # 304 means the favicon fetch is short-circuited: nothing downloaded.
        get.return_value = responses(304)
        FeedFactory.create(url='http://example.com/feed')
        # Freshly created feed starts with no favicon.
        self.assertEqual(Feed.objects.values_list('favicon', flat=True)[0], '')
        # Simulate a 1st call of update_favicon which creates a Favicon entry
        Favicon.objects.create(url='http://example.com/feed',
                               favicon='favicons/example.com.ico')
        Favicon.objects.update_favicon('http://example.com/feed')
        # The stored favicon is now associated with the feed.
        self.assertEqual(Feed.objects.values_list('favicon', flat=True)[0],
                         'favicons/example.com.ico')
|
Use base TestCase to properly cleanup indices
|
Use base TestCase to properly cleanup indices
|
Python
|
bsd-3-clause
|
rmoorman/feedhq,feedhq/feedhq,rmoorman/feedhq,feedhq/feedhq,rmoorman/feedhq,rmoorman/feedhq,rmoorman/feedhq,vincentbernat/feedhq,feedhq/feedhq,vincentbernat/feedhq,feedhq/feedhq,vincentbernat/feedhq,feedhq/feedhq,vincentbernat/feedhq,vincentbernat/feedhq
|
from django.test import TestCase
from mock import patch
from feedhq.feeds.models import Favicon, Feed
from .factories import FeedFactory
from . import responses
class FaviconTests(TestCase):
    """Tests for linking stored favicons to feeds."""

    @patch("requests.get")
    def test_existing_favicon_new_feed(self, get):
        """An existing Favicon entry is linked to a newly created feed."""
        # 304 means the favicon fetch is short-circuited: nothing downloaded.
        get.return_value = responses(304)
        FeedFactory.create(url='http://example.com/feed')
        # Freshly created feed starts with no favicon.
        self.assertEqual(Feed.objects.values_list('favicon', flat=True)[0], '')
        # Simulate a 1st call of update_favicon which creates a Favicon entry
        Favicon.objects.create(url='http://example.com/feed',
                               favicon='favicons/example.com.ico')
        Favicon.objects.update_favicon('http://example.com/feed')
        # The stored favicon is now associated with the feed.
        self.assertEqual(Feed.objects.values_list('favicon', flat=True)[0],
                         'favicons/example.com.ico')
Use base TestCase to properly cleanup indices
|
from mock import patch
from feedhq.feeds.models import Favicon, Feed
from .factories import FeedFactory
from . import responses, TestCase
class FaviconTests(TestCase):
    """Tests for linking stored favicons to feeds."""

    @patch("requests.get")
    def test_existing_favicon_new_feed(self, get):
        """An existing Favicon entry is linked to a newly created feed."""
        # 304 means the favicon fetch is short-circuited: nothing downloaded.
        get.return_value = responses(304)
        FeedFactory.create(url='http://example.com/feed')
        # Freshly created feed starts with no favicon.
        self.assertEqual(Feed.objects.values_list('favicon', flat=True)[0], '')
        # Simulate a 1st call of update_favicon which creates a Favicon entry
        Favicon.objects.create(url='http://example.com/feed',
                               favicon='favicons/example.com.ico')
        Favicon.objects.update_favicon('http://example.com/feed')
        # The stored favicon is now associated with the feed.
        self.assertEqual(Feed.objects.values_list('favicon', flat=True)[0],
                         'favicons/example.com.ico')
|
<commit_before>from django.test import TestCase
from mock import patch
from feedhq.feeds.models import Favicon, Feed
from .factories import FeedFactory
from . import responses
class FaviconTests(TestCase):
    """Tests for linking stored favicons to feeds."""

    @patch("requests.get")
    def test_existing_favicon_new_feed(self, get):
        """An existing Favicon entry is linked to a newly created feed."""
        # 304 means the favicon fetch is short-circuited: nothing downloaded.
        get.return_value = responses(304)
        FeedFactory.create(url='http://example.com/feed')
        # Freshly created feed starts with no favicon.
        self.assertEqual(Feed.objects.values_list('favicon', flat=True)[0], '')
        # Simulate a 1st call of update_favicon which creates a Favicon entry
        Favicon.objects.create(url='http://example.com/feed',
                               favicon='favicons/example.com.ico')
        Favicon.objects.update_favicon('http://example.com/feed')
        # The stored favicon is now associated with the feed.
        self.assertEqual(Feed.objects.values_list('favicon', flat=True)[0],
                         'favicons/example.com.ico')
<commit_msg>Use base TestCase to properly cleanup indices<commit_after>
|
from mock import patch
from feedhq.feeds.models import Favicon, Feed
from .factories import FeedFactory
from . import responses, TestCase
class FaviconTests(TestCase):
    """Tests for linking stored favicons to feeds."""

    @patch("requests.get")
    def test_existing_favicon_new_feed(self, get):
        """An existing Favicon entry is linked to a newly created feed."""
        # 304 means the favicon fetch is short-circuited: nothing downloaded.
        get.return_value = responses(304)
        FeedFactory.create(url='http://example.com/feed')
        # Freshly created feed starts with no favicon.
        self.assertEqual(Feed.objects.values_list('favicon', flat=True)[0], '')
        # Simulate a 1st call of update_favicon which creates a Favicon entry
        Favicon.objects.create(url='http://example.com/feed',
                               favicon='favicons/example.com.ico')
        Favicon.objects.update_favicon('http://example.com/feed')
        # The stored favicon is now associated with the feed.
        self.assertEqual(Feed.objects.values_list('favicon', flat=True)[0],
                         'favicons/example.com.ico')
|
from django.test import TestCase
from mock import patch
from feedhq.feeds.models import Favicon, Feed
from .factories import FeedFactory
from . import responses
class FaviconTests(TestCase):
    """Tests for linking stored favicons to feeds."""

    @patch("requests.get")
    def test_existing_favicon_new_feed(self, get):
        """An existing Favicon entry is linked to a newly created feed."""
        # 304 means the favicon fetch is short-circuited: nothing downloaded.
        get.return_value = responses(304)
        FeedFactory.create(url='http://example.com/feed')
        # Freshly created feed starts with no favicon.
        self.assertEqual(Feed.objects.values_list('favicon', flat=True)[0], '')
        # Simulate a 1st call of update_favicon which creates a Favicon entry
        Favicon.objects.create(url='http://example.com/feed',
                               favicon='favicons/example.com.ico')
        Favicon.objects.update_favicon('http://example.com/feed')
        # The stored favicon is now associated with the feed.
        self.assertEqual(Feed.objects.values_list('favicon', flat=True)[0],
                         'favicons/example.com.ico')
Use base TestCase to properly cleanup indicesfrom mock import patch
from feedhq.feeds.models import Favicon, Feed
from .factories import FeedFactory
from . import responses, TestCase
class FaviconTests(TestCase):
    """Tests for linking stored favicons to feeds."""

    @patch("requests.get")
    def test_existing_favicon_new_feed(self, get):
        """An existing Favicon entry is linked to a newly created feed."""
        # 304 means the favicon fetch is short-circuited: nothing downloaded.
        get.return_value = responses(304)
        FeedFactory.create(url='http://example.com/feed')
        # Freshly created feed starts with no favicon.
        self.assertEqual(Feed.objects.values_list('favicon', flat=True)[0], '')
        # Simulate a 1st call of update_favicon which creates a Favicon entry
        Favicon.objects.create(url='http://example.com/feed',
                               favicon='favicons/example.com.ico')
        Favicon.objects.update_favicon('http://example.com/feed')
        # The stored favicon is now associated with the feed.
        self.assertEqual(Feed.objects.values_list('favicon', flat=True)[0],
                         'favicons/example.com.ico')
|
<commit_before>from django.test import TestCase
from mock import patch
from feedhq.feeds.models import Favicon, Feed
from .factories import FeedFactory
from . import responses
class FaviconTests(TestCase):
    """Tests for linking stored favicons to feeds."""

    @patch("requests.get")
    def test_existing_favicon_new_feed(self, get):
        """An existing Favicon entry is linked to a newly created feed."""
        # 304 means the favicon fetch is short-circuited: nothing downloaded.
        get.return_value = responses(304)
        FeedFactory.create(url='http://example.com/feed')
        # Freshly created feed starts with no favicon.
        self.assertEqual(Feed.objects.values_list('favicon', flat=True)[0], '')
        # Simulate a 1st call of update_favicon which creates a Favicon entry
        Favicon.objects.create(url='http://example.com/feed',
                               favicon='favicons/example.com.ico')
        Favicon.objects.update_favicon('http://example.com/feed')
        # The stored favicon is now associated with the feed.
        self.assertEqual(Feed.objects.values_list('favicon', flat=True)[0],
                         'favicons/example.com.ico')
<commit_msg>Use base TestCase to properly cleanup indices<commit_after>from mock import patch
from feedhq.feeds.models import Favicon, Feed
from .factories import FeedFactory
from . import responses, TestCase
class FaviconTests(TestCase):
    """Tests for linking stored favicons to feeds."""

    @patch("requests.get")
    def test_existing_favicon_new_feed(self, get):
        """An existing Favicon entry is linked to a newly created feed."""
        # 304 means the favicon fetch is short-circuited: nothing downloaded.
        get.return_value = responses(304)
        FeedFactory.create(url='http://example.com/feed')
        # Freshly created feed starts with no favicon.
        self.assertEqual(Feed.objects.values_list('favicon', flat=True)[0], '')
        # Simulate a 1st call of update_favicon which creates a Favicon entry
        Favicon.objects.create(url='http://example.com/feed',
                               favicon='favicons/example.com.ico')
        Favicon.objects.update_favicon('http://example.com/feed')
        # The stored favicon is now associated with the feed.
        self.assertEqual(Feed.objects.values_list('favicon', flat=True)[0],
                         'favicons/example.com.ico')
|
f089a7828ac2eb42b437166880eef6fea102a8e1
|
speech/google/cloud/speech/__init__.py
|
speech/google/cloud/speech/__init__.py
|
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Cloud Speech API wrapper."""
from google.cloud.speech.client import Client
from google.cloud.speech.connection import Connection
|
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Cloud Speech API wrapper."""
from google.cloud.speech.client import Client
from google.cloud.speech.connection import Connection
from google.cloud.speech.encoding import Encoding
|
Make Encoding accessible from speech.Encoding.
|
Make Encoding accessible from speech.Encoding.
|
Python
|
apache-2.0
|
jonparrott/google-cloud-python,dhermes/gcloud-python,tswast/google-cloud-python,tseaver/google-cloud-python,googleapis/google-cloud-python,daspecster/google-cloud-python,Fkawala/gcloud-python,Fkawala/gcloud-python,dhermes/google-cloud-python,dhermes/gcloud-python,GoogleCloudPlatform/gcloud-python,dhermes/google-cloud-python,jgeewax/gcloud-python,calpeyser/google-cloud-python,tartavull/google-cloud-python,tseaver/gcloud-python,jonparrott/gcloud-python,daspecster/google-cloud-python,tseaver/google-cloud-python,jonparrott/google-cloud-python,tseaver/gcloud-python,tseaver/google-cloud-python,GoogleCloudPlatform/gcloud-python,tswast/google-cloud-python,calpeyser/google-cloud-python,quom/google-cloud-python,jonparrott/gcloud-python,jgeewax/gcloud-python,dhermes/google-cloud-python,tswast/google-cloud-python,tartavull/google-cloud-python,googleapis/google-cloud-python,quom/google-cloud-python
|
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Cloud Speech API wrapper."""
from google.cloud.speech.client import Client
from google.cloud.speech.connection import Connection
Make Encoding accessible from speech.Encoding.
|
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Cloud Speech API wrapper."""
from google.cloud.speech.client import Client
from google.cloud.speech.connection import Connection
from google.cloud.speech.encoding import Encoding
|
<commit_before># Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Cloud Speech API wrapper."""
from google.cloud.speech.client import Client
from google.cloud.speech.connection import Connection
<commit_msg>Make Encoding accessible from speech.Encoding.<commit_after>
|
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Cloud Speech API wrapper."""
from google.cloud.speech.client import Client
from google.cloud.speech.connection import Connection
from google.cloud.speech.encoding import Encoding
|
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Cloud Speech API wrapper."""
from google.cloud.speech.client import Client
from google.cloud.speech.connection import Connection
Make Encoding accessible from speech.Encoding.# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Cloud Speech API wrapper."""
from google.cloud.speech.client import Client
from google.cloud.speech.connection import Connection
from google.cloud.speech.encoding import Encoding
|
<commit_before># Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Cloud Speech API wrapper."""
from google.cloud.speech.client import Client
from google.cloud.speech.connection import Connection
<commit_msg>Make Encoding accessible from speech.Encoding.<commit_after># Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Cloud Speech API wrapper."""
from google.cloud.speech.client import Client
from google.cloud.speech.connection import Connection
from google.cloud.speech.encoding import Encoding
|
b59f09f02c3f22ba53f08790babd75348153d64b
|
tests/hashes_test.py
|
tests/hashes_test.py
|
from nose.tools import istest, assert_equal
from whack.hashes import Hasher
@istest
def hashing_the_same_single_value_gives_the_same_hash():
def create_hash():
hasher = Hasher()
hasher.update("one")
return hasher.hexdigest()
assert_equal(create_hash(), create_hash())
|
from nose.tools import istest, assert_equal
from whack.hashes import Hasher
@istest
def hashing_the_same_single_value_gives_the_same_hash():
def create_hash():
hasher = Hasher()
hasher.update("one")
return hasher.hexdigest()
assert_equal(create_hash(), create_hash())
@istest
def hashing_multiple_values_in_the_same_order_gives_the_same_hash():
def create_hash():
hasher = Hasher()
hasher.update("one")
hasher.update("two")
return hasher.hexdigest()
assert_equal(create_hash(), create_hash())
|
Add test for hashing multiple values
|
Add test for hashing multiple values
|
Python
|
bsd-2-clause
|
mwilliamson/whack
|
from nose.tools import istest, assert_equal
from whack.hashes import Hasher
@istest
def hashing_the_same_single_value_gives_the_same_hash():
def create_hash():
hasher = Hasher()
hasher.update("one")
return hasher.hexdigest()
assert_equal(create_hash(), create_hash())
Add test for hashing multiple values
|
from nose.tools import istest, assert_equal
from whack.hashes import Hasher
@istest
def hashing_the_same_single_value_gives_the_same_hash():
def create_hash():
hasher = Hasher()
hasher.update("one")
return hasher.hexdigest()
assert_equal(create_hash(), create_hash())
@istest
def hashing_multiple_values_in_the_same_order_gives_the_same_hash():
def create_hash():
hasher = Hasher()
hasher.update("one")
hasher.update("two")
return hasher.hexdigest()
assert_equal(create_hash(), create_hash())
|
<commit_before>from nose.tools import istest, assert_equal
from whack.hashes import Hasher
@istest
def hashing_the_same_single_value_gives_the_same_hash():
def create_hash():
hasher = Hasher()
hasher.update("one")
return hasher.hexdigest()
assert_equal(create_hash(), create_hash())
<commit_msg>Add test for hashing multiple values<commit_after>
|
from nose.tools import istest, assert_equal
from whack.hashes import Hasher
@istest
def hashing_the_same_single_value_gives_the_same_hash():
def create_hash():
hasher = Hasher()
hasher.update("one")
return hasher.hexdigest()
assert_equal(create_hash(), create_hash())
@istest
def hashing_multiple_values_in_the_same_order_gives_the_same_hash():
def create_hash():
hasher = Hasher()
hasher.update("one")
hasher.update("two")
return hasher.hexdigest()
assert_equal(create_hash(), create_hash())
|
from nose.tools import istest, assert_equal
from whack.hashes import Hasher
@istest
def hashing_the_same_single_value_gives_the_same_hash():
def create_hash():
hasher = Hasher()
hasher.update("one")
return hasher.hexdigest()
assert_equal(create_hash(), create_hash())
Add test for hashing multiple valuesfrom nose.tools import istest, assert_equal
from whack.hashes import Hasher
@istest
def hashing_the_same_single_value_gives_the_same_hash():
def create_hash():
hasher = Hasher()
hasher.update("one")
return hasher.hexdigest()
assert_equal(create_hash(), create_hash())
@istest
def hashing_multiple_values_in_the_same_order_gives_the_same_hash():
def create_hash():
hasher = Hasher()
hasher.update("one")
hasher.update("two")
return hasher.hexdigest()
assert_equal(create_hash(), create_hash())
|
<commit_before>from nose.tools import istest, assert_equal
from whack.hashes import Hasher
@istest
def hashing_the_same_single_value_gives_the_same_hash():
def create_hash():
hasher = Hasher()
hasher.update("one")
return hasher.hexdigest()
assert_equal(create_hash(), create_hash())
<commit_msg>Add test for hashing multiple values<commit_after>from nose.tools import istest, assert_equal
from whack.hashes import Hasher
@istest
def hashing_the_same_single_value_gives_the_same_hash():
def create_hash():
hasher = Hasher()
hasher.update("one")
return hasher.hexdigest()
assert_equal(create_hash(), create_hash())
@istest
def hashing_multiple_values_in_the_same_order_gives_the_same_hash():
def create_hash():
hasher = Hasher()
hasher.update("one")
hasher.update("two")
return hasher.hexdigest()
assert_equal(create_hash(), create_hash())
|
39c5decd98e8d4feb6c1bbfa487faf35396c8b12
|
logdna/__init__.py
|
logdna/__init__.py
|
from .logdna import LogDNAHandler
__all__ = ['LogDNAHandler']
|
from .logdna import LogDNAHandler
__all__ = ['LogDNAHandler']
# Publish this class to the "logging.handlers" module so that it can be use
# from a logging config file via logging.config.fileConfig().
import logging.handlers
logging.handlers.LogDNAHandler = LogDNAHandler
|
Make available via config file
|
feat(handlers): Make available via config file
- Add to `logging.handlers` such that LogDNAHandler can be configured
via a logging config file
|
Python
|
mit
|
logdna/python
|
from .logdna import LogDNAHandler
__all__ = ['LogDNAHandler']
feat(handlers): Make available via config file
- Add to `logging.handlers` such that LogDNAHandler can be configured
via a logging config file
|
from .logdna import LogDNAHandler
__all__ = ['LogDNAHandler']
# Publish this class to the "logging.handlers" module so that it can be use
# from a logging config file via logging.config.fileConfig().
import logging.handlers
logging.handlers.LogDNAHandler = LogDNAHandler
|
<commit_before>from .logdna import LogDNAHandler
__all__ = ['LogDNAHandler']
<commit_msg>feat(handlers): Make available via config file
- Add to `logging.handlers` such that LogDNAHandler can be configured
via a logging config file<commit_after>
|
from .logdna import LogDNAHandler
__all__ = ['LogDNAHandler']
# Publish this class to the "logging.handlers" module so that it can be use
# from a logging config file via logging.config.fileConfig().
import logging.handlers
logging.handlers.LogDNAHandler = LogDNAHandler
|
from .logdna import LogDNAHandler
__all__ = ['LogDNAHandler']
feat(handlers): Make available via config file
- Add to `logging.handlers` such that LogDNAHandler can be configured
via a logging config filefrom .logdna import LogDNAHandler
__all__ = ['LogDNAHandler']
# Publish this class to the "logging.handlers" module so that it can be use
# from a logging config file via logging.config.fileConfig().
import logging.handlers
logging.handlers.LogDNAHandler = LogDNAHandler
|
<commit_before>from .logdna import LogDNAHandler
__all__ = ['LogDNAHandler']
<commit_msg>feat(handlers): Make available via config file
- Add to `logging.handlers` such that LogDNAHandler can be configured
via a logging config file<commit_after>from .logdna import LogDNAHandler
__all__ = ['LogDNAHandler']
# Publish this class to the "logging.handlers" module so that it can be use
# from a logging config file via logging.config.fileConfig().
import logging.handlers
logging.handlers.LogDNAHandler = LogDNAHandler
|
2096f7f2a840d4b506c2493179408903dd045d21
|
golang/main.py
|
golang/main.py
|
from evolution_master.runners import pkg, download
# Install for Arch
with pkg.pacman() as pkg_man:
pkg_man.install('go')
# Install for Debian & Ubuntu
with pkg.apt() as pkg_man:
pkg_man.install('golang')
# Install for OSX
with pkg.brew() as pkg_man:
pkg_man.install('go')
# Install for Windows
with download.https() as downloader, pkg.msiexec() as installer:
downloader.get('https://storage.googleapis.com/golang/go1.5.1.windows-amd64.msi')
downloader.checksum('sha1', '0a439f49b546b82f85adf84a79bbf40de2b3d5ba')
installer.install_flags('/qn' '/norestart')
installer.await(downloader.finished())
|
from evolution_master.runners import pkg, download
# Install for Arch
with pkg.pacman() as pkg_man:
pkg_man.install('go')
# Install for Debian & Ubuntu
with pkg.apt() as pkg_man:
pkg_man.install('golang')
# TODO: make this a runner and require a switch to enable this
pkg_man.install('golang-go-darwin-amd64',
'golang-go-freebsd-amd64',
'golang-go-netbsd-amd64',
'golang-go-windows-amd64')
# Install for OSX
with pkg.brew() as pkg_man:
pkg_man.install('go')
# Install for Windows
with download.https() as downloader, pkg.msiexec() as installer:
downloader.get('https://storage.googleapis.com/golang/go1.5.1.windows-amd64.msi')
downloader.checksum('sha1', '0a439f49b546b82f85adf84a79bbf40de2b3d5ba')
installer.install_flags('/qn' '/norestart')
installer.await(downloader.finished())
|
Add cross compile for debian
|
Add cross compile for debian
|
Python
|
mit
|
hatchery/genepool,hatchery/Genepool2
|
from evolution_master.runners import pkg, download
# Install for Arch
with pkg.pacman() as pkg_man:
pkg_man.install('go')
# Install for Debian & Ubuntu
with pkg.apt() as pkg_man:
pkg_man.install('golang')
# Install for OSX
with pkg.brew() as pkg_man:
pkg_man.install('go')
# Install for Windows
with download.https() as downloader, pkg.msiexec() as installer:
downloader.get('https://storage.googleapis.com/golang/go1.5.1.windows-amd64.msi')
downloader.checksum('sha1', '0a439f49b546b82f85adf84a79bbf40de2b3d5ba')
installer.install_flags('/qn' '/norestart')
installer.await(downloader.finished())
Add cross compile for debian
|
from evolution_master.runners import pkg, download
# Install for Arch
with pkg.pacman() as pkg_man:
pkg_man.install('go')
# Install for Debian & Ubuntu
with pkg.apt() as pkg_man:
pkg_man.install('golang')
# TODO: make this a runner and require a switch to enable this
pkg_man.install('golang-go-darwin-amd64',
'golang-go-freebsd-amd64',
'golang-go-netbsd-amd64',
'golang-go-windows-amd64')
# Install for OSX
with pkg.brew() as pkg_man:
pkg_man.install('go')
# Install for Windows
with download.https() as downloader, pkg.msiexec() as installer:
downloader.get('https://storage.googleapis.com/golang/go1.5.1.windows-amd64.msi')
downloader.checksum('sha1', '0a439f49b546b82f85adf84a79bbf40de2b3d5ba')
installer.install_flags('/qn' '/norestart')
installer.await(downloader.finished())
|
<commit_before>from evolution_master.runners import pkg, download
# Install for Arch
with pkg.pacman() as pkg_man:
pkg_man.install('go')
# Install for Debian & Ubuntu
with pkg.apt() as pkg_man:
pkg_man.install('golang')
# Install for OSX
with pkg.brew() as pkg_man:
pkg_man.install('go')
# Install for Windows
with download.https() as downloader, pkg.msiexec() as installer:
downloader.get('https://storage.googleapis.com/golang/go1.5.1.windows-amd64.msi')
downloader.checksum('sha1', '0a439f49b546b82f85adf84a79bbf40de2b3d5ba')
installer.install_flags('/qn' '/norestart')
installer.await(downloader.finished())
<commit_msg>Add cross compile for debian<commit_after>
|
from evolution_master.runners import pkg, download
# Install for Arch
with pkg.pacman() as pkg_man:
pkg_man.install('go')
# Install for Debian & Ubuntu
with pkg.apt() as pkg_man:
pkg_man.install('golang')
# TODO: make this a runner and require a switch to enable this
pkg_man.install('golang-go-darwin-amd64',
'golang-go-freebsd-amd64',
'golang-go-netbsd-amd64',
'golang-go-windows-amd64')
# Install for OSX
with pkg.brew() as pkg_man:
pkg_man.install('go')
# Install for Windows
with download.https() as downloader, pkg.msiexec() as installer:
downloader.get('https://storage.googleapis.com/golang/go1.5.1.windows-amd64.msi')
downloader.checksum('sha1', '0a439f49b546b82f85adf84a79bbf40de2b3d5ba')
installer.install_flags('/qn' '/norestart')
installer.await(downloader.finished())
|
from evolution_master.runners import pkg, download
# Install for Arch
with pkg.pacman() as pkg_man:
pkg_man.install('go')
# Install for Debian & Ubuntu
with pkg.apt() as pkg_man:
pkg_man.install('golang')
# Install for OSX
with pkg.brew() as pkg_man:
pkg_man.install('go')
# Install for Windows
with download.https() as downloader, pkg.msiexec() as installer:
downloader.get('https://storage.googleapis.com/golang/go1.5.1.windows-amd64.msi')
downloader.checksum('sha1', '0a439f49b546b82f85adf84a79bbf40de2b3d5ba')
installer.install_flags('/qn' '/norestart')
installer.await(downloader.finished())
Add cross compile for debianfrom evolution_master.runners import pkg, download
# Install for Arch
with pkg.pacman() as pkg_man:
pkg_man.install('go')
# Install for Debian & Ubuntu
with pkg.apt() as pkg_man:
pkg_man.install('golang')
# TODO: make this a runner and require a switch to enable this
pkg_man.install('golang-go-darwin-amd64',
'golang-go-freebsd-amd64',
'golang-go-netbsd-amd64',
'golang-go-windows-amd64')
# Install for OSX
with pkg.brew() as pkg_man:
pkg_man.install('go')
# Install for Windows
with download.https() as downloader, pkg.msiexec() as installer:
downloader.get('https://storage.googleapis.com/golang/go1.5.1.windows-amd64.msi')
downloader.checksum('sha1', '0a439f49b546b82f85adf84a79bbf40de2b3d5ba')
installer.install_flags('/qn' '/norestart')
installer.await(downloader.finished())
|
<commit_before>from evolution_master.runners import pkg, download
# Install for Arch
with pkg.pacman() as pkg_man:
pkg_man.install('go')
# Install for Debian & Ubuntu
with pkg.apt() as pkg_man:
pkg_man.install('golang')
# Install for OSX
with pkg.brew() as pkg_man:
pkg_man.install('go')
# Install for Windows
with download.https() as downloader, pkg.msiexec() as installer:
downloader.get('https://storage.googleapis.com/golang/go1.5.1.windows-amd64.msi')
downloader.checksum('sha1', '0a439f49b546b82f85adf84a79bbf40de2b3d5ba')
installer.install_flags('/qn' '/norestart')
installer.await(downloader.finished())
<commit_msg>Add cross compile for debian<commit_after>from evolution_master.runners import pkg, download
# Install for Arch
with pkg.pacman() as pkg_man:
pkg_man.install('go')
# Install for Debian & Ubuntu
with pkg.apt() as pkg_man:
pkg_man.install('golang')
# TODO: make this a runner and require a switch to enable this
pkg_man.install('golang-go-darwin-amd64',
'golang-go-freebsd-amd64',
'golang-go-netbsd-amd64',
'golang-go-windows-amd64')
# Install for OSX
with pkg.brew() as pkg_man:
pkg_man.install('go')
# Install for Windows
with download.https() as downloader, pkg.msiexec() as installer:
downloader.get('https://storage.googleapis.com/golang/go1.5.1.windows-amd64.msi')
downloader.checksum('sha1', '0a439f49b546b82f85adf84a79bbf40de2b3d5ba')
installer.install_flags('/qn' '/norestart')
installer.await(downloader.finished())
|
0fad98f84a6d02d4507ff9acb59d4764d4e4fabc
|
rdmo/__init__.py
|
rdmo/__init__.py
|
__title__ = 'rdmo'
__version__ = '1.0.5'
__author__ = 'Jochen Klar'
__email__ = 'jklar@aip.de'
__license__ = 'Apache-2.0'
__copyright__ = 'Copyright 2015-2018 Leibniz Institute for Astrophysics Potsdam (AIP)'
VERSION = __version__
|
__title__ = 'rdmo'
__version__ = '1.0.6'
__author__ = 'Jochen Klar'
__email__ = 'jklar@aip.de'
__license__ = 'Apache-2.0'
__copyright__ = 'Copyright 2015-2018 Leibniz Institute for Astrophysics Potsdam (AIP)'
VERSION = __version__
|
Bump version number to 1.0.6
|
Bump version number to 1.0.6
|
Python
|
apache-2.0
|
rdmorganiser/rdmo,DMPwerkzeug/DMPwerkzeug,DMPwerkzeug/DMPwerkzeug,rdmorganiser/rdmo,rdmorganiser/rdmo,DMPwerkzeug/DMPwerkzeug
|
__title__ = 'rdmo'
__version__ = '1.0.5'
__author__ = 'Jochen Klar'
__email__ = 'jklar@aip.de'
__license__ = 'Apache-2.0'
__copyright__ = 'Copyright 2015-2018 Leibniz Institute for Astrophysics Potsdam (AIP)'
VERSION = __version__
Bump version number to 1.0.6
|
__title__ = 'rdmo'
__version__ = '1.0.6'
__author__ = 'Jochen Klar'
__email__ = 'jklar@aip.de'
__license__ = 'Apache-2.0'
__copyright__ = 'Copyright 2015-2018 Leibniz Institute for Astrophysics Potsdam (AIP)'
VERSION = __version__
|
<commit_before>__title__ = 'rdmo'
__version__ = '1.0.5'
__author__ = 'Jochen Klar'
__email__ = 'jklar@aip.de'
__license__ = 'Apache-2.0'
__copyright__ = 'Copyright 2015-2018 Leibniz Institute for Astrophysics Potsdam (AIP)'
VERSION = __version__
<commit_msg>Bump version number to 1.0.6<commit_after>
|
__title__ = 'rdmo'
__version__ = '1.0.6'
__author__ = 'Jochen Klar'
__email__ = 'jklar@aip.de'
__license__ = 'Apache-2.0'
__copyright__ = 'Copyright 2015-2018 Leibniz Institute for Astrophysics Potsdam (AIP)'
VERSION = __version__
|
__title__ = 'rdmo'
__version__ = '1.0.5'
__author__ = 'Jochen Klar'
__email__ = 'jklar@aip.de'
__license__ = 'Apache-2.0'
__copyright__ = 'Copyright 2015-2018 Leibniz Institute for Astrophysics Potsdam (AIP)'
VERSION = __version__
Bump version number to 1.0.6__title__ = 'rdmo'
__version__ = '1.0.6'
__author__ = 'Jochen Klar'
__email__ = 'jklar@aip.de'
__license__ = 'Apache-2.0'
__copyright__ = 'Copyright 2015-2018 Leibniz Institute for Astrophysics Potsdam (AIP)'
VERSION = __version__
|
<commit_before>__title__ = 'rdmo'
__version__ = '1.0.5'
__author__ = 'Jochen Klar'
__email__ = 'jklar@aip.de'
__license__ = 'Apache-2.0'
__copyright__ = 'Copyright 2015-2018 Leibniz Institute for Astrophysics Potsdam (AIP)'
VERSION = __version__
<commit_msg>Bump version number to 1.0.6<commit_after>__title__ = 'rdmo'
__version__ = '1.0.6'
__author__ = 'Jochen Klar'
__email__ = 'jklar@aip.de'
__license__ = 'Apache-2.0'
__copyright__ = 'Copyright 2015-2018 Leibniz Institute for Astrophysics Potsdam (AIP)'
VERSION = __version__
|
aafa37c83c1464c16c2c6b69cc1546a537ec99a3
|
main/forms.py
|
main/forms.py
|
from django import forms
from main.fields import RegexField
class IndexForm(forms.Form):
id_list = forms.CharField(
widget=forms.Textarea, label='ID List',
help_text='List of students IDs to query, one per line.')
student_id_regex = RegexField(
label='Student ID regex',
help_text='Regular expression used to match the student ID in each '
'line. If cannot match (or a student is not found in the '
'database), then the line is left as is.',
initial=r'\b\d{7,}\b',
widget=forms.TextInput(attrs={'placeholder': r'\b\d{7,}\b'}))
|
from django import forms
from main.fields import RegexField
class IndexForm(forms.Form):
id_list = forms.CharField(
help_text='List of students IDs to query, one per line.',
label='ID List',
widget=forms.Textarea(attrs={
'placeholder': 'Random text\n1234567\n7654321'}))
student_id_regex = RegexField(
label='Student ID regex',
help_text='Regular expression used to match the student ID in each '
'line. If cannot match (or a student is not found in the '
'database), then the line is left as is.',
initial=r'\b\d{7,}\b',
widget=forms.TextInput(attrs={'placeholder': r'\b\d{7,}\b'}))
|
Add placeholder to ID list field
|
Add placeholder to ID list field
|
Python
|
mit
|
m4tx/usos-id-mapper,m4tx/usos-id-mapper
|
from django import forms
from main.fields import RegexField
class IndexForm(forms.Form):
id_list = forms.CharField(
widget=forms.Textarea, label='ID List',
help_text='List of students IDs to query, one per line.')
student_id_regex = RegexField(
label='Student ID regex',
help_text='Regular expression used to match the student ID in each '
'line. If cannot match (or a student is not found in the '
'database), then the line is left as is.',
initial=r'\b\d{7,}\b',
widget=forms.TextInput(attrs={'placeholder': r'\b\d{7,}\b'}))
Add placeholder to ID list field
|
from django import forms
from main.fields import RegexField
class IndexForm(forms.Form):
id_list = forms.CharField(
help_text='List of students IDs to query, one per line.',
label='ID List',
widget=forms.Textarea(attrs={
'placeholder': 'Random text\n1234567\n7654321'}))
student_id_regex = RegexField(
label='Student ID regex',
help_text='Regular expression used to match the student ID in each '
'line. If cannot match (or a student is not found in the '
'database), then the line is left as is.',
initial=r'\b\d{7,}\b',
widget=forms.TextInput(attrs={'placeholder': r'\b\d{7,}\b'}))
|
<commit_before>from django import forms
from main.fields import RegexField
class IndexForm(forms.Form):
id_list = forms.CharField(
widget=forms.Textarea, label='ID List',
help_text='List of students IDs to query, one per line.')
student_id_regex = RegexField(
label='Student ID regex',
help_text='Regular expression used to match the student ID in each '
'line. If cannot match (or a student is not found in the '
'database), then the line is left as is.',
initial=r'\b\d{7,}\b',
widget=forms.TextInput(attrs={'placeholder': r'\b\d{7,}\b'}))
<commit_msg>Add placeholder to ID list field<commit_after>
|
from django import forms
from main.fields import RegexField
class IndexForm(forms.Form):
id_list = forms.CharField(
help_text='List of students IDs to query, one per line.',
label='ID List',
widget=forms.Textarea(attrs={
'placeholder': 'Random text\n1234567\n7654321'}))
student_id_regex = RegexField(
label='Student ID regex',
help_text='Regular expression used to match the student ID in each '
'line. If cannot match (or a student is not found in the '
'database), then the line is left as is.',
initial=r'\b\d{7,}\b',
widget=forms.TextInput(attrs={'placeholder': r'\b\d{7,}\b'}))
|
from django import forms
from main.fields import RegexField
class IndexForm(forms.Form):
id_list = forms.CharField(
widget=forms.Textarea, label='ID List',
help_text='List of students IDs to query, one per line.')
student_id_regex = RegexField(
label='Student ID regex',
help_text='Regular expression used to match the student ID in each '
'line. If cannot match (or a student is not found in the '
'database), then the line is left as is.',
initial=r'\b\d{7,}\b',
widget=forms.TextInput(attrs={'placeholder': r'\b\d{7,}\b'}))
Add placeholder to ID list fieldfrom django import forms
from main.fields import RegexField
class IndexForm(forms.Form):
id_list = forms.CharField(
help_text='List of students IDs to query, one per line.',
label='ID List',
widget=forms.Textarea(attrs={
'placeholder': 'Random text\n1234567\n7654321'}))
student_id_regex = RegexField(
label='Student ID regex',
help_text='Regular expression used to match the student ID in each '
'line. If cannot match (or a student is not found in the '
'database), then the line is left as is.',
initial=r'\b\d{7,}\b',
widget=forms.TextInput(attrs={'placeholder': r'\b\d{7,}\b'}))
|
<commit_before>from django import forms
from main.fields import RegexField
class IndexForm(forms.Form):
id_list = forms.CharField(
widget=forms.Textarea, label='ID List',
help_text='List of students IDs to query, one per line.')
student_id_regex = RegexField(
label='Student ID regex',
help_text='Regular expression used to match the student ID in each '
'line. If cannot match (or a student is not found in the '
'database), then the line is left as is.',
initial=r'\b\d{7,}\b',
widget=forms.TextInput(attrs={'placeholder': r'\b\d{7,}\b'}))
<commit_msg>Add placeholder to ID list field<commit_after>from django import forms
from main.fields import RegexField
class IndexForm(forms.Form):
id_list = forms.CharField(
help_text='List of students IDs to query, one per line.',
label='ID List',
widget=forms.Textarea(attrs={
'placeholder': 'Random text\n1234567\n7654321'}))
student_id_regex = RegexField(
label='Student ID regex',
help_text='Regular expression used to match the student ID in each '
'line. If cannot match (or a student is not found in the '
'database), then the line is left as is.',
initial=r'\b\d{7,}\b',
widget=forms.TextInput(attrs={'placeholder': r'\b\d{7,}\b'}))
|
3331ddc41446c1b40cdd6cc7ff3a85af60e7bd5b
|
registry/urls.py
|
registry/urls.py
|
from django.conf.urls import patterns, include, url
from api.views import PackagesListView, PackagesFindView, PackagesSearchView
urlpatterns = patterns('',
url(r'^packages/?$', PackagesListView.as_view(), name='list'),
url(r'^packages/(?P<name>[-\w]+)/?$', PackagesFindView.as_view(), name='find'),
url(r'^packages/search/(?P<name>[-\w]+)/?$', PackagesSearchView.as_view(), name='search')
)
|
from django.conf.urls import patterns, include, url
from api.views import PackagesListView, PackagesFindView, PackagesSearchView
urlpatterns = patterns('',
url(r'^packages/?$', PackagesListView.as_view(), name='list'),
url(r'^packages/(?P<name>[-\w]+)/?$', PackagesFindView.as_view(), name='find'),
url(r'^packages/search/(?P<name>[-\w\.]+)/?$', PackagesSearchView.as_view(), name='search')
)
|
Allow dots in package names.
|
Allow dots in package names.
Packages such as `backbone.wreqr` and `backbone.babysitter` where 404ing fixes #9
|
Python
|
mit
|
toranb/django-bower-registry
|
from django.conf.urls import patterns, include, url
from api.views import PackagesListView, PackagesFindView, PackagesSearchView
urlpatterns = patterns('',
url(r'^packages/?$', PackagesListView.as_view(), name='list'),
url(r'^packages/(?P<name>[-\w]+)/?$', PackagesFindView.as_view(), name='find'),
url(r'^packages/search/(?P<name>[-\w]+)/?$', PackagesSearchView.as_view(), name='search')
)Allow dots in package names.
Packages such as `backbone.wreqr` and `backbone.babysitter` where 404ing fixes #9
|
from django.conf.urls import patterns, include, url
from api.views import PackagesListView, PackagesFindView, PackagesSearchView
urlpatterns = patterns('',
url(r'^packages/?$', PackagesListView.as_view(), name='list'),
url(r'^packages/(?P<name>[-\w]+)/?$', PackagesFindView.as_view(), name='find'),
url(r'^packages/search/(?P<name>[-\w\.]+)/?$', PackagesSearchView.as_view(), name='search')
)
|
<commit_before>from django.conf.urls import patterns, include, url
from api.views import PackagesListView, PackagesFindView, PackagesSearchView
urlpatterns = patterns('',
url(r'^packages/?$', PackagesListView.as_view(), name='list'),
url(r'^packages/(?P<name>[-\w]+)/?$', PackagesFindView.as_view(), name='find'),
url(r'^packages/search/(?P<name>[-\w]+)/?$', PackagesSearchView.as_view(), name='search')
)<commit_msg>Allow dots in package names.
Packages such as `backbone.wreqr` and `backbone.babysitter` where 404ing fixes #9<commit_after>
|
from django.conf.urls import patterns, include, url
from api.views import PackagesListView, PackagesFindView, PackagesSearchView
urlpatterns = patterns('',
url(r'^packages/?$', PackagesListView.as_view(), name='list'),
url(r'^packages/(?P<name>[-\w]+)/?$', PackagesFindView.as_view(), name='find'),
url(r'^packages/search/(?P<name>[-\w\.]+)/?$', PackagesSearchView.as_view(), name='search')
)
|
from django.conf.urls import patterns, include, url
from api.views import PackagesListView, PackagesFindView, PackagesSearchView
urlpatterns = patterns('',
url(r'^packages/?$', PackagesListView.as_view(), name='list'),
url(r'^packages/(?P<name>[-\w]+)/?$', PackagesFindView.as_view(), name='find'),
url(r'^packages/search/(?P<name>[-\w]+)/?$', PackagesSearchView.as_view(), name='search')
)Allow dots in package names.
Packages such as `backbone.wreqr` and `backbone.babysitter` where 404ing fixes #9from django.conf.urls import patterns, include, url
from api.views import PackagesListView, PackagesFindView, PackagesSearchView
urlpatterns = patterns('',
url(r'^packages/?$', PackagesListView.as_view(), name='list'),
url(r'^packages/(?P<name>[-\w]+)/?$', PackagesFindView.as_view(), name='find'),
url(r'^packages/search/(?P<name>[-\w\.]+)/?$', PackagesSearchView.as_view(), name='search')
)
|
<commit_before>from django.conf.urls import patterns, include, url
from api.views import PackagesListView, PackagesFindView, PackagesSearchView
urlpatterns = patterns('',
url(r'^packages/?$', PackagesListView.as_view(), name='list'),
url(r'^packages/(?P<name>[-\w]+)/?$', PackagesFindView.as_view(), name='find'),
url(r'^packages/search/(?P<name>[-\w]+)/?$', PackagesSearchView.as_view(), name='search')
)<commit_msg>Allow dots in package names.
Packages such as `backbone.wreqr` and `backbone.babysitter` where 404ing fixes #9<commit_after>from django.conf.urls import patterns, include, url
from api.views import PackagesListView, PackagesFindView, PackagesSearchView
urlpatterns = patterns('',
url(r'^packages/?$', PackagesListView.as_view(), name='list'),
url(r'^packages/(?P<name>[-\w]+)/?$', PackagesFindView.as_view(), name='find'),
url(r'^packages/search/(?P<name>[-\w\.]+)/?$', PackagesSearchView.as_view(), name='search')
)
|
20801daf2d774f2c976265edc72e436094930589
|
saleor/urls.py
|
saleor/urls.py
|
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib.staticfiles.views import serve
from django.views.decorators.csrf import csrf_exempt
from .graphql.api import schema
from .graphql.views import GraphQLView
from .plugins.views import handle_plugin_webhook
from .product.views import digital_product
urlpatterns = [
url(r"^graphql/", csrf_exempt(GraphQLView.as_view(schema=schema)), name="api"),
url(
r"^digital-download/(?P<token>[0-9A-Za-z_\-]+)/$",
digital_product,
name="digital-product",
),
url(
r"plugins/(?P<plugin_id>[.0-9A-Za-z_\-]+)/",
handle_plugin_webhook,
name="plugins",
),
]
if settings.DEBUG:
import warnings
from .core import views
try:
import debug_toolbar
except ImportError:
warnings.warn(
"The debug toolbar was not installed. Ignore the error. \
settings.py should already have warned the user about it."
)
else:
urlpatterns += [
url(r"^__debug__/", include(debug_toolbar.urls)) # type: ignore
]
urlpatterns += static("/media/", document_root=settings.MEDIA_ROOT) + [
url(r"^static/(?P<path>.*)$", serve),
url(r"^", views.home, name="home"),
]
|
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib.staticfiles.views import serve
from django.views.decorators.csrf import csrf_exempt
from .graphql.api import schema
from .graphql.views import GraphQLView
from .plugins.views import handle_plugin_webhook
from .product.views import digital_product
urlpatterns = [
url(r"^graphql/$", csrf_exempt(GraphQLView.as_view(schema=schema)), name="api"),
url(
r"^digital-download/(?P<token>[0-9A-Za-z_\-]+)/$",
digital_product,
name="digital-product",
),
url(
r"plugins/(?P<plugin_id>[.0-9A-Za-z_\-]+)/",
handle_plugin_webhook,
name="plugins",
),
]
if settings.DEBUG:
import warnings
from .core import views
try:
import debug_toolbar
except ImportError:
warnings.warn(
"The debug toolbar was not installed. Ignore the error. \
settings.py should already have warned the user about it."
)
else:
urlpatterns += [
url(r"^__debug__/", include(debug_toolbar.urls)) # type: ignore
]
urlpatterns += static("/media/", document_root=settings.MEDIA_ROOT) + [
url(r"^static/(?P<path>.*)$", serve),
url(r"^", views.home, name="home"),
]
|
Update url patterns - accept only exact patterns
|
Update url patterns - accept only exact patterns
|
Python
|
bsd-3-clause
|
mociepka/saleor,mociepka/saleor,mociepka/saleor
|
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib.staticfiles.views import serve
from django.views.decorators.csrf import csrf_exempt
from .graphql.api import schema
from .graphql.views import GraphQLView
from .plugins.views import handle_plugin_webhook
from .product.views import digital_product
urlpatterns = [
url(r"^graphql/", csrf_exempt(GraphQLView.as_view(schema=schema)), name="api"),
url(
r"^digital-download/(?P<token>[0-9A-Za-z_\-]+)/$",
digital_product,
name="digital-product",
),
url(
r"plugins/(?P<plugin_id>[.0-9A-Za-z_\-]+)/",
handle_plugin_webhook,
name="plugins",
),
]
if settings.DEBUG:
import warnings
from .core import views
try:
import debug_toolbar
except ImportError:
warnings.warn(
"The debug toolbar was not installed. Ignore the error. \
settings.py should already have warned the user about it."
)
else:
urlpatterns += [
url(r"^__debug__/", include(debug_toolbar.urls)) # type: ignore
]
urlpatterns += static("/media/", document_root=settings.MEDIA_ROOT) + [
url(r"^static/(?P<path>.*)$", serve),
url(r"^", views.home, name="home"),
]
Update url patterns - accept only exact patterns
|
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib.staticfiles.views import serve
from django.views.decorators.csrf import csrf_exempt
from .graphql.api import schema
from .graphql.views import GraphQLView
from .plugins.views import handle_plugin_webhook
from .product.views import digital_product
urlpatterns = [
url(r"^graphql/$", csrf_exempt(GraphQLView.as_view(schema=schema)), name="api"),
url(
r"^digital-download/(?P<token>[0-9A-Za-z_\-]+)/$",
digital_product,
name="digital-product",
),
url(
r"plugins/(?P<plugin_id>[.0-9A-Za-z_\-]+)/",
handle_plugin_webhook,
name="plugins",
),
]
if settings.DEBUG:
import warnings
from .core import views
try:
import debug_toolbar
except ImportError:
warnings.warn(
"The debug toolbar was not installed. Ignore the error. \
settings.py should already have warned the user about it."
)
else:
urlpatterns += [
url(r"^__debug__/", include(debug_toolbar.urls)) # type: ignore
]
urlpatterns += static("/media/", document_root=settings.MEDIA_ROOT) + [
url(r"^static/(?P<path>.*)$", serve),
url(r"^", views.home, name="home"),
]
|
<commit_before>from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib.staticfiles.views import serve
from django.views.decorators.csrf import csrf_exempt
from .graphql.api import schema
from .graphql.views import GraphQLView
from .plugins.views import handle_plugin_webhook
from .product.views import digital_product
urlpatterns = [
url(r"^graphql/", csrf_exempt(GraphQLView.as_view(schema=schema)), name="api"),
url(
r"^digital-download/(?P<token>[0-9A-Za-z_\-]+)/$",
digital_product,
name="digital-product",
),
url(
r"plugins/(?P<plugin_id>[.0-9A-Za-z_\-]+)/",
handle_plugin_webhook,
name="plugins",
),
]
if settings.DEBUG:
import warnings
from .core import views
try:
import debug_toolbar
except ImportError:
warnings.warn(
"The debug toolbar was not installed. Ignore the error. \
settings.py should already have warned the user about it."
)
else:
urlpatterns += [
url(r"^__debug__/", include(debug_toolbar.urls)) # type: ignore
]
urlpatterns += static("/media/", document_root=settings.MEDIA_ROOT) + [
url(r"^static/(?P<path>.*)$", serve),
url(r"^", views.home, name="home"),
]
<commit_msg>Update url patterns - accept only exact patterns<commit_after>
|
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib.staticfiles.views import serve
from django.views.decorators.csrf import csrf_exempt
from .graphql.api import schema
from .graphql.views import GraphQLView
from .plugins.views import handle_plugin_webhook
from .product.views import digital_product
urlpatterns = [
url(r"^graphql/$", csrf_exempt(GraphQLView.as_view(schema=schema)), name="api"),
url(
r"^digital-download/(?P<token>[0-9A-Za-z_\-]+)/$",
digital_product,
name="digital-product",
),
url(
r"plugins/(?P<plugin_id>[.0-9A-Za-z_\-]+)/",
handle_plugin_webhook,
name="plugins",
),
]
if settings.DEBUG:
import warnings
from .core import views
try:
import debug_toolbar
except ImportError:
warnings.warn(
"The debug toolbar was not installed. Ignore the error. \
settings.py should already have warned the user about it."
)
else:
urlpatterns += [
url(r"^__debug__/", include(debug_toolbar.urls)) # type: ignore
]
urlpatterns += static("/media/", document_root=settings.MEDIA_ROOT) + [
url(r"^static/(?P<path>.*)$", serve),
url(r"^", views.home, name="home"),
]
|
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib.staticfiles.views import serve
from django.views.decorators.csrf import csrf_exempt
from .graphql.api import schema
from .graphql.views import GraphQLView
from .plugins.views import handle_plugin_webhook
from .product.views import digital_product
urlpatterns = [
url(r"^graphql/", csrf_exempt(GraphQLView.as_view(schema=schema)), name="api"),
url(
r"^digital-download/(?P<token>[0-9A-Za-z_\-]+)/$",
digital_product,
name="digital-product",
),
url(
r"plugins/(?P<plugin_id>[.0-9A-Za-z_\-]+)/",
handle_plugin_webhook,
name="plugins",
),
]
if settings.DEBUG:
import warnings
from .core import views
try:
import debug_toolbar
except ImportError:
warnings.warn(
"The debug toolbar was not installed. Ignore the error. \
settings.py should already have warned the user about it."
)
else:
urlpatterns += [
url(r"^__debug__/", include(debug_toolbar.urls)) # type: ignore
]
urlpatterns += static("/media/", document_root=settings.MEDIA_ROOT) + [
url(r"^static/(?P<path>.*)$", serve),
url(r"^", views.home, name="home"),
]
Update url patterns - accept only exact patternsfrom django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib.staticfiles.views import serve
from django.views.decorators.csrf import csrf_exempt
from .graphql.api import schema
from .graphql.views import GraphQLView
from .plugins.views import handle_plugin_webhook
from .product.views import digital_product
urlpatterns = [
url(r"^graphql/$", csrf_exempt(GraphQLView.as_view(schema=schema)), name="api"),
url(
r"^digital-download/(?P<token>[0-9A-Za-z_\-]+)/$",
digital_product,
name="digital-product",
),
url(
r"plugins/(?P<plugin_id>[.0-9A-Za-z_\-]+)/",
handle_plugin_webhook,
name="plugins",
),
]
if settings.DEBUG:
import warnings
from .core import views
try:
import debug_toolbar
except ImportError:
warnings.warn(
"The debug toolbar was not installed. Ignore the error. \
settings.py should already have warned the user about it."
)
else:
urlpatterns += [
url(r"^__debug__/", include(debug_toolbar.urls)) # type: ignore
]
urlpatterns += static("/media/", document_root=settings.MEDIA_ROOT) + [
url(r"^static/(?P<path>.*)$", serve),
url(r"^", views.home, name="home"),
]
|
<commit_before>from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib.staticfiles.views import serve
from django.views.decorators.csrf import csrf_exempt
from .graphql.api import schema
from .graphql.views import GraphQLView
from .plugins.views import handle_plugin_webhook
from .product.views import digital_product
urlpatterns = [
url(r"^graphql/", csrf_exempt(GraphQLView.as_view(schema=schema)), name="api"),
url(
r"^digital-download/(?P<token>[0-9A-Za-z_\-]+)/$",
digital_product,
name="digital-product",
),
url(
r"plugins/(?P<plugin_id>[.0-9A-Za-z_\-]+)/",
handle_plugin_webhook,
name="plugins",
),
]
if settings.DEBUG:
import warnings
from .core import views
try:
import debug_toolbar
except ImportError:
warnings.warn(
"The debug toolbar was not installed. Ignore the error. \
settings.py should already have warned the user about it."
)
else:
urlpatterns += [
url(r"^__debug__/", include(debug_toolbar.urls)) # type: ignore
]
urlpatterns += static("/media/", document_root=settings.MEDIA_ROOT) + [
url(r"^static/(?P<path>.*)$", serve),
url(r"^", views.home, name="home"),
]
<commit_msg>Update url patterns - accept only exact patterns<commit_after>from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib.staticfiles.views import serve
from django.views.decorators.csrf import csrf_exempt
from .graphql.api import schema
from .graphql.views import GraphQLView
from .plugins.views import handle_plugin_webhook
from .product.views import digital_product
urlpatterns = [
url(r"^graphql/$", csrf_exempt(GraphQLView.as_view(schema=schema)), name="api"),
url(
r"^digital-download/(?P<token>[0-9A-Za-z_\-]+)/$",
digital_product,
name="digital-product",
),
url(
r"plugins/(?P<plugin_id>[.0-9A-Za-z_\-]+)/",
handle_plugin_webhook,
name="plugins",
),
]
if settings.DEBUG:
import warnings
from .core import views
try:
import debug_toolbar
except ImportError:
warnings.warn(
"The debug toolbar was not installed. Ignore the error. \
settings.py should already have warned the user about it."
)
else:
urlpatterns += [
url(r"^__debug__/", include(debug_toolbar.urls)) # type: ignore
]
urlpatterns += static("/media/", document_root=settings.MEDIA_ROOT) + [
url(r"^static/(?P<path>.*)$", serve),
url(r"^", views.home, name="home"),
]
|
b8938849ce9239836d2b601dd43324284f1b5604
|
whatinstalled.py
|
whatinstalled.py
|
import os
history_file = os.path.expanduser("~")+"/.bash_history"
f = open(history_file,"r")
keywords = ["pip", "tar", "brew", "apt-get", "install", "luarocks", "easy_install", "gem", "npm"]
for line in f:
for item in keywords:
if item in line:
print(line[:-1])
break
|
import os
history_file = os.path.expanduser("~")+"/.bash_history"
f = open(history_file,"r")
keywords = ["pip", "tar", "brew", "apt-get", "aptitude", "apt", "install", "luarocks", "easy_install", "gem", "npm", "bower"]
for line in f:
for item in keywords:
if item in line:
print(line[:-1])
break
|
Add aptitude and apt + bower
|
Add aptitude and apt + bower
|
Python
|
mit
|
AlexMili/WhatInstalled
|
import os
history_file = os.path.expanduser("~")+"/.bash_history"
f = open(history_file,"r")
keywords = ["pip", "tar", "brew", "apt-get", "install", "luarocks", "easy_install", "gem", "npm"]
for line in f:
for item in keywords:
if item in line:
print(line[:-1])
breakAdd aptitude and apt + bower
|
import os
history_file = os.path.expanduser("~")+"/.bash_history"
f = open(history_file,"r")
keywords = ["pip", "tar", "brew", "apt-get", "aptitude", "apt", "install", "luarocks", "easy_install", "gem", "npm", "bower"]
for line in f:
for item in keywords:
if item in line:
print(line[:-1])
break
|
<commit_before>import os
history_file = os.path.expanduser("~")+"/.bash_history"
f = open(history_file,"r")
keywords = ["pip", "tar", "brew", "apt-get", "install", "luarocks", "easy_install", "gem", "npm"]
for line in f:
for item in keywords:
if item in line:
print(line[:-1])
break<commit_msg>Add aptitude and apt + bower<commit_after>
|
import os
history_file = os.path.expanduser("~")+"/.bash_history"
f = open(history_file,"r")
keywords = ["pip", "tar", "brew", "apt-get", "aptitude", "apt", "install", "luarocks", "easy_install", "gem", "npm", "bower"]
for line in f:
for item in keywords:
if item in line:
print(line[:-1])
break
|
import os
history_file = os.path.expanduser("~")+"/.bash_history"
f = open(history_file,"r")
keywords = ["pip", "tar", "brew", "apt-get", "install", "luarocks", "easy_install", "gem", "npm"]
for line in f:
for item in keywords:
if item in line:
print(line[:-1])
breakAdd aptitude and apt + bowerimport os
history_file = os.path.expanduser("~")+"/.bash_history"
f = open(history_file,"r")
keywords = ["pip", "tar", "brew", "apt-get", "aptitude", "apt", "install", "luarocks", "easy_install", "gem", "npm", "bower"]
for line in f:
for item in keywords:
if item in line:
print(line[:-1])
break
|
<commit_before>import os
history_file = os.path.expanduser("~")+"/.bash_history"
f = open(history_file,"r")
keywords = ["pip", "tar", "brew", "apt-get", "install", "luarocks", "easy_install", "gem", "npm"]
for line in f:
for item in keywords:
if item in line:
print(line[:-1])
break<commit_msg>Add aptitude and apt + bower<commit_after>import os
history_file = os.path.expanduser("~")+"/.bash_history"
f = open(history_file,"r")
keywords = ["pip", "tar", "brew", "apt-get", "aptitude", "apt", "install", "luarocks", "easy_install", "gem", "npm", "bower"]
for line in f:
for item in keywords:
if item in line:
print(line[:-1])
break
|
ef281c765f46ead27105f78fe634ace64fca776b
|
yowsup/layers/protocol_iq/layer.py
|
yowsup/layers/protocol_iq/layer.py
|
from yowsup.layers import YowProtocolLayer
from yowsup.common import YowConstants
from .protocolentities import *
class YowIqProtocolLayer(YowProtocolLayer):
def __init__(self):
handleMap = {
"iq": (self.recvIq, self.sendIq)
}
super(YowIqProtocolLayer, self).__init__(handleMap)
def __str__(self):
return "Iq Layer"
def sendIq(self, entity):
if entity.getXmlns() == "w:p":
self.toLower(entity.toProtocolTreeNode())
def recvIq(self, node):
if node["xmlns"] == "urn:xmpp:ping":
entity = PongResultIqProtocolEntity(YowConstants.DOMAIN, node["id"])
self.toLower(entity.toProtocolTreeNode())
elif node["type"] == "error":
self.toUpper(ErrorIqProtocolEntity.fromProtocolTreeNode(node))
|
from yowsup.layers import YowProtocolLayer
from yowsup.common import YowConstants
from .protocolentities import *
class YowIqProtocolLayer(YowProtocolLayer):
def __init__(self):
handleMap = {
"iq": (self.recvIq, self.sendIq)
}
super(YowIqProtocolLayer, self).__init__(handleMap)
def __str__(self):
return "Iq Layer"
def sendIq(self, entity):
if entity.getXmlns() == "w:p":
self.toLower(entity.toProtocolTreeNode())
def recvIq(self, node):
if node["xmlns"] == "urn:xmpp:ping":
entity = PongResultIqProtocolEntity(YowConstants.DOMAIN, node["id"])
self.toLower(entity.toProtocolTreeNode())
elif node["type"] == "error":
self.toUpper(ErrorIqProtocolEntity.fromProtocolTreeNode(node))
elif node["type"] == "result" and not len(node.getAllChildren()):
#allowing only unidentifiable result (has no children) iq through this layer. (ex: ping result)
self.toUpper(ResultIqProtocolEntity.fromProtocolTreeNode(node))
|
Handle Ping result in ResultIqProtocolEntity
|
Handle Ping result in ResultIqProtocolEntity
refs #402
|
Python
|
mit
|
ongair/yowsup,biji/yowsup
|
from yowsup.layers import YowProtocolLayer
from yowsup.common import YowConstants
from .protocolentities import *
class YowIqProtocolLayer(YowProtocolLayer):
def __init__(self):
handleMap = {
"iq": (self.recvIq, self.sendIq)
}
super(YowIqProtocolLayer, self).__init__(handleMap)
def __str__(self):
return "Iq Layer"
def sendIq(self, entity):
if entity.getXmlns() == "w:p":
self.toLower(entity.toProtocolTreeNode())
def recvIq(self, node):
if node["xmlns"] == "urn:xmpp:ping":
entity = PongResultIqProtocolEntity(YowConstants.DOMAIN, node["id"])
self.toLower(entity.toProtocolTreeNode())
elif node["type"] == "error":
self.toUpper(ErrorIqProtocolEntity.fromProtocolTreeNode(node))
Handle Ping result in ResultIqProtocolEntity
refs #402
|
from yowsup.layers import YowProtocolLayer
from yowsup.common import YowConstants
from .protocolentities import *
class YowIqProtocolLayer(YowProtocolLayer):
def __init__(self):
handleMap = {
"iq": (self.recvIq, self.sendIq)
}
super(YowIqProtocolLayer, self).__init__(handleMap)
def __str__(self):
return "Iq Layer"
def sendIq(self, entity):
if entity.getXmlns() == "w:p":
self.toLower(entity.toProtocolTreeNode())
def recvIq(self, node):
if node["xmlns"] == "urn:xmpp:ping":
entity = PongResultIqProtocolEntity(YowConstants.DOMAIN, node["id"])
self.toLower(entity.toProtocolTreeNode())
elif node["type"] == "error":
self.toUpper(ErrorIqProtocolEntity.fromProtocolTreeNode(node))
elif node["type"] == "result" and not len(node.getAllChildren()):
#allowing only unidentifiable result (has no children) iq through this layer. (ex: ping result)
self.toUpper(ResultIqProtocolEntity.fromProtocolTreeNode(node))
|
<commit_before>from yowsup.layers import YowProtocolLayer
from yowsup.common import YowConstants
from .protocolentities import *
class YowIqProtocolLayer(YowProtocolLayer):
def __init__(self):
handleMap = {
"iq": (self.recvIq, self.sendIq)
}
super(YowIqProtocolLayer, self).__init__(handleMap)
def __str__(self):
return "Iq Layer"
def sendIq(self, entity):
if entity.getXmlns() == "w:p":
self.toLower(entity.toProtocolTreeNode())
def recvIq(self, node):
if node["xmlns"] == "urn:xmpp:ping":
entity = PongResultIqProtocolEntity(YowConstants.DOMAIN, node["id"])
self.toLower(entity.toProtocolTreeNode())
elif node["type"] == "error":
self.toUpper(ErrorIqProtocolEntity.fromProtocolTreeNode(node))
<commit_msg>Handle Ping result in ResultIqProtocolEntity
refs #402<commit_after>
|
from yowsup.layers import YowProtocolLayer
from yowsup.common import YowConstants
from .protocolentities import *
class YowIqProtocolLayer(YowProtocolLayer):
def __init__(self):
handleMap = {
"iq": (self.recvIq, self.sendIq)
}
super(YowIqProtocolLayer, self).__init__(handleMap)
def __str__(self):
return "Iq Layer"
def sendIq(self, entity):
if entity.getXmlns() == "w:p":
self.toLower(entity.toProtocolTreeNode())
def recvIq(self, node):
if node["xmlns"] == "urn:xmpp:ping":
entity = PongResultIqProtocolEntity(YowConstants.DOMAIN, node["id"])
self.toLower(entity.toProtocolTreeNode())
elif node["type"] == "error":
self.toUpper(ErrorIqProtocolEntity.fromProtocolTreeNode(node))
elif node["type"] == "result" and not len(node.getAllChildren()):
#allowing only unidentifiable result (has no children) iq through this layer. (ex: ping result)
self.toUpper(ResultIqProtocolEntity.fromProtocolTreeNode(node))
|
from yowsup.layers import YowProtocolLayer
from yowsup.common import YowConstants
from .protocolentities import *
class YowIqProtocolLayer(YowProtocolLayer):
def __init__(self):
handleMap = {
"iq": (self.recvIq, self.sendIq)
}
super(YowIqProtocolLayer, self).__init__(handleMap)
def __str__(self):
return "Iq Layer"
def sendIq(self, entity):
if entity.getXmlns() == "w:p":
self.toLower(entity.toProtocolTreeNode())
def recvIq(self, node):
if node["xmlns"] == "urn:xmpp:ping":
entity = PongResultIqProtocolEntity(YowConstants.DOMAIN, node["id"])
self.toLower(entity.toProtocolTreeNode())
elif node["type"] == "error":
self.toUpper(ErrorIqProtocolEntity.fromProtocolTreeNode(node))
Handle Ping result in ResultIqProtocolEntity
refs #402from yowsup.layers import YowProtocolLayer
from yowsup.common import YowConstants
from .protocolentities import *
class YowIqProtocolLayer(YowProtocolLayer):
def __init__(self):
handleMap = {
"iq": (self.recvIq, self.sendIq)
}
super(YowIqProtocolLayer, self).__init__(handleMap)
def __str__(self):
return "Iq Layer"
def sendIq(self, entity):
if entity.getXmlns() == "w:p":
self.toLower(entity.toProtocolTreeNode())
def recvIq(self, node):
if node["xmlns"] == "urn:xmpp:ping":
entity = PongResultIqProtocolEntity(YowConstants.DOMAIN, node["id"])
self.toLower(entity.toProtocolTreeNode())
elif node["type"] == "error":
self.toUpper(ErrorIqProtocolEntity.fromProtocolTreeNode(node))
elif node["type"] == "result" and not len(node.getAllChildren()):
#allowing only unidentifiable result (has no children) iq through this layer. (ex: ping result)
self.toUpper(ResultIqProtocolEntity.fromProtocolTreeNode(node))
|
<commit_before>from yowsup.layers import YowProtocolLayer
from yowsup.common import YowConstants
from .protocolentities import *
class YowIqProtocolLayer(YowProtocolLayer):
def __init__(self):
handleMap = {
"iq": (self.recvIq, self.sendIq)
}
super(YowIqProtocolLayer, self).__init__(handleMap)
def __str__(self):
return "Iq Layer"
def sendIq(self, entity):
if entity.getXmlns() == "w:p":
self.toLower(entity.toProtocolTreeNode())
def recvIq(self, node):
if node["xmlns"] == "urn:xmpp:ping":
entity = PongResultIqProtocolEntity(YowConstants.DOMAIN, node["id"])
self.toLower(entity.toProtocolTreeNode())
elif node["type"] == "error":
self.toUpper(ErrorIqProtocolEntity.fromProtocolTreeNode(node))
<commit_msg>Handle Ping result in ResultIqProtocolEntity
refs #402<commit_after>from yowsup.layers import YowProtocolLayer
from yowsup.common import YowConstants
from .protocolentities import *
class YowIqProtocolLayer(YowProtocolLayer):
def __init__(self):
handleMap = {
"iq": (self.recvIq, self.sendIq)
}
super(YowIqProtocolLayer, self).__init__(handleMap)
def __str__(self):
return "Iq Layer"
def sendIq(self, entity):
if entity.getXmlns() == "w:p":
self.toLower(entity.toProtocolTreeNode())
def recvIq(self, node):
if node["xmlns"] == "urn:xmpp:ping":
entity = PongResultIqProtocolEntity(YowConstants.DOMAIN, node["id"])
self.toLower(entity.toProtocolTreeNode())
elif node["type"] == "error":
self.toUpper(ErrorIqProtocolEntity.fromProtocolTreeNode(node))
elif node["type"] == "result" and not len(node.getAllChildren()):
#allowing only unidentifiable result (has no children) iq through this layer. (ex: ping result)
self.toUpper(ResultIqProtocolEntity.fromProtocolTreeNode(node))
|
dbeaefca7643edd67ea9990c1f665f0ecc5b34d0
|
pebble/PblCommand.py
|
pebble/PblCommand.py
|
import os
class PblCommand:
name = ''
help = ''
def run(args):
pass
def configure_subparser(self, parser):
parser.add_argument('--sdk', help='Path to Pebble SDK (ie: ~/pebble-dev/PebbleSDK-2.X/)')
parser.add_argument('--debug', action='store_true',
help = 'Enable debugging output')
def sdk_path(self, args):
"""
Tries to guess the location of the Pebble SDK
"""
if args.sdk:
return args.sdk
else:
return os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..'))
|
import os
import logging
class PblCommand:
name = ''
help = ''
def run(args):
pass
def configure_subparser(self, parser):
parser.add_argument('--sdk', help='Path to Pebble SDK (ie: ~/pebble-dev/PebbleSDK-2.X/)')
parser.add_argument('--debug', action='store_true',
help = 'Enable debugging output')
def sdk_path(self, args):
"""
Tries to guess the location of the Pebble SDK
"""
sdk_path = os.getenv('PEBBLE_SDK_PATH')
if args.sdk:
return args.sdk
elif sdk_path:
if not os.path.exists(sdk_path):
raise Exception("SDK path {} doesn't exist!".format(sdk_path))
logging.info("Overriding Pebble SDK Path with '%s'", sdk_path)
return sdk_path
else:
return os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..'))
|
Allow SDK location to be overridden by environment variable.
|
Allow SDK location to be overridden by environment variable.
|
Python
|
mit
|
pebble/libpebble,pebble/libpebble,pebble/libpebble,pebble/libpebble
|
import os
class PblCommand:
name = ''
help = ''
def run(args):
pass
def configure_subparser(self, parser):
parser.add_argument('--sdk', help='Path to Pebble SDK (ie: ~/pebble-dev/PebbleSDK-2.X/)')
parser.add_argument('--debug', action='store_true',
help = 'Enable debugging output')
def sdk_path(self, args):
"""
Tries to guess the location of the Pebble SDK
"""
if args.sdk:
return args.sdk
else:
return os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..'))
Allow SDK location to be overridden by environment variable.
|
import os
import logging
class PblCommand:
name = ''
help = ''
def run(args):
pass
def configure_subparser(self, parser):
parser.add_argument('--sdk', help='Path to Pebble SDK (ie: ~/pebble-dev/PebbleSDK-2.X/)')
parser.add_argument('--debug', action='store_true',
help = 'Enable debugging output')
def sdk_path(self, args):
"""
Tries to guess the location of the Pebble SDK
"""
sdk_path = os.getenv('PEBBLE_SDK_PATH')
if args.sdk:
return args.sdk
elif sdk_path:
if not os.path.exists(sdk_path):
raise Exception("SDK path {} doesn't exist!".format(sdk_path))
logging.info("Overriding Pebble SDK Path with '%s'", sdk_path)
return sdk_path
else:
return os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..'))
|
<commit_before>import os
class PblCommand:
name = ''
help = ''
def run(args):
pass
def configure_subparser(self, parser):
parser.add_argument('--sdk', help='Path to Pebble SDK (ie: ~/pebble-dev/PebbleSDK-2.X/)')
parser.add_argument('--debug', action='store_true',
help = 'Enable debugging output')
def sdk_path(self, args):
"""
Tries to guess the location of the Pebble SDK
"""
if args.sdk:
return args.sdk
else:
return os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..'))
<commit_msg>Allow SDK location to be overridden by environment variable.<commit_after>
|
import os
import logging
class PblCommand:
name = ''
help = ''
def run(args):
pass
def configure_subparser(self, parser):
parser.add_argument('--sdk', help='Path to Pebble SDK (ie: ~/pebble-dev/PebbleSDK-2.X/)')
parser.add_argument('--debug', action='store_true',
help = 'Enable debugging output')
def sdk_path(self, args):
"""
Tries to guess the location of the Pebble SDK
"""
sdk_path = os.getenv('PEBBLE_SDK_PATH')
if args.sdk:
return args.sdk
elif sdk_path:
if not os.path.exists(sdk_path):
raise Exception("SDK path {} doesn't exist!".format(sdk_path))
logging.info("Overriding Pebble SDK Path with '%s'", sdk_path)
return sdk_path
else:
return os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..'))
|
import os
class PblCommand:
name = ''
help = ''
def run(args):
pass
def configure_subparser(self, parser):
parser.add_argument('--sdk', help='Path to Pebble SDK (ie: ~/pebble-dev/PebbleSDK-2.X/)')
parser.add_argument('--debug', action='store_true',
help = 'Enable debugging output')
def sdk_path(self, args):
"""
Tries to guess the location of the Pebble SDK
"""
if args.sdk:
return args.sdk
else:
return os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..'))
Allow SDK location to be overridden by environment variable.import os
import logging
class PblCommand:
name = ''
help = ''
def run(args):
pass
def configure_subparser(self, parser):
parser.add_argument('--sdk', help='Path to Pebble SDK (ie: ~/pebble-dev/PebbleSDK-2.X/)')
parser.add_argument('--debug', action='store_true',
help = 'Enable debugging output')
def sdk_path(self, args):
"""
Tries to guess the location of the Pebble SDK
"""
sdk_path = os.getenv('PEBBLE_SDK_PATH')
if args.sdk:
return args.sdk
elif sdk_path:
if not os.path.exists(sdk_path):
raise Exception("SDK path {} doesn't exist!".format(sdk_path))
logging.info("Overriding Pebble SDK Path with '%s'", sdk_path)
return sdk_path
else:
return os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..'))
|
<commit_before>import os
class PblCommand:
name = ''
help = ''
def run(args):
pass
def configure_subparser(self, parser):
parser.add_argument('--sdk', help='Path to Pebble SDK (ie: ~/pebble-dev/PebbleSDK-2.X/)')
parser.add_argument('--debug', action='store_true',
help = 'Enable debugging output')
def sdk_path(self, args):
"""
Tries to guess the location of the Pebble SDK
"""
if args.sdk:
return args.sdk
else:
return os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..'))
<commit_msg>Allow SDK location to be overridden by environment variable.<commit_after>import os
import logging
class PblCommand:
name = ''
help = ''
def run(args):
pass
def configure_subparser(self, parser):
parser.add_argument('--sdk', help='Path to Pebble SDK (ie: ~/pebble-dev/PebbleSDK-2.X/)')
parser.add_argument('--debug', action='store_true',
help = 'Enable debugging output')
def sdk_path(self, args):
"""
Tries to guess the location of the Pebble SDK
"""
sdk_path = os.getenv('PEBBLE_SDK_PATH')
if args.sdk:
return args.sdk
elif sdk_path:
if not os.path.exists(sdk_path):
raise Exception("SDK path {} doesn't exist!".format(sdk_path))
logging.info("Overriding Pebble SDK Path with '%s'", sdk_path)
return sdk_path
else:
return os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..'))
|
c3e27aee3aa27a81dff2e8aaa5ab9be13725f329
|
mesos/compute_cluster_url.py
|
mesos/compute_cluster_url.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
# Get the Mesos cluster URL, assuming the EC2 script environment variables
# are all available.
active_master = os.getenv("MESOS_MASTERS").split("\n")[0]
zoo_list = os.getenv("MESOS_ZOO_LIST")
if zoo_list.strip() == "NONE":
print active_master + ":5050"
else:
zoo_nodes = zoo_list.trim().split("\n")
print "zoo://" + ",".join(["%s:2181/mesos" % node for node in zoo_nodes])
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
# Get the Mesos cluster URL, assuming the EC2 script environment variables
# are all available.
active_master = os.getenv("MESOS_MASTERS").split("\n")[0]
zoo_list = os.getenv("MESOS_ZOO_LIST")
if zoo_list.strip() == "NONE":
print "mesos://" + active_master + ":5050"
else:
zoo_nodes = zoo_list.trim().split("\n")
print "zoo://" + ",".join(["%s:2181/mesos" % node for node in zoo_nodes])
|
Include URI scheme in Mesos cluster_url
|
Include URI scheme in Mesos cluster_url
|
Python
|
apache-2.0
|
serialx/spark-ec2,madhavhugar/spark-ec2,SiGe/spark-ec2,romanini/spark-ec2,madhavhugar/spark-ec2,uronce-cc/spark-ec2,serialx/spark-ec2,inreachventures/spark-ec2,BrandwatchLtd/spark-ec2,tomerk/spark-ec2,jyt109/spark-ec2,GordonWang/spark-ec2,romanini/spark-ec2,Wealthport/spark-ec2,pluribus-labs/spark-ec2,paulomagalhaes/spark-ec2,reactormonk/spark-ec2,rfarjadi/spark-ec2,silvaurus/spark-ec2,stat-37601/spark-ec2,rfarjadi/spark-ec2,tomerk/spark-ec2,1024inc/spark-ec2,jeffusan/spark-ec2,smartkiwi/spark-ec2,SiGe/spark-ec2,denbkh/amplab-spark-ec2,saurfang/spark-ec2,uronce-cc/spark-ec2,mesos/spark-ec2,howardlinus/spark-ec2,Gaojiaqi/spark-ec2,apanda/spark-ec2,dmtran-osc/spark-ec2,zhuj/spark-ec2,dm-tran/spark-ec2,amplab/spark-ec2,serialx/spark-ec2,paulomagalhaes/spark-ec2,Wealthport/spark-ec2,lckung/spark-ec2,denbkh/mesos-spark-ec2,skp33-info/spark-ec2,silvaurus/spark-ec2,1024inc/spark-ec2,apanda/spark-ec2,lckung/spark-ec2,skp33-info/spark-ec2,piskvorky/spark-ec2,Gaojiaqi/spark-ec2,lckung/spark-ec2,zhuj/spark-ec2,smartkiwi/spark-ec2,megatron-me-uk/spark-ec2,dmtran-osc/spark-ec2,madhavhugar/spark-ec2,Verdad/spark-ec2-public,lckung/spark-ec2,denbkh/mesos-spark-ec2,megatron-me-uk/spark-ec2,hiconversion/spark-ec2,denbkh/amplab-spark-ec2,SRIKANTH-GIT/spark-ec2,denbkh/amplab-spark-ec2,amplab/spark-ec2,paulomagalhaes/spark-ec2,jyt109/spark-ec2,hiconversion/spark-ec2,SiGe/spark-ec2,saurfang/spark-ec2,jey/spark-ec2,prateek-s/spark-ec2,amplab/spark-ec2,stat-37601/spark-ec2,jeffusan/spark-ec2,romanini/spark-ec2,pluribus-labs/spark-ec2,1024inc/spark-ec2,jcazevedo/spark-ec2,disconn3ct/spark-ec2,megatron-me-uk/spark-ec2,reactormonk/spark-ec2,prateek-s/spark-ec2,apanda/spark-ec2,dm-tran/spark-ec2,skp33-info/spark-ec2,Verdad/spark-ec2-public,jyt109/spark-ec2,GordonWang/spark-ec2,disconn3ct/spark-ec2,dm-tran/spark-ec2,jeffusan/spark-ec2,BrandwatchLtd/spark-ec2,Verdad/spark-ec2-public,SRIKANTH-GIT/spark-ec2,saurfang/spark-ec2,jey/spark-ec2,prateek-s/spark-ec2,jcazevedo/spark-ec2,piskvorky/spark-ec
2,reactormonk/spark-ec2,mesos/spark-ec2,piskvorky/spark-ec2,hiconversion/spark-ec2,silvaurus/spark-ec2,Gaojiaqi/spark-ec2,dmtran-osc/spark-ec2,rfarjadi/spark-ec2,disconn3ct/spark-ec2,howardlinus/spark-ec2,mesos/spark-ec2,tomerk/spark-ec2,pluribus-labs/spark-ec2,jcazevedo/spark-ec2,zhuj/spark-ec2,denbkh/mesos-spark-ec2,inreachventures/spark-ec2,Wealthport/spark-ec2,BrandwatchLtd/spark-ec2,Verdad/spark-ec2-public,SRIKANTH-GIT/spark-ec2,howardlinus/spark-ec2
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
# Get the Mesos cluster URL, assuming the EC2 script environment variables
# are all available.
active_master = os.getenv("MESOS_MASTERS").split("\n")[0]
zoo_list = os.getenv("MESOS_ZOO_LIST")
if zoo_list.strip() == "NONE":
print active_master + ":5050"
else:
zoo_nodes = zoo_list.trim().split("\n")
print "zoo://" + ",".join(["%s:2181/mesos" % node for node in zoo_nodes])
Include URI scheme in Mesos cluster_url
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
# Get the Mesos cluster URL, assuming the EC2 script environment variables
# are all available.
active_master = os.getenv("MESOS_MASTERS").split("\n")[0]
zoo_list = os.getenv("MESOS_ZOO_LIST")
if zoo_list.strip() == "NONE":
print "mesos://" + active_master + ":5050"
else:
zoo_nodes = zoo_list.trim().split("\n")
print "zoo://" + ",".join(["%s:2181/mesos" % node for node in zoo_nodes])
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
# Get the Mesos cluster URL, assuming the EC2 script environment variables
# are all available.
active_master = os.getenv("MESOS_MASTERS").split("\n")[0]
zoo_list = os.getenv("MESOS_ZOO_LIST")
if zoo_list.strip() == "NONE":
print active_master + ":5050"
else:
zoo_nodes = zoo_list.trim().split("\n")
print "zoo://" + ",".join(["%s:2181/mesos" % node for node in zoo_nodes])
<commit_msg>Include URI scheme in Mesos cluster_url<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
# Get the Mesos cluster URL, assuming the EC2 script environment variables
# are all available.
active_master = os.getenv("MESOS_MASTERS").split("\n")[0]
zoo_list = os.getenv("MESOS_ZOO_LIST")
if zoo_list.strip() == "NONE":
print "mesos://" + active_master + ":5050"
else:
zoo_nodes = zoo_list.trim().split("\n")
print "zoo://" + ",".join(["%s:2181/mesos" % node for node in zoo_nodes])
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
# Get the Mesos cluster URL, assuming the EC2 script environment variables
# are all available.
active_master = os.getenv("MESOS_MASTERS").split("\n")[0]
zoo_list = os.getenv("MESOS_ZOO_LIST")
if zoo_list.strip() == "NONE":
print active_master + ":5050"
else:
zoo_nodes = zoo_list.trim().split("\n")
print "zoo://" + ",".join(["%s:2181/mesos" % node for node in zoo_nodes])
Include URI scheme in Mesos cluster_url#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
# Get the Mesos cluster URL, assuming the EC2 script environment variables
# are all available.
active_master = os.getenv("MESOS_MASTERS").split("\n")[0]
zoo_list = os.getenv("MESOS_ZOO_LIST")
if zoo_list.strip() == "NONE":
print "mesos://" + active_master + ":5050"
else:
zoo_nodes = zoo_list.trim().split("\n")
print "zoo://" + ",".join(["%s:2181/mesos" % node for node in zoo_nodes])
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
# Get the Mesos cluster URL, assuming the EC2 script environment variables
# are all available.
active_master = os.getenv("MESOS_MASTERS").split("\n")[0]
zoo_list = os.getenv("MESOS_ZOO_LIST")
if zoo_list.strip() == "NONE":
print active_master + ":5050"
else:
zoo_nodes = zoo_list.trim().split("\n")
print "zoo://" + ",".join(["%s:2181/mesos" % node for node in zoo_nodes])
<commit_msg>Include URI scheme in Mesos cluster_url<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
# Get the Mesos cluster URL, assuming the EC2 script environment variables
# are all available.
active_master = os.getenv("MESOS_MASTERS").split("\n")[0]
zoo_list = os.getenv("MESOS_ZOO_LIST")
if zoo_list.strip() == "NONE":
print "mesos://" + active_master + ":5050"
else:
zoo_nodes = zoo_list.trim().split("\n")
print "zoo://" + ",".join(["%s:2181/mesos" % node for node in zoo_nodes])
|
5a47ca87858bb08fcaac4a38322dc04eaf74cac2
|
src/foremast/utils/get_sns_topic_arn.py
|
src/foremast/utils/get_sns_topic_arn.py
|
"""SNS Topic functions."""
import logging
import boto3
from ..exceptions import SNSTopicNotFound
LOG = logging.getLogger(__name__)
def get_sns_topic_arn(topic_name, account, region):
    """Get SNS topic ARN.

    Args:
        topic_name (str): Name of the topic to lookup.
        account (str): Environment, e.g. dev
        region (str): Region name, e.g. us-east-1

    Returns:
        str: ARN for requested topic name
    """
    session = boto3.Session(profile_name=account, region_name=region)
    sns_client = session.client('sns')

    # Scan every topic in the account/region; the topic name is the last
    # ':'-separated component of each ARN.
    for topic in sns_client.list_topics()['Topics']:
        candidate = topic['TopicArn']
        if candidate.split(':')[-1] == topic_name:
            return candidate

    LOG.critical("No topic with name %s found.", topic_name)
    raise SNSTopicNotFound('No topic with name {0} found'.format(topic_name))
|
"""SNS Topic functions."""
import logging
import boto3
from ..exceptions import SNSTopicNotFound
LOG = logging.getLogger(__name__)
def get_sns_topic_arn(topic_name, account, region):
"""Get SNS topic ARN.
Args:
topic_name (str): Name of the topic to lookup.
account (str): Environment, e.g. dev
region (str): Region name, e.g. us-east-1
Returns:
str: ARN for requested topic name
"""
if topic_name.count(':') == 5:
return topic_name
session = boto3.Session(profile_name=account, region_name=region)
sns_client = session.client('sns')
topics = sns_client.list_topics()['Topics']
matched_topic = None
for topic in topics:
topic_arn = topic['TopicArn']
if topic_name == topic_arn.split(':')[-1]:
matched_topic = topic_arn
break
else:
LOG.critical("No topic with name %s found.", topic_name)
raise SNSTopicNotFound('No topic with name {0} found'.format(topic_name))
return matched_topic
|
Return ARN directly if topic name appears to be an ARN
|
Return ARN directly if topic name appears to be an ARN
|
Python
|
apache-2.0
|
gogoair/foremast,gogoair/foremast
|
"""SNS Topic functions."""
import logging
import boto3
from ..exceptions import SNSTopicNotFound
LOG = logging.getLogger(__name__)
def get_sns_topic_arn(topic_name, account, region):
"""Get SNS topic ARN.
Args:
topic_name (str): Name of the topic to lookup.
account (str): Environment, e.g. dev
region (str): Region name, e.g. us-east-1
Returns:
str: ARN for requested topic name
"""
session = boto3.Session(profile_name=account, region_name=region)
sns_client = session.client('sns')
topics = sns_client.list_topics()['Topics']
matched_topic = None
for topic in topics:
topic_arn = topic['TopicArn']
if topic_name == topic_arn.split(':')[-1]:
matched_topic = topic_arn
break
else:
LOG.critical("No topic with name %s found.", topic_name)
raise SNSTopicNotFound('No topic with name {0} found'.format(topic_name))
return matched_topic
Return ARN directly if topic name appears to be an ARN
|
"""SNS Topic functions."""
import logging
import boto3
from ..exceptions import SNSTopicNotFound
LOG = logging.getLogger(__name__)
def get_sns_topic_arn(topic_name, account, region):
"""Get SNS topic ARN.
Args:
topic_name (str): Name of the topic to lookup.
account (str): Environment, e.g. dev
region (str): Region name, e.g. us-east-1
Returns:
str: ARN for requested topic name
"""
if topic_name.count(':') == 5:
return topic_name
session = boto3.Session(profile_name=account, region_name=region)
sns_client = session.client('sns')
topics = sns_client.list_topics()['Topics']
matched_topic = None
for topic in topics:
topic_arn = topic['TopicArn']
if topic_name == topic_arn.split(':')[-1]:
matched_topic = topic_arn
break
else:
LOG.critical("No topic with name %s found.", topic_name)
raise SNSTopicNotFound('No topic with name {0} found'.format(topic_name))
return matched_topic
|
<commit_before>"""SNS Topic functions."""
import logging
import boto3
from ..exceptions import SNSTopicNotFound
LOG = logging.getLogger(__name__)
def get_sns_topic_arn(topic_name, account, region):
"""Get SNS topic ARN.
Args:
topic_name (str): Name of the topic to lookup.
account (str): Environment, e.g. dev
region (str): Region name, e.g. us-east-1
Returns:
str: ARN for requested topic name
"""
session = boto3.Session(profile_name=account, region_name=region)
sns_client = session.client('sns')
topics = sns_client.list_topics()['Topics']
matched_topic = None
for topic in topics:
topic_arn = topic['TopicArn']
if topic_name == topic_arn.split(':')[-1]:
matched_topic = topic_arn
break
else:
LOG.critical("No topic with name %s found.", topic_name)
raise SNSTopicNotFound('No topic with name {0} found'.format(topic_name))
return matched_topic
<commit_msg>Return ARN directly if topic name appears to be an ARN<commit_after>
|
"""SNS Topic functions."""
import logging
import boto3
from ..exceptions import SNSTopicNotFound
LOG = logging.getLogger(__name__)
def get_sns_topic_arn(topic_name, account, region):
    """Get SNS topic ARN.

    Args:
        topic_name (str): Name of the topic to lookup.
        account (str): Environment, e.g. dev
        region (str): Region name, e.g. us-east-1

    Returns:
        str: ARN for requested topic name
    """
    # A full ARN (arn:aws:sns:<region>:<account>:<name>) contains exactly
    # five colons; in that case there is nothing to look up.
    if topic_name.count(':') == 5:
        return topic_name

    session = boto3.Session(profile_name=account, region_name=region)
    sns_client = session.client('sns')

    # Scan every topic in the account/region; the topic name is the last
    # ':'-separated component of each ARN.
    for topic in sns_client.list_topics()['Topics']:
        candidate = topic['TopicArn']
        if candidate.split(':')[-1] == topic_name:
            return candidate

    LOG.critical("No topic with name %s found.", topic_name)
    raise SNSTopicNotFound('No topic with name {0} found'.format(topic_name))
|
"""SNS Topic functions."""
import logging
import boto3
from ..exceptions import SNSTopicNotFound
LOG = logging.getLogger(__name__)
def get_sns_topic_arn(topic_name, account, region):
"""Get SNS topic ARN.
Args:
topic_name (str): Name of the topic to lookup.
account (str): Environment, e.g. dev
region (str): Region name, e.g. us-east-1
Returns:
str: ARN for requested topic name
"""
session = boto3.Session(profile_name=account, region_name=region)
sns_client = session.client('sns')
topics = sns_client.list_topics()['Topics']
matched_topic = None
for topic in topics:
topic_arn = topic['TopicArn']
if topic_name == topic_arn.split(':')[-1]:
matched_topic = topic_arn
break
else:
LOG.critical("No topic with name %s found.", topic_name)
raise SNSTopicNotFound('No topic with name {0} found'.format(topic_name))
return matched_topic
Return ARN directly if topic name appears to be an ARN"""SNS Topic functions."""
import logging
import boto3
from ..exceptions import SNSTopicNotFound
LOG = logging.getLogger(__name__)
def get_sns_topic_arn(topic_name, account, region):
"""Get SNS topic ARN.
Args:
topic_name (str): Name of the topic to lookup.
account (str): Environment, e.g. dev
region (str): Region name, e.g. us-east-1
Returns:
str: ARN for requested topic name
"""
if topic_name.count(':') == 5:
return topic_name
session = boto3.Session(profile_name=account, region_name=region)
sns_client = session.client('sns')
topics = sns_client.list_topics()['Topics']
matched_topic = None
for topic in topics:
topic_arn = topic['TopicArn']
if topic_name == topic_arn.split(':')[-1]:
matched_topic = topic_arn
break
else:
LOG.critical("No topic with name %s found.", topic_name)
raise SNSTopicNotFound('No topic with name {0} found'.format(topic_name))
return matched_topic
|
<commit_before>"""SNS Topic functions."""
import logging
import boto3
from ..exceptions import SNSTopicNotFound
LOG = logging.getLogger(__name__)
def get_sns_topic_arn(topic_name, account, region):
"""Get SNS topic ARN.
Args:
topic_name (str): Name of the topic to lookup.
account (str): Environment, e.g. dev
region (str): Region name, e.g. us-east-1
Returns:
str: ARN for requested topic name
"""
session = boto3.Session(profile_name=account, region_name=region)
sns_client = session.client('sns')
topics = sns_client.list_topics()['Topics']
matched_topic = None
for topic in topics:
topic_arn = topic['TopicArn']
if topic_name == topic_arn.split(':')[-1]:
matched_topic = topic_arn
break
else:
LOG.critical("No topic with name %s found.", topic_name)
raise SNSTopicNotFound('No topic with name {0} found'.format(topic_name))
return matched_topic
<commit_msg>Return ARN directly if topic name appears to be an ARN<commit_after>"""SNS Topic functions."""
import logging
import boto3
from ..exceptions import SNSTopicNotFound
LOG = logging.getLogger(__name__)
def get_sns_topic_arn(topic_name, account, region):
"""Get SNS topic ARN.
Args:
topic_name (str): Name of the topic to lookup.
account (str): Environment, e.g. dev
region (str): Region name, e.g. us-east-1
Returns:
str: ARN for requested topic name
"""
if topic_name.count(':') == 5:
return topic_name
session = boto3.Session(profile_name=account, region_name=region)
sns_client = session.client('sns')
topics = sns_client.list_topics()['Topics']
matched_topic = None
for topic in topics:
topic_arn = topic['TopicArn']
if topic_name == topic_arn.split(':')[-1]:
matched_topic = topic_arn
break
else:
LOG.critical("No topic with name %s found.", topic_name)
raise SNSTopicNotFound('No topic with name {0} found'.format(topic_name))
return matched_topic
|
e2d80effa425bc12544f3bbf1ad5546b5af40572
|
insert_sort.py
|
insert_sort.py
|
def insert_sort(my_list):
    """Sort *my_list* in place using insertion sort (stable, O(n^2))."""
    for idx in range(1, len(my_list)):
        current = my_list[idx]
        pos = idx - 1
        # Shift larger elements one slot to the right until the
        # insertion point for `current` is found.
        while pos >= 0 and my_list[pos] > current:
            my_list[pos + 1] = my_list[pos]
            pos -= 1
        my_list[pos + 1] = current
if __name__ == '__main__':
|
def insert_sort(my_list):
    """In-place insertion sort; stable, quadratic in the worst case."""
    for right in range(1, len(my_list)):
        value = my_list[right]
        slot = right
        # Walk left, moving bigger items over, until `value` fits.
        while slot > 0 and my_list[slot - 1] > value:
            my_list[slot] = my_list[slot - 1]
            slot -= 1
        my_list[slot] = value
if __name__ == '__main__':
import timeit
best_list = [i for i in range(5000)]
worst_list = [i for i in range(5000)][::-1]
def best_case():
return insert_sort(best_list)
def worst_case():
return insert_sort(worst_list)
print "Best case 5000 in order: {}".format(
timeit.timeit('best_case()', setup='from __main__ import best_case',
number=100)
)
print "Best case 5000 reverse order: {}".format(
timeit.timeit('worst_case()', setup='from __main__ import worst_case',
number=100)
)
|
Add timing to __name__ block
|
Add timing to __name__ block
|
Python
|
mit
|
nbeck90/data_structures_2
|
def insert_sort(my_list):
for i in range(1, len(my_list)):
j = i - 1
key = my_list[i]
while (j >= 0) and (my_list[j] > key):
my_list[j + 1] = my_list[j]
j -= 1
my_list[j + 1] = key
if __name__ == '__main__':
Add timing to __name__ block
|
def insert_sort(my_list):
for i in range(1, len(my_list)):
j = i - 1
key = my_list[i]
while (j >= 0) and (my_list[j] > key):
my_list[j + 1] = my_list[j]
j -= 1
my_list[j + 1] = key
if __name__ == '__main__':
import timeit
best_list = [i for i in range(5000)]
worst_list = [i for i in range(5000)][::-1]
def best_case():
return insert_sort(best_list)
def worst_case():
return insert_sort(worst_list)
print "Best case 5000 in order: {}".format(
timeit.timeit('best_case()', setup='from __main__ import best_case',
number=100)
)
print "Best case 5000 reverse order: {}".format(
timeit.timeit('worst_case()', setup='from __main__ import worst_case',
number=100)
)
|
<commit_before>def insert_sort(my_list):
for i in range(1, len(my_list)):
j = i - 1
key = my_list[i]
while (j >= 0) and (my_list[j] > key):
my_list[j + 1] = my_list[j]
j -= 1
my_list[j + 1] = key
if __name__ == '__main__':
<commit_msg>Add timing to __name__ block<commit_after>
|
def insert_sort(my_list):
for i in range(1, len(my_list)):
j = i - 1
key = my_list[i]
while (j >= 0) and (my_list[j] > key):
my_list[j + 1] = my_list[j]
j -= 1
my_list[j + 1] = key
if __name__ == '__main__':
import timeit
best_list = [i for i in range(5000)]
worst_list = [i for i in range(5000)][::-1]
def best_case():
return insert_sort(best_list)
def worst_case():
return insert_sort(worst_list)
print "Best case 5000 in order: {}".format(
timeit.timeit('best_case()', setup='from __main__ import best_case',
number=100)
)
print "Best case 5000 reverse order: {}".format(
timeit.timeit('worst_case()', setup='from __main__ import worst_case',
number=100)
)
|
def insert_sort(my_list):
for i in range(1, len(my_list)):
j = i - 1
key = my_list[i]
while (j >= 0) and (my_list[j] > key):
my_list[j + 1] = my_list[j]
j -= 1
my_list[j + 1] = key
if __name__ == '__main__':
Add timing to __name__ blockdef insert_sort(my_list):
for i in range(1, len(my_list)):
j = i - 1
key = my_list[i]
while (j >= 0) and (my_list[j] > key):
my_list[j + 1] = my_list[j]
j -= 1
my_list[j + 1] = key
if __name__ == '__main__':
import timeit
best_list = [i for i in range(5000)]
worst_list = [i for i in range(5000)][::-1]
def best_case():
return insert_sort(best_list)
def worst_case():
return insert_sort(worst_list)
print "Best case 5000 in order: {}".format(
timeit.timeit('best_case()', setup='from __main__ import best_case',
number=100)
)
print "Best case 5000 reverse order: {}".format(
timeit.timeit('worst_case()', setup='from __main__ import worst_case',
number=100)
)
|
<commit_before>def insert_sort(my_list):
for i in range(1, len(my_list)):
j = i - 1
key = my_list[i]
while (j >= 0) and (my_list[j] > key):
my_list[j + 1] = my_list[j]
j -= 1
my_list[j + 1] = key
if __name__ == '__main__':
<commit_msg>Add timing to __name__ block<commit_after>def insert_sort(my_list):
for i in range(1, len(my_list)):
j = i - 1
key = my_list[i]
while (j >= 0) and (my_list[j] > key):
my_list[j + 1] = my_list[j]
j -= 1
my_list[j + 1] = key
if __name__ == '__main__':
import timeit
best_list = [i for i in range(5000)]
worst_list = [i for i in range(5000)][::-1]
def best_case():
return insert_sort(best_list)
def worst_case():
return insert_sort(worst_list)
print "Best case 5000 in order: {}".format(
timeit.timeit('best_case()', setup='from __main__ import best_case',
number=100)
)
print "Best case 5000 reverse order: {}".format(
timeit.timeit('worst_case()', setup='from __main__ import worst_case',
number=100)
)
|
7506e93942333a28f1e66c95016071760382a071
|
packages/Python/lldbsuite/test/repl/pounwrapping/TestPOUnwrapping.py
|
packages/Python/lldbsuite/test/repl/pounwrapping/TestPOUnwrapping.py
|
# TestPOUnwrapping.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
"""Test that we can correctly handle a nested generic type."""
import lldbsuite.test.lldbrepl as lldbrepl
import lldbsuite.test.decorators as decorators
class REPLBasicTestCase(lldbrepl.REPLTest):
mydir = lldbrepl.REPLTest.compute_mydir(__file__)
@decorators.expectedFailureAll(
oslist=[
"macosx",
"linux"],
bugnumber="rdar://35264910")
def doTest(self):
self.command(
'''class Foo<T,U> {
var t: T?
var u: U?
init() { t = nil; u = nil }
init(_ x: T, _ y: U) { t = x; u = y }
};(Foo<String,Double>(),Foo<Double,String>(3.14,"hello"))''',
patterns=[
r'\$R0: \(Foo<String, Double>, Foo<Double, String>\) = {',
r'0 = {',
r't = nil',
r'u = nil',
r'1 = {',
r't = 3\.14[0-9]+', 'u = "hello"'])
|
# TestPOUnwrapping.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
"""Test that we can correctly handle a nested generic type."""
import lldbsuite.test.lldbrepl as lldbrepl
import lldbsuite.test.decorators as decorators
class REPLBasicTestCase(lldbrepl.REPLTest):
    # Directory containing this test, as required by the lldb test harness.
    mydir = lldbrepl.REPLTest.compute_mydir(__file__)

    def doTest(self):
        # Evaluate a Swift snippet in the REPL: define a generic class with
        # two optional stored properties, then build a tuple of two
        # differently-instantiated Foo values. The `patterns` regexes check
        # that lldb formats the nested generic contents correctly
        # (nil fields for the default-initialized value, 3.14/"hello" for
        # the other).
        self.command(
            '''class Foo<T,U> {
var t: T?
var u: U?
init() { t = nil; u = nil }
init(_ x: T, _ y: U) { t = x; u = y }
};(Foo<String,Double>(),Foo<Double,String>(3.14,"hello"))''',
            patterns=[
                r'\$R0: \(Foo<String, Double>, Foo<Double, String>\) = {',
                r'0 = {',
                r't = nil',
                r'u = nil',
                r'1 = {',
                r't = 3\.14[0-9]+', 'u = "hello"'])
|
Revert "Disable test that fails on bot"
|
Revert "Disable test that fails on bot"
This reverts commit e214e46e748881e6418ffac374a87d6ad30fcfea.
I have reverted the swift commit that was causing this failure.
rdar://35264910
|
Python
|
apache-2.0
|
apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb
|
# TestPOUnwrapping.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
"""Test that we can correctly handle a nested generic type."""
import lldbsuite.test.lldbrepl as lldbrepl
import lldbsuite.test.decorators as decorators
class REPLBasicTestCase(lldbrepl.REPLTest):
mydir = lldbrepl.REPLTest.compute_mydir(__file__)
@decorators.expectedFailureAll(
oslist=[
"macosx",
"linux"],
bugnumber="rdar://35264910")
def doTest(self):
self.command(
'''class Foo<T,U> {
var t: T?
var u: U?
init() { t = nil; u = nil }
init(_ x: T, _ y: U) { t = x; u = y }
};(Foo<String,Double>(),Foo<Double,String>(3.14,"hello"))''',
patterns=[
r'\$R0: \(Foo<String, Double>, Foo<Double, String>\) = {',
r'0 = {',
r't = nil',
r'u = nil',
r'1 = {',
r't = 3\.14[0-9]+', 'u = "hello"'])
Revert "Disable test that fails on bot"
This reverts commit e214e46e748881e6418ffac374a87d6ad30fcfea.
I have reverted the swift commit that was causing this failure.
rdar://35264910
|
# TestPOUnwrapping.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
"""Test that we can correctly handle a nested generic type."""
import lldbsuite.test.lldbrepl as lldbrepl
import lldbsuite.test.decorators as decorators
class REPLBasicTestCase(lldbrepl.REPLTest):
mydir = lldbrepl.REPLTest.compute_mydir(__file__)
def doTest(self):
self.command(
'''class Foo<T,U> {
var t: T?
var u: U?
init() { t = nil; u = nil }
init(_ x: T, _ y: U) { t = x; u = y }
};(Foo<String,Double>(),Foo<Double,String>(3.14,"hello"))''',
patterns=[
r'\$R0: \(Foo<String, Double>, Foo<Double, String>\) = {',
r'0 = {',
r't = nil',
r'u = nil',
r'1 = {',
r't = 3\.14[0-9]+', 'u = "hello"'])
|
<commit_before># TestPOUnwrapping.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
"""Test that we can correctly handle a nested generic type."""
import lldbsuite.test.lldbrepl as lldbrepl
import lldbsuite.test.decorators as decorators
class REPLBasicTestCase(lldbrepl.REPLTest):
mydir = lldbrepl.REPLTest.compute_mydir(__file__)
@decorators.expectedFailureAll(
oslist=[
"macosx",
"linux"],
bugnumber="rdar://35264910")
def doTest(self):
self.command(
'''class Foo<T,U> {
var t: T?
var u: U?
init() { t = nil; u = nil }
init(_ x: T, _ y: U) { t = x; u = y }
};(Foo<String,Double>(),Foo<Double,String>(3.14,"hello"))''',
patterns=[
r'\$R0: \(Foo<String, Double>, Foo<Double, String>\) = {',
r'0 = {',
r't = nil',
r'u = nil',
r'1 = {',
r't = 3\.14[0-9]+', 'u = "hello"'])
<commit_msg>Revert "Disable test that fails on bot"
This reverts commit e214e46e748881e6418ffac374a87d6ad30fcfea.
I have reverted the swift commit that was causing this failure.
rdar://35264910<commit_after>
|
# TestPOUnwrapping.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
"""Test that we can correctly handle a nested generic type."""
import lldbsuite.test.lldbrepl as lldbrepl
import lldbsuite.test.decorators as decorators
class REPLBasicTestCase(lldbrepl.REPLTest):
mydir = lldbrepl.REPLTest.compute_mydir(__file__)
def doTest(self):
self.command(
'''class Foo<T,U> {
var t: T?
var u: U?
init() { t = nil; u = nil }
init(_ x: T, _ y: U) { t = x; u = y }
};(Foo<String,Double>(),Foo<Double,String>(3.14,"hello"))''',
patterns=[
r'\$R0: \(Foo<String, Double>, Foo<Double, String>\) = {',
r'0 = {',
r't = nil',
r'u = nil',
r'1 = {',
r't = 3\.14[0-9]+', 'u = "hello"'])
|
# TestPOUnwrapping.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
"""Test that we can correctly handle a nested generic type."""
import lldbsuite.test.lldbrepl as lldbrepl
import lldbsuite.test.decorators as decorators
class REPLBasicTestCase(lldbrepl.REPLTest):
mydir = lldbrepl.REPLTest.compute_mydir(__file__)
@decorators.expectedFailureAll(
oslist=[
"macosx",
"linux"],
bugnumber="rdar://35264910")
def doTest(self):
self.command(
'''class Foo<T,U> {
var t: T?
var u: U?
init() { t = nil; u = nil }
init(_ x: T, _ y: U) { t = x; u = y }
};(Foo<String,Double>(),Foo<Double,String>(3.14,"hello"))''',
patterns=[
r'\$R0: \(Foo<String, Double>, Foo<Double, String>\) = {',
r'0 = {',
r't = nil',
r'u = nil',
r'1 = {',
r't = 3\.14[0-9]+', 'u = "hello"'])
Revert "Disable test that fails on bot"
This reverts commit e214e46e748881e6418ffac374a87d6ad30fcfea.
I have reverted the swift commit that was causing this failure.
rdar://35264910# TestPOUnwrapping.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
"""Test that we can correctly handle a nested generic type."""
import lldbsuite.test.lldbrepl as lldbrepl
import lldbsuite.test.decorators as decorators
class REPLBasicTestCase(lldbrepl.REPLTest):
mydir = lldbrepl.REPLTest.compute_mydir(__file__)
def doTest(self):
self.command(
'''class Foo<T,U> {
var t: T?
var u: U?
init() { t = nil; u = nil }
init(_ x: T, _ y: U) { t = x; u = y }
};(Foo<String,Double>(),Foo<Double,String>(3.14,"hello"))''',
patterns=[
r'\$R0: \(Foo<String, Double>, Foo<Double, String>\) = {',
r'0 = {',
r't = nil',
r'u = nil',
r'1 = {',
r't = 3\.14[0-9]+', 'u = "hello"'])
|
<commit_before># TestPOUnwrapping.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
"""Test that we can correctly handle a nested generic type."""
import lldbsuite.test.lldbrepl as lldbrepl
import lldbsuite.test.decorators as decorators
class REPLBasicTestCase(lldbrepl.REPLTest):
mydir = lldbrepl.REPLTest.compute_mydir(__file__)
@decorators.expectedFailureAll(
oslist=[
"macosx",
"linux"],
bugnumber="rdar://35264910")
def doTest(self):
self.command(
'''class Foo<T,U> {
var t: T?
var u: U?
init() { t = nil; u = nil }
init(_ x: T, _ y: U) { t = x; u = y }
};(Foo<String,Double>(),Foo<Double,String>(3.14,"hello"))''',
patterns=[
r'\$R0: \(Foo<String, Double>, Foo<Double, String>\) = {',
r'0 = {',
r't = nil',
r'u = nil',
r'1 = {',
r't = 3\.14[0-9]+', 'u = "hello"'])
<commit_msg>Revert "Disable test that fails on bot"
This reverts commit e214e46e748881e6418ffac374a87d6ad30fcfea.
I have reverted the swift commit that was causing this failure.
rdar://35264910<commit_after># TestPOUnwrapping.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
"""Test that we can correctly handle a nested generic type."""
import lldbsuite.test.lldbrepl as lldbrepl
import lldbsuite.test.decorators as decorators
class REPLBasicTestCase(lldbrepl.REPLTest):
mydir = lldbrepl.REPLTest.compute_mydir(__file__)
def doTest(self):
self.command(
'''class Foo<T,U> {
var t: T?
var u: U?
init() { t = nil; u = nil }
init(_ x: T, _ y: U) { t = x; u = y }
};(Foo<String,Double>(),Foo<Double,String>(3.14,"hello"))''',
patterns=[
r'\$R0: \(Foo<String, Double>, Foo<Double, String>\) = {',
r'0 = {',
r't = nil',
r'u = nil',
r'1 = {',
r't = 3\.14[0-9]+', 'u = "hello"'])
|
dfa72ed557b0206e4f19584d317d202f6e4b84c9
|
tests/read/test_read_api.py
|
tests/read/test_read_api.py
|
import unittest
import urllib
import datetime
from hamcrest import *
from mock import patch
import pytz
from backdrop.read import api
class ReadApiTestCase(unittest.TestCase):
def setUp(self):
self.app = api.app.test_client()
# @patch('backdrop.core.storage.Bucket.query')
# def test_period_query_is_executed(self, mock_query):
# mock_query.return_value = None
# self.app.get('/foo&period=week')
# mock_query.assert_called_with({'foo': 'bar'})
@patch('backdrop.core.storage.Bucket.query')
def test_filter_by_query_is_executed(self, mock_query):
mock_query.return_value = None
self.app.get('/foo?filter_by=zombies:yes')
mock_query.assert_called_with(filter_by=[[u'zombies',u'yes']])
@patch('backdrop.core.storage.Bucket.query')
def test_group_by_query_is_executed(self, mock_query):
mock_query.return_value = None
self.app.get('/foo?group_by=zombies')
mock_query.assert_called_with(group_by=u'zombies')
@patch('backdrop.core.storage.Bucket.query')
def test_start_at_is_executed(self, mock_query):
mock_query.return_value = None
expected_start_at = datetime.datetime(2012, 12, 12, 8, 12, 43,
tzinfo=pytz.UTC)
self.app.get(
'/foo?start_at=' + urllib.quote("2012-12-12T08:12:43+00:00")
)
mock_query.assert_called_with(start_at=expected_start_at)
@patch('backdrop.core.storage.Bucket.query')
def test_start_at_is_executed(self, mock_query):
mock_query.return_value = None
expected_end_at = datetime.datetime(2012, 12, 12, 8, 12, 43,
tzinfo=pytz.UTC)
self.app.get(
'/foo?end_at=' + urllib.quote("2012-12-12T08:12:43+00:00")
)
mock_query.assert_called_with(end_at=expected_end_at)
|
Test api queries result in correct storage queries
|
Test api queries result in correct storage queries
|
Python
|
mit
|
alphagov/backdrop,alphagov/backdrop,alphagov/backdrop
|
Test api queries result in correct storage queries
|
import unittest
import urllib
import datetime
from hamcrest import *
from mock import patch
import pytz
from backdrop.read import api
class ReadApiTestCase(unittest.TestCase):
def setUp(self):
self.app = api.app.test_client()
# @patch('backdrop.core.storage.Bucket.query')
# def test_period_query_is_executed(self, mock_query):
# mock_query.return_value = None
# self.app.get('/foo&period=week')
# mock_query.assert_called_with({'foo': 'bar'})
@patch('backdrop.core.storage.Bucket.query')
def test_filter_by_query_is_executed(self, mock_query):
mock_query.return_value = None
self.app.get('/foo?filter_by=zombies:yes')
mock_query.assert_called_with(filter_by=[[u'zombies',u'yes']])
@patch('backdrop.core.storage.Bucket.query')
def test_group_by_query_is_executed(self, mock_query):
mock_query.return_value = None
self.app.get('/foo?group_by=zombies')
mock_query.assert_called_with(group_by=u'zombies')
@patch('backdrop.core.storage.Bucket.query')
def test_start_at_is_executed(self, mock_query):
mock_query.return_value = None
expected_start_at = datetime.datetime(2012, 12, 12, 8, 12, 43,
tzinfo=pytz.UTC)
self.app.get(
'/foo?start_at=' + urllib.quote("2012-12-12T08:12:43+00:00")
)
mock_query.assert_called_with(start_at=expected_start_at)
@patch('backdrop.core.storage.Bucket.query')
def test_start_at_is_executed(self, mock_query):
mock_query.return_value = None
expected_end_at = datetime.datetime(2012, 12, 12, 8, 12, 43,
tzinfo=pytz.UTC)
self.app.get(
'/foo?end_at=' + urllib.quote("2012-12-12T08:12:43+00:00")
)
mock_query.assert_called_with(end_at=expected_end_at)
|
<commit_before><commit_msg>Test api queries result in correct storage queries<commit_after>
|
import unittest
import urllib
import datetime
from hamcrest import *
from mock import patch
import pytz
from backdrop.read import api
class ReadApiTestCase(unittest.TestCase):
def setUp(self):
self.app = api.app.test_client()
# @patch('backdrop.core.storage.Bucket.query')
# def test_period_query_is_executed(self, mock_query):
# mock_query.return_value = None
# self.app.get('/foo&period=week')
# mock_query.assert_called_with({'foo': 'bar'})
@patch('backdrop.core.storage.Bucket.query')
def test_filter_by_query_is_executed(self, mock_query):
mock_query.return_value = None
self.app.get('/foo?filter_by=zombies:yes')
mock_query.assert_called_with(filter_by=[[u'zombies',u'yes']])
@patch('backdrop.core.storage.Bucket.query')
def test_group_by_query_is_executed(self, mock_query):
mock_query.return_value = None
self.app.get('/foo?group_by=zombies')
mock_query.assert_called_with(group_by=u'zombies')
@patch('backdrop.core.storage.Bucket.query')
def test_start_at_is_executed(self, mock_query):
mock_query.return_value = None
expected_start_at = datetime.datetime(2012, 12, 12, 8, 12, 43,
tzinfo=pytz.UTC)
self.app.get(
'/foo?start_at=' + urllib.quote("2012-12-12T08:12:43+00:00")
)
mock_query.assert_called_with(start_at=expected_start_at)
@patch('backdrop.core.storage.Bucket.query')
def test_start_at_is_executed(self, mock_query):
mock_query.return_value = None
expected_end_at = datetime.datetime(2012, 12, 12, 8, 12, 43,
tzinfo=pytz.UTC)
self.app.get(
'/foo?end_at=' + urllib.quote("2012-12-12T08:12:43+00:00")
)
mock_query.assert_called_with(end_at=expected_end_at)
|
Test api queries result in correct storage queriesimport unittest
import urllib
import datetime
from hamcrest import *
from mock import patch
import pytz
from backdrop.read import api
class ReadApiTestCase(unittest.TestCase):
def setUp(self):
self.app = api.app.test_client()
# @patch('backdrop.core.storage.Bucket.query')
# def test_period_query_is_executed(self, mock_query):
# mock_query.return_value = None
# self.app.get('/foo&period=week')
# mock_query.assert_called_with({'foo': 'bar'})
@patch('backdrop.core.storage.Bucket.query')
def test_filter_by_query_is_executed(self, mock_query):
mock_query.return_value = None
self.app.get('/foo?filter_by=zombies:yes')
mock_query.assert_called_with(filter_by=[[u'zombies',u'yes']])
@patch('backdrop.core.storage.Bucket.query')
def test_group_by_query_is_executed(self, mock_query):
mock_query.return_value = None
self.app.get('/foo?group_by=zombies')
mock_query.assert_called_with(group_by=u'zombies')
@patch('backdrop.core.storage.Bucket.query')
def test_start_at_is_executed(self, mock_query):
mock_query.return_value = None
expected_start_at = datetime.datetime(2012, 12, 12, 8, 12, 43,
tzinfo=pytz.UTC)
self.app.get(
'/foo?start_at=' + urllib.quote("2012-12-12T08:12:43+00:00")
)
mock_query.assert_called_with(start_at=expected_start_at)
@patch('backdrop.core.storage.Bucket.query')
def test_start_at_is_executed(self, mock_query):
mock_query.return_value = None
expected_end_at = datetime.datetime(2012, 12, 12, 8, 12, 43,
tzinfo=pytz.UTC)
self.app.get(
'/foo?end_at=' + urllib.quote("2012-12-12T08:12:43+00:00")
)
mock_query.assert_called_with(end_at=expected_end_at)
|
<commit_before><commit_msg>Test api queries result in correct storage queries<commit_after>import unittest
import urllib
import datetime
from hamcrest import *
from mock import patch
import pytz
from backdrop.read import api
class ReadApiTestCase(unittest.TestCase):
def setUp(self):
self.app = api.app.test_client()
# @patch('backdrop.core.storage.Bucket.query')
# def test_period_query_is_executed(self, mock_query):
# mock_query.return_value = None
# self.app.get('/foo&period=week')
# mock_query.assert_called_with({'foo': 'bar'})
@patch('backdrop.core.storage.Bucket.query')
def test_filter_by_query_is_executed(self, mock_query):
mock_query.return_value = None
self.app.get('/foo?filter_by=zombies:yes')
mock_query.assert_called_with(filter_by=[[u'zombies',u'yes']])
@patch('backdrop.core.storage.Bucket.query')
def test_group_by_query_is_executed(self, mock_query):
mock_query.return_value = None
self.app.get('/foo?group_by=zombies')
mock_query.assert_called_with(group_by=u'zombies')
@patch('backdrop.core.storage.Bucket.query')
def test_start_at_is_executed(self, mock_query):
mock_query.return_value = None
expected_start_at = datetime.datetime(2012, 12, 12, 8, 12, 43,
tzinfo=pytz.UTC)
self.app.get(
'/foo?start_at=' + urllib.quote("2012-12-12T08:12:43+00:00")
)
mock_query.assert_called_with(start_at=expected_start_at)
@patch('backdrop.core.storage.Bucket.query')
def test_start_at_is_executed(self, mock_query):
mock_query.return_value = None
expected_end_at = datetime.datetime(2012, 12, 12, 8, 12, 43,
tzinfo=pytz.UTC)
self.app.get(
'/foo?end_at=' + urllib.quote("2012-12-12T08:12:43+00:00")
)
mock_query.assert_called_with(end_at=expected_end_at)
|
|
04bccd678ba7a67373b94695d7d87d0cf95dffd6
|
tests/unit/app_unit_test.py
|
tests/unit/app_unit_test.py
|
# -*- coding: utf-8 -*-
"""
Unit Test: orchard.app
"""
import unittest
import orchard
class AppUnitTest(unittest.TestCase):
def setUp(self):
app = orchard.create_app('Testing')
self.app_context = app.app_context()
self.app_context.push()
self.client = app.test_client(use_cookies = True)
def tearDown(self):
self.app_context.pop()
def test_index(self):
response = self.client.get('/')
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue('Welcome to the Orchard!' in data)
response = self.client.get('/BMeu')
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue('Welcome to the Orchard, BMeu!' in data)
|
# -*- coding: utf-8 -*-
"""
Unit Test: orchard.app
"""
import unittest
import orchard
class AppUnitTest(unittest.TestCase):
def setUp(self):
app = orchard.create_app('Testing')
app.config['BABEL_DEFAULT_LOCALE'] = 'en'
self.app_context = app.app_context()
self.app_context.push()
self.client = app.test_client(use_cookies = True)
def tearDown(self):
self.app_context.pop()
def test_index(self):
response = self.client.get('/')
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue('Welcome to the Orchard!' in data)
response = self.client.get('/BMeu')
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue('Welcome to the Orchard, BMeu!' in data)
|
Set default locale in test to avoid test failures when different default is used than expected.
|
Set default locale in test to avoid test failures when different default is used than expected.
|
Python
|
mit
|
BMeu/Orchard,BMeu/Orchard
|
# -*- coding: utf-8 -*-
"""
Unit Test: orchard.app
"""
import unittest
import orchard
class AppUnitTest(unittest.TestCase):
def setUp(self):
app = orchard.create_app('Testing')
self.app_context = app.app_context()
self.app_context.push()
self.client = app.test_client(use_cookies = True)
def tearDown(self):
self.app_context.pop()
def test_index(self):
response = self.client.get('/')
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue('Welcome to the Orchard!' in data)
response = self.client.get('/BMeu')
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue('Welcome to the Orchard, BMeu!' in data)
Set default locale in test to avoid test failures when different default is used than expected.
|
# -*- coding: utf-8 -*-
"""
Unit Test: orchard.app
"""
import unittest
import orchard
class AppUnitTest(unittest.TestCase):
def setUp(self):
app = orchard.create_app('Testing')
app.config['BABEL_DEFAULT_LOCALE'] = 'en'
self.app_context = app.app_context()
self.app_context.push()
self.client = app.test_client(use_cookies = True)
def tearDown(self):
self.app_context.pop()
def test_index(self):
response = self.client.get('/')
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue('Welcome to the Orchard!' in data)
response = self.client.get('/BMeu')
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue('Welcome to the Orchard, BMeu!' in data)
|
<commit_before># -*- coding: utf-8 -*-
"""
Unit Test: orchard.app
"""
import unittest
import orchard
class AppUnitTest(unittest.TestCase):
def setUp(self):
app = orchard.create_app('Testing')
self.app_context = app.app_context()
self.app_context.push()
self.client = app.test_client(use_cookies = True)
def tearDown(self):
self.app_context.pop()
def test_index(self):
response = self.client.get('/')
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue('Welcome to the Orchard!' in data)
response = self.client.get('/BMeu')
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue('Welcome to the Orchard, BMeu!' in data)
<commit_msg>Set default locale in test to avoid test failures when different default is used than expected.<commit_after>
|
# -*- coding: utf-8 -*-
"""
Unit Test: orchard.app
"""
import unittest
import orchard
class AppUnitTest(unittest.TestCase):
def setUp(self):
app = orchard.create_app('Testing')
app.config['BABEL_DEFAULT_LOCALE'] = 'en'
self.app_context = app.app_context()
self.app_context.push()
self.client = app.test_client(use_cookies = True)
def tearDown(self):
self.app_context.pop()
def test_index(self):
response = self.client.get('/')
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue('Welcome to the Orchard!' in data)
response = self.client.get('/BMeu')
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue('Welcome to the Orchard, BMeu!' in data)
|
# -*- coding: utf-8 -*-
"""
Unit Test: orchard.app
"""
import unittest
import orchard
class AppUnitTest(unittest.TestCase):
def setUp(self):
app = orchard.create_app('Testing')
self.app_context = app.app_context()
self.app_context.push()
self.client = app.test_client(use_cookies = True)
def tearDown(self):
self.app_context.pop()
def test_index(self):
response = self.client.get('/')
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue('Welcome to the Orchard!' in data)
response = self.client.get('/BMeu')
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue('Welcome to the Orchard, BMeu!' in data)
Set default locale in test to avoid test failures when different default is used than expected.# -*- coding: utf-8 -*-
"""
Unit Test: orchard.app
"""
import unittest
import orchard
class AppUnitTest(unittest.TestCase):
def setUp(self):
app = orchard.create_app('Testing')
app.config['BABEL_DEFAULT_LOCALE'] = 'en'
self.app_context = app.app_context()
self.app_context.push()
self.client = app.test_client(use_cookies = True)
def tearDown(self):
self.app_context.pop()
def test_index(self):
response = self.client.get('/')
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue('Welcome to the Orchard!' in data)
response = self.client.get('/BMeu')
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue('Welcome to the Orchard, BMeu!' in data)
|
<commit_before># -*- coding: utf-8 -*-
"""
Unit Test: orchard.app
"""
import unittest
import orchard
class AppUnitTest(unittest.TestCase):
def setUp(self):
app = orchard.create_app('Testing')
self.app_context = app.app_context()
self.app_context.push()
self.client = app.test_client(use_cookies = True)
def tearDown(self):
self.app_context.pop()
def test_index(self):
response = self.client.get('/')
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue('Welcome to the Orchard!' in data)
response = self.client.get('/BMeu')
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue('Welcome to the Orchard, BMeu!' in data)
<commit_msg>Set default locale in test to avoid test failures when different default is used than expected.<commit_after># -*- coding: utf-8 -*-
"""
Unit Test: orchard.app
"""
import unittest
import orchard
class AppUnitTest(unittest.TestCase):
def setUp(self):
app = orchard.create_app('Testing')
app.config['BABEL_DEFAULT_LOCALE'] = 'en'
self.app_context = app.app_context()
self.app_context.push()
self.client = app.test_client(use_cookies = True)
def tearDown(self):
self.app_context.pop()
def test_index(self):
response = self.client.get('/')
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue('Welcome to the Orchard!' in data)
response = self.client.get('/BMeu')
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue('Welcome to the Orchard, BMeu!' in data)
|
874477151ba07dd976dd53f604682f018a3c223f
|
yolk/__init__.py
|
yolk/__init__.py
|
"""yolk.
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.8.1'
|
"""yolk.
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.8.2'
|
Increment patch version to 0.8.2
|
Increment patch version to 0.8.2
|
Python
|
bsd-3-clause
|
myint/yolk,myint/yolk
|
"""yolk.
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.8.1'
Increment patch version to 0.8.2
|
"""yolk.
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.8.2'
|
<commit_before>"""yolk.
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.8.1'
<commit_msg>Increment patch version to 0.8.2<commit_after>
|
"""yolk.
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.8.2'
|
"""yolk.
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.8.1'
Increment patch version to 0.8.2"""yolk.
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.8.2'
|
<commit_before>"""yolk.
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.8.1'
<commit_msg>Increment patch version to 0.8.2<commit_after>"""yolk.
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.8.2'
|
2bc249dc4996c0cccfe61a3d8bf1658fa987e7cf
|
costcocr/writers/csv.py
|
costcocr/writers/csv.py
|
def csv():
def Receipt(meta, body, variables):
output = []
def add(s) : output.append(s)
if "store" in meta:
add("# Store: {}".format(meta["store"]))
if "date" in meta:
add("# Date: {}".format(meta["date"]))
if "location" in meta:
add("# Location: {}".format(meta["location"]))
add(body)
return "\n".join(output)
def ItemList(s): return s
def ItemListSep(): return "\n"
## Could also be used to collapse the fields, or exclude some.
def Item(name, cost, discount, tax):
return "{}, {}, {}, {}".format(name, cost, discount, tax)
return {
"Receipt" : Receipt,
"ItemList" : ItemList,
"ItemListSep" : ItemListSep,
"Item" : Item,
}
|
def Receipt(meta, body, variables):
output = []
def add(s) : output.append(s)
if "store" in meta:
add("# Store: {}".format(meta["store"]))
if "date" in meta:
add("# Date: {}".format(meta["date"]))
if "location" in meta:
add("# Location: {}".format(meta["location"]))
add(body)
return "\n".join(output)
def ItemList(s): return s
def ItemListSep(): return "\n"
## Could also be used to collapse the fields, or exclude some.
def Item(name, cost, discount, tax):
return "{}, {}, {}, {}".format(name, cost, discount, tax)
|
Convert CSV writer to a module definition.
|
Convert CSV writer to a module definition.
Use __import__ to import it as a dictionary.
|
Python
|
bsd-3-clause
|
rdodesigns/costcocr
|
def csv():
def Receipt(meta, body, variables):
output = []
def add(s) : output.append(s)
if "store" in meta:
add("# Store: {}".format(meta["store"]))
if "date" in meta:
add("# Date: {}".format(meta["date"]))
if "location" in meta:
add("# Location: {}".format(meta["location"]))
add(body)
return "\n".join(output)
def ItemList(s): return s
def ItemListSep(): return "\n"
## Could also be used to collapse the fields, or exclude some.
def Item(name, cost, discount, tax):
return "{}, {}, {}, {}".format(name, cost, discount, tax)
return {
"Receipt" : Receipt,
"ItemList" : ItemList,
"ItemListSep" : ItemListSep,
"Item" : Item,
}
Convert CSV writer to a module definition.
Use __import__ to import it as a dictionary.
|
def Receipt(meta, body, variables):
output = []
def add(s) : output.append(s)
if "store" in meta:
add("# Store: {}".format(meta["store"]))
if "date" in meta:
add("# Date: {}".format(meta["date"]))
if "location" in meta:
add("# Location: {}".format(meta["location"]))
add(body)
return "\n".join(output)
def ItemList(s): return s
def ItemListSep(): return "\n"
## Could also be used to collapse the fields, or exclude some.
def Item(name, cost, discount, tax):
return "{}, {}, {}, {}".format(name, cost, discount, tax)
|
<commit_before>
def csv():
def Receipt(meta, body, variables):
output = []
def add(s) : output.append(s)
if "store" in meta:
add("# Store: {}".format(meta["store"]))
if "date" in meta:
add("# Date: {}".format(meta["date"]))
if "location" in meta:
add("# Location: {}".format(meta["location"]))
add(body)
return "\n".join(output)
def ItemList(s): return s
def ItemListSep(): return "\n"
## Could also be used to collapse the fields, or exclude some.
def Item(name, cost, discount, tax):
return "{}, {}, {}, {}".format(name, cost, discount, tax)
return {
"Receipt" : Receipt,
"ItemList" : ItemList,
"ItemListSep" : ItemListSep,
"Item" : Item,
}
<commit_msg>Convert CSV writer to a module definition.
Use __import__ to import it as a dictionary.<commit_after>
|
def Receipt(meta, body, variables):
output = []
def add(s) : output.append(s)
if "store" in meta:
add("# Store: {}".format(meta["store"]))
if "date" in meta:
add("# Date: {}".format(meta["date"]))
if "location" in meta:
add("# Location: {}".format(meta["location"]))
add(body)
return "\n".join(output)
def ItemList(s): return s
def ItemListSep(): return "\n"
## Could also be used to collapse the fields, or exclude some.
def Item(name, cost, discount, tax):
return "{}, {}, {}, {}".format(name, cost, discount, tax)
|
def csv():
def Receipt(meta, body, variables):
output = []
def add(s) : output.append(s)
if "store" in meta:
add("# Store: {}".format(meta["store"]))
if "date" in meta:
add("# Date: {}".format(meta["date"]))
if "location" in meta:
add("# Location: {}".format(meta["location"]))
add(body)
return "\n".join(output)
def ItemList(s): return s
def ItemListSep(): return "\n"
## Could also be used to collapse the fields, or exclude some.
def Item(name, cost, discount, tax):
return "{}, {}, {}, {}".format(name, cost, discount, tax)
return {
"Receipt" : Receipt,
"ItemList" : ItemList,
"ItemListSep" : ItemListSep,
"Item" : Item,
}
Convert CSV writer to a module definition.
Use __import__ to import it as a dictionary.def Receipt(meta, body, variables):
output = []
def add(s) : output.append(s)
if "store" in meta:
add("# Store: {}".format(meta["store"]))
if "date" in meta:
add("# Date: {}".format(meta["date"]))
if "location" in meta:
add("# Location: {}".format(meta["location"]))
add(body)
return "\n".join(output)
def ItemList(s): return s
def ItemListSep(): return "\n"
## Could also be used to collapse the fields, or exclude some.
def Item(name, cost, discount, tax):
return "{}, {}, {}, {}".format(name, cost, discount, tax)
|
<commit_before>
def csv():
def Receipt(meta, body, variables):
output = []
def add(s) : output.append(s)
if "store" in meta:
add("# Store: {}".format(meta["store"]))
if "date" in meta:
add("# Date: {}".format(meta["date"]))
if "location" in meta:
add("# Location: {}".format(meta["location"]))
add(body)
return "\n".join(output)
def ItemList(s): return s
def ItemListSep(): return "\n"
## Could also be used to collapse the fields, or exclude some.
def Item(name, cost, discount, tax):
return "{}, {}, {}, {}".format(name, cost, discount, tax)
return {
"Receipt" : Receipt,
"ItemList" : ItemList,
"ItemListSep" : ItemListSep,
"Item" : Item,
}
<commit_msg>Convert CSV writer to a module definition.
Use __import__ to import it as a dictionary.<commit_after>def Receipt(meta, body, variables):
output = []
def add(s) : output.append(s)
if "store" in meta:
add("# Store: {}".format(meta["store"]))
if "date" in meta:
add("# Date: {}".format(meta["date"]))
if "location" in meta:
add("# Location: {}".format(meta["location"]))
add(body)
return "\n".join(output)
def ItemList(s): return s
def ItemListSep(): return "\n"
## Could also be used to collapse the fields, or exclude some.
def Item(name, cost, discount, tax):
return "{}, {}, {}, {}".format(name, cost, discount, tax)
|
7fa8ba8cffcf3e6dd4748389e1a43776f095a559
|
postatus/settings.py
|
postatus/settings.py
|
import os
def truthy(item):
return item.lower().startswith('t')
DEBUG = truthy(os.environ.get('DEBUG', 'True'))
PROJECTS = {
'SUMO': {
'name': 'SUMO',
'url': 'https://support.mozilla.org/',
'postatus_url': 'https://support.mozilla.org/media/postatus.txt',
'verbatim_url': 'https://localize.mozilla.org/projects/sumo/',
'verbatim_locale_url': 'https://localize.mozilla.org/%s/sumo/',
'l10n_completion_url': 'https://support.mozilla.org/media/uploads/l10n_history.json',
},
'Input': {
'name': 'Input',
'url': 'https://input.mozilla.org/',
'postatus_url': 'https://input.mozilla.org/media/postatus.txt',
'verbatim_url': 'https://localize.mozilla.org/projects/input/',
'verbatim_locale_url': 'https://localize.mozilla.org/%s/input/',
'l10n_completion_url': 'https://input.mozilla.org/static/l10n_completion.json',
},
}
|
import os
def truthy(item):
return item.lower().startswith('t')
DEBUG = truthy(os.environ.get('DEBUG', 'True'))
PROJECTS = {
'SUMO': {
'name': 'SUMO',
'url': 'https://support.mozilla.org/',
'postatus_url': 'https://support.mozilla.org/media/postatus.txt',
'verbatim_url': 'https://localize.mozilla.org/projects/sumo/',
'verbatim_locale_url': 'https://localize.mozilla.org/%s/sumo/',
'l10n_completion_url': 'https://support.mozilla.org/media/uploads/l10n_history.json',
},
'Input': {
'name': 'Input',
'url': 'https://input.mozilla.org/',
'postatus_url': 'https://input.mozilla.org/media/postatus.txt',
'verbatim_url': 'https://localize.mozilla.org/projects/input/',
'verbatim_locale_url': 'https://localize.mozilla.org/%s/input/',
'l10n_completion_url': 'https://people.mozilla.org/~wkahngreene/l10n/fjord_completion.json',
},
}
|
Update fjord l10n_completion file location
|
Update fjord l10n_completion file location
|
Python
|
bsd-3-clause
|
willkg/postatus,willkg/postatus,willkg/postatus
|
import os
def truthy(item):
return item.lower().startswith('t')
DEBUG = truthy(os.environ.get('DEBUG', 'True'))
PROJECTS = {
'SUMO': {
'name': 'SUMO',
'url': 'https://support.mozilla.org/',
'postatus_url': 'https://support.mozilla.org/media/postatus.txt',
'verbatim_url': 'https://localize.mozilla.org/projects/sumo/',
'verbatim_locale_url': 'https://localize.mozilla.org/%s/sumo/',
'l10n_completion_url': 'https://support.mozilla.org/media/uploads/l10n_history.json',
},
'Input': {
'name': 'Input',
'url': 'https://input.mozilla.org/',
'postatus_url': 'https://input.mozilla.org/media/postatus.txt',
'verbatim_url': 'https://localize.mozilla.org/projects/input/',
'verbatim_locale_url': 'https://localize.mozilla.org/%s/input/',
'l10n_completion_url': 'https://input.mozilla.org/static/l10n_completion.json',
},
}
Update fjord l10n_completion file location
|
import os
def truthy(item):
return item.lower().startswith('t')
DEBUG = truthy(os.environ.get('DEBUG', 'True'))
PROJECTS = {
'SUMO': {
'name': 'SUMO',
'url': 'https://support.mozilla.org/',
'postatus_url': 'https://support.mozilla.org/media/postatus.txt',
'verbatim_url': 'https://localize.mozilla.org/projects/sumo/',
'verbatim_locale_url': 'https://localize.mozilla.org/%s/sumo/',
'l10n_completion_url': 'https://support.mozilla.org/media/uploads/l10n_history.json',
},
'Input': {
'name': 'Input',
'url': 'https://input.mozilla.org/',
'postatus_url': 'https://input.mozilla.org/media/postatus.txt',
'verbatim_url': 'https://localize.mozilla.org/projects/input/',
'verbatim_locale_url': 'https://localize.mozilla.org/%s/input/',
'l10n_completion_url': 'https://people.mozilla.org/~wkahngreene/l10n/fjord_completion.json',
},
}
|
<commit_before>import os
def truthy(item):
return item.lower().startswith('t')
DEBUG = truthy(os.environ.get('DEBUG', 'True'))
PROJECTS = {
'SUMO': {
'name': 'SUMO',
'url': 'https://support.mozilla.org/',
'postatus_url': 'https://support.mozilla.org/media/postatus.txt',
'verbatim_url': 'https://localize.mozilla.org/projects/sumo/',
'verbatim_locale_url': 'https://localize.mozilla.org/%s/sumo/',
'l10n_completion_url': 'https://support.mozilla.org/media/uploads/l10n_history.json',
},
'Input': {
'name': 'Input',
'url': 'https://input.mozilla.org/',
'postatus_url': 'https://input.mozilla.org/media/postatus.txt',
'verbatim_url': 'https://localize.mozilla.org/projects/input/',
'verbatim_locale_url': 'https://localize.mozilla.org/%s/input/',
'l10n_completion_url': 'https://input.mozilla.org/static/l10n_completion.json',
},
}
<commit_msg>Update fjord l10n_completion file location<commit_after>
|
import os
def truthy(item):
return item.lower().startswith('t')
DEBUG = truthy(os.environ.get('DEBUG', 'True'))
PROJECTS = {
'SUMO': {
'name': 'SUMO',
'url': 'https://support.mozilla.org/',
'postatus_url': 'https://support.mozilla.org/media/postatus.txt',
'verbatim_url': 'https://localize.mozilla.org/projects/sumo/',
'verbatim_locale_url': 'https://localize.mozilla.org/%s/sumo/',
'l10n_completion_url': 'https://support.mozilla.org/media/uploads/l10n_history.json',
},
'Input': {
'name': 'Input',
'url': 'https://input.mozilla.org/',
'postatus_url': 'https://input.mozilla.org/media/postatus.txt',
'verbatim_url': 'https://localize.mozilla.org/projects/input/',
'verbatim_locale_url': 'https://localize.mozilla.org/%s/input/',
'l10n_completion_url': 'https://people.mozilla.org/~wkahngreene/l10n/fjord_completion.json',
},
}
|
import os
def truthy(item):
return item.lower().startswith('t')
DEBUG = truthy(os.environ.get('DEBUG', 'True'))
PROJECTS = {
'SUMO': {
'name': 'SUMO',
'url': 'https://support.mozilla.org/',
'postatus_url': 'https://support.mozilla.org/media/postatus.txt',
'verbatim_url': 'https://localize.mozilla.org/projects/sumo/',
'verbatim_locale_url': 'https://localize.mozilla.org/%s/sumo/',
'l10n_completion_url': 'https://support.mozilla.org/media/uploads/l10n_history.json',
},
'Input': {
'name': 'Input',
'url': 'https://input.mozilla.org/',
'postatus_url': 'https://input.mozilla.org/media/postatus.txt',
'verbatim_url': 'https://localize.mozilla.org/projects/input/',
'verbatim_locale_url': 'https://localize.mozilla.org/%s/input/',
'l10n_completion_url': 'https://input.mozilla.org/static/l10n_completion.json',
},
}
Update fjord l10n_completion file locationimport os
def truthy(item):
return item.lower().startswith('t')
DEBUG = truthy(os.environ.get('DEBUG', 'True'))
PROJECTS = {
'SUMO': {
'name': 'SUMO',
'url': 'https://support.mozilla.org/',
'postatus_url': 'https://support.mozilla.org/media/postatus.txt',
'verbatim_url': 'https://localize.mozilla.org/projects/sumo/',
'verbatim_locale_url': 'https://localize.mozilla.org/%s/sumo/',
'l10n_completion_url': 'https://support.mozilla.org/media/uploads/l10n_history.json',
},
'Input': {
'name': 'Input',
'url': 'https://input.mozilla.org/',
'postatus_url': 'https://input.mozilla.org/media/postatus.txt',
'verbatim_url': 'https://localize.mozilla.org/projects/input/',
'verbatim_locale_url': 'https://localize.mozilla.org/%s/input/',
'l10n_completion_url': 'https://people.mozilla.org/~wkahngreene/l10n/fjord_completion.json',
},
}
|
<commit_before>import os
def truthy(item):
return item.lower().startswith('t')
DEBUG = truthy(os.environ.get('DEBUG', 'True'))
PROJECTS = {
'SUMO': {
'name': 'SUMO',
'url': 'https://support.mozilla.org/',
'postatus_url': 'https://support.mozilla.org/media/postatus.txt',
'verbatim_url': 'https://localize.mozilla.org/projects/sumo/',
'verbatim_locale_url': 'https://localize.mozilla.org/%s/sumo/',
'l10n_completion_url': 'https://support.mozilla.org/media/uploads/l10n_history.json',
},
'Input': {
'name': 'Input',
'url': 'https://input.mozilla.org/',
'postatus_url': 'https://input.mozilla.org/media/postatus.txt',
'verbatim_url': 'https://localize.mozilla.org/projects/input/',
'verbatim_locale_url': 'https://localize.mozilla.org/%s/input/',
'l10n_completion_url': 'https://input.mozilla.org/static/l10n_completion.json',
},
}
<commit_msg>Update fjord l10n_completion file location<commit_after>import os
def truthy(item):
return item.lower().startswith('t')
DEBUG = truthy(os.environ.get('DEBUG', 'True'))
PROJECTS = {
'SUMO': {
'name': 'SUMO',
'url': 'https://support.mozilla.org/',
'postatus_url': 'https://support.mozilla.org/media/postatus.txt',
'verbatim_url': 'https://localize.mozilla.org/projects/sumo/',
'verbatim_locale_url': 'https://localize.mozilla.org/%s/sumo/',
'l10n_completion_url': 'https://support.mozilla.org/media/uploads/l10n_history.json',
},
'Input': {
'name': 'Input',
'url': 'https://input.mozilla.org/',
'postatus_url': 'https://input.mozilla.org/media/postatus.txt',
'verbatim_url': 'https://localize.mozilla.org/projects/input/',
'verbatim_locale_url': 'https://localize.mozilla.org/%s/input/',
'l10n_completion_url': 'https://people.mozilla.org/~wkahngreene/l10n/fjord_completion.json',
},
}
|
e276753b458ef7c4c05469324173a455c0e2db46
|
tests/basics/subclass-native3.py
|
tests/basics/subclass-native3.py
|
class MyExc(Exception):
pass
e = MyExc(100, "Some error")
print(e)
print(repr(e))
print(e.args)
|
class MyExc(Exception):
pass
e = MyExc(100, "Some error")
print(e)
print(repr(e))
print(e.args)
try:
raise MyExc("Some error")
except MyExc as e:
print("Caught exception:", repr(e))
try:
raise MyExc("Some error2")
except Exception as e:
print("Caught exception:", repr(e))
try:
raise MyExc("Some error2")
except:
print("Caught user exception")
|
Add testcases for catching user Exception subclasses.
|
tests: Add testcases for catching user Exception subclasses.
|
Python
|
mit
|
SungEun-Steve-Kim/test-mp,kerneltask/micropython,oopy/micropython,vitiral/micropython,martinribelotta/micropython,xyb/micropython,omtinez/micropython,HenrikSolver/micropython,ChuckM/micropython,utopiaprince/micropython,ericsnowcurrently/micropython,feilongfl/micropython,ericsnowcurrently/micropython,dxxb/micropython,skybird6672/micropython,hosaka/micropython,dmazzella/micropython,EcmaXp/micropython,micropython/micropython-esp32,Timmenem/micropython,stonegithubs/micropython,redbear/micropython,martinribelotta/micropython,tdautc19841202/micropython,aethaniel/micropython,mpalomer/micropython,torwag/micropython,rubencabrera/micropython,cloudformdesign/micropython,MrSurly/micropython,adafruit/circuitpython,rubencabrera/micropython,selste/micropython,HenrikSolver/micropython,feilongfl/micropython,pramasoul/micropython,cnoviello/micropython,Peetz0r/micropython-esp32,heisewangluo/micropython,dxxb/micropython,jimkmc/micropython,kerneltask/micropython,bvernoux/micropython,blazewicz/micropython,ahotam/micropython,pozetroninc/micropython,adafruit/micropython,noahwilliamsson/micropython,misterdanb/micropython,toolmacher/micropython,lowRISC/micropython,cwyark/micropython,hosaka/micropython,ericsnowcurrently/micropython,slzatz/micropython,praemdonck/micropython,Vogtinator/micropython,warner83/micropython,danicampora/micropython,dinau/micropython,slzatz/micropython,KISSMonX/micropython,neilh10/micropython,HenrikSolver/micropython,tralamazza/micropython,xuxiaoxin/micropython,mhoffma/micropython,kerneltask/micropython,omtinez/micropython,orionrobots/micropython,jimkmc/micropython,xyb/micropython,noahwilliamsson/micropython,pfalcon/micropython,hiway/micropython,ahotam/micropython,trezor/micropython,ericsnowcurrently/micropython,bvernoux/micropython,dhylands/micropython,pozetroninc/micropython,noahchense/micropython,jmarcelino/pycom-micropython,martinribelotta/micropython,warner83/micropython,micropython/micropython-esp32,utopiaprince/micropython,warner83/micropython,adamkh/micropython
,pozetroninc/micropython,supergis/micropython,pozetroninc/micropython,firstval/micropython,lowRISC/micropython,MrSurly/micropython-esp32,dmazzella/micropython,tuc-osg/micropython,swegener/micropython,blmorris/micropython,TDAbboud/micropython,henriknelson/micropython,blazewicz/micropython,pramasoul/micropython,utopiaprince/micropython,jmarcelino/pycom-micropython,oopy/micropython,SHA2017-badge/micropython-esp32,swegener/micropython,mhoffma/micropython,ChuckM/micropython,Vogtinator/micropython,PappaPeppar/micropython,misterdanb/micropython,MrSurly/micropython-esp32,methoxid/micropystat,adamkh/micropython,ganshun666/micropython,jlillest/micropython,dinau/micropython,jlillest/micropython,cnoviello/micropython,methoxid/micropystat,kostyll/micropython,adafruit/circuitpython,praemdonck/micropython,KISSMonX/micropython,neilh10/micropython,trezor/micropython,ceramos/micropython,deshipu/micropython,tralamazza/micropython,infinnovation/micropython,dinau/micropython,neilh10/micropython,pfalcon/micropython,tralamazza/micropython,utopiaprince/micropython,galenhz/micropython,hiway/micropython,KISSMonX/micropython,toolmacher/micropython,vitiral/micropython,slzatz/micropython,ceramos/micropython,xyb/micropython,TDAbboud/micropython,vriera/micropython,tobbad/micropython,SHA2017-badge/micropython-esp32,emfcamp/micropython,rubencabrera/micropython,TDAbboud/micropython,stonegithubs/micropython,noahwilliamsson/micropython,mhoffma/micropython,puuu/micropython,torwag/micropython,supergis/micropython,praemdonck/micropython,kostyll/micropython,cloudformdesign/micropython,kostyll/micropython,toolmacher/micropython,tralamazza/micropython,micropython/micropython-esp32,mpalomer/micropython,pfalcon/micropython,AriZuu/micropython,micropython/micropython-esp32,PappaPeppar/micropython,stonegithubs/micropython,chrisdearman/micropython,paul-xxx/micropython,Timmenem/micropython,emfcamp/micropython,mianos/micropython,turbinenreiter/micropython,dxxb/micropython,hosaka/micropython,hosaka/micropython,deshi
pu/micropython,warner83/micropython,chrisdearman/micropython,PappaPeppar/micropython,ernesto-g/micropython,pramasoul/micropython,vriera/micropython,tuc-osg/micropython,turbinenreiter/micropython,alex-march/micropython,SungEun-Steve-Kim/test-mp,Peetz0r/micropython-esp32,matthewelse/micropython,noahwilliamsson/micropython,tuc-osg/micropython,dmazzella/micropython,methoxid/micropystat,dhylands/micropython,danicampora/micropython,turbinenreiter/micropython,henriknelson/micropython,MrSurly/micropython-esp32,feilongfl/micropython,hiway/micropython,blazewicz/micropython,henriknelson/micropython,selste/micropython,tuc-osg/micropython,noahchense/micropython,Peetz0r/micropython-esp32,PappaPeppar/micropython,cloudformdesign/micropython,MrSurly/micropython,ruffy91/micropython,jmarcelino/pycom-micropython,lbattraw/micropython,EcmaXp/micropython,suda/micropython,emfcamp/micropython,bvernoux/micropython,methoxid/micropystat,supergis/micropython,SHA2017-badge/micropython-esp32,supergis/micropython,ahotam/micropython,Peetz0r/micropython-esp32,danicampora/micropython,cwyark/micropython,orionrobots/micropython,utopiaprince/micropython,tdautc19841202/micropython,henriknelson/micropython,kostyll/micropython,xhat/micropython,paul-xxx/micropython,blazewicz/micropython,matthewelse/micropython,lowRISC/micropython,slzatz/micropython,matthewelse/micropython,galenhz/micropython,ruffy91/micropython,heisewangluo/micropython,MrSurly/micropython-esp32,neilh10/micropython,alex-march/micropython,adafruit/circuitpython,alex-robbins/micropython,SungEun-Steve-Kim/test-mp,mgyenik/micropython,alex-robbins/micropython,selste/micropython,pfalcon/micropython,ChuckM/micropython,adafruit/circuitpython,swegener/micropython,praemdonck/micropython,drrk/micropython,aethaniel/micropython,tobbad/micropython,MrSurly/micropython,mhoffma/micropython,ernesto-g/micropython,ahotam/micropython,dhylands/micropython,stonegithubs/micropython,vitiral/micropython,jlillest/micropython,skybird6672/micropython,bvernoux/micropytho
n,AriZuu/micropython,AriZuu/micropython,skybird6672/micropython,drrk/micropython,SHA2017-badge/micropython-esp32,galenhz/micropython,mianos/micropython,danicampora/micropython,ganshun666/micropython,EcmaXp/micropython,ganshun666/micropython,tdautc19841202/micropython,suda/micropython,jlillest/micropython,ganshun666/micropython,alex-march/micropython,mianos/micropython,ryannathans/micropython,matthewelse/micropython,Timmenem/micropython,torwag/micropython,xyb/micropython,mpalomer/micropython,lowRISC/micropython,lbattraw/micropython,suda/micropython,cloudformdesign/micropython,kerneltask/micropython,noahchense/micropython,heisewangluo/micropython,torwag/micropython,tdautc19841202/micropython,stonegithubs/micropython,alex-march/micropython,puuu/micropython,martinribelotta/micropython,pfalcon/micropython,adafruit/micropython,tobbad/micropython,TDAbboud/micropython,alex-robbins/micropython,ceramos/micropython,alex-robbins/micropython,AriZuu/micropython,paul-xxx/micropython,kostyll/micropython,henriknelson/micropython,alex-robbins/micropython,ernesto-g/micropython,torwag/micropython,vitiral/micropython,ceramos/micropython,misterdanb/micropython,redbear/micropython,praemdonck/micropython,pozetroninc/micropython,dinau/micropython,swegener/micropython,misterdanb/micropython,micropython/micropython-esp32,warner83/micropython,xhat/micropython,Vogtinator/micropython,rubencabrera/micropython,infinnovation/micropython,ChuckM/micropython,EcmaXp/micropython,jmarcelino/pycom-micropython,jimkmc/micropython,mhoffma/micropython,noahchense/micropython,dxxb/micropython,matthewelse/micropython,suda/micropython,vitiral/micropython,pramasoul/micropython,xuxiaoxin/micropython,alex-march/micropython,Peetz0r/micropython-esp32,Vogtinator/micropython,noahchense/micropython,ahotam/micropython,infinnovation/micropython,mgyenik/micropython,mgyenik/micropython,adamkh/micropython,oopy/micropython,MrSurly/micropython,dmazzella/micropython,orionrobots/micropython,redbear/micropython,mgyenik/micropython
,aethaniel/micropython,jlillest/micropython,cnoviello/micropython,martinribelotta/micropython,ChuckM/micropython,adafruit/circuitpython,tobbad/micropython,SungEun-Steve-Kim/test-mp,lbattraw/micropython,tdautc19841202/micropython,ryannathans/micropython,ryannathans/micropython,xhat/micropython,chrisdearman/micropython,hiway/micropython,ruffy91/micropython,feilongfl/micropython,cwyark/micropython,selste/micropython,adafruit/circuitpython,oopy/micropython,adamkh/micropython,suda/micropython,skybird6672/micropython,jimkmc/micropython,SHA2017-badge/micropython-esp32,ryannathans/micropython,blmorris/micropython,kerneltask/micropython,noahwilliamsson/micropython,emfcamp/micropython,blazewicz/micropython,selste/micropython,KISSMonX/micropython,pramasoul/micropython,Timmenem/micropython,heisewangluo/micropython,slzatz/micropython,hiway/micropython,ganshun666/micropython,emfcamp/micropython,neilh10/micropython,paul-xxx/micropython,infinnovation/micropython,orionrobots/micropython,SungEun-Steve-Kim/test-mp,xuxiaoxin/micropython,misterdanb/micropython,TDAbboud/micropython,feilongfl/micropython,redbear/micropython,HenrikSolver/micropython,ryannathans/micropython,blmorris/micropython,infinnovation/micropython,puuu/micropython,mpalomer/micropython,dhylands/micropython,supergis/micropython,dinau/micropython,puuu/micropython,bvernoux/micropython,danicampora/micropython,lowRISC/micropython,mpalomer/micropython,MrSurly/micropython-esp32,Timmenem/micropython,firstval/micropython,xuxiaoxin/micropython,xhat/micropython,aethaniel/micropython,mgyenik/micropython,ruffy91/micropython,toolmacher/micropython,aethaniel/micropython,adamkh/micropython,xyb/micropython,drrk/micropython,HenrikSolver/micropython,redbear/micropython,cnoviello/micropython,ceramos/micropython,ernesto-g/micropython,ericsnowcurrently/micropython,vriera/micropython,trezor/micropython,jimkmc/micropython,toolmacher/micropython,ernesto-g/micropython,blmorris/micropython,lbattraw/micropython,turbinenreiter/micropython,oopy/mic
ropython,cwyark/micropython,trezor/micropython,vriera/micropython,MrSurly/micropython,EcmaXp/micropython,tobbad/micropython,adafruit/micropython,cnoviello/micropython,drrk/micropython,dhylands/micropython,cloudformdesign/micropython,xhat/micropython,deshipu/micropython,chrisdearman/micropython,omtinez/micropython,rubencabrera/micropython,galenhz/micropython,omtinez/micropython,deshipu/micropython,swegener/micropython,vriera/micropython,firstval/micropython,tuc-osg/micropython,methoxid/micropystat,cwyark/micropython,paul-xxx/micropython,skybird6672/micropython,mianos/micropython,firstval/micropython,turbinenreiter/micropython,heisewangluo/micropython,lbattraw/micropython,AriZuu/micropython,drrk/micropython,PappaPeppar/micropython,adafruit/micropython,KISSMonX/micropython,deshipu/micropython,hosaka/micropython,firstval/micropython,ruffy91/micropython,blmorris/micropython,omtinez/micropython,xuxiaoxin/micropython,galenhz/micropython,chrisdearman/micropython,trezor/micropython,adafruit/micropython,matthewelse/micropython,dxxb/micropython,mianos/micropython,Vogtinator/micropython,puuu/micropython,orionrobots/micropython,jmarcelino/pycom-micropython
|
class MyExc(Exception):
pass
e = MyExc(100, "Some error")
print(e)
print(repr(e))
print(e.args)
tests: Add testcases for catching user Exception subclasses.
|
class MyExc(Exception):
pass
e = MyExc(100, "Some error")
print(e)
print(repr(e))
print(e.args)
try:
raise MyExc("Some error")
except MyExc as e:
print("Caught exception:", repr(e))
try:
raise MyExc("Some error2")
except Exception as e:
print("Caught exception:", repr(e))
try:
raise MyExc("Some error2")
except:
print("Caught user exception")
|
<commit_before>class MyExc(Exception):
pass
e = MyExc(100, "Some error")
print(e)
print(repr(e))
print(e.args)
<commit_msg>tests: Add testcases for catching user Exception subclasses.<commit_after>
|
class MyExc(Exception):
pass
e = MyExc(100, "Some error")
print(e)
print(repr(e))
print(e.args)
try:
raise MyExc("Some error")
except MyExc as e:
print("Caught exception:", repr(e))
try:
raise MyExc("Some error2")
except Exception as e:
print("Caught exception:", repr(e))
try:
raise MyExc("Some error2")
except:
print("Caught user exception")
|
class MyExc(Exception):
pass
e = MyExc(100, "Some error")
print(e)
print(repr(e))
print(e.args)
tests: Add testcases for catching user Exception subclasses.class MyExc(Exception):
pass
e = MyExc(100, "Some error")
print(e)
print(repr(e))
print(e.args)
try:
raise MyExc("Some error")
except MyExc as e:
print("Caught exception:", repr(e))
try:
raise MyExc("Some error2")
except Exception as e:
print("Caught exception:", repr(e))
try:
raise MyExc("Some error2")
except:
print("Caught user exception")
|
<commit_before>class MyExc(Exception):
pass
e = MyExc(100, "Some error")
print(e)
print(repr(e))
print(e.args)
<commit_msg>tests: Add testcases for catching user Exception subclasses.<commit_after>class MyExc(Exception):
pass
e = MyExc(100, "Some error")
print(e)
print(repr(e))
print(e.args)
try:
raise MyExc("Some error")
except MyExc as e:
print("Caught exception:", repr(e))
try:
raise MyExc("Some error2")
except Exception as e:
print("Caught exception:", repr(e))
try:
raise MyExc("Some error2")
except:
print("Caught user exception")
|
4d161278963252d8502f1be2bfb857bcb379f540
|
test/python/topology/test_utilities.py
|
test/python/topology/test_utilities.py
|
# Licensed Materials - Property of IBM
# Copyright IBM Corp. 2017
import sys
import streamsx.topology.context
def standalone(test, topo):
rc = streamsx.topology.context.submit("STANDALONE", topo)
test.assertEqual(0, rc)
|
# Licensed Materials - Property of IBM
# Copyright IBM Corp. 2017
import sys
import streamsx.topology.context
def standalone(test, topo):
rc = streamsx.topology.context.submit("STANDALONE", topo)
test.assertEqual(0, rc['return_code'])
|
Return value of submit is now a json dict
|
Return value of submit is now a json dict
|
Python
|
apache-2.0
|
ibmkendrick/streamsx.topology,ibmkendrick/streamsx.topology,ddebrunner/streamsx.topology,IBMStreams/streamsx.topology,IBMStreams/streamsx.topology,wmarshall484/streamsx.topology,ibmkendrick/streamsx.topology,ddebrunner/streamsx.topology,ddebrunner/streamsx.topology,IBMStreams/streamsx.topology,IBMStreams/streamsx.topology,ibmkendrick/streamsx.topology,ibmkendrick/streamsx.topology,wmarshall484/streamsx.topology,IBMStreams/streamsx.topology,ddebrunner/streamsx.topology,ddebrunner/streamsx.topology,wmarshall484/streamsx.topology,ddebrunner/streamsx.topology,IBMStreams/streamsx.topology,wmarshall484/streamsx.topology,ibmkendrick/streamsx.topology,IBMStreams/streamsx.topology,wmarshall484/streamsx.topology,ddebrunner/streamsx.topology,wmarshall484/streamsx.topology,wmarshall484/streamsx.topology,wmarshall484/streamsx.topology,ibmkendrick/streamsx.topology
|
# Licensed Materials - Property of IBM
# Copyright IBM Corp. 2017
import sys
import streamsx.topology.context
def standalone(test, topo):
rc = streamsx.topology.context.submit("STANDALONE", topo)
test.assertEqual(0, rc)
Return value of submit is now a json dict
|
# Licensed Materials - Property of IBM
# Copyright IBM Corp. 2017
import sys
import streamsx.topology.context
def standalone(test, topo):
rc = streamsx.topology.context.submit("STANDALONE", topo)
test.assertEqual(0, rc['return_code'])
|
<commit_before># Licensed Materials - Property of IBM
# Copyright IBM Corp. 2017
import sys
import streamsx.topology.context
def standalone(test, topo):
rc = streamsx.topology.context.submit("STANDALONE", topo)
test.assertEqual(0, rc)
<commit_msg>Return value of submit is now a json dict<commit_after>
|
# Licensed Materials - Property of IBM
# Copyright IBM Corp. 2017
import sys
import streamsx.topology.context
def standalone(test, topo):
rc = streamsx.topology.context.submit("STANDALONE", topo)
test.assertEqual(0, rc['return_code'])
|
# Licensed Materials - Property of IBM
# Copyright IBM Corp. 2017
import sys
import streamsx.topology.context
def standalone(test, topo):
rc = streamsx.topology.context.submit("STANDALONE", topo)
test.assertEqual(0, rc)
Return value of submit is now a json dict# Licensed Materials - Property of IBM
# Copyright IBM Corp. 2017
import sys
import streamsx.topology.context
def standalone(test, topo):
rc = streamsx.topology.context.submit("STANDALONE", topo)
test.assertEqual(0, rc['return_code'])
|
<commit_before># Licensed Materials - Property of IBM
# Copyright IBM Corp. 2017
import sys
import streamsx.topology.context
def standalone(test, topo):
rc = streamsx.topology.context.submit("STANDALONE", topo)
test.assertEqual(0, rc)
<commit_msg>Return value of submit is now a json dict<commit_after># Licensed Materials - Property of IBM
# Copyright IBM Corp. 2017
import sys
import streamsx.topology.context
def standalone(test, topo):
rc = streamsx.topology.context.submit("STANDALONE", topo)
test.assertEqual(0, rc['return_code'])
|
024597e8f32b49844c333fa551862563f2c508ca
|
pymatgen/__init__.py
|
pymatgen/__init__.py
|
from __future__ import unicode_literals
__author__ = "Pymatgen Development Team"
__email__ ="pymatgen@googlegroups.com"
__maintainer__ = "Shyue Ping Ong"
__maintainer_email__ ="shyuep@gmail.com"
__date__ = "Jul 8 2016"
__version__ = "4.0.2"
# Useful aliases for commonly used objects and modules.
# Allows from pymatgen import <class> for quick usage.
from .core import *
from .electronic_structure.core import Spin, Orbital
from .matproj.rest import MPRester
from monty.json import MontyEncoder, MontyDecoder, MSONable
|
from __future__ import unicode_literals
__author__ = "Pymatgen Development Team"
__email__ ="pymatgen@googlegroups.com"
__maintainer__ = "Shyue Ping Ong"
__maintainer_email__ ="shyuep@gmail.com"
__date__ = "Jul 8 2016"
__version__ = "4.0.2"
# Order of imports is important on some systems to avoid
# failures when loading shared libraries.
import spglib
from . import optimization, util
del(spglib, optimization, util)
# Useful aliases for commonly used objects and modules.
# Allows from pymatgen import <class> for quick usage.
from .core import *
from .electronic_structure.core import Spin, Orbital
from .matproj.rest import MPRester
from monty.json import MontyEncoder, MontyDecoder, MSONable
|
Fix for DLL load failures when importing top-level package
|
Fix for DLL load failures when importing top-level package
The order of import affects the success of import pymatgen on certain 64-bit Windows setups and therefore a workaround has been added.
Former-commit-id: 0ff590e053d5e0be2959720a861d06d7ba74d132 [formerly 0da252fe086eaa98942c4eaa39750739964be96f]
Former-commit-id: 259a290c8ed61366c6ba7ffb838c4de947826e7d
|
Python
|
mit
|
gpetretto/pymatgen,dongsenfo/pymatgen,czhengsci/pymatgen,blondegeek/pymatgen,davidwaroquiers/pymatgen,johnson1228/pymatgen,fraricci/pymatgen,dongsenfo/pymatgen,czhengsci/pymatgen,Bismarrck/pymatgen,setten/pymatgen,vorwerkc/pymatgen,tallakahath/pymatgen,tschaume/pymatgen,johnson1228/pymatgen,nisse3000/pymatgen,richardtran415/pymatgen,xhqu1981/pymatgen,tschaume/pymatgen,mbkumar/pymatgen,gpetretto/pymatgen,fraricci/pymatgen,ndardenne/pymatgen,blondegeek/pymatgen,xhqu1981/pymatgen,fraricci/pymatgen,gpetretto/pymatgen,mbkumar/pymatgen,tallakahath/pymatgen,gVallverdu/pymatgen,ndardenne/pymatgen,davidwaroquiers/pymatgen,richardtran415/pymatgen,tschaume/pymatgen,Bismarrck/pymatgen,nisse3000/pymatgen,montoyjh/pymatgen,montoyjh/pymatgen,blondegeek/pymatgen,matk86/pymatgen,gVallverdu/pymatgen,gmatteo/pymatgen,fraricci/pymatgen,Bismarrck/pymatgen,tallakahath/pymatgen,vorwerkc/pymatgen,aykol/pymatgen,davidwaroquiers/pymatgen,mbkumar/pymatgen,matk86/pymatgen,xhqu1981/pymatgen,dongsenfo/pymatgen,czhengsci/pymatgen,montoyjh/pymatgen,matk86/pymatgen,gpetretto/pymatgen,matk86/pymatgen,tschaume/pymatgen,richardtran415/pymatgen,gVallverdu/pymatgen,Bismarrck/pymatgen,gVallverdu/pymatgen,nisse3000/pymatgen,vorwerkc/pymatgen,aykol/pymatgen,dongsenfo/pymatgen,johnson1228/pymatgen,johnson1228/pymatgen,setten/pymatgen,Bismarrck/pymatgen,richardtran415/pymatgen,czhengsci/pymatgen,nisse3000/pymatgen,setten/pymatgen,blondegeek/pymatgen,mbkumar/pymatgen,davidwaroquiers/pymatgen,tschaume/pymatgen,vorwerkc/pymatgen,gmatteo/pymatgen,montoyjh/pymatgen,aykol/pymatgen,ndardenne/pymatgen,setten/pymatgen
|
from __future__ import unicode_literals
__author__ = "Pymatgen Development Team"
__email__ ="pymatgen@googlegroups.com"
__maintainer__ = "Shyue Ping Ong"
__maintainer_email__ ="shyuep@gmail.com"
__date__ = "Jul 8 2016"
__version__ = "4.0.2"
# Useful aliases for commonly used objects and modules.
# Allows from pymatgen import <class> for quick usage.
from .core import *
from .electronic_structure.core import Spin, Orbital
from .matproj.rest import MPRester
from monty.json import MontyEncoder, MontyDecoder, MSONable
Fix for DLL load failures when importing top-level package
The order of import affects the success of import pymatgen on certain 64-bit Windows setups and therefore a workaround has been added.
Former-commit-id: 0ff590e053d5e0be2959720a861d06d7ba74d132 [formerly 0da252fe086eaa98942c4eaa39750739964be96f]
Former-commit-id: 259a290c8ed61366c6ba7ffb838c4de947826e7d
|
from __future__ import unicode_literals
__author__ = "Pymatgen Development Team"
__email__ ="pymatgen@googlegroups.com"
__maintainer__ = "Shyue Ping Ong"
__maintainer_email__ ="shyuep@gmail.com"
__date__ = "Jul 8 2016"
__version__ = "4.0.2"
# Order of imports is important on some systems to avoid
# failures when loading shared libraries.
import spglib
from . import optimization, util
del(spglib, optimization, util)
# Useful aliases for commonly used objects and modules.
# Allows from pymatgen import <class> for quick usage.
from .core import *
from .electronic_structure.core import Spin, Orbital
from .matproj.rest import MPRester
from monty.json import MontyEncoder, MontyDecoder, MSONable
|
<commit_before>from __future__ import unicode_literals
__author__ = "Pymatgen Development Team"
__email__ ="pymatgen@googlegroups.com"
__maintainer__ = "Shyue Ping Ong"
__maintainer_email__ ="shyuep@gmail.com"
__date__ = "Jul 8 2016"
__version__ = "4.0.2"
# Useful aliases for commonly used objects and modules.
# Allows from pymatgen import <class> for quick usage.
from .core import *
from .electronic_structure.core import Spin, Orbital
from .matproj.rest import MPRester
from monty.json import MontyEncoder, MontyDecoder, MSONable
<commit_msg>Fix for DLL load failures when importing top-level package
The order of import affects the success of import pymatgen on certain 64-bit Windows setups and therefore a workaround has been added.
Former-commit-id: 0ff590e053d5e0be2959720a861d06d7ba74d132 [formerly 0da252fe086eaa98942c4eaa39750739964be96f]
Former-commit-id: 259a290c8ed61366c6ba7ffb838c4de947826e7d<commit_after>
|
from __future__ import unicode_literals
__author__ = "Pymatgen Development Team"
__email__ ="pymatgen@googlegroups.com"
__maintainer__ = "Shyue Ping Ong"
__maintainer_email__ ="shyuep@gmail.com"
__date__ = "Jul 8 2016"
__version__ = "4.0.2"
# Order of imports is important on some systems to avoid
# failures when loading shared libraries.
import spglib
from . import optimization, util
del(spglib, optimization, util)
# Useful aliases for commonly used objects and modules.
# Allows from pymatgen import <class> for quick usage.
from .core import *
from .electronic_structure.core import Spin, Orbital
from .matproj.rest import MPRester
from monty.json import MontyEncoder, MontyDecoder, MSONable
|
from __future__ import unicode_literals
__author__ = "Pymatgen Development Team"
__email__ ="pymatgen@googlegroups.com"
__maintainer__ = "Shyue Ping Ong"
__maintainer_email__ ="shyuep@gmail.com"
__date__ = "Jul 8 2016"
__version__ = "4.0.2"
# Useful aliases for commonly used objects and modules.
# Allows from pymatgen import <class> for quick usage.
from .core import *
from .electronic_structure.core import Spin, Orbital
from .matproj.rest import MPRester
from monty.json import MontyEncoder, MontyDecoder, MSONable
Fix for DLL load failures when importing top-level package
The order of import affects the success of import pymatgen on certain 64-bit Windows setups and therefore a workaround has been added.
Former-commit-id: 0ff590e053d5e0be2959720a861d06d7ba74d132 [formerly 0da252fe086eaa98942c4eaa39750739964be96f]
Former-commit-id: 259a290c8ed61366c6ba7ffb838c4de947826e7dfrom __future__ import unicode_literals
__author__ = "Pymatgen Development Team"
__email__ ="pymatgen@googlegroups.com"
__maintainer__ = "Shyue Ping Ong"
__maintainer_email__ ="shyuep@gmail.com"
__date__ = "Jul 8 2016"
__version__ = "4.0.2"
# Order of imports is important on some systems to avoid
# failures when loading shared libraries.
import spglib
from . import optimization, util
del(spglib, optimization, util)
# Useful aliases for commonly used objects and modules.
# Allows from pymatgen import <class> for quick usage.
from .core import *
from .electronic_structure.core import Spin, Orbital
from .matproj.rest import MPRester
from monty.json import MontyEncoder, MontyDecoder, MSONable
|
<commit_before>from __future__ import unicode_literals
__author__ = "Pymatgen Development Team"
__email__ ="pymatgen@googlegroups.com"
__maintainer__ = "Shyue Ping Ong"
__maintainer_email__ ="shyuep@gmail.com"
__date__ = "Jul 8 2016"
__version__ = "4.0.2"
# Useful aliases for commonly used objects and modules.
# Allows from pymatgen import <class> for quick usage.
from .core import *
from .electronic_structure.core import Spin, Orbital
from .matproj.rest import MPRester
from monty.json import MontyEncoder, MontyDecoder, MSONable
<commit_msg>Fix for DLL load failures when importing top-level package
The order of import affects the success of import pymatgen on certain 64-bit Windows setups and therefore a workaround has been added.
Former-commit-id: 0ff590e053d5e0be2959720a861d06d7ba74d132 [formerly 0da252fe086eaa98942c4eaa39750739964be96f]
Former-commit-id: 259a290c8ed61366c6ba7ffb838c4de947826e7d<commit_after>from __future__ import unicode_literals
__author__ = "Pymatgen Development Team"
__email__ ="pymatgen@googlegroups.com"
__maintainer__ = "Shyue Ping Ong"
__maintainer_email__ ="shyuep@gmail.com"
__date__ = "Jul 8 2016"
__version__ = "4.0.2"
# Order of imports is important on some systems to avoid
# failures when loading shared libraries.
import spglib
from . import optimization, util
del(spglib, optimization, util)
# Useful aliases for commonly used objects and modules.
# Allows from pymatgen import <class> for quick usage.
from .core import *
from .electronic_structure.core import Spin, Orbital
from .matproj.rest import MPRester
from monty.json import MontyEncoder, MontyDecoder, MSONable
|
7fb30506b9de18d39e47d13a2e85c06484cfdecd
|
tests/modules/test_enumerable.py
|
tests/modules/test_enumerable.py
|
class TestKernel(object):
def test_inject(self, ec):
w_res = ec.space.execute(ec, """
return (5..10).inject(1) do |prod, n|
prod * n
end
""")
assert ec.space.int_w(w_res) == 15120
w_res = ec.space.execute(ec, """
return (1..10).inject 0 do |sum, n|
sum + n
end
""")
assert ec.space.int_w(w_res) == 45
def test_each_with_index(self, ec):
w_res = ec.space.execute(ec, """
result = []
(5..10).each_with_index do |n, idx|
result << [n, idx]
end
return result
""")
assert [[ec.space.int_w(w_x) for w_x in ec.space.listview(w_sub)] for w_sub in ec.space.listview(w_res)] == [[5, 0], [6, 1], [7, 2], [8, 3], [9, 4]]
|
class TestEnumberable(object):
def test_inject(self, ec):
w_res = ec.space.execute(ec, """
return (5..10).inject(1) do |prod, n|
prod * n
end
""")
assert ec.space.int_w(w_res) == 15120
w_res = ec.space.execute(ec, """
return (1..10).inject 0 do |sum, n|
sum + n
end
""")
assert ec.space.int_w(w_res) == 45
def test_each_with_index(self, ec):
w_res = ec.space.execute(ec, """
result = []
(5..10).each_with_index do |n, idx|
result << [n, idx]
end
return result
""")
assert [[ec.space.int_w(w_x) for w_x in ec.space.listview(w_sub)] for w_sub in ec.space.listview(w_res)] == [[5, 0], [6, 1], [7, 2], [8, 3], [9, 4]]
|
Fix name of test object for enumerables
|
Fix name of test object for enumerables
|
Python
|
bsd-3-clause
|
babelsberg/babelsberg-r,babelsberg/babelsberg-r,topazproject/topaz,topazproject/topaz,babelsberg/babelsberg-r,kachick/topaz,topazproject/topaz,babelsberg/babelsberg-r,topazproject/topaz,kachick/topaz,babelsberg/babelsberg-r,kachick/topaz
|
class TestKernel(object):
def test_inject(self, ec):
w_res = ec.space.execute(ec, """
return (5..10).inject(1) do |prod, n|
prod * n
end
""")
assert ec.space.int_w(w_res) == 15120
w_res = ec.space.execute(ec, """
return (1..10).inject 0 do |sum, n|
sum + n
end
""")
assert ec.space.int_w(w_res) == 45
def test_each_with_index(self, ec):
w_res = ec.space.execute(ec, """
result = []
(5..10).each_with_index do |n, idx|
result << [n, idx]
end
return result
""")
assert [[ec.space.int_w(w_x) for w_x in ec.space.listview(w_sub)] for w_sub in ec.space.listview(w_res)] == [[5, 0], [6, 1], [7, 2], [8, 3], [9, 4]]
Fix name of test object for enumerables
|
class TestEnumberable(object):
def test_inject(self, ec):
w_res = ec.space.execute(ec, """
return (5..10).inject(1) do |prod, n|
prod * n
end
""")
assert ec.space.int_w(w_res) == 15120
w_res = ec.space.execute(ec, """
return (1..10).inject 0 do |sum, n|
sum + n
end
""")
assert ec.space.int_w(w_res) == 45
def test_each_with_index(self, ec):
w_res = ec.space.execute(ec, """
result = []
(5..10).each_with_index do |n, idx|
result << [n, idx]
end
return result
""")
assert [[ec.space.int_w(w_x) for w_x in ec.space.listview(w_sub)] for w_sub in ec.space.listview(w_res)] == [[5, 0], [6, 1], [7, 2], [8, 3], [9, 4]]
|
<commit_before>class TestKernel(object):
def test_inject(self, ec):
w_res = ec.space.execute(ec, """
return (5..10).inject(1) do |prod, n|
prod * n
end
""")
assert ec.space.int_w(w_res) == 15120
w_res = ec.space.execute(ec, """
return (1..10).inject 0 do |sum, n|
sum + n
end
""")
assert ec.space.int_w(w_res) == 45
def test_each_with_index(self, ec):
w_res = ec.space.execute(ec, """
result = []
(5..10).each_with_index do |n, idx|
result << [n, idx]
end
return result
""")
assert [[ec.space.int_w(w_x) for w_x in ec.space.listview(w_sub)] for w_sub in ec.space.listview(w_res)] == [[5, 0], [6, 1], [7, 2], [8, 3], [9, 4]]
<commit_msg>Fix name of test object for enumerables<commit_after>
|
class TestEnumberable(object):
def test_inject(self, ec):
w_res = ec.space.execute(ec, """
return (5..10).inject(1) do |prod, n|
prod * n
end
""")
assert ec.space.int_w(w_res) == 15120
w_res = ec.space.execute(ec, """
return (1..10).inject 0 do |sum, n|
sum + n
end
""")
assert ec.space.int_w(w_res) == 45
def test_each_with_index(self, ec):
w_res = ec.space.execute(ec, """
result = []
(5..10).each_with_index do |n, idx|
result << [n, idx]
end
return result
""")
assert [[ec.space.int_w(w_x) for w_x in ec.space.listview(w_sub)] for w_sub in ec.space.listview(w_res)] == [[5, 0], [6, 1], [7, 2], [8, 3], [9, 4]]
|
class TestKernel(object):
def test_inject(self, ec):
w_res = ec.space.execute(ec, """
return (5..10).inject(1) do |prod, n|
prod * n
end
""")
assert ec.space.int_w(w_res) == 15120
w_res = ec.space.execute(ec, """
return (1..10).inject 0 do |sum, n|
sum + n
end
""")
assert ec.space.int_w(w_res) == 45
def test_each_with_index(self, ec):
w_res = ec.space.execute(ec, """
result = []
(5..10).each_with_index do |n, idx|
result << [n, idx]
end
return result
""")
assert [[ec.space.int_w(w_x) for w_x in ec.space.listview(w_sub)] for w_sub in ec.space.listview(w_res)] == [[5, 0], [6, 1], [7, 2], [8, 3], [9, 4]]
Fix name of test object for enumerablesclass TestEnumberable(object):
def test_inject(self, ec):
w_res = ec.space.execute(ec, """
return (5..10).inject(1) do |prod, n|
prod * n
end
""")
assert ec.space.int_w(w_res) == 15120
w_res = ec.space.execute(ec, """
return (1..10).inject 0 do |sum, n|
sum + n
end
""")
assert ec.space.int_w(w_res) == 45
def test_each_with_index(self, ec):
w_res = ec.space.execute(ec, """
result = []
(5..10).each_with_index do |n, idx|
result << [n, idx]
end
return result
""")
assert [[ec.space.int_w(w_x) for w_x in ec.space.listview(w_sub)] for w_sub in ec.space.listview(w_res)] == [[5, 0], [6, 1], [7, 2], [8, 3], [9, 4]]
|
<commit_before>class TestKernel(object):
def test_inject(self, ec):
w_res = ec.space.execute(ec, """
return (5..10).inject(1) do |prod, n|
prod * n
end
""")
assert ec.space.int_w(w_res) == 15120
w_res = ec.space.execute(ec, """
return (1..10).inject 0 do |sum, n|
sum + n
end
""")
assert ec.space.int_w(w_res) == 45
def test_each_with_index(self, ec):
w_res = ec.space.execute(ec, """
result = []
(5..10).each_with_index do |n, idx|
result << [n, idx]
end
return result
""")
assert [[ec.space.int_w(w_x) for w_x in ec.space.listview(w_sub)] for w_sub in ec.space.listview(w_res)] == [[5, 0], [6, 1], [7, 2], [8, 3], [9, 4]]
<commit_msg>Fix name of test object for enumerables<commit_after>class TestEnumberable(object):
def test_inject(self, ec):
w_res = ec.space.execute(ec, """
return (5..10).inject(1) do |prod, n|
prod * n
end
""")
assert ec.space.int_w(w_res) == 15120
w_res = ec.space.execute(ec, """
return (1..10).inject 0 do |sum, n|
sum + n
end
""")
assert ec.space.int_w(w_res) == 45
def test_each_with_index(self, ec):
w_res = ec.space.execute(ec, """
result = []
(5..10).each_with_index do |n, idx|
result << [n, idx]
end
return result
""")
assert [[ec.space.int_w(w_x) for w_x in ec.space.listview(w_sub)] for w_sub in ec.space.listview(w_res)] == [[5, 0], [6, 1], [7, 2], [8, 3], [9, 4]]
|
875451aa1639b6342fa53340edc59c6c521a1e37
|
python/microphone.py
|
python/microphone.py
|
import time
import numpy as np
import pyaudio
import config
def start_stream(callback):
p = pyaudio.PyAudio()
frames_per_buffer = int(config.MIC_RATE / config.FPS)
stream = p.open(format=pyaudio.paInt16,
channels=1,
rate=config.MIC_RATE,
input=True,
frames_per_buffer=frames_per_buffer)
overflows = 0
prev_ovf_time = time.time()
while True:
try:
y = np.fromstring(stream.read(frames_per_buffer, exception_on_overflow=False), dtype=np.int16)
y = y.astype(np.float32)
stream.read(get_read_available(), exception_on_overflow=False)
callback(y)
except IOError:
overflows += 1
if time.time() > prev_ovf_time + 1:
prev_ovf_time = time.time()
print('Audio buffer has overflowed {} times'.format(overflows))
stream.stop_stream()
stream.close()
p.terminate()
|
import time
import numpy as np
import pyaudio
import config
def start_stream(callback):
p = pyaudio.PyAudio()
frames_per_buffer = int(config.MIC_RATE / config.FPS)
stream = p.open(format=pyaudio.paInt16,
channels=1,
rate=config.MIC_RATE,
input=True,
frames_per_buffer=frames_per_buffer)
overflows = 0
prev_ovf_time = time.time()
while True:
try:
y = np.fromstring(stream.read(frames_per_buffer, exception_on_overflow=False), dtype=np.int16)
y = y.astype(np.float32)
stream.read(stream.get_read_available(), exception_on_overflow=False)
callback(y)
except IOError:
overflows += 1
if time.time() > prev_ovf_time + 1:
prev_ovf_time = time.time()
print('Audio buffer has overflowed {} times'.format(overflows))
stream.stop_stream()
stream.close()
p.terminate()
|
Fix syntax bug on stream emptying
|
Fix syntax bug on stream emptying
|
Python
|
mit
|
scottlawsonbc/audio-reactive-led-strip,joeybab3/audio-reactive-led-strip,joeybab3/audio-reactive-led-strip,scottlawsonbc/audio-reactive-led-strip
|
import time
import numpy as np
import pyaudio
import config
def start_stream(callback):
p = pyaudio.PyAudio()
frames_per_buffer = int(config.MIC_RATE / config.FPS)
stream = p.open(format=pyaudio.paInt16,
channels=1,
rate=config.MIC_RATE,
input=True,
frames_per_buffer=frames_per_buffer)
overflows = 0
prev_ovf_time = time.time()
while True:
try:
y = np.fromstring(stream.read(frames_per_buffer, exception_on_overflow=False), dtype=np.int16)
y = y.astype(np.float32)
stream.read(get_read_available(), exception_on_overflow=False)
callback(y)
except IOError:
overflows += 1
if time.time() > prev_ovf_time + 1:
prev_ovf_time = time.time()
print('Audio buffer has overflowed {} times'.format(overflows))
stream.stop_stream()
stream.close()
p.terminate()
Fix syntax bug on stream emptying
|
import time
import numpy as np
import pyaudio
import config
def start_stream(callback):
p = pyaudio.PyAudio()
frames_per_buffer = int(config.MIC_RATE / config.FPS)
stream = p.open(format=pyaudio.paInt16,
channels=1,
rate=config.MIC_RATE,
input=True,
frames_per_buffer=frames_per_buffer)
overflows = 0
prev_ovf_time = time.time()
while True:
try:
y = np.fromstring(stream.read(frames_per_buffer, exception_on_overflow=False), dtype=np.int16)
y = y.astype(np.float32)
stream.read(stream.get_read_available(), exception_on_overflow=False)
callback(y)
except IOError:
overflows += 1
if time.time() > prev_ovf_time + 1:
prev_ovf_time = time.time()
print('Audio buffer has overflowed {} times'.format(overflows))
stream.stop_stream()
stream.close()
p.terminate()
|
<commit_before>import time
import numpy as np
import pyaudio
import config
def start_stream(callback):
p = pyaudio.PyAudio()
frames_per_buffer = int(config.MIC_RATE / config.FPS)
stream = p.open(format=pyaudio.paInt16,
channels=1,
rate=config.MIC_RATE,
input=True,
frames_per_buffer=frames_per_buffer)
overflows = 0
prev_ovf_time = time.time()
while True:
try:
y = np.fromstring(stream.read(frames_per_buffer, exception_on_overflow=False), dtype=np.int16)
y = y.astype(np.float32)
stream.read(get_read_available(), exception_on_overflow=False)
callback(y)
except IOError:
overflows += 1
if time.time() > prev_ovf_time + 1:
prev_ovf_time = time.time()
print('Audio buffer has overflowed {} times'.format(overflows))
stream.stop_stream()
stream.close()
p.terminate()
<commit_msg>Fix syntax bug on stream emptying<commit_after>
|
import time
import numpy as np
import pyaudio
import config
def start_stream(callback):
p = pyaudio.PyAudio()
frames_per_buffer = int(config.MIC_RATE / config.FPS)
stream = p.open(format=pyaudio.paInt16,
channels=1,
rate=config.MIC_RATE,
input=True,
frames_per_buffer=frames_per_buffer)
overflows = 0
prev_ovf_time = time.time()
while True:
try:
y = np.fromstring(stream.read(frames_per_buffer, exception_on_overflow=False), dtype=np.int16)
y = y.astype(np.float32)
stream.read(stream.get_read_available(), exception_on_overflow=False)
callback(y)
except IOError:
overflows += 1
if time.time() > prev_ovf_time + 1:
prev_ovf_time = time.time()
print('Audio buffer has overflowed {} times'.format(overflows))
stream.stop_stream()
stream.close()
p.terminate()
|
import time
import numpy as np
import pyaudio
import config
def start_stream(callback):
p = pyaudio.PyAudio()
frames_per_buffer = int(config.MIC_RATE / config.FPS)
stream = p.open(format=pyaudio.paInt16,
channels=1,
rate=config.MIC_RATE,
input=True,
frames_per_buffer=frames_per_buffer)
overflows = 0
prev_ovf_time = time.time()
while True:
try:
y = np.fromstring(stream.read(frames_per_buffer, exception_on_overflow=False), dtype=np.int16)
y = y.astype(np.float32)
stream.read(get_read_available(), exception_on_overflow=False)
callback(y)
except IOError:
overflows += 1
if time.time() > prev_ovf_time + 1:
prev_ovf_time = time.time()
print('Audio buffer has overflowed {} times'.format(overflows))
stream.stop_stream()
stream.close()
p.terminate()
Fix syntax bug on stream emptyingimport time
import numpy as np
import pyaudio
import config
def start_stream(callback):
p = pyaudio.PyAudio()
frames_per_buffer = int(config.MIC_RATE / config.FPS)
stream = p.open(format=pyaudio.paInt16,
channels=1,
rate=config.MIC_RATE,
input=True,
frames_per_buffer=frames_per_buffer)
overflows = 0
prev_ovf_time = time.time()
while True:
try:
y = np.fromstring(stream.read(frames_per_buffer, exception_on_overflow=False), dtype=np.int16)
y = y.astype(np.float32)
stream.read(stream.get_read_available(), exception_on_overflow=False)
callback(y)
except IOError:
overflows += 1
if time.time() > prev_ovf_time + 1:
prev_ovf_time = time.time()
print('Audio buffer has overflowed {} times'.format(overflows))
stream.stop_stream()
stream.close()
p.terminate()
|
<commit_before>import time
import numpy as np
import pyaudio
import config
def start_stream(callback):
p = pyaudio.PyAudio()
frames_per_buffer = int(config.MIC_RATE / config.FPS)
stream = p.open(format=pyaudio.paInt16,
channels=1,
rate=config.MIC_RATE,
input=True,
frames_per_buffer=frames_per_buffer)
overflows = 0
prev_ovf_time = time.time()
while True:
try:
y = np.fromstring(stream.read(frames_per_buffer, exception_on_overflow=False), dtype=np.int16)
y = y.astype(np.float32)
stream.read(get_read_available(), exception_on_overflow=False)
callback(y)
except IOError:
overflows += 1
if time.time() > prev_ovf_time + 1:
prev_ovf_time = time.time()
print('Audio buffer has overflowed {} times'.format(overflows))
stream.stop_stream()
stream.close()
p.terminate()
<commit_msg>Fix syntax bug on stream emptying<commit_after>import time
import numpy as np
import pyaudio
import config
def start_stream(callback):
p = pyaudio.PyAudio()
frames_per_buffer = int(config.MIC_RATE / config.FPS)
stream = p.open(format=pyaudio.paInt16,
channels=1,
rate=config.MIC_RATE,
input=True,
frames_per_buffer=frames_per_buffer)
overflows = 0
prev_ovf_time = time.time()
while True:
try:
y = np.fromstring(stream.read(frames_per_buffer, exception_on_overflow=False), dtype=np.int16)
y = y.astype(np.float32)
stream.read(stream.get_read_available(), exception_on_overflow=False)
callback(y)
except IOError:
overflows += 1
if time.time() > prev_ovf_time + 1:
prev_ovf_time = time.time()
print('Audio buffer has overflowed {} times'.format(overflows))
stream.stop_stream()
stream.close()
p.terminate()
|
26ccc283dfe6ac4c3505cef78d27e9a27221b8b6
|
readthedocs/core/subdomain_urls.py
|
readthedocs/core/subdomain_urls.py
|
from django.conf.urls.defaults import url, patterns
from urls import urlpatterns as main_patterns
urlpatterns = patterns('',
url(r'^projects/(?P<project_slug>[\w.-]+)/(?P<lang_slug>\w{2})/(?P<version_slug>[\w.-]+)/(?P<filename>.*)$',
'core.views.subproject_serve_docs',
name='subproject_docs_detail'
),
url(r'^(?P<lang_slug>\w{2})/(?P<version_slug>[\w.-]+)/(?P<filename>.+)$',
'core.views.serve_docs',
name='docs_detail'
),
url(r'^(?P<lang_slug>\w{2})/(?P<version_slug>.*)/$',
'core.views.serve_docs',
{'filename': 'index.html'},
name='docs_detail'
),
url(r'^(?P<version_slug>.*)/$',
'projects.views.public.subdomain_handler',
name='version_subdomain_handler'
),
url(r'^$', 'projects.views.public.subdomain_handler'),
)
urlpatterns += main_patterns
|
from django.conf.urls.defaults import url, patterns
from urls import urlpatterns as main_patterns
urlpatterns = patterns('',
url(r'^projects/(?P<project_slug>[\w.-]+)/(?P<lang_slug>\w{2})/(?P<version_slug>[\w.-]+)/(?P<filename>.*)$',
'core.views.subproject_serve_docs',
name='subproject_docs_detail'
),
url(r'^(?P<lang_slug>\w{2})/(?P<version_slug>[\w.-]+)/(?P<filename>.*)$',
'core.views.serve_docs',
name='docs_detail'
),
url(r'^(?P<lang_slug>\w{2})/(?P<version_slug>.*)/$',
'core.views.serve_docs',
{'filename': 'index.html'},
name='docs_detail'
),
url(r'^(?P<version_slug>.*)/$',
'projects.views.public.subdomain_handler',
name='version_subdomain_handler'
),
url(r'^$', 'projects.views.public.subdomain_handler'),
)
urlpatterns += main_patterns
|
Fix regex on subdomain urls so empty string will match.
|
Fix regex on subdomain urls so empty string will match.
|
Python
|
mit
|
kdkeyser/readthedocs.org,fujita-shintaro/readthedocs.org,GovReady/readthedocs.org,sunnyzwh/readthedocs.org,fujita-shintaro/readthedocs.org,titiushko/readthedocs.org,asampat3090/readthedocs.org,takluyver/readthedocs.org,SteveViss/readthedocs.org,ojii/readthedocs.org,mrshoki/readthedocs.org,ojii/readthedocs.org,CedarLogic/readthedocs.org,Carreau/readthedocs.org,michaelmcandrew/readthedocs.org,sunnyzwh/readthedocs.org,emawind84/readthedocs.org,asampat3090/readthedocs.org,royalwang/readthedocs.org,kdkeyser/readthedocs.org,techtonik/readthedocs.org,kdkeyser/readthedocs.org,safwanrahman/readthedocs.org,wijerasa/readthedocs.org,sid-kap/readthedocs.org,techtonik/readthedocs.org,singingwolfboy/readthedocs.org,attakei/readthedocs-oauth,Carreau/readthedocs.org,SteveViss/readthedocs.org,agjohnson/readthedocs.org,asampat3090/readthedocs.org,kenwang76/readthedocs.org,agjohnson/readthedocs.org,atsuyim/readthedocs.org,mrshoki/readthedocs.org,jerel/readthedocs.org,espdev/readthedocs.org,cgourlay/readthedocs.org,espdev/readthedocs.org,nyergler/pythonslides,michaelmcandrew/readthedocs.org,takluyver/readthedocs.org,kenshinthebattosai/readthedocs.org,nyergler/pythonslides,sils1297/readthedocs.org,kenshinthebattosai/readthedocs.org,stevepiercy/readthedocs.org,stevepiercy/readthedocs.org,titiushko/readthedocs.org,gjtorikian/readthedocs.org,singingwolfboy/readthedocs.org,wanghaven/readthedocs.org,Tazer/readthedocs.org,emawind84/readthedocs.org,singingwolfboy/readthedocs.org,SteveViss/readthedocs.org,stevepiercy/readthedocs.org,dirn/readthedocs.org,soulshake/readthedocs.org,CedarLogic/readthedocs.org,attakei/readthedocs-oauth,sunnyzwh/readthedocs.org,kenwang76/readthedocs.org,dirn/readthedocs.org,davidfischer/readthedocs.org,dirn/readthedocs.org,istresearch/readthedocs.org,clarkperkins/readthedocs.org,cgourlay/readthedocs.org,KamranMackey/readthedocs.org,istresearch/readthedocs.org,Carreau/readthedocs.org,Tazer/readthedocs.org,davidfischer/readthedocs.org,techtonik/readthedocs.org,hach-que/
readthedocs.org,techtonik/readthedocs.org,emawind84/readthedocs.org,emawind84/readthedocs.org,wanghaven/readthedocs.org,Tazer/readthedocs.org,attakei/readthedocs-oauth,soulshake/readthedocs.org,mhils/readthedocs.org,mhils/readthedocs.org,ojii/readthedocs.org,gjtorikian/readthedocs.org,GovReady/readthedocs.org,wijerasa/readthedocs.org,CedarLogic/readthedocs.org,dirn/readthedocs.org,KamranMackey/readthedocs.org,kenshinthebattosai/readthedocs.org,GovReady/readthedocs.org,mrshoki/readthedocs.org,wijerasa/readthedocs.org,cgourlay/readthedocs.org,attakei/readthedocs-oauth,nikolas/readthedocs.org,asampat3090/readthedocs.org,jerel/readthedocs.org,raven47git/readthedocs.org,clarkperkins/readthedocs.org,nikolas/readthedocs.org,sunnyzwh/readthedocs.org,agjohnson/readthedocs.org,espdev/readthedocs.org,nyergler/pythonslides,nyergler/pythonslides,d0ugal/readthedocs.org,pombredanne/readthedocs.org,KamranMackey/readthedocs.org,sils1297/readthedocs.org,cgourlay/readthedocs.org,kenwang76/readthedocs.org,sid-kap/readthedocs.org,sils1297/readthedocs.org,safwanrahman/readthedocs.org,d0ugal/readthedocs.org,GovReady/readthedocs.org,soulshake/readthedocs.org,stevepiercy/readthedocs.org,LukasBoersma/readthedocs.org,takluyver/readthedocs.org,sid-kap/readthedocs.org,jerel/readthedocs.org,sid-kap/readthedocs.org,rtfd/readthedocs.org,takluyver/readthedocs.org,kenwang76/readthedocs.org,gjtorikian/readthedocs.org,tddv/readthedocs.org,michaelmcandrew/readthedocs.org,sils1297/readthedocs.org,tddv/readthedocs.org,mhils/readthedocs.org,nikolas/readthedocs.org,raven47git/readthedocs.org,laplaceliu/readthedocs.org,atsuyim/readthedocs.org,soulshake/readthedocs.org,royalwang/readthedocs.org,raven47git/readthedocs.org,davidfischer/readthedocs.org,tddv/readthedocs.org,espdev/readthedocs.org,fujita-shintaro/readthedocs.org,safwanrahman/readthedocs.org,agjohnson/readthedocs.org,davidfischer/readthedocs.org,rtfd/readthedocs.org,VishvajitP/readthedocs.org,pombredanne/readthedocs.org,titiushko/readthedocs.org,i
stresearch/readthedocs.org,hach-que/readthedocs.org,d0ugal/readthedocs.org,Carreau/readthedocs.org,VishvajitP/readthedocs.org,VishvajitP/readthedocs.org,safwanrahman/readthedocs.org,laplaceliu/readthedocs.org,LukasBoersma/readthedocs.org,nikolas/readthedocs.org,singingwolfboy/readthedocs.org,michaelmcandrew/readthedocs.org,gjtorikian/readthedocs.org,wanghaven/readthedocs.org,kenshinthebattosai/readthedocs.org,atsuyim/readthedocs.org,rtfd/readthedocs.org,royalwang/readthedocs.org,clarkperkins/readthedocs.org,VishvajitP/readthedocs.org,wijerasa/readthedocs.org,hach-que/readthedocs.org,CedarLogic/readthedocs.org,rtfd/readthedocs.org,mhils/readthedocs.org,istresearch/readthedocs.org,SteveViss/readthedocs.org,laplaceliu/readthedocs.org,atsuyim/readthedocs.org,Tazer/readthedocs.org,KamranMackey/readthedocs.org,LukasBoersma/readthedocs.org,ojii/readthedocs.org,fujita-shintaro/readthedocs.org,LukasBoersma/readthedocs.org,wanghaven/readthedocs.org,mrshoki/readthedocs.org,royalwang/readthedocs.org,clarkperkins/readthedocs.org,jerel/readthedocs.org,raven47git/readthedocs.org,pombredanne/readthedocs.org,kdkeyser/readthedocs.org,hach-que/readthedocs.org,d0ugal/readthedocs.org,laplaceliu/readthedocs.org,titiushko/readthedocs.org,espdev/readthedocs.org
|
from django.conf.urls.defaults import url, patterns
from urls import urlpatterns as main_patterns
urlpatterns = patterns('',
url(r'^projects/(?P<project_slug>[\w.-]+)/(?P<lang_slug>\w{2})/(?P<version_slug>[\w.-]+)/(?P<filename>.*)$',
'core.views.subproject_serve_docs',
name='subproject_docs_detail'
),
url(r'^(?P<lang_slug>\w{2})/(?P<version_slug>[\w.-]+)/(?P<filename>.+)$',
'core.views.serve_docs',
name='docs_detail'
),
url(r'^(?P<lang_slug>\w{2})/(?P<version_slug>.*)/$',
'core.views.serve_docs',
{'filename': 'index.html'},
name='docs_detail'
),
url(r'^(?P<version_slug>.*)/$',
'projects.views.public.subdomain_handler',
name='version_subdomain_handler'
),
url(r'^$', 'projects.views.public.subdomain_handler'),
)
urlpatterns += main_patterns
Fix regex on subdomain urls so empty string will match.
|
from django.conf.urls.defaults import url, patterns
from urls import urlpatterns as main_patterns
urlpatterns = patterns('',
url(r'^projects/(?P<project_slug>[\w.-]+)/(?P<lang_slug>\w{2})/(?P<version_slug>[\w.-]+)/(?P<filename>.*)$',
'core.views.subproject_serve_docs',
name='subproject_docs_detail'
),
url(r'^(?P<lang_slug>\w{2})/(?P<version_slug>[\w.-]+)/(?P<filename>.*)$',
'core.views.serve_docs',
name='docs_detail'
),
url(r'^(?P<lang_slug>\w{2})/(?P<version_slug>.*)/$',
'core.views.serve_docs',
{'filename': 'index.html'},
name='docs_detail'
),
url(r'^(?P<version_slug>.*)/$',
'projects.views.public.subdomain_handler',
name='version_subdomain_handler'
),
url(r'^$', 'projects.views.public.subdomain_handler'),
)
urlpatterns += main_patterns
|
<commit_before>from django.conf.urls.defaults import url, patterns
from urls import urlpatterns as main_patterns
urlpatterns = patterns('',
url(r'^projects/(?P<project_slug>[\w.-]+)/(?P<lang_slug>\w{2})/(?P<version_slug>[\w.-]+)/(?P<filename>.*)$',
'core.views.subproject_serve_docs',
name='subproject_docs_detail'
),
url(r'^(?P<lang_slug>\w{2})/(?P<version_slug>[\w.-]+)/(?P<filename>.+)$',
'core.views.serve_docs',
name='docs_detail'
),
url(r'^(?P<lang_slug>\w{2})/(?P<version_slug>.*)/$',
'core.views.serve_docs',
{'filename': 'index.html'},
name='docs_detail'
),
url(r'^(?P<version_slug>.*)/$',
'projects.views.public.subdomain_handler',
name='version_subdomain_handler'
),
url(r'^$', 'projects.views.public.subdomain_handler'),
)
urlpatterns += main_patterns
<commit_msg>Fix regex on subdomain urls so empty string will match.<commit_after>
|
from django.conf.urls.defaults import url, patterns
from urls import urlpatterns as main_patterns
urlpatterns = patterns('',
url(r'^projects/(?P<project_slug>[\w.-]+)/(?P<lang_slug>\w{2})/(?P<version_slug>[\w.-]+)/(?P<filename>.*)$',
'core.views.subproject_serve_docs',
name='subproject_docs_detail'
),
url(r'^(?P<lang_slug>\w{2})/(?P<version_slug>[\w.-]+)/(?P<filename>.*)$',
'core.views.serve_docs',
name='docs_detail'
),
url(r'^(?P<lang_slug>\w{2})/(?P<version_slug>.*)/$',
'core.views.serve_docs',
{'filename': 'index.html'},
name='docs_detail'
),
url(r'^(?P<version_slug>.*)/$',
'projects.views.public.subdomain_handler',
name='version_subdomain_handler'
),
url(r'^$', 'projects.views.public.subdomain_handler'),
)
urlpatterns += main_patterns
|
from django.conf.urls.defaults import url, patterns
from urls import urlpatterns as main_patterns
urlpatterns = patterns('',
url(r'^projects/(?P<project_slug>[\w.-]+)/(?P<lang_slug>\w{2})/(?P<version_slug>[\w.-]+)/(?P<filename>.*)$',
'core.views.subproject_serve_docs',
name='subproject_docs_detail'
),
url(r'^(?P<lang_slug>\w{2})/(?P<version_slug>[\w.-]+)/(?P<filename>.+)$',
'core.views.serve_docs',
name='docs_detail'
),
url(r'^(?P<lang_slug>\w{2})/(?P<version_slug>.*)/$',
'core.views.serve_docs',
{'filename': 'index.html'},
name='docs_detail'
),
url(r'^(?P<version_slug>.*)/$',
'projects.views.public.subdomain_handler',
name='version_subdomain_handler'
),
url(r'^$', 'projects.views.public.subdomain_handler'),
)
urlpatterns += main_patterns
Fix regex on subdomain urls so empty string will match.from django.conf.urls.defaults import url, patterns
from urls import urlpatterns as main_patterns
urlpatterns = patterns('',
url(r'^projects/(?P<project_slug>[\w.-]+)/(?P<lang_slug>\w{2})/(?P<version_slug>[\w.-]+)/(?P<filename>.*)$',
'core.views.subproject_serve_docs',
name='subproject_docs_detail'
),
url(r'^(?P<lang_slug>\w{2})/(?P<version_slug>[\w.-]+)/(?P<filename>.*)$',
'core.views.serve_docs',
name='docs_detail'
),
url(r'^(?P<lang_slug>\w{2})/(?P<version_slug>.*)/$',
'core.views.serve_docs',
{'filename': 'index.html'},
name='docs_detail'
),
url(r'^(?P<version_slug>.*)/$',
'projects.views.public.subdomain_handler',
name='version_subdomain_handler'
),
url(r'^$', 'projects.views.public.subdomain_handler'),
)
urlpatterns += main_patterns
|
<commit_before>from django.conf.urls.defaults import url, patterns
from urls import urlpatterns as main_patterns
urlpatterns = patterns('',
url(r'^projects/(?P<project_slug>[\w.-]+)/(?P<lang_slug>\w{2})/(?P<version_slug>[\w.-]+)/(?P<filename>.*)$',
'core.views.subproject_serve_docs',
name='subproject_docs_detail'
),
url(r'^(?P<lang_slug>\w{2})/(?P<version_slug>[\w.-]+)/(?P<filename>.+)$',
'core.views.serve_docs',
name='docs_detail'
),
url(r'^(?P<lang_slug>\w{2})/(?P<version_slug>.*)/$',
'core.views.serve_docs',
{'filename': 'index.html'},
name='docs_detail'
),
url(r'^(?P<version_slug>.*)/$',
'projects.views.public.subdomain_handler',
name='version_subdomain_handler'
),
url(r'^$', 'projects.views.public.subdomain_handler'),
)
urlpatterns += main_patterns
<commit_msg>Fix regex on subdomain urls so empty string will match.<commit_after>from django.conf.urls.defaults import url, patterns
from urls import urlpatterns as main_patterns
urlpatterns = patterns('',
url(r'^projects/(?P<project_slug>[\w.-]+)/(?P<lang_slug>\w{2})/(?P<version_slug>[\w.-]+)/(?P<filename>.*)$',
'core.views.subproject_serve_docs',
name='subproject_docs_detail'
),
url(r'^(?P<lang_slug>\w{2})/(?P<version_slug>[\w.-]+)/(?P<filename>.*)$',
'core.views.serve_docs',
name='docs_detail'
),
url(r'^(?P<lang_slug>\w{2})/(?P<version_slug>.*)/$',
'core.views.serve_docs',
{'filename': 'index.html'},
name='docs_detail'
),
url(r'^(?P<version_slug>.*)/$',
'projects.views.public.subdomain_handler',
name='version_subdomain_handler'
),
url(r'^$', 'projects.views.public.subdomain_handler'),
)
urlpatterns += main_patterns
|
594923a44d80a2879eb1ed5b9b0a6be11e13c88f
|
tests/Epsilon_tests/ImportTest.py
|
tests/Epsilon_tests/ImportTest.py
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import EPS
from grammpy import EPSILON
class ImportTest(TestCase):
def test_idSame(self):
self.assertEqual(id(EPS),id(EPSILON))
def test_equal(self):
self.assertEqual(EPS, EPSILON)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import EPS
from grammpy import EPSILON
class ImportTest(TestCase):
def test_idSame(self):
self.assertEqual(id(EPS), id(EPSILON))
def test_equal(self):
self.assertEqual(EPS, EPSILON)
def test_equalToSelf(self):
self.assertEqual(EPS, EPS)
def test_notEqualToNumber(self):
self.assertNotEqual(EPS, 5)
def test_notEqualToString(self):
self.assertNotEqual(EPS, "asdf")
def test_notEqualToObject(self):
self.assertNotEqual(EPS, object())
if __name__ == '__main__':
main()
|
Revert "Revert "Add tests to compare epsilon with another objects""
|
Revert "Revert "Add tests to compare epsilon with another objects""
This reverts commit d13b3d89124d03f563c2ee2143ae16eec7d0b191.
|
Python
|
mit
|
PatrikValkovic/grammpy
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import EPS
from grammpy import EPSILON
class ImportTest(TestCase):
def test_idSame(self):
self.assertEqual(id(EPS),id(EPSILON))
def test_equal(self):
self.assertEqual(EPS, EPSILON)
if __name__ == '__main__':
main()
Revert "Revert "Add tests to compare epsilon with another objects""
This reverts commit d13b3d89124d03f563c2ee2143ae16eec7d0b191.
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import EPS
from grammpy import EPSILON
class ImportTest(TestCase):
def test_idSame(self):
self.assertEqual(id(EPS), id(EPSILON))
def test_equal(self):
self.assertEqual(EPS, EPSILON)
def test_equalToSelf(self):
self.assertEqual(EPS, EPS)
def test_notEqualToNumber(self):
self.assertNotEqual(EPS, 5)
def test_notEqualToString(self):
self.assertNotEqual(EPS, "asdf")
def test_notEqualToObject(self):
self.assertNotEqual(EPS, object())
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import EPS
from grammpy import EPSILON
class ImportTest(TestCase):
def test_idSame(self):
self.assertEqual(id(EPS),id(EPSILON))
def test_equal(self):
self.assertEqual(EPS, EPSILON)
if __name__ == '__main__':
main()
<commit_msg>Revert "Revert "Add tests to compare epsilon with another objects""
This reverts commit d13b3d89124d03f563c2ee2143ae16eec7d0b191.<commit_after>
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import EPS
from grammpy import EPSILON
class ImportTest(TestCase):
def test_idSame(self):
self.assertEqual(id(EPS), id(EPSILON))
def test_equal(self):
self.assertEqual(EPS, EPSILON)
def test_equalToSelf(self):
self.assertEqual(EPS, EPS)
def test_notEqualToNumber(self):
self.assertNotEqual(EPS, 5)
def test_notEqualToString(self):
self.assertNotEqual(EPS, "asdf")
def test_notEqualToObject(self):
self.assertNotEqual(EPS, object())
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import EPS
from grammpy import EPSILON
class ImportTest(TestCase):
def test_idSame(self):
self.assertEqual(id(EPS),id(EPSILON))
def test_equal(self):
self.assertEqual(EPS, EPSILON)
if __name__ == '__main__':
main()
Revert "Revert "Add tests to compare epsilon with another objects""
This reverts commit d13b3d89124d03f563c2ee2143ae16eec7d0b191.#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import EPS
from grammpy import EPSILON
class ImportTest(TestCase):
def test_idSame(self):
self.assertEqual(id(EPS), id(EPSILON))
def test_equal(self):
self.assertEqual(EPS, EPSILON)
def test_equalToSelf(self):
self.assertEqual(EPS, EPS)
def test_notEqualToNumber(self):
self.assertNotEqual(EPS, 5)
def test_notEqualToString(self):
self.assertNotEqual(EPS, "asdf")
def test_notEqualToObject(self):
self.assertNotEqual(EPS, object())
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import EPS
from grammpy import EPSILON
class ImportTest(TestCase):
def test_idSame(self):
self.assertEqual(id(EPS),id(EPSILON))
def test_equal(self):
self.assertEqual(EPS, EPSILON)
if __name__ == '__main__':
main()
<commit_msg>Revert "Revert "Add tests to compare epsilon with another objects""
This reverts commit d13b3d89124d03f563c2ee2143ae16eec7d0b191.<commit_after>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import EPS
from grammpy import EPSILON
class ImportTest(TestCase):
def test_idSame(self):
self.assertEqual(id(EPS), id(EPSILON))
def test_equal(self):
self.assertEqual(EPS, EPSILON)
def test_equalToSelf(self):
self.assertEqual(EPS, EPS)
def test_notEqualToNumber(self):
self.assertNotEqual(EPS, 5)
def test_notEqualToString(self):
self.assertNotEqual(EPS, "asdf")
def test_notEqualToObject(self):
self.assertNotEqual(EPS, object())
if __name__ == '__main__':
main()
|
957047b0ba6be692b2b8385ffb41ae9d626bfe7b
|
tests/basics/OverflowFunctions.py
|
tests/basics/OverflowFunctions.py
|
# Copyright 2012, Kay Hayen, mailto:kayhayen@gmx.de
#
# Python tests originally created or extracted from other peoples work. The
# parts were too small to be protected.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
def starImporterFunction():
from sys import *
print "Version", version.split()[0]
starImporterFunction()
def deepExec():
for_closure = 3
def deeper():
for_closure_as_well = 4
def execFunction():
code = "f=2"
# Can fool it to nest
exec code in None, None
print "Locals now", locals()
print "Closure one level up was taken", for_closure_as_well
print "Closure two levels up was taken", for_closure
print "Globals still work", starImporterFunction
print "Added local from code", f
execFunction()
deeper()
deepExec()
|
# Copyright 2012, Kay Hayen, mailto:kayhayen@gmx.de
#
# Python tests originally created or extracted from other peoples work. The
# parts were too small to be protected.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
def starImporterFunction():
from sys import *
print "Version", version.split()[0].split( "." )[:-1]
starImporterFunction()
def deepExec():
for_closure = 3
def deeper():
for_closure_as_well = 4
def execFunction():
code = "f=2"
# Can fool it to nest
exec code in None, None
print "Locals now", locals()
print "Closure one level up was taken", for_closure_as_well
print "Closure two levels up was taken", for_closure
print "Globals still work", starImporterFunction
print "Added local from code", f
execFunction()
deeper()
deepExec()
|
Make the test robust against usage for comparisons between minor Python versions.
|
Make the test robust against usage for comparisons between minor Python versions.
Typically, for Wine, I have an older version installed, than my Debian has, and
this then fails the test without strict need.
|
Python
|
apache-2.0
|
tempbottle/Nuitka,kayhayen/Nuitka,wfxiang08/Nuitka,tempbottle/Nuitka,wfxiang08/Nuitka,kayhayen/Nuitka,kayhayen/Nuitka,tempbottle/Nuitka,wfxiang08/Nuitka,kayhayen/Nuitka,wfxiang08/Nuitka,tempbottle/Nuitka
|
# Copyright 2012, Kay Hayen, mailto:kayhayen@gmx.de
#
# Python tests originally created or extracted from other peoples work. The
# parts were too small to be protected.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
def starImporterFunction():
from sys import *
print "Version", version.split()[0]
starImporterFunction()
def deepExec():
for_closure = 3
def deeper():
for_closure_as_well = 4
def execFunction():
code = "f=2"
# Can fool it to nest
exec code in None, None
print "Locals now", locals()
print "Closure one level up was taken", for_closure_as_well
print "Closure two levels up was taken", for_closure
print "Globals still work", starImporterFunction
print "Added local from code", f
execFunction()
deeper()
deepExec()
Make the test robust against usage for comparisons between minor Python versions.
Typically, for Wine, I have an older version installed, than my Debian has, and
this then fails the test without strict need.
|
# Copyright 2012, Kay Hayen, mailto:kayhayen@gmx.de
#
# Python tests originally created or extracted from other peoples work. The
# parts were too small to be protected.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
def starImporterFunction():
from sys import *
print "Version", version.split()[0].split( "." )[:-1]
starImporterFunction()
def deepExec():
for_closure = 3
def deeper():
for_closure_as_well = 4
def execFunction():
code = "f=2"
# Can fool it to nest
exec code in None, None
print "Locals now", locals()
print "Closure one level up was taken", for_closure_as_well
print "Closure two levels up was taken", for_closure
print "Globals still work", starImporterFunction
print "Added local from code", f
execFunction()
deeper()
deepExec()
|
<commit_before># Copyright 2012, Kay Hayen, mailto:kayhayen@gmx.de
#
# Python tests originally created or extracted from other peoples work. The
# parts were too small to be protected.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
def starImporterFunction():
from sys import *
print "Version", version.split()[0]
starImporterFunction()
def deepExec():
for_closure = 3
def deeper():
for_closure_as_well = 4
def execFunction():
code = "f=2"
# Can fool it to nest
exec code in None, None
print "Locals now", locals()
print "Closure one level up was taken", for_closure_as_well
print "Closure two levels up was taken", for_closure
print "Globals still work", starImporterFunction
print "Added local from code", f
execFunction()
deeper()
deepExec()
<commit_msg>Make the test robust against usage for comparisons between minor Python versions.
Typically, for Wine, I have an older version installed, than my Debian has, and
this then fails the test without strict need.<commit_after>
|
# Copyright 2012, Kay Hayen, mailto:kayhayen@gmx.de
#
# Python tests originally created or extracted from other peoples work. The
# parts were too small to be protected.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
def starImporterFunction():
from sys import *
print "Version", version.split()[0].split( "." )[:-1]
starImporterFunction()
def deepExec():
for_closure = 3
def deeper():
for_closure_as_well = 4
def execFunction():
code = "f=2"
# Can fool it to nest
exec code in None, None
print "Locals now", locals()
print "Closure one level up was taken", for_closure_as_well
print "Closure two levels up was taken", for_closure
print "Globals still work", starImporterFunction
print "Added local from code", f
execFunction()
deeper()
deepExec()
|
# Copyright 2012, Kay Hayen, mailto:kayhayen@gmx.de
#
# Python tests originally created or extracted from other peoples work. The
# parts were too small to be protected.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
def starImporterFunction():
from sys import *
print "Version", version.split()[0]
starImporterFunction()
def deepExec():
for_closure = 3
def deeper():
for_closure_as_well = 4
def execFunction():
code = "f=2"
# Can fool it to nest
exec code in None, None
print "Locals now", locals()
print "Closure one level up was taken", for_closure_as_well
print "Closure two levels up was taken", for_closure
print "Globals still work", starImporterFunction
print "Added local from code", f
execFunction()
deeper()
deepExec()
Make the test robust against usage for comparisons between minor Python versions.
Typically, for Wine, I have an older version installed, than my Debian has, and
this then fails the test without strict need.# Copyright 2012, Kay Hayen, mailto:kayhayen@gmx.de
#
# Python tests originally created or extracted from other peoples work. The
# parts were too small to be protected.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
def starImporterFunction():
from sys import *
print "Version", version.split()[0].split( "." )[:-1]
starImporterFunction()
def deepExec():
for_closure = 3
def deeper():
for_closure_as_well = 4
def execFunction():
code = "f=2"
# Can fool it to nest
exec code in None, None
print "Locals now", locals()
print "Closure one level up was taken", for_closure_as_well
print "Closure two levels up was taken", for_closure
print "Globals still work", starImporterFunction
print "Added local from code", f
execFunction()
deeper()
deepExec()
|
<commit_before># Copyright 2012, Kay Hayen, mailto:kayhayen@gmx.de
#
# Python tests originally created or extracted from other peoples work. The
# parts were too small to be protected.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
def starImporterFunction():
from sys import *
print "Version", version.split()[0]
starImporterFunction()
def deepExec():
for_closure = 3
def deeper():
for_closure_as_well = 4
def execFunction():
code = "f=2"
# Can fool it to nest
exec code in None, None
print "Locals now", locals()
print "Closure one level up was taken", for_closure_as_well
print "Closure two levels up was taken", for_closure
print "Globals still work", starImporterFunction
print "Added local from code", f
execFunction()
deeper()
deepExec()
<commit_msg>Make the test robust against usage for comparisons between minor Python versions.
Typically, for Wine, I have an older version installed, than my Debian has, and
this then fails the test without strict need.<commit_after># Copyright 2012, Kay Hayen, mailto:kayhayen@gmx.de
#
# Python tests originally created or extracted from other peoples work. The
# parts were too small to be protected.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
def starImporterFunction():
from sys import *
print "Version", version.split()[0].split( "." )[:-1]
starImporterFunction()
def deepExec():
for_closure = 3
def deeper():
for_closure_as_well = 4
def execFunction():
code = "f=2"
# Can fool it to nest
exec code in None, None
print "Locals now", locals()
print "Closure one level up was taken", for_closure_as_well
print "Closure two levels up was taken", for_closure
print "Globals still work", starImporterFunction
print "Added local from code", f
execFunction()
deeper()
deepExec()
|
34fda0b20a87b94d7413054bfcfc81dad0ecde19
|
utils/get_message.py
|
utils/get_message.py
|
import amqp
from contextlib import closing
def get_message(queue):
""" Get the first message from a queue.
The first message from a queue is retrieved. If there is no such message, the function exits quietly.
:param queue: The name of the queue from which to get the message.
Usage::
>>> from utils import get_message
>>> message = get_message('queue')
"""
with closing(amqp.Connection()) as connection:
channel = connection.channel()
return channel.basic_get(queue=queue)
|
import amqp
from contextlib import closing
def __get_channel(connection):
return connection.channel()
def __get_message_from_queue(channel, queue):
return channel.basic_get(queue=queue)
def get_message(queue):
""" Get the first message from a queue.
The first message from a queue is retrieved. If there is no such message, the function exits quietly.
:param queue: The name of the queue from which to get the message.
Usage::
>>> from utils import get_message
>>> message = get_message('queue')
"""
with closing(amqp.Connection()) as connection:
channel = __get_channel(connection)
return __get_message_from_queue(channel, queue)
|
Revert "Remove redundant functions (one too many levels of abstraction)@"
|
Revert "Remove redundant functions (one too many levels of abstraction)@"
This reverts commit 9c5bf06d1427db9839b1531aa08e66574c7b4582.
|
Python
|
mit
|
jdgillespie91/trackerSpend,jdgillespie91/trackerSpend
|
import amqp
from contextlib import closing
def get_message(queue):
""" Get the first message from a queue.
The first message from a queue is retrieved. If there is no such message, the function exits quietly.
:param queue: The name of the queue from which to get the message.
Usage::
>>> from utils import get_message
>>> message = get_message('queue')
"""
with closing(amqp.Connection()) as connection:
channel = connection.channel()
return channel.basic_get(queue=queue)
Revert "Remove redundant functions (one too many levels of abstraction)@"
This reverts commit 9c5bf06d1427db9839b1531aa08e66574c7b4582.
|
import amqp
from contextlib import closing
def __get_channel(connection):
return connection.channel()
def __get_message_from_queue(channel, queue):
return channel.basic_get(queue=queue)
def get_message(queue):
""" Get the first message from a queue.
The first message from a queue is retrieved. If there is no such message, the function exits quietly.
:param queue: The name of the queue from which to get the message.
Usage::
>>> from utils import get_message
>>> message = get_message('queue')
"""
with closing(amqp.Connection()) as connection:
channel = __get_channel(connection)
return __get_message_from_queue(channel, queue)
|
<commit_before>import amqp
from contextlib import closing
def get_message(queue):
""" Get the first message from a queue.
The first message from a queue is retrieved. If there is no such message, the function exits quietly.
:param queue: The name of the queue from which to get the message.
Usage::
>>> from utils import get_message
>>> message = get_message('queue')
"""
with closing(amqp.Connection()) as connection:
channel = connection.channel()
return channel.basic_get(queue=queue)
<commit_msg>Revert "Remove redundant functions (one too many levels of abstraction)@"
This reverts commit 9c5bf06d1427db9839b1531aa08e66574c7b4582.<commit_after>
|
import amqp
from contextlib import closing
def __get_channel(connection):
return connection.channel()
def __get_message_from_queue(channel, queue):
return channel.basic_get(queue=queue)
def get_message(queue):
""" Get the first message from a queue.
The first message from a queue is retrieved. If there is no such message, the function exits quietly.
:param queue: The name of the queue from which to get the message.
Usage::
>>> from utils import get_message
>>> message = get_message('queue')
"""
with closing(amqp.Connection()) as connection:
channel = __get_channel(connection)
return __get_message_from_queue(channel, queue)
|
import amqp
from contextlib import closing
def get_message(queue):
""" Get the first message from a queue.
The first message from a queue is retrieved. If there is no such message, the function exits quietly.
:param queue: The name of the queue from which to get the message.
Usage::
>>> from utils import get_message
>>> message = get_message('queue')
"""
with closing(amqp.Connection()) as connection:
channel = connection.channel()
return channel.basic_get(queue=queue)
Revert "Remove redundant functions (one too many levels of abstraction)@"
This reverts commit 9c5bf06d1427db9839b1531aa08e66574c7b4582.import amqp
from contextlib import closing
def __get_channel(connection):
return connection.channel()
def __get_message_from_queue(channel, queue):
return channel.basic_get(queue=queue)
def get_message(queue):
""" Get the first message from a queue.
The first message from a queue is retrieved. If there is no such message, the function exits quietly.
:param queue: The name of the queue from which to get the message.
Usage::
>>> from utils import get_message
>>> message = get_message('queue')
"""
with closing(amqp.Connection()) as connection:
channel = __get_channel(connection)
return __get_message_from_queue(channel, queue)
|
<commit_before>import amqp
from contextlib import closing
def get_message(queue):
""" Get the first message from a queue.
The first message from a queue is retrieved. If there is no such message, the function exits quietly.
:param queue: The name of the queue from which to get the message.
Usage::
>>> from utils import get_message
>>> message = get_message('queue')
"""
with closing(amqp.Connection()) as connection:
channel = connection.channel()
return channel.basic_get(queue=queue)
<commit_msg>Revert "Remove redundant functions (one too many levels of abstraction)@"
This reverts commit 9c5bf06d1427db9839b1531aa08e66574c7b4582.<commit_after>import amqp
from contextlib import closing
def __get_channel(connection):
return connection.channel()
def __get_message_from_queue(channel, queue):
return channel.basic_get(queue=queue)
def get_message(queue):
""" Get the first message from a queue.
The first message from a queue is retrieved. If there is no such message, the function exits quietly.
:param queue: The name of the queue from which to get the message.
Usage::
>>> from utils import get_message
>>> message = get_message('queue')
"""
with closing(amqp.Connection()) as connection:
channel = __get_channel(connection)
return __get_message_from_queue(channel, queue)
|
7eb8da13a873604f12dd9a4b9e890be7447115c4
|
tests/services/authorization/test_service.py
|
tests/services/authorization/test_service.py
|
"""
:Copyright: 2006-2018 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.authorization import service as authorization_service
from tests.base import AbstractAppTestCase
class AuthorizationServiceTestCase(AbstractAppTestCase):
def test_get_permission_ids_for_user(self):
board_moderator_role = create_role_with_permissions('board_moderator', [
'board_topic_hide',
'board_topic_pin',
])
news_editor_role = create_role_with_permissions('news_editor', [
'news_item_create',
])
user = self.create_user()
permissions_before = authorization_service.get_permission_ids_for_user(user.id)
assert permissions_before == frozenset()
assign_roles_to_user(user.id, {board_moderator_role, news_editor_role})
permissions_after = authorization_service.get_permission_ids_for_user(user.id)
assert permissions_after == {
'board_topic_hide',
'board_topic_pin',
'news_item_create',
}
def create_role_with_permissions(role_id, permission_ids):
role = authorization_service.create_role(role_id, role_id)
for permission_id in permission_ids:
permission = authorization_service.create_permission(permission_id,
permission_id)
authorization_service.assign_permission_to_role(permission.id, role.id)
return role
def assign_roles_to_user(user_id, roles):
for role in roles:
authorization_service.assign_role_to_user(user_id, role.id)
|
"""
:Copyright: 2006-2018 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.authorization import service as authorization_service
from tests.base import AbstractAppTestCase
from tests.helpers import assign_permissions_to_user
class AuthorizationServiceTestCase(AbstractAppTestCase):
def test_get_permission_ids_for_user(self):
user = self.create_user()
permissions_before = authorization_service.get_permission_ids_for_user(user.id)
assert permissions_before == frozenset()
assign_permissions_to_user(user.id, 'board_moderator', {
'board_topic_hide',
'board_topic_pin',
})
assign_permissions_to_user(user.id, 'news_editor', {
'news_item_create',
})
permissions_after = authorization_service.get_permission_ids_for_user(user.id)
assert permissions_after == {
'board_topic_hide',
'board_topic_pin',
'news_item_create',
}
|
Use existing test helper to create and assign permissions and roles
|
Use existing test helper to create and assign permissions and roles
|
Python
|
bsd-3-clause
|
m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps,m-ober/byceps
|
"""
:Copyright: 2006-2018 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.authorization import service as authorization_service
from tests.base import AbstractAppTestCase
class AuthorizationServiceTestCase(AbstractAppTestCase):
def test_get_permission_ids_for_user(self):
board_moderator_role = create_role_with_permissions('board_moderator', [
'board_topic_hide',
'board_topic_pin',
])
news_editor_role = create_role_with_permissions('news_editor', [
'news_item_create',
])
user = self.create_user()
permissions_before = authorization_service.get_permission_ids_for_user(user.id)
assert permissions_before == frozenset()
assign_roles_to_user(user.id, {board_moderator_role, news_editor_role})
permissions_after = authorization_service.get_permission_ids_for_user(user.id)
assert permissions_after == {
'board_topic_hide',
'board_topic_pin',
'news_item_create',
}
def create_role_with_permissions(role_id, permission_ids):
role = authorization_service.create_role(role_id, role_id)
for permission_id in permission_ids:
permission = authorization_service.create_permission(permission_id,
permission_id)
authorization_service.assign_permission_to_role(permission.id, role.id)
return role
def assign_roles_to_user(user_id, roles):
for role in roles:
authorization_service.assign_role_to_user(user_id, role.id)
Use existing test helper to create and assign permissions and roles
|
"""
:Copyright: 2006-2018 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.authorization import service as authorization_service
from tests.base import AbstractAppTestCase
from tests.helpers import assign_permissions_to_user
class AuthorizationServiceTestCase(AbstractAppTestCase):
def test_get_permission_ids_for_user(self):
user = self.create_user()
permissions_before = authorization_service.get_permission_ids_for_user(user.id)
assert permissions_before == frozenset()
assign_permissions_to_user(user.id, 'board_moderator', {
'board_topic_hide',
'board_topic_pin',
})
assign_permissions_to_user(user.id, 'news_editor', {
'news_item_create',
})
permissions_after = authorization_service.get_permission_ids_for_user(user.id)
assert permissions_after == {
'board_topic_hide',
'board_topic_pin',
'news_item_create',
}
|
<commit_before>"""
:Copyright: 2006-2018 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.authorization import service as authorization_service
from tests.base import AbstractAppTestCase
class AuthorizationServiceTestCase(AbstractAppTestCase):
def test_get_permission_ids_for_user(self):
board_moderator_role = create_role_with_permissions('board_moderator', [
'board_topic_hide',
'board_topic_pin',
])
news_editor_role = create_role_with_permissions('news_editor', [
'news_item_create',
])
user = self.create_user()
permissions_before = authorization_service.get_permission_ids_for_user(user.id)
assert permissions_before == frozenset()
assign_roles_to_user(user.id, {board_moderator_role, news_editor_role})
permissions_after = authorization_service.get_permission_ids_for_user(user.id)
assert permissions_after == {
'board_topic_hide',
'board_topic_pin',
'news_item_create',
}
def create_role_with_permissions(role_id, permission_ids):
role = authorization_service.create_role(role_id, role_id)
for permission_id in permission_ids:
permission = authorization_service.create_permission(permission_id,
permission_id)
authorization_service.assign_permission_to_role(permission.id, role.id)
return role
def assign_roles_to_user(user_id, roles):
for role in roles:
authorization_service.assign_role_to_user(user_id, role.id)
<commit_msg>Use existing test helper to create and assign permissions and roles<commit_after>
|
"""
:Copyright: 2006-2018 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.authorization import service as authorization_service
from tests.base import AbstractAppTestCase
from tests.helpers import assign_permissions_to_user
class AuthorizationServiceTestCase(AbstractAppTestCase):
def test_get_permission_ids_for_user(self):
user = self.create_user()
permissions_before = authorization_service.get_permission_ids_for_user(user.id)
assert permissions_before == frozenset()
assign_permissions_to_user(user.id, 'board_moderator', {
'board_topic_hide',
'board_topic_pin',
})
assign_permissions_to_user(user.id, 'news_editor', {
'news_item_create',
})
permissions_after = authorization_service.get_permission_ids_for_user(user.id)
assert permissions_after == {
'board_topic_hide',
'board_topic_pin',
'news_item_create',
}
|
"""
:Copyright: 2006-2018 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.authorization import service as authorization_service
from tests.base import AbstractAppTestCase
class AuthorizationServiceTestCase(AbstractAppTestCase):
def test_get_permission_ids_for_user(self):
board_moderator_role = create_role_with_permissions('board_moderator', [
'board_topic_hide',
'board_topic_pin',
])
news_editor_role = create_role_with_permissions('news_editor', [
'news_item_create',
])
user = self.create_user()
permissions_before = authorization_service.get_permission_ids_for_user(user.id)
assert permissions_before == frozenset()
assign_roles_to_user(user.id, {board_moderator_role, news_editor_role})
permissions_after = authorization_service.get_permission_ids_for_user(user.id)
assert permissions_after == {
'board_topic_hide',
'board_topic_pin',
'news_item_create',
}
def create_role_with_permissions(role_id, permission_ids):
role = authorization_service.create_role(role_id, role_id)
for permission_id in permission_ids:
permission = authorization_service.create_permission(permission_id,
permission_id)
authorization_service.assign_permission_to_role(permission.id, role.id)
return role
def assign_roles_to_user(user_id, roles):
for role in roles:
authorization_service.assign_role_to_user(user_id, role.id)
Use existing test helper to create and assign permissions and roles"""
:Copyright: 2006-2018 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.authorization import service as authorization_service
from tests.base import AbstractAppTestCase
from tests.helpers import assign_permissions_to_user
class AuthorizationServiceTestCase(AbstractAppTestCase):
def test_get_permission_ids_for_user(self):
user = self.create_user()
permissions_before = authorization_service.get_permission_ids_for_user(user.id)
assert permissions_before == frozenset()
assign_permissions_to_user(user.id, 'board_moderator', {
'board_topic_hide',
'board_topic_pin',
})
assign_permissions_to_user(user.id, 'news_editor', {
'news_item_create',
})
permissions_after = authorization_service.get_permission_ids_for_user(user.id)
assert permissions_after == {
'board_topic_hide',
'board_topic_pin',
'news_item_create',
}
|
<commit_before>"""
:Copyright: 2006-2018 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.authorization import service as authorization_service
from tests.base import AbstractAppTestCase
class AuthorizationServiceTestCase(AbstractAppTestCase):
def test_get_permission_ids_for_user(self):
board_moderator_role = create_role_with_permissions('board_moderator', [
'board_topic_hide',
'board_topic_pin',
])
news_editor_role = create_role_with_permissions('news_editor', [
'news_item_create',
])
user = self.create_user()
permissions_before = authorization_service.get_permission_ids_for_user(user.id)
assert permissions_before == frozenset()
assign_roles_to_user(user.id, {board_moderator_role, news_editor_role})
permissions_after = authorization_service.get_permission_ids_for_user(user.id)
assert permissions_after == {
'board_topic_hide',
'board_topic_pin',
'news_item_create',
}
def create_role_with_permissions(role_id, permission_ids):
role = authorization_service.create_role(role_id, role_id)
for permission_id in permission_ids:
permission = authorization_service.create_permission(permission_id,
permission_id)
authorization_service.assign_permission_to_role(permission.id, role.id)
return role
def assign_roles_to_user(user_id, roles):
for role in roles:
authorization_service.assign_role_to_user(user_id, role.id)
<commit_msg>Use existing test helper to create and assign permissions and roles<commit_after>"""
:Copyright: 2006-2018 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.authorization import service as authorization_service
from tests.base import AbstractAppTestCase
from tests.helpers import assign_permissions_to_user
class AuthorizationServiceTestCase(AbstractAppTestCase):

    def test_get_permission_ids_for_user(self):
        """Permissions granted via roles must be reported for the user."""
        user = self.create_user()

        # A freshly created user has no permissions at all.
        permissions_before = authorization_service.get_permission_ids_for_user(user.id)
        assert permissions_before == frozenset()

        board_permission_ids = {
            'board_topic_hide',
            'board_topic_pin',
        }
        news_permission_ids = {
            'news_item_create',
        }

        assign_permissions_to_user(user.id, 'board_moderator',
                                   board_permission_ids)
        assign_permissions_to_user(user.id, 'news_editor',
                                   news_permission_ids)

        # The union of both role grants must now be reported.
        permissions_after = authorization_service.get_permission_ids_for_user(user.id)
        assert permissions_after == board_permission_ids | news_permission_ids
|
2b6f3687c5203364ce3d935e10a05fbcc1b16ed5
|
tests/messenger/messaging_test.py
|
tests/messenger/messaging_test.py
|
import os
import vcr_unittest
from app.messenger import messaging
class MessagingTestCase(vcr_unittest.VCRTestCase):
def setUp(self):
self.access_token = os.environ['ACCESS_TOKEN']
self.user_id = '100011269503253'
def test_send_main_menu(self):
response = messaging.send_main_menu(self.access_token, self.user_id)
self.assertTrue('recipient_id' in response)
self.assertTrue('message_id' in response)
self.assertEqual(response['recipient_id'], self.user_id)
|
import os
import vcr_unittest
from app.messenger import messaging
class MessagingTestCase(vcr_unittest.VCRTestCase):
def setUp(self):
self.access_token = os.environ['ACCESS_TOKEN']
self.user_id = '474276666029691'
def test_send_main_menu(self):
response = messaging.send_main_menu(self.access_token, self.user_id)
self.assertTrue('recipient_id' in response)
self.assertTrue('message_id' in response)
self.assertEqual(response['recipient_id'], self.user_id)
|
Replace test user_id with a correct one
|
Replace test user_id with a correct one
|
Python
|
mit
|
Stark-Mountain/meetup-facebook-bot,Stark-Mountain/meetup-facebook-bot
|
import os
import vcr_unittest
from app.messenger import messaging
class MessagingTestCase(vcr_unittest.VCRTestCase):
def setUp(self):
self.access_token = os.environ['ACCESS_TOKEN']
self.user_id = '100011269503253'
def test_send_main_menu(self):
response = messaging.send_main_menu(self.access_token, self.user_id)
self.assertTrue('recipient_id' in response)
self.assertTrue('message_id' in response)
self.assertEqual(response['recipient_id'], self.user_id)
Replace test user_id with a correct one
|
import os
import vcr_unittest
from app.messenger import messaging
class MessagingTestCase(vcr_unittest.VCRTestCase):
def setUp(self):
self.access_token = os.environ['ACCESS_TOKEN']
self.user_id = '474276666029691'
def test_send_main_menu(self):
response = messaging.send_main_menu(self.access_token, self.user_id)
self.assertTrue('recipient_id' in response)
self.assertTrue('message_id' in response)
self.assertEqual(response['recipient_id'], self.user_id)
|
<commit_before>import os
import vcr_unittest
from app.messenger import messaging
class MessagingTestCase(vcr_unittest.VCRTestCase):
def setUp(self):
self.access_token = os.environ['ACCESS_TOKEN']
self.user_id = '100011269503253'
def test_send_main_menu(self):
response = messaging.send_main_menu(self.access_token, self.user_id)
self.assertTrue('recipient_id' in response)
self.assertTrue('message_id' in response)
self.assertEqual(response['recipient_id'], self.user_id)
<commit_msg>Replace test user_id with a correct one<commit_after>
|
import os
import vcr_unittest
from app.messenger import messaging
class MessagingTestCase(vcr_unittest.VCRTestCase):
def setUp(self):
self.access_token = os.environ['ACCESS_TOKEN']
self.user_id = '474276666029691'
def test_send_main_menu(self):
response = messaging.send_main_menu(self.access_token, self.user_id)
self.assertTrue('recipient_id' in response)
self.assertTrue('message_id' in response)
self.assertEqual(response['recipient_id'], self.user_id)
|
import os
import vcr_unittest
from app.messenger import messaging
class MessagingTestCase(vcr_unittest.VCRTestCase):
    """Exercises the messenger API via VCR-recorded HTTP cassettes."""

    def setUp(self):
        # ACCESS_TOKEN must be present in the environment when (re)recording.
        self.access_token = os.environ['ACCESS_TOKEN']
        self.user_id = '474276666029691'

    def test_send_main_menu(self):
        response = messaging.send_main_menu(self.access_token, self.user_id)

        for expected_key in ('recipient_id', 'message_id'):
            self.assertTrue(expected_key in response)
        self.assertEqual(response['recipient_id'], self.user_id)
Replace test user_id with a correct oneimport os
import vcr_unittest
from app.messenger import messaging
class MessagingTestCase(vcr_unittest.VCRTestCase):
def setUp(self):
self.access_token = os.environ['ACCESS_TOKEN']
self.user_id = '474276666029691'
def test_send_main_menu(self):
response = messaging.send_main_menu(self.access_token, self.user_id)
self.assertTrue('recipient_id' in response)
self.assertTrue('message_id' in response)
self.assertEqual(response['recipient_id'], self.user_id)
|
<commit_before>import os
import vcr_unittest
from app.messenger import messaging
class MessagingTestCase(vcr_unittest.VCRTestCase):
def setUp(self):
self.access_token = os.environ['ACCESS_TOKEN']
self.user_id = '100011269503253'
def test_send_main_menu(self):
response = messaging.send_main_menu(self.access_token, self.user_id)
self.assertTrue('recipient_id' in response)
self.assertTrue('message_id' in response)
self.assertEqual(response['recipient_id'], self.user_id)
<commit_msg>Replace test user_id with a correct one<commit_after>import os
import vcr_unittest
from app.messenger import messaging
class MessagingTestCase(vcr_unittest.VCRTestCase):
def setUp(self):
self.access_token = os.environ['ACCESS_TOKEN']
self.user_id = '474276666029691'
def test_send_main_menu(self):
response = messaging.send_main_menu(self.access_token, self.user_id)
self.assertTrue('recipient_id' in response)
self.assertTrue('message_id' in response)
self.assertEqual(response['recipient_id'], self.user_id)
|
bfed3c6b45810d2dacfbf71e499e450a0c762ad7
|
django_rq/decorators.py
|
django_rq/decorators.py
|
from rq.decorators import job
from .queues import get_queue
class job(job):
"""
The same as RQ's job decorator, but it works automatically works out
the ``connection`` argument from RQ_QUEUES.
"""
def __init__(self, queue, connection=None, *args, **kwargs):
if isinstance(queue, basestring):
try:
queue = get_queue(queue)
if connection is None:
connection = queue.connection
except KeyError:
pass
super(job, self).__init__(queue, connection, *args, **kwargs)
|
from rq.decorators import job as _rq_job
from .queues import get_queue
def job(func_or_queue, connection=None, *args, **kwargs):
"""
The same as RQ's job decorator, but it works automatically works out
the ``connection`` argument from RQ_QUEUES.
And also, it allows simplified ``@job`` syntax to put job into
default queue.
"""
if callable(func_or_queue):
func = func_or_queue
queue = 'default'
else:
func = None
queue = func_or_queue
if isinstance(queue, basestring):
try:
queue = get_queue(queue)
if connection is None:
connection = queue.connection
except KeyError:
pass
decorator = _rq_job(queue, connection=connection, *args, **kwargs)
if func:
return decorator(func)
return decorator
|
Allow simple syntax for `job` decorator.
|
Allow simple syntax for `job` decorator.
It is nice to use simple syntax:
@job
def some_func():
pass
in cases, when you have only 'default' queue.
|
Python
|
mit
|
mjec/django-rq,ui/django-rq,sbussetti/django-rq,meteozond/django-rq,viaregio/django-rq,1024inc/django-rq,lechup/django-rq,ryanisnan/django-rq,sbussetti/django-rq,lechup/django-rq,ryanisnan/django-rq,1024inc/django-rq,meteozond/django-rq,ui/django-rq,mjec/django-rq,viaregio/django-rq
|
from rq.decorators import job
from .queues import get_queue
class job(job):
"""
The same as RQ's job decorator, but it works automatically works out
the ``connection`` argument from RQ_QUEUES.
"""
def __init__(self, queue, connection=None, *args, **kwargs):
if isinstance(queue, basestring):
try:
queue = get_queue(queue)
if connection is None:
connection = queue.connection
except KeyError:
pass
super(job, self).__init__(queue, connection, *args, **kwargs)Allow simple syntax for `job` decorator.
It is nice to use simple syntax:
@job
def some_func():
pass
in cases, when you have only 'default' queue.
|
from rq.decorators import job as _rq_job
from .queues import get_queue
def job(func_or_queue, connection=None, *args, **kwargs):
"""
The same as RQ's job decorator, but it works automatically works out
the ``connection`` argument from RQ_QUEUES.
And also, it allows simplified ``@job`` syntax to put job into
default queue.
"""
if callable(func_or_queue):
func = func_or_queue
queue = 'default'
else:
func = None
queue = func_or_queue
if isinstance(queue, basestring):
try:
queue = get_queue(queue)
if connection is None:
connection = queue.connection
except KeyError:
pass
decorator = _rq_job(queue, connection=connection, *args, **kwargs)
if func:
return decorator(func)
return decorator
|
<commit_before>from rq.decorators import job
from .queues import get_queue
class job(job):
"""
The same as RQ's job decorator, but it works automatically works out
the ``connection`` argument from RQ_QUEUES.
"""
def __init__(self, queue, connection=None, *args, **kwargs):
if isinstance(queue, basestring):
try:
queue = get_queue(queue)
if connection is None:
connection = queue.connection
except KeyError:
pass
super(job, self).__init__(queue, connection, *args, **kwargs)<commit_msg>Allow simple syntax for `job` decorator.
It is nice to use simple syntax:
@job
def some_func():
pass
in cases, when you have only 'default' queue.<commit_after>
|
from rq.decorators import job as _rq_job
from .queues import get_queue
def job(func_or_queue, connection=None, *args, **kwargs):
    """
    The same as RQ's job decorator, but it works automatically works out
    the ``connection`` argument from RQ_QUEUES.

    And also, it allows simplified ``@job`` syntax to put job into
    default queue.
    """
    # Bare ``@job`` usage: the decorated function arrives directly and the
    # job goes to the 'default' queue.  Otherwise the argument names a queue.
    if callable(func_or_queue):
        func, queue = func_or_queue, 'default'
    else:
        func, queue = None, func_or_queue

    if isinstance(queue, basestring):
        try:
            # Resolve the queue name via RQ_QUEUES; derive the connection
            # from it unless the caller supplied one explicitly.
            queue = get_queue(queue)
            if connection is None:
                connection = queue.connection
        except KeyError:
            # Unknown queue name: fall through and let RQ handle the string.
            pass

    decorator = _rq_job(queue, connection=connection, *args, **kwargs)
    # Bare usage applies the decorator immediately; parameterised usage
    # returns it for the ``@job('name')`` form.
    return decorator(func) if func else decorator
|
from rq.decorators import job
from .queues import get_queue
class job(job):
"""
The same as RQ's job decorator, but it works automatically works out
the ``connection`` argument from RQ_QUEUES.
"""
def __init__(self, queue, connection=None, *args, **kwargs):
if isinstance(queue, basestring):
try:
queue = get_queue(queue)
if connection is None:
connection = queue.connection
except KeyError:
pass
super(job, self).__init__(queue, connection, *args, **kwargs)Allow simple syntax for `job` decorator.
It is nice to use simple syntax:
@job
def some_func():
pass
in cases, when you have only 'default' queue.from rq.decorators import job as _rq_job
from .queues import get_queue
def job(func_or_queue, connection=None, *args, **kwargs):
"""
The same as RQ's job decorator, but it works automatically works out
the ``connection`` argument from RQ_QUEUES.
And also, it allows simplified ``@job`` syntax to put job into
default queue.
"""
if callable(func_or_queue):
func = func_or_queue
queue = 'default'
else:
func = None
queue = func_or_queue
if isinstance(queue, basestring):
try:
queue = get_queue(queue)
if connection is None:
connection = queue.connection
except KeyError:
pass
decorator = _rq_job(queue, connection=connection, *args, **kwargs)
if func:
return decorator(func)
return decorator
|
<commit_before>from rq.decorators import job
from .queues import get_queue
class job(job):
"""
The same as RQ's job decorator, but it works automatically works out
the ``connection`` argument from RQ_QUEUES.
"""
def __init__(self, queue, connection=None, *args, **kwargs):
if isinstance(queue, basestring):
try:
queue = get_queue(queue)
if connection is None:
connection = queue.connection
except KeyError:
pass
super(job, self).__init__(queue, connection, *args, **kwargs)<commit_msg>Allow simple syntax for `job` decorator.
It is nice to use simple syntax:
@job
def some_func():
pass
in cases, when you have only 'default' queue.<commit_after>from rq.decorators import job as _rq_job
from .queues import get_queue
def job(func_or_queue, connection=None, *args, **kwargs):
"""
The same as RQ's job decorator, but it works automatically works out
the ``connection`` argument from RQ_QUEUES.
And also, it allows simplified ``@job`` syntax to put job into
default queue.
"""
if callable(func_or_queue):
func = func_or_queue
queue = 'default'
else:
func = None
queue = func_or_queue
if isinstance(queue, basestring):
try:
queue = get_queue(queue)
if connection is None:
connection = queue.connection
except KeyError:
pass
decorator = _rq_job(queue, connection=connection, *args, **kwargs)
if func:
return decorator(func)
return decorator
|
748a9ebf425f7ff4b28c34bc371735d2a892ec58
|
snoop/ipython.py
|
snoop/ipython.py
|
import ast
from snoop import snoop
from IPython import get_ipython
from IPython.core.magic import Magics, cell_magic, magics_class
@magics_class
class SnoopMagics(Magics):
@cell_magic
def snoop(self, _line, cell):
shell = get_ipython()
filename = shell.compile.cache(cell)
code = shell.compile(cell, filename, 'exec')
tracer = snoop()
tracer.variable_whitelist = set()
for node in ast.walk(ast.parse(cell)):
if isinstance(node, ast.Name):
tracer.variable_whitelist.add(node.id)
tracer.target_codes.add(code)
with tracer:
shell.ex(code)
|
import ast
from snoop import snoop
from IPython.core.magic import Magics, cell_magic, magics_class
@magics_class
class SnoopMagics(Magics):
@cell_magic
def snoop(self, _line, cell):
filename = self.shell.compile.cache(cell)
code = self.shell.compile(cell, filename, 'exec')
tracer = snoop()
tracer.variable_whitelist = set()
for node in ast.walk(ast.parse(cell)):
if isinstance(node, ast.Name):
tracer.variable_whitelist.add(node.id)
tracer.target_codes.add(code)
with tracer:
self.shell.ex(code)
|
Use shell attribute of magics class
|
Use shell attribute of magics class
|
Python
|
mit
|
alexmojaki/snoop,alexmojaki/snoop
|
import ast
from snoop import snoop
from IPython import get_ipython
from IPython.core.magic import Magics, cell_magic, magics_class
@magics_class
class SnoopMagics(Magics):
@cell_magic
def snoop(self, _line, cell):
shell = get_ipython()
filename = shell.compile.cache(cell)
code = shell.compile(cell, filename, 'exec')
tracer = snoop()
tracer.variable_whitelist = set()
for node in ast.walk(ast.parse(cell)):
if isinstance(node, ast.Name):
tracer.variable_whitelist.add(node.id)
tracer.target_codes.add(code)
with tracer:
shell.ex(code)
Use shell attribute of magics class
|
import ast
from snoop import snoop
from IPython.core.magic import Magics, cell_magic, magics_class
@magics_class
class SnoopMagics(Magics):
    @cell_magic
    def snoop(self, _line, cell):
        """Execute the cell under the ``snoop`` tracer, whitelisting every
        name that appears in the cell's source."""
        filename = self.shell.compile.cache(cell)
        compiled = self.shell.compile(cell, filename, 'exec')

        tracer = snoop()
        # Whitelist every identifier referenced in the cell so the tracer
        # reports all of them.
        tracer.variable_whitelist = {
            node.id
            for node in ast.walk(ast.parse(cell))
            if isinstance(node, ast.Name)
        }
        tracer.target_codes.add(compiled)

        with tracer:
            self.shell.ex(compiled)
|
<commit_before>import ast
from snoop import snoop
from IPython import get_ipython
from IPython.core.magic import Magics, cell_magic, magics_class
@magics_class
class SnoopMagics(Magics):
@cell_magic
def snoop(self, _line, cell):
shell = get_ipython()
filename = shell.compile.cache(cell)
code = shell.compile(cell, filename, 'exec')
tracer = snoop()
tracer.variable_whitelist = set()
for node in ast.walk(ast.parse(cell)):
if isinstance(node, ast.Name):
tracer.variable_whitelist.add(node.id)
tracer.target_codes.add(code)
with tracer:
shell.ex(code)
<commit_msg>Use shell attribute of magics class<commit_after>
|
import ast
from snoop import snoop
from IPython.core.magic import Magics, cell_magic, magics_class
@magics_class
class SnoopMagics(Magics):
@cell_magic
def snoop(self, _line, cell):
filename = self.shell.compile.cache(cell)
code = self.shell.compile(cell, filename, 'exec')
tracer = snoop()
tracer.variable_whitelist = set()
for node in ast.walk(ast.parse(cell)):
if isinstance(node, ast.Name):
tracer.variable_whitelist.add(node.id)
tracer.target_codes.add(code)
with tracer:
self.shell.ex(code)
|
import ast
from snoop import snoop
from IPython import get_ipython
from IPython.core.magic import Magics, cell_magic, magics_class
@magics_class
class SnoopMagics(Magics):
@cell_magic
def snoop(self, _line, cell):
shell = get_ipython()
filename = shell.compile.cache(cell)
code = shell.compile(cell, filename, 'exec')
tracer = snoop()
tracer.variable_whitelist = set()
for node in ast.walk(ast.parse(cell)):
if isinstance(node, ast.Name):
tracer.variable_whitelist.add(node.id)
tracer.target_codes.add(code)
with tracer:
shell.ex(code)
Use shell attribute of magics classimport ast
from snoop import snoop
from IPython.core.magic import Magics, cell_magic, magics_class
@magics_class
class SnoopMagics(Magics):
@cell_magic
def snoop(self, _line, cell):
filename = self.shell.compile.cache(cell)
code = self.shell.compile(cell, filename, 'exec')
tracer = snoop()
tracer.variable_whitelist = set()
for node in ast.walk(ast.parse(cell)):
if isinstance(node, ast.Name):
tracer.variable_whitelist.add(node.id)
tracer.target_codes.add(code)
with tracer:
self.shell.ex(code)
|
<commit_before>import ast
from snoop import snoop
from IPython import get_ipython
from IPython.core.magic import Magics, cell_magic, magics_class
@magics_class
class SnoopMagics(Magics):
@cell_magic
def snoop(self, _line, cell):
shell = get_ipython()
filename = shell.compile.cache(cell)
code = shell.compile(cell, filename, 'exec')
tracer = snoop()
tracer.variable_whitelist = set()
for node in ast.walk(ast.parse(cell)):
if isinstance(node, ast.Name):
tracer.variable_whitelist.add(node.id)
tracer.target_codes.add(code)
with tracer:
shell.ex(code)
<commit_msg>Use shell attribute of magics class<commit_after>import ast
from snoop import snoop
from IPython.core.magic import Magics, cell_magic, magics_class
@magics_class
class SnoopMagics(Magics):
@cell_magic
def snoop(self, _line, cell):
filename = self.shell.compile.cache(cell)
code = self.shell.compile(cell, filename, 'exec')
tracer = snoop()
tracer.variable_whitelist = set()
for node in ast.walk(ast.parse(cell)):
if isinstance(node, ast.Name):
tracer.variable_whitelist.add(node.id)
tracer.target_codes.add(code)
with tracer:
self.shell.ex(code)
|
e5876287aacda81bac2d9937e285d132c7133094
|
test/tests/python-imports/container.py
|
test/tests/python-imports/container.py
|
import platform
isWindows = platform.system() == 'Windows'
isNotPypy = platform.python_implementation() != 'PyPy'
isCaveman = platform.python_version_tuple()[0] == '2'
if not isWindows:
import curses
import readline
if isCaveman:
import gdbm
else:
import dbm.gnu
if isNotPypy:
# PyPy and Python 2 don't support lzma
import lzma
assert(lzma.decompress(lzma.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
import bz2
assert(bz2.decompress(bz2.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
import zlib
assert(zlib.decompress(zlib.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
|
import platform
isWindows = platform.system() == 'Windows'
isNotPypy = platform.python_implementation() != 'PyPy'
isCaveman = platform.python_version_tuple()[0] == '2'
if not isWindows:
import curses
import readline
if isCaveman:
import gdbm
else:
import dbm.gnu
if isNotPypy:
# PyPy and Python 2 don't support lzma
import lzma
assert(lzma.decompress(lzma.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
import bz2
assert(bz2.decompress(bz2.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
import zlib
assert(zlib.decompress(zlib.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
if not isCaveman:
import lzma
assert(lzma.decompress(lzma.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
|
Add "lzma" to "python-imports" test
|
Add "lzma" to "python-imports" test
|
Python
|
apache-2.0
|
docker-library/official-images,infosiftr/stackbrew,31z4/official-images,dinogun/official-images,docker-library/official-images,docker-solr/official-images,docker-library/official-images,docker-library/official-images,neo-technology/docker-official-images,neo-technology/docker-official-images,thresheek/official-images,31z4/official-images,31z4/official-images,dinogun/official-images,neo-technology/docker-official-images,infosiftr/stackbrew,thresheek/official-images,dinogun/official-images,thresheek/official-images,neo-technology/docker-official-images,thresheek/official-images,neo-technology/docker-official-images,dinogun/official-images,31z4/official-images,infosiftr/stackbrew,thresheek/official-images,thresheek/official-images,dinogun/official-images,neo-technology/docker-official-images,docker-solr/official-images,docker-solr/official-images,neo-technology/docker-official-images,docker-library/official-images,docker-solr/official-images,neo-technology/docker-official-images,thresheek/official-images,31z4/official-images,dinogun/official-images,dinogun/official-images,dinogun/official-images,infosiftr/stackbrew,infosiftr/stackbrew,thresheek/official-images,docker-library/official-images,docker-library/official-images,docker-solr/official-images,31z4/official-images,infosiftr/stackbrew,thresheek/official-images,neo-technology/docker-official-images,thresheek/official-images,neo-technology/docker-official-images,thresheek/official-images,infosiftr/stackbrew,docker-library/official-images,docker-solr/official-images,docker-solr/official-images,thresheek/official-images,neo-technology/docker-official-images,31z4/official-images,docker-library/official-images,docker-library/official-images,docker-solr/official-images,docker-library/official-images,docker-solr/official-images,infosiftr/stackbrew,31z4/official-images,infosiftr/stackbrew,docker-solr/official-images,dinogun/official-images,docker-library/official-images,neo-technology/docker-official-images,dinogun/official
-images,neo-technology/docker-official-images,infosiftr/stackbrew,dinogun/official-images,infosiftr/stackbrew,31z4/official-images,docker-solr/official-images,thresheek/official-images,infosiftr/stackbrew,docker-library/official-images,dinogun/official-images,docker-library/official-images,thresheek/official-images,docker-solr/official-images,infosiftr/stackbrew,31z4/official-images,dinogun/official-images,infosiftr/stackbrew,docker-solr/official-images,31z4/official-images,neo-technology/docker-official-images,31z4/official-images,docker-solr/official-images,31z4/official-images,31z4/official-images
|
import platform
isWindows = platform.system() == 'Windows'
isNotPypy = platform.python_implementation() != 'PyPy'
isCaveman = platform.python_version_tuple()[0] == '2'
if not isWindows:
import curses
import readline
if isCaveman:
import gdbm
else:
import dbm.gnu
if isNotPypy:
# PyPy and Python 2 don't support lzma
import lzma
assert(lzma.decompress(lzma.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
import bz2
assert(bz2.decompress(bz2.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
import zlib
assert(zlib.decompress(zlib.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
Add "lzma" to "python-imports" test
|
import platform
isWindows = platform.system() == 'Windows'
isNotPypy = platform.python_implementation() != 'PyPy'
isCaveman = platform.python_version_tuple()[0] == '2'
if not isWindows:
import curses
import readline
if isCaveman:
import gdbm
else:
import dbm.gnu
if isNotPypy:
# PyPy and Python 2 don't support lzma
import lzma
assert(lzma.decompress(lzma.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
import bz2
assert(bz2.decompress(bz2.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
import zlib
assert(zlib.decompress(zlib.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
if not isCaveman:
import lzma
assert(lzma.decompress(lzma.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
|
<commit_before>import platform
isWindows = platform.system() == 'Windows'
isNotPypy = platform.python_implementation() != 'PyPy'
isCaveman = platform.python_version_tuple()[0] == '2'
if not isWindows:
import curses
import readline
if isCaveman:
import gdbm
else:
import dbm.gnu
if isNotPypy:
# PyPy and Python 2 don't support lzma
import lzma
assert(lzma.decompress(lzma.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
import bz2
assert(bz2.decompress(bz2.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
import zlib
assert(zlib.decompress(zlib.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
<commit_msg>Add "lzma" to "python-imports" test<commit_after>
|
import platform
isWindows = platform.system() == 'Windows'
isNotPypy = platform.python_implementation() != 'PyPy'
isCaveman = platform.python_version_tuple()[0] == '2'
if not isWindows:
import curses
import readline
if isCaveman:
import gdbm
else:
import dbm.gnu
if isNotPypy:
# PyPy and Python 2 don't support lzma
import lzma
assert(lzma.decompress(lzma.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
import bz2
assert(bz2.decompress(bz2.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
import zlib
assert(zlib.decompress(zlib.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
if not isCaveman:
import lzma
assert(lzma.decompress(lzma.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
|
import platform
isWindows = platform.system() == 'Windows'
isNotPypy = platform.python_implementation() != 'PyPy'
isCaveman = platform.python_version_tuple()[0] == '2'
if not isWindows:
import curses
import readline
if isCaveman:
import gdbm
else:
import dbm.gnu
if isNotPypy:
# PyPy and Python 2 don't support lzma
import lzma
assert(lzma.decompress(lzma.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
import bz2
assert(bz2.decompress(bz2.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
import zlib
assert(zlib.decompress(zlib.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
Add "lzma" to "python-imports" testimport platform
isWindows = platform.system() == 'Windows'
isNotPypy = platform.python_implementation() != 'PyPy'
isCaveman = platform.python_version_tuple()[0] == '2'
if not isWindows:
import curses
import readline
if isCaveman:
import gdbm
else:
import dbm.gnu
if isNotPypy:
# PyPy and Python 2 don't support lzma
import lzma
assert(lzma.decompress(lzma.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
import bz2
assert(bz2.decompress(bz2.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
import zlib
assert(zlib.decompress(zlib.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
if not isCaveman:
import lzma
assert(lzma.decompress(lzma.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
|
<commit_before>import platform
isWindows = platform.system() == 'Windows'
isNotPypy = platform.python_implementation() != 'PyPy'
isCaveman = platform.python_version_tuple()[0] == '2'
if not isWindows:
import curses
import readline
if isCaveman:
import gdbm
else:
import dbm.gnu
if isNotPypy:
# PyPy and Python 2 don't support lzma
import lzma
assert(lzma.decompress(lzma.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
import bz2
assert(bz2.decompress(bz2.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
import zlib
assert(zlib.decompress(zlib.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
<commit_msg>Add "lzma" to "python-imports" test<commit_after>import platform
isWindows = platform.system() == 'Windows'
isNotPypy = platform.python_implementation() != 'PyPy'
isCaveman = platform.python_version_tuple()[0] == '2'
if not isWindows:
import curses
import readline
if isCaveman:
import gdbm
else:
import dbm.gnu
if isNotPypy:
# PyPy and Python 2 don't support lzma
import lzma
assert(lzma.decompress(lzma.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
import bz2
assert(bz2.decompress(bz2.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
import zlib
assert(zlib.decompress(zlib.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
if not isCaveman:
import lzma
assert(lzma.decompress(lzma.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
|
a5791ea2a229b13eb84cc92bd20d87df93687d5e
|
cactus/skeleton/plugins/sprites.disabled.py
|
cactus/skeleton/plugins/sprites.disabled.py
|
import os
import sys
import pipes
import shutil
import subprocess
"""
This plugin uses glue to sprite images:
http://glue.readthedocs.org/en/latest/quickstart.html
Install:
(Only if you want to sprite jpg too)
brew install libjpeg
(Only if you want to optimize pngs with optipng)
brew install optipng
sudo easy_install pip
sudo pip uninstall pil
sudo pip install pil
sudo pip install glue
"""
try:
import glue
except Exception, e:
sys.exit('Could not use glue: %s\nMaybe install: sudo easy_install glue' % e)
IMG_PATH = 'static/img/sprites'
CSS_PATH = 'static/css/sprites'
KEY = '_PREV_CHECKSUM'
def checksum(path):
command = 'md5 `find %s -type f`' % pipes.quote(IMG_PATH)
return subprocess.check_output(command, shell=True)
def preBuild(site):
currChecksum = checksum(IMG_PATH)
prevChecksum = getattr(site, KEY, None)
# Don't run if none of the images has changed
if currChecksum == prevChecksum:
return
if os.path.isdir(CSS_PATH):
shutil.rmtree(CSS_PATH)
os.mkdir(CSS_PATH)
os.system('glue --cachebuster --crop --optipng "%s" "%s" --project' % (IMG_PATH, CSS_PATH))
setattr(site, KEY, currChecksum)
|
import os
import sys
import pipes
import shutil
import subprocess
"""
This plugin uses glue to sprite images:
http://glue.readthedocs.org/en/latest/quickstart.html
Install:
(Only if you want to sprite jpg too)
brew install libjpeg
sudo easy_install pip
sudo pip uninstall pil
sudo pip install pil
sudo pip install glue
"""
try:
import glue
except Exception, e:
sys.exit('Could not use glue: %s\nMaybe install: sudo easy_install glue' % e)
IMG_PATH = 'static/img/sprites'
CSS_PATH = 'static/css/sprites'
KEY = '_PREV_CHECKSUM'
def checksum(path):
    """Return an opaque fingerprint of every file under *path*.

    Shells out to ``md5`` over the output of ``find`` (BSD/macOS tooling);
    the raw combined stdout is used only for change detection, never parsed.
    """
    # Fix: the *path* parameter was previously ignored — the command always
    # hashed the global IMG_PATH.  The sole caller passes IMG_PATH, so using
    # the parameter is behaviour-compatible while making the helper honest.
    command = 'md5 `find %s -type f`' % pipes.quote(path)
    return subprocess.check_output(command, shell=True)
def preBuild(site):
    """Regenerate sprite CSS with glue, but only when source images changed."""
    # Fingerprint of everything currently under IMG_PATH.
    currChecksum = checksum(IMG_PATH)
    # Fingerprint stored on the site object by the previous build, if any.
    prevChecksum = getattr(site, KEY, None)

    # Don't run if none of the images has changed
    if currChecksum == prevChecksum:
        return

    # Rebuild the output directory from scratch so stale sprite CSS is gone.
    if os.path.isdir(CSS_PATH):
        shutil.rmtree(CSS_PATH)

    os.mkdir(CSS_PATH)

    # --cachebuster hashes sprite URLs; --crop trims transparent borders.
    os.system('glue --cachebuster --crop "%s" "%s" --project' % (IMG_PATH, CSS_PATH))

    # Remember the fingerprint so the next build can skip unchanged images.
    setattr(site, KEY, currChecksum)
|
Remove deprecated --optipng from glue options
|
Remove deprecated --optipng from glue options
Optipng is now deprecated in glue, so including that option in
Cactus doesn't really make sense.
https://github.com/jorgebastida/glue/blob/master/docs/changelog.rst#09
|
Python
|
bsd-3-clause
|
dreadatour/Cactus,fjxhkj/Cactus,juvham/Cactus,andyzsf/Cactus-,Knownly/Cactus,danielmorosan/Cactus,ibarria0/Cactus,danielmorosan/Cactus,page-io/Cactus,koobs/Cactus,andyzsf/Cactus-,PegasusWang/Cactus,chaudum/Cactus,danielmorosan/Cactus,dreadatour/Cactus,juvham/Cactus,eudicots/Cactus,page-io/Cactus,koobs/Cactus,dreadatour/Cactus,chaudum/Cactus,ibarria0/Cactus,Bluetide/Cactus,eudicots/Cactus,fjxhkj/Cactus,koenbok/Cactus,Bluetide/Cactus,PegasusWang/Cactus,Knownly/Cactus,page-io/Cactus,andyzsf/Cactus-,eudicots/Cactus,ibarria0/Cactus,Knownly/Cactus,koenbok/Cactus,PegasusWang/Cactus,koenbok/Cactus,chaudum/Cactus,Bluetide/Cactus,juvham/Cactus,koobs/Cactus,fjxhkj/Cactus
|
import os
import sys
import pipes
import shutil
import subprocess
"""
This plugin uses glue to sprite images:
http://glue.readthedocs.org/en/latest/quickstart.html
Install:
(Only if you want to sprite jpg too)
brew install libjpeg
(Only if you want to optimize pngs with optipng)
brew install optipng
sudo easy_install pip
sudo pip uninstall pil
sudo pip install pil
sudo pip install glue
"""
try:
import glue
except Exception, e:
sys.exit('Could not use glue: %s\nMaybe install: sudo easy_install glue' % e)
IMG_PATH = 'static/img/sprites'
CSS_PATH = 'static/css/sprites'
KEY = '_PREV_CHECKSUM'
def checksum(path):
command = 'md5 `find %s -type f`' % pipes.quote(IMG_PATH)
return subprocess.check_output(command, shell=True)
def preBuild(site):
currChecksum = checksum(IMG_PATH)
prevChecksum = getattr(site, KEY, None)
# Don't run if none of the images has changed
if currChecksum == prevChecksum:
return
if os.path.isdir(CSS_PATH):
shutil.rmtree(CSS_PATH)
os.mkdir(CSS_PATH)
os.system('glue --cachebuster --crop --optipng "%s" "%s" --project' % (IMG_PATH, CSS_PATH))
setattr(site, KEY, currChecksum)Remove deprecated --optipng from glue options
Optipng is now deprecated in glue, so including that option in
Cactus doesn't really make sense.
https://github.com/jorgebastida/glue/blob/master/docs/changelog.rst#09
|
import os
import sys
import pipes
import shutil
import subprocess
"""
This plugin uses glue to sprite images:
http://glue.readthedocs.org/en/latest/quickstart.html
Install:
(Only if you want to sprite jpg too)
brew install libjpeg
sudo easy_install pip
sudo pip uninstall pil
sudo pip install pil
sudo pip install glue
"""
try:
import glue
except Exception, e:
sys.exit('Could not use glue: %s\nMaybe install: sudo easy_install glue' % e)
IMG_PATH = 'static/img/sprites'
CSS_PATH = 'static/css/sprites'
KEY = '_PREV_CHECKSUM'
def checksum(path):
command = 'md5 `find %s -type f`' % pipes.quote(IMG_PATH)
return subprocess.check_output(command, shell=True)
def preBuild(site):
currChecksum = checksum(IMG_PATH)
prevChecksum = getattr(site, KEY, None)
# Don't run if none of the images has changed
if currChecksum == prevChecksum:
return
if os.path.isdir(CSS_PATH):
shutil.rmtree(CSS_PATH)
os.mkdir(CSS_PATH)
os.system('glue --cachebuster --crop "%s" "%s" --project' % (IMG_PATH, CSS_PATH))
setattr(site, KEY, currChecksum)
|
<commit_before>import os
import sys
import pipes
import shutil
import subprocess
"""
This plugin uses glue to sprite images:
http://glue.readthedocs.org/en/latest/quickstart.html
Install:
(Only if you want to sprite jpg too)
brew install libjpeg
(Only if you want to optimize pngs with optipng)
brew install optipng
sudo easy_install pip
sudo pip uninstall pil
sudo pip install pil
sudo pip install glue
"""
try:
import glue
except Exception, e:
sys.exit('Could not use glue: %s\nMaybe install: sudo easy_install glue' % e)
IMG_PATH = 'static/img/sprites'
CSS_PATH = 'static/css/sprites'
KEY = '_PREV_CHECKSUM'
def checksum(path):
command = 'md5 `find %s -type f`' % pipes.quote(IMG_PATH)
return subprocess.check_output(command, shell=True)
def preBuild(site):
currChecksum = checksum(IMG_PATH)
prevChecksum = getattr(site, KEY, None)
# Don't run if none of the images has changed
if currChecksum == prevChecksum:
return
if os.path.isdir(CSS_PATH):
shutil.rmtree(CSS_PATH)
os.mkdir(CSS_PATH)
os.system('glue --cachebuster --crop --optipng "%s" "%s" --project' % (IMG_PATH, CSS_PATH))
setattr(site, KEY, currChecksum)<commit_msg>Remove deprecated --optipng from glue options
Optipng is now deprecated in glue, so including that option in
Cactus doesn't really make sense.
https://github.com/jorgebastida/glue/blob/master/docs/changelog.rst#09<commit_after>
|
import os
import sys
import pipes
import shutil
import subprocess
"""
This plugin uses glue to sprite images:
http://glue.readthedocs.org/en/latest/quickstart.html
Install:
(Only if you want to sprite jpg too)
brew install libjpeg
sudo easy_install pip
sudo pip uninstall pil
sudo pip install pil
sudo pip install glue
"""
try:
import glue
except Exception, e:
sys.exit('Could not use glue: %s\nMaybe install: sudo easy_install glue' % e)
IMG_PATH = 'static/img/sprites'
CSS_PATH = 'static/css/sprites'
KEY = '_PREV_CHECKSUM'
def checksum(path):
command = 'md5 `find %s -type f`' % pipes.quote(IMG_PATH)
return subprocess.check_output(command, shell=True)
def preBuild(site):
currChecksum = checksum(IMG_PATH)
prevChecksum = getattr(site, KEY, None)
# Don't run if none of the images has changed
if currChecksum == prevChecksum:
return
if os.path.isdir(CSS_PATH):
shutil.rmtree(CSS_PATH)
os.mkdir(CSS_PATH)
os.system('glue --cachebuster --crop "%s" "%s" --project' % (IMG_PATH, CSS_PATH))
setattr(site, KEY, currChecksum)
|
import os
import sys
import pipes
import shutil
import subprocess
"""
This plugin uses glue to sprite images:
http://glue.readthedocs.org/en/latest/quickstart.html
Install:
(Only if you want to sprite jpg too)
brew install libjpeg
(Only if you want to optimize pngs with optipng)
brew install optipng
sudo easy_install pip
sudo pip uninstall pil
sudo pip install pil
sudo pip install glue
"""
try:
import glue
except Exception, e:
sys.exit('Could not use glue: %s\nMaybe install: sudo easy_install glue' % e)
IMG_PATH = 'static/img/sprites'
CSS_PATH = 'static/css/sprites'
KEY = '_PREV_CHECKSUM'
def checksum(path):
command = 'md5 `find %s -type f`' % pipes.quote(IMG_PATH)
return subprocess.check_output(command, shell=True)
def preBuild(site):
currChecksum = checksum(IMG_PATH)
prevChecksum = getattr(site, KEY, None)
# Don't run if none of the images has changed
if currChecksum == prevChecksum:
return
if os.path.isdir(CSS_PATH):
shutil.rmtree(CSS_PATH)
os.mkdir(CSS_PATH)
os.system('glue --cachebuster --crop --optipng "%s" "%s" --project' % (IMG_PATH, CSS_PATH))
setattr(site, KEY, currChecksum)Remove deprecated --optipng from glue options
Optipng is now deprecated in glue, so including that option in
Cactus doesn't really make sense.
https://github.com/jorgebastida/glue/blob/master/docs/changelog.rst#09import os
import sys
import pipes
import shutil
import subprocess
"""
This plugin uses glue to sprite images:
http://glue.readthedocs.org/en/latest/quickstart.html
Install:
(Only if you want to sprite jpg too)
brew install libjpeg
sudo easy_install pip
sudo pip uninstall pil
sudo pip install pil
sudo pip install glue
"""
try:
import glue
except Exception, e:
sys.exit('Could not use glue: %s\nMaybe install: sudo easy_install glue' % e)
IMG_PATH = 'static/img/sprites'
CSS_PATH = 'static/css/sprites'
KEY = '_PREV_CHECKSUM'
def checksum(path):
command = 'md5 `find %s -type f`' % pipes.quote(IMG_PATH)
return subprocess.check_output(command, shell=True)
def preBuild(site):
currChecksum = checksum(IMG_PATH)
prevChecksum = getattr(site, KEY, None)
# Don't run if none of the images has changed
if currChecksum == prevChecksum:
return
if os.path.isdir(CSS_PATH):
shutil.rmtree(CSS_PATH)
os.mkdir(CSS_PATH)
os.system('glue --cachebuster --crop "%s" "%s" --project' % (IMG_PATH, CSS_PATH))
setattr(site, KEY, currChecksum)
|
<commit_before>import os
import sys
import pipes
import shutil
import subprocess
"""
This plugin uses glue to sprite images:
http://glue.readthedocs.org/en/latest/quickstart.html
Install:
(Only if you want to sprite jpg too)
brew install libjpeg
(Only if you want to optimize pngs with optipng)
brew install optipng
sudo easy_install pip
sudo pip uninstall pil
sudo pip install pil
sudo pip install glue
"""
try:
import glue
except Exception, e:
sys.exit('Could not use glue: %s\nMaybe install: sudo easy_install glue' % e)
IMG_PATH = 'static/img/sprites'
CSS_PATH = 'static/css/sprites'
KEY = '_PREV_CHECKSUM'
def checksum(path):
command = 'md5 `find %s -type f`' % pipes.quote(IMG_PATH)
return subprocess.check_output(command, shell=True)
def preBuild(site):
currChecksum = checksum(IMG_PATH)
prevChecksum = getattr(site, KEY, None)
# Don't run if none of the images has changed
if currChecksum == prevChecksum:
return
if os.path.isdir(CSS_PATH):
shutil.rmtree(CSS_PATH)
os.mkdir(CSS_PATH)
os.system('glue --cachebuster --crop --optipng "%s" "%s" --project' % (IMG_PATH, CSS_PATH))
setattr(site, KEY, currChecksum)<commit_msg>Remove deprecated --optipng from glue options
Optipng is now deprecated in glue, so including that option in
Cactus doesn't really make sense.
https://github.com/jorgebastida/glue/blob/master/docs/changelog.rst#09<commit_after>import os
import sys
import pipes
import shutil
import subprocess
"""
This plugin uses glue to sprite images:
http://glue.readthedocs.org/en/latest/quickstart.html
Install:
(Only if you want to sprite jpg too)
brew install libjpeg
sudo easy_install pip
sudo pip uninstall pil
sudo pip install pil
sudo pip install glue
"""
try:
import glue
except Exception, e:
sys.exit('Could not use glue: %s\nMaybe install: sudo easy_install glue' % e)
IMG_PATH = 'static/img/sprites'
CSS_PATH = 'static/css/sprites'
KEY = '_PREV_CHECKSUM'
def checksum(path):
command = 'md5 `find %s -type f`' % pipes.quote(IMG_PATH)
return subprocess.check_output(command, shell=True)
def preBuild(site):
currChecksum = checksum(IMG_PATH)
prevChecksum = getattr(site, KEY, None)
# Don't run if none of the images has changed
if currChecksum == prevChecksum:
return
if os.path.isdir(CSS_PATH):
shutil.rmtree(CSS_PATH)
os.mkdir(CSS_PATH)
os.system('glue --cachebuster --crop "%s" "%s" --project' % (IMG_PATH, CSS_PATH))
setattr(site, KEY, currChecksum)
|
1da0f795cdedd1de3bdcc03d6171f9a143ee8e5b
|
backdrop/admin/config/development.py
|
backdrop/admin/config/development.py
|
LOG_LEVEL = "DEBUG"
SINGLE_SIGN_ON = True
BACKDROP_ADMIN_UI_HOST = "http://backdrop-admin.dev.gov.uk"
ALLOW_TEST_SIGNIN=True
SECRET_KEY = "something unique and secret"
DATABASE_NAME = "backdrop"
MONGO_HOST = 'localhost'
MONGO_PORT = 27017
try:
from development_environment import *
except ImportError:
from development_environment_sample import *
|
LOG_LEVEL = "DEBUG"
BACKDROP_ADMIN_UI_HOST = "http://backdrop-admin.dev.gov.uk"
ALLOW_TEST_SIGNIN=True
SECRET_KEY = "something unique and secret"
DATABASE_NAME = "backdrop"
MONGO_HOST = 'localhost'
MONGO_PORT = 27017
try:
from development_environment import *
except ImportError:
from development_environment_sample import *
|
Remove flag to enable single sign on
|
Remove flag to enable single sign on
|
Python
|
mit
|
alphagov/backdrop,alphagov/backdrop,alphagov/backdrop
|
LOG_LEVEL = "DEBUG"
SINGLE_SIGN_ON = True
BACKDROP_ADMIN_UI_HOST = "http://backdrop-admin.dev.gov.uk"
ALLOW_TEST_SIGNIN=True
SECRET_KEY = "something unique and secret"
DATABASE_NAME = "backdrop"
MONGO_HOST = 'localhost'
MONGO_PORT = 27017
try:
from development_environment import *
except ImportError:
from development_environment_sample import *
Remove flag to enable single sign on
|
LOG_LEVEL = "DEBUG"
BACKDROP_ADMIN_UI_HOST = "http://backdrop-admin.dev.gov.uk"
ALLOW_TEST_SIGNIN=True
SECRET_KEY = "something unique and secret"
DATABASE_NAME = "backdrop"
MONGO_HOST = 'localhost'
MONGO_PORT = 27017
try:
from development_environment import *
except ImportError:
from development_environment_sample import *
|
<commit_before>LOG_LEVEL = "DEBUG"
SINGLE_SIGN_ON = True
BACKDROP_ADMIN_UI_HOST = "http://backdrop-admin.dev.gov.uk"
ALLOW_TEST_SIGNIN=True
SECRET_KEY = "something unique and secret"
DATABASE_NAME = "backdrop"
MONGO_HOST = 'localhost'
MONGO_PORT = 27017
try:
from development_environment import *
except ImportError:
from development_environment_sample import *
<commit_msg>Remove flag to enable single sign on<commit_after>
|
LOG_LEVEL = "DEBUG"
BACKDROP_ADMIN_UI_HOST = "http://backdrop-admin.dev.gov.uk"
ALLOW_TEST_SIGNIN=True
SECRET_KEY = "something unique and secret"
DATABASE_NAME = "backdrop"
MONGO_HOST = 'localhost'
MONGO_PORT = 27017
try:
from development_environment import *
except ImportError:
from development_environment_sample import *
|
LOG_LEVEL = "DEBUG"
SINGLE_SIGN_ON = True
BACKDROP_ADMIN_UI_HOST = "http://backdrop-admin.dev.gov.uk"
ALLOW_TEST_SIGNIN=True
SECRET_KEY = "something unique and secret"
DATABASE_NAME = "backdrop"
MONGO_HOST = 'localhost'
MONGO_PORT = 27017
try:
from development_environment import *
except ImportError:
from development_environment_sample import *
Remove flag to enable single sign onLOG_LEVEL = "DEBUG"
BACKDROP_ADMIN_UI_HOST = "http://backdrop-admin.dev.gov.uk"
ALLOW_TEST_SIGNIN=True
SECRET_KEY = "something unique and secret"
DATABASE_NAME = "backdrop"
MONGO_HOST = 'localhost'
MONGO_PORT = 27017
try:
from development_environment import *
except ImportError:
from development_environment_sample import *
|
<commit_before>LOG_LEVEL = "DEBUG"
SINGLE_SIGN_ON = True
BACKDROP_ADMIN_UI_HOST = "http://backdrop-admin.dev.gov.uk"
ALLOW_TEST_SIGNIN=True
SECRET_KEY = "something unique and secret"
DATABASE_NAME = "backdrop"
MONGO_HOST = 'localhost'
MONGO_PORT = 27017
try:
from development_environment import *
except ImportError:
from development_environment_sample import *
<commit_msg>Remove flag to enable single sign on<commit_after>LOG_LEVEL = "DEBUG"
BACKDROP_ADMIN_UI_HOST = "http://backdrop-admin.dev.gov.uk"
ALLOW_TEST_SIGNIN=True
SECRET_KEY = "something unique and secret"
DATABASE_NAME = "backdrop"
MONGO_HOST = 'localhost'
MONGO_PORT = 27017
try:
from development_environment import *
except ImportError:
from development_environment_sample import *
|
52bbd54503cbf3fe3e4db2e8033967351bb638b0
|
pi/web/temperature_client.py
|
pi/web/temperature_client.py
|
import requests
"""Client to retrieve the current temperature reading"""
class TemperatureClient:
def __init__(self, host, port):
self.host = host
self.port = port
def get(self):
"""Retrieve the reading from the temperature server"""
location = "http://{0}:{1}/".format(self.host, self.port)
response = requests.get(location)
return r.json()
|
import requests
"""Client to retrieve the current temperature reading"""
class TemperatureClient:
def __init__(self, host, port):
self.host = host
self.port = port
def get(self):
"""Retrieve the reading from the temperature server"""
location = "http://{0}:{1}/".format(self.host, self.port)
response = requests.get(location)
return response.json()
|
Fix incorrect variable from TemperatureClient
|
Fix incorrect variable from TemperatureClient
|
Python
|
mit
|
drewtempelmeyer/harbor-roasts
|
import requests
"""Client to retrieve the current temperature reading"""
class TemperatureClient:
def __init__(self, host, port):
self.host = host
self.port = port
def get(self):
"""Retrieve the reading from the temperature server"""
location = "http://{0}:{1}/".format(self.host, self.port)
response = requests.get(location)
return r.json()
Fix incorrect variable from TemperatureClient
|
import requests
"""Client to retrieve the current temperature reading"""
class TemperatureClient:
def __init__(self, host, port):
self.host = host
self.port = port
def get(self):
"""Retrieve the reading from the temperature server"""
location = "http://{0}:{1}/".format(self.host, self.port)
response = requests.get(location)
return response.json()
|
<commit_before>import requests
"""Client to retrieve the current temperature reading"""
class TemperatureClient:
def __init__(self, host, port):
self.host = host
self.port = port
def get(self):
"""Retrieve the reading from the temperature server"""
location = "http://{0}:{1}/".format(self.host, self.port)
response = requests.get(location)
return r.json()
<commit_msg>Fix incorrect variable from TemperatureClient<commit_after>
|
import requests
"""Client to retrieve the current temperature reading"""
class TemperatureClient:
def __init__(self, host, port):
self.host = host
self.port = port
def get(self):
"""Retrieve the reading from the temperature server"""
location = "http://{0}:{1}/".format(self.host, self.port)
response = requests.get(location)
return response.json()
|
import requests
"""Client to retrieve the current temperature reading"""
class TemperatureClient:
def __init__(self, host, port):
self.host = host
self.port = port
def get(self):
"""Retrieve the reading from the temperature server"""
location = "http://{0}:{1}/".format(self.host, self.port)
response = requests.get(location)
return r.json()
Fix incorrect variable from TemperatureClientimport requests
"""Client to retrieve the current temperature reading"""
class TemperatureClient:
def __init__(self, host, port):
self.host = host
self.port = port
def get(self):
"""Retrieve the reading from the temperature server"""
location = "http://{0}:{1}/".format(self.host, self.port)
response = requests.get(location)
return response.json()
|
<commit_before>import requests
"""Client to retrieve the current temperature reading"""
class TemperatureClient:
def __init__(self, host, port):
self.host = host
self.port = port
def get(self):
"""Retrieve the reading from the temperature server"""
location = "http://{0}:{1}/".format(self.host, self.port)
response = requests.get(location)
return r.json()
<commit_msg>Fix incorrect variable from TemperatureClient<commit_after>import requests
"""Client to retrieve the current temperature reading"""
class TemperatureClient:
def __init__(self, host, port):
self.host = host
self.port = port
def get(self):
"""Retrieve the reading from the temperature server"""
location = "http://{0}:{1}/".format(self.host, self.port)
response = requests.get(location)
return response.json()
|
e2f2fbc0df695102c4d51bdf0e633798c3ae8417
|
yawf/messages/submessage.py
|
yawf/messages/submessage.py
|
from . import Message
class Submessage(object):
need_lock_object = True
def __init__(self, obj, message_id, sender, raw_params, need_lock_object=True):
self.obj = obj
self.sender = sender
self.message_id = message_id
self.raw_params = raw_params
self.need_lock_object = need_lock_object
super(Submessage, self).__init__()
def as_message(self, parent):
return Message(self.sender, self.message_id, self.raw_params,
parent_message_id=parent.unique_id,
message_group=parent.message_group,
)
def dispatch(self, parent_obj, parent_message):
from yawf.dispatch import dispatch_message
message = self.as_message(parent_message)
return dispatch_message(
self.obj,
message=message,
defer_side_effect=True,
need_lock_object=self.need_lock_object)
class RecursiveSubmessage(Submessage):
def __init__(self, message_id, sender, raw_params):
super(RecursiveSubmessage, self).__init__(
obj=None,
sender=sender, message_id=message_id, raw_params=raw_params)
def dispatch(self, parent_obj, parent_message):
from yawf.dispatch import dispatch_message
message = self.as_message(parent_message)
return dispatch_message(
parent_obj,
message=message,
defer_side_effect=True,
need_lock_object=False)
|
from . import Message
class Submessage(object):
need_lock_object = True
def __init__(self, obj, message_id, sender, raw_params=None, need_lock_object=True):
self.obj = obj
self.sender = sender
self.message_id = message_id
self.raw_params = raw_params
self.need_lock_object = need_lock_object
super(Submessage, self).__init__()
def as_message(self, parent):
return Message(self.sender, self.message_id, self.raw_params,
parent_message_id=parent.unique_id,
message_group=parent.message_group,
)
def dispatch(self, parent_obj, parent_message):
from yawf.dispatch import dispatch_message
message = self.as_message(parent_message)
return dispatch_message(
self.obj,
message=message,
defer_side_effect=True,
need_lock_object=self.need_lock_object)
class RecursiveSubmessage(Submessage):
def __init__(self, message_id, sender, raw_params=None):
super(RecursiveSubmessage, self).__init__(
obj=None,
sender=sender, message_id=message_id, raw_params=raw_params)
def dispatch(self, parent_obj, parent_message):
from yawf.dispatch import dispatch_message
message = self.as_message(parent_message)
return dispatch_message(
parent_obj,
message=message,
defer_side_effect=True,
need_lock_object=False)
|
Make raw_params an optional argument in Submessage
|
Make raw_params an optional argument in Submessage
|
Python
|
mit
|
freevoid/yawf
|
from . import Message
class Submessage(object):
need_lock_object = True
def __init__(self, obj, message_id, sender, raw_params, need_lock_object=True):
self.obj = obj
self.sender = sender
self.message_id = message_id
self.raw_params = raw_params
self.need_lock_object = need_lock_object
super(Submessage, self).__init__()
def as_message(self, parent):
return Message(self.sender, self.message_id, self.raw_params,
parent_message_id=parent.unique_id,
message_group=parent.message_group,
)
def dispatch(self, parent_obj, parent_message):
from yawf.dispatch import dispatch_message
message = self.as_message(parent_message)
return dispatch_message(
self.obj,
message=message,
defer_side_effect=True,
need_lock_object=self.need_lock_object)
class RecursiveSubmessage(Submessage):
def __init__(self, message_id, sender, raw_params):
super(RecursiveSubmessage, self).__init__(
obj=None,
sender=sender, message_id=message_id, raw_params=raw_params)
def dispatch(self, parent_obj, parent_message):
from yawf.dispatch import dispatch_message
message = self.as_message(parent_message)
return dispatch_message(
parent_obj,
message=message,
defer_side_effect=True,
need_lock_object=False)
Make raw_params an optional argument in Submessage
|
from . import Message
class Submessage(object):
need_lock_object = True
def __init__(self, obj, message_id, sender, raw_params=None, need_lock_object=True):
self.obj = obj
self.sender = sender
self.message_id = message_id
self.raw_params = raw_params
self.need_lock_object = need_lock_object
super(Submessage, self).__init__()
def as_message(self, parent):
return Message(self.sender, self.message_id, self.raw_params,
parent_message_id=parent.unique_id,
message_group=parent.message_group,
)
def dispatch(self, parent_obj, parent_message):
from yawf.dispatch import dispatch_message
message = self.as_message(parent_message)
return dispatch_message(
self.obj,
message=message,
defer_side_effect=True,
need_lock_object=self.need_lock_object)
class RecursiveSubmessage(Submessage):
def __init__(self, message_id, sender, raw_params=None):
super(RecursiveSubmessage, self).__init__(
obj=None,
sender=sender, message_id=message_id, raw_params=raw_params)
def dispatch(self, parent_obj, parent_message):
from yawf.dispatch import dispatch_message
message = self.as_message(parent_message)
return dispatch_message(
parent_obj,
message=message,
defer_side_effect=True,
need_lock_object=False)
|
<commit_before>from . import Message
class Submessage(object):
need_lock_object = True
def __init__(self, obj, message_id, sender, raw_params, need_lock_object=True):
self.obj = obj
self.sender = sender
self.message_id = message_id
self.raw_params = raw_params
self.need_lock_object = need_lock_object
super(Submessage, self).__init__()
def as_message(self, parent):
return Message(self.sender, self.message_id, self.raw_params,
parent_message_id=parent.unique_id,
message_group=parent.message_group,
)
def dispatch(self, parent_obj, parent_message):
from yawf.dispatch import dispatch_message
message = self.as_message(parent_message)
return dispatch_message(
self.obj,
message=message,
defer_side_effect=True,
need_lock_object=self.need_lock_object)
class RecursiveSubmessage(Submessage):
def __init__(self, message_id, sender, raw_params):
super(RecursiveSubmessage, self).__init__(
obj=None,
sender=sender, message_id=message_id, raw_params=raw_params)
def dispatch(self, parent_obj, parent_message):
from yawf.dispatch import dispatch_message
message = self.as_message(parent_message)
return dispatch_message(
parent_obj,
message=message,
defer_side_effect=True,
need_lock_object=False)
<commit_msg>Make raw_params an optional argument in Submessage<commit_after>
|
from . import Message
class Submessage(object):
need_lock_object = True
def __init__(self, obj, message_id, sender, raw_params=None, need_lock_object=True):
self.obj = obj
self.sender = sender
self.message_id = message_id
self.raw_params = raw_params
self.need_lock_object = need_lock_object
super(Submessage, self).__init__()
def as_message(self, parent):
return Message(self.sender, self.message_id, self.raw_params,
parent_message_id=parent.unique_id,
message_group=parent.message_group,
)
def dispatch(self, parent_obj, parent_message):
from yawf.dispatch import dispatch_message
message = self.as_message(parent_message)
return dispatch_message(
self.obj,
message=message,
defer_side_effect=True,
need_lock_object=self.need_lock_object)
class RecursiveSubmessage(Submessage):
def __init__(self, message_id, sender, raw_params=None):
super(RecursiveSubmessage, self).__init__(
obj=None,
sender=sender, message_id=message_id, raw_params=raw_params)
def dispatch(self, parent_obj, parent_message):
from yawf.dispatch import dispatch_message
message = self.as_message(parent_message)
return dispatch_message(
parent_obj,
message=message,
defer_side_effect=True,
need_lock_object=False)
|
from . import Message
class Submessage(object):
need_lock_object = True
def __init__(self, obj, message_id, sender, raw_params, need_lock_object=True):
self.obj = obj
self.sender = sender
self.message_id = message_id
self.raw_params = raw_params
self.need_lock_object = need_lock_object
super(Submessage, self).__init__()
def as_message(self, parent):
return Message(self.sender, self.message_id, self.raw_params,
parent_message_id=parent.unique_id,
message_group=parent.message_group,
)
def dispatch(self, parent_obj, parent_message):
from yawf.dispatch import dispatch_message
message = self.as_message(parent_message)
return dispatch_message(
self.obj,
message=message,
defer_side_effect=True,
need_lock_object=self.need_lock_object)
class RecursiveSubmessage(Submessage):
def __init__(self, message_id, sender, raw_params):
super(RecursiveSubmessage, self).__init__(
obj=None,
sender=sender, message_id=message_id, raw_params=raw_params)
def dispatch(self, parent_obj, parent_message):
from yawf.dispatch import dispatch_message
message = self.as_message(parent_message)
return dispatch_message(
parent_obj,
message=message,
defer_side_effect=True,
need_lock_object=False)
Make raw_params an optional argument in Submessagefrom . import Message
class Submessage(object):
need_lock_object = True
def __init__(self, obj, message_id, sender, raw_params=None, need_lock_object=True):
self.obj = obj
self.sender = sender
self.message_id = message_id
self.raw_params = raw_params
self.need_lock_object = need_lock_object
super(Submessage, self).__init__()
def as_message(self, parent):
return Message(self.sender, self.message_id, self.raw_params,
parent_message_id=parent.unique_id,
message_group=parent.message_group,
)
def dispatch(self, parent_obj, parent_message):
from yawf.dispatch import dispatch_message
message = self.as_message(parent_message)
return dispatch_message(
self.obj,
message=message,
defer_side_effect=True,
need_lock_object=self.need_lock_object)
class RecursiveSubmessage(Submessage):
def __init__(self, message_id, sender, raw_params=None):
super(RecursiveSubmessage, self).__init__(
obj=None,
sender=sender, message_id=message_id, raw_params=raw_params)
def dispatch(self, parent_obj, parent_message):
from yawf.dispatch import dispatch_message
message = self.as_message(parent_message)
return dispatch_message(
parent_obj,
message=message,
defer_side_effect=True,
need_lock_object=False)
|
<commit_before>from . import Message
class Submessage(object):
need_lock_object = True
def __init__(self, obj, message_id, sender, raw_params, need_lock_object=True):
self.obj = obj
self.sender = sender
self.message_id = message_id
self.raw_params = raw_params
self.need_lock_object = need_lock_object
super(Submessage, self).__init__()
def as_message(self, parent):
return Message(self.sender, self.message_id, self.raw_params,
parent_message_id=parent.unique_id,
message_group=parent.message_group,
)
def dispatch(self, parent_obj, parent_message):
from yawf.dispatch import dispatch_message
message = self.as_message(parent_message)
return dispatch_message(
self.obj,
message=message,
defer_side_effect=True,
need_lock_object=self.need_lock_object)
class RecursiveSubmessage(Submessage):
def __init__(self, message_id, sender, raw_params):
super(RecursiveSubmessage, self).__init__(
obj=None,
sender=sender, message_id=message_id, raw_params=raw_params)
def dispatch(self, parent_obj, parent_message):
from yawf.dispatch import dispatch_message
message = self.as_message(parent_message)
return dispatch_message(
parent_obj,
message=message,
defer_side_effect=True,
need_lock_object=False)
<commit_msg>Make raw_params an optional argument in Submessage<commit_after>from . import Message
class Submessage(object):
need_lock_object = True
def __init__(self, obj, message_id, sender, raw_params=None, need_lock_object=True):
self.obj = obj
self.sender = sender
self.message_id = message_id
self.raw_params = raw_params
self.need_lock_object = need_lock_object
super(Submessage, self).__init__()
def as_message(self, parent):
return Message(self.sender, self.message_id, self.raw_params,
parent_message_id=parent.unique_id,
message_group=parent.message_group,
)
def dispatch(self, parent_obj, parent_message):
from yawf.dispatch import dispatch_message
message = self.as_message(parent_message)
return dispatch_message(
self.obj,
message=message,
defer_side_effect=True,
need_lock_object=self.need_lock_object)
class RecursiveSubmessage(Submessage):
def __init__(self, message_id, sender, raw_params=None):
super(RecursiveSubmessage, self).__init__(
obj=None,
sender=sender, message_id=message_id, raw_params=raw_params)
def dispatch(self, parent_obj, parent_message):
from yawf.dispatch import dispatch_message
message = self.as_message(parent_message)
return dispatch_message(
parent_obj,
message=message,
defer_side_effect=True,
need_lock_object=False)
|
7406e2fdcc19566e4a577c8dd4a3484f401580c7
|
pinry/settings/docker.py
|
pinry/settings/docker.py
|
import logging
from .base import *
# SECURITY WARNING: keep the secret key used in production secret!
if 'SECRET_KEY' not in os.environ:
logging.warning(
"No SECRET_KEY given in environ, please have a check"
)
SECRET_KEY = os.environ.get('SECRET_KEY', "PLEASE_REPLACE_ME")
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
# SECURITY WARNING: use your actual domain name in production!
ALLOWED_HOSTS = ['*']
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'postgres',
'USER': 'postgres',
'HOST': 'db',
'PORT': 5432,
}
}
USE_X_FORWARDED_HOST = True
try:
from .local_settings import *
except ImportError:
pass
|
import logging
from .base import *
# SECURITY WARNING: keep the secret key used in production secret!
if 'SECRET_KEY' not in os.environ:
logging.warning(
"No SECRET_KEY given in environ, please have a check"
)
SECRET_KEY = os.environ.get('SECRET_KEY', "PLEASE_REPLACE_ME")
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
# SECURITY WARNING: use your actual domain name in production!
ALLOWED_HOSTS = ['*']
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'postgres',
'USER': 'postgres',
'HOST': 'db',
'PORT': 5432,
}
}
USE_X_FORWARDED_HOST = True
REST_FRAMEWORK['DEFAULT_RENDERER_CLASSES'] = [
'rest_framework.renderers.JSONRenderer',
]
try:
from .local_settings import *
except ImportError:
pass
|
Allow JsonRender only in production mode
|
Feature: Allow JsonRender only in production mode
|
Python
|
bsd-2-clause
|
lapo-luchini/pinry,pinry/pinry,pinry/pinry,pinry/pinry,pinry/pinry,lapo-luchini/pinry,lapo-luchini/pinry,lapo-luchini/pinry
|
import logging
from .base import *
# SECURITY WARNING: keep the secret key used in production secret!
if 'SECRET_KEY' not in os.environ:
logging.warning(
"No SECRET_KEY given in environ, please have a check"
)
SECRET_KEY = os.environ.get('SECRET_KEY', "PLEASE_REPLACE_ME")
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
# SECURITY WARNING: use your actual domain name in production!
ALLOWED_HOSTS = ['*']
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'postgres',
'USER': 'postgres',
'HOST': 'db',
'PORT': 5432,
}
}
USE_X_FORWARDED_HOST = True
try:
from .local_settings import *
except ImportError:
pass
Feature: Allow JsonRender only in production mode
|
import logging
from .base import *
# SECURITY WARNING: keep the secret key used in production secret!
if 'SECRET_KEY' not in os.environ:
logging.warning(
"No SECRET_KEY given in environ, please have a check"
)
SECRET_KEY = os.environ.get('SECRET_KEY', "PLEASE_REPLACE_ME")
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
# SECURITY WARNING: use your actual domain name in production!
ALLOWED_HOSTS = ['*']
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'postgres',
'USER': 'postgres',
'HOST': 'db',
'PORT': 5432,
}
}
USE_X_FORWARDED_HOST = True
REST_FRAMEWORK['DEFAULT_RENDERER_CLASSES'] = [
'rest_framework.renderers.JSONRenderer',
]
try:
from .local_settings import *
except ImportError:
pass
|
<commit_before>import logging
from .base import *
# SECURITY WARNING: keep the secret key used in production secret!
if 'SECRET_KEY' not in os.environ:
logging.warning(
"No SECRET_KEY given in environ, please have a check"
)
SECRET_KEY = os.environ.get('SECRET_KEY', "PLEASE_REPLACE_ME")
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
# SECURITY WARNING: use your actual domain name in production!
ALLOWED_HOSTS = ['*']
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'postgres',
'USER': 'postgres',
'HOST': 'db',
'PORT': 5432,
}
}
USE_X_FORWARDED_HOST = True
try:
from .local_settings import *
except ImportError:
pass
<commit_msg>Feature: Allow JsonRender only in production mode<commit_after>
|
import logging
from .base import *
# SECURITY WARNING: keep the secret key used in production secret!
if 'SECRET_KEY' not in os.environ:
logging.warning(
"No SECRET_KEY given in environ, please have a check"
)
SECRET_KEY = os.environ.get('SECRET_KEY', "PLEASE_REPLACE_ME")
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
# SECURITY WARNING: use your actual domain name in production!
ALLOWED_HOSTS = ['*']
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'postgres',
'USER': 'postgres',
'HOST': 'db',
'PORT': 5432,
}
}
USE_X_FORWARDED_HOST = True
REST_FRAMEWORK['DEFAULT_RENDERER_CLASSES'] = [
'rest_framework.renderers.JSONRenderer',
]
try:
from .local_settings import *
except ImportError:
pass
|
import logging
from .base import *
# SECURITY WARNING: keep the secret key used in production secret!
if 'SECRET_KEY' not in os.environ:
logging.warning(
"No SECRET_KEY given in environ, please have a check"
)
SECRET_KEY = os.environ.get('SECRET_KEY', "PLEASE_REPLACE_ME")
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
# SECURITY WARNING: use your actual domain name in production!
ALLOWED_HOSTS = ['*']
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'postgres',
'USER': 'postgres',
'HOST': 'db',
'PORT': 5432,
}
}
USE_X_FORWARDED_HOST = True
try:
from .local_settings import *
except ImportError:
pass
Feature: Allow JsonRender only in production modeimport logging
from .base import *
# SECURITY WARNING: keep the secret key used in production secret!
if 'SECRET_KEY' not in os.environ:
logging.warning(
"No SECRET_KEY given in environ, please have a check"
)
SECRET_KEY = os.environ.get('SECRET_KEY', "PLEASE_REPLACE_ME")
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
# SECURITY WARNING: use your actual domain name in production!
ALLOWED_HOSTS = ['*']
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'postgres',
'USER': 'postgres',
'HOST': 'db',
'PORT': 5432,
}
}
USE_X_FORWARDED_HOST = True
REST_FRAMEWORK['DEFAULT_RENDERER_CLASSES'] = [
'rest_framework.renderers.JSONRenderer',
]
try:
from .local_settings import *
except ImportError:
pass
|
<commit_before>import logging
from .base import *
# SECURITY WARNING: keep the secret key used in production secret!
if 'SECRET_KEY' not in os.environ:
logging.warning(
"No SECRET_KEY given in environ, please have a check"
)
SECRET_KEY = os.environ.get('SECRET_KEY', "PLEASE_REPLACE_ME")
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
# SECURITY WARNING: use your actual domain name in production!
ALLOWED_HOSTS = ['*']
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'postgres',
'USER': 'postgres',
'HOST': 'db',
'PORT': 5432,
}
}
USE_X_FORWARDED_HOST = True
try:
from .local_settings import *
except ImportError:
pass
<commit_msg>Feature: Allow JsonRender only in production mode<commit_after>import logging
from .base import *
# SECURITY WARNING: keep the secret key used in production secret!
if 'SECRET_KEY' not in os.environ:
logging.warning(
"No SECRET_KEY given in environ, please have a check"
)
SECRET_KEY = os.environ.get('SECRET_KEY', "PLEASE_REPLACE_ME")
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
# SECURITY WARNING: use your actual domain name in production!
ALLOWED_HOSTS = ['*']
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'postgres',
'USER': 'postgres',
'HOST': 'db',
'PORT': 5432,
}
}
USE_X_FORWARDED_HOST = True
REST_FRAMEWORK['DEFAULT_RENDERER_CLASSES'] = [
'rest_framework.renderers.JSONRenderer',
]
try:
from .local_settings import *
except ImportError:
pass
|
d0139d460b1f4710e8f870700ecf51336538d430
|
examples/basic_flask.py
|
examples/basic_flask.py
|
import flask
import mmstats
application = app = flask.Flask(__name__)
app.config['DEBUG'] = True
class Stats(mmstats.MmStats):
ok = mmstats.CounterField(label="mmstats.example.ok")
bad = mmstats.CounterField(label="mmstats.example.bad")
working = mmstats.BoolField(label="mmstats.example.working")
stats = Stats()
def set_working(sender):
stats.working = True
flask.request_started.connect(set_working, app)
def unset_working(sender, response):
stats.working = False
flask.request_finished.connect(unset_working, app)
def inc_response(sender, response):
if response.status_code == 200:
stats.ok.inc()
elif response.status_code == 500:
stats.bad.inc()
flask.request_finished.connect(inc_response, app)
@app.route('/200')
def ok():
return 'OK'
@app.route('/500')
def bad():
return 'oh noes!', 500
@app.route('/status')
def status():
return """\
<html>
<body>
<pre>
ok: %s
bad: %s
working: %s
</pre>
</body>
</html>""" % (stats.ok, stats.bad, stats.working)
if __name__ == '__main__':
app.run(host='0.0.0.0', port=5001)
|
import flask
import mmstats
application = app = flask.Flask(__name__)
app.config['DEBUG'] = True
class Stats(mmstats.MmStats):
ok = mmstats.CounterField(label="mmstats.example.ok")
bad = mmstats.CounterField(label="mmstats.example.bad")
working = mmstats.BoolField(label="mmstats.example.working")
stats = Stats()
def set_working(sender):
stats.working = True
flask.request_started.connect(set_working, app)
def unset_working(sender, response):
stats.working = False
flask.request_finished.connect(unset_working, app)
def inc_response(sender, response):
if response.status_code == 200:
stats.ok.inc()
elif response.status_code == 500:
stats.bad.inc()
flask.request_finished.connect(inc_response, app)
@app.route('/')
def ok():
return "OK or see /status for a single process's status"
@app.route('/500')
def bad():
return 'oh noes!', 500
@app.route('/status')
def status():
return """\
<html>
<body>
<pre>
ok: %s
bad: %s
working: %s
</pre>
</body>
</html>""" % (stats.ok, stats.bad, stats.working)
if __name__ == '__main__':
app.run(host='0.0.0.0', port=5001)
|
Make the default OK page in the flask example obvious to find
|
Make the default OK page in the flask example obvious to find
|
Python
|
bsd-3-clause
|
schmichael/mmstats,schmichael/mmstats,schmichael/mmstats,schmichael/mmstats
|
import flask
import mmstats
application = app = flask.Flask(__name__)
app.config['DEBUG'] = True
class Stats(mmstats.MmStats):
ok = mmstats.CounterField(label="mmstats.example.ok")
bad = mmstats.CounterField(label="mmstats.example.bad")
working = mmstats.BoolField(label="mmstats.example.working")
stats = Stats()
def set_working(sender):
stats.working = True
flask.request_started.connect(set_working, app)
def unset_working(sender, response):
stats.working = False
flask.request_finished.connect(unset_working, app)
def inc_response(sender, response):
if response.status_code == 200:
stats.ok.inc()
elif response.status_code == 500:
stats.bad.inc()
flask.request_finished.connect(inc_response, app)
@app.route('/200')
def ok():
return 'OK'
@app.route('/500')
def bad():
return 'oh noes!', 500
@app.route('/status')
def status():
return """\
<html>
<body>
<pre>
ok: %s
bad: %s
working: %s
</pre>
</body>
</html>""" % (stats.ok, stats.bad, stats.working)
if __name__ == '__main__':
app.run(host='0.0.0.0', port=5001)
Make the default OK page in the flask example obvious to find
|
import flask
import mmstats
application = app = flask.Flask(__name__)
app.config['DEBUG'] = True
class Stats(mmstats.MmStats):
ok = mmstats.CounterField(label="mmstats.example.ok")
bad = mmstats.CounterField(label="mmstats.example.bad")
working = mmstats.BoolField(label="mmstats.example.working")
stats = Stats()
def set_working(sender):
stats.working = True
flask.request_started.connect(set_working, app)
def unset_working(sender, response):
stats.working = False
flask.request_finished.connect(unset_working, app)
def inc_response(sender, response):
if response.status_code == 200:
stats.ok.inc()
elif response.status_code == 500:
stats.bad.inc()
flask.request_finished.connect(inc_response, app)
@app.route('/')
def ok():
return "OK or see /status for a single process's status"
@app.route('/500')
def bad():
return 'oh noes!', 500
@app.route('/status')
def status():
return """\
<html>
<body>
<pre>
ok: %s
bad: %s
working: %s
</pre>
</body>
</html>""" % (stats.ok, stats.bad, stats.working)
if __name__ == '__main__':
app.run(host='0.0.0.0', port=5001)
|
<commit_before>import flask
import mmstats
application = app = flask.Flask(__name__)
app.config['DEBUG'] = True
class Stats(mmstats.MmStats):
ok = mmstats.CounterField(label="mmstats.example.ok")
bad = mmstats.CounterField(label="mmstats.example.bad")
working = mmstats.BoolField(label="mmstats.example.working")
stats = Stats()
def set_working(sender):
stats.working = True
flask.request_started.connect(set_working, app)
def unset_working(sender, response):
stats.working = False
flask.request_finished.connect(unset_working, app)
def inc_response(sender, response):
if response.status_code == 200:
stats.ok.inc()
elif response.status_code == 500:
stats.bad.inc()
flask.request_finished.connect(inc_response, app)
@app.route('/200')
def ok():
return 'OK'
@app.route('/500')
def bad():
return 'oh noes!', 500
@app.route('/status')
def status():
return """\
<html>
<body>
<pre>
ok: %s
bad: %s
working: %s
</pre>
</body>
</html>""" % (stats.ok, stats.bad, stats.working)
if __name__ == '__main__':
app.run(host='0.0.0.0', port=5001)
<commit_msg>Make the default OK page in the flask example obvious to find<commit_after>
|
import flask
import mmstats
application = app = flask.Flask(__name__)
app.config['DEBUG'] = True
class Stats(mmstats.MmStats):
ok = mmstats.CounterField(label="mmstats.example.ok")
bad = mmstats.CounterField(label="mmstats.example.bad")
working = mmstats.BoolField(label="mmstats.example.working")
stats = Stats()
def set_working(sender):
stats.working = True
flask.request_started.connect(set_working, app)
def unset_working(sender, response):
stats.working = False
flask.request_finished.connect(unset_working, app)
def inc_response(sender, response):
if response.status_code == 200:
stats.ok.inc()
elif response.status_code == 500:
stats.bad.inc()
flask.request_finished.connect(inc_response, app)
@app.route('/')
def ok():
return "OK or see /status for a single process's status"
@app.route('/500')
def bad():
return 'oh noes!', 500
@app.route('/status')
def status():
return """\
<html>
<body>
<pre>
ok: %s
bad: %s
working: %s
</pre>
</body>
</html>""" % (stats.ok, stats.bad, stats.working)
if __name__ == '__main__':
app.run(host='0.0.0.0', port=5001)
|
import flask
import mmstats
application = app = flask.Flask(__name__)
app.config['DEBUG'] = True
class Stats(mmstats.MmStats):
ok = mmstats.CounterField(label="mmstats.example.ok")
bad = mmstats.CounterField(label="mmstats.example.bad")
working = mmstats.BoolField(label="mmstats.example.working")
stats = Stats()
def set_working(sender):
stats.working = True
flask.request_started.connect(set_working, app)
def unset_working(sender, response):
stats.working = False
flask.request_finished.connect(unset_working, app)
def inc_response(sender, response):
if response.status_code == 200:
stats.ok.inc()
elif response.status_code == 500:
stats.bad.inc()
flask.request_finished.connect(inc_response, app)
@app.route('/200')
def ok():
return 'OK'
@app.route('/500')
def bad():
return 'oh noes!', 500
@app.route('/status')
def status():
return """\
<html>
<body>
<pre>
ok: %s
bad: %s
working: %s
</pre>
</body>
</html>""" % (stats.ok, stats.bad, stats.working)
if __name__ == '__main__':
app.run(host='0.0.0.0', port=5001)
Make the default OK page in the flask example obvious to findimport flask
import mmstats
application = app = flask.Flask(__name__)
app.config['DEBUG'] = True
class Stats(mmstats.MmStats):
ok = mmstats.CounterField(label="mmstats.example.ok")
bad = mmstats.CounterField(label="mmstats.example.bad")
working = mmstats.BoolField(label="mmstats.example.working")
stats = Stats()
def set_working(sender):
stats.working = True
flask.request_started.connect(set_working, app)
def unset_working(sender, response):
stats.working = False
flask.request_finished.connect(unset_working, app)
def inc_response(sender, response):
if response.status_code == 200:
stats.ok.inc()
elif response.status_code == 500:
stats.bad.inc()
flask.request_finished.connect(inc_response, app)
@app.route('/')
def ok():
return "OK or see /status for a single process's status"
@app.route('/500')
def bad():
return 'oh noes!', 500
@app.route('/status')
def status():
return """\
<html>
<body>
<pre>
ok: %s
bad: %s
working: %s
</pre>
</body>
</html>""" % (stats.ok, stats.bad, stats.working)
if __name__ == '__main__':
app.run(host='0.0.0.0', port=5001)
|
<commit_before>import flask
import mmstats
application = app = flask.Flask(__name__)
app.config['DEBUG'] = True
class Stats(mmstats.MmStats):
ok = mmstats.CounterField(label="mmstats.example.ok")
bad = mmstats.CounterField(label="mmstats.example.bad")
working = mmstats.BoolField(label="mmstats.example.working")
stats = Stats()
def set_working(sender):
stats.working = True
flask.request_started.connect(set_working, app)
def unset_working(sender, response):
stats.working = False
flask.request_finished.connect(unset_working, app)
def inc_response(sender, response):
if response.status_code == 200:
stats.ok.inc()
elif response.status_code == 500:
stats.bad.inc()
flask.request_finished.connect(inc_response, app)
@app.route('/200')
def ok():
return 'OK'
@app.route('/500')
def bad():
return 'oh noes!', 500
@app.route('/status')
def status():
return """\
<html>
<body>
<pre>
ok: %s
bad: %s
working: %s
</pre>
</body>
</html>""" % (stats.ok, stats.bad, stats.working)
if __name__ == '__main__':
app.run(host='0.0.0.0', port=5001)
<commit_msg>Make the default OK page in the flask example obvious to find<commit_after>import flask
import mmstats
application = app = flask.Flask(__name__)
app.config['DEBUG'] = True
class Stats(mmstats.MmStats):
ok = mmstats.CounterField(label="mmstats.example.ok")
bad = mmstats.CounterField(label="mmstats.example.bad")
working = mmstats.BoolField(label="mmstats.example.working")
stats = Stats()
def set_working(sender):
stats.working = True
flask.request_started.connect(set_working, app)
def unset_working(sender, response):
stats.working = False
flask.request_finished.connect(unset_working, app)
def inc_response(sender, response):
if response.status_code == 200:
stats.ok.inc()
elif response.status_code == 500:
stats.bad.inc()
flask.request_finished.connect(inc_response, app)
@app.route('/')
def ok():
return "OK or see /status for a single process's status"
@app.route('/500')
def bad():
return 'oh noes!', 500
@app.route('/status')
def status():
return """\
<html>
<body>
<pre>
ok: %s
bad: %s
working: %s
</pre>
</body>
</html>""" % (stats.ok, stats.bad, stats.working)
if __name__ == '__main__':
app.run(host='0.0.0.0', port=5001)
|
49ea86d93d75afb1c3a3f95dd72a78b6d78f04cc
|
sitecustomize.py
|
sitecustomize.py
|
import sys
import os
from combinator.branchmgr import theBranchManager
theBranchManager.addPaths()
for key in sys.modules.keys():
# Unload all Combinator modules that had to be loaded in order to call
# addPaths(). Although the very very beginning of this script needs to
# load the trunk combinator (or whichever one your shell points at), once
# the path has been set up, newer versions of combinator may be used; for
# example, the 'whbranch', 'chbranch' and 'mkbranch' commands should all
# import Combinator from the current Divmod branch. This is especially
# required so that Combinator's tests can be run on the currently-active
# Combinator rather than the one responsible for setting up the
# environment.
if key == 'combinator' or key.startswith('combinator'):
del sys.modules[key]
# Install stuff as a user, by default.
if sys.platform != 'darwin':
# For use with setup.py...
if sys.platform.startswith('win'):
execprefix = os.path.abspath(os.path.expanduser("~/Python"))
else:
# Don't exactly know how Darwin fits in here - I think distutils is
# buggy...?
execprefix = os.path.abspath(os.path.expanduser("~/.local"))
import sys
class DistSysProxy:
def __getattr__(self, attr):
if attr in ('prefix', 'exec_prefix'):
return execprefix
else:
return getattr(sys, attr)
sys.modules['distutils.command.sys'] = DistSysProxy()
|
import sys
import os
from combinator.branchmgr import theBranchManager
theBranchManager.addPaths()
for key in sys.modules.keys():
# Unload all Combinator modules that had to be loaded in order to call
# addPaths(). Although the very very beginning of this script needs to
# load the trunk combinator (or whichever one your shell points at), once
# the path has been set up, newer versions of combinator may be used; for
# example, the 'whbranch', 'chbranch' and 'mkbranch' commands should all
# import Combinator from the current Divmod branch. This is especially
# required so that Combinator's tests can be run on the currently-active
# Combinator rather than the one responsible for setting up the
# environment.
if key == 'combinator' or key.startswith('combinator'):
del sys.modules[key]
|
Remove distutils-mangling code from Combinator which breaks setuptools.
|
Remove distutils-mangling code from Combinator which breaks setuptools.
After this change, Combinator will no longer attempt to force 'python setup.py install' to put things into your home directory. Use `setup.py --prefix ~/.local`, or, if your package is trying to use setuptools, `python setup.py --site-dirs ~/.local/lib/python2.5/site-packages --prefix ~/.local install`.
Author: glyph
Reviewer: dried
Fixes #493
|
Python
|
mit
|
habnabit/Combinator,habnabit/Combinator
|
import sys
import os
from combinator.branchmgr import theBranchManager
theBranchManager.addPaths()
for key in sys.modules.keys():
# Unload all Combinator modules that had to be loaded in order to call
# addPaths(). Although the very very beginning of this script needs to
# load the trunk combinator (or whichever one your shell points at), once
# the path has been set up, newer versions of combinator may be used; for
# example, the 'whbranch', 'chbranch' and 'mkbranch' commands should all
# import Combinator from the current Divmod branch. This is especially
# required so that Combinator's tests can be run on the currently-active
# Combinator rather than the one responsible for setting up the
# environment.
if key == 'combinator' or key.startswith('combinator'):
del sys.modules[key]
# Install stuff as a user, by default.
if sys.platform != 'darwin':
# For use with setup.py...
if sys.platform.startswith('win'):
execprefix = os.path.abspath(os.path.expanduser("~/Python"))
else:
# Don't exactly know how Darwin fits in here - I think distutils is
# buggy...?
execprefix = os.path.abspath(os.path.expanduser("~/.local"))
import sys
class DistSysProxy:
def __getattr__(self, attr):
if attr in ('prefix', 'exec_prefix'):
return execprefix
else:
return getattr(sys, attr)
sys.modules['distutils.command.sys'] = DistSysProxy()
Remove distutils-mangling code from Combinator which breaks setuptools.
After this change, Combinator will no longer attempt to force 'python setup.py install' to put things into your home directory. Use `setup.py --prefix ~/.local`, or, if your package is trying to use setuptools, `python setup.py --site-dirs ~/.local/lib/python2.5/site-packages --prefix ~/.local install`.
Author: glyph
Reviewer: dried
Fixes #493
|
import sys
import os
from combinator.branchmgr import theBranchManager
theBranchManager.addPaths()
for key in sys.modules.keys():
# Unload all Combinator modules that had to be loaded in order to call
# addPaths(). Although the very very beginning of this script needs to
# load the trunk combinator (or whichever one your shell points at), once
# the path has been set up, newer versions of combinator may be used; for
# example, the 'whbranch', 'chbranch' and 'mkbranch' commands should all
# import Combinator from the current Divmod branch. This is especially
# required so that Combinator's tests can be run on the currently-active
# Combinator rather than the one responsible for setting up the
# environment.
if key == 'combinator' or key.startswith('combinator'):
del sys.modules[key]
|
<commit_before>
import sys
import os
from combinator.branchmgr import theBranchManager
theBranchManager.addPaths()
for key in sys.modules.keys():
# Unload all Combinator modules that had to be loaded in order to call
# addPaths(). Although the very very beginning of this script needs to
# load the trunk combinator (or whichever one your shell points at), once
# the path has been set up, newer versions of combinator may be used; for
# example, the 'whbranch', 'chbranch' and 'mkbranch' commands should all
# import Combinator from the current Divmod branch. This is especially
# required so that Combinator's tests can be run on the currently-active
# Combinator rather than the one responsible for setting up the
# environment.
if key == 'combinator' or key.startswith('combinator'):
del sys.modules[key]
# Install stuff as a user, by default.
if sys.platform != 'darwin':
# For use with setup.py...
if sys.platform.startswith('win'):
execprefix = os.path.abspath(os.path.expanduser("~/Python"))
else:
# Don't exactly know how Darwin fits in here - I think distutils is
# buggy...?
execprefix = os.path.abspath(os.path.expanduser("~/.local"))
import sys
class DistSysProxy:
def __getattr__(self, attr):
if attr in ('prefix', 'exec_prefix'):
return execprefix
else:
return getattr(sys, attr)
sys.modules['distutils.command.sys'] = DistSysProxy()
<commit_msg>Remove distutils-mangling code from Combinator which breaks setuptools.
After this change, Combinator will no longer attempt to force 'python setup.py install' to put things into your home directory. Use `setup.py --prefix ~/.local`, or, if your package is trying to use setuptools, `python setup.py --site-dirs ~/.local/lib/python2.5/site-packages --prefix ~/.local install`.
Author: glyph
Reviewer: dried
Fixes #493<commit_after>
|
import sys
import os
from combinator.branchmgr import theBranchManager
theBranchManager.addPaths()
for key in sys.modules.keys():
# Unload all Combinator modules that had to be loaded in order to call
# addPaths(). Although the very very beginning of this script needs to
# load the trunk combinator (or whichever one your shell points at), once
# the path has been set up, newer versions of combinator may be used; for
# example, the 'whbranch', 'chbranch' and 'mkbranch' commands should all
# import Combinator from the current Divmod branch. This is especially
# required so that Combinator's tests can be run on the currently-active
# Combinator rather than the one responsible for setting up the
# environment.
if key == 'combinator' or key.startswith('combinator'):
del sys.modules[key]
|
import sys
import os
from combinator.branchmgr import theBranchManager
theBranchManager.addPaths()
for key in sys.modules.keys():
# Unload all Combinator modules that had to be loaded in order to call
# addPaths(). Although the very very beginning of this script needs to
# load the trunk combinator (or whichever one your shell points at), once
# the path has been set up, newer versions of combinator may be used; for
# example, the 'whbranch', 'chbranch' and 'mkbranch' commands should all
# import Combinator from the current Divmod branch. This is especially
# required so that Combinator's tests can be run on the currently-active
# Combinator rather than the one responsible for setting up the
# environment.
if key == 'combinator' or key.startswith('combinator'):
del sys.modules[key]
# Install stuff as a user, by default.
if sys.platform != 'darwin':
# For use with setup.py...
if sys.platform.startswith('win'):
execprefix = os.path.abspath(os.path.expanduser("~/Python"))
else:
# Don't exactly know how Darwin fits in here - I think distutils is
# buggy...?
execprefix = os.path.abspath(os.path.expanduser("~/.local"))
import sys
class DistSysProxy:
def __getattr__(self, attr):
if attr in ('prefix', 'exec_prefix'):
return execprefix
else:
return getattr(sys, attr)
sys.modules['distutils.command.sys'] = DistSysProxy()
Remove distutils-mangling code from Combinator which breaks setuptools.
After this change, Combinator will no longer attempt to force 'python setup.py install' to put things into your home directory. Use `setup.py --prefix ~/.local`, or, if your package is trying to use setuptools, `python setup.py --site-dirs ~/.local/lib/python2.5/site-packages --prefix ~/.local install`.
Author: glyph
Reviewer: dried
Fixes #493
import sys
import os
from combinator.branchmgr import theBranchManager
theBranchManager.addPaths()
for key in sys.modules.keys():
# Unload all Combinator modules that had to be loaded in order to call
# addPaths(). Although the very very beginning of this script needs to
# load the trunk combinator (or whichever one your shell points at), once
# the path has been set up, newer versions of combinator may be used; for
# example, the 'whbranch', 'chbranch' and 'mkbranch' commands should all
# import Combinator from the current Divmod branch. This is especially
# required so that Combinator's tests can be run on the currently-active
# Combinator rather than the one responsible for setting up the
# environment.
if key == 'combinator' or key.startswith('combinator'):
del sys.modules[key]
|
<commit_before>
import sys
import os
from combinator.branchmgr import theBranchManager
theBranchManager.addPaths()
for key in sys.modules.keys():
# Unload all Combinator modules that had to be loaded in order to call
# addPaths(). Although the very very beginning of this script needs to
# load the trunk combinator (or whichever one your shell points at), once
# the path has been set up, newer versions of combinator may be used; for
# example, the 'whbranch', 'chbranch' and 'mkbranch' commands should all
# import Combinator from the current Divmod branch. This is especially
# required so that Combinator's tests can be run on the currently-active
# Combinator rather than the one responsible for setting up the
# environment.
if key == 'combinator' or key.startswith('combinator'):
del sys.modules[key]
# Install stuff as a user, by default.
if sys.platform != 'darwin':
# For use with setup.py...
if sys.platform.startswith('win'):
execprefix = os.path.abspath(os.path.expanduser("~/Python"))
else:
# Don't exactly know how Darwin fits in here - I think distutils is
# buggy...?
execprefix = os.path.abspath(os.path.expanduser("~/.local"))
import sys
class DistSysProxy:
def __getattr__(self, attr):
if attr in ('prefix', 'exec_prefix'):
return execprefix
else:
return getattr(sys, attr)
sys.modules['distutils.command.sys'] = DistSysProxy()
<commit_msg>Remove distutils-mangling code from Combinator which breaks setuptools.
After this change, Combinator will no longer attempt to force 'python setup.py install' to put things into your home directory. Use `setup.py --prefix ~/.local`, or, if your package is trying to use setuptools, `python setup.py --site-dirs ~/.local/lib/python2.5/site-packages --prefix ~/.local install`.
Author: glyph
Reviewer: dried
Fixes #493<commit_after>
import sys
import os
from combinator.branchmgr import theBranchManager
theBranchManager.addPaths()
for key in sys.modules.keys():
# Unload all Combinator modules that had to be loaded in order to call
# addPaths(). Although the very very beginning of this script needs to
# load the trunk combinator (or whichever one your shell points at), once
# the path has been set up, newer versions of combinator may be used; for
# example, the 'whbranch', 'chbranch' and 'mkbranch' commands should all
# import Combinator from the current Divmod branch. This is especially
# required so that Combinator's tests can be run on the currently-active
# Combinator rather than the one responsible for setting up the
# environment.
if key == 'combinator' or key.startswith('combinator'):
del sys.modules[key]
|
7525ddcd1a0c668045f37e87cbafa4a598b10148
|
apps/__init__.py
|
apps/__init__.py
|
## module loader, goes to see which submodules have 'html' directories
## and declares them at the toplevel
import os,importlib
def find_module_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))]
return subdirs
def find_html_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))]
return dict(subdirs)
MODULES = {}
_html_dirs = find_html_dirs()
[ MODULES.update({m_name:{'module':importlib.import_module('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
|
## module loader, goes to see which submodules have 'html' directories
## and declares them at the toplevel
import os,importlib
def find_module_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))]
return subdirs
def find_html_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))]
return dict(subdirs)
def import_app(app):
try:
importlib.import_module(app)
except Exception as e:
logging.error("Couldn't load app: {0}, error: {1}".format(app, e))
MODULES = {}
_html_dirs = find_html_dirs()
[ MODULES.update({m_name:{'module': import_app('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
|
Handle application erroring to not break the server
|
Handle application erroring to not break the server
|
Python
|
agpl-3.0
|
indx/indx-core,indx/indx-core,indx/indx-core,indx/indx-core,indx/indx-core
|
## module loader, goes to see which submodules have 'html' directories
## and declares them at the toplevel
import os,importlib
def find_module_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))]
return subdirs
def find_html_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))]
return dict(subdirs)
MODULES = {}
_html_dirs = find_html_dirs()
[ MODULES.update({m_name:{'module':importlib.import_module('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
Handle application erroring to not break the server
|
## module loader, goes to see which submodules have 'html' directories
## and declares them at the toplevel
import os,importlib
def find_module_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))]
return subdirs
def find_html_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))]
return dict(subdirs)
def import_app(app):
try:
importlib.import_module(app)
except Exception as e:
logging.error("Couldn't load app: {0}, error: {1}".format(app, e))
MODULES = {}
_html_dirs = find_html_dirs()
[ MODULES.update({m_name:{'module': import_app('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
|
<commit_before>## module loader, goes to see which submodules have 'html' directories
## and declares them at the toplevel
import os,importlib
def find_module_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))]
return subdirs
def find_html_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))]
return dict(subdirs)
MODULES = {}
_html_dirs = find_html_dirs()
[ MODULES.update({m_name:{'module':importlib.import_module('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
<commit_msg>Handle application erroring to not break the server<commit_after>
|
## module loader, goes to see which submodules have 'html' directories
## and declares them at the toplevel
import os,importlib
def find_module_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))]
return subdirs
def find_html_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))]
return dict(subdirs)
def import_app(app):
try:
importlib.import_module(app)
except Exception as e:
logging.error("Couldn't load app: {0}, error: {1}".format(app, e))
MODULES = {}
_html_dirs = find_html_dirs()
[ MODULES.update({m_name:{'module': import_app('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
|
## module loader, goes to see which submodules have 'html' directories
## and declares them at the toplevel
import os,importlib
def find_module_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))]
return subdirs
def find_html_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))]
return dict(subdirs)
MODULES = {}
_html_dirs = find_html_dirs()
[ MODULES.update({m_name:{'module':importlib.import_module('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
Handle application erroring to not break the server## module loader, goes to see which submodules have 'html' directories
## and declares them at the toplevel
import os,importlib
def find_module_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))]
return subdirs
def find_html_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))]
return dict(subdirs)
def import_app(app):
try:
importlib.import_module(app)
except Exception as e:
logging.error("Couldn't load app: {0}, error: {1}".format(app, e))
MODULES = {}
_html_dirs = find_html_dirs()
[ MODULES.update({m_name:{'module': import_app('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
|
<commit_before>## module loader, goes to see which submodules have 'html' directories
## and declares them at the toplevel
import os,importlib
def find_module_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))]
return subdirs
def find_html_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))]
return dict(subdirs)
MODULES = {}
_html_dirs = find_html_dirs()
[ MODULES.update({m_name:{'module':importlib.import_module('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
<commit_msg>Handle application erroring to not break the server<commit_after>## module loader, goes to see which submodules have 'html' directories
## and declares them at the toplevel
import os,importlib
def find_module_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))]
return subdirs
def find_html_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))]
return dict(subdirs)
def import_app(app):
try:
importlib.import_module(app)
except Exception as e:
logging.error("Couldn't load app: {0}, error: {1}".format(app, e))
MODULES = {}
_html_dirs = find_html_dirs()
[ MODULES.update({m_name:{'module': import_app('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
|
72fb6ca12b685809bd5de0c5df9f051eef1163c4
|
test/TestBaseUtils.py
|
test/TestBaseUtils.py
|
import unittest
import sys
sys.path.append('../src')
import BaseUtils
class TestBaseUtils(unittest.TestCase):
def test_word_segmenter(self):
segments = BaseUtils.get_words('this is a random sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
def test_word_segmenter_ignores_whitespace(self):
segments = BaseUtils.get_words('this is a random sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
def test_word_segmenter_ignores_special_chars(self):
segments = BaseUtils.get_words('this is $$%%a random --00sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
if __name__ == '__main__':
unittest.main()
|
''' Tests for BaseUtils
'''
import unittest
import sys
sys.path.append('../src')
import BaseUtils
class TestBaseUtils(unittest.TestCase):
''' Main test class for the BaseUtils '''
def test_word_segmenter_with_empty(self):
''' For an empty string, the segmenter returns
just an empty list '''
segments = BaseUtils.get_words('')
self.assertEqual(segments, [])
def test_word_segmenter(self):
''' The word segmenter returns the expected
array of strings '''
segments = BaseUtils.get_words('this is a random sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
def test_ignoring_whitespace(self):
''' Whitespace in the input string is ignored
in the input string '''
segments = BaseUtils.get_words('this is a random sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
def test_ignoring_special_chars(self):
''' If there are special characters in the input,
they are ignored as well '''
segments = BaseUtils.get_words('this is $$%%a random --00sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
if __name__ == '__main__':
unittest.main()
|
Add test for empty string; cleanup
|
Add test for empty string; cleanup
|
Python
|
bsd-2-clause
|
ambidextrousTx/RNLTK
|
import unittest
import sys
sys.path.append('../src')
import BaseUtils
class TestBaseUtils(unittest.TestCase):
def test_word_segmenter(self):
segments = BaseUtils.get_words('this is a random sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
def test_word_segmenter_ignores_whitespace(self):
segments = BaseUtils.get_words('this is a random sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
def test_word_segmenter_ignores_special_chars(self):
segments = BaseUtils.get_words('this is $$%%a random --00sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
if __name__ == '__main__':
unittest.main()
Add test for empty string; cleanup
|
''' Tests for BaseUtils
'''
import unittest
import sys
sys.path.append('../src')
import BaseUtils
class TestBaseUtils(unittest.TestCase):
''' Main test class for the BaseUtils '''
def test_word_segmenter_with_empty(self):
''' For an empty string, the segmenter returns
just an empty list '''
segments = BaseUtils.get_words('')
self.assertEqual(segments, [])
def test_word_segmenter(self):
''' The word segmenter returns the expected
array of strings '''
segments = BaseUtils.get_words('this is a random sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
def test_ignoring_whitespace(self):
''' Whitespace in the input string is ignored
in the input string '''
segments = BaseUtils.get_words('this is a random sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
def test_ignoring_special_chars(self):
''' If there are special characters in the input,
they are ignored as well '''
segments = BaseUtils.get_words('this is $$%%a random --00sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
import sys
sys.path.append('../src')
import BaseUtils
class TestBaseUtils(unittest.TestCase):
def test_word_segmenter(self):
segments = BaseUtils.get_words('this is a random sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
def test_word_segmenter_ignores_whitespace(self):
segments = BaseUtils.get_words('this is a random sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
def test_word_segmenter_ignores_special_chars(self):
segments = BaseUtils.get_words('this is $$%%a random --00sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
if __name__ == '__main__':
unittest.main()
<commit_msg>Add test for empty string; cleanup<commit_after>
|
''' Tests for BaseUtils
'''
import unittest
import sys
sys.path.append('../src')
import BaseUtils
class TestBaseUtils(unittest.TestCase):
''' Main test class for the BaseUtils '''
def test_word_segmenter_with_empty(self):
''' For an empty string, the segmenter returns
just an empty list '''
segments = BaseUtils.get_words('')
self.assertEqual(segments, [])
def test_word_segmenter(self):
''' The word segmenter returns the expected
array of strings '''
segments = BaseUtils.get_words('this is a random sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
def test_ignoring_whitespace(self):
''' Whitespace in the input string is ignored
in the input string '''
segments = BaseUtils.get_words('this is a random sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
def test_ignoring_special_chars(self):
''' If there are special characters in the input,
they are ignored as well '''
segments = BaseUtils.get_words('this is $$%%a random --00sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
if __name__ == '__main__':
unittest.main()
|
import unittest
import sys
sys.path.append('../src')
import BaseUtils
class TestBaseUtils(unittest.TestCase):
def test_word_segmenter(self):
segments = BaseUtils.get_words('this is a random sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
def test_word_segmenter_ignores_whitespace(self):
segments = BaseUtils.get_words('this is a random sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
def test_word_segmenter_ignores_special_chars(self):
segments = BaseUtils.get_words('this is $$%%a random --00sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
if __name__ == '__main__':
unittest.main()
Add test for empty string; cleanup''' Tests for BaseUtils
'''
import unittest
import sys
sys.path.append('../src')
import BaseUtils
class TestBaseUtils(unittest.TestCase):
''' Main test class for the BaseUtils '''
def test_word_segmenter_with_empty(self):
''' For an empty string, the segmenter returns
just an empty list '''
segments = BaseUtils.get_words('')
self.assertEqual(segments, [])
def test_word_segmenter(self):
''' The word segmenter returns the expected
array of strings '''
segments = BaseUtils.get_words('this is a random sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
def test_ignoring_whitespace(self):
''' Whitespace in the input string is ignored
in the input string '''
segments = BaseUtils.get_words('this is a random sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
def test_ignoring_special_chars(self):
''' If there are special characters in the input,
they are ignored as well '''
segments = BaseUtils.get_words('this is $$%%a random --00sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
import sys
sys.path.append('../src')
import BaseUtils
class TestBaseUtils(unittest.TestCase):
def test_word_segmenter(self):
segments = BaseUtils.get_words('this is a random sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
def test_word_segmenter_ignores_whitespace(self):
segments = BaseUtils.get_words('this is a random sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
def test_word_segmenter_ignores_special_chars(self):
segments = BaseUtils.get_words('this is $$%%a random --00sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
if __name__ == '__main__':
unittest.main()
<commit_msg>Add test for empty string; cleanup<commit_after>''' Tests for BaseUtils
'''
import unittest
import sys
sys.path.append('../src')
import BaseUtils
class TestBaseUtils(unittest.TestCase):
''' Main test class for the BaseUtils '''
def test_word_segmenter_with_empty(self):
''' For an empty string, the segmenter returns
just an empty list '''
segments = BaseUtils.get_words('')
self.assertEqual(segments, [])
def test_word_segmenter(self):
''' The word segmenter returns the expected
array of strings '''
segments = BaseUtils.get_words('this is a random sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
def test_ignoring_whitespace(self):
''' Whitespace in the input string is ignored
in the input string '''
segments = BaseUtils.get_words('this is a random sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
def test_ignoring_special_chars(self):
''' If there are special characters in the input,
they are ignored as well '''
segments = BaseUtils.get_words('this is $$%%a random --00sentence')
self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])
if __name__ == '__main__':
unittest.main()
|
73c9173f801048cba0a9a72e78073c454a1ba0c4
|
rinse/client.py
|
rinse/client.py
|
"""SOAP client."""
from __future__ import print_function
import requests
from rinse import ENVELOPE_XSD
from rinse.util import SCHEMA
from rinse.response import RinseResponse
class SoapClient(object):
"""Rinse SOAP client."""
__session = None
def __init__(self, url, debug=False, **kwargs):
"""Set base attributes."""
self.url = url
self.debug = debug
self.kwargs = kwargs
self.operations = {}
self.soap_schema = SCHEMA[ENVELOPE_XSD]
@property
def _session(self):
"""Cached instance of requests.Session."""
if self.__session is None:
self.__session = requests.Session()
return self.__session
@_session.setter
def _session(self, session):
"""Allow injecting your own instance of requests.Session."""
self.__session = session
def __call__(self, msg, action="", build_response=RinseResponse,
debug=False):
"""Post 'msg' to remote service."""
# generate HTTP request from msg
request = msg.request(self.url, action).prepare()
if debug or self.debug:
print('{} {}'.format(request.method, self.url))
print(
''.join(
'{}: {}\n'.format(name, val)
for name, val
in sorted(request.headers.items())
)
)
print(request.content)
# perform HTTP(s) POST
resp = self._session.send(request)
return build_response(resp)
|
"""SOAP client."""
from __future__ import print_function
import requests
from rinse import ENVELOPE_XSD
from rinse.util import SCHEMA
from rinse.response import RinseResponse
class SoapClient(object):
"""Rinse SOAP client."""
__session = None
def __init__(self, url, debug=False, **kwargs):
"""Set base attributes."""
self.url = url
self.debug = debug
self.kwargs = kwargs
self.operations = {}
self.soap_schema = SCHEMA[ENVELOPE_XSD]
@property
def _session(self):
"""Cached instance of requests.Session."""
if self.__session is None:
self.__session = requests.Session()
return self.__session
@_session.setter
def _session(self, session):
"""Allow injecting your own instance of requests.Session."""
self.__session = session
def __call__(self, msg, action="", build_response=RinseResponse,
debug=False):
"""Post 'msg' to remote service."""
# generate HTTP request from msg
request = msg.request(self.url, action).prepare()
if debug or self.debug:
print('{} {}'.format(request.method, self.url))
print(
''.join(
'{}: {}\n'.format(name, val)
for name, val
in sorted(request.headers.items())
)
)
print(request.content)
# perform HTTP(s) POST
resp = self._session.send(request)
return build_response(resp)
|
Fix line continuation indent level.
|
Fix line continuation indent level.
|
Python
|
mit
|
MarkusH/rinse,MarkusH/rinse,simudream/rinse,tysonclugg/rinse,simudream/rinse,tysonclugg/rinse
|
"""SOAP client."""
from __future__ import print_function
import requests
from rinse import ENVELOPE_XSD
from rinse.util import SCHEMA
from rinse.response import RinseResponse
class SoapClient(object):
"""Rinse SOAP client."""
__session = None
def __init__(self, url, debug=False, **kwargs):
"""Set base attributes."""
self.url = url
self.debug = debug
self.kwargs = kwargs
self.operations = {}
self.soap_schema = SCHEMA[ENVELOPE_XSD]
@property
def _session(self):
"""Cached instance of requests.Session."""
if self.__session is None:
self.__session = requests.Session()
return self.__session
@_session.setter
def _session(self, session):
"""Allow injecting your own instance of requests.Session."""
self.__session = session
def __call__(self, msg, action="", build_response=RinseResponse,
debug=False):
"""Post 'msg' to remote service."""
# generate HTTP request from msg
request = msg.request(self.url, action).prepare()
if debug or self.debug:
print('{} {}'.format(request.method, self.url))
print(
''.join(
'{}: {}\n'.format(name, val)
for name, val
in sorted(request.headers.items())
)
)
print(request.content)
# perform HTTP(s) POST
resp = self._session.send(request)
return build_response(resp)
Fix line continuation indent level.
|
"""SOAP client."""
from __future__ import print_function
import requests
from rinse import ENVELOPE_XSD
from rinse.util import SCHEMA
from rinse.response import RinseResponse
class SoapClient(object):
"""Rinse SOAP client."""
__session = None
def __init__(self, url, debug=False, **kwargs):
"""Set base attributes."""
self.url = url
self.debug = debug
self.kwargs = kwargs
self.operations = {}
self.soap_schema = SCHEMA[ENVELOPE_XSD]
@property
def _session(self):
"""Cached instance of requests.Session."""
if self.__session is None:
self.__session = requests.Session()
return self.__session
@_session.setter
def _session(self, session):
"""Allow injecting your own instance of requests.Session."""
self.__session = session
def __call__(self, msg, action="", build_response=RinseResponse,
debug=False):
"""Post 'msg' to remote service."""
# generate HTTP request from msg
request = msg.request(self.url, action).prepare()
if debug or self.debug:
print('{} {}'.format(request.method, self.url))
print(
''.join(
'{}: {}\n'.format(name, val)
for name, val
in sorted(request.headers.items())
)
)
print(request.content)
# perform HTTP(s) POST
resp = self._session.send(request)
return build_response(resp)
|
<commit_before>"""SOAP client."""
from __future__ import print_function
import requests
from rinse import ENVELOPE_XSD
from rinse.util import SCHEMA
from rinse.response import RinseResponse
class SoapClient(object):
"""Rinse SOAP client."""
__session = None
def __init__(self, url, debug=False, **kwargs):
"""Set base attributes."""
self.url = url
self.debug = debug
self.kwargs = kwargs
self.operations = {}
self.soap_schema = SCHEMA[ENVELOPE_XSD]
@property
def _session(self):
"""Cached instance of requests.Session."""
if self.__session is None:
self.__session = requests.Session()
return self.__session
@_session.setter
def _session(self, session):
"""Allow injecting your own instance of requests.Session."""
self.__session = session
def __call__(self, msg, action="", build_response=RinseResponse,
debug=False):
"""Post 'msg' to remote service."""
# generate HTTP request from msg
request = msg.request(self.url, action).prepare()
if debug or self.debug:
print('{} {}'.format(request.method, self.url))
print(
''.join(
'{}: {}\n'.format(name, val)
for name, val
in sorted(request.headers.items())
)
)
print(request.content)
# perform HTTP(s) POST
resp = self._session.send(request)
return build_response(resp)
<commit_msg>Fix line continuation indent level.<commit_after>
|
"""SOAP client."""
from __future__ import print_function
import requests
from rinse import ENVELOPE_XSD
from rinse.util import SCHEMA
from rinse.response import RinseResponse
class SoapClient(object):
"""Rinse SOAP client."""
__session = None
def __init__(self, url, debug=False, **kwargs):
"""Set base attributes."""
self.url = url
self.debug = debug
self.kwargs = kwargs
self.operations = {}
self.soap_schema = SCHEMA[ENVELOPE_XSD]
@property
def _session(self):
"""Cached instance of requests.Session."""
if self.__session is None:
self.__session = requests.Session()
return self.__session
@_session.setter
def _session(self, session):
"""Allow injecting your own instance of requests.Session."""
self.__session = session
def __call__(self, msg, action="", build_response=RinseResponse,
debug=False):
"""Post 'msg' to remote service."""
# generate HTTP request from msg
request = msg.request(self.url, action).prepare()
if debug or self.debug:
print('{} {}'.format(request.method, self.url))
print(
''.join(
'{}: {}\n'.format(name, val)
for name, val
in sorted(request.headers.items())
)
)
print(request.content)
# perform HTTP(s) POST
resp = self._session.send(request)
return build_response(resp)
|
"""SOAP client."""
from __future__ import print_function
import requests
from rinse import ENVELOPE_XSD
from rinse.util import SCHEMA
from rinse.response import RinseResponse
class SoapClient(object):
"""Rinse SOAP client."""
__session = None
def __init__(self, url, debug=False, **kwargs):
"""Set base attributes."""
self.url = url
self.debug = debug
self.kwargs = kwargs
self.operations = {}
self.soap_schema = SCHEMA[ENVELOPE_XSD]
@property
def _session(self):
"""Cached instance of requests.Session."""
if self.__session is None:
self.__session = requests.Session()
return self.__session
@_session.setter
def _session(self, session):
"""Allow injecting your own instance of requests.Session."""
self.__session = session
def __call__(self, msg, action="", build_response=RinseResponse,
debug=False):
"""Post 'msg' to remote service."""
# generate HTTP request from msg
request = msg.request(self.url, action).prepare()
if debug or self.debug:
print('{} {}'.format(request.method, self.url))
print(
''.join(
'{}: {}\n'.format(name, val)
for name, val
in sorted(request.headers.items())
)
)
print(request.content)
# perform HTTP(s) POST
resp = self._session.send(request)
return build_response(resp)
Fix line continuation indent level."""SOAP client."""
from __future__ import print_function
import requests
from rinse import ENVELOPE_XSD
from rinse.util import SCHEMA
from rinse.response import RinseResponse
class SoapClient(object):
"""Rinse SOAP client."""
__session = None
def __init__(self, url, debug=False, **kwargs):
"""Set base attributes."""
self.url = url
self.debug = debug
self.kwargs = kwargs
self.operations = {}
self.soap_schema = SCHEMA[ENVELOPE_XSD]
@property
def _session(self):
"""Cached instance of requests.Session."""
if self.__session is None:
self.__session = requests.Session()
return self.__session
@_session.setter
def _session(self, session):
"""Allow injecting your own instance of requests.Session."""
self.__session = session
def __call__(self, msg, action="", build_response=RinseResponse,
debug=False):
"""Post 'msg' to remote service."""
# generate HTTP request from msg
request = msg.request(self.url, action).prepare()
if debug or self.debug:
print('{} {}'.format(request.method, self.url))
print(
''.join(
'{}: {}\n'.format(name, val)
for name, val
in sorted(request.headers.items())
)
)
print(request.content)
# perform HTTP(s) POST
resp = self._session.send(request)
return build_response(resp)
|
<commit_before>"""SOAP client."""
from __future__ import print_function
import requests
from rinse import ENVELOPE_XSD
from rinse.util import SCHEMA
from rinse.response import RinseResponse
class SoapClient(object):
"""Rinse SOAP client."""
__session = None
def __init__(self, url, debug=False, **kwargs):
"""Set base attributes."""
self.url = url
self.debug = debug
self.kwargs = kwargs
self.operations = {}
self.soap_schema = SCHEMA[ENVELOPE_XSD]
@property
def _session(self):
"""Cached instance of requests.Session."""
if self.__session is None:
self.__session = requests.Session()
return self.__session
@_session.setter
def _session(self, session):
"""Allow injecting your own instance of requests.Session."""
self.__session = session
def __call__(self, msg, action="", build_response=RinseResponse,
debug=False):
"""Post 'msg' to remote service."""
# generate HTTP request from msg
request = msg.request(self.url, action).prepare()
if debug or self.debug:
print('{} {}'.format(request.method, self.url))
print(
''.join(
'{}: {}\n'.format(name, val)
for name, val
in sorted(request.headers.items())
)
)
print(request.content)
# perform HTTP(s) POST
resp = self._session.send(request)
return build_response(resp)
<commit_msg>Fix line continuation indent level.<commit_after>"""SOAP client."""
from __future__ import print_function
import requests
from rinse import ENVELOPE_XSD
from rinse.util import SCHEMA
from rinse.response import RinseResponse
class SoapClient(object):
"""Rinse SOAP client."""
__session = None
def __init__(self, url, debug=False, **kwargs):
"""Set base attributes."""
self.url = url
self.debug = debug
self.kwargs = kwargs
self.operations = {}
self.soap_schema = SCHEMA[ENVELOPE_XSD]
@property
def _session(self):
"""Cached instance of requests.Session."""
if self.__session is None:
self.__session = requests.Session()
return self.__session
@_session.setter
def _session(self, session):
"""Allow injecting your own instance of requests.Session."""
self.__session = session
def __call__(self, msg, action="", build_response=RinseResponse,
debug=False):
"""Post 'msg' to remote service."""
# generate HTTP request from msg
request = msg.request(self.url, action).prepare()
if debug or self.debug:
print('{} {}'.format(request.method, self.url))
print(
''.join(
'{}: {}\n'.format(name, val)
for name, val
in sorted(request.headers.items())
)
)
print(request.content)
# perform HTTP(s) POST
resp = self._session.send(request)
return build_response(resp)
|
2b8208e2f6ba8554aefc8e984132a0d4084c26ec
|
open_folder.py
|
open_folder.py
|
import os, platform
# I intend to hide the Operating Specific details of opening a folder
# here in this module.
#
# On Mac OS X you do this with "open"
# e.g. "open '\Users\golliher\Documents\Tickler File'"
# On Windows you do this with "explorer"
# e.g. "explorer c:\Documents and Settings\Tickler File"
# On Linux xdg-open is a desktop-independant tool
def open_folder(path):
'''Runs operating specific command to open a folder. MacOS, Linux & Windows supported'''
path = os.path.normpath(path)
try:
platform_cmd_formatstring = { 'Darwin': "open '%s'", # note the quotation marks around path
'Linux': "xdg-open '%s'",
'Windows': "explorer %s"}[platform.system()]
except:
raise Exception("Your operating system was not recognized. Unable to determine how to open folders for you.")
platform_cmd = platform_cmd_formatstring % path
os.popen(platform_cmd)
|
import os, platform, subprocess
# I intend to hide the Operating Specific details of opening a folder
# here in this module.
#
# On Mac OS X you do this with "open"
# e.g. "open '\Users\golliher\Documents\Tickler File'"
# On Windows you do this with "explorer"
# e.g. "explorer c:\Documents and Settings\Tickler File"
# On Linux xdg-open is a desktop-independant tool
def open_folder(path):
'''Runs operating specific command to open a folder. MacOS, Linux & Windows supported'''
path = os.path.normpath(path)
if not os.path.isdir(path):
raise Exception("Folder does not exist.")
# Find the operating system command to open a folder
try:
platform_cmd = { 'Darwin': "open", # note the quotation marks around path
'Linux': "xdg-open",
'Windows': "explorer"}[platform.system()]
except:
raise Exception("Your operating system was not recognized. Unable to determine how to open folders for you.")
# Run the operating system command to open the folder
try:
subprocess.check_call([platform_cmd,path])
except OSError, e:
raise Exception("Failed attempt executing folder opening command for your OS. \nCMD: %s\nARG: %s\nRESULT: %s\n" % (platform_cmd, path, e.strerror) )
|
Raise exception if path not found, os not found, or command execution fails.
|
Raise exception if path not found, os not found, or command execution fails.
|
Python
|
mit
|
golliher/dg-tickler-file
|
import os, platform
# I intend to hide the Operating Specific details of opening a folder
# here in this module.
#
# On Mac OS X you do this with "open"
# e.g. "open '\Users\golliher\Documents\Tickler File'"
# On Windows you do this with "explorer"
# e.g. "explorer c:\Documents and Settings\Tickler File"
# On Linux xdg-open is a desktop-independant tool
def open_folder(path):
'''Runs operating specific command to open a folder. MacOS, Linux & Windows supported'''
path = os.path.normpath(path)
try:
platform_cmd_formatstring = { 'Darwin': "open '%s'", # note the quotation marks around path
'Linux': "xdg-open '%s'",
'Windows': "explorer %s"}[platform.system()]
except:
raise Exception("Your operating system was not recognized. Unable to determine how to open folders for you.")
platform_cmd = platform_cmd_formatstring % path
os.popen(platform_cmd)
Raise exception if path not found, os not found, or command execution fails.
|
import os, platform, subprocess
# I intend to hide the Operating Specific details of opening a folder
# here in this module.
#
# On Mac OS X you do this with "open"
# e.g. "open '\Users\golliher\Documents\Tickler File'"
# On Windows you do this with "explorer"
# e.g. "explorer c:\Documents and Settings\Tickler File"
# On Linux xdg-open is a desktop-independant tool
def open_folder(path):
'''Runs operating specific command to open a folder. MacOS, Linux & Windows supported'''
path = os.path.normpath(path)
if not os.path.isdir(path):
raise Exception("Folder does not exist.")
# Find the operating system command to open a folder
try:
platform_cmd = { 'Darwin': "open", # note the quotation marks around path
'Linux': "xdg-open",
'Windows': "explorer"}[platform.system()]
except:
raise Exception("Your operating system was not recognized. Unable to determine how to open folders for you.")
# Run the operating system command to open the folder
try:
subprocess.check_call([platform_cmd,path])
except OSError, e:
raise Exception("Failed attempt executing folder opening command for your OS. \nCMD: %s\nARG: %s\nRESULT: %s\n" % (platform_cmd, path, e.strerror) )
|
<commit_before>import os, platform
# I intend to hide the Operating Specific details of opening a folder
# here in this module.
#
# On Mac OS X you do this with "open"
# e.g. "open '\Users\golliher\Documents\Tickler File'"
# On Windows you do this with "explorer"
# e.g. "explorer c:\Documents and Settings\Tickler File"
# On Linux xdg-open is a desktop-independant tool
def open_folder(path):
'''Runs operating specific command to open a folder. MacOS, Linux & Windows supported'''
path = os.path.normpath(path)
try:
platform_cmd_formatstring = { 'Darwin': "open '%s'", # note the quotation marks around path
'Linux': "xdg-open '%s'",
'Windows': "explorer %s"}[platform.system()]
except:
raise Exception("Your operating system was not recognized. Unable to determine how to open folders for you.")
platform_cmd = platform_cmd_formatstring % path
os.popen(platform_cmd)
<commit_msg>Raise exception if path not found, os not found, or command execution fails.<commit_after>
|
import os, platform, subprocess
# I intend to hide the Operating Specific details of opening a folder
# here in this module.
#
# On Mac OS X you do this with "open"
# e.g. "open '\Users\golliher\Documents\Tickler File'"
# On Windows you do this with "explorer"
# e.g. "explorer c:\Documents and Settings\Tickler File"
# On Linux xdg-open is a desktop-independant tool
def open_folder(path):
'''Runs operating specific command to open a folder. MacOS, Linux & Windows supported'''
path = os.path.normpath(path)
if not os.path.isdir(path):
raise Exception("Folder does not exist.")
# Find the operating system command to open a folder
try:
platform_cmd = { 'Darwin': "open", # note the quotation marks around path
'Linux': "xdg-open",
'Windows': "explorer"}[platform.system()]
except:
raise Exception("Your operating system was not recognized. Unable to determine how to open folders for you.")
# Run the operating system command to open the folder
try:
subprocess.check_call([platform_cmd,path])
except OSError, e:
raise Exception("Failed attempt executing folder opening command for your OS. \nCMD: %s\nARG: %s\nRESULT: %s\n" % (platform_cmd, path, e.strerror) )
|
import os, platform
# I intend to hide the Operating Specific details of opening a folder
# here in this module.
#
# On Mac OS X you do this with "open"
# e.g. "open '\Users\golliher\Documents\Tickler File'"
# On Windows you do this with "explorer"
# e.g. "explorer c:\Documents and Settings\Tickler File"
# On Linux xdg-open is a desktop-independant tool
def open_folder(path):
'''Runs operating specific command to open a folder. MacOS, Linux & Windows supported'''
path = os.path.normpath(path)
try:
platform_cmd_formatstring = { 'Darwin': "open '%s'", # note the quotation marks around path
'Linux': "xdg-open '%s'",
'Windows': "explorer %s"}[platform.system()]
except:
raise Exception("Your operating system was not recognized. Unable to determine how to open folders for you.")
platform_cmd = platform_cmd_formatstring % path
os.popen(platform_cmd)
Raise exception if path not found, os not found, or command execution fails.import os, platform, subprocess
# I intend to hide the Operating Specific details of opening a folder
# here in this module.
#
# On Mac OS X you do this with "open"
# e.g. "open '\Users\golliher\Documents\Tickler File'"
# On Windows you do this with "explorer"
# e.g. "explorer c:\Documents and Settings\Tickler File"
# On Linux xdg-open is a desktop-independant tool
def open_folder(path):
'''Runs operating specific command to open a folder. MacOS, Linux & Windows supported'''
path = os.path.normpath(path)
if not os.path.isdir(path):
raise Exception("Folder does not exist.")
# Find the operating system command to open a folder
try:
platform_cmd = { 'Darwin': "open", # note the quotation marks around path
'Linux': "xdg-open",
'Windows': "explorer"}[platform.system()]
except:
raise Exception("Your operating system was not recognized. Unable to determine how to open folders for you.")
# Run the operating system command to open the folder
try:
subprocess.check_call([platform_cmd,path])
except OSError, e:
raise Exception("Failed attempt executing folder opening command for your OS. \nCMD: %s\nARG: %s\nRESULT: %s\n" % (platform_cmd, path, e.strerror) )
|
<commit_before>import os, platform
# I intend to hide the Operating Specific details of opening a folder
# here in this module.
#
# On Mac OS X you do this with "open"
# e.g. "open '\Users\golliher\Documents\Tickler File'"
# On Windows you do this with "explorer"
# e.g. "explorer c:\Documents and Settings\Tickler File"
# On Linux xdg-open is a desktop-independant tool
def open_folder(path):
'''Runs operating specific command to open a folder. MacOS, Linux & Windows supported'''
path = os.path.normpath(path)
try:
platform_cmd_formatstring = { 'Darwin': "open '%s'", # note the quotation marks around path
'Linux': "xdg-open '%s'",
'Windows': "explorer %s"}[platform.system()]
except:
raise Exception("Your operating system was not recognized. Unable to determine how to open folders for you.")
platform_cmd = platform_cmd_formatstring % path
os.popen(platform_cmd)
<commit_msg>Raise exception if path not found, os not found, or command execution fails.<commit_after>import os, platform, subprocess
# I intend to hide the Operating Specific details of opening a folder
# here in this module.
#
# On Mac OS X you do this with "open"
# e.g. "open '\Users\golliher\Documents\Tickler File'"
# On Windows you do this with "explorer"
# e.g. "explorer c:\Documents and Settings\Tickler File"
# On Linux xdg-open is a desktop-independant tool
def open_folder(path):
'''Runs operating specific command to open a folder. MacOS, Linux & Windows supported'''
path = os.path.normpath(path)
if not os.path.isdir(path):
raise Exception("Folder does not exist.")
# Find the operating system command to open a folder
try:
platform_cmd = { 'Darwin': "open", # note the quotation marks around path
'Linux': "xdg-open",
'Windows': "explorer"}[platform.system()]
except:
raise Exception("Your operating system was not recognized. Unable to determine how to open folders for you.")
# Run the operating system command to open the folder
try:
subprocess.check_call([platform_cmd,path])
except OSError, e:
raise Exception("Failed attempt executing folder opening command for your OS. \nCMD: %s\nARG: %s\nRESULT: %s\n" % (platform_cmd, path, e.strerror) )
|
6ed5d899d8c2fbef8c4b40180c497421b9f8e6c4
|
map_service/serializers.py
|
map_service/serializers.py
|
from map_service.models import LkState
from map_service.models import SpatialObjects
from rest_framework_mongoengine import serializers
class LkStateSerializer(serializers.DocumentSerializer):
class Meta:
model = LkState
class SpatialObjectsSerializer(serializers.DocumentSerializer):
class Meta:
model = SpatialObjects
fields = '__all__'
|
from map_service.models import LkState
from map_service.models import SpatialObjects
from rest_framework_mongoengine import serializers
class LkStateSerializer(serializers.DocumentSerializer):
class Meta:
model = LkState
fields = '__all__'
class SpatialObjectsSerializer(serializers.DocumentSerializer):
class Meta:
model = SpatialObjects
fields = '__all__'
|
Fix Compatibility issue with django 1.8+
|
Fix Compatibility issue with django 1.8+
|
Python
|
apache-2.0
|
tmkasun/Knnect,tmkasun/Knnect,tmkasun/Knnect,tmkasun/Knnect
|
from map_service.models import LkState
from map_service.models import SpatialObjects
from rest_framework_mongoengine import serializers
class LkStateSerializer(serializers.DocumentSerializer):
class Meta:
model = LkState
class SpatialObjectsSerializer(serializers.DocumentSerializer):
class Meta:
model = SpatialObjects
fields = '__all__'
Fix Compatibility issue with django 1.8+
|
from map_service.models import LkState
from map_service.models import SpatialObjects
from rest_framework_mongoengine import serializers
class LkStateSerializer(serializers.DocumentSerializer):
class Meta:
model = LkState
fields = '__all__'
class SpatialObjectsSerializer(serializers.DocumentSerializer):
class Meta:
model = SpatialObjects
fields = '__all__'
|
<commit_before>from map_service.models import LkState
from map_service.models import SpatialObjects
from rest_framework_mongoengine import serializers
class LkStateSerializer(serializers.DocumentSerializer):
class Meta:
model = LkState
class SpatialObjectsSerializer(serializers.DocumentSerializer):
class Meta:
model = SpatialObjects
fields = '__all__'
<commit_msg>Fix Compatibility issue with django 1.8+<commit_after>
|
from map_service.models import LkState
from map_service.models import SpatialObjects
from rest_framework_mongoengine import serializers
class LkStateSerializer(serializers.DocumentSerializer):
class Meta:
model = LkState
fields = '__all__'
class SpatialObjectsSerializer(serializers.DocumentSerializer):
class Meta:
model = SpatialObjects
fields = '__all__'
|
from map_service.models import LkState
from map_service.models import SpatialObjects
from rest_framework_mongoengine import serializers
class LkStateSerializer(serializers.DocumentSerializer):
class Meta:
model = LkState
class SpatialObjectsSerializer(serializers.DocumentSerializer):
class Meta:
model = SpatialObjects
fields = '__all__'
Fix Compatibility issue with django 1.8+from map_service.models import LkState
from map_service.models import SpatialObjects
from rest_framework_mongoengine import serializers
class LkStateSerializer(serializers.DocumentSerializer):
class Meta:
model = LkState
fields = '__all__'
class SpatialObjectsSerializer(serializers.DocumentSerializer):
class Meta:
model = SpatialObjects
fields = '__all__'
|
<commit_before>from map_service.models import LkState
from map_service.models import SpatialObjects
from rest_framework_mongoengine import serializers
class LkStateSerializer(serializers.DocumentSerializer):
class Meta:
model = LkState
class SpatialObjectsSerializer(serializers.DocumentSerializer):
class Meta:
model = SpatialObjects
fields = '__all__'
<commit_msg>Fix Compatibility issue with django 1.8+<commit_after>from map_service.models import LkState
from map_service.models import SpatialObjects
from rest_framework_mongoengine import serializers
class LkStateSerializer(serializers.DocumentSerializer):
class Meta:
model = LkState
fields = '__all__'
class SpatialObjectsSerializer(serializers.DocumentSerializer):
class Meta:
model = SpatialObjects
fields = '__all__'
|
b69e7f094514e0027f5fabda9d4c127c5cc1d512
|
sir/__main__.py
|
sir/__main__.py
|
# Copyright (c) 2014 Wieland Hoffmann
# License: MIT, see LICENSE for details
import argparse
import logging
from . import config
from .indexing import reindex
logger = logging.getLogger("sir")
def watch(args):
raise NotImplementedError
def main():
loghandler = logging.StreamHandler()
formatter = logging.Formatter(fmt="%(asctime)s %(threadName)s %(levelname)s: %(message)s")
loghandler.setFormatter(formatter)
logger.addHandler(loghandler)
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--debug", action="store_true")
subparsers = parser.add_subparsers()
reindex_parser = subparsers.add_parser("reindex", help="Reindexes all or a single entity type")
reindex_parser.set_defaults(func=reindex)
reindex_parser.add_argument('--entities', action='append', help='The entities to reindex')
watch_parser = subparsers.add_parser("watch", help="Watches for incoming messages on an AMQP queue")
watch_parser.set_defaults(func=watch)
args = parser.parse_args()
if args.debug:
logger.setLevel(logging.DEBUG)
else:
logger.setLevel(logging.INFO)
config.read_config()
func = args.func
args = vars(args)
func(args["entities"], args["debug"])
if __name__ == '__main__':
main()
|
# Copyright (c) 2014 Wieland Hoffmann
# License: MIT, see LICENSE for details
import argparse
import logging
from . import config
from .indexing import reindex
logger = logging.getLogger("sir")
def watch(args):
raise NotImplementedError
def main():
loghandler = logging.StreamHandler()
formatter = logging.Formatter(fmt="%(threadName)s %(asctime)s %(levelname)s: %(message)s")
loghandler.setFormatter(formatter)
logger.addHandler(loghandler)
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--debug", action="store_true")
subparsers = parser.add_subparsers()
reindex_parser = subparsers.add_parser("reindex", help="Reindexes all or a single entity type")
reindex_parser.set_defaults(func=reindex)
reindex_parser.add_argument('--entities', action='append', help='The entities to reindex')
watch_parser = subparsers.add_parser("watch", help="Watches for incoming messages on an AMQP queue")
watch_parser.set_defaults(func=watch)
args = parser.parse_args()
if args.debug:
logger.setLevel(logging.DEBUG)
else:
logger.setLevel(logging.INFO)
config.read_config()
func = args.func
args = vars(args)
func(args["entities"], args["debug"])
if __name__ == '__main__':
main()
|
Change the logging format to begin with the threadname
|
Change the logging format to begin with the threadname
|
Python
|
mit
|
jeffweeksio/sir
|
# Copyright (c) 2014 Wieland Hoffmann
# License: MIT, see LICENSE for details
import argparse
import logging
from . import config
from .indexing import reindex
logger = logging.getLogger("sir")
def watch(args):
raise NotImplementedError
def main():
loghandler = logging.StreamHandler()
formatter = logging.Formatter(fmt="%(asctime)s %(threadName)s %(levelname)s: %(message)s")
loghandler.setFormatter(formatter)
logger.addHandler(loghandler)
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--debug", action="store_true")
subparsers = parser.add_subparsers()
reindex_parser = subparsers.add_parser("reindex", help="Reindexes all or a single entity type")
reindex_parser.set_defaults(func=reindex)
reindex_parser.add_argument('--entities', action='append', help='The entities to reindex')
watch_parser = subparsers.add_parser("watch", help="Watches for incoming messages on an AMQP queue")
watch_parser.set_defaults(func=watch)
args = parser.parse_args()
if args.debug:
logger.setLevel(logging.DEBUG)
else:
logger.setLevel(logging.INFO)
config.read_config()
func = args.func
args = vars(args)
func(args["entities"], args["debug"])
if __name__ == '__main__':
main()Change the logging format to begin with the threadname
|
# Copyright (c) 2014 Wieland Hoffmann
# License: MIT, see LICENSE for details
import argparse
import logging
from . import config
from .indexing import reindex
logger = logging.getLogger("sir")
def watch(args):
raise NotImplementedError
def main():
loghandler = logging.StreamHandler()
formatter = logging.Formatter(fmt="%(threadName)s %(asctime)s %(levelname)s: %(message)s")
loghandler.setFormatter(formatter)
logger.addHandler(loghandler)
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--debug", action="store_true")
subparsers = parser.add_subparsers()
reindex_parser = subparsers.add_parser("reindex", help="Reindexes all or a single entity type")
reindex_parser.set_defaults(func=reindex)
reindex_parser.add_argument('--entities', action='append', help='The entities to reindex')
watch_parser = subparsers.add_parser("watch", help="Watches for incoming messages on an AMQP queue")
watch_parser.set_defaults(func=watch)
args = parser.parse_args()
if args.debug:
logger.setLevel(logging.DEBUG)
else:
logger.setLevel(logging.INFO)
config.read_config()
func = args.func
args = vars(args)
func(args["entities"], args["debug"])
if __name__ == '__main__':
main()
|
<commit_before># Copyright (c) 2014 Wieland Hoffmann
# License: MIT, see LICENSE for details
import argparse
import logging
from . import config
from .indexing import reindex
logger = logging.getLogger("sir")
def watch(args):
raise NotImplementedError
def main():
loghandler = logging.StreamHandler()
formatter = logging.Formatter(fmt="%(asctime)s %(threadName)s %(levelname)s: %(message)s")
loghandler.setFormatter(formatter)
logger.addHandler(loghandler)
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--debug", action="store_true")
subparsers = parser.add_subparsers()
reindex_parser = subparsers.add_parser("reindex", help="Reindexes all or a single entity type")
reindex_parser.set_defaults(func=reindex)
reindex_parser.add_argument('--entities', action='append', help='The entities to reindex')
watch_parser = subparsers.add_parser("watch", help="Watches for incoming messages on an AMQP queue")
watch_parser.set_defaults(func=watch)
args = parser.parse_args()
if args.debug:
logger.setLevel(logging.DEBUG)
else:
logger.setLevel(logging.INFO)
config.read_config()
func = args.func
args = vars(args)
func(args["entities"], args["debug"])
if __name__ == '__main__':
main()<commit_msg>Change the logging format to begin with the threadname<commit_after>
|
# Copyright (c) 2014 Wieland Hoffmann
# License: MIT, see LICENSE for details
import argparse
import logging
from . import config
from .indexing import reindex
logger = logging.getLogger("sir")
def watch(args):
raise NotImplementedError
def main():
loghandler = logging.StreamHandler()
formatter = logging.Formatter(fmt="%(threadName)s %(asctime)s %(levelname)s: %(message)s")
loghandler.setFormatter(formatter)
logger.addHandler(loghandler)
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--debug", action="store_true")
subparsers = parser.add_subparsers()
reindex_parser = subparsers.add_parser("reindex", help="Reindexes all or a single entity type")
reindex_parser.set_defaults(func=reindex)
reindex_parser.add_argument('--entities', action='append', help='The entities to reindex')
watch_parser = subparsers.add_parser("watch", help="Watches for incoming messages on an AMQP queue")
watch_parser.set_defaults(func=watch)
args = parser.parse_args()
if args.debug:
logger.setLevel(logging.DEBUG)
else:
logger.setLevel(logging.INFO)
config.read_config()
func = args.func
args = vars(args)
func(args["entities"], args["debug"])
if __name__ == '__main__':
main()
|
# Copyright (c) 2014 Wieland Hoffmann
# License: MIT, see LICENSE for details
import argparse
import logging
from . import config
from .indexing import reindex
logger = logging.getLogger("sir")
def watch(args):
raise NotImplementedError
def main():
loghandler = logging.StreamHandler()
formatter = logging.Formatter(fmt="%(asctime)s %(threadName)s %(levelname)s: %(message)s")
loghandler.setFormatter(formatter)
logger.addHandler(loghandler)
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--debug", action="store_true")
subparsers = parser.add_subparsers()
reindex_parser = subparsers.add_parser("reindex", help="Reindexes all or a single entity type")
reindex_parser.set_defaults(func=reindex)
reindex_parser.add_argument('--entities', action='append', help='The entities to reindex')
watch_parser = subparsers.add_parser("watch", help="Watches for incoming messages on an AMQP queue")
watch_parser.set_defaults(func=watch)
args = parser.parse_args()
if args.debug:
logger.setLevel(logging.DEBUG)
else:
logger.setLevel(logging.INFO)
config.read_config()
func = args.func
args = vars(args)
func(args["entities"], args["debug"])
if __name__ == '__main__':
main()Change the logging format to begin with the threadname# Copyright (c) 2014 Wieland Hoffmann
# License: MIT, see LICENSE for details
import argparse
import logging
from . import config
from .indexing import reindex
logger = logging.getLogger("sir")
def watch(args):
raise NotImplementedError
def main():
loghandler = logging.StreamHandler()
formatter = logging.Formatter(fmt="%(threadName)s %(asctime)s %(levelname)s: %(message)s")
loghandler.setFormatter(formatter)
logger.addHandler(loghandler)
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--debug", action="store_true")
subparsers = parser.add_subparsers()
reindex_parser = subparsers.add_parser("reindex", help="Reindexes all or a single entity type")
reindex_parser.set_defaults(func=reindex)
reindex_parser.add_argument('--entities', action='append', help='The entities to reindex')
watch_parser = subparsers.add_parser("watch", help="Watches for incoming messages on an AMQP queue")
watch_parser.set_defaults(func=watch)
args = parser.parse_args()
if args.debug:
logger.setLevel(logging.DEBUG)
else:
logger.setLevel(logging.INFO)
config.read_config()
func = args.func
args = vars(args)
func(args["entities"], args["debug"])
if __name__ == '__main__':
main()
|
<commit_before># Copyright (c) 2014 Wieland Hoffmann
# License: MIT, see LICENSE for details
import argparse
import logging
from . import config
from .indexing import reindex
logger = logging.getLogger("sir")
def watch(args):
raise NotImplementedError
def main():
loghandler = logging.StreamHandler()
formatter = logging.Formatter(fmt="%(asctime)s %(threadName)s %(levelname)s: %(message)s")
loghandler.setFormatter(formatter)
logger.addHandler(loghandler)
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--debug", action="store_true")
subparsers = parser.add_subparsers()
reindex_parser = subparsers.add_parser("reindex", help="Reindexes all or a single entity type")
reindex_parser.set_defaults(func=reindex)
reindex_parser.add_argument('--entities', action='append', help='The entities to reindex')
watch_parser = subparsers.add_parser("watch", help="Watches for incoming messages on an AMQP queue")
watch_parser.set_defaults(func=watch)
args = parser.parse_args()
if args.debug:
logger.setLevel(logging.DEBUG)
else:
logger.setLevel(logging.INFO)
config.read_config()
func = args.func
args = vars(args)
func(args["entities"], args["debug"])
if __name__ == '__main__':
main()<commit_msg>Change the logging format to begin with the threadname<commit_after># Copyright (c) 2014 Wieland Hoffmann
# License: MIT, see LICENSE for details
import argparse
import logging
from . import config
from .indexing import reindex
logger = logging.getLogger("sir")
def watch(args):
raise NotImplementedError
def main():
loghandler = logging.StreamHandler()
formatter = logging.Formatter(fmt="%(threadName)s %(asctime)s %(levelname)s: %(message)s")
loghandler.setFormatter(formatter)
logger.addHandler(loghandler)
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--debug", action="store_true")
subparsers = parser.add_subparsers()
reindex_parser = subparsers.add_parser("reindex", help="Reindexes all or a single entity type")
reindex_parser.set_defaults(func=reindex)
reindex_parser.add_argument('--entities', action='append', help='The entities to reindex')
watch_parser = subparsers.add_parser("watch", help="Watches for incoming messages on an AMQP queue")
watch_parser.set_defaults(func=watch)
args = parser.parse_args()
if args.debug:
logger.setLevel(logging.DEBUG)
else:
logger.setLevel(logging.INFO)
config.read_config()
func = args.func
args = vars(args)
func(args["entities"], args["debug"])
if __name__ == '__main__':
main()
|
5ffb645e36fdbb3feae52ac6dfedb1b492f45b8f
|
examples/list.py
|
examples/list.py
|
# Copyright (c) 2013 Jordan Halterman <jordan.halterman@gmail.com>
# See LICENSE for details.
import sys, os
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
from active_redis import ActiveRedis
redis = ActiveRedis()
# Create an unnamed list.
mylist = redis.list()
# Append items to the list.
mylist.append('foo')
mylist.append('bar')
# Note that when appending a complex data structure the structure
# will be serialized to JSON when written to Redis. However, the
# structure will still be monitored for changes, so even once a
# list is serialized it can still be mutated, and Active Redis
# will capture changes and re-serialize the list.
mylist.append(['foo', 'bar'])
mylist[2].append('baz')
# We can also create a named list by passing a key to the constructor.
mylist = redis.list('mylist')
mylist.append('foo')
del mylist
mylist = redis.list('mylist')
print mylist # [u'foo']
mylist.delete()
print mylist # []
|
# Copyright (c) 2013 Jordan Halterman <jordan.halterman@gmail.com>
# See LICENSE for details.
import sys, os
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
from active_redis import ActiveRedis
redis = ActiveRedis()
# Create an unnamed list.
mylist = redis.list()
# Append items to the list.
mylist.append('foo')
mylist.append('bar')
# Note that when appending a complex data structure the structure
# will be serialized to JSON when written to Redis. However, the
# structure will still be monitored for changes, so even once a
# list is serialized it can still be mutated, and Active Redis
# will capture changes and re-serialize the list.
mylist.append(['foo', 'bar'])
mylist[2].append('baz')
mylist.delete()
# We can also create a named list by passing a key to the constructor.
mylist = redis.list('mylist')
mylist.append('foo')
del mylist
mylist = redis.list('mylist')
print mylist # [u'foo']
mylist.delete()
print mylist # []
|
Update dict and set examples.
|
Update dict and set examples.
|
Python
|
mit
|
kuujo/active-redis
|
# Copyright (c) 2013 Jordan Halterman <jordan.halterman@gmail.com>
# See LICENSE for details.
import sys, os
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
from active_redis import ActiveRedis
redis = ActiveRedis()
# Create an unnamed list.
mylist = redis.list()
# Append items to the list.
mylist.append('foo')
mylist.append('bar')
# Note that when appending a complex data structure the structure
# will be serialized to JSON when written to Redis. However, the
# structure will still be monitored for changes, so even once a
# list is serialized it can still be mutated, and Active Redis
# will capture changes and re-serialize the list.
mylist.append(['foo', 'bar'])
mylist[2].append('baz')
# We can also create a named list by passing a key to the constructor.
mylist = redis.list('mylist')
mylist.append('foo')
del mylist
mylist = redis.list('mylist')
print mylist # [u'foo']
mylist.delete()
print mylist # []
Update dict and set examples.
|
# Copyright (c) 2013 Jordan Halterman <jordan.halterman@gmail.com>
# See LICENSE for details.
import sys, os
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
from active_redis import ActiveRedis
redis = ActiveRedis()
# Create an unnamed list.
mylist = redis.list()
# Append items to the list.
mylist.append('foo')
mylist.append('bar')
# Note that when appending a complex data structure the structure
# will be serialized to JSON when written to Redis. However, the
# structure will still be monitored for changes, so even once a
# list is serialized it can still be mutated, and Active Redis
# will capture changes and re-serialize the list.
mylist.append(['foo', 'bar'])
mylist[2].append('baz')
mylist.delete()
# We can also create a named list by passing a key to the constructor.
mylist = redis.list('mylist')
mylist.append('foo')
del mylist
mylist = redis.list('mylist')
print mylist # [u'foo']
mylist.delete()
print mylist # []
|
<commit_before># Copyright (c) 2013 Jordan Halterman <jordan.halterman@gmail.com>
# See LICENSE for details.
import sys, os
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
from active_redis import ActiveRedis
redis = ActiveRedis()
# Create an unnamed list.
mylist = redis.list()
# Append items to the list.
mylist.append('foo')
mylist.append('bar')
# Note that when appending a complex data structure the structure
# will be serialized to JSON when written to Redis. However, the
# structure will still be monitored for changes, so even once a
# list is serialized it can still be mutated, and Active Redis
# will capture changes and re-serialize the list.
mylist.append(['foo', 'bar'])
mylist[2].append('baz')
# We can also create a named list by passing a key to the constructor.
mylist = redis.list('mylist')
mylist.append('foo')
del mylist
mylist = redis.list('mylist')
print mylist # [u'foo']
mylist.delete()
print mylist # []
<commit_msg>Update dict and set examples.<commit_after>
|
# Copyright (c) 2013 Jordan Halterman <jordan.halterman@gmail.com>
# See LICENSE for details.
import sys, os
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
from active_redis import ActiveRedis
redis = ActiveRedis()
# Create an unnamed list.
mylist = redis.list()
# Append items to the list.
mylist.append('foo')
mylist.append('bar')
# Note that when appending a complex data structure the structure
# will be serialized to JSON when written to Redis. However, the
# structure will still be monitored for changes, so even once a
# list is serialized it can still be mutated, and Active Redis
# will capture changes and re-serialize the list.
mylist.append(['foo', 'bar'])
mylist[2].append('baz')
mylist.delete()
# We can also create a named list by passing a key to the constructor.
mylist = redis.list('mylist')
mylist.append('foo')
del mylist
mylist = redis.list('mylist')
print mylist # [u'foo']
mylist.delete()
print mylist # []
|
# Copyright (c) 2013 Jordan Halterman <jordan.halterman@gmail.com>
# See LICENSE for details.
import sys, os
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
from active_redis import ActiveRedis
redis = ActiveRedis()
# Create an unnamed list.
mylist = redis.list()
# Append items to the list.
mylist.append('foo')
mylist.append('bar')
# Note that when appending a complex data structure the structure
# will be serialized to JSON when written to Redis. However, the
# structure will still be monitored for changes, so even once a
# list is serialized it can still be mutated, and Active Redis
# will capture changes and re-serialize the list.
mylist.append(['foo', 'bar'])
mylist[2].append('baz')
# We can also create a named list by passing a key to the constructor.
mylist = redis.list('mylist')
mylist.append('foo')
del mylist
mylist = redis.list('mylist')
print mylist # [u'foo']
mylist.delete()
print mylist # []
Update dict and set examples.# Copyright (c) 2013 Jordan Halterman <jordan.halterman@gmail.com>
# See LICENSE for details.
import sys, os
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
from active_redis import ActiveRedis
redis = ActiveRedis()
# Create an unnamed list.
mylist = redis.list()
# Append items to the list.
mylist.append('foo')
mylist.append('bar')
# Note that when appending a complex data structure the structure
# will be serialized to JSON when written to Redis. However, the
# structure will still be monitored for changes, so even once a
# list is serialized it can still be mutated, and Active Redis
# will capture changes and re-serialize the list.
mylist.append(['foo', 'bar'])
mylist[2].append('baz')
mylist.delete()
# We can also create a named list by passing a key to the constructor.
mylist = redis.list('mylist')
mylist.append('foo')
del mylist
mylist = redis.list('mylist')
print mylist # [u'foo']
mylist.delete()
print mylist # []
|
<commit_before># Copyright (c) 2013 Jordan Halterman <jordan.halterman@gmail.com>
# See LICENSE for details.
import sys, os
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
from active_redis import ActiveRedis
redis = ActiveRedis()
# Create an unnamed list.
mylist = redis.list()
# Append items to the list.
mylist.append('foo')
mylist.append('bar')
# Note that when appending a complex data structure the structure
# will be serialized to JSON when written to Redis. However, the
# structure will still be monitored for changes, so even once a
# list is serialized it can still be mutated, and Active Redis
# will capture changes and re-serialize the list.
mylist.append(['foo', 'bar'])
mylist[2].append('baz')
# We can also create a named list by passing a key to the constructor.
mylist = redis.list('mylist')
mylist.append('foo')
del mylist
mylist = redis.list('mylist')
print mylist # [u'foo']
mylist.delete()
print mylist # []
<commit_msg>Update dict and set examples.<commit_after># Copyright (c) 2013 Jordan Halterman <jordan.halterman@gmail.com>
# See LICENSE for details.
import sys, os
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
from active_redis import ActiveRedis
redis = ActiveRedis()
# Create an unnamed list.
mylist = redis.list()
# Append items to the list.
mylist.append('foo')
mylist.append('bar')
# Note that when appending a complex data structure the structure
# will be serialized to JSON when written to Redis. However, the
# structure will still be monitored for changes, so even once a
# list is serialized it can still be mutated, and Active Redis
# will capture changes and re-serialize the list.
mylist.append(['foo', 'bar'])
mylist[2].append('baz')
mylist.delete()
# We can also create a named list by passing a key to the constructor.
mylist = redis.list('mylist')
mylist.append('foo')
del mylist
mylist = redis.list('mylist')
print mylist # [u'foo']
mylist.delete()
print mylist # []
|
ce4bb4b0868e45459771531b9008f492f920c406
|
project/commands/main.py
|
project/commands/main.py
|
"""The ``main`` function chooses and runs a subcommand."""
from __future__ import absolute_import, print_function
from argparse import ArgumentParser, RawDescriptionHelpFormatter
import project.commands.launch as launch
import project.commands.prepare as prepare
import project.commands.activate as activate
def _run_parser(args):
"""Internal function to run the parsing of params and run the commands. Allows mocking."""
parser = ArgumentParser("Anaconda project tool", epilog=__doc__, formatter_class=RawDescriptionHelpFormatter)
subparsers = parser.add_subparsers()
preset = subparsers.add_parser('launch', description="Runs the project")
preset.add_argument('project_dir', default='.')
# preset.add_argument('ui_mode')
preset.set_defaults(main=launch.main)
preset = subparsers.add_parser('prepare', description="Configure the project to run.")
preset.add_argument('project_dir', default='.')
preset.set_defaults(main=prepare.main)
preset = subparsers.add_parser('activate', description="Prepare project and outputs lines to be sourced.")
preset.add_argument('project_dir', default='.')
preset.set_defaults(main=activate.main)
args = parser.parse_args(args)
args.main(args.project_dir)
def main(argv):
"""Start the launch command."""
_run_parser(argv)
|
"""The ``main`` function chooses and runs a subcommand."""
from __future__ import absolute_import, print_function
import sys
from argparse import ArgumentParser, RawDescriptionHelpFormatter
import project.commands.launch as launch
import project.commands.prepare as prepare
import project.commands.activate as activate
def _run_parser(args):
"""Internal function to run the parsing of params and run the commands. Allows mocking."""
parser = ArgumentParser("Anaconda project tool", epilog=__doc__, formatter_class=RawDescriptionHelpFormatter)
subparsers = parser.add_subparsers()
preset = subparsers.add_parser('launch', description="Runs the project")
preset.add_argument('project_dir', default='.')
# preset.add_argument('ui_mode')
preset.set_defaults(main=launch.main)
preset = subparsers.add_parser('prepare', description="Configure the project to run.")
preset.add_argument('project_dir', default='.')
preset.set_defaults(main=prepare.main)
preset = subparsers.add_parser('activate', description="Prepare project and outputs lines to be sourced.")
preset.add_argument('project_dir', default='.')
preset.set_defaults(main=activate.main)
args = parser.parse_args(args)
args.main(args.project_dir)
def main(argv):
"""Start the launch command."""
_run_parser(argv)
# the various sub-mains exit(0) if they succeed, if we get here
# we must not have called one of them
sys.exit(1)
|
Revert "remove unecessary sys.exit call"
|
Revert "remove unecessary sys.exit call"
This reverts commit 50b79fec1e96c1e5e5cc17f58f2c4ccfba16d6d6.
|
Python
|
bsd-3-clause
|
conda/kapsel,conda/kapsel
|
"""The ``main`` function chooses and runs a subcommand."""
from __future__ import absolute_import, print_function
from argparse import ArgumentParser, RawDescriptionHelpFormatter
import project.commands.launch as launch
import project.commands.prepare as prepare
import project.commands.activate as activate
def _run_parser(args):
"""Internal function to run the parsing of params and run the commands. Allows mocking."""
parser = ArgumentParser("Anaconda project tool", epilog=__doc__, formatter_class=RawDescriptionHelpFormatter)
subparsers = parser.add_subparsers()
preset = subparsers.add_parser('launch', description="Runs the project")
preset.add_argument('project_dir', default='.')
# preset.add_argument('ui_mode')
preset.set_defaults(main=launch.main)
preset = subparsers.add_parser('prepare', description="Configure the project to run.")
preset.add_argument('project_dir', default='.')
preset.set_defaults(main=prepare.main)
preset = subparsers.add_parser('activate', description="Prepare project and outputs lines to be sourced.")
preset.add_argument('project_dir', default='.')
preset.set_defaults(main=activate.main)
args = parser.parse_args(args)
args.main(args.project_dir)
def main(argv):
"""Start the launch command."""
_run_parser(argv)
Revert "remove unecessary sys.exit call"
This reverts commit 50b79fec1e96c1e5e5cc17f58f2c4ccfba16d6d6.
|
"""The ``main`` function chooses and runs a subcommand."""
from __future__ import absolute_import, print_function
import sys
from argparse import ArgumentParser, RawDescriptionHelpFormatter
import project.commands.launch as launch
import project.commands.prepare as prepare
import project.commands.activate as activate
def _run_parser(args):
"""Internal function to run the parsing of params and run the commands. Allows mocking."""
parser = ArgumentParser("Anaconda project tool", epilog=__doc__, formatter_class=RawDescriptionHelpFormatter)
subparsers = parser.add_subparsers()
preset = subparsers.add_parser('launch', description="Runs the project")
preset.add_argument('project_dir', default='.')
# preset.add_argument('ui_mode')
preset.set_defaults(main=launch.main)
preset = subparsers.add_parser('prepare', description="Configure the project to run.")
preset.add_argument('project_dir', default='.')
preset.set_defaults(main=prepare.main)
preset = subparsers.add_parser('activate', description="Prepare project and outputs lines to be sourced.")
preset.add_argument('project_dir', default='.')
preset.set_defaults(main=activate.main)
args = parser.parse_args(args)
args.main(args.project_dir)
def main(argv):
"""Start the launch command."""
_run_parser(argv)
# the various sub-mains exit(0) if they succeed, if we get here
# we must not have called one of them
sys.exit(1)
|
<commit_before>"""The ``main`` function chooses and runs a subcommand."""
from __future__ import absolute_import, print_function
from argparse import ArgumentParser, RawDescriptionHelpFormatter
import project.commands.launch as launch
import project.commands.prepare as prepare
import project.commands.activate as activate
def _run_parser(args):
"""Internal function to run the parsing of params and run the commands. Allows mocking."""
parser = ArgumentParser("Anaconda project tool", epilog=__doc__, formatter_class=RawDescriptionHelpFormatter)
subparsers = parser.add_subparsers()
preset = subparsers.add_parser('launch', description="Runs the project")
preset.add_argument('project_dir', default='.')
# preset.add_argument('ui_mode')
preset.set_defaults(main=launch.main)
preset = subparsers.add_parser('prepare', description="Configure the project to run.")
preset.add_argument('project_dir', default='.')
preset.set_defaults(main=prepare.main)
preset = subparsers.add_parser('activate', description="Prepare project and outputs lines to be sourced.")
preset.add_argument('project_dir', default='.')
preset.set_defaults(main=activate.main)
args = parser.parse_args(args)
args.main(args.project_dir)
def main(argv):
"""Start the launch command."""
_run_parser(argv)
<commit_msg>Revert "remove unecessary sys.exit call"
This reverts commit 50b79fec1e96c1e5e5cc17f58f2c4ccfba16d6d6.<commit_after>
|
"""The ``main`` function chooses and runs a subcommand."""
from __future__ import absolute_import, print_function
import sys
from argparse import ArgumentParser, RawDescriptionHelpFormatter
import project.commands.launch as launch
import project.commands.prepare as prepare
import project.commands.activate as activate
def _run_parser(args):
"""Internal function to run the parsing of params and run the commands. Allows mocking."""
parser = ArgumentParser("Anaconda project tool", epilog=__doc__, formatter_class=RawDescriptionHelpFormatter)
subparsers = parser.add_subparsers()
preset = subparsers.add_parser('launch', description="Runs the project")
preset.add_argument('project_dir', default='.')
# preset.add_argument('ui_mode')
preset.set_defaults(main=launch.main)
preset = subparsers.add_parser('prepare', description="Configure the project to run.")
preset.add_argument('project_dir', default='.')
preset.set_defaults(main=prepare.main)
preset = subparsers.add_parser('activate', description="Prepare project and outputs lines to be sourced.")
preset.add_argument('project_dir', default='.')
preset.set_defaults(main=activate.main)
args = parser.parse_args(args)
args.main(args.project_dir)
def main(argv):
"""Start the launch command."""
_run_parser(argv)
# the various sub-mains exit(0) if they succeed, if we get here
# we must not have called one of them
sys.exit(1)
|
"""The ``main`` function chooses and runs a subcommand."""
from __future__ import absolute_import, print_function
from argparse import ArgumentParser, RawDescriptionHelpFormatter
import project.commands.launch as launch
import project.commands.prepare as prepare
import project.commands.activate as activate
def _run_parser(args):
"""Internal function to run the parsing of params and run the commands. Allows mocking."""
parser = ArgumentParser("Anaconda project tool", epilog=__doc__, formatter_class=RawDescriptionHelpFormatter)
subparsers = parser.add_subparsers()
preset = subparsers.add_parser('launch', description="Runs the project")
preset.add_argument('project_dir', default='.')
# preset.add_argument('ui_mode')
preset.set_defaults(main=launch.main)
preset = subparsers.add_parser('prepare', description="Configure the project to run.")
preset.add_argument('project_dir', default='.')
preset.set_defaults(main=prepare.main)
preset = subparsers.add_parser('activate', description="Prepare project and outputs lines to be sourced.")
preset.add_argument('project_dir', default='.')
preset.set_defaults(main=activate.main)
args = parser.parse_args(args)
args.main(args.project_dir)
def main(argv):
"""Start the launch command."""
_run_parser(argv)
Revert "remove unecessary sys.exit call"
This reverts commit 50b79fec1e96c1e5e5cc17f58f2c4ccfba16d6d6."""The ``main`` function chooses and runs a subcommand."""
from __future__ import absolute_import, print_function
import sys
from argparse import ArgumentParser, RawDescriptionHelpFormatter
import project.commands.launch as launch
import project.commands.prepare as prepare
import project.commands.activate as activate
def _run_parser(args):
"""Internal function to run the parsing of params and run the commands. Allows mocking."""
parser = ArgumentParser("Anaconda project tool", epilog=__doc__, formatter_class=RawDescriptionHelpFormatter)
subparsers = parser.add_subparsers()
preset = subparsers.add_parser('launch', description="Runs the project")
preset.add_argument('project_dir', default='.')
# preset.add_argument('ui_mode')
preset.set_defaults(main=launch.main)
preset = subparsers.add_parser('prepare', description="Configure the project to run.")
preset.add_argument('project_dir', default='.')
preset.set_defaults(main=prepare.main)
preset = subparsers.add_parser('activate', description="Prepare project and outputs lines to be sourced.")
preset.add_argument('project_dir', default='.')
preset.set_defaults(main=activate.main)
args = parser.parse_args(args)
args.main(args.project_dir)
def main(argv):
"""Start the launch command."""
_run_parser(argv)
# the various sub-mains exit(0) if they succeed, if we get here
# we must not have called one of them
sys.exit(1)
|
<commit_before>"""The ``main`` function chooses and runs a subcommand."""
from __future__ import absolute_import, print_function
from argparse import ArgumentParser, RawDescriptionHelpFormatter
import project.commands.launch as launch
import project.commands.prepare as prepare
import project.commands.activate as activate
def _run_parser(args):
"""Internal function to run the parsing of params and run the commands. Allows mocking."""
parser = ArgumentParser("Anaconda project tool", epilog=__doc__, formatter_class=RawDescriptionHelpFormatter)
subparsers = parser.add_subparsers()
preset = subparsers.add_parser('launch', description="Runs the project")
preset.add_argument('project_dir', default='.')
# preset.add_argument('ui_mode')
preset.set_defaults(main=launch.main)
preset = subparsers.add_parser('prepare', description="Configure the project to run.")
preset.add_argument('project_dir', default='.')
preset.set_defaults(main=prepare.main)
preset = subparsers.add_parser('activate', description="Prepare project and outputs lines to be sourced.")
preset.add_argument('project_dir', default='.')
preset.set_defaults(main=activate.main)
args = parser.parse_args(args)
args.main(args.project_dir)
def main(argv):
"""Start the launch command."""
_run_parser(argv)
<commit_msg>Revert "remove unecessary sys.exit call"
This reverts commit 50b79fec1e96c1e5e5cc17f58f2c4ccfba16d6d6.<commit_after>"""The ``main`` function chooses and runs a subcommand."""
from __future__ import absolute_import, print_function
import sys
from argparse import ArgumentParser, RawDescriptionHelpFormatter
import project.commands.launch as launch
import project.commands.prepare as prepare
import project.commands.activate as activate
def _run_parser(args):
"""Internal function to run the parsing of params and run the commands. Allows mocking."""
parser = ArgumentParser("Anaconda project tool", epilog=__doc__, formatter_class=RawDescriptionHelpFormatter)
subparsers = parser.add_subparsers()
preset = subparsers.add_parser('launch', description="Runs the project")
preset.add_argument('project_dir', default='.')
# preset.add_argument('ui_mode')
preset.set_defaults(main=launch.main)
preset = subparsers.add_parser('prepare', description="Configure the project to run.")
preset.add_argument('project_dir', default='.')
preset.set_defaults(main=prepare.main)
preset = subparsers.add_parser('activate', description="Prepare project and outputs lines to be sourced.")
preset.add_argument('project_dir', default='.')
preset.set_defaults(main=activate.main)
args = parser.parse_args(args)
args.main(args.project_dir)
def main(argv):
"""Start the launch command."""
_run_parser(argv)
# the various sub-mains exit(0) if they succeed, if we get here
# we must not have called one of them
sys.exit(1)
|
f808dbbd28e750a7be440394865e708c78938c6c
|
test/test_compiled.py
|
test/test_compiled.py
|
"""
Test compiled module
"""
import os
import platform
import sys
import jedi
from .helpers import cwd_at
@cwd_at('extensions')
def test_compiled():
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled." % package_name)
assert len(s.completions()) >= 2
|
"""
Test compiled module
"""
import os
import platform
import sys
import jedi
from .helpers import cwd_at
@cwd_at('extensions')
def test_compiled():
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
sys.path.insert(0, os.getcwd())
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled." % package_name)
assert len(s.completions()) >= 2
|
Change sys.path for the test to succeed.
|
Change sys.path for the test to succeed.
Tested locally with a python3 extension module (in
/extensions/compiled33).
Also tested that reverting a75773cf9f7a9fde2a7b2c77b9846b4f0dd5b711 make
the test fail.
|
Python
|
mit
|
flurischt/jedi,tjwei/jedi,jonashaag/jedi,dwillmer/jedi,tjwei/jedi,WoLpH/jedi,flurischt/jedi,dwillmer/jedi,mfussenegger/jedi,jonashaag/jedi,WoLpH/jedi,mfussenegger/jedi
|
"""
Test compiled module
"""
import os
import platform
import sys
import jedi
from .helpers import cwd_at
@cwd_at('extensions')
def test_compiled():
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled." % package_name)
assert len(s.completions()) >= 2
Change sys.path for the test to succeed.
Tested locally with a python3 extension module (in
/extensions/compiled33).
Also tested that reverting a75773cf9f7a9fde2a7b2c77b9846b4f0dd5b711 make
the test fail.
|
"""
Test compiled module
"""
import os
import platform
import sys
import jedi
from .helpers import cwd_at
@cwd_at('extensions')
def test_compiled():
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
sys.path.insert(0, os.getcwd())
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled." % package_name)
assert len(s.completions()) >= 2
|
<commit_before>"""
Test compiled module
"""
import os
import platform
import sys
import jedi
from .helpers import cwd_at
@cwd_at('extensions')
def test_compiled():
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled." % package_name)
assert len(s.completions()) >= 2
<commit_msg>Change sys.path for the test to succeed.
Tested locally with a python3 extension module (in
/extensions/compiled33).
Also tested that reverting a75773cf9f7a9fde2a7b2c77b9846b4f0dd5b711 make
the test fail.<commit_after>
|
"""
Test compiled module
"""
import os
import platform
import sys
import jedi
from .helpers import cwd_at
@cwd_at('extensions')
def test_compiled():
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
sys.path.insert(0, os.getcwd())
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled." % package_name)
assert len(s.completions()) >= 2
|
"""
Test compiled module
"""
import os
import platform
import sys
import jedi
from .helpers import cwd_at
@cwd_at('extensions')
def test_compiled():
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled." % package_name)
assert len(s.completions()) >= 2
Change sys.path for the test to succeed.
Tested locally with a python3 extension module (in
/extensions/compiled33).
Also tested that reverting a75773cf9f7a9fde2a7b2c77b9846b4f0dd5b711 make
the test fail."""
Test compiled module
"""
import os
import platform
import sys
import jedi
from .helpers import cwd_at
@cwd_at('extensions')
def test_compiled():
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
sys.path.insert(0, os.getcwd())
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled." % package_name)
assert len(s.completions()) >= 2
|
<commit_before>"""
Test compiled module
"""
import os
import platform
import sys
import jedi
from .helpers import cwd_at
@cwd_at('extensions')
def test_compiled():
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled." % package_name)
assert len(s.completions()) >= 2
<commit_msg>Change sys.path for the test to succeed.
Tested locally with a python3 extension module (in
/extensions/compiled33).
Also tested that reverting a75773cf9f7a9fde2a7b2c77b9846b4f0dd5b711 make
the test fail.<commit_after>"""
Test compiled module
"""
import os
import platform
import sys
import jedi
from .helpers import cwd_at
@cwd_at('extensions')
def test_compiled():
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
sys.path.insert(0, os.getcwd())
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled." % package_name)
assert len(s.completions()) >= 2
|
00df7af980d0e469173ad4f3d82cb7c68b51ea21
|
fancyflags/_metadata.py
|
fancyflags/_metadata.py
|
# Copyright 2021 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Package metadata.
This is kept in a separate module so that it can be imported from setup.py, at
a time when the package dependencies may not have been installed yet.
"""
__version__ = 'a0.1' # https://www.python.org/dev/peps/pep-0440/
|
# Copyright 2021 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Package metadata.
This is kept in a separate module so that it can be imported from setup.py, at
a time when the package dependencies may not have been installed yet.
"""
__version__ = '1.0' # https://www.python.org/dev/peps/pep-0440/
|
Update fancyflags version to 1.0
|
Update fancyflags version to 1.0
PiperOrigin-RevId: 354582100
Change-Id: Ib4ce53fd0fc197c0f92cd8d46348dec8849c51df
|
Python
|
apache-2.0
|
deepmind/fancyflags
|
# Copyright 2021 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Package metadata.
This is kept in a separate module so that it can be imported from setup.py, at
a time when the package dependencies may not have been installed yet.
"""
__version__ = 'a0.1' # https://www.python.org/dev/peps/pep-0440/
Update fancyflags version to 1.0
PiperOrigin-RevId: 354582100
Change-Id: Ib4ce53fd0fc197c0f92cd8d46348dec8849c51df
|
# Copyright 2021 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Package metadata.
This is kept in a separate module so that it can be imported from setup.py, at
a time when the package dependencies may not have been installed yet.
"""
__version__ = '1.0' # https://www.python.org/dev/peps/pep-0440/
|
<commit_before># Copyright 2021 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Package metadata.
This is kept in a separate module so that it can be imported from setup.py, at
a time when the package dependencies may not have been installed yet.
"""
__version__ = 'a0.1' # https://www.python.org/dev/peps/pep-0440/
<commit_msg>Update fancyflags version to 1.0
PiperOrigin-RevId: 354582100
Change-Id: Ib4ce53fd0fc197c0f92cd8d46348dec8849c51df<commit_after>
|
# Copyright 2021 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Package metadata.
This is kept in a separate module so that it can be imported from setup.py, at
a time when the package dependencies may not have been installed yet.
"""
__version__ = '1.0' # https://www.python.org/dev/peps/pep-0440/
|
# Copyright 2021 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Package metadata.
This is kept in a separate module so that it can be imported from setup.py, at
a time when the package dependencies may not have been installed yet.
"""
__version__ = 'a0.1' # https://www.python.org/dev/peps/pep-0440/
Update fancyflags version to 1.0
PiperOrigin-RevId: 354582100
Change-Id: Ib4ce53fd0fc197c0f92cd8d46348dec8849c51df# Copyright 2021 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Package metadata.
This is kept in a separate module so that it can be imported from setup.py, at
a time when the package dependencies may not have been installed yet.
"""
__version__ = '1.0' # https://www.python.org/dev/peps/pep-0440/
|
<commit_before># Copyright 2021 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Package metadata.
This is kept in a separate module so that it can be imported from setup.py, at
a time when the package dependencies may not have been installed yet.
"""
__version__ = 'a0.1' # https://www.python.org/dev/peps/pep-0440/
<commit_msg>Update fancyflags version to 1.0
PiperOrigin-RevId: 354582100
Change-Id: Ib4ce53fd0fc197c0f92cd8d46348dec8849c51df<commit_after># Copyright 2021 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Package metadata.
This is kept in a separate module so that it can be imported from setup.py, at
a time when the package dependencies may not have been installed yet.
"""
__version__ = '1.0' # https://www.python.org/dev/peps/pep-0440/
|
7c4f3f8b4d3ac92290af45bba4995ba266b78704
|
osuapi/__init__.py
|
osuapi/__init__.py
|
__title__ = "osssss"
__author__ = "khazhyk"
__license__ = "MIT"
__copyright__ = "Copyright khazhyk"
__version__ = "0.0.8"
from .osu import OsuApi
from .connectors import *
from .model import OsuMode, OsuMod, BeatmapStatus, BeatmapGenre, BeatmapLanguage
|
__title__ = "osuapi"
__author__ = "khazhyk"
__license__ = "MIT"
__copyright__ = "Copyright khazhyk"
__version__ = "0.0.8"
from .osu import OsuApi
from .connectors import *
from .model import OsuMode, OsuMod, BeatmapStatus, BeatmapGenre, BeatmapLanguage
|
Rename to osuapi to match module.
|
Rename to osuapi to match module.
|
Python
|
mit
|
Phxntxm/osuapi,khazhyk/osuapi
|
__title__ = "osssss"
__author__ = "khazhyk"
__license__ = "MIT"
__copyright__ = "Copyright khazhyk"
__version__ = "0.0.8"
from .osu import OsuApi
from .connectors import *
from .model import OsuMode, OsuMod, BeatmapStatus, BeatmapGenre, BeatmapLanguage
Rename to osuapi to match module.
|
__title__ = "osuapi"
__author__ = "khazhyk"
__license__ = "MIT"
__copyright__ = "Copyright khazhyk"
__version__ = "0.0.8"
from .osu import OsuApi
from .connectors import *
from .model import OsuMode, OsuMod, BeatmapStatus, BeatmapGenre, BeatmapLanguage
|
<commit_before>__title__ = "osssss"
__author__ = "khazhyk"
__license__ = "MIT"
__copyright__ = "Copyright khazhyk"
__version__ = "0.0.8"
from .osu import OsuApi
from .connectors import *
from .model import OsuMode, OsuMod, BeatmapStatus, BeatmapGenre, BeatmapLanguage
<commit_msg>Rename to osuapi to match module.<commit_after>
|
__title__ = "osuapi"
__author__ = "khazhyk"
__license__ = "MIT"
__copyright__ = "Copyright khazhyk"
__version__ = "0.0.8"
from .osu import OsuApi
from .connectors import *
from .model import OsuMode, OsuMod, BeatmapStatus, BeatmapGenre, BeatmapLanguage
|
__title__ = "osssss"
__author__ = "khazhyk"
__license__ = "MIT"
__copyright__ = "Copyright khazhyk"
__version__ = "0.0.8"
from .osu import OsuApi
from .connectors import *
from .model import OsuMode, OsuMod, BeatmapStatus, BeatmapGenre, BeatmapLanguage
Rename to osuapi to match module.__title__ = "osuapi"
__author__ = "khazhyk"
__license__ = "MIT"
__copyright__ = "Copyright khazhyk"
__version__ = "0.0.8"
from .osu import OsuApi
from .connectors import *
from .model import OsuMode, OsuMod, BeatmapStatus, BeatmapGenre, BeatmapLanguage
|
<commit_before>__title__ = "osssss"
__author__ = "khazhyk"
__license__ = "MIT"
__copyright__ = "Copyright khazhyk"
__version__ = "0.0.8"
from .osu import OsuApi
from .connectors import *
from .model import OsuMode, OsuMod, BeatmapStatus, BeatmapGenre, BeatmapLanguage
<commit_msg>Rename to osuapi to match module.<commit_after>__title__ = "osuapi"
__author__ = "khazhyk"
__license__ = "MIT"
__copyright__ = "Copyright khazhyk"
__version__ = "0.0.8"
from .osu import OsuApi
from .connectors import *
from .model import OsuMode, OsuMod, BeatmapStatus, BeatmapGenre, BeatmapLanguage
|
5a2d8a580795be37312a6ccfd2e3d0ffca28dc1c
|
bot/config.py
|
bot/config.py
|
import logging
import pytz
BOT_URL = ""
LAST_UPDATE_ID_FILE = "last_update"
TAGS_FILE = "tags"
POLL_PERIOD = 1
MAX_TAGS = 5
LOGGING_LEVEL = logging.DEBUG
LOCAL_TIMEZONE = pytz.timezone('America/Mexico_City')
|
import os
import logging
import pytz
BOT_URL = os.getenv("BOT_URL", "")
LAST_UPDATE_ID_FILE = "last_update"
TAGS_FILE = "tags"
POLL_PERIOD = 1
MAX_TAGS = 5
LOGGING_LEVEL = logging.DEBUG
LOCAL_TIMEZONE = pytz.timezone('America/Mexico_City')
|
Enable BOT_URL from env variable
|
Enable BOT_URL from env variable
|
Python
|
mit
|
cesar0094/telegram-tldrbot
|
import logging
import pytz
BOT_URL = ""
LAST_UPDATE_ID_FILE = "last_update"
TAGS_FILE = "tags"
POLL_PERIOD = 1
MAX_TAGS = 5
LOGGING_LEVEL = logging.DEBUG
LOCAL_TIMEZONE = pytz.timezone('America/Mexico_City')
Enable BOT_URL from env variable
|
import os
import logging
import pytz
BOT_URL = os.getenv("BOT_URL", "")
LAST_UPDATE_ID_FILE = "last_update"
TAGS_FILE = "tags"
POLL_PERIOD = 1
MAX_TAGS = 5
LOGGING_LEVEL = logging.DEBUG
LOCAL_TIMEZONE = pytz.timezone('America/Mexico_City')
|
<commit_before>import logging
import pytz
BOT_URL = ""
LAST_UPDATE_ID_FILE = "last_update"
TAGS_FILE = "tags"
POLL_PERIOD = 1
MAX_TAGS = 5
LOGGING_LEVEL = logging.DEBUG
LOCAL_TIMEZONE = pytz.timezone('America/Mexico_City')
<commit_msg>Enable BOT_URL from env variable<commit_after>
|
import os
import logging
import pytz
BOT_URL = os.getenv("BOT_URL", "")
LAST_UPDATE_ID_FILE = "last_update"
TAGS_FILE = "tags"
POLL_PERIOD = 1
MAX_TAGS = 5
LOGGING_LEVEL = logging.DEBUG
LOCAL_TIMEZONE = pytz.timezone('America/Mexico_City')
|
import logging
import pytz
BOT_URL = ""
LAST_UPDATE_ID_FILE = "last_update"
TAGS_FILE = "tags"
POLL_PERIOD = 1
MAX_TAGS = 5
LOGGING_LEVEL = logging.DEBUG
LOCAL_TIMEZONE = pytz.timezone('America/Mexico_City')
Enable BOT_URL from env variableimport os
import logging
import pytz
BOT_URL = os.getenv("BOT_URL", "")
LAST_UPDATE_ID_FILE = "last_update"
TAGS_FILE = "tags"
POLL_PERIOD = 1
MAX_TAGS = 5
LOGGING_LEVEL = logging.DEBUG
LOCAL_TIMEZONE = pytz.timezone('America/Mexico_City')
|
<commit_before>import logging
import pytz
BOT_URL = ""
LAST_UPDATE_ID_FILE = "last_update"
TAGS_FILE = "tags"
POLL_PERIOD = 1
MAX_TAGS = 5
LOGGING_LEVEL = logging.DEBUG
LOCAL_TIMEZONE = pytz.timezone('America/Mexico_City')
<commit_msg>Enable BOT_URL from env variable<commit_after>import os
import logging
import pytz
BOT_URL = os.getenv("BOT_URL", "")
LAST_UPDATE_ID_FILE = "last_update"
TAGS_FILE = "tags"
POLL_PERIOD = 1
MAX_TAGS = 5
LOGGING_LEVEL = logging.DEBUG
LOCAL_TIMEZONE = pytz.timezone('America/Mexico_City')
|
df227a375c1cf5fdd0ad23505799e7c6f7177b9c
|
InvenTree/InvenTree/validators.py
|
InvenTree/InvenTree/validators.py
|
"""
Custom field validators for InvenTree
"""
from django.core.exceptions import ValidationError
from django.utils.translation import gettext_lazy as _
def validate_part_name(value):
# Prevent some illegal characters in part names
for c in ['/', '\\', '|', '#', '$']:
if c in str(value):
raise ValidationError(
_('Invalid character in part name')
)
|
"""
Custom field validators for InvenTree
"""
from django.core.exceptions import ValidationError
from django.utils.translation import gettext_lazy as _
def validate_part_name(value):
# Prevent some illegal characters in part names
for c in ['|', '#', '$']:
if c in str(value):
raise ValidationError(
_('Invalid character in part name')
)
|
Allow some more chars in part names
|
Allow some more chars in part names
|
Python
|
mit
|
inventree/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree,SchrodingersGat/InvenTree
|
"""
Custom field validators for InvenTree
"""
from django.core.exceptions import ValidationError
from django.utils.translation import gettext_lazy as _
def validate_part_name(value):
# Prevent some illegal characters in part names
for c in ['/', '\\', '|', '#', '$']:
if c in str(value):
raise ValidationError(
_('Invalid character in part name')
)
Allow some more chars in part names
|
"""
Custom field validators for InvenTree
"""
from django.core.exceptions import ValidationError
from django.utils.translation import gettext_lazy as _
def validate_part_name(value):
# Prevent some illegal characters in part names
for c in ['|', '#', '$']:
if c in str(value):
raise ValidationError(
_('Invalid character in part name')
)
|
<commit_before>"""
Custom field validators for InvenTree
"""
from django.core.exceptions import ValidationError
from django.utils.translation import gettext_lazy as _
def validate_part_name(value):
# Prevent some illegal characters in part names
for c in ['/', '\\', '|', '#', '$']:
if c in str(value):
raise ValidationError(
_('Invalid character in part name')
)
<commit_msg>Allow some more chars in part names<commit_after>
|
"""
Custom field validators for InvenTree
"""
from django.core.exceptions import ValidationError
from django.utils.translation import gettext_lazy as _
def validate_part_name(value):
# Prevent some illegal characters in part names
for c in ['|', '#', '$']:
if c in str(value):
raise ValidationError(
_('Invalid character in part name')
)
|
"""
Custom field validators for InvenTree
"""
from django.core.exceptions import ValidationError
from django.utils.translation import gettext_lazy as _
def validate_part_name(value):
# Prevent some illegal characters in part names
for c in ['/', '\\', '|', '#', '$']:
if c in str(value):
raise ValidationError(
_('Invalid character in part name')
)
Allow some more chars in part names"""
Custom field validators for InvenTree
"""
from django.core.exceptions import ValidationError
from django.utils.translation import gettext_lazy as _
def validate_part_name(value):
# Prevent some illegal characters in part names
for c in ['|', '#', '$']:
if c in str(value):
raise ValidationError(
_('Invalid character in part name')
)
|
<commit_before>"""
Custom field validators for InvenTree
"""
from django.core.exceptions import ValidationError
from django.utils.translation import gettext_lazy as _
def validate_part_name(value):
# Prevent some illegal characters in part names
for c in ['/', '\\', '|', '#', '$']:
if c in str(value):
raise ValidationError(
_('Invalid character in part name')
)
<commit_msg>Allow some more chars in part names<commit_after>"""
Custom field validators for InvenTree
"""
from django.core.exceptions import ValidationError
from django.utils.translation import gettext_lazy as _
def validate_part_name(value):
# Prevent some illegal characters in part names
for c in ['|', '#', '$']:
if c in str(value):
raise ValidationError(
_('Invalid character in part name')
)
|
33496a58852bcdb2ef9f3cbe1881b06efd48b624
|
script/sample/submitshell.py
|
script/sample/submitshell.py
|
#!/usr/bin/env python
import multyvac
multyvac.config.set_key(api_key='admin', api_secret_key='12345', api_url='http://docker:8000/api')
jid = multyvac.shell_submit(cmd='for i in {1..10}; do echo $i && sleep 10; done')
print("Submitted job [{}].".format(jid))
job = multyvac.get(jid)
result = job.get_result()
print("Result: [{}]".format(result))
|
#!/usr/bin/env python
from __future__ import print_function
import multyvac
import sys
multyvac.config.set_key(api_key='admin', api_secret_key='12345', api_url='http://docker:8000/api')
jobs = {
"stdout result": {
"cmd": 'echo "success"',
},
"file result": {
"cmd": 'echo "success" > /tmp/out',
"_result_source": "file:/tmp/out",
},
"stdin": {
"cmd": 'cat',
"_stdin": "success",
},
}
longest = 0
for name in jobs.keys():
if len(name) > longest:
longest = len(name)
success = 0
failure = 0
for (name, kwargs) in jobs.items():
jid = multyvac.shell_submit(**kwargs)
print("{:<{}}: job {} ...".format(name, longest, jid), end='')
result = multyvac.get(jid).get_result().strip('\n')
print(" result [{}]".format(result))
if result == "success":
success += 1
else:
failure += 1
print("{} pass / {} fail".format(success, failure))
if failure > 0:
sys.exit(1)
|
Test different mechanisms for job submission.
|
Test different mechanisms for job submission.
|
Python
|
bsd-3-clause
|
cloudpipe/cloudpipe,cloudpipe/cloudpipe,cloudpipe/cloudpipe
|
#!/usr/bin/env python
import multyvac
multyvac.config.set_key(api_key='admin', api_secret_key='12345', api_url='http://docker:8000/api')
jid = multyvac.shell_submit(cmd='for i in {1..10}; do echo $i && sleep 10; done')
print("Submitted job [{}].".format(jid))
job = multyvac.get(jid)
result = job.get_result()
print("Result: [{}]".format(result))
Test different mechanisms for job submission.
|
#!/usr/bin/env python
from __future__ import print_function
import multyvac
import sys
multyvac.config.set_key(api_key='admin', api_secret_key='12345', api_url='http://docker:8000/api')
jobs = {
"stdout result": {
"cmd": 'echo "success"',
},
"file result": {
"cmd": 'echo "success" > /tmp/out',
"_result_source": "file:/tmp/out",
},
"stdin": {
"cmd": 'cat',
"_stdin": "success",
},
}
longest = 0
for name in jobs.keys():
if len(name) > longest:
longest = len(name)
success = 0
failure = 0
for (name, kwargs) in jobs.items():
jid = multyvac.shell_submit(**kwargs)
print("{:<{}}: job {} ...".format(name, longest, jid), end='')
result = multyvac.get(jid).get_result().strip('\n')
print(" result [{}]".format(result))
if result == "success":
success += 1
else:
failure += 1
print("{} pass / {} fail".format(success, failure))
if failure > 0:
sys.exit(1)
|
<commit_before>#!/usr/bin/env python
import multyvac
multyvac.config.set_key(api_key='admin', api_secret_key='12345', api_url='http://docker:8000/api')
jid = multyvac.shell_submit(cmd='for i in {1..10}; do echo $i && sleep 10; done')
print("Submitted job [{}].".format(jid))
job = multyvac.get(jid)
result = job.get_result()
print("Result: [{}]".format(result))
<commit_msg>Test different mechanisms for job submission.<commit_after>
|
#!/usr/bin/env python
from __future__ import print_function
import multyvac
import sys
multyvac.config.set_key(api_key='admin', api_secret_key='12345', api_url='http://docker:8000/api')
jobs = {
"stdout result": {
"cmd": 'echo "success"',
},
"file result": {
"cmd": 'echo "success" > /tmp/out',
"_result_source": "file:/tmp/out",
},
"stdin": {
"cmd": 'cat',
"_stdin": "success",
},
}
longest = 0
for name in jobs.keys():
if len(name) > longest:
longest = len(name)
success = 0
failure = 0
for (name, kwargs) in jobs.items():
jid = multyvac.shell_submit(**kwargs)
print("{:<{}}: job {} ...".format(name, longest, jid), end='')
result = multyvac.get(jid).get_result().strip('\n')
print(" result [{}]".format(result))
if result == "success":
success += 1
else:
failure += 1
print("{} pass / {} fail".format(success, failure))
if failure > 0:
sys.exit(1)
|
#!/usr/bin/env python
import multyvac
multyvac.config.set_key(api_key='admin', api_secret_key='12345', api_url='http://docker:8000/api')
jid = multyvac.shell_submit(cmd='for i in {1..10}; do echo $i && sleep 10; done')
print("Submitted job [{}].".format(jid))
job = multyvac.get(jid)
result = job.get_result()
print("Result: [{}]".format(result))
Test different mechanisms for job submission.#!/usr/bin/env python
from __future__ import print_function
import multyvac
import sys
multyvac.config.set_key(api_key='admin', api_secret_key='12345', api_url='http://docker:8000/api')
jobs = {
"stdout result": {
"cmd": 'echo "success"',
},
"file result": {
"cmd": 'echo "success" > /tmp/out',
"_result_source": "file:/tmp/out",
},
"stdin": {
"cmd": 'cat',
"_stdin": "success",
},
}
longest = 0
for name in jobs.keys():
if len(name) > longest:
longest = len(name)
success = 0
failure = 0
for (name, kwargs) in jobs.items():
jid = multyvac.shell_submit(**kwargs)
print("{:<{}}: job {} ...".format(name, longest, jid), end='')
result = multyvac.get(jid).get_result().strip('\n')
print(" result [{}]".format(result))
if result == "success":
success += 1
else:
failure += 1
print("{} pass / {} fail".format(success, failure))
if failure > 0:
sys.exit(1)
|
<commit_before>#!/usr/bin/env python
import multyvac
multyvac.config.set_key(api_key='admin', api_secret_key='12345', api_url='http://docker:8000/api')
jid = multyvac.shell_submit(cmd='for i in {1..10}; do echo $i && sleep 10; done')
print("Submitted job [{}].".format(jid))
job = multyvac.get(jid)
result = job.get_result()
print("Result: [{}]".format(result))
<commit_msg>Test different mechanisms for job submission.<commit_after>#!/usr/bin/env python
from __future__ import print_function
import multyvac
import sys
multyvac.config.set_key(api_key='admin', api_secret_key='12345', api_url='http://docker:8000/api')
jobs = {
"stdout result": {
"cmd": 'echo "success"',
},
"file result": {
"cmd": 'echo "success" > /tmp/out',
"_result_source": "file:/tmp/out",
},
"stdin": {
"cmd": 'cat',
"_stdin": "success",
},
}
longest = 0
for name in jobs.keys():
if len(name) > longest:
longest = len(name)
success = 0
failure = 0
for (name, kwargs) in jobs.items():
jid = multyvac.shell_submit(**kwargs)
print("{:<{}}: job {} ...".format(name, longest, jid), end='')
result = multyvac.get(jid).get_result().strip('\n')
print(" result [{}]".format(result))
if result == "success":
success += 1
else:
failure += 1
print("{} pass / {} fail".format(success, failure))
if failure > 0:
sys.exit(1)
|
ca138dac13d032ac8f55ca0a5ebf4e1ffe2cab72
|
fuckit_commit.py
|
fuckit_commit.py
|
'''
This module will send SMS reminders periodically, using Twilio.
The aim is to encourage user to code, commit and push to GitHub everyday
'''
import requests
from twilio.rest import TwilioRestClient
from datetime import datetime, date
def send_sms():
'''
Send SMS reminder
'''
config = {'account_sid' : '', 'auth_token' : ''}
client = TwilioRestClient(config['account_sid'], config['auth_token'])
message = client.messages.create(to="", from_="",
body="Hello there!")
def check_commit_activity():
'''
Check if there was any change in the commit history of the user
'''
get_recent_event = requests.get("https://api.github.com/users/ueg1990/events/public")
print datetime.strptime(get_recent_event.json()[0]['created_at'].split('T')[0], '%Y-%m-%d').date()
print date.today()
print datetime.strptime(get_recent_event.json()[0]['created_at'].split('T')[0], '%Y-%m-%d').date() == date.today()
print type(datetime.strptime(get_recent_event.json()[0]['created_at'].split('T')[0], '%Y-%m-%d').date()), type(date.today())
return False
def main():
check_commit_activity()
# send_sms()
if __name__ == "__main__":
main()
|
'''
This module will send SMS reminders periodically, using Twilio.
The aim is to encourage user to code, commit and push to GitHub everyday
'''
import requests
from twilio.rest import TwilioRestClient
from datetime import datetime, date
def send_sms():
'''
Send SMS reminder
'''
config = {'account_sid' : '', 'auth_token' : ''}
client = TwilioRestClient(config['account_sid'], config['auth_token'])
message = client.messages.create(to="", from_="",
body="Hello there!")
def check_commit_activity():
'''
Check if there was any change in the commit history of the user. If date of
latest event is the same as current date, commit was made, hence return True
else return False. Returning False triggers sending of an SMS reminder
'''
get_recent_event = requests.get("https://api.github.com/users/%s/events/public" % '')
event_date = datetime.strptime(get_recent_event.json()[0]['created_at'].split('T')[0], '%Y-%m-%d').date()
return event_date == date.today()
def main():
if not check_commit_activity():
send_sms()
if __name__ == "__main__":
main()
|
Add condition to check for latest commit activity
|
Add condition to check for latest commit activity
|
Python
|
mit
|
ueg1990/fuckit_commit
|
'''
This module will send SMS reminders periodically, using Twilio.
The aim is to encourage user to code, commit and push to GitHub everyday
'''
import requests
from twilio.rest import TwilioRestClient
from datetime import datetime, date
def send_sms():
'''
Send SMS reminder
'''
config = {'account_sid' : '', 'auth_token' : ''}
client = TwilioRestClient(config['account_sid'], config['auth_token'])
message = client.messages.create(to="", from_="",
body="Hello there!")
def check_commit_activity():
'''
Check if there was any change in the commit history of the user
'''
get_recent_event = requests.get("https://api.github.com/users/ueg1990/events/public")
print datetime.strptime(get_recent_event.json()[0]['created_at'].split('T')[0], '%Y-%m-%d').date()
print date.today()
print datetime.strptime(get_recent_event.json()[0]['created_at'].split('T')[0], '%Y-%m-%d').date() == date.today()
print type(datetime.strptime(get_recent_event.json()[0]['created_at'].split('T')[0], '%Y-%m-%d').date()), type(date.today())
return False
def main():
check_commit_activity()
# send_sms()
if __name__ == "__main__":
main()
Add condition to check for latest commit activity
|
'''
This module will send SMS reminders periodically, using Twilio.
The aim is to encourage user to code, commit and push to GitHub everyday
'''
import requests
from twilio.rest import TwilioRestClient
from datetime import datetime, date
def send_sms():
'''
Send SMS reminder
'''
config = {'account_sid' : '', 'auth_token' : ''}
client = TwilioRestClient(config['account_sid'], config['auth_token'])
message = client.messages.create(to="", from_="",
body="Hello there!")
def check_commit_activity():
'''
Check if there was any change in the commit history of the user. If date of
latest event is the same as current date, commit was made, hence return True
else return False. Returning False triggers sending of an SMS reminder
'''
get_recent_event = requests.get("https://api.github.com/users/%s/events/public" % '')
event_date = datetime.strptime(get_recent_event.json()[0]['created_at'].split('T')[0], '%Y-%m-%d').date()
return event_date == date.today()
def main():
if not check_commit_activity():
send_sms()
if __name__ == "__main__":
main()
|
<commit_before>'''
This module will send SMS reminders periodically, using Twilio.
The aim is to encourage user to code, commit and push to GitHub everyday
'''
import requests
from twilio.rest import TwilioRestClient
from datetime import datetime, date
def send_sms():
'''
Send SMS reminder
'''
config = {'account_sid' : '', 'auth_token' : ''}
client = TwilioRestClient(config['account_sid'], config['auth_token'])
message = client.messages.create(to="", from_="",
body="Hello there!")
def check_commit_activity():
'''
Check if there was any change in the commit history of the user
'''
get_recent_event = requests.get("https://api.github.com/users/ueg1990/events/public")
print datetime.strptime(get_recent_event.json()[0]['created_at'].split('T')[0], '%Y-%m-%d').date()
print date.today()
print datetime.strptime(get_recent_event.json()[0]['created_at'].split('T')[0], '%Y-%m-%d').date() == date.today()
print type(datetime.strptime(get_recent_event.json()[0]['created_at'].split('T')[0], '%Y-%m-%d').date()), type(date.today())
return False
def main():
check_commit_activity()
# send_sms()
if __name__ == "__main__":
main()
<commit_msg>Add condition to check for latest commit activity<commit_after>
|
'''
This module will send SMS reminders periodically, using Twilio.
The aim is to encourage user to code, commit and push to GitHub everyday
'''
import requests
from twilio.rest import TwilioRestClient
from datetime import datetime, date
def send_sms():
'''
Send SMS reminder
'''
config = {'account_sid' : '', 'auth_token' : ''}
client = TwilioRestClient(config['account_sid'], config['auth_token'])
message = client.messages.create(to="", from_="",
body="Hello there!")
def check_commit_activity():
'''
Check if there was any change in the commit history of the user. If date of
latest event is the same as current date, commit was made, hence return True
else return False. Returning False triggers sending of an SMS reminder
'''
get_recent_event = requests.get("https://api.github.com/users/%s/events/public" % '')
event_date = datetime.strptime(get_recent_event.json()[0]['created_at'].split('T')[0], '%Y-%m-%d').date()
return event_date == date.today()
def main():
if not check_commit_activity():
send_sms()
if __name__ == "__main__":
main()
|
'''
This module will send SMS reminders periodically, using Twilio.
The aim is to encourage user to code, commit and push to GitHub everyday
'''
import requests
from twilio.rest import TwilioRestClient
from datetime import datetime, date
def send_sms():
'''
Send SMS reminder
'''
config = {'account_sid' : '', 'auth_token' : ''}
client = TwilioRestClient(config['account_sid'], config['auth_token'])
message = client.messages.create(to="", from_="",
body="Hello there!")
def check_commit_activity():
'''
Check if there was any change in the commit history of the user
'''
get_recent_event = requests.get("https://api.github.com/users/ueg1990/events/public")
print datetime.strptime(get_recent_event.json()[0]['created_at'].split('T')[0], '%Y-%m-%d').date()
print date.today()
print datetime.strptime(get_recent_event.json()[0]['created_at'].split('T')[0], '%Y-%m-%d').date() == date.today()
print type(datetime.strptime(get_recent_event.json()[0]['created_at'].split('T')[0], '%Y-%m-%d').date()), type(date.today())
return False
def main():
check_commit_activity()
# send_sms()
if __name__ == "__main__":
main()
Add condition to check for latest commit activity'''
This module will send SMS reminders periodically, using Twilio.
The aim is to encourage user to code, commit and push to GitHub everyday
'''
import requests
from twilio.rest import TwilioRestClient
from datetime import datetime, date
def send_sms():
'''
Send SMS reminder
'''
config = {'account_sid' : '', 'auth_token' : ''}
client = TwilioRestClient(config['account_sid'], config['auth_token'])
message = client.messages.create(to="", from_="",
body="Hello there!")
def check_commit_activity():
'''
Check if there was any change in the commit history of the user. If date of
latest event is the same as current date, commit was made, hence return True
else return False. Returning False triggers sending of an SMS reminder
'''
get_recent_event = requests.get("https://api.github.com/users/%s/events/public" % '')
event_date = datetime.strptime(get_recent_event.json()[0]['created_at'].split('T')[0], '%Y-%m-%d').date()
return event_date == date.today()
def main():
if not check_commit_activity():
send_sms()
if __name__ == "__main__":
main()
|
<commit_before>'''
This module will send SMS reminders periodically, using Twilio.
The aim is to encourage user to code, commit and push to GitHub everyday
'''
import requests
from twilio.rest import TwilioRestClient
from datetime import datetime, date
def send_sms():
'''
Send SMS reminder
'''
config = {'account_sid' : '', 'auth_token' : ''}
client = TwilioRestClient(config['account_sid'], config['auth_token'])
message = client.messages.create(to="", from_="",
body="Hello there!")
def check_commit_activity():
'''
Check if there was any change in the commit history of the user
'''
get_recent_event = requests.get("https://api.github.com/users/ueg1990/events/public")
print datetime.strptime(get_recent_event.json()[0]['created_at'].split('T')[0], '%Y-%m-%d').date()
print date.today()
print datetime.strptime(get_recent_event.json()[0]['created_at'].split('T')[0], '%Y-%m-%d').date() == date.today()
print type(datetime.strptime(get_recent_event.json()[0]['created_at'].split('T')[0], '%Y-%m-%d').date()), type(date.today())
return False
def main():
check_commit_activity()
# send_sms()
if __name__ == "__main__":
main()
<commit_msg>Add condition to check for latest commit activity<commit_after>'''
This module will send SMS reminders periodically, using Twilio.
The aim is to encourage user to code, commit and push to GitHub everyday
'''
import requests
from twilio.rest import TwilioRestClient
from datetime import datetime, date
def send_sms():
'''
Send SMS reminder
'''
config = {'account_sid' : '', 'auth_token' : ''}
client = TwilioRestClient(config['account_sid'], config['auth_token'])
message = client.messages.create(to="", from_="",
body="Hello there!")
def check_commit_activity():
'''
Check if there was any change in the commit history of the user. If date of
latest event is the same as current date, commit was made, hence return True
else return False. Returning False triggers sending of an SMS reminder
'''
get_recent_event = requests.get("https://api.github.com/users/%s/events/public" % '')
event_date = datetime.strptime(get_recent_event.json()[0]['created_at'].split('T')[0], '%Y-%m-%d').date()
return event_date == date.today()
def main():
if not check_commit_activity():
send_sms()
if __name__ == "__main__":
main()
|
fd9f9bb2f471a8c14e7d34276060be953795538f
|
nightreads/emails/admin.py
|
nightreads/emails/admin.py
|
from django.contrib import admin
from django.conf.urls import url
from .models import Email, Tag
from .views import SendEmailAdminView, UpdateTargetCountView
from .forms import EmailAdminForm
class EmailAdmin(admin.ModelAdmin):
form = EmailAdminForm
readonly_fields = ('targetted_users', 'is_sent',)
add_fieldsets = (
(None, {
'fields': ('subject', 'message'),
}),
)
def get_fieldsets(self, request, obj=None):
if not obj:
return self.add_fieldsets
return super(EmailAdmin, self).get_fieldsets(request, obj)
def get_urls(self):
urls = super(EmailAdmin, self).get_urls()
my_urls = [
url(r'^(?P<pk>\d+)/send_email/$',
self.admin_site.admin_view(SendEmailAdminView.as_view()),
name='send_email'),
url(r'^(?P<pk>\d+)/update_target_count/$',
self.admin_site.admin_view(UpdateTargetCountView.as_view()),
name='update_target_count'),
]
return my_urls + urls
admin.site.register(Email, EmailAdmin)
admin.site.register(Tag)
|
from django.contrib import admin
from django.conf.urls import url
from .models import Email, Tag
from .views import SendEmailAdminView, UpdateTargetCountView
from .forms import EmailAdminForm
class EmailAdmin(admin.ModelAdmin):
list_display = ['__str__', 'is_sent']
list_filter = ['is_sent']
form = EmailAdminForm
readonly_fields = ('targetted_users', 'is_sent',)
add_fieldsets = (
(None, {
'fields': ('subject', 'message'),
}),
)
def get_fieldsets(self, request, obj=None):
if not obj:
return self.add_fieldsets
return super(EmailAdmin, self).get_fieldsets(request, obj)
def get_urls(self):
urls = super(EmailAdmin, self).get_urls()
my_urls = [
url(r'^(?P<pk>\d+)/send_email/$',
self.admin_site.admin_view(SendEmailAdminView.as_view()),
name='send_email'),
url(r'^(?P<pk>\d+)/update_target_count/$',
self.admin_site.admin_view(UpdateTargetCountView.as_view()),
name='update_target_count'),
]
return my_urls + urls
admin.site.register(Email, EmailAdmin)
admin.site.register(Tag)
|
Add filters to Email Admin List View
|
Add filters to Email Admin List View
|
Python
|
mit
|
avinassh/nightreads,avinassh/nightreads
|
from django.contrib import admin
from django.conf.urls import url
from .models import Email, Tag
from .views import SendEmailAdminView, UpdateTargetCountView
from .forms import EmailAdminForm
class EmailAdmin(admin.ModelAdmin):
form = EmailAdminForm
readonly_fields = ('targetted_users', 'is_sent',)
add_fieldsets = (
(None, {
'fields': ('subject', 'message'),
}),
)
def get_fieldsets(self, request, obj=None):
if not obj:
return self.add_fieldsets
return super(EmailAdmin, self).get_fieldsets(request, obj)
def get_urls(self):
urls = super(EmailAdmin, self).get_urls()
my_urls = [
url(r'^(?P<pk>\d+)/send_email/$',
self.admin_site.admin_view(SendEmailAdminView.as_view()),
name='send_email'),
url(r'^(?P<pk>\d+)/update_target_count/$',
self.admin_site.admin_view(UpdateTargetCountView.as_view()),
name='update_target_count'),
]
return my_urls + urls
admin.site.register(Email, EmailAdmin)
admin.site.register(Tag)
Add filters to Email Admin List View
|
from django.contrib import admin
from django.conf.urls import url
from .models import Email, Tag
from .views import SendEmailAdminView, UpdateTargetCountView
from .forms import EmailAdminForm
class EmailAdmin(admin.ModelAdmin):
list_display = ['__str__', 'is_sent']
list_filter = ['is_sent']
form = EmailAdminForm
readonly_fields = ('targetted_users', 'is_sent',)
add_fieldsets = (
(None, {
'fields': ('subject', 'message'),
}),
)
def get_fieldsets(self, request, obj=None):
if not obj:
return self.add_fieldsets
return super(EmailAdmin, self).get_fieldsets(request, obj)
def get_urls(self):
urls = super(EmailAdmin, self).get_urls()
my_urls = [
url(r'^(?P<pk>\d+)/send_email/$',
self.admin_site.admin_view(SendEmailAdminView.as_view()),
name='send_email'),
url(r'^(?P<pk>\d+)/update_target_count/$',
self.admin_site.admin_view(UpdateTargetCountView.as_view()),
name='update_target_count'),
]
return my_urls + urls
admin.site.register(Email, EmailAdmin)
admin.site.register(Tag)
|
<commit_before>from django.contrib import admin
from django.conf.urls import url
from .models import Email, Tag
from .views import SendEmailAdminView, UpdateTargetCountView
from .forms import EmailAdminForm
class EmailAdmin(admin.ModelAdmin):
form = EmailAdminForm
readonly_fields = ('targetted_users', 'is_sent',)
add_fieldsets = (
(None, {
'fields': ('subject', 'message'),
}),
)
def get_fieldsets(self, request, obj=None):
if not obj:
return self.add_fieldsets
return super(EmailAdmin, self).get_fieldsets(request, obj)
def get_urls(self):
urls = super(EmailAdmin, self).get_urls()
my_urls = [
url(r'^(?P<pk>\d+)/send_email/$',
self.admin_site.admin_view(SendEmailAdminView.as_view()),
name='send_email'),
url(r'^(?P<pk>\d+)/update_target_count/$',
self.admin_site.admin_view(UpdateTargetCountView.as_view()),
name='update_target_count'),
]
return my_urls + urls
admin.site.register(Email, EmailAdmin)
admin.site.register(Tag)
<commit_msg>Add filters to Email Admin List View<commit_after>
|
from django.contrib import admin
from django.conf.urls import url
from .models import Email, Tag
from .views import SendEmailAdminView, UpdateTargetCountView
from .forms import EmailAdminForm
class EmailAdmin(admin.ModelAdmin):
list_display = ['__str__', 'is_sent']
list_filter = ['is_sent']
form = EmailAdminForm
readonly_fields = ('targetted_users', 'is_sent',)
add_fieldsets = (
(None, {
'fields': ('subject', 'message'),
}),
)
def get_fieldsets(self, request, obj=None):
if not obj:
return self.add_fieldsets
return super(EmailAdmin, self).get_fieldsets(request, obj)
def get_urls(self):
urls = super(EmailAdmin, self).get_urls()
my_urls = [
url(r'^(?P<pk>\d+)/send_email/$',
self.admin_site.admin_view(SendEmailAdminView.as_view()),
name='send_email'),
url(r'^(?P<pk>\d+)/update_target_count/$',
self.admin_site.admin_view(UpdateTargetCountView.as_view()),
name='update_target_count'),
]
return my_urls + urls
admin.site.register(Email, EmailAdmin)
admin.site.register(Tag)
|
from django.contrib import admin
from django.conf.urls import url
from .models import Email, Tag
from .views import SendEmailAdminView, UpdateTargetCountView
from .forms import EmailAdminForm
class EmailAdmin(admin.ModelAdmin):
form = EmailAdminForm
readonly_fields = ('targetted_users', 'is_sent',)
add_fieldsets = (
(None, {
'fields': ('subject', 'message'),
}),
)
def get_fieldsets(self, request, obj=None):
if not obj:
return self.add_fieldsets
return super(EmailAdmin, self).get_fieldsets(request, obj)
def get_urls(self):
urls = super(EmailAdmin, self).get_urls()
my_urls = [
url(r'^(?P<pk>\d+)/send_email/$',
self.admin_site.admin_view(SendEmailAdminView.as_view()),
name='send_email'),
url(r'^(?P<pk>\d+)/update_target_count/$',
self.admin_site.admin_view(UpdateTargetCountView.as_view()),
name='update_target_count'),
]
return my_urls + urls
admin.site.register(Email, EmailAdmin)
admin.site.register(Tag)
Add filters to Email Admin List Viewfrom django.contrib import admin
from django.conf.urls import url
from .models import Email, Tag
from .views import SendEmailAdminView, UpdateTargetCountView
from .forms import EmailAdminForm
class EmailAdmin(admin.ModelAdmin):
list_display = ['__str__', 'is_sent']
list_filter = ['is_sent']
form = EmailAdminForm
readonly_fields = ('targetted_users', 'is_sent',)
add_fieldsets = (
(None, {
'fields': ('subject', 'message'),
}),
)
def get_fieldsets(self, request, obj=None):
if not obj:
return self.add_fieldsets
return super(EmailAdmin, self).get_fieldsets(request, obj)
def get_urls(self):
urls = super(EmailAdmin, self).get_urls()
my_urls = [
url(r'^(?P<pk>\d+)/send_email/$',
self.admin_site.admin_view(SendEmailAdminView.as_view()),
name='send_email'),
url(r'^(?P<pk>\d+)/update_target_count/$',
self.admin_site.admin_view(UpdateTargetCountView.as_view()),
name='update_target_count'),
]
return my_urls + urls
admin.site.register(Email, EmailAdmin)
admin.site.register(Tag)
|
<commit_before>from django.contrib import admin
from django.conf.urls import url
from .models import Email, Tag
from .views import SendEmailAdminView, UpdateTargetCountView
from .forms import EmailAdminForm
class EmailAdmin(admin.ModelAdmin):
form = EmailAdminForm
readonly_fields = ('targetted_users', 'is_sent',)
add_fieldsets = (
(None, {
'fields': ('subject', 'message'),
}),
)
def get_fieldsets(self, request, obj=None):
if not obj:
return self.add_fieldsets
return super(EmailAdmin, self).get_fieldsets(request, obj)
def get_urls(self):
urls = super(EmailAdmin, self).get_urls()
my_urls = [
url(r'^(?P<pk>\d+)/send_email/$',
self.admin_site.admin_view(SendEmailAdminView.as_view()),
name='send_email'),
url(r'^(?P<pk>\d+)/update_target_count/$',
self.admin_site.admin_view(UpdateTargetCountView.as_view()),
name='update_target_count'),
]
return my_urls + urls
admin.site.register(Email, EmailAdmin)
admin.site.register(Tag)
<commit_msg>Add filters to Email Admin List View<commit_after>from django.contrib import admin
from django.conf.urls import url
from .models import Email, Tag
from .views import SendEmailAdminView, UpdateTargetCountView
from .forms import EmailAdminForm
class EmailAdmin(admin.ModelAdmin):
list_display = ['__str__', 'is_sent']
list_filter = ['is_sent']
form = EmailAdminForm
readonly_fields = ('targetted_users', 'is_sent',)
add_fieldsets = (
(None, {
'fields': ('subject', 'message'),
}),
)
def get_fieldsets(self, request, obj=None):
if not obj:
return self.add_fieldsets
return super(EmailAdmin, self).get_fieldsets(request, obj)
def get_urls(self):
urls = super(EmailAdmin, self).get_urls()
my_urls = [
url(r'^(?P<pk>\d+)/send_email/$',
self.admin_site.admin_view(SendEmailAdminView.as_view()),
name='send_email'),
url(r'^(?P<pk>\d+)/update_target_count/$',
self.admin_site.admin_view(UpdateTargetCountView.as_view()),
name='update_target_count'),
]
return my_urls + urls
admin.site.register(Email, EmailAdmin)
admin.site.register(Tag)
|
7ba23ab480df92025c3d76c4afa6d56987088899
|
serialenum.py
|
serialenum.py
|
import os
import os.path
import sys
def enumerate():
ports = []
if sys.platform == 'win32':
# Iterate through registry because WMI does not show virtual serial ports
import _winreg
key = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, r'HARDWARE\DEVICEMAP\SERIALCOMM')
i = 0
while True:
try:
ports.append(_winreg.EnumValue(key, i)[1])
i = i + 1
except WindowsError:
break
elif sys.platform == 'linux2':
if os.path.exists('/dev/serial/by-id'):
entries = os.listdir('/dev/serial/by-id')
dirs = [os.readlink(os.path.join('/dev/serial/by-id', x))
for x in entries]
ports.extend([os.path.normpath(os.path.join('/dev/serial/by-id', x))
for x in dirs])
return ports
|
import os
import os.path
import sys
def enumerate():
ports = []
if sys.platform == 'win32':
# Iterate through registry because WMI does not show virtual serial ports
import _winreg
key = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, r'HARDWARE\DEVICEMAP\SERIALCOMM')
i = 0
while True:
try:
ports.append(_winreg.EnumValue(key, i)[1])
i = i + 1
except WindowsError:
break
elif sys.platform == 'linux2':
if os.path.exists('/dev/serial/by-id'):
entries = os.listdir('/dev/serial/by-id')
dirs = [os.readlink(os.path.join('/dev/serial/by-id', x))
for x in entries]
ports.extend([os.path.normpath(os.path.join('/dev/serial/by-id', x))
for x in dirs])
else:
return None
return ports
|
Return None for unsupported platforms
|
Return None for unsupported platforms
|
Python
|
bsd-2-clause
|
djs/serialenum
|
import os
import os.path
import sys
def enumerate():
ports = []
if sys.platform == 'win32':
# Iterate through registry because WMI does not show virtual serial ports
import _winreg
key = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, r'HARDWARE\DEVICEMAP\SERIALCOMM')
i = 0
while True:
try:
ports.append(_winreg.EnumValue(key, i)[1])
i = i + 1
except WindowsError:
break
elif sys.platform == 'linux2':
if os.path.exists('/dev/serial/by-id'):
entries = os.listdir('/dev/serial/by-id')
dirs = [os.readlink(os.path.join('/dev/serial/by-id', x))
for x in entries]
ports.extend([os.path.normpath(os.path.join('/dev/serial/by-id', x))
for x in dirs])
return ports
Return None for unsupported platforms
|
import os
import os.path
import sys
def enumerate():
ports = []
if sys.platform == 'win32':
# Iterate through registry because WMI does not show virtual serial ports
import _winreg
key = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, r'HARDWARE\DEVICEMAP\SERIALCOMM')
i = 0
while True:
try:
ports.append(_winreg.EnumValue(key, i)[1])
i = i + 1
except WindowsError:
break
elif sys.platform == 'linux2':
if os.path.exists('/dev/serial/by-id'):
entries = os.listdir('/dev/serial/by-id')
dirs = [os.readlink(os.path.join('/dev/serial/by-id', x))
for x in entries]
ports.extend([os.path.normpath(os.path.join('/dev/serial/by-id', x))
for x in dirs])
else:
return None
return ports
|
<commit_before>import os
import os.path
import sys
def enumerate():
ports = []
if sys.platform == 'win32':
# Iterate through registry because WMI does not show virtual serial ports
import _winreg
key = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, r'HARDWARE\DEVICEMAP\SERIALCOMM')
i = 0
while True:
try:
ports.append(_winreg.EnumValue(key, i)[1])
i = i + 1
except WindowsError:
break
elif sys.platform == 'linux2':
if os.path.exists('/dev/serial/by-id'):
entries = os.listdir('/dev/serial/by-id')
dirs = [os.readlink(os.path.join('/dev/serial/by-id', x))
for x in entries]
ports.extend([os.path.normpath(os.path.join('/dev/serial/by-id', x))
for x in dirs])
return ports
<commit_msg>Return None for unsupported platforms<commit_after>
|
import os
import os.path
import sys
def enumerate():
ports = []
if sys.platform == 'win32':
# Iterate through registry because WMI does not show virtual serial ports
import _winreg
key = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, r'HARDWARE\DEVICEMAP\SERIALCOMM')
i = 0
while True:
try:
ports.append(_winreg.EnumValue(key, i)[1])
i = i + 1
except WindowsError:
break
elif sys.platform == 'linux2':
if os.path.exists('/dev/serial/by-id'):
entries = os.listdir('/dev/serial/by-id')
dirs = [os.readlink(os.path.join('/dev/serial/by-id', x))
for x in entries]
ports.extend([os.path.normpath(os.path.join('/dev/serial/by-id', x))
for x in dirs])
else:
return None
return ports
|
import os
import os.path
import sys
def enumerate():
ports = []
if sys.platform == 'win32':
# Iterate through registry because WMI does not show virtual serial ports
import _winreg
key = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, r'HARDWARE\DEVICEMAP\SERIALCOMM')
i = 0
while True:
try:
ports.append(_winreg.EnumValue(key, i)[1])
i = i + 1
except WindowsError:
break
elif sys.platform == 'linux2':
if os.path.exists('/dev/serial/by-id'):
entries = os.listdir('/dev/serial/by-id')
dirs = [os.readlink(os.path.join('/dev/serial/by-id', x))
for x in entries]
ports.extend([os.path.normpath(os.path.join('/dev/serial/by-id', x))
for x in dirs])
return ports
Return None for unsupported platformsimport os
import os.path
import sys
def enumerate():
ports = []
if sys.platform == 'win32':
# Iterate through registry because WMI does not show virtual serial ports
import _winreg
key = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, r'HARDWARE\DEVICEMAP\SERIALCOMM')
i = 0
while True:
try:
ports.append(_winreg.EnumValue(key, i)[1])
i = i + 1
except WindowsError:
break
elif sys.platform == 'linux2':
if os.path.exists('/dev/serial/by-id'):
entries = os.listdir('/dev/serial/by-id')
dirs = [os.readlink(os.path.join('/dev/serial/by-id', x))
for x in entries]
ports.extend([os.path.normpath(os.path.join('/dev/serial/by-id', x))
for x in dirs])
else:
return None
return ports
|
<commit_before>import os
import os.path
import sys
def enumerate():
ports = []
if sys.platform == 'win32':
# Iterate through registry because WMI does not show virtual serial ports
import _winreg
key = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, r'HARDWARE\DEVICEMAP\SERIALCOMM')
i = 0
while True:
try:
ports.append(_winreg.EnumValue(key, i)[1])
i = i + 1
except WindowsError:
break
elif sys.platform == 'linux2':
if os.path.exists('/dev/serial/by-id'):
entries = os.listdir('/dev/serial/by-id')
dirs = [os.readlink(os.path.join('/dev/serial/by-id', x))
for x in entries]
ports.extend([os.path.normpath(os.path.join('/dev/serial/by-id', x))
for x in dirs])
return ports
<commit_msg>Return None for unsupported platforms<commit_after>import os
import os.path
import sys
def enumerate():
ports = []
if sys.platform == 'win32':
# Iterate through registry because WMI does not show virtual serial ports
import _winreg
key = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, r'HARDWARE\DEVICEMAP\SERIALCOMM')
i = 0
while True:
try:
ports.append(_winreg.EnumValue(key, i)[1])
i = i + 1
except WindowsError:
break
elif sys.platform == 'linux2':
if os.path.exists('/dev/serial/by-id'):
entries = os.listdir('/dev/serial/by-id')
dirs = [os.readlink(os.path.join('/dev/serial/by-id', x))
for x in entries]
ports.extend([os.path.normpath(os.path.join('/dev/serial/by-id', x))
for x in dirs])
else:
return None
return ports
|
8fa895189696e83e6120875886bc8888e0509195
|
bin/confluent-server.py
|
bin/confluent-server.py
|
import sys
import os
path = os.path.dirname(os.path.realpath(__file__))
path = os.path.realpath(os.path.join(path, '..'))
sys.path.append(path)
from confluent import main
main.run()
|
import sys
import os
path = os.path.dirname(os.path.realpath(__file__))
path = os.path.realpath(os.path.join(path, '..'))
sys.path.append(path)
from confluent import main
#import cProfile
#import time
#p = cProfile.Profile(time.clock)
#p.enable()
#try:
main.run()
#except:
# pass
#p.disable()
#p.print_stats(sort='cumulative')
#p.print_stats(sort='time')
|
Put comments in to hint a decent strategy to profile runtime performance
|
Put comments in to hint a decent strategy to profile runtime performance
To do performance optimization in this sort of application, this is
about as well as I have been able to manage in python. I will say perl with
NYTProf seems to be significantly better for data, but this is servicable.
I tried yappi, but it goes wildly inaccurate with this codebase. Because of
the eventlet plumbing, cProfile is still pretty misleading. Best strategy
seems to be review cumulative time with a healthy grain of salt around the
top items until you get down to info that makes sense. For example, trampoline
unfairly gets a great deal of the 'blame' by taking on nearly all the activity.
internal time seems to miss a great deal of important information.
|
Python
|
apache-2.0
|
chenglch/confluent,whowutwut/confluent,jufm/confluent,jufm/confluent,michaelfardu/thinkconfluent,xcat2/confluent,jjohnson42/confluent,jjohnson42/confluent,whowutwut/confluent,chenglch/confluent,michaelfardu/thinkconfluent,jufm/confluent,michaelfardu/thinkconfluent,xcat2/confluent,xcat2/confluent,xcat2/confluent,jjohnson42/confluent,jufm/confluent,chenglch/confluent,whowutwut/confluent,whowutwut/confluent,michaelfardu/thinkconfluent,chenglch/confluent,jjohnson42/confluent,xcat2/confluent,jjohnson42/confluent,jufm/confluent,chenglch/confluent,michaelfardu/thinkconfluent
|
import sys
import os
path = os.path.dirname(os.path.realpath(__file__))
path = os.path.realpath(os.path.join(path, '..'))
sys.path.append(path)
from confluent import main
main.run()
Put comments in to hint a decent strategy to profile runtime performance
To do performance optimization in this sort of application, this is
about as well as I have been able to manage in python. I will say perl with
NYTProf seems to be significantly better for data, but this is servicable.
I tried yappi, but it goes wildly inaccurate with this codebase. Because of
the eventlet plumbing, cProfile is still pretty misleading. Best strategy
seems to be review cumulative time with a healthy grain of salt around the
top items until you get down to info that makes sense. For example, trampoline
unfairly gets a great deal of the 'blame' by taking on nearly all the activity.
internal time seems to miss a great deal of important information.
|
import sys
import os
path = os.path.dirname(os.path.realpath(__file__))
path = os.path.realpath(os.path.join(path, '..'))
sys.path.append(path)
from confluent import main
#import cProfile
#import time
#p = cProfile.Profile(time.clock)
#p.enable()
#try:
main.run()
#except:
# pass
#p.disable()
#p.print_stats(sort='cumulative')
#p.print_stats(sort='time')
|
<commit_before>import sys
import os
path = os.path.dirname(os.path.realpath(__file__))
path = os.path.realpath(os.path.join(path, '..'))
sys.path.append(path)
from confluent import main
main.run()
<commit_msg>Put comments in to hint a decent strategy to profile runtime performance
To do performance optimization in this sort of application, this is
about as well as I have been able to manage in python. I will say perl with
NYTProf seems to be significantly better for data, but this is servicable.
I tried yappi, but it goes wildly inaccurate with this codebase. Because of
the eventlet plumbing, cProfile is still pretty misleading. Best strategy
seems to be review cumulative time with a healthy grain of salt around the
top items until you get down to info that makes sense. For example, trampoline
unfairly gets a great deal of the 'blame' by taking on nearly all the activity.
internal time seems to miss a great deal of important information.<commit_after>
|
import sys
import os
path = os.path.dirname(os.path.realpath(__file__))
path = os.path.realpath(os.path.join(path, '..'))
sys.path.append(path)
from confluent import main
#import cProfile
#import time
#p = cProfile.Profile(time.clock)
#p.enable()
#try:
main.run()
#except:
# pass
#p.disable()
#p.print_stats(sort='cumulative')
#p.print_stats(sort='time')
|
import sys
import os
path = os.path.dirname(os.path.realpath(__file__))
path = os.path.realpath(os.path.join(path, '..'))
sys.path.append(path)
from confluent import main
main.run()
Put comments in to hint a decent strategy to profile runtime performance
To do performance optimization in this sort of application, this is
about as well as I have been able to manage in python. I will say perl with
NYTProf seems to be significantly better for data, but this is servicable.
I tried yappi, but it goes wildly inaccurate with this codebase. Because of
the eventlet plumbing, cProfile is still pretty misleading. Best strategy
seems to be review cumulative time with a healthy grain of salt around the
top items until you get down to info that makes sense. For example, trampoline
unfairly gets a great deal of the 'blame' by taking on nearly all the activity.
internal time seems to miss a great deal of important information.import sys
import os
path = os.path.dirname(os.path.realpath(__file__))
path = os.path.realpath(os.path.join(path, '..'))
sys.path.append(path)
from confluent import main
#import cProfile
#import time
#p = cProfile.Profile(time.clock)
#p.enable()
#try:
main.run()
#except:
# pass
#p.disable()
#p.print_stats(sort='cumulative')
#p.print_stats(sort='time')
|
<commit_before>import sys
import os
path = os.path.dirname(os.path.realpath(__file__))
path = os.path.realpath(os.path.join(path, '..'))
sys.path.append(path)
from confluent import main
main.run()
<commit_msg>Put comments in to hint a decent strategy to profile runtime performance
To do performance optimization in this sort of application, this is
about as well as I have been able to manage in python. I will say perl with
NYTProf seems to be significantly better for data, but this is servicable.
I tried yappi, but it goes wildly inaccurate with this codebase. Because of
the eventlet plumbing, cProfile is still pretty misleading. Best strategy
seems to be review cumulative time with a healthy grain of salt around the
top items until you get down to info that makes sense. For example, trampoline
unfairly gets a great deal of the 'blame' by taking on nearly all the activity.
internal time seems to miss a great deal of important information.<commit_after>import sys
import os
path = os.path.dirname(os.path.realpath(__file__))
path = os.path.realpath(os.path.join(path, '..'))
sys.path.append(path)
from confluent import main
#import cProfile
#import time
#p = cProfile.Profile(time.clock)
#p.enable()
#try:
main.run()
#except:
# pass
#p.disable()
#p.print_stats(sort='cumulative')
#p.print_stats(sort='time')
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.