commit stringlengths 40 40 | old_file stringlengths 4 118 | new_file stringlengths 4 118 | old_contents stringlengths 0 2.94k | new_contents stringlengths 1 4.43k | subject stringlengths 15 444 | message stringlengths 16 3.45k | lang stringclasses 1 value | license stringclasses 13 values | repos stringlengths 5 43.2k | prompt stringlengths 17 4.58k | response stringlengths 1 4.43k | prompt_tagged stringlengths 58 4.62k | response_tagged stringlengths 1 4.43k | text stringlengths 132 7.29k | text_tagged stringlengths 173 7.33k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
09cfd33df218725aa88d2f64d87868056c2778ba | indra/tests/test_biogrid.py | indra/tests/test_biogrid.py | from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
from indra.databases import biogrid_client
from indra.util import unicode_strs
from nose.plugins.attrib import attr
@attr('webservice', 'nonpublic')
def test_biogrid_request():
results = biogrid_client._send_request(['MAP2K1', 'MAPK1'])
assert results is not None
assert unicode_strs(results)
| from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
from indra.databases import biogrid_client
from indra.util import unicode_strs
from nose.plugins.attrib import attr
from indra.sources.biogrid import process_file
from indra.statements import Complex
@attr('webservice', 'nonpublic')
def test_biogrid_request():
results = biogrid_client._send_request(['MAP2K1', 'MAPK1'])
assert results is not None
assert unicode_strs(results)
def test_biogrid_tsv():
# Download biogrid file form the web and process it
bp = process_file(None)
# We should have a lot of statementse
statements = bp.statements
assert(len(statements) > 500000)
# Any given statement should be a complex, with appropriate evidence
s0 = statements[0]
assert(isinstance(s0, Complex))
ev = s0.evidence[0]
assert(ev.source_api == 'biogrid')
assert(ev.text is None)
assert(ev.pmid is not None)
assert('tsv_row' in ev.annotations)
# The first statement in the file involves MAP2K4 and FLNC
assert(str(s0.members[0]) == 'MAP2K4()')
assert(str(s0.members[1]) == 'FLNC()')
| Add test for downloading and parsing biogrid tsv file | Add test for downloading and parsing biogrid tsv file
| Python | bsd-2-clause | johnbachman/belpy,pvtodorov/indra,sorgerlab/indra,sorgerlab/belpy,bgyori/indra,johnbachman/indra,sorgerlab/indra,pvtodorov/indra,sorgerlab/belpy,pvtodorov/indra,johnbachman/belpy,sorgerlab/belpy,johnbachman/indra,pvtodorov/indra,johnbachman/belpy,bgyori/indra,johnbachman/indra,sorgerlab/indra,bgyori/indra | from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
from indra.databases import biogrid_client
from indra.util import unicode_strs
from nose.plugins.attrib import attr
@attr('webservice', 'nonpublic')
def test_biogrid_request():
results = biogrid_client._send_request(['MAP2K1', 'MAPK1'])
assert results is not None
assert unicode_strs(results)
Add test for downloading and parsing biogrid tsv file | from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
from indra.databases import biogrid_client
from indra.util import unicode_strs
from nose.plugins.attrib import attr
from indra.sources.biogrid import process_file
from indra.statements import Complex
@attr('webservice', 'nonpublic')
def test_biogrid_request():
results = biogrid_client._send_request(['MAP2K1', 'MAPK1'])
assert results is not None
assert unicode_strs(results)
def test_biogrid_tsv():
# Download biogrid file form the web and process it
bp = process_file(None)
# We should have a lot of statementse
statements = bp.statements
assert(len(statements) > 500000)
# Any given statement should be a complex, with appropriate evidence
s0 = statements[0]
assert(isinstance(s0, Complex))
ev = s0.evidence[0]
assert(ev.source_api == 'biogrid')
assert(ev.text is None)
assert(ev.pmid is not None)
assert('tsv_row' in ev.annotations)
# The first statement in the file involves MAP2K4 and FLNC
assert(str(s0.members[0]) == 'MAP2K4()')
assert(str(s0.members[1]) == 'FLNC()')
| <commit_before>from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
from indra.databases import biogrid_client
from indra.util import unicode_strs
from nose.plugins.attrib import attr
@attr('webservice', 'nonpublic')
def test_biogrid_request():
results = biogrid_client._send_request(['MAP2K1', 'MAPK1'])
assert results is not None
assert unicode_strs(results)
<commit_msg>Add test for downloading and parsing biogrid tsv file<commit_after> | from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
from indra.databases import biogrid_client
from indra.util import unicode_strs
from nose.plugins.attrib import attr
from indra.sources.biogrid import process_file
from indra.statements import Complex
@attr('webservice', 'nonpublic')
def test_biogrid_request():
results = biogrid_client._send_request(['MAP2K1', 'MAPK1'])
assert results is not None
assert unicode_strs(results)
def test_biogrid_tsv():
# Download biogrid file form the web and process it
bp = process_file(None)
# We should have a lot of statementse
statements = bp.statements
assert(len(statements) > 500000)
# Any given statement should be a complex, with appropriate evidence
s0 = statements[0]
assert(isinstance(s0, Complex))
ev = s0.evidence[0]
assert(ev.source_api == 'biogrid')
assert(ev.text is None)
assert(ev.pmid is not None)
assert('tsv_row' in ev.annotations)
# The first statement in the file involves MAP2K4 and FLNC
assert(str(s0.members[0]) == 'MAP2K4()')
assert(str(s0.members[1]) == 'FLNC()')
| from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
from indra.databases import biogrid_client
from indra.util import unicode_strs
from nose.plugins.attrib import attr
@attr('webservice', 'nonpublic')
def test_biogrid_request():
results = biogrid_client._send_request(['MAP2K1', 'MAPK1'])
assert results is not None
assert unicode_strs(results)
Add test for downloading and parsing biogrid tsv filefrom __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
from indra.databases import biogrid_client
from indra.util import unicode_strs
from nose.plugins.attrib import attr
from indra.sources.biogrid import process_file
from indra.statements import Complex
@attr('webservice', 'nonpublic')
def test_biogrid_request():
results = biogrid_client._send_request(['MAP2K1', 'MAPK1'])
assert results is not None
assert unicode_strs(results)
def test_biogrid_tsv():
# Download biogrid file form the web and process it
bp = process_file(None)
# We should have a lot of statementse
statements = bp.statements
assert(len(statements) > 500000)
# Any given statement should be a complex, with appropriate evidence
s0 = statements[0]
assert(isinstance(s0, Complex))
ev = s0.evidence[0]
assert(ev.source_api == 'biogrid')
assert(ev.text is None)
assert(ev.pmid is not None)
assert('tsv_row' in ev.annotations)
# The first statement in the file involves MAP2K4 and FLNC
assert(str(s0.members[0]) == 'MAP2K4()')
assert(str(s0.members[1]) == 'FLNC()')
| <commit_before>from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
from indra.databases import biogrid_client
from indra.util import unicode_strs
from nose.plugins.attrib import attr
@attr('webservice', 'nonpublic')
def test_biogrid_request():
results = biogrid_client._send_request(['MAP2K1', 'MAPK1'])
assert results is not None
assert unicode_strs(results)
<commit_msg>Add test for downloading and parsing biogrid tsv file<commit_after>from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
from indra.databases import biogrid_client
from indra.util import unicode_strs
from nose.plugins.attrib import attr
from indra.sources.biogrid import process_file
from indra.statements import Complex
@attr('webservice', 'nonpublic')
def test_biogrid_request():
results = biogrid_client._send_request(['MAP2K1', 'MAPK1'])
assert results is not None
assert unicode_strs(results)
def test_biogrid_tsv():
# Download biogrid file form the web and process it
bp = process_file(None)
# We should have a lot of statementse
statements = bp.statements
assert(len(statements) > 500000)
# Any given statement should be a complex, with appropriate evidence
s0 = statements[0]
assert(isinstance(s0, Complex))
ev = s0.evidence[0]
assert(ev.source_api == 'biogrid')
assert(ev.text is None)
assert(ev.pmid is not None)
assert('tsv_row' in ev.annotations)
# The first statement in the file involves MAP2K4 and FLNC
assert(str(s0.members[0]) == 'MAP2K4()')
assert(str(s0.members[1]) == 'FLNC()')
|
91d50ff929a25345860d9dd91a92473db1fea932 | cla_backend/libs/eligibility_calculator/constants/disposable_income.py | cla_backend/libs/eligibility_calculator/constants/disposable_income.py | LIMIT = 73300
PARTNER_ALLOWANCE = 18191
CHILD_ALLOWANCE = 29149
CHILDLESS_HOUSING_CAP = 54500
EMPLOYMENT_COSTS_ALLOWANCE = 4500
| LIMIT = 73300
PARTNER_ALLOWANCE = 18141
CHILD_ALLOWANCE = 29070
CHILDLESS_HOUSING_CAP = 54500
EMPLOYMENT_COSTS_ALLOWANCE = 4500
| Change dependant allowance amounts to match policy change | Change dependant allowance amounts to match policy change
| Python | mit | ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend | LIMIT = 73300
PARTNER_ALLOWANCE = 18191
CHILD_ALLOWANCE = 29149
CHILDLESS_HOUSING_CAP = 54500
EMPLOYMENT_COSTS_ALLOWANCE = 4500
Change dependant allowance amounts to match policy change | LIMIT = 73300
PARTNER_ALLOWANCE = 18141
CHILD_ALLOWANCE = 29070
CHILDLESS_HOUSING_CAP = 54500
EMPLOYMENT_COSTS_ALLOWANCE = 4500
| <commit_before>LIMIT = 73300
PARTNER_ALLOWANCE = 18191
CHILD_ALLOWANCE = 29149
CHILDLESS_HOUSING_CAP = 54500
EMPLOYMENT_COSTS_ALLOWANCE = 4500
<commit_msg>Change dependant allowance amounts to match policy change<commit_after> | LIMIT = 73300
PARTNER_ALLOWANCE = 18141
CHILD_ALLOWANCE = 29070
CHILDLESS_HOUSING_CAP = 54500
EMPLOYMENT_COSTS_ALLOWANCE = 4500
| LIMIT = 73300
PARTNER_ALLOWANCE = 18191
CHILD_ALLOWANCE = 29149
CHILDLESS_HOUSING_CAP = 54500
EMPLOYMENT_COSTS_ALLOWANCE = 4500
Change dependant allowance amounts to match policy changeLIMIT = 73300
PARTNER_ALLOWANCE = 18141
CHILD_ALLOWANCE = 29070
CHILDLESS_HOUSING_CAP = 54500
EMPLOYMENT_COSTS_ALLOWANCE = 4500
| <commit_before>LIMIT = 73300
PARTNER_ALLOWANCE = 18191
CHILD_ALLOWANCE = 29149
CHILDLESS_HOUSING_CAP = 54500
EMPLOYMENT_COSTS_ALLOWANCE = 4500
<commit_msg>Change dependant allowance amounts to match policy change<commit_after>LIMIT = 73300
PARTNER_ALLOWANCE = 18141
CHILD_ALLOWANCE = 29070
CHILDLESS_HOUSING_CAP = 54500
EMPLOYMENT_COSTS_ALLOWANCE = 4500
|
5da62bbe9df92df58dea742120f4e78555509bd0 | lib/log_processor.py | lib/log_processor.py | import re
import snmpy
class log_processor(snmpy.plugin):
def create(self):
for k, v in sorted(self.conf['objects'].items()):
extra = {
'count': re.compile(v['count']),
'reset': re.compile(v['reset']) if 'reset' in v else None,
'start': int(v['start']) if 'start' in v else 0,
'rotate': bool(v['rotate']) if 'rotate' in v else False
}
self.data['1.%s' % k] = 'string', v['label']
self.data['2.%s' % k] = 'integer', extra['start'], extra
self.tail()
@snmpy.plugin.task
def tail(self):
for line in snmpy.plugin.tail(self.conf['file_name'], True):
if line is True:
for item in self.data['2.0':]:
if self.data[item:'rotate'] and line is True:
self.data[item] = self.data[item:'start']
continue
for item in self.data['2.0':]:
count = self.data[item:'count'].search(line)
if count:
self.data[item] = self.data[item:True] + (int(count.group(1)) if len(count.groups()) > 0 else 1)
break
if self.data[item:'reset'] is not None and self.data[item:'reset'].search(line):
self.data[item] = self.data[item:'start']
break
| import glob
import re
import snmpy
class log_processor(snmpy.plugin):
def create(self):
for k, v in sorted(self.conf['objects'].items()):
extra = {
'count': re.compile(v['count']),
'reset': re.compile(v['reset']) if 'reset' in v else None,
'start': int(v['start']) if 'start' in v else 0,
'rotate': bool(v['rotate']) if 'rotate' in v else False
}
self.data['1.%s' % k] = 'string', v['label']
self.data['2.%s' % k] = 'integer', extra['start'], extra
self.tail()
@snmpy.plugin.task
def tail(self):
for line in snmpy.plugin.tail(glob.glob(self.conf['file_name'])[0], True):
if line is True:
for item in self.data['2.0':]:
if self.data[item:'rotate'] and line is True:
self.data[item] = self.data[item:'start']
continue
for item in self.data['2.0':]:
count = self.data[item:'count'].search(line)
if count:
self.data[item] = self.data[item:True] + (int(count.group(1)) if len(count.groups()) > 0 else 1)
break
if self.data[item:'reset'] is not None and self.data[item:'reset'].search(line):
self.data[item] = self.data[item:'start']
break
| Support file globbing for log processor since names could be dynamic (based on hostname, etc.). | Support file globbing for log processor since names could be dynamic (based on hostname, etc.).
| Python | mit | mk23/snmpy,mk23/snmpy | import re
import snmpy
class log_processor(snmpy.plugin):
def create(self):
for k, v in sorted(self.conf['objects'].items()):
extra = {
'count': re.compile(v['count']),
'reset': re.compile(v['reset']) if 'reset' in v else None,
'start': int(v['start']) if 'start' in v else 0,
'rotate': bool(v['rotate']) if 'rotate' in v else False
}
self.data['1.%s' % k] = 'string', v['label']
self.data['2.%s' % k] = 'integer', extra['start'], extra
self.tail()
@snmpy.plugin.task
def tail(self):
for line in snmpy.plugin.tail(self.conf['file_name'], True):
if line is True:
for item in self.data['2.0':]:
if self.data[item:'rotate'] and line is True:
self.data[item] = self.data[item:'start']
continue
for item in self.data['2.0':]:
count = self.data[item:'count'].search(line)
if count:
self.data[item] = self.data[item:True] + (int(count.group(1)) if len(count.groups()) > 0 else 1)
break
if self.data[item:'reset'] is not None and self.data[item:'reset'].search(line):
self.data[item] = self.data[item:'start']
break
Support file globbing for log processor since names could be dynamic (based on hostname, etc.). | import glob
import re
import snmpy
class log_processor(snmpy.plugin):
def create(self):
for k, v in sorted(self.conf['objects'].items()):
extra = {
'count': re.compile(v['count']),
'reset': re.compile(v['reset']) if 'reset' in v else None,
'start': int(v['start']) if 'start' in v else 0,
'rotate': bool(v['rotate']) if 'rotate' in v else False
}
self.data['1.%s' % k] = 'string', v['label']
self.data['2.%s' % k] = 'integer', extra['start'], extra
self.tail()
@snmpy.plugin.task
def tail(self):
for line in snmpy.plugin.tail(glob.glob(self.conf['file_name'])[0], True):
if line is True:
for item in self.data['2.0':]:
if self.data[item:'rotate'] and line is True:
self.data[item] = self.data[item:'start']
continue
for item in self.data['2.0':]:
count = self.data[item:'count'].search(line)
if count:
self.data[item] = self.data[item:True] + (int(count.group(1)) if len(count.groups()) > 0 else 1)
break
if self.data[item:'reset'] is not None and self.data[item:'reset'].search(line):
self.data[item] = self.data[item:'start']
break
| <commit_before>import re
import snmpy
class log_processor(snmpy.plugin):
def create(self):
for k, v in sorted(self.conf['objects'].items()):
extra = {
'count': re.compile(v['count']),
'reset': re.compile(v['reset']) if 'reset' in v else None,
'start': int(v['start']) if 'start' in v else 0,
'rotate': bool(v['rotate']) if 'rotate' in v else False
}
self.data['1.%s' % k] = 'string', v['label']
self.data['2.%s' % k] = 'integer', extra['start'], extra
self.tail()
@snmpy.plugin.task
def tail(self):
for line in snmpy.plugin.tail(self.conf['file_name'], True):
if line is True:
for item in self.data['2.0':]:
if self.data[item:'rotate'] and line is True:
self.data[item] = self.data[item:'start']
continue
for item in self.data['2.0':]:
count = self.data[item:'count'].search(line)
if count:
self.data[item] = self.data[item:True] + (int(count.group(1)) if len(count.groups()) > 0 else 1)
break
if self.data[item:'reset'] is not None and self.data[item:'reset'].search(line):
self.data[item] = self.data[item:'start']
break
<commit_msg>Support file globbing for log processor since names could be dynamic (based on hostname, etc.).<commit_after> | import glob
import re
import snmpy
class log_processor(snmpy.plugin):
def create(self):
for k, v in sorted(self.conf['objects'].items()):
extra = {
'count': re.compile(v['count']),
'reset': re.compile(v['reset']) if 'reset' in v else None,
'start': int(v['start']) if 'start' in v else 0,
'rotate': bool(v['rotate']) if 'rotate' in v else False
}
self.data['1.%s' % k] = 'string', v['label']
self.data['2.%s' % k] = 'integer', extra['start'], extra
self.tail()
@snmpy.plugin.task
def tail(self):
for line in snmpy.plugin.tail(glob.glob(self.conf['file_name'])[0], True):
if line is True:
for item in self.data['2.0':]:
if self.data[item:'rotate'] and line is True:
self.data[item] = self.data[item:'start']
continue
for item in self.data['2.0':]:
count = self.data[item:'count'].search(line)
if count:
self.data[item] = self.data[item:True] + (int(count.group(1)) if len(count.groups()) > 0 else 1)
break
if self.data[item:'reset'] is not None and self.data[item:'reset'].search(line):
self.data[item] = self.data[item:'start']
break
| import re
import snmpy
class log_processor(snmpy.plugin):
def create(self):
for k, v in sorted(self.conf['objects'].items()):
extra = {
'count': re.compile(v['count']),
'reset': re.compile(v['reset']) if 'reset' in v else None,
'start': int(v['start']) if 'start' in v else 0,
'rotate': bool(v['rotate']) if 'rotate' in v else False
}
self.data['1.%s' % k] = 'string', v['label']
self.data['2.%s' % k] = 'integer', extra['start'], extra
self.tail()
@snmpy.plugin.task
def tail(self):
for line in snmpy.plugin.tail(self.conf['file_name'], True):
if line is True:
for item in self.data['2.0':]:
if self.data[item:'rotate'] and line is True:
self.data[item] = self.data[item:'start']
continue
for item in self.data['2.0':]:
count = self.data[item:'count'].search(line)
if count:
self.data[item] = self.data[item:True] + (int(count.group(1)) if len(count.groups()) > 0 else 1)
break
if self.data[item:'reset'] is not None and self.data[item:'reset'].search(line):
self.data[item] = self.data[item:'start']
break
Support file globbing for log processor since names could be dynamic (based on hostname, etc.).import glob
import re
import snmpy
class log_processor(snmpy.plugin):
def create(self):
for k, v in sorted(self.conf['objects'].items()):
extra = {
'count': re.compile(v['count']),
'reset': re.compile(v['reset']) if 'reset' in v else None,
'start': int(v['start']) if 'start' in v else 0,
'rotate': bool(v['rotate']) if 'rotate' in v else False
}
self.data['1.%s' % k] = 'string', v['label']
self.data['2.%s' % k] = 'integer', extra['start'], extra
self.tail()
@snmpy.plugin.task
def tail(self):
for line in snmpy.plugin.tail(glob.glob(self.conf['file_name'])[0], True):
if line is True:
for item in self.data['2.0':]:
if self.data[item:'rotate'] and line is True:
self.data[item] = self.data[item:'start']
continue
for item in self.data['2.0':]:
count = self.data[item:'count'].search(line)
if count:
self.data[item] = self.data[item:True] + (int(count.group(1)) if len(count.groups()) > 0 else 1)
break
if self.data[item:'reset'] is not None and self.data[item:'reset'].search(line):
self.data[item] = self.data[item:'start']
break
| <commit_before>import re
import snmpy
class log_processor(snmpy.plugin):
def create(self):
for k, v in sorted(self.conf['objects'].items()):
extra = {
'count': re.compile(v['count']),
'reset': re.compile(v['reset']) if 'reset' in v else None,
'start': int(v['start']) if 'start' in v else 0,
'rotate': bool(v['rotate']) if 'rotate' in v else False
}
self.data['1.%s' % k] = 'string', v['label']
self.data['2.%s' % k] = 'integer', extra['start'], extra
self.tail()
@snmpy.plugin.task
def tail(self):
for line in snmpy.plugin.tail(self.conf['file_name'], True):
if line is True:
for item in self.data['2.0':]:
if self.data[item:'rotate'] and line is True:
self.data[item] = self.data[item:'start']
continue
for item in self.data['2.0':]:
count = self.data[item:'count'].search(line)
if count:
self.data[item] = self.data[item:True] + (int(count.group(1)) if len(count.groups()) > 0 else 1)
break
if self.data[item:'reset'] is not None and self.data[item:'reset'].search(line):
self.data[item] = self.data[item:'start']
break
<commit_msg>Support file globbing for log processor since names could be dynamic (based on hostname, etc.).<commit_after>import glob
import re
import snmpy
class log_processor(snmpy.plugin):
def create(self):
for k, v in sorted(self.conf['objects'].items()):
extra = {
'count': re.compile(v['count']),
'reset': re.compile(v['reset']) if 'reset' in v else None,
'start': int(v['start']) if 'start' in v else 0,
'rotate': bool(v['rotate']) if 'rotate' in v else False
}
self.data['1.%s' % k] = 'string', v['label']
self.data['2.%s' % k] = 'integer', extra['start'], extra
self.tail()
@snmpy.plugin.task
def tail(self):
for line in snmpy.plugin.tail(glob.glob(self.conf['file_name'])[0], True):
if line is True:
for item in self.data['2.0':]:
if self.data[item:'rotate'] and line is True:
self.data[item] = self.data[item:'start']
continue
for item in self.data['2.0':]:
count = self.data[item:'count'].search(line)
if count:
self.data[item] = self.data[item:True] + (int(count.group(1)) if len(count.groups()) > 0 else 1)
break
if self.data[item:'reset'] is not None and self.data[item:'reset'].search(line):
self.data[item] = self.data[item:'start']
break
|
0d70cc8fb27240390e252881615f740103535c93 | testsuite/python3.py | testsuite/python3.py | #!/usr/bin/env python3
from typing import ClassVar, List
# Annotated function (Issue #29)
def foo(x: int) -> int:
return x + 1
# Annotated variables #575
CONST: int = 42
class Class:
cls_var: ClassVar[str]
def m(self):
xs: List[int] = []
| #!/usr/bin/env python3
from typing import ClassVar, List
# Annotated function (Issue #29)
def foo(x: int) -> int:
return x + 1
# Annotated variables #575
CONST: int = 42
class Class:
cls_var: ClassVar[str]
for_var: ClassVar[str]
while_var: ClassVar[str]
def_var: ClassVar[str]
if_var: ClassVar[str]
elif_var: ClassVar[str]
else_var: ClassVar[str]
try_var: ClassVar[str]
except_var: ClassVar[str]
finally_var: ClassVar[str]
with_var: ClassVar[str]
def m(self):
xs: List[int] = []
| Add test when variable with annotation start with a keyword | Add test when variable with annotation start with a keyword | Python | mit | PyCQA/pep8 | #!/usr/bin/env python3
from typing import ClassVar, List
# Annotated function (Issue #29)
def foo(x: int) -> int:
return x + 1
# Annotated variables #575
CONST: int = 42
class Class:
cls_var: ClassVar[str]
def m(self):
xs: List[int] = []
Add test when variable with annotation start with a keyword | #!/usr/bin/env python3
from typing import ClassVar, List
# Annotated function (Issue #29)
def foo(x: int) -> int:
return x + 1
# Annotated variables #575
CONST: int = 42
class Class:
cls_var: ClassVar[str]
for_var: ClassVar[str]
while_var: ClassVar[str]
def_var: ClassVar[str]
if_var: ClassVar[str]
elif_var: ClassVar[str]
else_var: ClassVar[str]
try_var: ClassVar[str]
except_var: ClassVar[str]
finally_var: ClassVar[str]
with_var: ClassVar[str]
def m(self):
xs: List[int] = []
| <commit_before>#!/usr/bin/env python3
from typing import ClassVar, List
# Annotated function (Issue #29)
def foo(x: int) -> int:
return x + 1
# Annotated variables #575
CONST: int = 42
class Class:
cls_var: ClassVar[str]
def m(self):
xs: List[int] = []
<commit_msg>Add test when variable with annotation start with a keyword<commit_after> | #!/usr/bin/env python3
from typing import ClassVar, List
# Annotated function (Issue #29)
def foo(x: int) -> int:
return x + 1
# Annotated variables #575
CONST: int = 42
class Class:
cls_var: ClassVar[str]
for_var: ClassVar[str]
while_var: ClassVar[str]
def_var: ClassVar[str]
if_var: ClassVar[str]
elif_var: ClassVar[str]
else_var: ClassVar[str]
try_var: ClassVar[str]
except_var: ClassVar[str]
finally_var: ClassVar[str]
with_var: ClassVar[str]
def m(self):
xs: List[int] = []
| #!/usr/bin/env python3
from typing import ClassVar, List
# Annotated function (Issue #29)
def foo(x: int) -> int:
return x + 1
# Annotated variables #575
CONST: int = 42
class Class:
cls_var: ClassVar[str]
def m(self):
xs: List[int] = []
Add test when variable with annotation start with a keyword#!/usr/bin/env python3
from typing import ClassVar, List
# Annotated function (Issue #29)
def foo(x: int) -> int:
return x + 1
# Annotated variables #575
CONST: int = 42
class Class:
cls_var: ClassVar[str]
for_var: ClassVar[str]
while_var: ClassVar[str]
def_var: ClassVar[str]
if_var: ClassVar[str]
elif_var: ClassVar[str]
else_var: ClassVar[str]
try_var: ClassVar[str]
except_var: ClassVar[str]
finally_var: ClassVar[str]
with_var: ClassVar[str]
def m(self):
xs: List[int] = []
| <commit_before>#!/usr/bin/env python3
from typing import ClassVar, List
# Annotated function (Issue #29)
def foo(x: int) -> int:
return x + 1
# Annotated variables #575
CONST: int = 42
class Class:
cls_var: ClassVar[str]
def m(self):
xs: List[int] = []
<commit_msg>Add test when variable with annotation start with a keyword<commit_after>#!/usr/bin/env python3
from typing import ClassVar, List
# Annotated function (Issue #29)
def foo(x: int) -> int:
return x + 1
# Annotated variables #575
CONST: int = 42
class Class:
cls_var: ClassVar[str]
for_var: ClassVar[str]
while_var: ClassVar[str]
def_var: ClassVar[str]
if_var: ClassVar[str]
elif_var: ClassVar[str]
else_var: ClassVar[str]
try_var: ClassVar[str]
except_var: ClassVar[str]
finally_var: ClassVar[str]
with_var: ClassVar[str]
def m(self):
xs: List[int] = []
|
7202c5027158356b8276c118d48609d5b2e1b369 | api/setup.py | api/setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import humbug
import glob
import os.path
from distutils.core import setup
setup(name='humbug',
version=humbug.__version__,
description='Bindings for the Humbug message API',
author='Humbug, Inc.',
author_email='humbug@humbughq.com',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Communications :: Chat',
],
url='https://humbughq.com/dist/api/',
packages=['humbug'],
data_files=[('share/humbug/examples', ["examples/humbugrc", "examples/send-message"])] + \
[(os.path.join('share/humbug/', relpath),
glob.glob(os.path.join(relpath, '*'))) for relpath in
glob.glob("integrations/*")
],
scripts=["bin/humbug-send"],
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import humbug
import glob
import os
from distutils.core import setup
setup(name='humbug',
version=humbug.__version__,
description='Bindings for the Humbug message API',
author='Humbug, Inc.',
author_email='humbug@humbughq.com',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Communications :: Chat',
],
url='https://humbughq.com/dist/api/',
packages=['humbug'],
data_files=[('share/humbug/examples', ["examples/humbugrc", "examples/send-message"])] + \
[(os.path.join('share/humbug/', relpath),
glob.glob(os.path.join(relpath, '*'))) for relpath in
glob.glob("integrations/*")] + \
[('share/humbug/demos',
[os.path.join("demos", relpath) for relpath in
os.listdir("demos")])],
scripts=["bin/humbug-send"],
)
| Install the demos directory in /usr/local/share/ as well. | Install the demos directory in /usr/local/share/ as well.
(imported from commit 7516e5318f9e30544f53703ead4bca60f2dd7828)
| Python | apache-2.0 | blaze225/zulip,RobotCaleb/zulip,natanovia/zulip,ryanbackman/zulip,andersk/zulip,blaze225/zulip,peguin40/zulip,hackerkid/zulip,deer-hope/zulip,paxapy/zulip,hafeez3000/zulip,susansls/zulip,mohsenSy/zulip,PhilSk/zulip,tdr130/zulip,aakash-cr7/zulip,Diptanshu8/zulip,arpitpanwar/zulip,amallia/zulip,hengqujushi/zulip,christi3k/zulip,firstblade/zulip,codeKonami/zulip,glovebx/zulip,ipernet/zulip,zwily/zulip,eastlhu/zulip,suxinde2009/zulip,jackrzhang/zulip,ikasumiwt/zulip,hustlzp/zulip,peiwei/zulip,wdaher/zulip,vaidap/zulip,hafeez3000/zulip,babbage/zulip,aakash-cr7/zulip,tbutter/zulip,SmartPeople/zulip,dhcrzf/zulip,dawran6/zulip,jackrzhang/zulip,themass/zulip,dxq-git/zulip,Jianchun1/zulip,ryansnowboarder/zulip,easyfmxu/zulip,RobotCaleb/zulip,dawran6/zulip,wangdeshui/zulip,akuseru/zulip,JanzTam/zulip,m1ssou/zulip,joshisa/zulip,TigorC/zulip,tdr130/zulip,jerryge/zulip,Cheppers/zulip,amyliu345/zulip,calvinleenyc/zulip,bastianh/zulip,esander91/zulip,noroot/zulip,sharmaeklavya2/zulip,jonesgithub/zulip,seapasulli/zulip,so0k/zulip,zulip/zulip,Jianchun1/zulip,DazWorrall/zulip,brainwane/zulip,souravbadami/zulip,wdaher/zulip,MariaFaBella85/zulip,adnanh/zulip,saitodisse/zulip,sup95/zulip,MayB/zulip,showell/zulip,arpitpanwar/zulip,KJin99/zulip,arpith/zulip,rht/zulip,tiansiyuan/zulip,Vallher/zulip,mohsenSy/zulip,zhaoweigg/zulip,yocome/zulip,amanharitsh123/zulip,jerryge/zulip,mdavid/zulip,showell/zulip,PhilSk/zulip,alliejones/zulip,adnanh/zulip,sonali0901/zulip,arpitpanwar/zulip,jackrzhang/zulip,tommyip/zulip,zorojean/zulip,xuanhan863/zulip,ashwinirudrappa/zulip,Gabriel0402/zulip,itnihao/zulip,zhaoweigg/zulip,gigawhitlocks/zulip,PhilSk/zulip,wweiradio/zulip,kou/zulip,TigorC/zulip,akuseru/zulip,vikas-parashar/zulip,Suninus/zulip,aps-sids/zulip,thomasboyt/zulip,Gabriel0402/zulip,hackerkid/zulip,pradiptad/zulip,guiquanz/zulip,sharmaeklavya2/zulip,levixie/zulip,easyfmxu/zulip,he15his/zulip,Drooids/zulip,christi3k/zulip,Jianchun1/zulip,Vallher/zulip,firstblade/zulip,wweir
adio/zulip,qq1012803704/zulip,voidException/zulip,johnny9/zulip,krtkmj/zulip,kou/zulip,voidException/zulip,jainayush975/zulip,zacps/zulip,dotcool/zulip,tdr130/zulip,shubhamdhama/zulip,peguin40/zulip,synicalsyntax/zulip,johnny9/zulip,jessedhillon/zulip,jeffcao/zulip,vikas-parashar/zulip,brainwane/zulip,reyha/zulip,kou/zulip,andersk/zulip,sonali0901/zulip,isht3/zulip,Juanvulcano/zulip,bitemyapp/zulip,ufosky-server/zulip,LAndreas/zulip,amyliu345/zulip,he15his/zulip,udxxabp/zulip,johnnygaddarr/zulip,kou/zulip,Batterfii/zulip,deer-hope/zulip,karamcnair/zulip,Jianchun1/zulip,showell/zulip,joshisa/zulip,samatdav/zulip,vikas-parashar/zulip,blaze225/zulip,zwily/zulip,hayderimran7/zulip,mahim97/zulip,mdavid/zulip,swinghu/zulip,zofuthan/zulip,timabbott/zulip,j831/zulip,themass/zulip,babbage/zulip,gigawhitlocks/zulip,qq1012803704/zulip,KingxBanana/zulip,hackerkid/zulip,schatt/zulip,itnihao/zulip,grave-w-grave/zulip,xuanhan863/zulip,TigorC/zulip,ericzhou2008/zulip,amallia/zulip,babbage/zulip,hustlzp/zulip,technicalpickles/zulip,Qgap/zulip,KJin99/zulip,aliceriot/zulip,RobotCaleb/zulip,showell/zulip,pradiptad/zulip,bssrdf/zulip,Gabriel0402/zulip,thomasboyt/zulip,mansilladev/zulip,mdavid/zulip,gkotian/zulip,bssrdf/zulip,karamcnair/zulip,SmartPeople/zulip,zhaoweigg/zulip,qq1012803704/zulip,ryanbackman/zulip,EasonYi/zulip,aps-sids/zulip,dwrpayne/zulip,brainwane/zulip,ahmadassaf/zulip,amallia/zulip,Qgap/zulip,swinghu/zulip,grave-w-grave/zulip,atomic-labs/zulip,joyhchen/zulip,vikas-parashar/zulip,AZtheAsian/zulip,willingc/zulip,saitodisse/zulip,technicalpickles/zulip,Gabriel0402/zulip,hj3938/zulip,Cheppers/zulip,dhcrzf/zulip,qq1012803704/zulip,johnnygaddarr/zulip,itnihao/zulip,ikasumiwt/zulip,punchagan/zulip,eastlhu/zulip,tiansiyuan/zulip,huangkebo/zulip,hackerkid/zulip,nicholasbs/zulip,huangkebo/zulip,atomic-labs/zulip,ipernet/zulip,zorojean/zulip,ufosky-server/zulip,itnihao/zulip,KJin99/zulip,m1ssou/zulip,easyfmxu/zulip,cosmicAsymmetry/zulip,KingxBanana/zulip,eeshangarg/zulip,Galexrt
/zulip,mahim97/zulip,mdavid/zulip,MayB/zulip,jackrzhang/zulip,esander91/zulip,reyha/zulip,ahmadassaf/zulip,swinghu/zulip,hackerkid/zulip,dawran6/zulip,Drooids/zulip,dawran6/zulip,guiquanz/zulip,littledogboy/zulip,ashwinirudrappa/zulip,moria/zulip,stamhe/zulip,wweiradio/zulip,verma-varsha/zulip,tiansiyuan/zulip,esander91/zulip,kaiyuanheshang/zulip,aps-sids/zulip,brainwane/zulip,umkay/zulip,amanharitsh123/zulip,ufosky-server/zulip,willingc/zulip,vaidap/zulip,isht3/zulip,armooo/zulip,krtkmj/zulip,verma-varsha/zulip,LAndreas/zulip,developerfm/zulip,LeeRisk/zulip,jerryge/zulip,shaunstanislaus/zulip,kokoar/zulip,souravbadami/zulip,gigawhitlocks/zulip,pradiptad/zulip,krtkmj/zulip,mahim97/zulip,alliejones/zulip,mansilladev/zulip,themass/zulip,Qgap/zulip,MayB/zulip,tbutter/zulip,armooo/zulip,EasonYi/zulip,bluesea/zulip,luyifan/zulip,niftynei/zulip,sup95/zulip,amallia/zulip,wangdeshui/zulip,noroot/zulip,lfranchi/zulip,johnny9/zulip,natanovia/zulip,karamcnair/zulip,praveenaki/zulip,wweiradio/zulip,RobotCaleb/zulip,lfranchi/zulip,krtkmj/zulip,aakash-cr7/zulip,vikas-parashar/zulip,alliejones/zulip,deer-hope/zulip,dhcrzf/zulip,joyhchen/zulip,akuseru/zulip,seapasulli/zulip,tommyip/zulip,stamhe/zulip,bastianh/zulip,MayB/zulip,johnnygaddarr/zulip,ahmadassaf/zulip,mdavid/zulip,udxxabp/zulip,alliejones/zulip,thomasboyt/zulip,noroot/zulip,natanovia/zulip,Cheppers/zulip,jonesgithub/zulip,wweiradio/zulip,ikasumiwt/zulip,Batterfii/zulip,levixie/zulip,qq1012803704/zulip,levixie/zulip,hafeez3000/zulip,sup95/zulip,zofuthan/zulip,developerfm/zulip,proliming/zulip,ufosky-server/zulip,brainwane/zulip,rishig/zulip,saitodisse/zulip,kokoar/zulip,kokoar/zulip,arpith/zulip,paxapy/zulip,punchagan/zulip,dhcrzf/zulip,suxinde2009/zulip,grave-w-grave/zulip,amanharitsh123/zulip,dnmfarrell/zulip,Vallher/zulip,zwily/zulip,m1ssou/zulip,TigorC/zulip,aps-sids/zulip,mahim97/zulip,zulip/zulip,yocome/zulip,tdr130/zulip,jimmy54/zulip,jeffcao/zulip,isht3/zulip,Juanvulcano/zulip,Galexrt/zulip,babbage/zulip,JanzTam/z
ulip,sharmaeklavya2/zulip,easyfmxu/zulip,christi3k/zulip,peiwei/zulip,LeeRisk/zulip,peiwei/zulip,tiansiyuan/zulip,ipernet/zulip,jonesgithub/zulip,arpitpanwar/zulip,atomic-labs/zulip,PaulPetring/zulip,umkay/zulip,hayderimran7/zulip,avastu/zulip,synicalsyntax/zulip,jrowan/zulip,ApsOps/zulip,DazWorrall/zulip,zachallaun/zulip,johnnygaddarr/zulip,joyhchen/zulip,PaulPetring/zulip,Drooids/zulip,amyliu345/zulip,DazWorrall/zulip,bssrdf/zulip,proliming/zulip,karamcnair/zulip,guiquanz/zulip,zacps/zulip,ApsOps/zulip,zulip/zulip,codeKonami/zulip,mdavid/zulip,arpith/zulip,jrowan/zulip,luyifan/zulip,praveenaki/zulip,willingc/zulip,seapasulli/zulip,akuseru/zulip,grave-w-grave/zulip,JanzTam/zulip,shubhamdhama/zulip,Suninus/zulip,RobotCaleb/zulip,tommyip/zulip,Suninus/zulip,esander91/zulip,Juanvulcano/zulip,zwily/zulip,vabs22/zulip,Batterfii/zulip,peiwei/zulip,jeffcao/zulip,adnanh/zulip,LeeRisk/zulip,noroot/zulip,themass/zulip,blaze225/zulip,zulip/zulip,JanzTam/zulip,bluesea/zulip,joyhchen/zulip,JanzTam/zulip,pradiptad/zulip,Galexrt/zulip,shrikrishnaholla/zulip,LAndreas/zulip,suxinde2009/zulip,blaze225/zulip,timabbott/zulip,Cheppers/zulip,Vallher/zulip,ericzhou2008/zulip,dnmfarrell/zulip,schatt/zulip,thomasboyt/zulip,calvinleenyc/zulip,xuxiao/zulip,pradiptad/zulip,punchagan/zulip,arpith/zulip,zachallaun/zulip,Frouk/zulip,swinghu/zulip,hayderimran7/zulip,tiansiyuan/zulip,xuanhan863/zulip,Vallher/zulip,Frouk/zulip,dwrpayne/zulip,atomic-labs/zulip,zachallaun/zulip,brainwane/zulip,themass/zulip,calvinleenyc/zulip,arpith/zulip,Galexrt/zulip,tbutter/zulip,niftynei/zulip,LAndreas/zulip,timabbott/zulip,karamcnair/zulip,mansilladev/zulip,Drooids/zulip,jessedhillon/zulip,ahmadassaf/zulip,Jianchun1/zulip,amallia/zulip,ashwinirudrappa/zulip,stamhe/zulip,levixie/zulip,rishig/zulip,arpitpanwar/zulip,nicholasbs/zulip,eeshangarg/zulip,bitemyapp/zulip,dattatreya303/zulip,avastu/zulip,jerryge/zulip,bastianh/zulip,pradiptad/zulip,PaulPetring/zulip,xuanhan863/zulip,jrowan/zulip,ApsOps/zulip,jimmy54/zuli
p,hustlzp/zulip,saitodisse/zulip,bluesea/zulip,avastu/zulip,johnny9/zulip,he15his/zulip,itnihao/zulip,JanzTam/zulip,AZtheAsian/zulip,deer-hope/zulip,armooo/zulip,rht/zulip,bssrdf/zulip,sonali0901/zulip,ikasumiwt/zulip,huangkebo/zulip,brockwhittaker/zulip,shubhamdhama/zulip,nicholasbs/zulip,synicalsyntax/zulip,hafeez3000/zulip,MariaFaBella85/zulip,rht/zulip,aakash-cr7/zulip,vabs22/zulip,themass/zulip,voidException/zulip,joshisa/zulip,vakila/zulip,codeKonami/zulip,aliceriot/zulip,wweiradio/zulip,mansilladev/zulip,moria/zulip,fw1121/zulip,vaidap/zulip,MayB/zulip,Qgap/zulip,jerryge/zulip,natanovia/zulip,hackerkid/zulip,dwrpayne/zulip,mahim97/zulip,MariaFaBella85/zulip,littledogboy/zulip,ApsOps/zulip,yuvipanda/zulip,ericzhou2008/zulip,gkotian/zulip,shubhamdhama/zulip,hengqujushi/zulip,themass/zulip,xuxiao/zulip,stamhe/zulip,shrikrishnaholla/zulip,shubhamdhama/zulip,hustlzp/zulip,umkay/zulip,ericzhou2008/zulip,zhaoweigg/zulip,dwrpayne/zulip,shrikrishnaholla/zulip,cosmicAsymmetry/zulip,wavelets/zulip,hayderimran7/zulip,guiquanz/zulip,ryanbackman/zulip,schatt/zulip,karamcnair/zulip,lfranchi/zulip,Galexrt/zulip,technicalpickles/zulip,zacps/zulip,vakila/zulip,dwrpayne/zulip,KingxBanana/zulip,krtkmj/zulip,sonali0901/zulip,yuvipanda/zulip,samatdav/zulip,he15his/zulip,nicholasbs/zulip,brockwhittaker/zulip,j831/zulip,hackerkid/zulip,esander91/zulip,bowlofstew/zulip,mohsenSy/zulip,voidException/zulip,punchagan/zulip,dwrpayne/zulip,developerfm/zulip,developerfm/zulip,amanharitsh123/zulip,hj3938/zulip,PaulPetring/zulip,samatdav/zulip,brockwhittaker/zulip,easyfmxu/zulip,zofuthan/zulip,itnihao/zulip,susansls/zulip,glovebx/zulip,armooo/zulip,noroot/zulip,ericzhou2008/zulip,rht/zulip,Diptanshu8/zulip,yocome/zulip,RobotCaleb/zulip,dattatreya303/zulip,krtkmj/zulip,Batterfii/zulip,hengqujushi/zulip,ericzhou2008/zulip,yocome/zulip,dxq-git/zulip,glovebx/zulip,jessedhillon/zulip,paxapy/zulip,fw1121/zulip,zulip/zulip,synicalsyntax/zulip,Diptanshu8/zulip,stamhe/zulip,jainayush975/zulip,zofuthan
/zulip,wavelets/zulip,firstblade/zulip,xuxiao/zulip,tdr130/zulip,yocome/zulip,avastu/zulip,huangkebo/zulip,wavelets/zulip,pradiptad/zulip,LeeRisk/zulip,aliceriot/zulip,jrowan/zulip,Cheppers/zulip,schatt/zulip,hj3938/zulip,gkotian/zulip,glovebx/zulip,calvinleenyc/zulip,mansilladev/zulip,zachallaun/zulip,xuxiao/zulip,voidException/zulip,johnnygaddarr/zulip,zofuthan/zulip,fw1121/zulip,gkotian/zulip,synicalsyntax/zulip,peiwei/zulip,zwily/zulip,shubhamdhama/zulip,Batterfii/zulip,nicholasbs/zulip,wavelets/zulip,codeKonami/zulip,akuseru/zulip,swinghu/zulip,hj3938/zulip,shaunstanislaus/zulip,xuanhan863/zulip,m1ssou/zulip,avastu/zulip,dxq-git/zulip,wavelets/zulip,DazWorrall/zulip,dnmfarrell/zulip,xuanhan863/zulip,cosmicAsymmetry/zulip,yuvipanda/zulip,akuseru/zulip,Cheppers/zulip,technicalpickles/zulip,aakash-cr7/zulip,arpitpanwar/zulip,xuxiao/zulip,shrikrishnaholla/zulip,shaunstanislaus/zulip,hafeez3000/zulip,peguin40/zulip,AZtheAsian/zulip,alliejones/zulip,peiwei/zulip,glovebx/zulip,dotcool/zulip,bluesea/zulip,PaulPetring/zulip,qq1012803704/zulip,easyfmxu/zulip,RobotCaleb/zulip,he15his/zulip,thomasboyt/zulip,kaiyuanheshang/zulip,so0k/zulip,zachallaun/zulip,rht/zulip,ryansnowboarder/zulip,andersk/zulip,shubhamdhama/zulip,paxapy/zulip,jphilipsen05/zulip,tommyip/zulip,so0k/zulip,Drooids/zulip,MayB/zulip,suxinde2009/zulip,eastlhu/zulip,vakila/zulip,dhcrzf/zulip,dxq-git/zulip,luyifan/zulip,armooo/zulip,susansls/zulip,zachallaun/zulip,niftynei/zulip,dotcool/zulip,Gabriel0402/zulip,MariaFaBella85/zulip,Frouk/zulip,j831/zulip,tbutter/zulip,dotcool/zulip,dattatreya303/zulip,blaze225/zulip,Galexrt/zulip,itnihao/zulip,hj3938/zulip,vakila/zulip,zorojean/zulip,amanharitsh123/zulip,aliceriot/zulip,wdaher/zulip,shrikrishnaholla/zulip,LeeRisk/zulip,shaunstanislaus/zulip,yuvipanda/zulip,wdaher/zulip,SmartPeople/zulip,atomic-labs/zulip,christi3k/zulip,hafeez3000/zulip,hengqujushi/zulip,m1ssou/zulip,easyfmxu/zulip,sup95/zulip,Qgap/zulip,guiquanz/zulip,luyifan/zulip,dnmfarrell/zulip,vabs22/zul
ip,kou/zulip,jessedhillon/zulip,so0k/zulip,littledogboy/zulip,gigawhitlocks/zulip,technicalpickles/zulip,zachallaun/zulip,bowlofstew/zulip,dxq-git/zulip,vaidap/zulip,jimmy54/zulip,so0k/zulip,m1ssou/zulip,schatt/zulip,atomic-labs/zulip,ufosky-server/zulip,shaunstanislaus/zulip,hayderimran7/zulip,paxapy/zulip,dxq-git/zulip,timabbott/zulip,souravbadami/zulip,ufosky-server/zulip,PhilSk/zulip,Frouk/zulip,johnnygaddarr/zulip,zofuthan/zulip,saitodisse/zulip,udxxabp/zulip,yuvipanda/zulip,gigawhitlocks/zulip,seapasulli/zulip,sonali0901/zulip,bitemyapp/zulip,Diptanshu8/zulip,tommyip/zulip,jimmy54/zulip,JPJPJPOPOP/zulip,jeffcao/zulip,seapasulli/zulip,zulip/zulip,synicalsyntax/zulip,jonesgithub/zulip,schatt/zulip,he15his/zulip,shrikrishnaholla/zulip,eastlhu/zulip,gkotian/zulip,rishig/zulip,rishig/zulip,moria/zulip,esander91/zulip,eastlhu/zulip,wangdeshui/zulip,jphilipsen05/zulip,udxxabp/zulip,sharmaeklavya2/zulip,aliceriot/zulip,calvinleenyc/zulip,grave-w-grave/zulip,eastlhu/zulip,mansilladev/zulip,schatt/zulip,zhaoweigg/zulip,firstblade/zulip,dnmfarrell/zulip,showell/zulip,Drooids/zulip,wangdeshui/zulip,calvinleenyc/zulip,bowlofstew/zulip,bastianh/zulip,zulip/zulip,ashwinirudrappa/zulip,tdr130/zulip,vabs22/zulip,stamhe/zulip,kokoar/zulip,vikas-parashar/zulip,LAndreas/zulip,eeshangarg/zulip,firstblade/zulip,joyhchen/zulip,mohsenSy/zulip,brockwhittaker/zulip,zwily/zulip,PaulPetring/zulip,tiansiyuan/zulip,developerfm/zulip,LAndreas/zulip,ikasumiwt/zulip,udxxabp/zulip,timabbott/zulip,jphilipsen05/zulip,xuanhan863/zulip,aps-sids/zulip,jimmy54/zulip,voidException/zulip,sup95/zulip,praveenaki/zulip,JPJPJPOPOP/zulip,paxapy/zulip,avastu/zulip,johnny9/zulip,niftynei/zulip,suxinde2009/zulip,ikasumiwt/zulip,rishig/zulip,firstblade/zulip,samatdav/zulip,krtkmj/zulip,EasonYi/zulip,PhilSk/zulip,vaidap/zulip,arpitpanwar/zulip,punchagan/zulip,amallia/zulip,jackrzhang/zulip,fw1121/zulip,adnanh/zulip,j831/zulip,arpith/zulip,verma-varsha/zulip,willingc/zulip,technicalpickles/zulip,kaiyuanheshang/z
ulip,dawran6/zulip,Gabriel0402/zulip,verma-varsha/zulip,reyha/zulip,AZtheAsian/zulip,xuxiao/zulip,glovebx/zulip,j831/zulip,Galexrt/zulip,showell/zulip,joshisa/zulip,wavelets/zulip,bowlofstew/zulip,ApsOps/zulip,hj3938/zulip,hengqujushi/zulip,ryanbackman/zulip,bluesea/zulip,swinghu/zulip,jphilipsen05/zulip,EasonYi/zulip,SmartPeople/zulip,shaunstanislaus/zulip,synicalsyntax/zulip,jainayush975/zulip,ashwinirudrappa/zulip,ryanbackman/zulip,kou/zulip,alliejones/zulip,wdaher/zulip,KingxBanana/zulip,hustlzp/zulip,kokoar/zulip,Suninus/zulip,dotcool/zulip,isht3/zulip,verma-varsha/zulip,Gabriel0402/zulip,rht/zulip,brockwhittaker/zulip,dattatreya303/zulip,lfranchi/zulip,tommyip/zulip,ipernet/zulip,vakila/zulip,huangkebo/zulip,jeffcao/zulip,wangdeshui/zulip,bitemyapp/zulip,tommyip/zulip,gkotian/zulip,Jianchun1/zulip,bluesea/zulip,ryansnowboarder/zulip,moria/zulip,jainayush975/zulip,zacps/zulip,praveenaki/zulip,Juanvulcano/zulip,joshisa/zulip,umkay/zulip,eeshangarg/zulip,andersk/zulip,bowlofstew/zulip,ryanbackman/zulip,KJin99/zulip,vabs22/zulip,johnny9/zulip,littledogboy/zulip,wweiradio/zulip,LAndreas/zulip,rishig/zulip,voidException/zulip,huangkebo/zulip,cosmicAsymmetry/zulip,christi3k/zulip,fw1121/zulip,technicalpickles/zulip,jackrzhang/zulip,glovebx/zulip,zwily/zulip,seapasulli/zulip,joyhchen/zulip,armooo/zulip,adnanh/zulip,thomasboyt/zulip,peguin40/zulip,yuvipanda/zulip,zhaoweigg/zulip,praveenaki/zulip,zorojean/zulip,armooo/zulip,willingc/zulip,mdavid/zulip,amyliu345/zulip,jphilipsen05/zulip,jessedhillon/zulip,zorojean/zulip,amallia/zulip,dxq-git/zulip,DazWorrall/zulip,peguin40/zulip,fw1121/zulip,kou/zulip,Frouk/zulip,Juanvulcano/zulip,andersk/zulip,ahmadassaf/zulip,babbage/zulip,codeKonami/zulip,dotcool/zulip,kaiyuanheshang/zulip,andersk/zulip,natanovia/zulip,PaulPetring/zulip,samatdav/zulip,j831/zulip,vabs22/zulip,swinghu/zulip,christi3k/zulip,jainayush975/zulip,saitodisse/zulip,brainwane/zulip,tbutter/zulip,suxinde2009/zulip,deer-hope/zulip,Frouk/zulip,ericzhou2008/zulip,w
daher/zulip,peiwei/zulip,reyha/zulip,niftynei/zulip,sharmaeklavya2/zulip,praveenaki/zulip,lfranchi/zulip,noroot/zulip,adnanh/zulip,ashwinirudrappa/zulip,amyliu345/zulip,jrowan/zulip,PhilSk/zulip,punchagan/zulip,AZtheAsian/zulip,babbage/zulip,vakila/zulip,jessedhillon/zulip,codeKonami/zulip,EasonYi/zulip,ipernet/zulip,xuxiao/zulip,ashwinirudrappa/zulip,samatdav/zulip,niftynei/zulip,Batterfii/zulip,hafeez3000/zulip,so0k/zulip,MayB/zulip,moria/zulip,souravbadami/zulip,KingxBanana/zulip,proliming/zulip,m1ssou/zulip,dattatreya303/zulip,so0k/zulip,bitemyapp/zulip,amyliu345/zulip,Frouk/zulip,zorojean/zulip,showell/zulip,qq1012803704/zulip,kokoar/zulip,umkay/zulip,umkay/zulip,lfranchi/zulip,sharmaeklavya2/zulip,wavelets/zulip,JPJPJPOPOP/zulip,nicholasbs/zulip,dhcrzf/zulip,noroot/zulip,dawran6/zulip,aliceriot/zulip,KJin99/zulip,SmartPeople/zulip,aakash-cr7/zulip,fw1121/zulip,LeeRisk/zulip,DazWorrall/zulip,Diptanshu8/zulip,saitodisse/zulip,KJin99/zulip,ryansnowboarder/zulip,udxxabp/zulip,kaiyuanheshang/zulip,mohsenSy/zulip,jonesgithub/zulip,isht3/zulip,atomic-labs/zulip,luyifan/zulip,shrikrishnaholla/zulip,tbutter/zulip,natanovia/zulip,ikasumiwt/zulip,ryansnowboarder/zulip,bowlofstew/zulip,wangdeshui/zulip,bastianh/zulip,susansls/zulip,sup95/zulip,dwrpayne/zulip,bowlofstew/zulip,guiquanz/zulip,mahim97/zulip,eeshangarg/zulip,gkotian/zulip,dhcrzf/zulip,developerfm/zulip,hayderimran7/zulip,luyifan/zulip,bitemyapp/zulip,levixie/zulip,Qgap/zulip,tdr130/zulip,eeshangarg/zulip,kaiyuanheshang/zulip,jrowan/zulip,jphilipsen05/zulip,moria/zulip,zorojean/zulip,JPJPJPOPOP/zulip,hj3938/zulip,isht3/zulip,wdaher/zulip,babbage/zulip,nicholasbs/zulip,alliejones/zulip,adnanh/zulip,Batterfii/zulip,bastianh/zulip,huangkebo/zulip,shaunstanislaus/zulip,firstblade/zulip,lfranchi/zulip,jimmy54/zulip,yocome/zulip,timabbott/zulip,esander91/zulip,bssrdf/zulip,yocome/zulip,Qgap/zulip,hustlzp/zulip,jerryge/zulip,EasonYi/zulip,bluesea/zulip,JPJPJPOPOP/zulip,mansilladev/zulip,moria/zulip,ApsOps/zulip,rht/zu
lip,LeeRisk/zulip,hengqujushi/zulip,akuseru/zulip,ahmadassaf/zulip,reyha/zulip,jerryge/zulip,zacps/zulip,TigorC/zulip,Suninus/zulip,cosmicAsymmetry/zulip,jackrzhang/zulip,littledogboy/zulip,amanharitsh123/zulip,suxinde2009/zulip,dattatreya303/zulip,praveenaki/zulip,eastlhu/zulip,zacps/zulip,zhaoweigg/zulip,DazWorrall/zulip,aps-sids/zulip,dnmfarrell/zulip,guiquanz/zulip,seapasulli/zulip,TigorC/zulip,Juanvulcano/zulip,joshisa/zulip,hayderimran7/zulip,MariaFaBella85/zulip,timabbott/zulip,andersk/zulip,jeffcao/zulip,KingxBanana/zulip,thomasboyt/zulip,proliming/zulip,proliming/zulip,hengqujushi/zulip,JanzTam/zulip,proliming/zulip,Vallher/zulip,willingc/zulip,susansls/zulip,brockwhittaker/zulip,developerfm/zulip,reyha/zulip,johnny9/zulip,deer-hope/zulip,littledogboy/zulip,mohsenSy/zulip,MariaFaBella85/zulip,littledogboy/zulip,cosmicAsymmetry/zulip,yuvipanda/zulip,ipernet/zulip,avastu/zulip,dotcool/zulip,Suninus/zulip,Suninus/zulip,KJin99/zulip,bssrdf/zulip,kaiyuanheshang/zulip,Cheppers/zulip,sonali0901/zulip,souravbadami/zulip,tiansiyuan/zulip,Diptanshu8/zulip,souravbadami/zulip,codeKonami/zulip,bastianh/zulip,aliceriot/zulip,levixie/zulip,verma-varsha/zulip,MariaFaBella85/zulip,jeffcao/zulip,vaidap/zulip,luyifan/zulip,bssrdf/zulip,ApsOps/zulip,ryansnowboarder/zulip,Vallher/zulip,hustlzp/zulip,stamhe/zulip,gigawhitlocks/zulip,he15his/zulip,ufosky-server/zulip,eeshangarg/zulip,ryansnowboarder/zulip,zofuthan/zulip,udxxabp/zulip,wangdeshui/zulip,punchagan/zulip,bitemyapp/zulip,susansls/zulip,ipernet/zulip,tbutter/zulip,jonesgithub/zulip,rishig/zulip,ahmadassaf/zulip,jimmy54/zulip,kokoar/zulip,karamcnair/zulip,jainayush975/zulip,umkay/zulip,vakila/zulip,dnmfarrell/zulip,joshisa/zulip,gigawhitlocks/zulip,natanovia/zulip,levixie/zulip,grave-w-grave/zulip,Drooids/zulip,AZtheAsian/zulip,SmartPeople/zulip,willingc/zulip,johnnygaddarr/zulip,EasonYi/zulip,jessedhillon/zulip,jonesgithub/zulip,proliming/zulip,aps-sids/zulip,deer-hope/zulip,JPJPJPOPOP/zulip,peguin40/zulip | 
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import humbug
import glob
import os.path
from distutils.core import setup
setup(name='humbug',
version=humbug.__version__,
description='Bindings for the Humbug message API',
author='Humbug, Inc.',
author_email='humbug@humbughq.com',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Communications :: Chat',
],
url='https://humbughq.com/dist/api/',
packages=['humbug'],
data_files=[('share/humbug/examples', ["examples/humbugrc", "examples/send-message"])] + \
[(os.path.join('share/humbug/', relpath),
glob.glob(os.path.join(relpath, '*'))) for relpath in
glob.glob("integrations/*")
],
scripts=["bin/humbug-send"],
)
Install the demos directory in /usr/local/share/ as well.
(imported from commit 7516e5318f9e30544f53703ead4bca60f2dd7828) | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import humbug
import glob
import os
from distutils.core import setup
setup(name='humbug',
version=humbug.__version__,
description='Bindings for the Humbug message API',
author='Humbug, Inc.',
author_email='humbug@humbughq.com',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Communications :: Chat',
],
url='https://humbughq.com/dist/api/',
packages=['humbug'],
data_files=[('share/humbug/examples', ["examples/humbugrc", "examples/send-message"])] + \
[(os.path.join('share/humbug/', relpath),
glob.glob(os.path.join(relpath, '*'))) for relpath in
glob.glob("integrations/*")] + \
[('share/humbug/demos',
[os.path.join("demos", relpath) for relpath in
os.listdir("demos")])],
scripts=["bin/humbug-send"],
)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import humbug
import glob
import os.path
from distutils.core import setup
setup(name='humbug',
version=humbug.__version__,
description='Bindings for the Humbug message API',
author='Humbug, Inc.',
author_email='humbug@humbughq.com',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Communications :: Chat',
],
url='https://humbughq.com/dist/api/',
packages=['humbug'],
data_files=[('share/humbug/examples', ["examples/humbugrc", "examples/send-message"])] + \
[(os.path.join('share/humbug/', relpath),
glob.glob(os.path.join(relpath, '*'))) for relpath in
glob.glob("integrations/*")
],
scripts=["bin/humbug-send"],
)
<commit_msg>Install the demos directory in /usr/local/share/ as well.
(imported from commit 7516e5318f9e30544f53703ead4bca60f2dd7828)<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import humbug
import glob
import os
from distutils.core import setup
setup(name='humbug',
version=humbug.__version__,
description='Bindings for the Humbug message API',
author='Humbug, Inc.',
author_email='humbug@humbughq.com',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Communications :: Chat',
],
url='https://humbughq.com/dist/api/',
packages=['humbug'],
data_files=[('share/humbug/examples', ["examples/humbugrc", "examples/send-message"])] + \
[(os.path.join('share/humbug/', relpath),
glob.glob(os.path.join(relpath, '*'))) for relpath in
glob.glob("integrations/*")] + \
[('share/humbug/demos',
[os.path.join("demos", relpath) for relpath in
os.listdir("demos")])],
scripts=["bin/humbug-send"],
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import humbug
import glob
import os.path
from distutils.core import setup
setup(name='humbug',
version=humbug.__version__,
description='Bindings for the Humbug message API',
author='Humbug, Inc.',
author_email='humbug@humbughq.com',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Communications :: Chat',
],
url='https://humbughq.com/dist/api/',
packages=['humbug'],
data_files=[('share/humbug/examples', ["examples/humbugrc", "examples/send-message"])] + \
[(os.path.join('share/humbug/', relpath),
glob.glob(os.path.join(relpath, '*'))) for relpath in
glob.glob("integrations/*")
],
scripts=["bin/humbug-send"],
)
Install the demos directory in /usr/local/share/ as well.
(imported from commit 7516e5318f9e30544f53703ead4bca60f2dd7828)#!/usr/bin/env python
# -*- coding: utf-8 -*-
import humbug
import glob
import os
from distutils.core import setup
setup(name='humbug',
version=humbug.__version__,
description='Bindings for the Humbug message API',
author='Humbug, Inc.',
author_email='humbug@humbughq.com',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Communications :: Chat',
],
url='https://humbughq.com/dist/api/',
packages=['humbug'],
data_files=[('share/humbug/examples', ["examples/humbugrc", "examples/send-message"])] + \
[(os.path.join('share/humbug/', relpath),
glob.glob(os.path.join(relpath, '*'))) for relpath in
glob.glob("integrations/*")] + \
[('share/humbug/demos',
[os.path.join("demos", relpath) for relpath in
os.listdir("demos")])],
scripts=["bin/humbug-send"],
)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import humbug
import glob
import os.path
from distutils.core import setup
setup(name='humbug',
version=humbug.__version__,
description='Bindings for the Humbug message API',
author='Humbug, Inc.',
author_email='humbug@humbughq.com',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Communications :: Chat',
],
url='https://humbughq.com/dist/api/',
packages=['humbug'],
data_files=[('share/humbug/examples', ["examples/humbugrc", "examples/send-message"])] + \
[(os.path.join('share/humbug/', relpath),
glob.glob(os.path.join(relpath, '*'))) for relpath in
glob.glob("integrations/*")
],
scripts=["bin/humbug-send"],
)
<commit_msg>Install the demos directory in /usr/local/share/ as well.
(imported from commit 7516e5318f9e30544f53703ead4bca60f2dd7828)<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import humbug
import glob
import os
from distutils.core import setup
setup(name='humbug',
version=humbug.__version__,
description='Bindings for the Humbug message API',
author='Humbug, Inc.',
author_email='humbug@humbughq.com',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Communications :: Chat',
],
url='https://humbughq.com/dist/api/',
packages=['humbug'],
data_files=[('share/humbug/examples', ["examples/humbugrc", "examples/send-message"])] + \
[(os.path.join('share/humbug/', relpath),
glob.glob(os.path.join(relpath, '*'))) for relpath in
glob.glob("integrations/*")] + \
[('share/humbug/demos',
[os.path.join("demos", relpath) for relpath in
os.listdir("demos")])],
scripts=["bin/humbug-send"],
)
|
72064e373e6b13f5847199aeb8116ab1708523b2 | astroquery/cadc/tests/setup_package.py | astroquery/cadc/tests/setup_package.py | # Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import absolute_import
import os
# setup paths to the test data
# can specify a single file or a list of files
def get_package_data():
paths = [os.path.join('data', '*.vot'),
os.path.join('data', '*.xml'),
os.path.join('data', '*.pem'),
] # etc, add other extensions
# you can also enlist files individually by names
# finally construct and return a dict for the sub module
return {'astroquery.cadc.tests': paths}
| # Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import absolute_import
import os
# setup paths to the test data
# can specify a single file or a list of files
def get_package_data():
paths = [os.path.join('data', '*.vot'),
os.path.join('data', '*.xml'),
os.path.join('data', '*.pem'),
os.path.join('data', '*.fits'),
] # etc, add other extensions
# you can also enlist files individually by names
# finally construct and return a dict for the sub module
return {'astroquery.cadc.tests': paths}
| Add fits file to package build | Add fits file to package build
| Python | bsd-3-clause | imbasimba/astroquery,ceb8/astroquery,ceb8/astroquery,imbasimba/astroquery | # Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import absolute_import
import os
# setup paths to the test data
# can specify a single file or a list of files
def get_package_data():
paths = [os.path.join('data', '*.vot'),
os.path.join('data', '*.xml'),
os.path.join('data', '*.pem'),
] # etc, add other extensions
# you can also enlist files individually by names
# finally construct and return a dict for the sub module
return {'astroquery.cadc.tests': paths}
Add fits file to package build | # Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import absolute_import
import os
# setup paths to the test data
# can specify a single file or a list of files
def get_package_data():
paths = [os.path.join('data', '*.vot'),
os.path.join('data', '*.xml'),
os.path.join('data', '*.pem'),
os.path.join('data', '*.fits'),
] # etc, add other extensions
# you can also enlist files individually by names
# finally construct and return a dict for the sub module
return {'astroquery.cadc.tests': paths}
| <commit_before># Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import absolute_import
import os
# setup paths to the test data
# can specify a single file or a list of files
def get_package_data():
paths = [os.path.join('data', '*.vot'),
os.path.join('data', '*.xml'),
os.path.join('data', '*.pem'),
] # etc, add other extensions
# you can also enlist files individually by names
# finally construct and return a dict for the sub module
return {'astroquery.cadc.tests': paths}
<commit_msg>Add fits file to package build<commit_after> | # Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import absolute_import
import os
# setup paths to the test data
# can specify a single file or a list of files
def get_package_data():
paths = [os.path.join('data', '*.vot'),
os.path.join('data', '*.xml'),
os.path.join('data', '*.pem'),
os.path.join('data', '*.fits'),
] # etc, add other extensions
# you can also enlist files individually by names
# finally construct and return a dict for the sub module
return {'astroquery.cadc.tests': paths}
| # Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import absolute_import
import os
# setup paths to the test data
# can specify a single file or a list of files
def get_package_data():
paths = [os.path.join('data', '*.vot'),
os.path.join('data', '*.xml'),
os.path.join('data', '*.pem'),
] # etc, add other extensions
# you can also enlist files individually by names
# finally construct and return a dict for the sub module
return {'astroquery.cadc.tests': paths}
Add fits file to package build# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import absolute_import
import os
# setup paths to the test data
# can specify a single file or a list of files
def get_package_data():
paths = [os.path.join('data', '*.vot'),
os.path.join('data', '*.xml'),
os.path.join('data', '*.pem'),
os.path.join('data', '*.fits'),
] # etc, add other extensions
# you can also enlist files individually by names
# finally construct and return a dict for the sub module
return {'astroquery.cadc.tests': paths}
| <commit_before># Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import absolute_import
import os
# setup paths to the test data
# can specify a single file or a list of files
def get_package_data():
paths = [os.path.join('data', '*.vot'),
os.path.join('data', '*.xml'),
os.path.join('data', '*.pem'),
] # etc, add other extensions
# you can also enlist files individually by names
# finally construct and return a dict for the sub module
return {'astroquery.cadc.tests': paths}
<commit_msg>Add fits file to package build<commit_after># Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import absolute_import
import os
# setup paths to the test data
# can specify a single file or a list of files
def get_package_data():
paths = [os.path.join('data', '*.vot'),
os.path.join('data', '*.xml'),
os.path.join('data', '*.pem'),
os.path.join('data', '*.fits'),
] # etc, add other extensions
# you can also enlist files individually by names
# finally construct and return a dict for the sub module
return {'astroquery.cadc.tests': paths}
|
1e71dbaa3d82ca757e1e38f114f2153a2d54500e | app/views.py | app/views.py | from flask import Flask, render_template, session, redirect, url_for, flash
from app import app, forms, models
@app.route('/', methods=['GET', 'POST'])
def index():
login_form = forms.LoginForm()
if login_form.validate_on_submit():
user = models.User.query.filter_by(username = login_form.username.data).first()
if user:
# Used to display user-specific nav items
session['logged_in'] = True
return redirect(url_for('create'))
else:
flash('Invalid username or password.')
return render_template('login.html', form=login_form)
@app.route('/logout')
def logout():
session['logged_in'] = False
flash('You have been logged out.')
return redirect('/')
@app.route('/create/', methods=['GET', 'POST'])
def create():
form = forms.CreateForm()
if form.validate_on_submit():
# TODO: If save was a success, inform user
# Otherwise report error message.
flash('The form has been submitted successfully.')
return render_template('create.html', form=form)
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html'), 404
| from flask import Flask, render_template, session, redirect, url_for, flash
from app import app, forms, models, db
import datetime
@app.route('/', methods=['GET', 'POST'])
def index():
login_form = forms.LoginForm()
if login_form.validate_on_submit():
user = models.User.query.filter_by(username = login_form.username.data).first()
if user:
# Used to display user-specific nav items
session['logged_in'] = True
return redirect(url_for('create'))
else:
flash('Invalid username or password.')
return render_template('login.html', form=login_form)
@app.route('/logout')
def logout():
session['logged_in'] = False
flash('You have been logged out.')
return redirect('/')
@app.route('/create/', methods=['GET', 'POST'])
def create():
if not session['logged_in']:
flash('You are not logged into the system.')
return redirect('/')
create_form = forms.CreateForm()
if create_form.validate_on_submit():
# Create a patient from user input
patient = models.Patient(forename = create_form.forename.data,
surname = create_form.surname.data,
dob = datetime.datetime.strptime(create_form.dob.data, "%d/%m/%Y"),
mobile = create_form.mobile.data
)
# Add patient data to database
db.session.add(patient)
db.session.commit()
# Reset the form & redirect to self.
flash('The form has been submitted successfully.')
create_form.reset()
return render_template('create.html', form=create_form,error='error')
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html'), 404
| Add patient data from the create form to the database. | Add patient data from the create form to the database.
| Python | mit | jawrainey/atc,jawrainey/atc | from flask import Flask, render_template, session, redirect, url_for, flash
from app import app, forms, models
@app.route('/', methods=['GET', 'POST'])
def index():
login_form = forms.LoginForm()
if login_form.validate_on_submit():
user = models.User.query.filter_by(username = login_form.username.data).first()
if user:
# Used to display user-specific nav items
session['logged_in'] = True
return redirect(url_for('create'))
else:
flash('Invalid username or password.')
return render_template('login.html', form=login_form)
@app.route('/logout')
def logout():
session['logged_in'] = False
flash('You have been logged out.')
return redirect('/')
@app.route('/create/', methods=['GET', 'POST'])
def create():
form = forms.CreateForm()
if form.validate_on_submit():
# TODO: If save was a success, inform user
# Otherwise report error message.
flash('The form has been submitted successfully.')
return render_template('create.html', form=form)
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html'), 404
Add patient data from the create form to the database. | from flask import Flask, render_template, session, redirect, url_for, flash
from app import app, forms, models, db
import datetime
@app.route('/', methods=['GET', 'POST'])
def index():
login_form = forms.LoginForm()
if login_form.validate_on_submit():
user = models.User.query.filter_by(username = login_form.username.data).first()
if user:
# Used to display user-specific nav items
session['logged_in'] = True
return redirect(url_for('create'))
else:
flash('Invalid username or password.')
return render_template('login.html', form=login_form)
@app.route('/logout')
def logout():
session['logged_in'] = False
flash('You have been logged out.')
return redirect('/')
@app.route('/create/', methods=['GET', 'POST'])
def create():
if not session['logged_in']:
flash('You are not logged into the system.')
return redirect('/')
create_form = forms.CreateForm()
if create_form.validate_on_submit():
# Create a patient from user input
patient = models.Patient(forename = create_form.forename.data,
surname = create_form.surname.data,
dob = datetime.datetime.strptime(create_form.dob.data, "%d/%m/%Y"),
mobile = create_form.mobile.data
)
# Add patient data to database
db.session.add(patient)
db.session.commit()
# Reset the form & redirect to self.
flash('The form has been submitted successfully.')
create_form.reset()
return render_template('create.html', form=create_form,error='error')
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html'), 404
| <commit_before>from flask import Flask, render_template, session, redirect, url_for, flash
from app import app, forms, models
@app.route('/', methods=['GET', 'POST'])
def index():
login_form = forms.LoginForm()
if login_form.validate_on_submit():
user = models.User.query.filter_by(username = login_form.username.data).first()
if user:
# Used to display user-specific nav items
session['logged_in'] = True
return redirect(url_for('create'))
else:
flash('Invalid username or password.')
return render_template('login.html', form=login_form)
@app.route('/logout')
def logout():
session['logged_in'] = False
flash('You have been logged out.')
return redirect('/')
@app.route('/create/', methods=['GET', 'POST'])
def create():
form = forms.CreateForm()
if form.validate_on_submit():
# TODO: If save was a success, inform user
# Otherwise report error message.
flash('The form has been submitted successfully.')
return render_template('create.html', form=form)
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html'), 404
<commit_msg>Add patient data from the create form to the database.<commit_after> | from flask import Flask, render_template, session, redirect, url_for, flash
from app import app, forms, models, db
import datetime
@app.route('/', methods=['GET', 'POST'])
def index():
login_form = forms.LoginForm()
if login_form.validate_on_submit():
user = models.User.query.filter_by(username = login_form.username.data).first()
if user:
# Used to display user-specific nav items
session['logged_in'] = True
return redirect(url_for('create'))
else:
flash('Invalid username or password.')
return render_template('login.html', form=login_form)
@app.route('/logout')
def logout():
session['logged_in'] = False
flash('You have been logged out.')
return redirect('/')
@app.route('/create/', methods=['GET', 'POST'])
def create():
if not session['logged_in']:
flash('You are not logged into the system.')
return redirect('/')
create_form = forms.CreateForm()
if create_form.validate_on_submit():
# Create a patient from user input
patient = models.Patient(forename = create_form.forename.data,
surname = create_form.surname.data,
dob = datetime.datetime.strptime(create_form.dob.data, "%d/%m/%Y"),
mobile = create_form.mobile.data
)
# Add patient data to database
db.session.add(patient)
db.session.commit()
# Reset the form & redirect to self.
flash('The form has been submitted successfully.')
create_form.reset()
return render_template('create.html', form=create_form,error='error')
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html'), 404
| from flask import Flask, render_template, session, redirect, url_for, flash
from app import app, forms, models
@app.route('/', methods=['GET', 'POST'])
def index():
login_form = forms.LoginForm()
if login_form.validate_on_submit():
user = models.User.query.filter_by(username = login_form.username.data).first()
if user:
# Used to display user-specific nav items
session['logged_in'] = True
return redirect(url_for('create'))
else:
flash('Invalid username or password.')
return render_template('login.html', form=login_form)
@app.route('/logout')
def logout():
session['logged_in'] = False
flash('You have been logged out.')
return redirect('/')
@app.route('/create/', methods=['GET', 'POST'])
def create():
form = forms.CreateForm()
if form.validate_on_submit():
# TODO: If save was a success, inform user
# Otherwise report error message.
flash('The form has been submitted successfully.')
return render_template('create.html', form=form)
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html'), 404
Add patient data from the create form to the database.from flask import Flask, render_template, session, redirect, url_for, flash
from app import app, forms, models, db
import datetime
@app.route('/', methods=['GET', 'POST'])
def index():
login_form = forms.LoginForm()
if login_form.validate_on_submit():
user = models.User.query.filter_by(username = login_form.username.data).first()
if user:
# Used to display user-specific nav items
session['logged_in'] = True
return redirect(url_for('create'))
else:
flash('Invalid username or password.')
return render_template('login.html', form=login_form)
@app.route('/logout')
def logout():
session['logged_in'] = False
flash('You have been logged out.')
return redirect('/')
@app.route('/create/', methods=['GET', 'POST'])
def create():
if not session['logged_in']:
flash('You are not logged into the system.')
return redirect('/')
create_form = forms.CreateForm()
if create_form.validate_on_submit():
# Create a patient from user input
patient = models.Patient(forename = create_form.forename.data,
surname = create_form.surname.data,
dob = datetime.datetime.strptime(create_form.dob.data, "%d/%m/%Y"),
mobile = create_form.mobile.data
)
# Add patient data to database
db.session.add(patient)
db.session.commit()
# Reset the form & redirect to self.
flash('The form has been submitted successfully.')
create_form.reset()
return render_template('create.html', form=create_form,error='error')
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html'), 404
| <commit_before>from flask import Flask, render_template, session, redirect, url_for, flash
from app import app, forms, models
@app.route('/', methods=['GET', 'POST'])
def index():
login_form = forms.LoginForm()
if login_form.validate_on_submit():
user = models.User.query.filter_by(username = login_form.username.data).first()
if user:
# Used to display user-specific nav items
session['logged_in'] = True
return redirect(url_for('create'))
else:
flash('Invalid username or password.')
return render_template('login.html', form=login_form)
@app.route('/logout')
def logout():
session['logged_in'] = False
flash('You have been logged out.')
return redirect('/')
@app.route('/create/', methods=['GET', 'POST'])
def create():
form = forms.CreateForm()
if form.validate_on_submit():
# TODO: If save was a success, inform user
# Otherwise report error message.
flash('The form has been submitted successfully.')
return render_template('create.html', form=form)
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html'), 404
<commit_msg>Add patient data from the create form to the database.<commit_after>from flask import Flask, render_template, session, redirect, url_for, flash
from app import app, forms, models, db
import datetime
@app.route('/', methods=['GET', 'POST'])
def index():
login_form = forms.LoginForm()
if login_form.validate_on_submit():
user = models.User.query.filter_by(username = login_form.username.data).first()
if user:
# Used to display user-specific nav items
session['logged_in'] = True
return redirect(url_for('create'))
else:
flash('Invalid username or password.')
return render_template('login.html', form=login_form)
@app.route('/logout')
def logout():
session['logged_in'] = False
flash('You have been logged out.')
return redirect('/')
@app.route('/create/', methods=['GET', 'POST'])
def create():
if not session['logged_in']:
flash('You are not logged into the system.')
return redirect('/')
create_form = forms.CreateForm()
if create_form.validate_on_submit():
# Create a patient from user input
patient = models.Patient(forename = create_form.forename.data,
surname = create_form.surname.data,
dob = datetime.datetime.strptime(create_form.dob.data, "%d/%m/%Y"),
mobile = create_form.mobile.data
)
# Add patient data to database
db.session.add(patient)
db.session.commit()
# Reset the form & redirect to self.
flash('The form has been submitted successfully.')
create_form.reset()
return render_template('create.html', form=create_form,error='error')
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html'), 404
|
5ab0e32f3a3b49747b6035cee6dcc1002b5075e1 | path/program_drive_provider.py | path/program_drive_provider.py | import os
from functools import lru_cache
from .. import logger
# use absolute path because of re-occuraing imports '.' could not work
from .program_path_provider import get_cached_program_path
@lru_cache(maxsize=None)
def get_cached_program_drive():
"""Get the value of the #PROGRAMDRIVE# variable"""
rainmeterpath = get_cached_program_path()
if not rainmeterpath:
logger.info(__file__, "get_cached_program_drive", "Rainmeter program path not found.")
return
probe_drive = os.path.splitdrive(rainmeterpath)
if not probe_drive:
logger.info(__file__, "get_cached_program_drive", "Drive could not be extracted from '" + rainmeterpath + "'.")
return
# can be either a drive like C:\
# or an UNC Mount Point like //host/computer
return probe_drive[0]
| """
This module provides methods for determine the program drive.
Rainmeter has an built-in variable called #PROGRAMDRIVE#.
With this you can directly route to the drive in which Rainmeter is contained.
If by some chance people use @Include on #PROGRAMDRIVE# it is still able to resolve
the path and open the include for you.
"""
import os
from functools import lru_cache
from .. import logger
# use absolute path because of re-occuraing imports '.' could not work
from .program_path_provider import get_cached_program_path
@lru_cache(maxsize=None)
def get_cached_program_drive():
"""Get the value of the #PROGRAMDRIVE# variable."""
rainmeterpath = get_cached_program_path()
if not rainmeterpath:
logger.info(__file__, "get_cached_program_drive", "Rainmeter program path not found.")
return
probe_drive = os.path.splitdrive(rainmeterpath)
if not probe_drive:
logger.info(
__file__,
"get_cached_program_drive",
"Drive could not be extracted from '" + rainmeterpath + "'."
)
return
# can be either a drive like C:\
# or an UNC Mount Point like //host/computer
return probe_drive[0]
| Add docstring to program drive provider | Add docstring to program drive provider
| Python | mit | thatsIch/sublime-rainmeter | import os
from functools import lru_cache
from .. import logger
# use absolute path because of re-occuraing imports '.' could not work
from .program_path_provider import get_cached_program_path
@lru_cache(maxsize=None)
def get_cached_program_drive():
"""Get the value of the #PROGRAMDRIVE# variable"""
rainmeterpath = get_cached_program_path()
if not rainmeterpath:
logger.info(__file__, "get_cached_program_drive", "Rainmeter program path not found.")
return
probe_drive = os.path.splitdrive(rainmeterpath)
if not probe_drive:
logger.info(__file__, "get_cached_program_drive", "Drive could not be extracted from '" + rainmeterpath + "'.")
return
# can be either a drive like C:\
# or an UNC Mount Point like //host/computer
return probe_drive[0]
Add docstring to program drive provider | """
This module provides methods for determine the program drive.
Rainmeter has an built-in variable called #PROGRAMDRIVE#.
With this you can directly route to the drive in which Rainmeter is contained.
If by some chance people use @Include on #PROGRAMDRIVE# it is still able to resolve
the path and open the include for you.
"""
import os
from functools import lru_cache
from .. import logger
# use absolute path because of re-occuraing imports '.' could not work
from .program_path_provider import get_cached_program_path
@lru_cache(maxsize=None)
def get_cached_program_drive():
"""Get the value of the #PROGRAMDRIVE# variable."""
rainmeterpath = get_cached_program_path()
if not rainmeterpath:
logger.info(__file__, "get_cached_program_drive", "Rainmeter program path not found.")
return
probe_drive = os.path.splitdrive(rainmeterpath)
if not probe_drive:
logger.info(
__file__,
"get_cached_program_drive",
"Drive could not be extracted from '" + rainmeterpath + "'."
)
return
# can be either a drive like C:\
# or an UNC Mount Point like //host/computer
return probe_drive[0]
| <commit_before>import os
from functools import lru_cache
from .. import logger
# use absolute path because of re-occuraing imports '.' could not work
from .program_path_provider import get_cached_program_path
@lru_cache(maxsize=None)
def get_cached_program_drive():
"""Get the value of the #PROGRAMDRIVE# variable"""
rainmeterpath = get_cached_program_path()
if not rainmeterpath:
logger.info(__file__, "get_cached_program_drive", "Rainmeter program path not found.")
return
probe_drive = os.path.splitdrive(rainmeterpath)
if not probe_drive:
logger.info(__file__, "get_cached_program_drive", "Drive could not be extracted from '" + rainmeterpath + "'.")
return
# can be either a drive like C:\
# or an UNC Mount Point like //host/computer
return probe_drive[0]
<commit_msg>Add docstring to program drive provider<commit_after> | """
This module provides methods for determine the program drive.
Rainmeter has an built-in variable called #PROGRAMDRIVE#.
With this you can directly route to the drive in which Rainmeter is contained.
If by some chance people use @Include on #PROGRAMDRIVE# it is still able to resolve
the path and open the include for you.
"""
import os
from functools import lru_cache
from .. import logger
# use absolute path because of re-occuraing imports '.' could not work
from .program_path_provider import get_cached_program_path
@lru_cache(maxsize=None)
def get_cached_program_drive():
"""Get the value of the #PROGRAMDRIVE# variable."""
rainmeterpath = get_cached_program_path()
if not rainmeterpath:
logger.info(__file__, "get_cached_program_drive", "Rainmeter program path not found.")
return
probe_drive = os.path.splitdrive(rainmeterpath)
if not probe_drive:
logger.info(
__file__,
"get_cached_program_drive",
"Drive could not be extracted from '" + rainmeterpath + "'."
)
return
# can be either a drive like C:\
# or an UNC Mount Point like //host/computer
return probe_drive[0]
| import os
from functools import lru_cache
from .. import logger
# use absolute path because of re-occuraing imports '.' could not work
from .program_path_provider import get_cached_program_path
@lru_cache(maxsize=None)
def get_cached_program_drive():
"""Get the value of the #PROGRAMDRIVE# variable"""
rainmeterpath = get_cached_program_path()
if not rainmeterpath:
logger.info(__file__, "get_cached_program_drive", "Rainmeter program path not found.")
return
probe_drive = os.path.splitdrive(rainmeterpath)
if not probe_drive:
logger.info(__file__, "get_cached_program_drive", "Drive could not be extracted from '" + rainmeterpath + "'.")
return
# can be either a drive like C:\
# or an UNC Mount Point like //host/computer
return probe_drive[0]
Add docstring to program drive provider"""
This module provides methods for determine the program drive.
Rainmeter has an built-in variable called #PROGRAMDRIVE#.
With this you can directly route to the drive in which Rainmeter is contained.
If by some chance people use @Include on #PROGRAMDRIVE# it is still able to resolve
the path and open the include for you.
"""
import os
from functools import lru_cache
from .. import logger
# use absolute path because of re-occuraing imports '.' could not work
from .program_path_provider import get_cached_program_path
@lru_cache(maxsize=None)
def get_cached_program_drive():
"""Get the value of the #PROGRAMDRIVE# variable."""
rainmeterpath = get_cached_program_path()
if not rainmeterpath:
logger.info(__file__, "get_cached_program_drive", "Rainmeter program path not found.")
return
probe_drive = os.path.splitdrive(rainmeterpath)
if not probe_drive:
logger.info(
__file__,
"get_cached_program_drive",
"Drive could not be extracted from '" + rainmeterpath + "'."
)
return
# can be either a drive like C:\
# or an UNC Mount Point like //host/computer
return probe_drive[0]
| <commit_before>import os
from functools import lru_cache
from .. import logger
# use absolute path because of re-occuraing imports '.' could not work
from .program_path_provider import get_cached_program_path
@lru_cache(maxsize=None)
def get_cached_program_drive():
"""Get the value of the #PROGRAMDRIVE# variable"""
rainmeterpath = get_cached_program_path()
if not rainmeterpath:
logger.info(__file__, "get_cached_program_drive", "Rainmeter program path not found.")
return
probe_drive = os.path.splitdrive(rainmeterpath)
if not probe_drive:
logger.info(__file__, "get_cached_program_drive", "Drive could not be extracted from '" + rainmeterpath + "'.")
return
# can be either a drive like C:\
# or an UNC Mount Point like //host/computer
return probe_drive[0]
<commit_msg>Add docstring to program drive provider<commit_after>"""
This module provides methods for determine the program drive.
Rainmeter has an built-in variable called #PROGRAMDRIVE#.
With this you can directly route to the drive in which Rainmeter is contained.
If by some chance people use @Include on #PROGRAMDRIVE# it is still able to resolve
the path and open the include for you.
"""
import os
from functools import lru_cache
from .. import logger
# use absolute path because of re-occuraing imports '.' could not work
from .program_path_provider import get_cached_program_path
@lru_cache(maxsize=None)
def get_cached_program_drive():
"""Get the value of the #PROGRAMDRIVE# variable."""
rainmeterpath = get_cached_program_path()
if not rainmeterpath:
logger.info(__file__, "get_cached_program_drive", "Rainmeter program path not found.")
return
probe_drive = os.path.splitdrive(rainmeterpath)
if not probe_drive:
logger.info(
__file__,
"get_cached_program_drive",
"Drive could not be extracted from '" + rainmeterpath + "'."
)
return
# can be either a drive like C:\
# or an UNC Mount Point like //host/computer
return probe_drive[0]
|
d2a024dda2d9032680131b1e8fba38e6bcf0f671 | billjobs/tests/tests_user_admin_api.py | billjobs/tests/tests_user_admin_api.py | from django.test import TestCase
from django.contrib.auth.models import User
from rest_framework import status
from rest_framework.test import APIClient, APIRequestFactory, \
force_authenticate
from billjobs.views import UserAdmin
class UserAdminAPI(TestCase):
""" Test User Admin API REST endpoint """
fixtures=['account_test.yaml']
def setUp(self):
self.client = APIClient()
self.factory = APIRequestFactory()
self.admin = User.objects.get(pk=1)
def test_admin_list_user(self):
request = self.factory.get('/billjobs/users/')
force_authenticate(request, user=self.admin)
view = UserAdmin.as_view()
response = view(request)
self.assertEqual(response.status_code, status.HTTP_200_OK)
| from django.test import TestCase
from django.contrib.auth.models import User
from rest_framework import status
from rest_framework.test import APIClient, APIRequestFactory, \
force_authenticate
from billjobs.views import UserAdmin, UserAdminDetail
class UserAdminAPI(TestCase):
""" Test User Admin API REST endpoint """
fixtures=['account_test.yaml']
def setUp(self):
self.client = APIClient()
self.factory = APIRequestFactory()
self.admin = User.objects.get(pk=1)
def test_admin_list_user(self):
request = self.factory.get('/billjobs/users/')
force_authenticate(request, user=self.admin)
view = UserAdmin.as_view()
response = view(request)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_admin_retrieve_user(self):
request = self.factory.get('/billjobs/users/')
force_authenticate(request, user=self.admin)
view = UserAdminDetail.as_view()
response = view(request, pk=1)
self.assertEqual(response.status_code, status.HTTP_200_OK)
| Add test admin to retrieve a user detail | Add test admin to retrieve a user detail
| Python | mit | ioO/billjobs | from django.test import TestCase
from django.contrib.auth.models import User
from rest_framework import status
from rest_framework.test import APIClient, APIRequestFactory, \
force_authenticate
from billjobs.views import UserAdmin
class UserAdminAPI(TestCase):
""" Test User Admin API REST endpoint """
fixtures=['account_test.yaml']
def setUp(self):
self.client = APIClient()
self.factory = APIRequestFactory()
self.admin = User.objects.get(pk=1)
def test_admin_list_user(self):
request = self.factory.get('/billjobs/users/')
force_authenticate(request, user=self.admin)
view = UserAdmin.as_view()
response = view(request)
self.assertEqual(response.status_code, status.HTTP_200_OK)
Add test admin to retrieve a user detail | from django.test import TestCase
from django.contrib.auth.models import User
from rest_framework import status
from rest_framework.test import APIClient, APIRequestFactory, \
force_authenticate
from billjobs.views import UserAdmin, UserAdminDetail
class UserAdminAPI(TestCase):
""" Test User Admin API REST endpoint """
fixtures=['account_test.yaml']
def setUp(self):
self.client = APIClient()
self.factory = APIRequestFactory()
self.admin = User.objects.get(pk=1)
def test_admin_list_user(self):
request = self.factory.get('/billjobs/users/')
force_authenticate(request, user=self.admin)
view = UserAdmin.as_view()
response = view(request)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_admin_retrieve_user(self):
request = self.factory.get('/billjobs/users/')
force_authenticate(request, user=self.admin)
view = UserAdminDetail.as_view()
response = view(request, pk=1)
self.assertEqual(response.status_code, status.HTTP_200_OK)
| <commit_before>from django.test import TestCase
from django.contrib.auth.models import User
from rest_framework import status
from rest_framework.test import APIClient, APIRequestFactory, \
force_authenticate
from billjobs.views import UserAdmin
class UserAdminAPI(TestCase):
""" Test User Admin API REST endpoint """
fixtures=['account_test.yaml']
def setUp(self):
self.client = APIClient()
self.factory = APIRequestFactory()
self.admin = User.objects.get(pk=1)
def test_admin_list_user(self):
request = self.factory.get('/billjobs/users/')
force_authenticate(request, user=self.admin)
view = UserAdmin.as_view()
response = view(request)
self.assertEqual(response.status_code, status.HTTP_200_OK)
<commit_msg>Add test admin to retrieve a user detail<commit_after> | from django.test import TestCase
from django.contrib.auth.models import User
from rest_framework import status
from rest_framework.test import APIClient, APIRequestFactory, \
force_authenticate
from billjobs.views import UserAdmin, UserAdminDetail
class UserAdminAPI(TestCase):
""" Test User Admin API REST endpoint """
fixtures=['account_test.yaml']
def setUp(self):
self.client = APIClient()
self.factory = APIRequestFactory()
self.admin = User.objects.get(pk=1)
def test_admin_list_user(self):
request = self.factory.get('/billjobs/users/')
force_authenticate(request, user=self.admin)
view = UserAdmin.as_view()
response = view(request)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_admin_retrieve_user(self):
request = self.factory.get('/billjobs/users/')
force_authenticate(request, user=self.admin)
view = UserAdminDetail.as_view()
response = view(request, pk=1)
self.assertEqual(response.status_code, status.HTTP_200_OK)
| from django.test import TestCase
from django.contrib.auth.models import User
from rest_framework import status
from rest_framework.test import APIClient, APIRequestFactory, \
force_authenticate
from billjobs.views import UserAdmin
class UserAdminAPI(TestCase):
""" Test User Admin API REST endpoint """
fixtures=['account_test.yaml']
def setUp(self):
self.client = APIClient()
self.factory = APIRequestFactory()
self.admin = User.objects.get(pk=1)
def test_admin_list_user(self):
request = self.factory.get('/billjobs/users/')
force_authenticate(request, user=self.admin)
view = UserAdmin.as_view()
response = view(request)
self.assertEqual(response.status_code, status.HTTP_200_OK)
Add test admin to retrieve a user detailfrom django.test import TestCase
from django.contrib.auth.models import User
from rest_framework import status
from rest_framework.test import APIClient, APIRequestFactory, \
force_authenticate
from billjobs.views import UserAdmin, UserAdminDetail
class UserAdminAPI(TestCase):
""" Test User Admin API REST endpoint """
fixtures=['account_test.yaml']
def setUp(self):
self.client = APIClient()
self.factory = APIRequestFactory()
self.admin = User.objects.get(pk=1)
def test_admin_list_user(self):
request = self.factory.get('/billjobs/users/')
force_authenticate(request, user=self.admin)
view = UserAdmin.as_view()
response = view(request)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_admin_retrieve_user(self):
request = self.factory.get('/billjobs/users/')
force_authenticate(request, user=self.admin)
view = UserAdminDetail.as_view()
response = view(request, pk=1)
self.assertEqual(response.status_code, status.HTTP_200_OK)
| <commit_before>from django.test import TestCase
from django.contrib.auth.models import User
from rest_framework import status
from rest_framework.test import APIClient, APIRequestFactory, \
force_authenticate
from billjobs.views import UserAdmin
class UserAdminAPI(TestCase):
""" Test User Admin API REST endpoint """
fixtures=['account_test.yaml']
def setUp(self):
self.client = APIClient()
self.factory = APIRequestFactory()
self.admin = User.objects.get(pk=1)
def test_admin_list_user(self):
request = self.factory.get('/billjobs/users/')
force_authenticate(request, user=self.admin)
view = UserAdmin.as_view()
response = view(request)
self.assertEqual(response.status_code, status.HTTP_200_OK)
<commit_msg>Add test admin to retrieve a user detail<commit_after>from django.test import TestCase
from django.contrib.auth.models import User
from rest_framework import status
from rest_framework.test import APIClient, APIRequestFactory, \
force_authenticate
from billjobs.views import UserAdmin, UserAdminDetail
class UserAdminAPI(TestCase):
""" Test User Admin API REST endpoint """
fixtures=['account_test.yaml']
def setUp(self):
self.client = APIClient()
self.factory = APIRequestFactory()
self.admin = User.objects.get(pk=1)
def test_admin_list_user(self):
request = self.factory.get('/billjobs/users/')
force_authenticate(request, user=self.admin)
view = UserAdmin.as_view()
response = view(request)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_admin_retrieve_user(self):
request = self.factory.get('/billjobs/users/')
force_authenticate(request, user=self.admin)
view = UserAdminDetail.as_view()
response = view(request, pk=1)
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
6ff6bdad9f7544be103e798838c12509411a2098 | tests/__init__.py | tests/__init__.py | import logging
from unittest import TestCase
import datetime
from redash import settings
settings.DATABASE_CONFIG = {
'name': 'circle_test',
'threadlocals': True
}
settings.REDIS_URL = "redis://localhost:6379/5"
from redash import models, redis_connection
logging.getLogger('peewee').setLevel(logging.INFO)
class BaseTestCase(TestCase):
def setUp(self):
models.create_db(True, True)
models.init_db()
def tearDown(self):
models.db.close_db(None)
models.create_db(False, True)
redis_connection.flushdb()
def assertResponseEqual(self, expected, actual):
for k, v in expected.iteritems():
if isinstance(v, datetime.datetime) or isinstance(actual[k], datetime.datetime):
continue
if isinstance(v, list):
continue
if isinstance(v, dict):
self.assertResponseEqual(v, actual[k])
continue
self.assertEqual(v, actual[k], "{} not equal (expected: {}, actual: {}).".format(k, v, actual[k]))
| import os
os.environ['REDASH_REDIS_URL'] = "redis://localhost:6379/5"
import logging
from unittest import TestCase
import datetime
from redash import settings
settings.DATABASE_CONFIG = {
'name': 'circle_test',
'threadlocals': True
}
from redash import models, redis_connection
logging.getLogger('peewee').setLevel(logging.INFO)
class BaseTestCase(TestCase):
def setUp(self):
models.create_db(True, True)
models.init_db()
def tearDown(self):
models.db.close_db(None)
models.create_db(False, True)
redis_connection.flushdb()
def assertResponseEqual(self, expected, actual):
for k, v in expected.iteritems():
if isinstance(v, datetime.datetime) or isinstance(actual[k], datetime.datetime):
continue
if isinstance(v, list):
continue
if isinstance(v, dict):
self.assertResponseEqual(v, actual[k])
continue
self.assertEqual(v, actual[k], "{} not equal (expected: {}, actual: {}).".format(k, v, actual[k]))
| Use the correct redis connection in tests | Use the correct redis connection in tests
| Python | bsd-2-clause | guaguadev/redash,vishesh92/redash,easytaxibr/redash,guaguadev/redash,pubnative/redash,M32Media/redash,vishesh92/redash,rockwotj/redash,amino-data/redash,pubnative/redash,chriszs/redash,alexanderlz/redash,stefanseifert/redash,vishesh92/redash,hudl/redash,ninneko/redash,easytaxibr/redash,akariv/redash,denisov-vlad/redash,denisov-vlad/redash,M32Media/redash,crowdworks/redash,imsally/redash,ninneko/redash,crowdworks/redash,getredash/redash,imsally/redash,chriszs/redash,guaguadev/redash,useabode/redash,imsally/redash,akariv/redash,44px/redash,easytaxibr/redash,useabode/redash,chriszs/redash,easytaxibr/redash,easytaxibr/redash,rockwotj/redash,moritz9/redash,vishesh92/redash,akariv/redash,getredash/redash,denisov-vlad/redash,useabode/redash,chriszs/redash,useabode/redash,stefanseifert/redash,getredash/redash,pubnative/redash,44px/redash,guaguadev/redash,getredash/redash,alexanderlz/redash,rockwotj/redash,stefanseifert/redash,crowdworks/redash,rockwotj/redash,denisov-vlad/redash,jmvasquez/redashtest,jmvasquez/redashtest,crowdworks/redash,EverlyWell/redash,amino-data/redash,amino-data/redash,44px/redash,pubnative/redash,hudl/redash,EverlyWell/redash,jmvasquez/redashtest,M32Media/redash,hudl/redash,alexanderlz/redash,pubnative/redash,stefanseifert/redash,ninneko/redash,akariv/redash,getredash/redash,stefanseifert/redash,imsally/redash,M32Media/redash,alexanderlz/redash,ninneko/redash,denisov-vlad/redash,akariv/redash,jmvasquez/redashtest,EverlyWell/redash,hudl/redash,ninneko/redash,moritz9/redash,moritz9/redash,guaguadev/redash,amino-data/redash,44px/redash,EverlyWell/redash,moritz9/redash,jmvasquez/redashtest | import logging
from unittest import TestCase
import datetime
from redash import settings
settings.DATABASE_CONFIG = {
'name': 'circle_test',
'threadlocals': True
}
settings.REDIS_URL = "redis://localhost:6379/5"
from redash import models, redis_connection
logging.getLogger('peewee').setLevel(logging.INFO)
class BaseTestCase(TestCase):
def setUp(self):
models.create_db(True, True)
models.init_db()
def tearDown(self):
models.db.close_db(None)
models.create_db(False, True)
redis_connection.flushdb()
def assertResponseEqual(self, expected, actual):
for k, v in expected.iteritems():
if isinstance(v, datetime.datetime) or isinstance(actual[k], datetime.datetime):
continue
if isinstance(v, list):
continue
if isinstance(v, dict):
self.assertResponseEqual(v, actual[k])
continue
self.assertEqual(v, actual[k], "{} not equal (expected: {}, actual: {}).".format(k, v, actual[k]))
Use the correct redis connection in tests | import os
os.environ['REDASH_REDIS_URL'] = "redis://localhost:6379/5"
import logging
from unittest import TestCase
import datetime
from redash import settings
settings.DATABASE_CONFIG = {
'name': 'circle_test',
'threadlocals': True
}
from redash import models, redis_connection
logging.getLogger('peewee').setLevel(logging.INFO)
class BaseTestCase(TestCase):
def setUp(self):
models.create_db(True, True)
models.init_db()
def tearDown(self):
models.db.close_db(None)
models.create_db(False, True)
redis_connection.flushdb()
def assertResponseEqual(self, expected, actual):
for k, v in expected.iteritems():
if isinstance(v, datetime.datetime) or isinstance(actual[k], datetime.datetime):
continue
if isinstance(v, list):
continue
if isinstance(v, dict):
self.assertResponseEqual(v, actual[k])
continue
self.assertEqual(v, actual[k], "{} not equal (expected: {}, actual: {}).".format(k, v, actual[k]))
| <commit_before>import logging
from unittest import TestCase
import datetime
from redash import settings
settings.DATABASE_CONFIG = {
'name': 'circle_test',
'threadlocals': True
}
settings.REDIS_URL = "redis://localhost:6379/5"
from redash import models, redis_connection
logging.getLogger('peewee').setLevel(logging.INFO)
class BaseTestCase(TestCase):
def setUp(self):
models.create_db(True, True)
models.init_db()
def tearDown(self):
models.db.close_db(None)
models.create_db(False, True)
redis_connection.flushdb()
def assertResponseEqual(self, expected, actual):
for k, v in expected.iteritems():
if isinstance(v, datetime.datetime) or isinstance(actual[k], datetime.datetime):
continue
if isinstance(v, list):
continue
if isinstance(v, dict):
self.assertResponseEqual(v, actual[k])
continue
self.assertEqual(v, actual[k], "{} not equal (expected: {}, actual: {}).".format(k, v, actual[k]))
<commit_msg>Use the correct redis connection in tests<commit_after> | import os
os.environ['REDASH_REDIS_URL'] = "redis://localhost:6379/5"
import logging
from unittest import TestCase
import datetime
from redash import settings
settings.DATABASE_CONFIG = {
'name': 'circle_test',
'threadlocals': True
}
from redash import models, redis_connection
logging.getLogger('peewee').setLevel(logging.INFO)
class BaseTestCase(TestCase):
def setUp(self):
models.create_db(True, True)
models.init_db()
def tearDown(self):
models.db.close_db(None)
models.create_db(False, True)
redis_connection.flushdb()
def assertResponseEqual(self, expected, actual):
for k, v in expected.iteritems():
if isinstance(v, datetime.datetime) or isinstance(actual[k], datetime.datetime):
continue
if isinstance(v, list):
continue
if isinstance(v, dict):
self.assertResponseEqual(v, actual[k])
continue
self.assertEqual(v, actual[k], "{} not equal (expected: {}, actual: {}).".format(k, v, actual[k]))
| import logging
from unittest import TestCase
import datetime
from redash import settings
settings.DATABASE_CONFIG = {
'name': 'circle_test',
'threadlocals': True
}
settings.REDIS_URL = "redis://localhost:6379/5"
from redash import models, redis_connection
logging.getLogger('peewee').setLevel(logging.INFO)
class BaseTestCase(TestCase):
def setUp(self):
models.create_db(True, True)
models.init_db()
def tearDown(self):
models.db.close_db(None)
models.create_db(False, True)
redis_connection.flushdb()
def assertResponseEqual(self, expected, actual):
for k, v in expected.iteritems():
if isinstance(v, datetime.datetime) or isinstance(actual[k], datetime.datetime):
continue
if isinstance(v, list):
continue
if isinstance(v, dict):
self.assertResponseEqual(v, actual[k])
continue
self.assertEqual(v, actual[k], "{} not equal (expected: {}, actual: {}).".format(k, v, actual[k]))
Use the correct redis connection in testsimport os
os.environ['REDASH_REDIS_URL'] = "redis://localhost:6379/5"
import logging
from unittest import TestCase
import datetime
from redash import settings
settings.DATABASE_CONFIG = {
'name': 'circle_test',
'threadlocals': True
}
from redash import models, redis_connection
logging.getLogger('peewee').setLevel(logging.INFO)
class BaseTestCase(TestCase):
def setUp(self):
models.create_db(True, True)
models.init_db()
def tearDown(self):
models.db.close_db(None)
models.create_db(False, True)
redis_connection.flushdb()
def assertResponseEqual(self, expected, actual):
for k, v in expected.iteritems():
if isinstance(v, datetime.datetime) or isinstance(actual[k], datetime.datetime):
continue
if isinstance(v, list):
continue
if isinstance(v, dict):
self.assertResponseEqual(v, actual[k])
continue
self.assertEqual(v, actual[k], "{} not equal (expected: {}, actual: {}).".format(k, v, actual[k]))
| <commit_before>import logging
from unittest import TestCase
import datetime
from redash import settings
settings.DATABASE_CONFIG = {
'name': 'circle_test',
'threadlocals': True
}
settings.REDIS_URL = "redis://localhost:6379/5"
from redash import models, redis_connection
logging.getLogger('peewee').setLevel(logging.INFO)
class BaseTestCase(TestCase):
def setUp(self):
models.create_db(True, True)
models.init_db()
def tearDown(self):
models.db.close_db(None)
models.create_db(False, True)
redis_connection.flushdb()
def assertResponseEqual(self, expected, actual):
for k, v in expected.iteritems():
if isinstance(v, datetime.datetime) or isinstance(actual[k], datetime.datetime):
continue
if isinstance(v, list):
continue
if isinstance(v, dict):
self.assertResponseEqual(v, actual[k])
continue
self.assertEqual(v, actual[k], "{} not equal (expected: {}, actual: {}).".format(k, v, actual[k]))
<commit_msg>Use the correct redis connection in tests<commit_after>import os
os.environ['REDASH_REDIS_URL'] = "redis://localhost:6379/5"
import logging
from unittest import TestCase
import datetime
from redash import settings
settings.DATABASE_CONFIG = {
'name': 'circle_test',
'threadlocals': True
}
from redash import models, redis_connection
logging.getLogger('peewee').setLevel(logging.INFO)
class BaseTestCase(TestCase):
def setUp(self):
models.create_db(True, True)
models.init_db()
def tearDown(self):
models.db.close_db(None)
models.create_db(False, True)
redis_connection.flushdb()
def assertResponseEqual(self, expected, actual):
for k, v in expected.iteritems():
if isinstance(v, datetime.datetime) or isinstance(actual[k], datetime.datetime):
continue
if isinstance(v, list):
continue
if isinstance(v, dict):
self.assertResponseEqual(v, actual[k])
continue
self.assertEqual(v, actual[k], "{} not equal (expected: {}, actual: {}).".format(k, v, actual[k]))
|
75d6920503b166efd778a6becf0939fe1d2cbe1f | openprescribing/pipeline/management/commands/fetch_prescribing_data.py | openprescribing/pipeline/management/commands/fetch_prescribing_data.py | import os
import requests
from bs4 import BeautifulSoup
from django.conf import settings
from django.core.management import BaseCommand
from openprescribing.utils import mkdir_p
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument("year", type=int)
parser.add_argument("month", type=int)
def handle(self, year, month, **kwargs):
year_and_month = "{year}_{month:02d}".format(year=year, month=month)
dir_path = os.path.join(
settings.PIPELINE_DATA_BASEDIR, "prescribing_v2", year_and_month
)
mkdir_p(dir_path)
rsp = requests.get(
"https://opendata.nhsbsa.net/dataset/english-prescribing-data-epd"
)
doc = BeautifulSoup(rsp.text, "html.parser")
filename = "epd_{year}{month:02d}.csv".format(year=year, month=month)
urls = [a["href"] for a in doc.find_all("a") if filename in a["href"]]
assert len(urls) == 1, urls
rsp = requests.get(urls[0], stream=True)
assert rsp.ok
with open(os.path.join(dir_path, filename), "wb") as f:
for block in rsp.iter_content(32 * 1024):
f.write(block)
| import os
import requests
from django.conf import settings
from django.core.management import BaseCommand
from openprescribing.utils import mkdir_p
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument("year", type=int)
parser.add_argument("month", type=int)
def handle(self, year, month, **kwargs):
rsp = requests.get(
"https://opendata.nhsbsa.net/api/3/action/package_show?id=english-prescribing-data-epd"
)
resources = rsp.json()["result"]["resources"]
urls = [
r["url"]
for r in resources
if r["name"] == "EPD_{year}{month:02d}".format(year=year, month=month)
]
assert len(urls) == 1, urls
rsp = requests.get(urls[0], stream=True)
assert rsp.ok
dir_path = os.path.join(
settings.PIPELINE_DATA_BASEDIR,
"prescribing_v2",
"{year}{month:02d}".format(year=year, month=month),
)
mkdir_p(dir_path)
filename = "epd_{year}{month:02d}.csv".format(year=year, month=month)
with open(os.path.join(dir_path, filename), "wb") as f:
for block in rsp.iter_content(32 * 1024):
f.write(block)
| Use BSA's API to get URL of latest prescribing data | Use BSA's API to get URL of latest prescribing data
| Python | mit | ebmdatalab/openprescribing,annapowellsmith/openpresc,annapowellsmith/openpresc,annapowellsmith/openpresc,annapowellsmith/openpresc,ebmdatalab/openprescribing,ebmdatalab/openprescribing,ebmdatalab/openprescribing | import os
import requests
from bs4 import BeautifulSoup
from django.conf import settings
from django.core.management import BaseCommand
from openprescribing.utils import mkdir_p
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument("year", type=int)
parser.add_argument("month", type=int)
def handle(self, year, month, **kwargs):
year_and_month = "{year}_{month:02d}".format(year=year, month=month)
dir_path = os.path.join(
settings.PIPELINE_DATA_BASEDIR, "prescribing_v2", year_and_month
)
mkdir_p(dir_path)
rsp = requests.get(
"https://opendata.nhsbsa.net/dataset/english-prescribing-data-epd"
)
doc = BeautifulSoup(rsp.text, "html.parser")
filename = "epd_{year}{month:02d}.csv".format(year=year, month=month)
urls = [a["href"] for a in doc.find_all("a") if filename in a["href"]]
assert len(urls) == 1, urls
rsp = requests.get(urls[0], stream=True)
assert rsp.ok
with open(os.path.join(dir_path, filename), "wb") as f:
for block in rsp.iter_content(32 * 1024):
f.write(block)
Use BSA's API to get URL of latest prescribing data | import os
import requests
from django.conf import settings
from django.core.management import BaseCommand
from openprescribing.utils import mkdir_p
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument("year", type=int)
parser.add_argument("month", type=int)
def handle(self, year, month, **kwargs):
rsp = requests.get(
"https://opendata.nhsbsa.net/api/3/action/package_show?id=english-prescribing-data-epd"
)
resources = rsp.json()["result"]["resources"]
urls = [
r["url"]
for r in resources
if r["name"] == "EPD_{year}{month:02d}".format(year=year, month=month)
]
assert len(urls) == 1, urls
rsp = requests.get(urls[0], stream=True)
assert rsp.ok
dir_path = os.path.join(
settings.PIPELINE_DATA_BASEDIR,
"prescribing_v2",
"{year}{month:02d}".format(year=year, month=month),
)
mkdir_p(dir_path)
filename = "epd_{year}{month:02d}.csv".format(year=year, month=month)
with open(os.path.join(dir_path, filename), "wb") as f:
for block in rsp.iter_content(32 * 1024):
f.write(block)
| <commit_before>import os
import requests
from bs4 import BeautifulSoup
from django.conf import settings
from django.core.management import BaseCommand
from openprescribing.utils import mkdir_p
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument("year", type=int)
parser.add_argument("month", type=int)
def handle(self, year, month, **kwargs):
year_and_month = "{year}_{month:02d}".format(year=year, month=month)
dir_path = os.path.join(
settings.PIPELINE_DATA_BASEDIR, "prescribing_v2", year_and_month
)
mkdir_p(dir_path)
rsp = requests.get(
"https://opendata.nhsbsa.net/dataset/english-prescribing-data-epd"
)
doc = BeautifulSoup(rsp.text, "html.parser")
filename = "epd_{year}{month:02d}.csv".format(year=year, month=month)
urls = [a["href"] for a in doc.find_all("a") if filename in a["href"]]
assert len(urls) == 1, urls
rsp = requests.get(urls[0], stream=True)
assert rsp.ok
with open(os.path.join(dir_path, filename), "wb") as f:
for block in rsp.iter_content(32 * 1024):
f.write(block)
<commit_msg>Use BSA's API to get URL of latest prescribing data<commit_after> | import os
import requests
from django.conf import settings
from django.core.management import BaseCommand
from openprescribing.utils import mkdir_p
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument("year", type=int)
parser.add_argument("month", type=int)
def handle(self, year, month, **kwargs):
rsp = requests.get(
"https://opendata.nhsbsa.net/api/3/action/package_show?id=english-prescribing-data-epd"
)
resources = rsp.json()["result"]["resources"]
urls = [
r["url"]
for r in resources
if r["name"] == "EPD_{year}{month:02d}".format(year=year, month=month)
]
assert len(urls) == 1, urls
rsp = requests.get(urls[0], stream=True)
assert rsp.ok
dir_path = os.path.join(
settings.PIPELINE_DATA_BASEDIR,
"prescribing_v2",
"{year}{month:02d}".format(year=year, month=month),
)
mkdir_p(dir_path)
filename = "epd_{year}{month:02d}.csv".format(year=year, month=month)
with open(os.path.join(dir_path, filename), "wb") as f:
for block in rsp.iter_content(32 * 1024):
f.write(block)
| import os
import requests
from bs4 import BeautifulSoup
from django.conf import settings
from django.core.management import BaseCommand
from openprescribing.utils import mkdir_p
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument("year", type=int)
parser.add_argument("month", type=int)
def handle(self, year, month, **kwargs):
year_and_month = "{year}_{month:02d}".format(year=year, month=month)
dir_path = os.path.join(
settings.PIPELINE_DATA_BASEDIR, "prescribing_v2", year_and_month
)
mkdir_p(dir_path)
rsp = requests.get(
"https://opendata.nhsbsa.net/dataset/english-prescribing-data-epd"
)
doc = BeautifulSoup(rsp.text, "html.parser")
filename = "epd_{year}{month:02d}.csv".format(year=year, month=month)
urls = [a["href"] for a in doc.find_all("a") if filename in a["href"]]
assert len(urls) == 1, urls
rsp = requests.get(urls[0], stream=True)
assert rsp.ok
with open(os.path.join(dir_path, filename), "wb") as f:
for block in rsp.iter_content(32 * 1024):
f.write(block)
Use BSA's API to get URL of latest prescribing dataimport os
import requests
from django.conf import settings
from django.core.management import BaseCommand
from openprescribing.utils import mkdir_p
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument("year", type=int)
parser.add_argument("month", type=int)
def handle(self, year, month, **kwargs):
rsp = requests.get(
"https://opendata.nhsbsa.net/api/3/action/package_show?id=english-prescribing-data-epd"
)
resources = rsp.json()["result"]["resources"]
urls = [
r["url"]
for r in resources
if r["name"] == "EPD_{year}{month:02d}".format(year=year, month=month)
]
assert len(urls) == 1, urls
rsp = requests.get(urls[0], stream=True)
assert rsp.ok
dir_path = os.path.join(
settings.PIPELINE_DATA_BASEDIR,
"prescribing_v2",
"{year}{month:02d}".format(year=year, month=month),
)
mkdir_p(dir_path)
filename = "epd_{year}{month:02d}.csv".format(year=year, month=month)
with open(os.path.join(dir_path, filename), "wb") as f:
for block in rsp.iter_content(32 * 1024):
f.write(block)
| <commit_before>import os
import requests
from bs4 import BeautifulSoup
from django.conf import settings
from django.core.management import BaseCommand
from openprescribing.utils import mkdir_p
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument("year", type=int)
parser.add_argument("month", type=int)
def handle(self, year, month, **kwargs):
year_and_month = "{year}_{month:02d}".format(year=year, month=month)
dir_path = os.path.join(
settings.PIPELINE_DATA_BASEDIR, "prescribing_v2", year_and_month
)
mkdir_p(dir_path)
rsp = requests.get(
"https://opendata.nhsbsa.net/dataset/english-prescribing-data-epd"
)
doc = BeautifulSoup(rsp.text, "html.parser")
filename = "epd_{year}{month:02d}.csv".format(year=year, month=month)
urls = [a["href"] for a in doc.find_all("a") if filename in a["href"]]
assert len(urls) == 1, urls
rsp = requests.get(urls[0], stream=True)
assert rsp.ok
with open(os.path.join(dir_path, filename), "wb") as f:
for block in rsp.iter_content(32 * 1024):
f.write(block)
<commit_msg>Use BSA's API to get URL of latest prescribing data<commit_after>import os
import requests
from django.conf import settings
from django.core.management import BaseCommand
from openprescribing.utils import mkdir_p
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument("year", type=int)
parser.add_argument("month", type=int)
def handle(self, year, month, **kwargs):
rsp = requests.get(
"https://opendata.nhsbsa.net/api/3/action/package_show?id=english-prescribing-data-epd"
)
resources = rsp.json()["result"]["resources"]
urls = [
r["url"]
for r in resources
if r["name"] == "EPD_{year}{month:02d}".format(year=year, month=month)
]
assert len(urls) == 1, urls
rsp = requests.get(urls[0], stream=True)
assert rsp.ok
dir_path = os.path.join(
settings.PIPELINE_DATA_BASEDIR,
"prescribing_v2",
"{year}{month:02d}".format(year=year, month=month),
)
mkdir_p(dir_path)
filename = "epd_{year}{month:02d}.csv".format(year=year, month=month)
with open(os.path.join(dir_path, filename), "wb") as f:
for block in rsp.iter_content(32 * 1024):
f.write(block)
|
3bb560dc03809238f586f78385deb41bba512ba9 | scripts/asgard-deploy.py | scripts/asgard-deploy.py | #!/usr/bin/env python
import sys
import logging
import click
import tubular.asgard as asgard
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
@click.command()
@click.option('--ami_id', envvar='AMI_ID', help='The ami-id to deploy', required=True)
def deploy(ami_id):
try:
asgard.deploy(ami_id)
except Exception, e:
click.secho("Error Deploying AMI: {0}.\nMessage: {1}".format(ami_id, e.message), fg='red')
sys.exit(1)
sys.exit(0)
if __name__ == "__main__":
deploy()
| #!/usr/bin/env python
import sys
import logging
import click
from os import path
# Add top-level module path to sys.path before importing tubular code.
sys.path.append( path.dirname( path.dirname( path.abspath(__file__) ) ) )
from tubular import asgard
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
@click.command()
@click.option('--ami_id', envvar='AMI_ID', help='The ami-id to deploy', required=True)
def deploy(ami_id):
try:
asgard.deploy(ami_id)
except Exception, e:
click.secho("Error Deploying AMI: {0}.\nMessage: {1}".format(ami_id, e.message), fg='red')
sys.exit(1)
sys.exit(0)
if __name__ == "__main__":
deploy()
| Add top-level module path before tubular import. | Add top-level module path before tubular import.
| Python | agpl-3.0 | eltoncarr/tubular,eltoncarr/tubular | #!/usr/bin/env python
import sys
import logging
import click
import tubular.asgard as asgard
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
@click.command()
@click.option('--ami_id', envvar='AMI_ID', help='The ami-id to deploy', required=True)
def deploy(ami_id):
try:
asgard.deploy(ami_id)
except Exception, e:
click.secho("Error Deploying AMI: {0}.\nMessage: {1}".format(ami_id, e.message), fg='red')
sys.exit(1)
sys.exit(0)
if __name__ == "__main__":
deploy()
Add top-level module path before tubular import. | #!/usr/bin/env python
import sys
import logging
import click
from os import path
# Add top-level module path to sys.path before importing tubular code.
sys.path.append( path.dirname( path.dirname( path.abspath(__file__) ) ) )
from tubular import asgard
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
@click.command()
@click.option('--ami_id', envvar='AMI_ID', help='The ami-id to deploy', required=True)
def deploy(ami_id):
try:
asgard.deploy(ami_id)
except Exception, e:
click.secho("Error Deploying AMI: {0}.\nMessage: {1}".format(ami_id, e.message), fg='red')
sys.exit(1)
sys.exit(0)
if __name__ == "__main__":
deploy()
| <commit_before>#!/usr/bin/env python
import sys
import logging
import click
import tubular.asgard as asgard
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
@click.command()
@click.option('--ami_id', envvar='AMI_ID', help='The ami-id to deploy', required=True)
def deploy(ami_id):
try:
asgard.deploy(ami_id)
except Exception, e:
click.secho("Error Deploying AMI: {0}.\nMessage: {1}".format(ami_id, e.message), fg='red')
sys.exit(1)
sys.exit(0)
if __name__ == "__main__":
deploy()
<commit_msg>Add top-level module path before tubular import.<commit_after> | #!/usr/bin/env python
import sys
import logging
import click
from os import path
# Add top-level module path to sys.path before importing tubular code.
sys.path.append( path.dirname( path.dirname( path.abspath(__file__) ) ) )
from tubular import asgard
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
@click.command()
@click.option('--ami_id', envvar='AMI_ID', help='The ami-id to deploy', required=True)
def deploy(ami_id):
try:
asgard.deploy(ami_id)
except Exception, e:
click.secho("Error Deploying AMI: {0}.\nMessage: {1}".format(ami_id, e.message), fg='red')
sys.exit(1)
sys.exit(0)
if __name__ == "__main__":
deploy()
| #!/usr/bin/env python
import sys
import logging
import click
import tubular.asgard as asgard
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
@click.command()
@click.option('--ami_id', envvar='AMI_ID', help='The ami-id to deploy', required=True)
def deploy(ami_id):
try:
asgard.deploy(ami_id)
except Exception, e:
click.secho("Error Deploying AMI: {0}.\nMessage: {1}".format(ami_id, e.message), fg='red')
sys.exit(1)
sys.exit(0)
if __name__ == "__main__":
deploy()
Add top-level module path before tubular import.#!/usr/bin/env python
import sys
import logging
import click
from os import path
# Add top-level module path to sys.path before importing tubular code.
sys.path.append( path.dirname( path.dirname( path.abspath(__file__) ) ) )
from tubular import asgard
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
@click.command()
@click.option('--ami_id', envvar='AMI_ID', help='The ami-id to deploy', required=True)
def deploy(ami_id):
try:
asgard.deploy(ami_id)
except Exception, e:
click.secho("Error Deploying AMI: {0}.\nMessage: {1}".format(ami_id, e.message), fg='red')
sys.exit(1)
sys.exit(0)
if __name__ == "__main__":
deploy()
| <commit_before>#!/usr/bin/env python
import sys
import logging
import click
import tubular.asgard as asgard
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
@click.command()
@click.option('--ami_id', envvar='AMI_ID', help='The ami-id to deploy', required=True)
def deploy(ami_id):
try:
asgard.deploy(ami_id)
except Exception, e:
click.secho("Error Deploying AMI: {0}.\nMessage: {1}".format(ami_id, e.message), fg='red')
sys.exit(1)
sys.exit(0)
if __name__ == "__main__":
deploy()
<commit_msg>Add top-level module path before tubular import.<commit_after>#!/usr/bin/env python
import sys
import logging
import click
from os import path
# Add top-level module path to sys.path before importing tubular code.
sys.path.append( path.dirname( path.dirname( path.abspath(__file__) ) ) )
from tubular import asgard
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
@click.command()
@click.option('--ami_id', envvar='AMI_ID', help='The ami-id to deploy', required=True)
def deploy(ami_id):
try:
asgard.deploy(ami_id)
except Exception, e:
click.secho("Error Deploying AMI: {0}.\nMessage: {1}".format(ami_id, e.message), fg='red')
sys.exit(1)
sys.exit(0)
if __name__ == "__main__":
deploy()
|
b60b03c5f9ff8b203c309714c06922c2680a244e | tests/test_now.py | tests/test_now.py | # -*- coding: utf-8 -*-
import pytest
from jinja2 import Environment
@pytest.fixture(scope='session')
def environment():
return Environment(extensions=['jinja2_time.TimeExtension'])
def test_foobar(environment):
assert environment
| # -*- coding: utf-8 -*-
from freezegun import freeze_time
from jinja2 import Environment, exceptions
import pytest
@pytest.fixture(scope='session')
def environment():
return Environment(extensions=['jinja2_time.TimeExtension'])
def test_tz_is_required(environment):
with pytest.raises(exceptions.TemplateSyntaxError):
environment.from_string('{% now %}')
@freeze_time("2015-12-09 23:33:01")
def test_default_datetime_format(environment):
template = environment.from_string("{% now 'utc' %}")
assert template.render() == "2015-12-09"
| Implement a test for the extensions default datetime format | Implement a test for the extensions default datetime format
| Python | mit | hackebrot/jinja2-time | # -*- coding: utf-8 -*-
import pytest
from jinja2 import Environment
@pytest.fixture(scope='session')
def environment():
return Environment(extensions=['jinja2_time.TimeExtension'])
def test_foobar(environment):
assert environment
Implement a test for the extensions default datetime format | # -*- coding: utf-8 -*-
from freezegun import freeze_time
from jinja2 import Environment, exceptions
import pytest
@pytest.fixture(scope='session')
def environment():
return Environment(extensions=['jinja2_time.TimeExtension'])
def test_tz_is_required(environment):
with pytest.raises(exceptions.TemplateSyntaxError):
environment.from_string('{% now %}')
@freeze_time("2015-12-09 23:33:01")
def test_default_datetime_format(environment):
template = environment.from_string("{% now 'utc' %}")
assert template.render() == "2015-12-09"
| <commit_before># -*- coding: utf-8 -*-
import pytest
from jinja2 import Environment
@pytest.fixture(scope='session')
def environment():
return Environment(extensions=['jinja2_time.TimeExtension'])
def test_foobar(environment):
assert environment
<commit_msg>Implement a test for the extensions default datetime format<commit_after> | # -*- coding: utf-8 -*-
from freezegun import freeze_time
from jinja2 import Environment, exceptions
import pytest
@pytest.fixture(scope='session')
def environment():
return Environment(extensions=['jinja2_time.TimeExtension'])
def test_tz_is_required(environment):
with pytest.raises(exceptions.TemplateSyntaxError):
environment.from_string('{% now %}')
@freeze_time("2015-12-09 23:33:01")
def test_default_datetime_format(environment):
template = environment.from_string("{% now 'utc' %}")
assert template.render() == "2015-12-09"
| # -*- coding: utf-8 -*-
import pytest
from jinja2 import Environment
@pytest.fixture(scope='session')
def environment():
return Environment(extensions=['jinja2_time.TimeExtension'])
def test_foobar(environment):
assert environment
Implement a test for the extensions default datetime format# -*- coding: utf-8 -*-
from freezegun import freeze_time
from jinja2 import Environment, exceptions
import pytest
@pytest.fixture(scope='session')
def environment():
return Environment(extensions=['jinja2_time.TimeExtension'])
def test_tz_is_required(environment):
with pytest.raises(exceptions.TemplateSyntaxError):
environment.from_string('{% now %}')
@freeze_time("2015-12-09 23:33:01")
def test_default_datetime_format(environment):
template = environment.from_string("{% now 'utc' %}")
assert template.render() == "2015-12-09"
| <commit_before># -*- coding: utf-8 -*-
import pytest
from jinja2 import Environment
@pytest.fixture(scope='session')
def environment():
return Environment(extensions=['jinja2_time.TimeExtension'])
def test_foobar(environment):
assert environment
<commit_msg>Implement a test for the extensions default datetime format<commit_after># -*- coding: utf-8 -*-
from freezegun import freeze_time
from jinja2 import Environment, exceptions
import pytest
@pytest.fixture(scope='session')
def environment():
return Environment(extensions=['jinja2_time.TimeExtension'])
def test_tz_is_required(environment):
with pytest.raises(exceptions.TemplateSyntaxError):
environment.from_string('{% now %}')
@freeze_time("2015-12-09 23:33:01")
def test_default_datetime_format(environment):
template = environment.from_string("{% now 'utc' %}")
assert template.render() == "2015-12-09"
|
8237072384204e51bc281de3dcdfd83e9c85df2d | us_ignite/sections/templatetags/sections_tags.py | us_ignite/sections/templatetags/sections_tags.py | from django import template
from django.template.loader import render_to_string
from us_ignite.sections.models import Sponsor
register = template.Library()
class RenderingNode(template.Node):
def __init__(self, template_name):
self.template_name = template_name
def render(self, context):
template_name = self.template_name.resolve(context)
template_context = {
'object_list': Sponsor.objects.all()
}
return render_to_string(template_name, template_context)
def _render_sponsors(parser, token):
"""Tag to render the latest ``Articles``.
Usage:
{% render_sponsors "sections/sponsor_list.html" %}
Where the second argument is a template path.
"""
bits = token.split_contents()
if not len(bits) == 2:
raise template.TemplateSyntaxError(
"%r tag only accepts a template argument." % bits[0])
# Determine the template name (could be a variable or a string):
template_name = parser.compile_filter(bits[1])
return RenderingNode(template_name)
register.tag('render_sponsors', _render_sponsors)
| from django import template
from django.template.loader import render_to_string
from us_ignite.sections.models import Sponsor
register = template.Library()
class RenderingNode(template.Node):
def __init__(self, template_name):
self.template_name = template_name
def render(self, context):
template_name = self.template_name.resolve(context)
context.update({
'object_list': Sponsor.objects.all()
})
return render_to_string(template_name, context)
def _render_sponsors(parser, token):
"""Tag to render the latest ``Articles``.
Usage:
{% render_sponsors "sections/sponsor_list.html" %}
Where the second argument is a template path.
"""
bits = token.split_contents()
if not len(bits) == 2:
raise template.TemplateSyntaxError(
"%r tag only accepts a template argument." % bits[0])
# Determine the template name (could be a variable or a string):
template_name = parser.compile_filter(bits[1])
return RenderingNode(template_name)
register.tag('render_sponsors', _render_sponsors)
| Make the request context available in the template. | Make the request context available in the template.
| Python | bsd-3-clause | us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite | from django import template
from django.template.loader import render_to_string
from us_ignite.sections.models import Sponsor
register = template.Library()
class RenderingNode(template.Node):
def __init__(self, template_name):
self.template_name = template_name
def render(self, context):
template_name = self.template_name.resolve(context)
template_context = {
'object_list': Sponsor.objects.all()
}
return render_to_string(template_name, template_context)
def _render_sponsors(parser, token):
"""Tag to render the latest ``Articles``.
Usage:
{% render_sponsors "sections/sponsor_list.html" %}
Where the second argument is a template path.
"""
bits = token.split_contents()
if not len(bits) == 2:
raise template.TemplateSyntaxError(
"%r tag only accepts a template argument." % bits[0])
# Determine the template name (could be a variable or a string):
template_name = parser.compile_filter(bits[1])
return RenderingNode(template_name)
register.tag('render_sponsors', _render_sponsors)
Make the request context available in the template. | from django import template
from django.template.loader import render_to_string
from us_ignite.sections.models import Sponsor
register = template.Library()
class RenderingNode(template.Node):
def __init__(self, template_name):
self.template_name = template_name
def render(self, context):
template_name = self.template_name.resolve(context)
context.update({
'object_list': Sponsor.objects.all()
})
return render_to_string(template_name, context)
def _render_sponsors(parser, token):
"""Tag to render the latest ``Articles``.
Usage:
{% render_sponsors "sections/sponsor_list.html" %}
Where the second argument is a template path.
"""
bits = token.split_contents()
if not len(bits) == 2:
raise template.TemplateSyntaxError(
"%r tag only accepts a template argument." % bits[0])
# Determine the template name (could be a variable or a string):
template_name = parser.compile_filter(bits[1])
return RenderingNode(template_name)
register.tag('render_sponsors', _render_sponsors)
| <commit_before>from django import template
from django.template.loader import render_to_string
from us_ignite.sections.models import Sponsor
register = template.Library()
class RenderingNode(template.Node):
def __init__(self, template_name):
self.template_name = template_name
def render(self, context):
template_name = self.template_name.resolve(context)
template_context = {
'object_list': Sponsor.objects.all()
}
return render_to_string(template_name, template_context)
def _render_sponsors(parser, token):
"""Tag to render the latest ``Articles``.
Usage:
{% render_sponsors "sections/sponsor_list.html" %}
Where the second argument is a template path.
"""
bits = token.split_contents()
if not len(bits) == 2:
raise template.TemplateSyntaxError(
"%r tag only accepts a template argument." % bits[0])
# Determine the template name (could be a variable or a string):
template_name = parser.compile_filter(bits[1])
return RenderingNode(template_name)
register.tag('render_sponsors', _render_sponsors)
<commit_msg>Make the request context available in the template.<commit_after> | from django import template
from django.template.loader import render_to_string
from us_ignite.sections.models import Sponsor
register = template.Library()
class RenderingNode(template.Node):
def __init__(self, template_name):
self.template_name = template_name
def render(self, context):
template_name = self.template_name.resolve(context)
context.update({
'object_list': Sponsor.objects.all()
})
return render_to_string(template_name, context)
def _render_sponsors(parser, token):
"""Tag to render the latest ``Articles``.
Usage:
{% render_sponsors "sections/sponsor_list.html" %}
Where the second argument is a template path.
"""
bits = token.split_contents()
if not len(bits) == 2:
raise template.TemplateSyntaxError(
"%r tag only accepts a template argument." % bits[0])
# Determine the template name (could be a variable or a string):
template_name = parser.compile_filter(bits[1])
return RenderingNode(template_name)
register.tag('render_sponsors', _render_sponsors)
| from django import template
from django.template.loader import render_to_string
from us_ignite.sections.models import Sponsor
register = template.Library()
class RenderingNode(template.Node):
def __init__(self, template_name):
self.template_name = template_name
def render(self, context):
template_name = self.template_name.resolve(context)
template_context = {
'object_list': Sponsor.objects.all()
}
return render_to_string(template_name, template_context)
def _render_sponsors(parser, token):
"""Tag to render the latest ``Articles``.
Usage:
{% render_sponsors "sections/sponsor_list.html" %}
Where the second argument is a template path.
"""
bits = token.split_contents()
if not len(bits) == 2:
raise template.TemplateSyntaxError(
"%r tag only accepts a template argument." % bits[0])
# Determine the template name (could be a variable or a string):
template_name = parser.compile_filter(bits[1])
return RenderingNode(template_name)
register.tag('render_sponsors', _render_sponsors)
Make the request context available in the template.from django import template
from django.template.loader import render_to_string
from us_ignite.sections.models import Sponsor
register = template.Library()
class RenderingNode(template.Node):
def __init__(self, template_name):
self.template_name = template_name
def render(self, context):
template_name = self.template_name.resolve(context)
context.update({
'object_list': Sponsor.objects.all()
})
return render_to_string(template_name, context)
def _render_sponsors(parser, token):
"""Tag to render the latest ``Articles``.
Usage:
{% render_sponsors "sections/sponsor_list.html" %}
Where the second argument is a template path.
"""
bits = token.split_contents()
if not len(bits) == 2:
raise template.TemplateSyntaxError(
"%r tag only accepts a template argument." % bits[0])
# Determine the template name (could be a variable or a string):
template_name = parser.compile_filter(bits[1])
return RenderingNode(template_name)
register.tag('render_sponsors', _render_sponsors)
| <commit_before>from django import template
from django.template.loader import render_to_string
from us_ignite.sections.models import Sponsor
register = template.Library()
class RenderingNode(template.Node):
def __init__(self, template_name):
self.template_name = template_name
def render(self, context):
template_name = self.template_name.resolve(context)
template_context = {
'object_list': Sponsor.objects.all()
}
return render_to_string(template_name, template_context)
def _render_sponsors(parser, token):
"""Tag to render the latest ``Articles``.
Usage:
{% render_sponsors "sections/sponsor_list.html" %}
Where the second argument is a template path.
"""
bits = token.split_contents()
if not len(bits) == 2:
raise template.TemplateSyntaxError(
"%r tag only accepts a template argument." % bits[0])
# Determine the template name (could be a variable or a string):
template_name = parser.compile_filter(bits[1])
return RenderingNode(template_name)
register.tag('render_sponsors', _render_sponsors)
<commit_msg>Make the request context available in the template.<commit_after>from django import template
from django.template.loader import render_to_string
from us_ignite.sections.models import Sponsor
register = template.Library()
class RenderingNode(template.Node):
def __init__(self, template_name):
self.template_name = template_name
def render(self, context):
template_name = self.template_name.resolve(context)
context.update({
'object_list': Sponsor.objects.all()
})
return render_to_string(template_name, context)
def _render_sponsors(parser, token):
"""Tag to render the latest ``Articles``.
Usage:
{% render_sponsors "sections/sponsor_list.html" %}
Where the second argument is a template path.
"""
bits = token.split_contents()
if not len(bits) == 2:
raise template.TemplateSyntaxError(
"%r tag only accepts a template argument." % bits[0])
# Determine the template name (could be a variable or a string):
template_name = parser.compile_filter(bits[1])
return RenderingNode(template_name)
register.tag('render_sponsors', _render_sponsors)
|
63d74ee66e9fe05c07d0032690889b3991e32e1b | logtacts/settings/heroku.py | logtacts/settings/heroku.py | from .base import *
import dj_database_url
DEBUG = False
TEMPLATE_DEBUG = DEBUG
DATABASES['default'] = dj_database_url.config()
SECRET_KEY = get_env_variable("SECRET_KEY")
ALLOWED_HOSTS = [
'localhost',
'127.0.0.1',
'.herokuapp.com',
'.pebble.ink',
]
STATIC_URL = '//logtacts.s3.amazonaws.com/assets/'
INSTALLED_APPS += (
'gunicorn',
) | from .base import *
import dj_database_url
DEBUG = False
TEMPLATE_DEBUG = DEBUG
DATABASES['default'] = dj_database_url.parse(get_env_variable('LOGTACTS_DB_URL'))
SECRET_KEY = get_env_variable("SECRET_KEY")
ALLOWED_HOSTS = [
'localhost',
'127.0.0.1',
'.herokuapp.com',
'.pebble.ink',
]
STATIC_URL = '//logtacts.s3.amazonaws.com/assets/'
INSTALLED_APPS += (
'gunicorn',
) | Adjust settings to use specific db url | Adjust settings to use specific db url
| Python | mit | phildini/logtacts,phildini/logtacts,phildini/logtacts,phildini/logtacts,phildini/logtacts | from .base import *
import dj_database_url
DEBUG = False
TEMPLATE_DEBUG = DEBUG
DATABASES['default'] = dj_database_url.config()
SECRET_KEY = get_env_variable("SECRET_KEY")
ALLOWED_HOSTS = [
'localhost',
'127.0.0.1',
'.herokuapp.com',
'.pebble.ink',
]
STATIC_URL = '//logtacts.s3.amazonaws.com/assets/'
INSTALLED_APPS += (
'gunicorn',
)Adjust settings to use specific db url | from .base import *
import dj_database_url
DEBUG = False
TEMPLATE_DEBUG = DEBUG
DATABASES['default'] = dj_database_url.parse(get_env_variable('LOGTACTS_DB_URL'))
SECRET_KEY = get_env_variable("SECRET_KEY")
ALLOWED_HOSTS = [
'localhost',
'127.0.0.1',
'.herokuapp.com',
'.pebble.ink',
]
STATIC_URL = '//logtacts.s3.amazonaws.com/assets/'
INSTALLED_APPS += (
'gunicorn',
) | <commit_before>from .base import *
import dj_database_url
DEBUG = False
TEMPLATE_DEBUG = DEBUG
DATABASES['default'] = dj_database_url.config()
SECRET_KEY = get_env_variable("SECRET_KEY")
ALLOWED_HOSTS = [
'localhost',
'127.0.0.1',
'.herokuapp.com',
'.pebble.ink',
]
STATIC_URL = '//logtacts.s3.amazonaws.com/assets/'
INSTALLED_APPS += (
'gunicorn',
)<commit_msg>Adjust settings to use specific db url<commit_after> | from .base import *
import dj_database_url
DEBUG = False
TEMPLATE_DEBUG = DEBUG
DATABASES['default'] = dj_database_url.parse(get_env_variable('LOGTACTS_DB_URL'))
SECRET_KEY = get_env_variable("SECRET_KEY")
ALLOWED_HOSTS = [
'localhost',
'127.0.0.1',
'.herokuapp.com',
'.pebble.ink',
]
STATIC_URL = '//logtacts.s3.amazonaws.com/assets/'
INSTALLED_APPS += (
'gunicorn',
) | from .base import *
import dj_database_url
DEBUG = False
TEMPLATE_DEBUG = DEBUG
DATABASES['default'] = dj_database_url.config()
SECRET_KEY = get_env_variable("SECRET_KEY")
ALLOWED_HOSTS = [
'localhost',
'127.0.0.1',
'.herokuapp.com',
'.pebble.ink',
]
STATIC_URL = '//logtacts.s3.amazonaws.com/assets/'
INSTALLED_APPS += (
'gunicorn',
)Adjust settings to use specific db urlfrom .base import *
import dj_database_url
DEBUG = False
TEMPLATE_DEBUG = DEBUG
DATABASES['default'] = dj_database_url.parse(get_env_variable('LOGTACTS_DB_URL'))
SECRET_KEY = get_env_variable("SECRET_KEY")
ALLOWED_HOSTS = [
'localhost',
'127.0.0.1',
'.herokuapp.com',
'.pebble.ink',
]
STATIC_URL = '//logtacts.s3.amazonaws.com/assets/'
INSTALLED_APPS += (
'gunicorn',
) | <commit_before>from .base import *
import dj_database_url
DEBUG = False
TEMPLATE_DEBUG = DEBUG
DATABASES['default'] = dj_database_url.config()
SECRET_KEY = get_env_variable("SECRET_KEY")
ALLOWED_HOSTS = [
'localhost',
'127.0.0.1',
'.herokuapp.com',
'.pebble.ink',
]
STATIC_URL = '//logtacts.s3.amazonaws.com/assets/'
INSTALLED_APPS += (
'gunicorn',
)<commit_msg>Adjust settings to use specific db url<commit_after>from .base import *
import dj_database_url
DEBUG = False
TEMPLATE_DEBUG = DEBUG
DATABASES['default'] = dj_database_url.parse(get_env_variable('LOGTACTS_DB_URL'))
SECRET_KEY = get_env_variable("SECRET_KEY")
ALLOWED_HOSTS = [
'localhost',
'127.0.0.1',
'.herokuapp.com',
'.pebble.ink',
]
STATIC_URL = '//logtacts.s3.amazonaws.com/assets/'
INSTALLED_APPS += (
'gunicorn',
) |
d78ff232acee51f95af3e815e72d3db32cb90533 | apps/privatemsg/management/commands/cleanupprivatemsg.py | apps/privatemsg/management/commands/cleanupprivatemsg.py | """
Management command to cleanup deleted private message database entries.
"""
from django.core.management.base import NoArgsCommand
from apps.privatemsg.models import PrivateMessage
class Command(NoArgsCommand):
"""
A management command which deletes deleted private messages from the database.
Calls ``PrivateMessage.objects.delete_deleted_msg()``, which
contains the actual logic for determining which messages are deleted.
"""
help = "Delete deleted private messages from the database"
def handle_noargs(self, **options):
"""
Command handler.
:param options: Not used.
:return: None.
"""
PrivateMessage.objects.delete_deleted_msg()
| """
Management command to cleanup deleted private message from database.
"""
from django.core.management.base import NoArgsCommand
from ...models import PrivateMessage
class Command(NoArgsCommand):
"""
A management command which deletes deleted private messages from the database.
Calls ``PrivateMessage.objects.delete_deleted_msg()``, which
contains the actual logic for determining which messages are deleted.
"""
help = "Delete deleted private messages from the database"
def handle_noargs(self, **options):
"""
Command handler.
:param options: Not used.
:return: None.
"""
PrivateMessage.objects.delete_deleted_msg()
| Use relative import and update docstring | Use relative import and update docstring
| Python | agpl-3.0 | TamiaLab/carnetdumaker,TamiaLab/carnetdumaker,TamiaLab/carnetdumaker,TamiaLab/carnetdumaker | """
Management command to cleanup deleted private message database entries.
"""
from django.core.management.base import NoArgsCommand
from apps.privatemsg.models import PrivateMessage
class Command(NoArgsCommand):
"""
A management command which deletes deleted private messages from the database.
Calls ``PrivateMessage.objects.delete_deleted_msg()``, which
contains the actual logic for determining which messages are deleted.
"""
help = "Delete deleted private messages from the database"
def handle_noargs(self, **options):
"""
Command handler.
:param options: Not used.
:return: None.
"""
PrivateMessage.objects.delete_deleted_msg()
Use relative import and update docstring | """
Management command to cleanup deleted private message from database.
"""
from django.core.management.base import NoArgsCommand
from ...models import PrivateMessage
class Command(NoArgsCommand):
"""
A management command which deletes deleted private messages from the database.
Calls ``PrivateMessage.objects.delete_deleted_msg()``, which
contains the actual logic for determining which messages are deleted.
"""
help = "Delete deleted private messages from the database"
def handle_noargs(self, **options):
"""
Command handler.
:param options: Not used.
:return: None.
"""
PrivateMessage.objects.delete_deleted_msg()
| <commit_before>"""
Management command to cleanup deleted private message database entries.
"""
from django.core.management.base import NoArgsCommand
from apps.privatemsg.models import PrivateMessage
class Command(NoArgsCommand):
"""
A management command which deletes deleted private messages from the database.
Calls ``PrivateMessage.objects.delete_deleted_msg()``, which
contains the actual logic for determining which messages are deleted.
"""
help = "Delete deleted private messages from the database"
def handle_noargs(self, **options):
"""
Command handler.
:param options: Not used.
:return: None.
"""
PrivateMessage.objects.delete_deleted_msg()
<commit_msg>Use relative import and update docstring<commit_after> | """
Management command to cleanup deleted private message from database.
"""
from django.core.management.base import NoArgsCommand
from ...models import PrivateMessage
class Command(NoArgsCommand):
"""
A management command which deletes deleted private messages from the database.
Calls ``PrivateMessage.objects.delete_deleted_msg()``, which
contains the actual logic for determining which messages are deleted.
"""
help = "Delete deleted private messages from the database"
def handle_noargs(self, **options):
"""
Command handler.
:param options: Not used.
:return: None.
"""
PrivateMessage.objects.delete_deleted_msg()
| """
Management command to cleanup deleted private message database entries.
"""
from django.core.management.base import NoArgsCommand
from apps.privatemsg.models import PrivateMessage
class Command(NoArgsCommand):
"""
A management command which deletes deleted private messages from the database.
Calls ``PrivateMessage.objects.delete_deleted_msg()``, which
contains the actual logic for determining which messages are deleted.
"""
help = "Delete deleted private messages from the database"
def handle_noargs(self, **options):
"""
Command handler.
:param options: Not used.
:return: None.
"""
PrivateMessage.objects.delete_deleted_msg()
Use relative import and update docstring"""
Management command to cleanup deleted private message from database.
"""
from django.core.management.base import NoArgsCommand
from ...models import PrivateMessage
class Command(NoArgsCommand):
"""
A management command which deletes deleted private messages from the database.
Calls ``PrivateMessage.objects.delete_deleted_msg()``, which
contains the actual logic for determining which messages are deleted.
"""
help = "Delete deleted private messages from the database"
def handle_noargs(self, **options):
"""
Command handler.
:param options: Not used.
:return: None.
"""
PrivateMessage.objects.delete_deleted_msg()
| <commit_before>"""
Management command to cleanup deleted private message database entries.
"""
from django.core.management.base import NoArgsCommand
from apps.privatemsg.models import PrivateMessage
class Command(NoArgsCommand):
"""
A management command which deletes deleted private messages from the database.
Calls ``PrivateMessage.objects.delete_deleted_msg()``, which
contains the actual logic for determining which messages are deleted.
"""
help = "Delete deleted private messages from the database"
def handle_noargs(self, **options):
"""
Command handler.
:param options: Not used.
:return: None.
"""
PrivateMessage.objects.delete_deleted_msg()
<commit_msg>Use relative import and update docstring<commit_after>"""
Management command to cleanup deleted private message from database.
"""
from django.core.management.base import NoArgsCommand
from ...models import PrivateMessage
class Command(NoArgsCommand):
"""
A management command which deletes deleted private messages from the database.
Calls ``PrivateMessage.objects.delete_deleted_msg()``, which
contains the actual logic for determining which messages are deleted.
"""
help = "Delete deleted private messages from the database"
def handle_noargs(self, **options):
"""
Command handler.
:param options: Not used.
:return: None.
"""
PrivateMessage.objects.delete_deleted_msg()
|
0ad4e80daff3441d3d0dc56f6891d6fefb9cedb9 | tinyblog/feeds.py | tinyblog/feeds.py | from datetime import date
from django.conf import settings
from django.contrib.syndication.views import Feed
from django.core.urlresolvers import reverse
from tinyblog.models import Post
class LatestEntriesFeed(Feed):
def title(self):
return settings.TINYBLOG_TITLE
def description(self):
return settings.TINYBLOG_DESCRIPTION
def author_name(self):
return settings.TINYBLOG_AUTHORNAME
def author_link(self):
return settings.TINYBLOG_AUTHORLINK
def copyright(self):
return (u'Copyright (c) %d %s.'
% (date.now().year, settings.TINYBLOG_AUTHORNAME))
def link(self, obj):
return reverse('tinyblog_index')
def items(self):
return Post.published_objects.order_by('-created')[:15]
def item_description(self, item):
return item.teaser_html + item.text_html
def item_title(self, item):
return item.title
def item_link(self, item):
return item.get_absolute_url()
def item_pubdate(self, item):
return item.created
| from datetime import date
from django.conf import settings
from django.contrib.syndication.views import Feed
from django.core.urlresolvers import reverse
from tinyblog.models import Post
class LatestEntriesFeed(Feed):
def title(self):
return settings.TINYBLOG_TITLE
def description(self):
return settings.TINYBLOG_DESCRIPTION
def author_name(self):
return settings.TINYBLOG_AUTHORNAME
def author_link(self):
return settings.TINYBLOG_AUTHORLINK
def feed_copyright(self):
return (u'Copyright (c) %d %s.'
% (date.today().year, settings.TINYBLOG_AUTHORNAME))
def link(self, obj):
return reverse('tinyblog_index')
def items(self):
return Post.published_objects.order_by('-created')[:15]
def item_description(self, item):
return item.teaser_html + item.text_html
def item_title(self, item):
return item.title
def item_link(self, item):
return item.get_absolute_url()
def item_pubdate(self, item):
return item.created
| Fix for copyright not showing up in RSS feed | Fix for copyright not showing up in RSS feed
Code coverage showed I was missing a line, because I'd used
the wrong function name. Once I fixed that, I discovered a
bug in the way I was getting the date for the copyright.
| Python | bsd-3-clause | dominicrodger/tinyblog,dominicrodger/tinyblog | from datetime import date
from django.conf import settings
from django.contrib.syndication.views import Feed
from django.core.urlresolvers import reverse
from tinyblog.models import Post
class LatestEntriesFeed(Feed):
def title(self):
return settings.TINYBLOG_TITLE
def description(self):
return settings.TINYBLOG_DESCRIPTION
def author_name(self):
return settings.TINYBLOG_AUTHORNAME
def author_link(self):
return settings.TINYBLOG_AUTHORLINK
def copyright(self):
return (u'Copyright (c) %d %s.'
% (date.now().year, settings.TINYBLOG_AUTHORNAME))
def link(self, obj):
return reverse('tinyblog_index')
def items(self):
return Post.published_objects.order_by('-created')[:15]
def item_description(self, item):
return item.teaser_html + item.text_html
def item_title(self, item):
return item.title
def item_link(self, item):
return item.get_absolute_url()
def item_pubdate(self, item):
return item.created
Fix for copyright not showing up in RSS feed
Code coverage showed I was missing a line, because I'd used
the wrong function name. Once I fixed that, I discovered a
bug in the way I was getting the date for the copyright. | from datetime import date
from django.conf import settings
from django.contrib.syndication.views import Feed
from django.core.urlresolvers import reverse
from tinyblog.models import Post
class LatestEntriesFeed(Feed):
def title(self):
return settings.TINYBLOG_TITLE
def description(self):
return settings.TINYBLOG_DESCRIPTION
def author_name(self):
return settings.TINYBLOG_AUTHORNAME
def author_link(self):
return settings.TINYBLOG_AUTHORLINK
def feed_copyright(self):
return (u'Copyright (c) %d %s.'
% (date.today().year, settings.TINYBLOG_AUTHORNAME))
def link(self, obj):
return reverse('tinyblog_index')
def items(self):
return Post.published_objects.order_by('-created')[:15]
def item_description(self, item):
return item.teaser_html + item.text_html
def item_title(self, item):
return item.title
def item_link(self, item):
return item.get_absolute_url()
def item_pubdate(self, item):
return item.created
| <commit_before>from datetime import date
from django.conf import settings
from django.contrib.syndication.views import Feed
from django.core.urlresolvers import reverse
from tinyblog.models import Post
class LatestEntriesFeed(Feed):
def title(self):
return settings.TINYBLOG_TITLE
def description(self):
return settings.TINYBLOG_DESCRIPTION
def author_name(self):
return settings.TINYBLOG_AUTHORNAME
def author_link(self):
return settings.TINYBLOG_AUTHORLINK
def copyright(self):
return (u'Copyright (c) %d %s.'
% (date.now().year, settings.TINYBLOG_AUTHORNAME))
def link(self, obj):
return reverse('tinyblog_index')
def items(self):
return Post.published_objects.order_by('-created')[:15]
def item_description(self, item):
return item.teaser_html + item.text_html
def item_title(self, item):
return item.title
def item_link(self, item):
return item.get_absolute_url()
def item_pubdate(self, item):
return item.created
<commit_msg>Fix for copyright not showing up in RSS feed
Code coverage showed I was missing a line, because I'd used
the wrong function name. Once I fixed that, I discovered a
bug in the way I was getting the date for the copyright.<commit_after> | from datetime import date
from django.conf import settings
from django.contrib.syndication.views import Feed
from django.core.urlresolvers import reverse
from tinyblog.models import Post
class LatestEntriesFeed(Feed):
def title(self):
return settings.TINYBLOG_TITLE
def description(self):
return settings.TINYBLOG_DESCRIPTION
def author_name(self):
return settings.TINYBLOG_AUTHORNAME
def author_link(self):
return settings.TINYBLOG_AUTHORLINK
def feed_copyright(self):
return (u'Copyright (c) %d %s.'
% (date.today().year, settings.TINYBLOG_AUTHORNAME))
def link(self, obj):
return reverse('tinyblog_index')
def items(self):
return Post.published_objects.order_by('-created')[:15]
def item_description(self, item):
return item.teaser_html + item.text_html
def item_title(self, item):
return item.title
def item_link(self, item):
return item.get_absolute_url()
def item_pubdate(self, item):
return item.created
| from datetime import date
from django.conf import settings
from django.contrib.syndication.views import Feed
from django.core.urlresolvers import reverse
from tinyblog.models import Post
class LatestEntriesFeed(Feed):
def title(self):
return settings.TINYBLOG_TITLE
def description(self):
return settings.TINYBLOG_DESCRIPTION
def author_name(self):
return settings.TINYBLOG_AUTHORNAME
def author_link(self):
return settings.TINYBLOG_AUTHORLINK
def copyright(self):
return (u'Copyright (c) %d %s.'
% (date.now().year, settings.TINYBLOG_AUTHORNAME))
def link(self, obj):
return reverse('tinyblog_index')
def items(self):
return Post.published_objects.order_by('-created')[:15]
def item_description(self, item):
return item.teaser_html + item.text_html
def item_title(self, item):
return item.title
def item_link(self, item):
return item.get_absolute_url()
def item_pubdate(self, item):
return item.created
Fix for copyright not showing up in RSS feed
Code coverage showed I was missing a line, because I'd used
the wrong function name. Once I fixed that, I discovered a
bug in the way I was getting the date for the copyright.from datetime import date
from django.conf import settings
from django.contrib.syndication.views import Feed
from django.core.urlresolvers import reverse
from tinyblog.models import Post
class LatestEntriesFeed(Feed):
def title(self):
return settings.TINYBLOG_TITLE
def description(self):
return settings.TINYBLOG_DESCRIPTION
def author_name(self):
return settings.TINYBLOG_AUTHORNAME
def author_link(self):
return settings.TINYBLOG_AUTHORLINK
def feed_copyright(self):
return (u'Copyright (c) %d %s.'
% (date.today().year, settings.TINYBLOG_AUTHORNAME))
def link(self, obj):
return reverse('tinyblog_index')
def items(self):
return Post.published_objects.order_by('-created')[:15]
def item_description(self, item):
return item.teaser_html + item.text_html
def item_title(self, item):
return item.title
def item_link(self, item):
return item.get_absolute_url()
def item_pubdate(self, item):
return item.created
| <commit_before>from datetime import date
from django.conf import settings
from django.contrib.syndication.views import Feed
from django.core.urlresolvers import reverse
from tinyblog.models import Post
class LatestEntriesFeed(Feed):
def title(self):
return settings.TINYBLOG_TITLE
def description(self):
return settings.TINYBLOG_DESCRIPTION
def author_name(self):
return settings.TINYBLOG_AUTHORNAME
def author_link(self):
return settings.TINYBLOG_AUTHORLINK
def copyright(self):
return (u'Copyright (c) %d %s.'
% (date.now().year, settings.TINYBLOG_AUTHORNAME))
def link(self, obj):
return reverse('tinyblog_index')
def items(self):
return Post.published_objects.order_by('-created')[:15]
def item_description(self, item):
return item.teaser_html + item.text_html
def item_title(self, item):
return item.title
def item_link(self, item):
return item.get_absolute_url()
def item_pubdate(self, item):
return item.created
<commit_msg>Fix for copyright not showing up in RSS feed
Code coverage showed I was missing a line, because I'd used
the wrong function name. Once I fixed that, I discovered a
bug in the way I was getting the date for the copyright.<commit_after>from datetime import date
from django.conf import settings
from django.contrib.syndication.views import Feed
from django.core.urlresolvers import reverse
from tinyblog.models import Post
class LatestEntriesFeed(Feed):
def title(self):
return settings.TINYBLOG_TITLE
def description(self):
return settings.TINYBLOG_DESCRIPTION
def author_name(self):
return settings.TINYBLOG_AUTHORNAME
def author_link(self):
return settings.TINYBLOG_AUTHORLINK
def feed_copyright(self):
return (u'Copyright (c) %d %s.'
% (date.today().year, settings.TINYBLOG_AUTHORNAME))
def link(self, obj):
return reverse('tinyblog_index')
def items(self):
return Post.published_objects.order_by('-created')[:15]
def item_description(self, item):
return item.teaser_html + item.text_html
def item_title(self, item):
return item.title
def item_link(self, item):
return item.get_absolute_url()
def item_pubdate(self, item):
return item.created
|
07b0b608a948e1058aeb40fdfbf5a0425933562d | mcavatar/__init__.py | mcavatar/__init__.py | from redis import Redis
from flask import Flask, g
app = Flask(__name__)
app.config.from_pyfile('config.py')
_redis = Redis(
host=app.config['REDIS_HOST'],
port=app.config['REDIS_PORT'],
db=app.config['REDIS_DB']
)
from mcavatar.views.public import public
from mcavatar.views.img import img
app.register_blueprint(public)
app.register_blueprint(img, subdomain='i')
@app.before_request
def set_db():
g.redis = _redis
| from redis import Redis
from flask import Flask, g
app = Flask(__name__)
app.config.from_pyfile('config.py')
_redis = Redis(
host=app.config['REDIS_HOST'],
port=app.config['REDIS_PORT'],
db=app.config['REDIS_DB']
)
from mcavatar.views.public import public
from mcavatar.views.img import img
app.register_blueprint(public)
app.register_blueprint(img, subdomain='i')
@app.before_request
def set_db():
g.redis = _redis
@app.teardown_request
def incr_requests(ex):
g.redis.incr('total_requests')
| Increment total_requests key in redis after each request | Increment total_requests key in redis after each request
| Python | mit | joealcorn/MCAvatar | from redis import Redis
from flask import Flask, g
app = Flask(__name__)
app.config.from_pyfile('config.py')
_redis = Redis(
host=app.config['REDIS_HOST'],
port=app.config['REDIS_PORT'],
db=app.config['REDIS_DB']
)
from mcavatar.views.public import public
from mcavatar.views.img import img
app.register_blueprint(public)
app.register_blueprint(img, subdomain='i')
@app.before_request
def set_db():
g.redis = _redis
Increment total_requests key in redis after each request | from redis import Redis
from flask import Flask, g
app = Flask(__name__)
app.config.from_pyfile('config.py')
_redis = Redis(
host=app.config['REDIS_HOST'],
port=app.config['REDIS_PORT'],
db=app.config['REDIS_DB']
)
from mcavatar.views.public import public
from mcavatar.views.img import img
app.register_blueprint(public)
app.register_blueprint(img, subdomain='i')
@app.before_request
def set_db():
g.redis = _redis
@app.teardown_request
def incr_requests(ex):
g.redis.incr('total_requests')
| <commit_before>from redis import Redis
from flask import Flask, g
app = Flask(__name__)
app.config.from_pyfile('config.py')
_redis = Redis(
host=app.config['REDIS_HOST'],
port=app.config['REDIS_PORT'],
db=app.config['REDIS_DB']
)
from mcavatar.views.public import public
from mcavatar.views.img import img
app.register_blueprint(public)
app.register_blueprint(img, subdomain='i')
@app.before_request
def set_db():
g.redis = _redis
<commit_msg>Increment total_requests key in redis after each request<commit_after> | from redis import Redis
from flask import Flask, g
app = Flask(__name__)
app.config.from_pyfile('config.py')
_redis = Redis(
host=app.config['REDIS_HOST'],
port=app.config['REDIS_PORT'],
db=app.config['REDIS_DB']
)
from mcavatar.views.public import public
from mcavatar.views.img import img
app.register_blueprint(public)
app.register_blueprint(img, subdomain='i')
@app.before_request
def set_db():
g.redis = _redis
@app.teardown_request
def incr_requests(ex):
g.redis.incr('total_requests')
| from redis import Redis
from flask import Flask, g
app = Flask(__name__)
app.config.from_pyfile('config.py')
_redis = Redis(
host=app.config['REDIS_HOST'],
port=app.config['REDIS_PORT'],
db=app.config['REDIS_DB']
)
from mcavatar.views.public import public
from mcavatar.views.img import img
app.register_blueprint(public)
app.register_blueprint(img, subdomain='i')
@app.before_request
def set_db():
g.redis = _redis
Increment total_requests key in redis after each requestfrom redis import Redis
from flask import Flask, g
app = Flask(__name__)
app.config.from_pyfile('config.py')
_redis = Redis(
host=app.config['REDIS_HOST'],
port=app.config['REDIS_PORT'],
db=app.config['REDIS_DB']
)
from mcavatar.views.public import public
from mcavatar.views.img import img
app.register_blueprint(public)
app.register_blueprint(img, subdomain='i')
@app.before_request
def set_db():
g.redis = _redis
@app.teardown_request
def incr_requests(ex):
g.redis.incr('total_requests')
| <commit_before>from redis import Redis
from flask import Flask, g
app = Flask(__name__)
app.config.from_pyfile('config.py')
_redis = Redis(
host=app.config['REDIS_HOST'],
port=app.config['REDIS_PORT'],
db=app.config['REDIS_DB']
)
from mcavatar.views.public import public
from mcavatar.views.img import img
app.register_blueprint(public)
app.register_blueprint(img, subdomain='i')
@app.before_request
def set_db():
g.redis = _redis
<commit_msg>Increment total_requests key in redis after each request<commit_after>from redis import Redis
from flask import Flask, g
app = Flask(__name__)
app.config.from_pyfile('config.py')
_redis = Redis(
host=app.config['REDIS_HOST'],
port=app.config['REDIS_PORT'],
db=app.config['REDIS_DB']
)
from mcavatar.views.public import public
from mcavatar.views.img import img
app.register_blueprint(public)
app.register_blueprint(img, subdomain='i')
@app.before_request
def set_db():
g.redis = _redis
@app.teardown_request
def incr_requests(ex):
g.redis.incr('total_requests')
|
e1a9f02051038270cdf3377c38c650a27bd65507 | apps/organizations/middleware.py | apps/organizations/middleware.py | from django.http import Http404
from django.shortcuts import get_object_or_404
from .models import Organization
class OrganizationMiddleware(object):
def process_request(self, request):
subdomain = request.subdomain
user = request.user
if subdomain is None:
request.organization = None
return
organization = get_object_or_404(
Organization, slug__iexact=subdomain.lower()
)
if user.is_authenticated() and organization != user.organization:
raise Http404
request.organization = organization
| from django.http import Http404
from django.shortcuts import get_object_or_404
from .models import Organization
class OrganizationMiddleware(object):
def process_request(self, request):
subdomain = request.subdomain
user = request.user
request.organization = None
if subdomain is None:
request.organization = None
return
organization = get_object_or_404(
Organization, slug__iexact=subdomain.lower()
)
if user.is_authenticated() and organization != user.organization:
raise Http404
request.organization = organization
| Set requestion organization to none | Set requestion organization to none
| Python | mit | xobb1t/ddash2013,xobb1t/ddash2013 | from django.http import Http404
from django.shortcuts import get_object_or_404
from .models import Organization
class OrganizationMiddleware(object):
def process_request(self, request):
subdomain = request.subdomain
user = request.user
if subdomain is None:
request.organization = None
return
organization = get_object_or_404(
Organization, slug__iexact=subdomain.lower()
)
if user.is_authenticated() and organization != user.organization:
raise Http404
request.organization = organization
Set requestion organization to none | from django.http import Http404
from django.shortcuts import get_object_or_404
from .models import Organization
class OrganizationMiddleware(object):
def process_request(self, request):
subdomain = request.subdomain
user = request.user
request.organization = None
if subdomain is None:
request.organization = None
return
organization = get_object_or_404(
Organization, slug__iexact=subdomain.lower()
)
if user.is_authenticated() and organization != user.organization:
raise Http404
request.organization = organization
| <commit_before>from django.http import Http404
from django.shortcuts import get_object_or_404
from .models import Organization
class OrganizationMiddleware(object):
def process_request(self, request):
subdomain = request.subdomain
user = request.user
if subdomain is None:
request.organization = None
return
organization = get_object_or_404(
Organization, slug__iexact=subdomain.lower()
)
if user.is_authenticated() and organization != user.organization:
raise Http404
request.organization = organization
<commit_msg>Set requestion organization to none<commit_after> | from django.http import Http404
from django.shortcuts import get_object_or_404
from .models import Organization
class OrganizationMiddleware(object):
def process_request(self, request):
subdomain = request.subdomain
user = request.user
request.organization = None
if subdomain is None:
request.organization = None
return
organization = get_object_or_404(
Organization, slug__iexact=subdomain.lower()
)
if user.is_authenticated() and organization != user.organization:
raise Http404
request.organization = organization
| from django.http import Http404
from django.shortcuts import get_object_or_404
from .models import Organization
class OrganizationMiddleware(object):
def process_request(self, request):
subdomain = request.subdomain
user = request.user
if subdomain is None:
request.organization = None
return
organization = get_object_or_404(
Organization, slug__iexact=subdomain.lower()
)
if user.is_authenticated() and organization != user.organization:
raise Http404
request.organization = organization
Set requestion organization to nonefrom django.http import Http404
from django.shortcuts import get_object_or_404
from .models import Organization
class OrganizationMiddleware(object):
def process_request(self, request):
subdomain = request.subdomain
user = request.user
request.organization = None
if subdomain is None:
request.organization = None
return
organization = get_object_or_404(
Organization, slug__iexact=subdomain.lower()
)
if user.is_authenticated() and organization != user.organization:
raise Http404
request.organization = organization
| <commit_before>from django.http import Http404
from django.shortcuts import get_object_or_404
from .models import Organization
class OrganizationMiddleware(object):
def process_request(self, request):
subdomain = request.subdomain
user = request.user
if subdomain is None:
request.organization = None
return
organization = get_object_or_404(
Organization, slug__iexact=subdomain.lower()
)
if user.is_authenticated() and organization != user.organization:
raise Http404
request.organization = organization
<commit_msg>Set requestion organization to none<commit_after>from django.http import Http404
from django.shortcuts import get_object_or_404
from .models import Organization
class OrganizationMiddleware(object):
def process_request(self, request):
subdomain = request.subdomain
user = request.user
request.organization = None
if subdomain is None:
request.organization = None
return
organization = get_object_or_404(
Organization, slug__iexact=subdomain.lower()
)
if user.is_authenticated() and organization != user.organization:
raise Http404
request.organization = organization
|
5a710d1013020dcbd66128b1c5235d652c4ef2da | udata/core/badges/forms.py | udata/core/badges/forms.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from udata.forms import ModelForm, fields, validators
from udata.i18n import lazy_gettext as _
from udata.models import Badge
__all__ = ('badge_form', )
def badge_form(model):
'''A form factory for a given model badges'''
class BadgeForm(ModelForm):
model_class = Badge
kind = fields.RadioField(
_('Kind'), [validators.required()],
choices=model.__badges__.items(),
description=_('Kind of badge (certified, etc)'))
return BadgeForm
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from udata.forms import ModelForm, fields, validators
from udata.i18n import lazy_gettext as _
from udata.models import Badge
__all__ = ('badge_form', )
def badge_form(model):
'''A form factory for a given model badges'''
class BadgeForm(ModelForm):
model_class = Badge
kind = fields.RadioField(
_('Kind'), [validators.DataRequired()],
choices=model.__badges__.items(),
description=_('Kind of badge (certified, etc)'))
return BadgeForm
| Switch from the deprecated validators.required() to validators.DataRequired() | Switch from the deprecated validators.required() to validators.DataRequired()
| Python | agpl-3.0 | opendatateam/udata,opendatateam/udata,etalab/udata,etalab/udata,etalab/udata,opendatateam/udata | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from udata.forms import ModelForm, fields, validators
from udata.i18n import lazy_gettext as _
from udata.models import Badge
__all__ = ('badge_form', )
def badge_form(model):
'''A form factory for a given model badges'''
class BadgeForm(ModelForm):
model_class = Badge
kind = fields.RadioField(
_('Kind'), [validators.required()],
choices=model.__badges__.items(),
description=_('Kind of badge (certified, etc)'))
return BadgeForm
Switch from the deprecated validators.required() to validators.DataRequired() | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from udata.forms import ModelForm, fields, validators
from udata.i18n import lazy_gettext as _
from udata.models import Badge
__all__ = ('badge_form', )
def badge_form(model):
'''A form factory for a given model badges'''
class BadgeForm(ModelForm):
model_class = Badge
kind = fields.RadioField(
_('Kind'), [validators.DataRequired()],
choices=model.__badges__.items(),
description=_('Kind of badge (certified, etc)'))
return BadgeForm
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from udata.forms import ModelForm, fields, validators
from udata.i18n import lazy_gettext as _
from udata.models import Badge
__all__ = ('badge_form', )
def badge_form(model):
'''A form factory for a given model badges'''
class BadgeForm(ModelForm):
model_class = Badge
kind = fields.RadioField(
_('Kind'), [validators.required()],
choices=model.__badges__.items(),
description=_('Kind of badge (certified, etc)'))
return BadgeForm
<commit_msg>Switch from the deprecated validators.required() to validators.DataRequired()<commit_after> | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from udata.forms import ModelForm, fields, validators
from udata.i18n import lazy_gettext as _
from udata.models import Badge
__all__ = ('badge_form', )
def badge_form(model):
'''A form factory for a given model badges'''
class BadgeForm(ModelForm):
model_class = Badge
kind = fields.RadioField(
_('Kind'), [validators.DataRequired()],
choices=model.__badges__.items(),
description=_('Kind of badge (certified, etc)'))
return BadgeForm
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from udata.forms import ModelForm, fields, validators
from udata.i18n import lazy_gettext as _
from udata.models import Badge
__all__ = ('badge_form', )
def badge_form(model):
'''A form factory for a given model badges'''
class BadgeForm(ModelForm):
model_class = Badge
kind = fields.RadioField(
_('Kind'), [validators.required()],
choices=model.__badges__.items(),
description=_('Kind of badge (certified, etc)'))
return BadgeForm
Switch from the deprecated validators.required() to validators.DataRequired()# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from udata.forms import ModelForm, fields, validators
from udata.i18n import lazy_gettext as _
from udata.models import Badge
__all__ = ('badge_form', )
def badge_form(model):
'''A form factory for a given model badges'''
class BadgeForm(ModelForm):
model_class = Badge
kind = fields.RadioField(
_('Kind'), [validators.DataRequired()],
choices=model.__badges__.items(),
description=_('Kind of badge (certified, etc)'))
return BadgeForm
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from udata.forms import ModelForm, fields, validators
from udata.i18n import lazy_gettext as _
from udata.models import Badge
__all__ = ('badge_form', )
def badge_form(model):
'''A form factory for a given model badges'''
class BadgeForm(ModelForm):
model_class = Badge
kind = fields.RadioField(
_('Kind'), [validators.required()],
choices=model.__badges__.items(),
description=_('Kind of badge (certified, etc)'))
return BadgeForm
<commit_msg>Switch from the deprecated validators.required() to validators.DataRequired()<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from udata.forms import ModelForm, fields, validators
from udata.i18n import lazy_gettext as _
from udata.models import Badge
__all__ = ('badge_form', )
def badge_form(model):
'''A form factory for a given model badges'''
class BadgeForm(ModelForm):
model_class = Badge
kind = fields.RadioField(
_('Kind'), [validators.DataRequired()],
choices=model.__badges__.items(),
description=_('Kind of badge (certified, etc)'))
return BadgeForm
|
ee9df63aeaabb4111cea3698a4f0e30b4502e519 | test/disable_captcha.py | test/disable_captcha.py | import owebunit
import urlparse
from wolis_test_case import WolisTestCase
class AcpLoginTestCase(WolisTestCase):
def test_disable_captcha(self):
self.login('morpheus', 'morpheus')
self.acp_login('morpheus', 'morpheus')
start_url = '/adm/index.php'
self.get_with_sid(start_url)
self.assert_status(200)
assert 'Board statistics' in self.response.body
url = self.link_href_by_text('Spambot countermeasures')
# already has sid
self.get(urlparse.urljoin(start_url, url))
self.assert_status(200)
assert 'Enable spambot countermeasures' in self.response.body
assert len(self.response.forms) == 1
form = self.response.forms[0]
params = {
'enable_confirm': '0',
}
params = owebunit.extend_params(form.params.list, params)
self.post(form.computed_action, body=params)
self.assert_status(200)
assert 'Configuration updated successfully' in self.response.body
if __name__ == '__main__':
import unittest
unittest.main()
| import owebunit
import urlparse
from wolis_test_case import WolisTestCase
class AcpLoginTestCase(WolisTestCase):
def test_disable_captcha(self):
self.login('morpheus', 'morpheus')
self.acp_login('morpheus', 'morpheus')
self.change_acp_knob(
link_text='Spambot countermeasures',
check_page_text='Enable spambot countermeasures',
name='enable_confirm',
value='0',
)
def change_acp_knob(self, link_text, check_page_text, name, value):
start_url = '/adm/index.php'
self.get_with_sid(start_url)
self.assert_status(200)
assert 'Board statistics' in self.response.body
url = self.link_href_by_text(link_text)
# already has sid
self.get(urlparse.urljoin(start_url, url))
self.assert_status(200)
assert check_page_text in self.response.body
assert len(self.response.forms) == 1
form = self.response.forms[0]
params = {
name: value,
}
params = owebunit.extend_params(form.params.list, params)
self.post(form.computed_action, body=params)
self.assert_status(200)
assert 'Configuration updated successfully' in self.response.body
if __name__ == '__main__':
import unittest
unittest.main()
| Generalize to allow editing other configuration | Generalize to allow editing other configuration
| Python | bsd-2-clause | p/wolis-phpbb,p/wolis-phpbb | import owebunit
import urlparse
from wolis_test_case import WolisTestCase
class AcpLoginTestCase(WolisTestCase):
def test_disable_captcha(self):
self.login('morpheus', 'morpheus')
self.acp_login('morpheus', 'morpheus')
start_url = '/adm/index.php'
self.get_with_sid(start_url)
self.assert_status(200)
assert 'Board statistics' in self.response.body
url = self.link_href_by_text('Spambot countermeasures')
# already has sid
self.get(urlparse.urljoin(start_url, url))
self.assert_status(200)
assert 'Enable spambot countermeasures' in self.response.body
assert len(self.response.forms) == 1
form = self.response.forms[0]
params = {
'enable_confirm': '0',
}
params = owebunit.extend_params(form.params.list, params)
self.post(form.computed_action, body=params)
self.assert_status(200)
assert 'Configuration updated successfully' in self.response.body
if __name__ == '__main__':
import unittest
unittest.main()
Generalize to allow editing other configuration | import owebunit
import urlparse
from wolis_test_case import WolisTestCase
class AcpLoginTestCase(WolisTestCase):
def test_disable_captcha(self):
self.login('morpheus', 'morpheus')
self.acp_login('morpheus', 'morpheus')
self.change_acp_knob(
link_text='Spambot countermeasures',
check_page_text='Enable spambot countermeasures',
name='enable_confirm',
value='0',
)
def change_acp_knob(self, link_text, check_page_text, name, value):
start_url = '/adm/index.php'
self.get_with_sid(start_url)
self.assert_status(200)
assert 'Board statistics' in self.response.body
url = self.link_href_by_text(link_text)
# already has sid
self.get(urlparse.urljoin(start_url, url))
self.assert_status(200)
assert check_page_text in self.response.body
assert len(self.response.forms) == 1
form = self.response.forms[0]
params = {
name: value,
}
params = owebunit.extend_params(form.params.list, params)
self.post(form.computed_action, body=params)
self.assert_status(200)
assert 'Configuration updated successfully' in self.response.body
if __name__ == '__main__':
import unittest
unittest.main()
| <commit_before>import owebunit
import urlparse
from wolis_test_case import WolisTestCase
class AcpLoginTestCase(WolisTestCase):
def test_disable_captcha(self):
self.login('morpheus', 'morpheus')
self.acp_login('morpheus', 'morpheus')
start_url = '/adm/index.php'
self.get_with_sid(start_url)
self.assert_status(200)
assert 'Board statistics' in self.response.body
url = self.link_href_by_text('Spambot countermeasures')
# already has sid
self.get(urlparse.urljoin(start_url, url))
self.assert_status(200)
assert 'Enable spambot countermeasures' in self.response.body
assert len(self.response.forms) == 1
form = self.response.forms[0]
params = {
'enable_confirm': '0',
}
params = owebunit.extend_params(form.params.list, params)
self.post(form.computed_action, body=params)
self.assert_status(200)
assert 'Configuration updated successfully' in self.response.body
if __name__ == '__main__':
import unittest
unittest.main()
<commit_msg>Generalize to allow editing other configuration<commit_after> | import owebunit
import urlparse
from wolis_test_case import WolisTestCase
class AcpLoginTestCase(WolisTestCase):
def test_disable_captcha(self):
self.login('morpheus', 'morpheus')
self.acp_login('morpheus', 'morpheus')
self.change_acp_knob(
link_text='Spambot countermeasures',
check_page_text='Enable spambot countermeasures',
name='enable_confirm',
value='0',
)
def change_acp_knob(self, link_text, check_page_text, name, value):
start_url = '/adm/index.php'
self.get_with_sid(start_url)
self.assert_status(200)
assert 'Board statistics' in self.response.body
url = self.link_href_by_text(link_text)
# already has sid
self.get(urlparse.urljoin(start_url, url))
self.assert_status(200)
assert check_page_text in self.response.body
assert len(self.response.forms) == 1
form = self.response.forms[0]
params = {
name: value,
}
params = owebunit.extend_params(form.params.list, params)
self.post(form.computed_action, body=params)
self.assert_status(200)
assert 'Configuration updated successfully' in self.response.body
if __name__ == '__main__':
import unittest
unittest.main()
| import owebunit
import urlparse
from wolis_test_case import WolisTestCase
class AcpLoginTestCase(WolisTestCase):
def test_disable_captcha(self):
self.login('morpheus', 'morpheus')
self.acp_login('morpheus', 'morpheus')
start_url = '/adm/index.php'
self.get_with_sid(start_url)
self.assert_status(200)
assert 'Board statistics' in self.response.body
url = self.link_href_by_text('Spambot countermeasures')
# already has sid
self.get(urlparse.urljoin(start_url, url))
self.assert_status(200)
assert 'Enable spambot countermeasures' in self.response.body
assert len(self.response.forms) == 1
form = self.response.forms[0]
params = {
'enable_confirm': '0',
}
params = owebunit.extend_params(form.params.list, params)
self.post(form.computed_action, body=params)
self.assert_status(200)
assert 'Configuration updated successfully' in self.response.body
if __name__ == '__main__':
import unittest
unittest.main()
Generalize to allow editing other configurationimport owebunit
import urlparse
from wolis_test_case import WolisTestCase
class AcpLoginTestCase(WolisTestCase):
def test_disable_captcha(self):
self.login('morpheus', 'morpheus')
self.acp_login('morpheus', 'morpheus')
self.change_acp_knob(
link_text='Spambot countermeasures',
check_page_text='Enable spambot countermeasures',
name='enable_confirm',
value='0',
)
def change_acp_knob(self, link_text, check_page_text, name, value):
start_url = '/adm/index.php'
self.get_with_sid(start_url)
self.assert_status(200)
assert 'Board statistics' in self.response.body
url = self.link_href_by_text(link_text)
# already has sid
self.get(urlparse.urljoin(start_url, url))
self.assert_status(200)
assert check_page_text in self.response.body
assert len(self.response.forms) == 1
form = self.response.forms[0]
params = {
name: value,
}
params = owebunit.extend_params(form.params.list, params)
self.post(form.computed_action, body=params)
self.assert_status(200)
assert 'Configuration updated successfully' in self.response.body
if __name__ == '__main__':
import unittest
unittest.main()
| <commit_before>import owebunit
import urlparse
from wolis_test_case import WolisTestCase
class AcpLoginTestCase(WolisTestCase):
def test_disable_captcha(self):
self.login('morpheus', 'morpheus')
self.acp_login('morpheus', 'morpheus')
start_url = '/adm/index.php'
self.get_with_sid(start_url)
self.assert_status(200)
assert 'Board statistics' in self.response.body
url = self.link_href_by_text('Spambot countermeasures')
# already has sid
self.get(urlparse.urljoin(start_url, url))
self.assert_status(200)
assert 'Enable spambot countermeasures' in self.response.body
assert len(self.response.forms) == 1
form = self.response.forms[0]
params = {
'enable_confirm': '0',
}
params = owebunit.extend_params(form.params.list, params)
self.post(form.computed_action, body=params)
self.assert_status(200)
assert 'Configuration updated successfully' in self.response.body
if __name__ == '__main__':
import unittest
unittest.main()
<commit_msg>Generalize to allow editing other configuration<commit_after>import owebunit
import urlparse
from wolis_test_case import WolisTestCase
class AcpLoginTestCase(WolisTestCase):
def test_disable_captcha(self):
self.login('morpheus', 'morpheus')
self.acp_login('morpheus', 'morpheus')
self.change_acp_knob(
link_text='Spambot countermeasures',
check_page_text='Enable spambot countermeasures',
name='enable_confirm',
value='0',
)
def change_acp_knob(self, link_text, check_page_text, name, value):
start_url = '/adm/index.php'
self.get_with_sid(start_url)
self.assert_status(200)
assert 'Board statistics' in self.response.body
url = self.link_href_by_text(link_text)
# already has sid
self.get(urlparse.urljoin(start_url, url))
self.assert_status(200)
assert check_page_text in self.response.body
assert len(self.response.forms) == 1
form = self.response.forms[0]
params = {
name: value,
}
params = owebunit.extend_params(form.params.list, params)
self.post(form.computed_action, body=params)
self.assert_status(200)
assert 'Configuration updated successfully' in self.response.body
if __name__ == '__main__':
import unittest
unittest.main()
|
d2d6e614bf618428ebd51019b82b6576f2e9c677 | bluebottle/activities/effects.py | bluebottle/activities/effects.py | from django.utils.timezone import now
from django.utils.translation import ugettext_lazy as _
from bluebottle.fsm.effects import Effect
from bluebottle.activities.models import Organizer, OrganizerContribution
class CreateOrganizer(Effect):
"Create an organizer for the activity"
def post_save(self, **kwargs):
Organizer.objects.get_or_create(
activity=self.instance,
defaults={'user': self.instance.owner}
)
def __str__(self):
return str(_('Create organizer'))
class CreateOrganizerContribution(Effect):
"Create an contribution for the organizer of the activity"
def post_save(self, **kwargs):
OrganizerContribution.objects.get_or_create(
contributor=self.instance
)
def __str__(self):
return str(_('Create organizer contribution'))
class SetContributionDateEffect(Effect):
"Set the contribution date"
conditions = []
display = False
def pre_save(self, **kwargs):
self.instance.start = now()
def __str__(self):
return _('Set the contribution date.')
| from django.utils.timezone import now
from django.utils.translation import ugettext_lazy as _
from bluebottle.fsm.effects import Effect
from bluebottle.activities.models import Organizer, OrganizerContribution
class CreateOrganizer(Effect):
"Create an organizer for the activity"
display = False
def post_save(self, **kwargs):
Organizer.objects.get_or_create(
activity=self.instance,
defaults={'user': self.instance.owner}
)
def __str__(self):
return str(_('Create organizer'))
class CreateOrganizerContribution(Effect):
"Create an contribution for the organizer of the activity"
display = False
def post_save(self, **kwargs):
OrganizerContribution.objects.get_or_create(
contributor=self.instance
)
def __str__(self):
return str(_('Create organizer contribution'))
class SetContributionDateEffect(Effect):
"Set the contribution date"
conditions = []
display = False
def pre_save(self, **kwargs):
self.instance.start = now()
def __str__(self):
return _('Set the contribution date.')
| Make sure we can add activities to initiatives in admin | Make sure we can add activities to initiatives in admin
| Python | bsd-3-clause | onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle | from django.utils.timezone import now
from django.utils.translation import ugettext_lazy as _
from bluebottle.fsm.effects import Effect
from bluebottle.activities.models import Organizer, OrganizerContribution
class CreateOrganizer(Effect):
"Create an organizer for the activity"
def post_save(self, **kwargs):
Organizer.objects.get_or_create(
activity=self.instance,
defaults={'user': self.instance.owner}
)
def __str__(self):
return str(_('Create organizer'))
class CreateOrganizerContribution(Effect):
"Create an contribution for the organizer of the activity"
def post_save(self, **kwargs):
OrganizerContribution.objects.get_or_create(
contributor=self.instance
)
def __str__(self):
return str(_('Create organizer contribution'))
class SetContributionDateEffect(Effect):
"Set the contribution date"
conditions = []
display = False
def pre_save(self, **kwargs):
self.instance.start = now()
def __str__(self):
return _('Set the contribution date.')
Make sure we can add activities to initiatives in admin | from django.utils.timezone import now
from django.utils.translation import ugettext_lazy as _
from bluebottle.fsm.effects import Effect
from bluebottle.activities.models import Organizer, OrganizerContribution
class CreateOrganizer(Effect):
"Create an organizer for the activity"
display = False
def post_save(self, **kwargs):
Organizer.objects.get_or_create(
activity=self.instance,
defaults={'user': self.instance.owner}
)
def __str__(self):
return str(_('Create organizer'))
class CreateOrganizerContribution(Effect):
"Create an contribution for the organizer of the activity"
display = False
def post_save(self, **kwargs):
OrganizerContribution.objects.get_or_create(
contributor=self.instance
)
def __str__(self):
return str(_('Create organizer contribution'))
class SetContributionDateEffect(Effect):
"Set the contribution date"
conditions = []
display = False
def pre_save(self, **kwargs):
self.instance.start = now()
def __str__(self):
return _('Set the contribution date.')
| <commit_before>from django.utils.timezone import now
from django.utils.translation import ugettext_lazy as _
from bluebottle.fsm.effects import Effect
from bluebottle.activities.models import Organizer, OrganizerContribution
class CreateOrganizer(Effect):
"Create an organizer for the activity"
def post_save(self, **kwargs):
Organizer.objects.get_or_create(
activity=self.instance,
defaults={'user': self.instance.owner}
)
def __str__(self):
return str(_('Create organizer'))
class CreateOrganizerContribution(Effect):
"Create an contribution for the organizer of the activity"
def post_save(self, **kwargs):
OrganizerContribution.objects.get_or_create(
contributor=self.instance
)
def __str__(self):
return str(_('Create organizer contribution'))
class SetContributionDateEffect(Effect):
"Set the contribution date"
conditions = []
display = False
def pre_save(self, **kwargs):
self.instance.start = now()
def __str__(self):
return _('Set the contribution date.')
<commit_msg>Make sure we can add activities to initiatives in admin<commit_after> | from django.utils.timezone import now
from django.utils.translation import ugettext_lazy as _
from bluebottle.fsm.effects import Effect
from bluebottle.activities.models import Organizer, OrganizerContribution
class CreateOrganizer(Effect):
"Create an organizer for the activity"
display = False
def post_save(self, **kwargs):
Organizer.objects.get_or_create(
activity=self.instance,
defaults={'user': self.instance.owner}
)
def __str__(self):
return str(_('Create organizer'))
class CreateOrganizerContribution(Effect):
"Create an contribution for the organizer of the activity"
display = False
def post_save(self, **kwargs):
OrganizerContribution.objects.get_or_create(
contributor=self.instance
)
def __str__(self):
return str(_('Create organizer contribution'))
class SetContributionDateEffect(Effect):
"Set the contribution date"
conditions = []
display = False
def pre_save(self, **kwargs):
self.instance.start = now()
def __str__(self):
return _('Set the contribution date.')
| from django.utils.timezone import now
from django.utils.translation import ugettext_lazy as _
from bluebottle.fsm.effects import Effect
from bluebottle.activities.models import Organizer, OrganizerContribution
class CreateOrganizer(Effect):
"Create an organizer for the activity"
def post_save(self, **kwargs):
Organizer.objects.get_or_create(
activity=self.instance,
defaults={'user': self.instance.owner}
)
def __str__(self):
return str(_('Create organizer'))
class CreateOrganizerContribution(Effect):
"Create an contribution for the organizer of the activity"
def post_save(self, **kwargs):
OrganizerContribution.objects.get_or_create(
contributor=self.instance
)
def __str__(self):
return str(_('Create organizer contribution'))
class SetContributionDateEffect(Effect):
"Set the contribution date"
conditions = []
display = False
def pre_save(self, **kwargs):
self.instance.start = now()
def __str__(self):
return _('Set the contribution date.')
Make sure we can add activities to initiatives in adminfrom django.utils.timezone import now
from django.utils.translation import ugettext_lazy as _
from bluebottle.fsm.effects import Effect
from bluebottle.activities.models import Organizer, OrganizerContribution
class CreateOrganizer(Effect):
"Create an organizer for the activity"
display = False
def post_save(self, **kwargs):
Organizer.objects.get_or_create(
activity=self.instance,
defaults={'user': self.instance.owner}
)
def __str__(self):
return str(_('Create organizer'))
class CreateOrganizerContribution(Effect):
"Create an contribution for the organizer of the activity"
display = False
def post_save(self, **kwargs):
OrganizerContribution.objects.get_or_create(
contributor=self.instance
)
def __str__(self):
return str(_('Create organizer contribution'))
class SetContributionDateEffect(Effect):
"Set the contribution date"
conditions = []
display = False
def pre_save(self, **kwargs):
self.instance.start = now()
def __str__(self):
return _('Set the contribution date.')
| <commit_before>from django.utils.timezone import now
from django.utils.translation import ugettext_lazy as _
from bluebottle.fsm.effects import Effect
from bluebottle.activities.models import Organizer, OrganizerContribution
class CreateOrganizer(Effect):
"Create an organizer for the activity"
def post_save(self, **kwargs):
Organizer.objects.get_or_create(
activity=self.instance,
defaults={'user': self.instance.owner}
)
def __str__(self):
return str(_('Create organizer'))
class CreateOrganizerContribution(Effect):
"Create an contribution for the organizer of the activity"
def post_save(self, **kwargs):
OrganizerContribution.objects.get_or_create(
contributor=self.instance
)
def __str__(self):
return str(_('Create organizer contribution'))
class SetContributionDateEffect(Effect):
"Set the contribution date"
conditions = []
display = False
def pre_save(self, **kwargs):
self.instance.start = now()
def __str__(self):
return _('Set the contribution date.')
<commit_msg>Make sure we can add activities to initiatives in admin<commit_after>from django.utils.timezone import now
from django.utils.translation import ugettext_lazy as _
from bluebottle.fsm.effects import Effect
from bluebottle.activities.models import Organizer, OrganizerContribution
class CreateOrganizer(Effect):
    """Create an organizer for the activity."""

    display = False

    def post_save(self, **kwargs):
        # Idempotent: only creates the organizer the first time around.
        activity = self.instance
        Organizer.objects.get_or_create(
            activity=activity,
            defaults={'user': activity.owner}
        )

    def __str__(self):
        return str(_('Create organizer'))
class CreateOrganizerContribution(Effect):
    """Create a contribution for the organizer of the activity."""

    display = False

    def post_save(self, **kwargs):
        # get_or_create keeps the effect safe to run more than once.
        OrganizerContribution.objects.get_or_create(contributor=self.instance)

    def __str__(self):
        return str(_('Create organizer contribution'))
class SetContributionDateEffect(Effect):
    """Stamp the contribution with the current time before saving."""

    conditions = []
    display = False

    def pre_save(self, **kwargs):
        # `start` is set to "now" at save time, not at object creation.
        self.instance.start = now()

    def __str__(self):
        return _('Set the contribution date.')
|
fa88a6e6da009e1279589c76cff1e73abaebae48 | common/djangoapps/track/utils.py | common/djangoapps/track/utils.py | """Utility functions and classes for track backends"""
import json
from datetime import datetime, date
from pytz import UTC
class DateTimeJSONEncoder(json.JSONEncoder):
"""JSON encoder aware of datetime.datetime and datetime.date objects"""
def default(self, obj): # pylint: disable=method-hidden
"""
Serialize datetime and date objects of iso format.
datatime objects are converted to UTC.
"""
if isinstance(obj, datetime):
if obj.tzinfo is None:
# Localize to UTC naive datetime objects
obj = UTC.localize(obj)
else:
# Convert to UTC datetime objects from other timezones
obj = obj.astimezone(UTC)
return obj.isoformat()
elif isinstance(obj, date):
return obj.isoformat()
return super(DateTimeJSONEncoder, self).default(obj)
| """Utility functions and classes for track backends"""
import json
from datetime import datetime, date
from pytz import UTC
class DateTimeJSONEncoder(json.JSONEncoder):
    """JSON encoder that can serialize datetime.datetime and datetime.date."""

    def default(self, obj):  # pylint: disable=method-hidden
        """Serialize datetime/date objects to ISO-8601 strings.

        Naive datetimes are treated as UTC; aware datetimes are
        converted to UTC before formatting.
        """
        # The datetime branch must come first: datetime is a subclass
        # of date, so the order of these checks matters.
        if isinstance(obj, datetime):
            if obj.tzinfo is None:
                utc_value = UTC.localize(obj)
            else:
                utc_value = obj.astimezone(UTC)
            return utc_value.isoformat()
        if isinstance(obj, date):
            return obj.isoformat()
        return super(DateTimeJSONEncoder, self).default(obj)
| Fix the misspell in docstring | style: Fix the misspell in docstring
Datatime -> Datetime
| Python | agpl-3.0 | romain-li/edx-platform,romain-li/edx-platform,romain-li/edx-platform,romain-li/edx-platform,romain-li/edx-platform | """Utility functions and classes for track backends"""
import json
from datetime import datetime, date
from pytz import UTC
class DateTimeJSONEncoder(json.JSONEncoder):
"""JSON encoder aware of datetime.datetime and datetime.date objects"""
def default(self, obj): # pylint: disable=method-hidden
"""
Serialize datetime and date objects of iso format.
datatime objects are converted to UTC.
"""
if isinstance(obj, datetime):
if obj.tzinfo is None:
# Localize to UTC naive datetime objects
obj = UTC.localize(obj)
else:
# Convert to UTC datetime objects from other timezones
obj = obj.astimezone(UTC)
return obj.isoformat()
elif isinstance(obj, date):
return obj.isoformat()
return super(DateTimeJSONEncoder, self).default(obj)
style: Fix the misspell in docstring
Datatime -> Datetime | """Utility functions and classes for track backends"""
import json
from datetime import datetime, date
from pytz import UTC
class DateTimeJSONEncoder(json.JSONEncoder):
"""JSON encoder aware of datetime.datetime and datetime.date objects"""
def default(self, obj): # pylint: disable=method-hidden
"""
Serialize datetime and date objects of iso format.
datetime objects are converted to UTC.
"""
if isinstance(obj, datetime):
if obj.tzinfo is None:
# Localize to UTC naive datetime objects
obj = UTC.localize(obj)
else:
# Convert to UTC datetime objects from other timezones
obj = obj.astimezone(UTC)
return obj.isoformat()
elif isinstance(obj, date):
return obj.isoformat()
return super(DateTimeJSONEncoder, self).default(obj)
| <commit_before>"""Utility functions and classes for track backends"""
import json
from datetime import datetime, date
from pytz import UTC
class DateTimeJSONEncoder(json.JSONEncoder):
    """JSON encoder aware of datetime.datetime and datetime.date objects"""
    def default(self, obj):  # pylint: disable=method-hidden
        """
        Serialize datetime and date objects of iso format.
        datetime objects are converted to UTC.
        """
        # datetime must be tested before date: datetime is a subclass
        # of date, so the order of these branches matters.
        if isinstance(obj, datetime):
            if obj.tzinfo is None:
                # Localize to UTC naive datetime objects
                obj = UTC.localize(obj)
            else:
                # Convert to UTC datetime objects from other timezones
                obj = obj.astimezone(UTC)
            return obj.isoformat()
        elif isinstance(obj, date):
            # Plain dates carry no timezone information; emit as-is.
            return obj.isoformat()
        # Anything else falls through to the base class, which raises
        # TypeError for unserializable objects.
        return super(DateTimeJSONEncoder, self).default(obj)
<commit_msg>style: Fix the misspell in docstring
Datatime -> Datetime<commit_after> | """Utility functions and classes for track backends"""
import json
from datetime import datetime, date
from pytz import UTC
class DateTimeJSONEncoder(json.JSONEncoder):
"""JSON encoder aware of datetime.datetime and datetime.date objects"""
def default(self, obj): # pylint: disable=method-hidden
"""
Serialize datetime and date objects of iso format.
datetime objects are converted to UTC.
"""
if isinstance(obj, datetime):
if obj.tzinfo is None:
# Localize to UTC naive datetime objects
obj = UTC.localize(obj)
else:
# Convert to UTC datetime objects from other timezones
obj = obj.astimezone(UTC)
return obj.isoformat()
elif isinstance(obj, date):
return obj.isoformat()
return super(DateTimeJSONEncoder, self).default(obj)
| """Utility functions and classes for track backends"""
import json
from datetime import datetime, date
from pytz import UTC
class DateTimeJSONEncoder(json.JSONEncoder):
"""JSON encoder aware of datetime.datetime and datetime.date objects"""
def default(self, obj): # pylint: disable=method-hidden
"""
Serialize datetime and date objects of iso format.
datatime objects are converted to UTC.
"""
if isinstance(obj, datetime):
if obj.tzinfo is None:
# Localize to UTC naive datetime objects
obj = UTC.localize(obj)
else:
# Convert to UTC datetime objects from other timezones
obj = obj.astimezone(UTC)
return obj.isoformat()
elif isinstance(obj, date):
return obj.isoformat()
return super(DateTimeJSONEncoder, self).default(obj)
style: Fix the misspell in docstring
Datatime -> Datetime"""Utility functions and classes for track backends"""
import json
from datetime import datetime, date
from pytz import UTC
class DateTimeJSONEncoder(json.JSONEncoder):
"""JSON encoder aware of datetime.datetime and datetime.date objects"""
def default(self, obj): # pylint: disable=method-hidden
"""
Serialize datetime and date objects of iso format.
datetime objects are converted to UTC.
"""
if isinstance(obj, datetime):
if obj.tzinfo is None:
# Localize to UTC naive datetime objects
obj = UTC.localize(obj)
else:
# Convert to UTC datetime objects from other timezones
obj = obj.astimezone(UTC)
return obj.isoformat()
elif isinstance(obj, date):
return obj.isoformat()
return super(DateTimeJSONEncoder, self).default(obj)
| <commit_before>"""Utility functions and classes for track backends"""
import json
from datetime import datetime, date
from pytz import UTC
class DateTimeJSONEncoder(json.JSONEncoder):
"""JSON encoder aware of datetime.datetime and datetime.date objects"""
def default(self, obj): # pylint: disable=method-hidden
"""
Serialize datetime and date objects of iso format.
datatime objects are converted to UTC.
"""
if isinstance(obj, datetime):
if obj.tzinfo is None:
# Localize to UTC naive datetime objects
obj = UTC.localize(obj)
else:
# Convert to UTC datetime objects from other timezones
obj = obj.astimezone(UTC)
return obj.isoformat()
elif isinstance(obj, date):
return obj.isoformat()
return super(DateTimeJSONEncoder, self).default(obj)
<commit_msg>style: Fix the misspell in docstring
Datatime -> Datetime<commit_after>"""Utility functions and classes for track backends"""
import json
from datetime import datetime, date
from pytz import UTC
class DateTimeJSONEncoder(json.JSONEncoder):
"""JSON encoder aware of datetime.datetime and datetime.date objects"""
def default(self, obj): # pylint: disable=method-hidden
"""
Serialize datetime and date objects of iso format.
datetime objects are converted to UTC.
"""
if isinstance(obj, datetime):
if obj.tzinfo is None:
# Localize to UTC naive datetime objects
obj = UTC.localize(obj)
else:
# Convert to UTC datetime objects from other timezones
obj = obj.astimezone(UTC)
return obj.isoformat()
elif isinstance(obj, date):
return obj.isoformat()
return super(DateTimeJSONEncoder, self).default(obj)
|
d130a926c847f37f039dfff7c14140d933b7a6af | django/website/contacts/tests/test_group_permissions.py | django/website/contacts/tests/test_group_permissions.py | import pytest
from django.contrib.auth.models import Permission, Group, ContentType
from contacts.group_permissions import GroupPermissions
@pytest.mark.django_db
def test_add_perms():
g1, _ = Group.objects.get_or_create(name="Test Group 1")
g2, _ = Group.objects.get_or_create(name="Test Group 2")
any_model = Group # for example
content_type = ContentType.objects.get_for_model(any_model)
codenames = ['a_do_stuff', 'b_do_more_stuff']
expected_permissions = []
for name in codenames:
perm, _ = Permission.objects.get_or_create(name=name,
codename=name,
content_type=content_type)
expected_permissions.append(perm)
gp = GroupPermissions()
with gp.groups(g1, g2):
gp.add_permissions(any_model, *codenames)
assert list(g1.permissions.all()) == expected_permissions
assert list(g2.permissions.all()) == expected_permissions
| import pytest
from django.contrib.auth.models import Permission, Group, ContentType
from django.core.exceptions import ObjectDoesNotExist
from contacts.group_permissions import GroupPermissions
@pytest.mark.django_db
def test_add_perms():
    """Permissions added inside gp.groups() land on every listed group."""
    group_one, _ = Group.objects.get_or_create(name="Test Group 1")
    group_two, _ = Group.objects.get_or_create(name="Test Group 2")

    any_model = Group  # for example
    content_type = ContentType.objects.get_for_model(any_model)

    codenames = ['a_do_stuff', 'b_do_more_stuff']
    expected_permissions = [
        Permission.objects.get_or_create(
            name=code, codename=code, content_type=content_type)[0]
        for code in codenames
    ]

    gp = GroupPermissions()
    with gp.groups(group_one, group_two):
        gp.add_permissions(any_model, *codenames)

    assert list(group_one.permissions.all()) == expected_permissions
    assert list(group_two.permissions.all()) == expected_permissions
@pytest.mark.django_db
def test_add_nonexistent_perms():
g1, _ = Group.objects.get_or_create(name="Test Group 1")
g2, _ = Group.objects.get_or_create(name="Test Group 2")
any_model = Group # for example
codenames = ['a_do_stuff', 'b_do_more_stuff']
gp = GroupPermissions()
with gp.groups(g1, g2):
try:
gp.add_permissions(any_model, *codenames)
pytest.fail("This should raise an ObjectDoesNotExist exception", False)
except ObjectDoesNotExist:
pass
| Test can't give group non-existent permission | Test can't give group non-existent permission
| Python | agpl-3.0 | aptivate/alfie,daniell/kashana,aptivate/alfie,aptivate/kashana,aptivate/alfie,aptivate/alfie,daniell/kashana,daniell/kashana,aptivate/kashana,daniell/kashana,aptivate/kashana,aptivate/kashana | import pytest
from django.contrib.auth.models import Permission, Group, ContentType
from contacts.group_permissions import GroupPermissions
@pytest.mark.django_db
def test_add_perms():
g1, _ = Group.objects.get_or_create(name="Test Group 1")
g2, _ = Group.objects.get_or_create(name="Test Group 2")
any_model = Group # for example
content_type = ContentType.objects.get_for_model(any_model)
codenames = ['a_do_stuff', 'b_do_more_stuff']
expected_permissions = []
for name in codenames:
perm, _ = Permission.objects.get_or_create(name=name,
codename=name,
content_type=content_type)
expected_permissions.append(perm)
gp = GroupPermissions()
with gp.groups(g1, g2):
gp.add_permissions(any_model, *codenames)
assert list(g1.permissions.all()) == expected_permissions
assert list(g2.permissions.all()) == expected_permissions
Test can't give group non-exsitent permission | import pytest
from django.contrib.auth.models import Permission, Group, ContentType
from django.core.exceptions import ObjectDoesNotExist
from contacts.group_permissions import GroupPermissions
@pytest.mark.django_db
def test_add_perms():
g1, _ = Group.objects.get_or_create(name="Test Group 1")
g2, _ = Group.objects.get_or_create(name="Test Group 2")
any_model = Group # for example
content_type = ContentType.objects.get_for_model(any_model)
codenames = ['a_do_stuff', 'b_do_more_stuff']
expected_permissions = []
for name in codenames:
perm, _ = Permission.objects.get_or_create(name=name,
codename=name,
content_type=content_type)
expected_permissions.append(perm)
gp = GroupPermissions()
with gp.groups(g1, g2):
gp.add_permissions(any_model, *codenames)
assert list(g1.permissions.all()) == expected_permissions
assert list(g2.permissions.all()) == expected_permissions
@pytest.mark.django_db
def test_add_nonexistent_perms():
    """Granting a permission that was never created must raise.

    The Permission rows are deliberately *not* created beforehand, so
    add_permissions() has to fail with ObjectDoesNotExist.
    """
    g1, _ = Group.objects.get_or_create(name="Test Group 1")
    g2, _ = Group.objects.get_or_create(name="Test Group 2")

    any_model = Group  # for example
    codenames = ['a_do_stuff', 'b_do_more_stuff']

    gp = GroupPermissions()
    with gp.groups(g1, g2):
        # pytest.raises replaces the manual try/except/pytest.fail dance
        # and fails the test automatically if nothing is raised.
        with pytest.raises(ObjectDoesNotExist):
            gp.add_permissions(any_model, *codenames)
| <commit_before>import pytest
from django.contrib.auth.models import Permission, Group, ContentType
from contacts.group_permissions import GroupPermissions
@pytest.mark.django_db
def test_add_perms():
g1, _ = Group.objects.get_or_create(name="Test Group 1")
g2, _ = Group.objects.get_or_create(name="Test Group 2")
any_model = Group # for example
content_type = ContentType.objects.get_for_model(any_model)
codenames = ['a_do_stuff', 'b_do_more_stuff']
expected_permissions = []
for name in codenames:
perm, _ = Permission.objects.get_or_create(name=name,
codename=name,
content_type=content_type)
expected_permissions.append(perm)
gp = GroupPermissions()
with gp.groups(g1, g2):
gp.add_permissions(any_model, *codenames)
assert list(g1.permissions.all()) == expected_permissions
assert list(g2.permissions.all()) == expected_permissions
<commit_msg>Test can't give group non-existent permission<commit_after>
from django.contrib.auth.models import Permission, Group, ContentType
from django.core.exceptions import ObjectDoesNotExist
from contacts.group_permissions import GroupPermissions
@pytest.mark.django_db
def test_add_perms():
g1, _ = Group.objects.get_or_create(name="Test Group 1")
g2, _ = Group.objects.get_or_create(name="Test Group 2")
any_model = Group # for example
content_type = ContentType.objects.get_for_model(any_model)
codenames = ['a_do_stuff', 'b_do_more_stuff']
expected_permissions = []
for name in codenames:
perm, _ = Permission.objects.get_or_create(name=name,
codename=name,
content_type=content_type)
expected_permissions.append(perm)
gp = GroupPermissions()
with gp.groups(g1, g2):
gp.add_permissions(any_model, *codenames)
assert list(g1.permissions.all()) == expected_permissions
assert list(g2.permissions.all()) == expected_permissions
@pytest.mark.django_db
def test_add_nonexistent_perms():
g1, _ = Group.objects.get_or_create(name="Test Group 1")
g2, _ = Group.objects.get_or_create(name="Test Group 2")
any_model = Group # for example
codenames = ['a_do_stuff', 'b_do_more_stuff']
gp = GroupPermissions()
with gp.groups(g1, g2):
try:
gp.add_permissions(any_model, *codenames)
pytest.fail("This should raise an ObjectDoesNotExist exception", False)
except ObjectDoesNotExist:
pass
| import pytest
from django.contrib.auth.models import Permission, Group, ContentType
from contacts.group_permissions import GroupPermissions
@pytest.mark.django_db
def test_add_perms():
g1, _ = Group.objects.get_or_create(name="Test Group 1")
g2, _ = Group.objects.get_or_create(name="Test Group 2")
any_model = Group # for example
content_type = ContentType.objects.get_for_model(any_model)
codenames = ['a_do_stuff', 'b_do_more_stuff']
expected_permissions = []
for name in codenames:
perm, _ = Permission.objects.get_or_create(name=name,
codename=name,
content_type=content_type)
expected_permissions.append(perm)
gp = GroupPermissions()
with gp.groups(g1, g2):
gp.add_permissions(any_model, *codenames)
assert list(g1.permissions.all()) == expected_permissions
assert list(g2.permissions.all()) == expected_permissions
Test can't give group non-exsitent permissionimport pytest
from django.contrib.auth.models import Permission, Group, ContentType
from django.core.exceptions import ObjectDoesNotExist
from contacts.group_permissions import GroupPermissions
@pytest.mark.django_db
def test_add_perms():
g1, _ = Group.objects.get_or_create(name="Test Group 1")
g2, _ = Group.objects.get_or_create(name="Test Group 2")
any_model = Group # for example
content_type = ContentType.objects.get_for_model(any_model)
codenames = ['a_do_stuff', 'b_do_more_stuff']
expected_permissions = []
for name in codenames:
perm, _ = Permission.objects.get_or_create(name=name,
codename=name,
content_type=content_type)
expected_permissions.append(perm)
gp = GroupPermissions()
with gp.groups(g1, g2):
gp.add_permissions(any_model, *codenames)
assert list(g1.permissions.all()) == expected_permissions
assert list(g2.permissions.all()) == expected_permissions
@pytest.mark.django_db
def test_add_nonexistent_perms():
g1, _ = Group.objects.get_or_create(name="Test Group 1")
g2, _ = Group.objects.get_or_create(name="Test Group 2")
any_model = Group # for example
codenames = ['a_do_stuff', 'b_do_more_stuff']
gp = GroupPermissions()
with gp.groups(g1, g2):
try:
gp.add_permissions(any_model, *codenames)
pytest.fail("This should raise an ObjectDoesNotExist exception", False)
except ObjectDoesNotExist:
pass
| <commit_before>import pytest
from django.contrib.auth.models import Permission, Group, ContentType
from contacts.group_permissions import GroupPermissions
@pytest.mark.django_db
def test_add_perms():
g1, _ = Group.objects.get_or_create(name="Test Group 1")
g2, _ = Group.objects.get_or_create(name="Test Group 2")
any_model = Group # for example
content_type = ContentType.objects.get_for_model(any_model)
codenames = ['a_do_stuff', 'b_do_more_stuff']
expected_permissions = []
for name in codenames:
perm, _ = Permission.objects.get_or_create(name=name,
codename=name,
content_type=content_type)
expected_permissions.append(perm)
gp = GroupPermissions()
with gp.groups(g1, g2):
gp.add_permissions(any_model, *codenames)
assert list(g1.permissions.all()) == expected_permissions
assert list(g2.permissions.all()) == expected_permissions
<commit_msg>Test can't give group non-existent permission<commit_after>
from django.contrib.auth.models import Permission, Group, ContentType
from django.core.exceptions import ObjectDoesNotExist
from contacts.group_permissions import GroupPermissions
@pytest.mark.django_db
def test_add_perms():
g1, _ = Group.objects.get_or_create(name="Test Group 1")
g2, _ = Group.objects.get_or_create(name="Test Group 2")
any_model = Group # for example
content_type = ContentType.objects.get_for_model(any_model)
codenames = ['a_do_stuff', 'b_do_more_stuff']
expected_permissions = []
for name in codenames:
perm, _ = Permission.objects.get_or_create(name=name,
codename=name,
content_type=content_type)
expected_permissions.append(perm)
gp = GroupPermissions()
with gp.groups(g1, g2):
gp.add_permissions(any_model, *codenames)
assert list(g1.permissions.all()) == expected_permissions
assert list(g2.permissions.all()) == expected_permissions
@pytest.mark.django_db
def test_add_nonexistent_perms():
g1, _ = Group.objects.get_or_create(name="Test Group 1")
g2, _ = Group.objects.get_or_create(name="Test Group 2")
any_model = Group # for example
codenames = ['a_do_stuff', 'b_do_more_stuff']
gp = GroupPermissions()
with gp.groups(g1, g2):
try:
gp.add_permissions(any_model, *codenames)
pytest.fail("This should raise an ObjectDoesNotExist exception", False)
except ObjectDoesNotExist:
pass
|
dd444029abd63da594a36f52efbbc72851ac344f | bnw_handlers/command_register.py | bnw_handlers/command_register.py | # -*- coding: utf-8 -*-
#from twisted.words.xish import domish
from base import *
import random
import time
import bnw_core.bnw_objects as objs
def _(s,user):
return s
from uuid import uuid4
import re
@check_arg(name=USER_RE)
@defer.inlineCallbacks
def cmd_register(request,name=""):
""" Регистрация """
if request.user:
defer.returnValue(
dict(ok=False,
desc=u'You are already registered as %s' % (request.user['name'],),
)
)
else:
name=name.lower()[:128]
if name=='anonymous':
defer.returnValue(
dict(ok=False,desc=u'You aren''t anonymous.')
)
user=objs.User({ 'id': uuid4().hex,
'name': name,
'login_key': uuid4().hex,
'regdate': int(time.time()),
'jid': request.bare_jid,
'interface': 'redeye',
})
if not (yield objs.User.find_one({'name':name})):
_ = yield user.save()
defer.returnValue(
dict(ok=True,desc='We registered you as %s.' % (name,))
)
else:
defer.returnValue(
dict(ok=True,desc='This username is already taken')
)
| # -*- coding: utf-8 -*-
#from twisted.words.xish import domish
from base import *
import random
import time
import bnw_core.bnw_objects as objs
def _(s,user):
return s
from uuid import uuid4
import re
@check_arg(name=USER_RE)
@defer.inlineCallbacks
def cmd_register(request,name=""):
""" Регистрация """
if request.user:
defer.returnValue(
dict(ok=False,
desc=u'You are already registered as %s' % (request.user['name'],),
)
)
else:
name=name.lower()[:128]
if name=='anonymous':
defer.returnValue(
dict(ok=False,desc=u'You aren''t anonymous.')
)
user=objs.User({ 'id': uuid4().hex,
'name': name,
'login_key': uuid4().hex,
'regdate': int(time.time()),
'jid': request.bare_jid,
'interface': 'redeye',
'settings.servicejid': request.to.split('/',1)[0],
})
if not (yield objs.User.find_one({'name':name})):
_ = yield user.save()
defer.returnValue(
dict(ok=True,desc='We registered you as %s.' % (name,))
)
else:
defer.returnValue(
dict(ok=True,desc='This username is already taken')
)
| Set servicejid setting on register | Set servicejid setting on register
| Python | bsd-2-clause | un-def/bnw,stiletto/bnw,stiletto/bnw,stiletto/bnw,ojab/bnw,ojab/bnw,ojab/bnw,un-def/bnw,un-def/bnw,un-def/bnw,stiletto/bnw,ojab/bnw | # -*- coding: utf-8 -*-
#from twisted.words.xish import domish
from base import *
import random
import time
import bnw_core.bnw_objects as objs
def _(s,user):
return s
from uuid import uuid4
import re
@check_arg(name=USER_RE)
@defer.inlineCallbacks
def cmd_register(request,name=""):
""" Регистрация """
if request.user:
defer.returnValue(
dict(ok=False,
desc=u'You are already registered as %s' % (request.user['name'],),
)
)
else:
name=name.lower()[:128]
if name=='anonymous':
defer.returnValue(
dict(ok=False,desc=u'You aren''t anonymous.')
)
user=objs.User({ 'id': uuid4().hex,
'name': name,
'login_key': uuid4().hex,
'regdate': int(time.time()),
'jid': request.bare_jid,
'interface': 'redeye',
})
if not (yield objs.User.find_one({'name':name})):
_ = yield user.save()
defer.returnValue(
dict(ok=True,desc='We registered you as %s.' % (name,))
)
else:
defer.returnValue(
dict(ok=True,desc='This username is already taken')
)
Set servicejid setting on register | # -*- coding: utf-8 -*-
#from twisted.words.xish import domish
from base import *
import random
import time
import bnw_core.bnw_objects as objs
def _(s,user):
return s
from uuid import uuid4
import re
@check_arg(name=USER_RE)
@defer.inlineCallbacks
def cmd_register(request,name=""):
""" Регистрация """
if request.user:
defer.returnValue(
dict(ok=False,
desc=u'You are already registered as %s' % (request.user['name'],),
)
)
else:
name=name.lower()[:128]
if name=='anonymous':
defer.returnValue(
dict(ok=False,desc=u'You aren''t anonymous.')
)
user=objs.User({ 'id': uuid4().hex,
'name': name,
'login_key': uuid4().hex,
'regdate': int(time.time()),
'jid': request.bare_jid,
'interface': 'redeye',
'settings.servicejid': request.to.split('/',1)[0],
})
if not (yield objs.User.find_one({'name':name})):
_ = yield user.save()
defer.returnValue(
dict(ok=True,desc='We registered you as %s.' % (name,))
)
else:
defer.returnValue(
dict(ok=True,desc='This username is already taken')
)
| <commit_before># -*- coding: utf-8 -*-
#from twisted.words.xish import domish
from base import *
import random
import time
import bnw_core.bnw_objects as objs
def _(s,user):
return s
from uuid import uuid4
import re
@check_arg(name=USER_RE)
@defer.inlineCallbacks
def cmd_register(request,name=""):
""" Регистрация """
if request.user:
defer.returnValue(
dict(ok=False,
desc=u'You are already registered as %s' % (request.user['name'],),
)
)
else:
name=name.lower()[:128]
if name=='anonymous':
defer.returnValue(
dict(ok=False,desc=u'You aren''t anonymous.')
)
user=objs.User({ 'id': uuid4().hex,
'name': name,
'login_key': uuid4().hex,
'regdate': int(time.time()),
'jid': request.bare_jid,
'interface': 'redeye',
})
if not (yield objs.User.find_one({'name':name})):
_ = yield user.save()
defer.returnValue(
dict(ok=True,desc='We registered you as %s.' % (name,))
)
else:
defer.returnValue(
dict(ok=True,desc='This username is already taken')
)
<commit_msg>Set servicejid setting on register<commit_after> | # -*- coding: utf-8 -*-
#from twisted.words.xish import domish
from base import *
import random
import time
import bnw_core.bnw_objects as objs
def _(s,user):
return s
from uuid import uuid4
import re
@check_arg(name=USER_RE)
@defer.inlineCallbacks
def cmd_register(request, name=""):
    """Register a new user account for the requesting JID.

    Returns a dict with ``ok`` (success flag) and ``desc`` (human-readable
    message). Registration fails if the requester is already registered,
    asks for the reserved name 'anonymous', or the name is taken.
    """
    if request.user:
        defer.returnValue(dict(
            ok=False,
            desc=u'You are already registered as %s' % (request.user['name'],),
        ))
    else:
        # Usernames are case-insensitive and capped at 128 characters.
        name = name.lower()[:128]
        if name == 'anonymous':
            # Bug fix: the original 'aren''t' was two adjacent literals
            # that concatenated to "arent" - the apostrophe was lost.
            defer.returnValue(
                dict(ok=False, desc=u"You aren't anonymous.")
            )
        user = objs.User({
            'id': uuid4().hex,
            'name': name,
            'login_key': uuid4().hex,
            'regdate': int(time.time()),
            'jid': request.bare_jid,
            'interface': 'redeye',
            'settings.servicejid': request.to.split('/', 1)[0],
        })
        if not (yield objs.User.find_one({'name': name})):
            _ = yield user.save()
            defer.returnValue(
                dict(ok=True, desc='We registered you as %s.' % (name,))
            )
        else:
            # Bug fix: a taken username is a failure, so ok must be False
            # (the original reported ok=True with a failure message).
            defer.returnValue(
                dict(ok=False, desc='This username is already taken')
            )
| # -*- coding: utf-8 -*-
#from twisted.words.xish import domish
from base import *
import random
import time
import bnw_core.bnw_objects as objs
def _(s,user):
return s
from uuid import uuid4
import re
@check_arg(name=USER_RE)
@defer.inlineCallbacks
def cmd_register(request,name=""):
""" Регистрация """
if request.user:
defer.returnValue(
dict(ok=False,
desc=u'You are already registered as %s' % (request.user['name'],),
)
)
else:
name=name.lower()[:128]
if name=='anonymous':
defer.returnValue(
dict(ok=False,desc=u'You aren''t anonymous.')
)
user=objs.User({ 'id': uuid4().hex,
'name': name,
'login_key': uuid4().hex,
'regdate': int(time.time()),
'jid': request.bare_jid,
'interface': 'redeye',
})
if not (yield objs.User.find_one({'name':name})):
_ = yield user.save()
defer.returnValue(
dict(ok=True,desc='We registered you as %s.' % (name,))
)
else:
defer.returnValue(
dict(ok=True,desc='This username is already taken')
)
Set servicejid setting on register# -*- coding: utf-8 -*-
#from twisted.words.xish import domish
from base import *
import random
import time
import bnw_core.bnw_objects as objs
def _(s,user):
return s
from uuid import uuid4
import re
@check_arg(name=USER_RE)
@defer.inlineCallbacks
def cmd_register(request,name=""):
""" Регистрация """
if request.user:
defer.returnValue(
dict(ok=False,
desc=u'You are already registered as %s' % (request.user['name'],),
)
)
else:
name=name.lower()[:128]
if name=='anonymous':
defer.returnValue(
dict(ok=False,desc=u'You aren''t anonymous.')
)
user=objs.User({ 'id': uuid4().hex,
'name': name,
'login_key': uuid4().hex,
'regdate': int(time.time()),
'jid': request.bare_jid,
'interface': 'redeye',
'settings.servicejid': request.to.split('/',1)[0],
})
if not (yield objs.User.find_one({'name':name})):
_ = yield user.save()
defer.returnValue(
dict(ok=True,desc='We registered you as %s.' % (name,))
)
else:
defer.returnValue(
dict(ok=True,desc='This username is already taken')
)
| <commit_before># -*- coding: utf-8 -*-
#from twisted.words.xish import domish
from base import *
import random
import time
import bnw_core.bnw_objects as objs
def _(s,user):
return s
from uuid import uuid4
import re
@check_arg(name=USER_RE)
@defer.inlineCallbacks
def cmd_register(request,name=""):
""" Регистрация """
if request.user:
defer.returnValue(
dict(ok=False,
desc=u'You are already registered as %s' % (request.user['name'],),
)
)
else:
name=name.lower()[:128]
if name=='anonymous':
defer.returnValue(
dict(ok=False,desc=u'You aren''t anonymous.')
)
user=objs.User({ 'id': uuid4().hex,
'name': name,
'login_key': uuid4().hex,
'regdate': int(time.time()),
'jid': request.bare_jid,
'interface': 'redeye',
})
if not (yield objs.User.find_one({'name':name})):
_ = yield user.save()
defer.returnValue(
dict(ok=True,desc='We registered you as %s.' % (name,))
)
else:
defer.returnValue(
dict(ok=True,desc='This username is already taken')
)
<commit_msg>Set servicejid setting on register<commit_after># -*- coding: utf-8 -*-
#from twisted.words.xish import domish
from base import *
import random
import time
import bnw_core.bnw_objects as objs
def _(s,user):
return s
from uuid import uuid4
import re
@check_arg(name=USER_RE)
@defer.inlineCallbacks
def cmd_register(request,name=""):
""" Регистрация """
if request.user:
defer.returnValue(
dict(ok=False,
desc=u'You are already registered as %s' % (request.user['name'],),
)
)
else:
name=name.lower()[:128]
if name=='anonymous':
defer.returnValue(
dict(ok=False,desc=u'You aren''t anonymous.')
)
user=objs.User({ 'id': uuid4().hex,
'name': name,
'login_key': uuid4().hex,
'regdate': int(time.time()),
'jid': request.bare_jid,
'interface': 'redeye',
'settings.servicejid': request.to.split('/',1)[0],
})
if not (yield objs.User.find_one({'name':name})):
_ = yield user.save()
defer.returnValue(
dict(ok=True,desc='We registered you as %s.' % (name,))
)
else:
defer.returnValue(
dict(ok=True,desc='This username is already taken')
)
|
80ad2451dabadeeeee6bb632ce6681fac3682b95 | boris/settings/local_template.py | boris/settings/local_template.py | from logging import LOGGING
DEBUG = True
# Don't log to sentry on local.
LOGGING['root']['handlers'] = ['console']
LOGGING['loggers']['django.request'] = {
'level': 'ERROR',
'handlers': ['console'],
'propagate': False,
}
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
| DEBUG = True
# Don't log to sentry on local.
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'root': {
'level': 'WARNING',
'handlers': ['console'],
},
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s'
},
},
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'verbose'
}
}
}
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
| Fix up logging on local. | Fix up logging on local.
| Python | mit | fragaria/BorIS,fragaria/BorIS,fragaria/BorIS | from logging import LOGGING
DEBUG = True
# Don't log to sentry on local.
LOGGING['root']['handlers'] = ['console']
LOGGING['loggers']['django.request'] = {
'level': 'ERROR',
'handlers': ['console'],
'propagate': False,
}
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
Fix up logging on local. | DEBUG = True
# Don't log to sentry on local.
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'root': {
'level': 'WARNING',
'handlers': ['console'],
},
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s'
},
},
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'verbose'
}
}
}
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
| <commit_before>from logging import LOGGING
DEBUG = True
# Don't log to sentry on local.
LOGGING['root']['handlers'] = ['console']
LOGGING['loggers']['django.request'] = {
'level': 'ERROR',
'handlers': ['console'],
'propagate': False,
}
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
<commit_msg>Fix up logging on local.<commit_after> | DEBUG = True
# Don't log to sentry on local.
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'root': {
'level': 'WARNING',
'handlers': ['console'],
},
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s'
},
},
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'verbose'
}
}
}
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
| from logging import LOGGING
DEBUG = True
# Don't log to sentry on local.
LOGGING['root']['handlers'] = ['console']
LOGGING['loggers']['django.request'] = {
'level': 'ERROR',
'handlers': ['console'],
'propagate': False,
}
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
Fix up logging on local.DEBUG = True
# Don't log to sentry on local.
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'root': {
'level': 'WARNING',
'handlers': ['console'],
},
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s'
},
},
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'verbose'
}
}
}
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
| <commit_before>from logging import LOGGING
DEBUG = True
# Don't log to sentry on local.
LOGGING['root']['handlers'] = ['console']
LOGGING['loggers']['django.request'] = {
'level': 'ERROR',
'handlers': ['console'],
'propagate': False,
}
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
<commit_msg>Fix up logging on local.<commit_after>DEBUG = True
# Don't log to sentry on local.
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'root': {
'level': 'WARNING',
'handlers': ['console'],
},
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s'
},
},
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'verbose'
}
}
}
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
|
eda0dc8bdc89e815ff21be91ade9d53f0c13721a | mockito/tests/numpy_test.py | mockito/tests/numpy_test.py | import mockito
from mockito import when, patch
import numpy as np
from . import module
def xcompare(a, b):
if isinstance(a, mockito.matchers.Matcher):
return a.matches(b)
return np.array_equal(a, b)
class TestEnsureNumpyWorks:
def testEnsureNumpyArrayAllowedWhenStubbing(self):
array = np.array([1, 2, 3])
with patch(mockito.invocation.MatchingInvocation.compare, xcompare):
when(module).one_arg(array).thenReturn('yep')
assert module.one_arg(array) == 'yep'
def testEnsureNumpyArrayAllowedWhenCalling(self):
array = np.array([1, 2, 3])
when(module).one_arg(Ellipsis).thenReturn('yep')
assert module.one_arg(array) == 'yep'
| import mockito
from mockito import when, patch
import pytest
import numpy as np
from . import module
pytestmark = pytest.mark.usefixtures("unstub")
def xcompare(a, b):
if isinstance(a, mockito.matchers.Matcher):
return a.matches(b)
return np.array_equal(a, b)
class TestEnsureNumpyWorks:
def testEnsureNumpyArrayAllowedWhenStubbing(self):
array = np.array([1, 2, 3])
when(module).one_arg(array).thenReturn('yep')
with patch(mockito.invocation.MatchingInvocation.compare, xcompare):
assert module.one_arg(array) == 'yep'
def testEnsureNumpyArrayAllowedWhenCalling(self):
array = np.array([1, 2, 3])
when(module).one_arg(Ellipsis).thenReturn('yep')
assert module.one_arg(array) == 'yep'
| Make numpy test clearer and ensure unstub | Make numpy test clearer and ensure unstub
| Python | mit | kaste/mockito-python | import mockito
from mockito import when, patch
import numpy as np
from . import module
def xcompare(a, b):
if isinstance(a, mockito.matchers.Matcher):
return a.matches(b)
return np.array_equal(a, b)
class TestEnsureNumpyWorks:
def testEnsureNumpyArrayAllowedWhenStubbing(self):
array = np.array([1, 2, 3])
with patch(mockito.invocation.MatchingInvocation.compare, xcompare):
when(module).one_arg(array).thenReturn('yep')
assert module.one_arg(array) == 'yep'
def testEnsureNumpyArrayAllowedWhenCalling(self):
array = np.array([1, 2, 3])
when(module).one_arg(Ellipsis).thenReturn('yep')
assert module.one_arg(array) == 'yep'
Make numpy test clearer and ensure unstub | import mockito
from mockito import when, patch
import pytest
import numpy as np
from . import module
pytestmark = pytest.mark.usefixtures("unstub")
def xcompare(a, b):
if isinstance(a, mockito.matchers.Matcher):
return a.matches(b)
return np.array_equal(a, b)
class TestEnsureNumpyWorks:
def testEnsureNumpyArrayAllowedWhenStubbing(self):
array = np.array([1, 2, 3])
when(module).one_arg(array).thenReturn('yep')
with patch(mockito.invocation.MatchingInvocation.compare, xcompare):
assert module.one_arg(array) == 'yep'
def testEnsureNumpyArrayAllowedWhenCalling(self):
array = np.array([1, 2, 3])
when(module).one_arg(Ellipsis).thenReturn('yep')
assert module.one_arg(array) == 'yep'
| <commit_before>import mockito
from mockito import when, patch
import numpy as np
from . import module
def xcompare(a, b):
if isinstance(a, mockito.matchers.Matcher):
return a.matches(b)
return np.array_equal(a, b)
class TestEnsureNumpyWorks:
def testEnsureNumpyArrayAllowedWhenStubbing(self):
array = np.array([1, 2, 3])
with patch(mockito.invocation.MatchingInvocation.compare, xcompare):
when(module).one_arg(array).thenReturn('yep')
assert module.one_arg(array) == 'yep'
def testEnsureNumpyArrayAllowedWhenCalling(self):
array = np.array([1, 2, 3])
when(module).one_arg(Ellipsis).thenReturn('yep')
assert module.one_arg(array) == 'yep'
<commit_msg>Make numpy test clearer and ensure unstub<commit_after> | import mockito
from mockito import when, patch
import pytest
import numpy as np
from . import module
pytestmark = pytest.mark.usefixtures("unstub")
def xcompare(a, b):
if isinstance(a, mockito.matchers.Matcher):
return a.matches(b)
return np.array_equal(a, b)
class TestEnsureNumpyWorks:
def testEnsureNumpyArrayAllowedWhenStubbing(self):
array = np.array([1, 2, 3])
when(module).one_arg(array).thenReturn('yep')
with patch(mockito.invocation.MatchingInvocation.compare, xcompare):
assert module.one_arg(array) == 'yep'
def testEnsureNumpyArrayAllowedWhenCalling(self):
array = np.array([1, 2, 3])
when(module).one_arg(Ellipsis).thenReturn('yep')
assert module.one_arg(array) == 'yep'
| import mockito
from mockito import when, patch
import numpy as np
from . import module
def xcompare(a, b):
if isinstance(a, mockito.matchers.Matcher):
return a.matches(b)
return np.array_equal(a, b)
class TestEnsureNumpyWorks:
def testEnsureNumpyArrayAllowedWhenStubbing(self):
array = np.array([1, 2, 3])
with patch(mockito.invocation.MatchingInvocation.compare, xcompare):
when(module).one_arg(array).thenReturn('yep')
assert module.one_arg(array) == 'yep'
def testEnsureNumpyArrayAllowedWhenCalling(self):
array = np.array([1, 2, 3])
when(module).one_arg(Ellipsis).thenReturn('yep')
assert module.one_arg(array) == 'yep'
Make numpy test clearer and ensure unstubimport mockito
from mockito import when, patch
import pytest
import numpy as np
from . import module
pytestmark = pytest.mark.usefixtures("unstub")
def xcompare(a, b):
if isinstance(a, mockito.matchers.Matcher):
return a.matches(b)
return np.array_equal(a, b)
class TestEnsureNumpyWorks:
def testEnsureNumpyArrayAllowedWhenStubbing(self):
array = np.array([1, 2, 3])
when(module).one_arg(array).thenReturn('yep')
with patch(mockito.invocation.MatchingInvocation.compare, xcompare):
assert module.one_arg(array) == 'yep'
def testEnsureNumpyArrayAllowedWhenCalling(self):
array = np.array([1, 2, 3])
when(module).one_arg(Ellipsis).thenReturn('yep')
assert module.one_arg(array) == 'yep'
| <commit_before>import mockito
from mockito import when, patch
import numpy as np
from . import module
def xcompare(a, b):
if isinstance(a, mockito.matchers.Matcher):
return a.matches(b)
return np.array_equal(a, b)
class TestEnsureNumpyWorks:
def testEnsureNumpyArrayAllowedWhenStubbing(self):
array = np.array([1, 2, 3])
with patch(mockito.invocation.MatchingInvocation.compare, xcompare):
when(module).one_arg(array).thenReturn('yep')
assert module.one_arg(array) == 'yep'
def testEnsureNumpyArrayAllowedWhenCalling(self):
array = np.array([1, 2, 3])
when(module).one_arg(Ellipsis).thenReturn('yep')
assert module.one_arg(array) == 'yep'
<commit_msg>Make numpy test clearer and ensure unstub<commit_after>import mockito
from mockito import when, patch
import pytest
import numpy as np
from . import module
pytestmark = pytest.mark.usefixtures("unstub")
def xcompare(a, b):
if isinstance(a, mockito.matchers.Matcher):
return a.matches(b)
return np.array_equal(a, b)
class TestEnsureNumpyWorks:
def testEnsureNumpyArrayAllowedWhenStubbing(self):
array = np.array([1, 2, 3])
when(module).one_arg(array).thenReturn('yep')
with patch(mockito.invocation.MatchingInvocation.compare, xcompare):
assert module.one_arg(array) == 'yep'
def testEnsureNumpyArrayAllowedWhenCalling(self):
array = np.array([1, 2, 3])
when(module).one_arg(Ellipsis).thenReturn('yep')
assert module.one_arg(array) == 'yep'
|
6df1f99592588f68c3aeac4c5808bde3f108be84 | owl2csv.py | owl2csv.py | import sys
import click
import json
import csv
from rdflib import Graph, Namespace, URIRef
def observe_dataset(dataset, query, prefixes):
g = Graph()
if prefixes:
prefixes = json.load(prefixes)
for name, url in prefixes.items():
g.bind(name, Namespace(url.strip('<>')))
g.parse(dataset)
return g, g.query(query.read())
def create_csv(graph, query_result, f):
def normalize_field(field):
if not field:
return None
elif isinstance(field, URIRef):
return field.n3(graph.namespace_manager)
return field
writer = csv.writer(f)
writer.writerow(query_result.vars)
for row in query_result:
writer.writerow(map(normalize_field, row))
@click.command()
@click.argument('dataset', type=click.File('r'))
@click.argument('query', type=click.File('r'))
@click.option('--prefixes', '-p', default=None, type=click.File('r'))
@click.option('--output', '-o', default=sys.stdout, type=click.File('w'))
def command(dataset, query, prefixes, output):
graph, query_result = observe_dataset(dataset, query, prefixes)
create_csv(graph, query_result, output)
if __name__ == '__main__':
command()
| import sys
import click
import json
import csv
from rdflib import Graph, Namespace, URIRef
def observe_dataset(datasets, query, prefixes):
g = Graph()
if prefixes:
prefixes = json.load(prefixes)
for name, url in prefixes.items():
g.bind(name, Namespace(url.strip('<>')))
for dataset in datasets:
g.parse(dataset)
return g, g.query(query.read())
def create_csv(graph, query_result, f):
def normalize_field(field):
if not field:
return None
elif isinstance(field, URIRef):
return field.n3(graph.namespace_manager)
return field
writer = csv.writer(f)
writer.writerow(query_result.vars)
for row in query_result:
writer.writerow(map(normalize_field, row))
@click.command()
@click.argument('datasets', type=click.File('r'), nargs=-1)
@click.argument('query', type=click.File('r'))
@click.option('--prefixes', '-p', default=None, type=click.File('r'))
@click.option('--output', '-o', default=sys.stdout, type=click.File('w'))
def command(datasets, query, prefixes, output):
graph, query_result = observe_dataset(datasets, query, prefixes)
create_csv(graph, query_result, output)
if __name__ == '__main__':
command()
| Add support for multiple datasets | Add support for multiple datasets
| Python | mit | Guhogu/owl2csv | import sys
import click
import json
import csv
from rdflib import Graph, Namespace, URIRef
def observe_dataset(dataset, query, prefixes):
g = Graph()
if prefixes:
prefixes = json.load(prefixes)
for name, url in prefixes.items():
g.bind(name, Namespace(url.strip('<>')))
g.parse(dataset)
return g, g.query(query.read())
def create_csv(graph, query_result, f):
def normalize_field(field):
if not field:
return None
elif isinstance(field, URIRef):
return field.n3(graph.namespace_manager)
return field
writer = csv.writer(f)
writer.writerow(query_result.vars)
for row in query_result:
writer.writerow(map(normalize_field, row))
@click.command()
@click.argument('dataset', type=click.File('r'))
@click.argument('query', type=click.File('r'))
@click.option('--prefixes', '-p', default=None, type=click.File('r'))
@click.option('--output', '-o', default=sys.stdout, type=click.File('w'))
def command(dataset, query, prefixes, output):
graph, query_result = observe_dataset(dataset, query, prefixes)
create_csv(graph, query_result, output)
if __name__ == '__main__':
command()
Add support for multiple datasets | import sys
import click
import json
import csv
from rdflib import Graph, Namespace, URIRef
def observe_dataset(datasets, query, prefixes):
g = Graph()
if prefixes:
prefixes = json.load(prefixes)
for name, url in prefixes.items():
g.bind(name, Namespace(url.strip('<>')))
for dataset in datasets:
g.parse(dataset)
return g, g.query(query.read())
def create_csv(graph, query_result, f):
def normalize_field(field):
if not field:
return None
elif isinstance(field, URIRef):
return field.n3(graph.namespace_manager)
return field
writer = csv.writer(f)
writer.writerow(query_result.vars)
for row in query_result:
writer.writerow(map(normalize_field, row))
@click.command()
@click.argument('datasets', type=click.File('r'), nargs=-1)
@click.argument('query', type=click.File('r'))
@click.option('--prefixes', '-p', default=None, type=click.File('r'))
@click.option('--output', '-o', default=sys.stdout, type=click.File('w'))
def command(datasets, query, prefixes, output):
graph, query_result = observe_dataset(datasets, query, prefixes)
create_csv(graph, query_result, output)
if __name__ == '__main__':
command()
| <commit_before>import sys
import click
import json
import csv
from rdflib import Graph, Namespace, URIRef
def observe_dataset(dataset, query, prefixes):
g = Graph()
if prefixes:
prefixes = json.load(prefixes)
for name, url in prefixes.items():
g.bind(name, Namespace(url.strip('<>')))
g.parse(dataset)
return g, g.query(query.read())
def create_csv(graph, query_result, f):
def normalize_field(field):
if not field:
return None
elif isinstance(field, URIRef):
return field.n3(graph.namespace_manager)
return field
writer = csv.writer(f)
writer.writerow(query_result.vars)
for row in query_result:
writer.writerow(map(normalize_field, row))
@click.command()
@click.argument('dataset', type=click.File('r'))
@click.argument('query', type=click.File('r'))
@click.option('--prefixes', '-p', default=None, type=click.File('r'))
@click.option('--output', '-o', default=sys.stdout, type=click.File('w'))
def command(dataset, query, prefixes, output):
graph, query_result = observe_dataset(dataset, query, prefixes)
create_csv(graph, query_result, output)
if __name__ == '__main__':
command()
<commit_msg>Add support for multiple datasets<commit_after> | import sys
import click
import json
import csv
from rdflib import Graph, Namespace, URIRef
def observe_dataset(datasets, query, prefixes):
g = Graph()
if prefixes:
prefixes = json.load(prefixes)
for name, url in prefixes.items():
g.bind(name, Namespace(url.strip('<>')))
for dataset in datasets:
g.parse(dataset)
return g, g.query(query.read())
def create_csv(graph, query_result, f):
def normalize_field(field):
if not field:
return None
elif isinstance(field, URIRef):
return field.n3(graph.namespace_manager)
return field
writer = csv.writer(f)
writer.writerow(query_result.vars)
for row in query_result:
writer.writerow(map(normalize_field, row))
@click.command()
@click.argument('datasets', type=click.File('r'), nargs=-1)
@click.argument('query', type=click.File('r'))
@click.option('--prefixes', '-p', default=None, type=click.File('r'))
@click.option('--output', '-o', default=sys.stdout, type=click.File('w'))
def command(datasets, query, prefixes, output):
graph, query_result = observe_dataset(datasets, query, prefixes)
create_csv(graph, query_result, output)
if __name__ == '__main__':
command()
| import sys
import click
import json
import csv
from rdflib import Graph, Namespace, URIRef
def observe_dataset(dataset, query, prefixes):
g = Graph()
if prefixes:
prefixes = json.load(prefixes)
for name, url in prefixes.items():
g.bind(name, Namespace(url.strip('<>')))
g.parse(dataset)
return g, g.query(query.read())
def create_csv(graph, query_result, f):
def normalize_field(field):
if not field:
return None
elif isinstance(field, URIRef):
return field.n3(graph.namespace_manager)
return field
writer = csv.writer(f)
writer.writerow(query_result.vars)
for row in query_result:
writer.writerow(map(normalize_field, row))
@click.command()
@click.argument('dataset', type=click.File('r'))
@click.argument('query', type=click.File('r'))
@click.option('--prefixes', '-p', default=None, type=click.File('r'))
@click.option('--output', '-o', default=sys.stdout, type=click.File('w'))
def command(dataset, query, prefixes, output):
graph, query_result = observe_dataset(dataset, query, prefixes)
create_csv(graph, query_result, output)
if __name__ == '__main__':
command()
Add support for multiple datasetsimport sys
import click
import json
import csv
from rdflib import Graph, Namespace, URIRef
def observe_dataset(datasets, query, prefixes):
g = Graph()
if prefixes:
prefixes = json.load(prefixes)
for name, url in prefixes.items():
g.bind(name, Namespace(url.strip('<>')))
for dataset in datasets:
g.parse(dataset)
return g, g.query(query.read())
def create_csv(graph, query_result, f):
def normalize_field(field):
if not field:
return None
elif isinstance(field, URIRef):
return field.n3(graph.namespace_manager)
return field
writer = csv.writer(f)
writer.writerow(query_result.vars)
for row in query_result:
writer.writerow(map(normalize_field, row))
@click.command()
@click.argument('datasets', type=click.File('r'), nargs=-1)
@click.argument('query', type=click.File('r'))
@click.option('--prefixes', '-p', default=None, type=click.File('r'))
@click.option('--output', '-o', default=sys.stdout, type=click.File('w'))
def command(datasets, query, prefixes, output):
graph, query_result = observe_dataset(datasets, query, prefixes)
create_csv(graph, query_result, output)
if __name__ == '__main__':
command()
| <commit_before>import sys
import click
import json
import csv
from rdflib import Graph, Namespace, URIRef
def observe_dataset(dataset, query, prefixes):
g = Graph()
if prefixes:
prefixes = json.load(prefixes)
for name, url in prefixes.items():
g.bind(name, Namespace(url.strip('<>')))
g.parse(dataset)
return g, g.query(query.read())
def create_csv(graph, query_result, f):
def normalize_field(field):
if not field:
return None
elif isinstance(field, URIRef):
return field.n3(graph.namespace_manager)
return field
writer = csv.writer(f)
writer.writerow(query_result.vars)
for row in query_result:
writer.writerow(map(normalize_field, row))
@click.command()
@click.argument('dataset', type=click.File('r'))
@click.argument('query', type=click.File('r'))
@click.option('--prefixes', '-p', default=None, type=click.File('r'))
@click.option('--output', '-o', default=sys.stdout, type=click.File('w'))
def command(dataset, query, prefixes, output):
graph, query_result = observe_dataset(dataset, query, prefixes)
create_csv(graph, query_result, output)
if __name__ == '__main__':
command()
<commit_msg>Add support for multiple datasets<commit_after>import sys
import click
import json
import csv
from rdflib import Graph, Namespace, URIRef
def observe_dataset(datasets, query, prefixes):
g = Graph()
if prefixes:
prefixes = json.load(prefixes)
for name, url in prefixes.items():
g.bind(name, Namespace(url.strip('<>')))
for dataset in datasets:
g.parse(dataset)
return g, g.query(query.read())
def create_csv(graph, query_result, f):
def normalize_field(field):
if not field:
return None
elif isinstance(field, URIRef):
return field.n3(graph.namespace_manager)
return field
writer = csv.writer(f)
writer.writerow(query_result.vars)
for row in query_result:
writer.writerow(map(normalize_field, row))
@click.command()
@click.argument('datasets', type=click.File('r'), nargs=-1)
@click.argument('query', type=click.File('r'))
@click.option('--prefixes', '-p', default=None, type=click.File('r'))
@click.option('--output', '-o', default=sys.stdout, type=click.File('w'))
def command(datasets, query, prefixes, output):
graph, query_result = observe_dataset(datasets, query, prefixes)
create_csv(graph, query_result, output)
if __name__ == '__main__':
command()
|
17cad98d95eeb1c5ae2748fbad0621d0ca460e8b | PrettyJson.py | PrettyJson.py | import sublime
import sublime_plugin
import simplejson as json
from simplejson import OrderedDict
import decimal
s = sublime.load_settings("Pretty JSON.sublime-settings")
class PrettyjsonCommand(sublime_plugin.TextCommand):
""" Pretty Print JSON
"""
def run(self, edit):
for region in self.view.sel():
# If no selection, use the entire file as the selection
if region.empty() and s.get("use_entire_file_if_no_selection"):
selection = sublime.Region(0, self.view.size())
else:
selection = region
try:
obj = json.loads(self.view.substr(selection),
object_pairs_hook=OrderedDict,
parse_float=decimal.Decimal)
self.view.replace(edit, selection, json.dumps(obj,
indent=s.get("indent", 2),
ensure_ascii=s.get("ensure_ascii", False),
sort_keys=s.get("sort_keys", False),
separators=(',', ': '),
use_decimal=True))
except Exception:
import sys
exc = sys.exc_info()[1]
sublime.status_message(str(exc))
| import sublime
import sublime_plugin
import PrettyJSON.simplejson as json
from PrettyJSON.simplejson import OrderedDict
import decimal
s = sublime.load_settings("Pretty JSON.sublime-settings")
class PrettyjsonCommand(sublime_plugin.TextCommand):
""" Pretty Print JSON
"""
def run(self, edit):
for region in self.view.sel():
# If no selection, use the entire file as the selection
if region.empty() and s.get("use_entire_file_if_no_selection"):
selection = sublime.Region(0, self.view.size())
else:
selection = region
try:
obj = json.loads(self.view.substr(selection),
object_pairs_hook=OrderedDict,
parse_float=decimal.Decimal)
self.view.replace(edit, selection, json.dumps(obj,
indent=s.get("indent", 2),
ensure_ascii=s.get("ensure_ascii", False),
sort_keys=s.get("sort_keys", False),
separators=(',', ': '),
use_decimal=True))
except Exception:
import sys
exc = sys.exc_info()[1]
sublime.status_message(str(exc))
| Prepend simplejson import with 'module' name | Prepend simplejson import with 'module' name
| Python | mit | dzhibas/SublimePrettyJson | import sublime
import sublime_plugin
import simplejson as json
from simplejson import OrderedDict
import decimal
s = sublime.load_settings("Pretty JSON.sublime-settings")
class PrettyjsonCommand(sublime_plugin.TextCommand):
""" Pretty Print JSON
"""
def run(self, edit):
for region in self.view.sel():
# If no selection, use the entire file as the selection
if region.empty() and s.get("use_entire_file_if_no_selection"):
selection = sublime.Region(0, self.view.size())
else:
selection = region
try:
obj = json.loads(self.view.substr(selection),
object_pairs_hook=OrderedDict,
parse_float=decimal.Decimal)
self.view.replace(edit, selection, json.dumps(obj,
indent=s.get("indent", 2),
ensure_ascii=s.get("ensure_ascii", False),
sort_keys=s.get("sort_keys", False),
separators=(',', ': '),
use_decimal=True))
except Exception:
import sys
exc = sys.exc_info()[1]
sublime.status_message(str(exc))
Prepend simplejson import with 'module' name | import sublime
import sublime_plugin
import PrettyJSON.simplejson as json
from PrettyJSON.simplejson import OrderedDict
import decimal
s = sublime.load_settings("Pretty JSON.sublime-settings")
class PrettyjsonCommand(sublime_plugin.TextCommand):
""" Pretty Print JSON
"""
def run(self, edit):
for region in self.view.sel():
# If no selection, use the entire file as the selection
if region.empty() and s.get("use_entire_file_if_no_selection"):
selection = sublime.Region(0, self.view.size())
else:
selection = region
try:
obj = json.loads(self.view.substr(selection),
object_pairs_hook=OrderedDict,
parse_float=decimal.Decimal)
self.view.replace(edit, selection, json.dumps(obj,
indent=s.get("indent", 2),
ensure_ascii=s.get("ensure_ascii", False),
sort_keys=s.get("sort_keys", False),
separators=(',', ': '),
use_decimal=True))
except Exception:
import sys
exc = sys.exc_info()[1]
sublime.status_message(str(exc))
| <commit_before>import sublime
import sublime_plugin
import simplejson as json
from simplejson import OrderedDict
import decimal
s = sublime.load_settings("Pretty JSON.sublime-settings")
class PrettyjsonCommand(sublime_plugin.TextCommand):
""" Pretty Print JSON
"""
def run(self, edit):
for region in self.view.sel():
# If no selection, use the entire file as the selection
if region.empty() and s.get("use_entire_file_if_no_selection"):
selection = sublime.Region(0, self.view.size())
else:
selection = region
try:
obj = json.loads(self.view.substr(selection),
object_pairs_hook=OrderedDict,
parse_float=decimal.Decimal)
self.view.replace(edit, selection, json.dumps(obj,
indent=s.get("indent", 2),
ensure_ascii=s.get("ensure_ascii", False),
sort_keys=s.get("sort_keys", False),
separators=(',', ': '),
use_decimal=True))
except Exception:
import sys
exc = sys.exc_info()[1]
sublime.status_message(str(exc))
<commit_msg>Prepend simplejson import with 'module' name<commit_after> | import sublime
import sublime_plugin
import PrettyJSON.simplejson as json
from PrettyJSON.simplejson import OrderedDict
import decimal
s = sublime.load_settings("Pretty JSON.sublime-settings")
class PrettyjsonCommand(sublime_plugin.TextCommand):
""" Pretty Print JSON
"""
def run(self, edit):
for region in self.view.sel():
# If no selection, use the entire file as the selection
if region.empty() and s.get("use_entire_file_if_no_selection"):
selection = sublime.Region(0, self.view.size())
else:
selection = region
try:
obj = json.loads(self.view.substr(selection),
object_pairs_hook=OrderedDict,
parse_float=decimal.Decimal)
self.view.replace(edit, selection, json.dumps(obj,
indent=s.get("indent", 2),
ensure_ascii=s.get("ensure_ascii", False),
sort_keys=s.get("sort_keys", False),
separators=(',', ': '),
use_decimal=True))
except Exception:
import sys
exc = sys.exc_info()[1]
sublime.status_message(str(exc))
| import sublime
import sublime_plugin
import simplejson as json
from simplejson import OrderedDict
import decimal
s = sublime.load_settings("Pretty JSON.sublime-settings")
class PrettyjsonCommand(sublime_plugin.TextCommand):
""" Pretty Print JSON
"""
def run(self, edit):
for region in self.view.sel():
# If no selection, use the entire file as the selection
if region.empty() and s.get("use_entire_file_if_no_selection"):
selection = sublime.Region(0, self.view.size())
else:
selection = region
try:
obj = json.loads(self.view.substr(selection),
object_pairs_hook=OrderedDict,
parse_float=decimal.Decimal)
self.view.replace(edit, selection, json.dumps(obj,
indent=s.get("indent", 2),
ensure_ascii=s.get("ensure_ascii", False),
sort_keys=s.get("sort_keys", False),
separators=(',', ': '),
use_decimal=True))
except Exception:
import sys
exc = sys.exc_info()[1]
sublime.status_message(str(exc))
Prepend simplejson import with 'module' nameimport sublime
import sublime_plugin
import PrettyJSON.simplejson as json
from PrettyJSON.simplejson import OrderedDict
import decimal
s = sublime.load_settings("Pretty JSON.sublime-settings")
class PrettyjsonCommand(sublime_plugin.TextCommand):
""" Pretty Print JSON
"""
def run(self, edit):
for region in self.view.sel():
# If no selection, use the entire file as the selection
if region.empty() and s.get("use_entire_file_if_no_selection"):
selection = sublime.Region(0, self.view.size())
else:
selection = region
try:
obj = json.loads(self.view.substr(selection),
object_pairs_hook=OrderedDict,
parse_float=decimal.Decimal)
self.view.replace(edit, selection, json.dumps(obj,
indent=s.get("indent", 2),
ensure_ascii=s.get("ensure_ascii", False),
sort_keys=s.get("sort_keys", False),
separators=(',', ': '),
use_decimal=True))
except Exception:
import sys
exc = sys.exc_info()[1]
sublime.status_message(str(exc))
| <commit_before>import sublime
import sublime_plugin
import simplejson as json
from simplejson import OrderedDict
import decimal
s = sublime.load_settings("Pretty JSON.sublime-settings")
class PrettyjsonCommand(sublime_plugin.TextCommand):
""" Pretty Print JSON
"""
def run(self, edit):
for region in self.view.sel():
# If no selection, use the entire file as the selection
if region.empty() and s.get("use_entire_file_if_no_selection"):
selection = sublime.Region(0, self.view.size())
else:
selection = region
try:
obj = json.loads(self.view.substr(selection),
object_pairs_hook=OrderedDict,
parse_float=decimal.Decimal)
self.view.replace(edit, selection, json.dumps(obj,
indent=s.get("indent", 2),
ensure_ascii=s.get("ensure_ascii", False),
sort_keys=s.get("sort_keys", False),
separators=(',', ': '),
use_decimal=True))
except Exception:
import sys
exc = sys.exc_info()[1]
sublime.status_message(str(exc))
<commit_msg>Prepend simplejson import with 'module' name<commit_after>import sublime
import sublime_plugin
import PrettyJSON.simplejson as json
from PrettyJSON.simplejson import OrderedDict
import decimal
s = sublime.load_settings("Pretty JSON.sublime-settings")
class PrettyjsonCommand(sublime_plugin.TextCommand):
""" Pretty Print JSON
"""
def run(self, edit):
for region in self.view.sel():
# If no selection, use the entire file as the selection
if region.empty() and s.get("use_entire_file_if_no_selection"):
selection = sublime.Region(0, self.view.size())
else:
selection = region
try:
obj = json.loads(self.view.substr(selection),
object_pairs_hook=OrderedDict,
parse_float=decimal.Decimal)
self.view.replace(edit, selection, json.dumps(obj,
indent=s.get("indent", 2),
ensure_ascii=s.get("ensure_ascii", False),
sort_keys=s.get("sort_keys", False),
separators=(',', ': '),
use_decimal=True))
except Exception:
import sys
exc = sys.exc_info()[1]
sublime.status_message(str(exc))
|
43bae84b1359d56ad150b49b38c2f8d400b05af2 | opps/core/cache/managers.py | opps/core/cache/managers.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db import models
from django.core.cache import cache
from django.conf import settings
class CacheManager(models.Manager):
def __cache_key(self, id):
return u'{}:{}:{}'.format(settings.CACHE_PREFIX,
self.model._meta.db_table,
id)
def get(self, *args, **kwargs):
id = repr(kwargs)
pointer_key = self.__cache_key(id)
model_key = cache.get(pointer_key)
if model_key is not None:
model = cache.get(model_key)
if model is not None:
return model
model = super(CacheManager, self).get(*args, **kwargs)
if not model_key:
model_key = self.__cache_key(model, model.pk)
cache.set(pointer_key, model_key, settings.CACHE_EXPIRE)
cache.set(model_key, model, settings.CACHE_EXPIRE)
return model
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db import models
from django.core.cache import cache
from django.conf import settings
def _cache_key(model, id):
return u'{}:{}:{}'.format(settings.CACHE_PREFIX,
model._meta.db_table,
id)
class CacheManager(models.Manager):
def get(self, *args, **kwargs):
id = repr(kwargs)
pointer_key = _cache_key(self.model, id)
model_key = cache.get(pointer_key)
if model_key is not None:
model = cache.get(model_key)
if model is not None:
return model
model = super(CacheManager, self).get(*args, **kwargs)
if not model_key:
model_key = _cache_key(model, model.pk)
cache.set(pointer_key, model_key, settings.CACHE_EXPIRE)
cache.set(model_key, model, settings.CACHE_EXPIRE)
return model
| Fix cache key set, on core cache | Fix cache key set, on core cache
| Python | mit | jeanmask/opps,williamroot/opps,opps/opps,jeanmask/opps,YACOWS/opps,jeanmask/opps,jeanmask/opps,williamroot/opps,williamroot/opps,YACOWS/opps,YACOWS/opps,williamroot/opps,YACOWS/opps,opps/opps,opps/opps,opps/opps | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db import models
from django.core.cache import cache
from django.conf import settings
class CacheManager(models.Manager):
def __cache_key(self, id):
return u'{}:{}:{}'.format(settings.CACHE_PREFIX,
self.model._meta.db_table,
id)
def get(self, *args, **kwargs):
id = repr(kwargs)
pointer_key = self.__cache_key(id)
model_key = cache.get(pointer_key)
if model_key is not None:
model = cache.get(model_key)
if model is not None:
return model
model = super(CacheManager, self).get(*args, **kwargs)
if not model_key:
model_key = self.__cache_key(model, model.pk)
cache.set(pointer_key, model_key, settings.CACHE_EXPIRE)
cache.set(model_key, model, settings.CACHE_EXPIRE)
return model
Fix cache key set, on core cache | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db import models
from django.core.cache import cache
from django.conf import settings
def _cache_key(model, id):
return u'{}:{}:{}'.format(settings.CACHE_PREFIX,
model._meta.db_table,
id)
class CacheManager(models.Manager):
def get(self, *args, **kwargs):
id = repr(kwargs)
pointer_key = _cache_key(self.model, id)
model_key = cache.get(pointer_key)
if model_key is not None:
model = cache.get(model_key)
if model is not None:
return model
model = super(CacheManager, self).get(*args, **kwargs)
if not model_key:
model_key = _cache_key(model, model.pk)
cache.set(pointer_key, model_key, settings.CACHE_EXPIRE)
cache.set(model_key, model, settings.CACHE_EXPIRE)
return model
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db import models
from django.core.cache import cache
from django.conf import settings
class CacheManager(models.Manager):
def __cache_key(self, id):
return u'{}:{}:{}'.format(settings.CACHE_PREFIX,
self.model._meta.db_table,
id)
def get(self, *args, **kwargs):
id = repr(kwargs)
pointer_key = self.__cache_key(id)
model_key = cache.get(pointer_key)
if model_key is not None:
model = cache.get(model_key)
if model is not None:
return model
model = super(CacheManager, self).get(*args, **kwargs)
if not model_key:
model_key = self.__cache_key(model, model.pk)
cache.set(pointer_key, model_key, settings.CACHE_EXPIRE)
cache.set(model_key, model, settings.CACHE_EXPIRE)
return model
<commit_msg>Fix cache key set, on core cache<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db import models
from django.core.cache import cache
from django.conf import settings
def _cache_key(model, id):
return u'{}:{}:{}'.format(settings.CACHE_PREFIX,
model._meta.db_table,
id)
class CacheManager(models.Manager):
def get(self, *args, **kwargs):
id = repr(kwargs)
pointer_key = _cache_key(self.model, id)
model_key = cache.get(pointer_key)
if model_key is not None:
model = cache.get(model_key)
if model is not None:
return model
model = super(CacheManager, self).get(*args, **kwargs)
if not model_key:
model_key = _cache_key(model, model.pk)
cache.set(pointer_key, model_key, settings.CACHE_EXPIRE)
cache.set(model_key, model, settings.CACHE_EXPIRE)
return model
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db import models
from django.core.cache import cache
from django.conf import settings
class CacheManager(models.Manager):
def __cache_key(self, id):
return u'{}:{}:{}'.format(settings.CACHE_PREFIX,
self.model._meta.db_table,
id)
def get(self, *args, **kwargs):
id = repr(kwargs)
pointer_key = self.__cache_key(id)
model_key = cache.get(pointer_key)
if model_key is not None:
model = cache.get(model_key)
if model is not None:
return model
model = super(CacheManager, self).get(*args, **kwargs)
if not model_key:
model_key = self.__cache_key(model, model.pk)
cache.set(pointer_key, model_key, settings.CACHE_EXPIRE)
cache.set(model_key, model, settings.CACHE_EXPIRE)
return model
Fix cache key set, on core cache#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db import models
from django.core.cache import cache
from django.conf import settings
def _cache_key(model, id):
return u'{}:{}:{}'.format(settings.CACHE_PREFIX,
model._meta.db_table,
id)
class CacheManager(models.Manager):
def get(self, *args, **kwargs):
id = repr(kwargs)
pointer_key = _cache_key(self.model, id)
model_key = cache.get(pointer_key)
if model_key is not None:
model = cache.get(model_key)
if model is not None:
return model
model = super(CacheManager, self).get(*args, **kwargs)
if not model_key:
model_key = _cache_key(model, model.pk)
cache.set(pointer_key, model_key, settings.CACHE_EXPIRE)
cache.set(model_key, model, settings.CACHE_EXPIRE)
return model
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db import models
from django.core.cache import cache
from django.conf import settings
class CacheManager(models.Manager):
def __cache_key(self, id):
return u'{}:{}:{}'.format(settings.CACHE_PREFIX,
self.model._meta.db_table,
id)
def get(self, *args, **kwargs):
id = repr(kwargs)
pointer_key = self.__cache_key(id)
model_key = cache.get(pointer_key)
if model_key is not None:
model = cache.get(model_key)
if model is not None:
return model
model = super(CacheManager, self).get(*args, **kwargs)
if not model_key:
model_key = self.__cache_key(model, model.pk)
cache.set(pointer_key, model_key, settings.CACHE_EXPIRE)
cache.set(model_key, model, settings.CACHE_EXPIRE)
return model
<commit_msg>Fix cache key set, on core cache<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db import models
from django.core.cache import cache
from django.conf import settings
def _cache_key(model, id):
return u'{}:{}:{}'.format(settings.CACHE_PREFIX,
model._meta.db_table,
id)
class CacheManager(models.Manager):
def get(self, *args, **kwargs):
id = repr(kwargs)
pointer_key = _cache_key(self.model, id)
model_key = cache.get(pointer_key)
if model_key is not None:
model = cache.get(model_key)
if model is not None:
return model
model = super(CacheManager, self).get(*args, **kwargs)
if not model_key:
model_key = _cache_key(model, model.pk)
cache.set(pointer_key, model_key, settings.CACHE_EXPIRE)
cache.set(model_key, model, settings.CACHE_EXPIRE)
return model
|
e3369232014adf29f78975ff102f8e3aec51b81a | corgi/pandas_utils.py | corgi/pandas_utils.py | import math
import numpy as np
def remove_single_value_columns(df):
drop_ix = df.apply(pd.Series.value_counts,
normalize=True,
axis=0).max() == 1
drop_cols = df.columns[drop_ix]
df = df.drop(drop_cols, axis=1)
return df
def sample(df, sample_percent=2e-2):
sample_n = math.floor(len(df) * sample_percent)
rows = np.random.choice(df.shape[0], sample_n)
return df.ix[rows]
def sample_columns(df, sample_percent=0.5):
df = sample(df.T, sample_percent).T
return df
| import math
import numpy as np
def remove_single_value_columns(df):
drop_ix = df.apply(pd.Series.value_counts,
normalize=True,
axis=0).max() == 1
drop_cols = df.columns[drop_ix]
df = df.drop(drop_cols, axis=1)
return df
| Remove pandas sample utils, these are built into pandas | Remove pandas sample utils, these are built into pandas
| Python | mit | log0ymxm/corgi | import math
import numpy as np
def remove_single_value_columns(df):
drop_ix = df.apply(pd.Series.value_counts,
normalize=True,
axis=0).max() == 1
drop_cols = df.columns[drop_ix]
df = df.drop(drop_cols, axis=1)
return df
def sample(df, sample_percent=2e-2):
sample_n = math.floor(len(df) * sample_percent)
rows = np.random.choice(df.shape[0], sample_n)
return df.ix[rows]
def sample_columns(df, sample_percent=0.5):
df = sample(df.T, sample_percent).T
return df
Remove pandas sample utils, these are built into pandas | import math
import numpy as np
def remove_single_value_columns(df):
drop_ix = df.apply(pd.Series.value_counts,
normalize=True,
axis=0).max() == 1
drop_cols = df.columns[drop_ix]
df = df.drop(drop_cols, axis=1)
return df
| <commit_before>import math
import numpy as np
def remove_single_value_columns(df):
drop_ix = df.apply(pd.Series.value_counts,
normalize=True,
axis=0).max() == 1
drop_cols = df.columns[drop_ix]
df = df.drop(drop_cols, axis=1)
return df
def sample(df, sample_percent=2e-2):
sample_n = math.floor(len(df) * sample_percent)
rows = np.random.choice(df.shape[0], sample_n)
return df.ix[rows]
def sample_columns(df, sample_percent=0.5):
df = sample(df.T, sample_percent).T
return df
<commit_msg>Remove pandas sample utils, these are built into pandas<commit_after> | import math
import numpy as np
def remove_single_value_columns(df):
drop_ix = df.apply(pd.Series.value_counts,
normalize=True,
axis=0).max() == 1
drop_cols = df.columns[drop_ix]
df = df.drop(drop_cols, axis=1)
return df
| import math
import numpy as np
def remove_single_value_columns(df):
drop_ix = df.apply(pd.Series.value_counts,
normalize=True,
axis=0).max() == 1
drop_cols = df.columns[drop_ix]
df = df.drop(drop_cols, axis=1)
return df
def sample(df, sample_percent=2e-2):
sample_n = math.floor(len(df) * sample_percent)
rows = np.random.choice(df.shape[0], sample_n)
return df.ix[rows]
def sample_columns(df, sample_percent=0.5):
df = sample(df.T, sample_percent).T
return df
Remove pandas sample utils, these are built into pandasimport math
import numpy as np
def remove_single_value_columns(df):
drop_ix = df.apply(pd.Series.value_counts,
normalize=True,
axis=0).max() == 1
drop_cols = df.columns[drop_ix]
df = df.drop(drop_cols, axis=1)
return df
| <commit_before>import math
import numpy as np
def remove_single_value_columns(df):
drop_ix = df.apply(pd.Series.value_counts,
normalize=True,
axis=0).max() == 1
drop_cols = df.columns[drop_ix]
df = df.drop(drop_cols, axis=1)
return df
def sample(df, sample_percent=2e-2):
sample_n = math.floor(len(df) * sample_percent)
rows = np.random.choice(df.shape[0], sample_n)
return df.ix[rows]
def sample_columns(df, sample_percent=0.5):
df = sample(df.T, sample_percent).T
return df
<commit_msg>Remove pandas sample utils, these are built into pandas<commit_after>import math
import numpy as np
def remove_single_value_columns(df):
drop_ix = df.apply(pd.Series.value_counts,
normalize=True,
axis=0).max() == 1
drop_cols = df.columns[drop_ix]
df = df.drop(drop_cols, axis=1)
return df
|
7f91d84cb7e57332ea843aac35ef9fae0a3023f0 | cumulusci/__init__.py | cumulusci/__init__.py | __import__('pkg_resources').declare_namespace('cumulusci')
__version__ = '2.0.0-beta56'
| import os
__import__('pkg_resources').declare_namespace('cumulusci')
__version__ = '2.0.0-beta56'
__location__ = os.path.dirname(os.path.realpath(__file__))
| Add a never changing __location__ attribute to cumulusci to get the root of the cumulusci codebase | Add a never changing __location__ attribute to cumulusci to get the root
of the cumulusci codebase
| Python | bsd-3-clause | SalesforceFoundation/CumulusCI,e02d96ec16/CumulusCI,SalesforceFoundation/CumulusCI,e02d96ec16/CumulusCI | __import__('pkg_resources').declare_namespace('cumulusci')
__version__ = '2.0.0-beta56'
Add a never changing __location__ attribute to cumulusci to get the root
of the cumulusci codebase | import os
__import__('pkg_resources').declare_namespace('cumulusci')
__version__ = '2.0.0-beta56'
__location__ = os.path.dirname(os.path.realpath(__file__))
| <commit_before>__import__('pkg_resources').declare_namespace('cumulusci')
__version__ = '2.0.0-beta56'
<commit_msg>Add a never changing __location__ attribute to cumulusci to get the root
of the cumulusci codebase<commit_after> | import os
__import__('pkg_resources').declare_namespace('cumulusci')
__version__ = '2.0.0-beta56'
__location__ = os.path.dirname(os.path.realpath(__file__))
| __import__('pkg_resources').declare_namespace('cumulusci')
__version__ = '2.0.0-beta56'
Add a never changing __location__ attribute to cumulusci to get the root
of the cumulusci codebaseimport os
__import__('pkg_resources').declare_namespace('cumulusci')
__version__ = '2.0.0-beta56'
__location__ = os.path.dirname(os.path.realpath(__file__))
| <commit_before>__import__('pkg_resources').declare_namespace('cumulusci')
__version__ = '2.0.0-beta56'
<commit_msg>Add a never changing __location__ attribute to cumulusci to get the root
of the cumulusci codebase<commit_after>import os
__import__('pkg_resources').declare_namespace('cumulusci')
__version__ = '2.0.0-beta56'
__location__ = os.path.dirname(os.path.realpath(__file__))
|
ca4b4732b4eacb6e1ac0e70bbc384982007d92de | custos/notify/http.py | custos/notify/http.py | import logging
import requests
from .base import Notifier
log = logging.getLogger(__name__)
class HTTPNotifier(Notifier):
''' A Notifier that sends http post request to a given url '''
def __init__(self, auth=None, json=True, **kwargs):
'''
Create a new HTTPNotifier
:param auth: If given, auth is handed over to request.post
:param recipients: The urls to post to.
:param json: If True, send message as json payload, else use an url query string
:type json: bool
:type recipients: Iterable of recipients or dict mapping categories to recipients
:param categories: The message categories this Notifier should relay
:type categories: Iterable
:param level: The minimum level for messages to be relayed
:type level: int
'''
self.auth = auth
self.json = json
super().__init__(**kwargs)
def notify(self, recipient, msg):
try:
params = msg.to_dict()
params.pop('image', None)
if self.json is True:
params['timestamp'] = str(params['timestamp'])
params['uuid'] = str(params['uuid'])
ret = requests.post(recipient, json=params, auth=self.auth)
else:
ret = requests.post(recipient, params=params, auth=self.auth)
ret.raise_for_status()
except ConnectionError:
except:
log.exception('Could not post message')
| import logging
import requests
from .base import Notifier
log = logging.getLogger(__name__)
class HTTPNotifier(Notifier):
''' A Notifier that sends http post request to a given url '''
def __init__(self, auth=None, json=True, **kwargs):
'''
Create a new HTTPNotifier
:param auth: If given, auth is handed over to request.post
:param recipients: The urls to post to.
:param json: If True, send message as json payload, else use an url query string
:type json: bool
:type recipients: Iterable of recipients or dict mapping categories to recipients
:param categories: The message categories this Notifier should relay
:type categories: Iterable
:param level: The minimum level for messages to be relayed
:type level: int
'''
self.auth = auth
self.json = json
super().__init__(**kwargs)
def notify(self, recipient, msg):
try:
params = msg.to_dict()
params.pop('image', None)
if self.json is True:
params['timestamp'] = str(params['timestamp'])
params['uuid'] = str(params['uuid'])
ret = requests.post(recipient, json=params, auth=self.auth)
else:
ret = requests.post(recipient, params=params, auth=self.auth)
ret.raise_for_status()
except ConnectionError:
# traceback of requests connection errors are really long for
# such a simple thing
log.error('No connection to {}'.format(recipient))
except:
log.exception('Could not post message')
| Fix exception handling in HTTPNotifier | Fix exception handling in HTTPNotifier
| Python | mit | fact-project/pycustos | import logging
import requests
from .base import Notifier
log = logging.getLogger(__name__)
class HTTPNotifier(Notifier):
''' A Notifier that sends http post request to a given url '''
def __init__(self, auth=None, json=True, **kwargs):
'''
Create a new HTTPNotifier
:param auth: If given, auth is handed over to request.post
:param recipients: The urls to post to.
:param json: If True, send message as json payload, else use an url query string
:type json: bool
:type recipients: Iterable of recipients or dict mapping categories to recipients
:param categories: The message categories this Notifier should relay
:type categories: Iterable
:param level: The minimum level for messages to be relayed
:type level: int
'''
self.auth = auth
self.json = json
super().__init__(**kwargs)
def notify(self, recipient, msg):
try:
params = msg.to_dict()
params.pop('image', None)
if self.json is True:
params['timestamp'] = str(params['timestamp'])
params['uuid'] = str(params['uuid'])
ret = requests.post(recipient, json=params, auth=self.auth)
else:
ret = requests.post(recipient, params=params, auth=self.auth)
ret.raise_for_status()
except ConnectionError:
except:
log.exception('Could not post message')
Fix exception handling in HTTPNotifier | import logging
import requests
from .base import Notifier
log = logging.getLogger(__name__)
class HTTPNotifier(Notifier):
''' A Notifier that sends http post request to a given url '''
def __init__(self, auth=None, json=True, **kwargs):
'''
Create a new HTTPNotifier
:param auth: If given, auth is handed over to request.post
:param recipients: The urls to post to.
:param json: If True, send message as json payload, else use an url query string
:type json: bool
:type recipients: Iterable of recipients or dict mapping categories to recipients
:param categories: The message categories this Notifier should relay
:type categories: Iterable
:param level: The minimum level for messages to be relayed
:type level: int
'''
self.auth = auth
self.json = json
super().__init__(**kwargs)
def notify(self, recipient, msg):
try:
params = msg.to_dict()
params.pop('image', None)
if self.json is True:
params['timestamp'] = str(params['timestamp'])
params['uuid'] = str(params['uuid'])
ret = requests.post(recipient, json=params, auth=self.auth)
else:
ret = requests.post(recipient, params=params, auth=self.auth)
ret.raise_for_status()
except ConnectionError:
# traceback of requests connection errors are really long for
# such a simple thing
log.error('No connection to {}'.format(recipient))
except:
log.exception('Could not post message')
| <commit_before>import logging
import requests
from .base import Notifier
log = logging.getLogger(__name__)
class HTTPNotifier(Notifier):
''' A Notifier that sends http post request to a given url '''
def __init__(self, auth=None, json=True, **kwargs):
'''
Create a new HTTPNotifier
:param auth: If given, auth is handed over to request.post
:param recipients: The urls to post to.
:param json: If True, send message as json payload, else use an url query string
:type json: bool
:type recipients: Iterable of recipients or dict mapping categories to recipients
:param categories: The message categories this Notifier should relay
:type categories: Iterable
:param level: The minimum level for messages to be relayed
:type level: int
'''
self.auth = auth
self.json = json
super().__init__(**kwargs)
def notify(self, recipient, msg):
try:
params = msg.to_dict()
params.pop('image', None)
if self.json is True:
params['timestamp'] = str(params['timestamp'])
params['uuid'] = str(params['uuid'])
ret = requests.post(recipient, json=params, auth=self.auth)
else:
ret = requests.post(recipient, params=params, auth=self.auth)
ret.raise_for_status()
except ConnectionError:
except:
log.exception('Could not post message')
<commit_msg>Fix exception handling in HTTPNotifier<commit_after> | import logging
import requests
from .base import Notifier
log = logging.getLogger(__name__)
class HTTPNotifier(Notifier):
''' A Notifier that sends http post request to a given url '''
def __init__(self, auth=None, json=True, **kwargs):
'''
Create a new HTTPNotifier
:param auth: If given, auth is handed over to request.post
:param recipients: The urls to post to.
:param json: If True, send message as json payload, else use an url query string
:type json: bool
:type recipients: Iterable of recipients or dict mapping categories to recipients
:param categories: The message categories this Notifier should relay
:type categories: Iterable
:param level: The minimum level for messages to be relayed
:type level: int
'''
self.auth = auth
self.json = json
super().__init__(**kwargs)
def notify(self, recipient, msg):
try:
params = msg.to_dict()
params.pop('image', None)
if self.json is True:
params['timestamp'] = str(params['timestamp'])
params['uuid'] = str(params['uuid'])
ret = requests.post(recipient, json=params, auth=self.auth)
else:
ret = requests.post(recipient, params=params, auth=self.auth)
ret.raise_for_status()
except ConnectionError:
# traceback of requests connection errors are really long for
# such a simple thing
log.error('No connection to {}'.format(recipient))
except:
log.exception('Could not post message')
| import logging
import requests
from .base import Notifier
log = logging.getLogger(__name__)
class HTTPNotifier(Notifier):
''' A Notifier that sends http post request to a given url '''
def __init__(self, auth=None, json=True, **kwargs):
'''
Create a new HTTPNotifier
:param auth: If given, auth is handed over to request.post
:param recipients: The urls to post to.
:param json: If True, send message as json payload, else use an url query string
:type json: bool
:type recipients: Iterable of recipients or dict mapping categories to recipients
:param categories: The message categories this Notifier should relay
:type categories: Iterable
:param level: The minimum level for messages to be relayed
:type level: int
'''
self.auth = auth
self.json = json
super().__init__(**kwargs)
def notify(self, recipient, msg):
try:
params = msg.to_dict()
params.pop('image', None)
if self.json is True:
params['timestamp'] = str(params['timestamp'])
params['uuid'] = str(params['uuid'])
ret = requests.post(recipient, json=params, auth=self.auth)
else:
ret = requests.post(recipient, params=params, auth=self.auth)
ret.raise_for_status()
except ConnectionError:
except:
log.exception('Could not post message')
Fix exception handling in HTTPNotifierimport logging
import requests
from .base import Notifier
log = logging.getLogger(__name__)
class HTTPNotifier(Notifier):
''' A Notifier that sends http post request to a given url '''
def __init__(self, auth=None, json=True, **kwargs):
'''
Create a new HTTPNotifier
:param auth: If given, auth is handed over to request.post
:param recipients: The urls to post to.
:param json: If True, send message as json payload, else use an url query string
:type json: bool
:type recipients: Iterable of recipients or dict mapping categories to recipients
:param categories: The message categories this Notifier should relay
:type categories: Iterable
:param level: The minimum level for messages to be relayed
:type level: int
'''
self.auth = auth
self.json = json
super().__init__(**kwargs)
def notify(self, recipient, msg):
try:
params = msg.to_dict()
params.pop('image', None)
if self.json is True:
params['timestamp'] = str(params['timestamp'])
params['uuid'] = str(params['uuid'])
ret = requests.post(recipient, json=params, auth=self.auth)
else:
ret = requests.post(recipient, params=params, auth=self.auth)
ret.raise_for_status()
except ConnectionError:
# traceback of requests connection errors are really long for
# such a simple thing
log.error('No connection to {}'.format(recipient))
except:
log.exception('Could not post message')
| <commit_before>import logging
import requests
from .base import Notifier
log = logging.getLogger(__name__)
class HTTPNotifier(Notifier):
''' A Notifier that sends http post request to a given url '''
def __init__(self, auth=None, json=True, **kwargs):
'''
Create a new HTTPNotifier
:param auth: If given, auth is handed over to request.post
:param recipients: The urls to post to.
:param json: If True, send message as json payload, else use an url query string
:type json: bool
:type recipients: Iterable of recipients or dict mapping categories to recipients
:param categories: The message categories this Notifier should relay
:type categories: Iterable
:param level: The minimum level for messages to be relayed
:type level: int
'''
self.auth = auth
self.json = json
super().__init__(**kwargs)
def notify(self, recipient, msg):
try:
params = msg.to_dict()
params.pop('image', None)
if self.json is True:
params['timestamp'] = str(params['timestamp'])
params['uuid'] = str(params['uuid'])
ret = requests.post(recipient, json=params, auth=self.auth)
else:
ret = requests.post(recipient, params=params, auth=self.auth)
ret.raise_for_status()
except ConnectionError:
except:
log.exception('Could not post message')
<commit_msg>Fix exception handling in HTTPNotifier<commit_after>import logging
import requests
from .base import Notifier
log = logging.getLogger(__name__)
class HTTPNotifier(Notifier):
''' A Notifier that sends http post request to a given url '''
def __init__(self, auth=None, json=True, **kwargs):
'''
Create a new HTTPNotifier
:param auth: If given, auth is handed over to request.post
:param recipients: The urls to post to.
:param json: If True, send message as json payload, else use an url query string
:type json: bool
:type recipients: Iterable of recipients or dict mapping categories to recipients
:param categories: The message categories this Notifier should relay
:type categories: Iterable
:param level: The minimum level for messages to be relayed
:type level: int
'''
self.auth = auth
self.json = json
super().__init__(**kwargs)
def notify(self, recipient, msg):
try:
params = msg.to_dict()
params.pop('image', None)
if self.json is True:
params['timestamp'] = str(params['timestamp'])
params['uuid'] = str(params['uuid'])
ret = requests.post(recipient, json=params, auth=self.auth)
else:
ret = requests.post(recipient, params=params, auth=self.auth)
ret.raise_for_status()
except ConnectionError:
# traceback of requests connection errors are really long for
# such a simple thing
log.error('No connection to {}'.format(recipient))
except:
log.exception('Could not post message')
|
a0d9d3f213fb28914040e9647509c897ad8cc41a | skimage/_shared/utils.py | skimage/_shared/utils.py | import warnings
import functools
__all__ = ['deprecated']
class deprecated(object):
'''Decorator to mark deprecated functions with warning.
Adapted from <http://wiki.python.org/moin/PythonDecoratorLibrary>.
Parameters
----------
alt_func : str
If given, tell user what function to use instead.
behavior : {'warn', 'raise'}
Behavior during call to deprecated function: 'warn' = warn user that
function is deprecated; 'raise' = raise error.
'''
def __init__(self, alt_func=None, behavior='warn'):
self.alt_func = alt_func
self.behavior = behavior
def __call__(self, func):
alt_msg = ''
if self.alt_func is not None:
alt_msg = ' Use `%s` instead.' % self.alt_func
msg = 'Call to deprecated function `%s`.' % func.__name__
msg += alt_msg
@functools.wraps(func)
def wrapped(*args, **kwargs):
if self.behavior == 'warn':
warnings.warn_explicit(msg,
category=DeprecationWarning,
filename=func.func_code.co_filename,
lineno=func.func_code.co_firstlineno + 1)
elif self.behavior == 'raise':
raise DeprecationWarning(msg)
return func(*args, **kwargs)
# modify doc string to display deprecation warning
doc = 'Deprecated function.' + alt_msg
if wrapped.__doc__ is None:
wrapped.__doc__ = doc
else:
wrapped.__doc__ = doc + '\n\n' + wrapped.__doc__
return wrapped
| import warnings
import functools
__all__ = ['deprecated']
class deprecated(object):
"""Decorator to mark deprecated functions with warning.
Adapted from <http://wiki.python.org/moin/PythonDecoratorLibrary>.
Parameters
----------
alt_func : str
If given, tell user what function to use instead.
behavior : {'warn', 'raise'}
Behavior during call to deprecated function: 'warn' = warn user that
function is deprecated; 'raise' = raise error.
"""
def __init__(self, alt_func=None, behavior='warn'):
self.alt_func = alt_func
self.behavior = behavior
def __call__(self, func):
alt_msg = ''
if self.alt_func is not None:
alt_msg = ' Use `%s` instead.' % self.alt_func
msg = 'Call to deprecated function `%s`.' % func.__name__
msg += alt_msg
@functools.wraps(func)
def wrapped(*args, **kwargs):
if self.behavior == 'warn':
warnings.warn_explicit(msg,
category=DeprecationWarning,
filename=func.func_code.co_filename,
lineno=func.func_code.co_firstlineno + 1)
elif self.behavior == 'raise':
raise DeprecationWarning(msg)
return func(*args, **kwargs)
# modify doc string to display deprecation warning
doc = 'Deprecated function.' + alt_msg
if wrapped.__doc__ is None:
wrapped.__doc__ = doc
else:
wrapped.__doc__ = doc + '\n\n' + wrapped.__doc__
return wrapped
| Use double instead of single quotes for doc string | Use double instead of single quotes for doc string
| Python | bsd-3-clause | GaZ3ll3/scikit-image,chintak/scikit-image,ajaybhat/scikit-image,warmspringwinds/scikit-image,bennlich/scikit-image,michaelaye/scikit-image,almarklein/scikit-image,michaelpacer/scikit-image,youprofit/scikit-image,almarklein/scikit-image,chintak/scikit-image,WarrenWeckesser/scikits-image,dpshelio/scikit-image,rjeli/scikit-image,keflavich/scikit-image,ClinicalGraphics/scikit-image,oew1v07/scikit-image,Britefury/scikit-image,youprofit/scikit-image,warmspringwinds/scikit-image,pratapvardhan/scikit-image,SamHames/scikit-image,robintw/scikit-image,WarrenWeckesser/scikits-image,vighneshbirodkar/scikit-image,almarklein/scikit-image,SamHames/scikit-image,paalge/scikit-image,chintak/scikit-image,keflavich/scikit-image,robintw/scikit-image,vighneshbirodkar/scikit-image,rjeli/scikit-image,michaelaye/scikit-image,juliusbierk/scikit-image,jwiggins/scikit-image,almarklein/scikit-image,Midafi/scikit-image,rjeli/scikit-image,juliusbierk/scikit-image,pratapvardhan/scikit-image,chintak/scikit-image,ofgulban/scikit-image,emon10005/scikit-image,blink1073/scikit-image,SamHames/scikit-image,ofgulban/scikit-image,Midafi/scikit-image,Hiyorimi/scikit-image,bennlich/scikit-image,newville/scikit-image,bsipocz/scikit-image,ofgulban/scikit-image,chriscrosscutler/scikit-image,paalge/scikit-image,ClinicalGraphics/scikit-image,Britefury/scikit-image,newville/scikit-image,Hiyorimi/scikit-image,oew1v07/scikit-image,blink1073/scikit-image,jwiggins/scikit-image,GaZ3ll3/scikit-image,paalge/scikit-image,vighneshbirodkar/scikit-image,chriscrosscutler/scikit-image,SamHames/scikit-image,michaelpacer/scikit-image,dpshelio/scikit-image,emon10005/scikit-image,ajaybhat/scikit-image,bsipocz/scikit-image | import warnings
import functools
__all__ = ['deprecated']
class deprecated(object):
'''Decorator to mark deprecated functions with warning.
Adapted from <http://wiki.python.org/moin/PythonDecoratorLibrary>.
Parameters
----------
alt_func : str
If given, tell user what function to use instead.
behavior : {'warn', 'raise'}
Behavior during call to deprecated function: 'warn' = warn user that
function is deprecated; 'raise' = raise error.
'''
def __init__(self, alt_func=None, behavior='warn'):
self.alt_func = alt_func
self.behavior = behavior
def __call__(self, func):
alt_msg = ''
if self.alt_func is not None:
alt_msg = ' Use `%s` instead.' % self.alt_func
msg = 'Call to deprecated function `%s`.' % func.__name__
msg += alt_msg
@functools.wraps(func)
def wrapped(*args, **kwargs):
if self.behavior == 'warn':
warnings.warn_explicit(msg,
category=DeprecationWarning,
filename=func.func_code.co_filename,
lineno=func.func_code.co_firstlineno + 1)
elif self.behavior == 'raise':
raise DeprecationWarning(msg)
return func(*args, **kwargs)
# modify doc string to display deprecation warning
doc = 'Deprecated function.' + alt_msg
if wrapped.__doc__ is None:
wrapped.__doc__ = doc
else:
wrapped.__doc__ = doc + '\n\n' + wrapped.__doc__
return wrapped
Use double instead of single quotes for doc string | import warnings
import functools
__all__ = ['deprecated']
class deprecated(object):
"""Decorator to mark deprecated functions with warning.
Adapted from <http://wiki.python.org/moin/PythonDecoratorLibrary>.
Parameters
----------
alt_func : str
If given, tell user what function to use instead.
behavior : {'warn', 'raise'}
Behavior during call to deprecated function: 'warn' = warn user that
function is deprecated; 'raise' = raise error.
"""
def __init__(self, alt_func=None, behavior='warn'):
self.alt_func = alt_func
self.behavior = behavior
def __call__(self, func):
alt_msg = ''
if self.alt_func is not None:
alt_msg = ' Use `%s` instead.' % self.alt_func
msg = 'Call to deprecated function `%s`.' % func.__name__
msg += alt_msg
@functools.wraps(func)
def wrapped(*args, **kwargs):
if self.behavior == 'warn':
warnings.warn_explicit(msg,
category=DeprecationWarning,
filename=func.func_code.co_filename,
lineno=func.func_code.co_firstlineno + 1)
elif self.behavior == 'raise':
raise DeprecationWarning(msg)
return func(*args, **kwargs)
# modify doc string to display deprecation warning
doc = 'Deprecated function.' + alt_msg
if wrapped.__doc__ is None:
wrapped.__doc__ = doc
else:
wrapped.__doc__ = doc + '\n\n' + wrapped.__doc__
return wrapped
| <commit_before>import warnings
import functools
__all__ = ['deprecated']
class deprecated(object):
'''Decorator to mark deprecated functions with warning.
Adapted from <http://wiki.python.org/moin/PythonDecoratorLibrary>.
Parameters
----------
alt_func : str
If given, tell user what function to use instead.
behavior : {'warn', 'raise'}
Behavior during call to deprecated function: 'warn' = warn user that
function is deprecated; 'raise' = raise error.
'''
def __init__(self, alt_func=None, behavior='warn'):
self.alt_func = alt_func
self.behavior = behavior
def __call__(self, func):
alt_msg = ''
if self.alt_func is not None:
alt_msg = ' Use `%s` instead.' % self.alt_func
msg = 'Call to deprecated function `%s`.' % func.__name__
msg += alt_msg
@functools.wraps(func)
def wrapped(*args, **kwargs):
if self.behavior == 'warn':
warnings.warn_explicit(msg,
category=DeprecationWarning,
filename=func.func_code.co_filename,
lineno=func.func_code.co_firstlineno + 1)
elif self.behavior == 'raise':
raise DeprecationWarning(msg)
return func(*args, **kwargs)
# modify doc string to display deprecation warning
doc = 'Deprecated function.' + alt_msg
if wrapped.__doc__ is None:
wrapped.__doc__ = doc
else:
wrapped.__doc__ = doc + '\n\n' + wrapped.__doc__
return wrapped
<commit_msg>Use double instead of single quotes for doc string<commit_after> | import warnings
import functools
__all__ = ['deprecated']
class deprecated(object):
"""Decorator to mark deprecated functions with warning.
Adapted from <http://wiki.python.org/moin/PythonDecoratorLibrary>.
Parameters
----------
alt_func : str
If given, tell user what function to use instead.
behavior : {'warn', 'raise'}
Behavior during call to deprecated function: 'warn' = warn user that
function is deprecated; 'raise' = raise error.
"""
def __init__(self, alt_func=None, behavior='warn'):
self.alt_func = alt_func
self.behavior = behavior
def __call__(self, func):
alt_msg = ''
if self.alt_func is not None:
alt_msg = ' Use `%s` instead.' % self.alt_func
msg = 'Call to deprecated function `%s`.' % func.__name__
msg += alt_msg
@functools.wraps(func)
def wrapped(*args, **kwargs):
if self.behavior == 'warn':
warnings.warn_explicit(msg,
category=DeprecationWarning,
filename=func.func_code.co_filename,
lineno=func.func_code.co_firstlineno + 1)
elif self.behavior == 'raise':
raise DeprecationWarning(msg)
return func(*args, **kwargs)
# modify doc string to display deprecation warning
doc = 'Deprecated function.' + alt_msg
if wrapped.__doc__ is None:
wrapped.__doc__ = doc
else:
wrapped.__doc__ = doc + '\n\n' + wrapped.__doc__
return wrapped
| import warnings
import functools
__all__ = ['deprecated']
class deprecated(object):
'''Decorator to mark deprecated functions with warning.
Adapted from <http://wiki.python.org/moin/PythonDecoratorLibrary>.
Parameters
----------
alt_func : str
If given, tell user what function to use instead.
behavior : {'warn', 'raise'}
Behavior during call to deprecated function: 'warn' = warn user that
function is deprecated; 'raise' = raise error.
'''
def __init__(self, alt_func=None, behavior='warn'):
self.alt_func = alt_func
self.behavior = behavior
def __call__(self, func):
alt_msg = ''
if self.alt_func is not None:
alt_msg = ' Use `%s` instead.' % self.alt_func
msg = 'Call to deprecated function `%s`.' % func.__name__
msg += alt_msg
@functools.wraps(func)
def wrapped(*args, **kwargs):
if self.behavior == 'warn':
warnings.warn_explicit(msg,
category=DeprecationWarning,
filename=func.func_code.co_filename,
lineno=func.func_code.co_firstlineno + 1)
elif self.behavior == 'raise':
raise DeprecationWarning(msg)
return func(*args, **kwargs)
# modify doc string to display deprecation warning
doc = 'Deprecated function.' + alt_msg
if wrapped.__doc__ is None:
wrapped.__doc__ = doc
else:
wrapped.__doc__ = doc + '\n\n' + wrapped.__doc__
return wrapped
Use double instead of single quotes for doc stringimport warnings
import functools
__all__ = ['deprecated']
class deprecated(object):
"""Decorator to mark deprecated functions with warning.
Adapted from <http://wiki.python.org/moin/PythonDecoratorLibrary>.
Parameters
----------
alt_func : str
If given, tell user what function to use instead.
behavior : {'warn', 'raise'}
Behavior during call to deprecated function: 'warn' = warn user that
function is deprecated; 'raise' = raise error.
"""
def __init__(self, alt_func=None, behavior='warn'):
self.alt_func = alt_func
self.behavior = behavior
def __call__(self, func):
alt_msg = ''
if self.alt_func is not None:
alt_msg = ' Use `%s` instead.' % self.alt_func
msg = 'Call to deprecated function `%s`.' % func.__name__
msg += alt_msg
@functools.wraps(func)
def wrapped(*args, **kwargs):
if self.behavior == 'warn':
warnings.warn_explicit(msg,
category=DeprecationWarning,
filename=func.func_code.co_filename,
lineno=func.func_code.co_firstlineno + 1)
elif self.behavior == 'raise':
raise DeprecationWarning(msg)
return func(*args, **kwargs)
# modify doc string to display deprecation warning
doc = 'Deprecated function.' + alt_msg
if wrapped.__doc__ is None:
wrapped.__doc__ = doc
else:
wrapped.__doc__ = doc + '\n\n' + wrapped.__doc__
return wrapped
| <commit_before>import warnings
import functools
__all__ = ['deprecated']
class deprecated(object):
'''Decorator to mark deprecated functions with warning.
Adapted from <http://wiki.python.org/moin/PythonDecoratorLibrary>.
Parameters
----------
alt_func : str
If given, tell user what function to use instead.
behavior : {'warn', 'raise'}
Behavior during call to deprecated function: 'warn' = warn user that
function is deprecated; 'raise' = raise error.
'''
def __init__(self, alt_func=None, behavior='warn'):
self.alt_func = alt_func
self.behavior = behavior
def __call__(self, func):
alt_msg = ''
if self.alt_func is not None:
alt_msg = ' Use `%s` instead.' % self.alt_func
msg = 'Call to deprecated function `%s`.' % func.__name__
msg += alt_msg
@functools.wraps(func)
def wrapped(*args, **kwargs):
if self.behavior == 'warn':
warnings.warn_explicit(msg,
category=DeprecationWarning,
filename=func.func_code.co_filename,
lineno=func.func_code.co_firstlineno + 1)
elif self.behavior == 'raise':
raise DeprecationWarning(msg)
return func(*args, **kwargs)
# modify doc string to display deprecation warning
doc = 'Deprecated function.' + alt_msg
if wrapped.__doc__ is None:
wrapped.__doc__ = doc
else:
wrapped.__doc__ = doc + '\n\n' + wrapped.__doc__
return wrapped
<commit_msg>Use double instead of single quotes for doc string<commit_after>import warnings
import functools
__all__ = ['deprecated']
class deprecated(object):
"""Decorator to mark deprecated functions with warning.
Adapted from <http://wiki.python.org/moin/PythonDecoratorLibrary>.
Parameters
----------
alt_func : str
If given, tell user what function to use instead.
behavior : {'warn', 'raise'}
Behavior during call to deprecated function: 'warn' = warn user that
function is deprecated; 'raise' = raise error.
"""
def __init__(self, alt_func=None, behavior='warn'):
self.alt_func = alt_func
self.behavior = behavior
def __call__(self, func):
alt_msg = ''
if self.alt_func is not None:
alt_msg = ' Use `%s` instead.' % self.alt_func
msg = 'Call to deprecated function `%s`.' % func.__name__
msg += alt_msg
@functools.wraps(func)
def wrapped(*args, **kwargs):
if self.behavior == 'warn':
warnings.warn_explicit(msg,
category=DeprecationWarning,
filename=func.func_code.co_filename,
lineno=func.func_code.co_firstlineno + 1)
elif self.behavior == 'raise':
raise DeprecationWarning(msg)
return func(*args, **kwargs)
# modify doc string to display deprecation warning
doc = 'Deprecated function.' + alt_msg
if wrapped.__doc__ is None:
wrapped.__doc__ = doc
else:
wrapped.__doc__ = doc + '\n\n' + wrapped.__doc__
return wrapped
|
2142db8de793382bdfc56de9133b320b5d6f2690 | tests/test_vector2_isclose.py | tests/test_vector2_isclose.py | from ppb_vector import Vector2
from utils import vectors
from hypothesis import assume, given, note, example
from hypothesis.strategies import floats
@given(x=vectors(), abs_tol=floats(min_value=0), rel_tol=floats(min_value=0))
def test_isclose_to_self(x, abs_tol, rel_tol):
assert x.isclose(x, abs_tol=abs_tol, rel_tol=rel_tol)
| from ppb_vector import Vector2
from pytest import raises # type: ignore
from utils import vectors
from hypothesis import assume, given, note, example
from hypothesis.strategies import floats
@given(x=vectors(), abs_tol=floats(min_value=0), rel_tol=floats(min_value=0))
def test_isclose_to_self(x, abs_tol, rel_tol):
assert x.isclose(x, abs_tol=abs_tol, rel_tol=rel_tol)
def test_isclose_negative_tolerances():
zero = Vector2(0, 0)
with raises(ValueError):
zero.isclose(zero, abs_tol=-1)
with raises(ValueError):
zero.isclose(zero, rel_tol=-1)
| Test Vector2.isclose with invalid values | Test Vector2.isclose with invalid values
| Python | artistic-2.0 | ppb/ppb-vector,ppb/ppb-vector | from ppb_vector import Vector2
from utils import vectors
from hypothesis import assume, given, note, example
from hypothesis.strategies import floats
@given(x=vectors(), abs_tol=floats(min_value=0), rel_tol=floats(min_value=0))
def test_isclose_to_self(x, abs_tol, rel_tol):
assert x.isclose(x, abs_tol=abs_tol, rel_tol=rel_tol)
Test Vector2.isclose with invalid values | from ppb_vector import Vector2
from pytest import raises # type: ignore
from utils import vectors
from hypothesis import assume, given, note, example
from hypothesis.strategies import floats
@given(x=vectors(), abs_tol=floats(min_value=0), rel_tol=floats(min_value=0))
def test_isclose_to_self(x, abs_tol, rel_tol):
assert x.isclose(x, abs_tol=abs_tol, rel_tol=rel_tol)
def test_isclose_negative_tolerances():
zero = Vector2(0, 0)
with raises(ValueError):
zero.isclose(zero, abs_tol=-1)
with raises(ValueError):
zero.isclose(zero, rel_tol=-1)
| <commit_before>from ppb_vector import Vector2
from utils import vectors
from hypothesis import assume, given, note, example
from hypothesis.strategies import floats
@given(x=vectors(), abs_tol=floats(min_value=0), rel_tol=floats(min_value=0))
def test_isclose_to_self(x, abs_tol, rel_tol):
assert x.isclose(x, abs_tol=abs_tol, rel_tol=rel_tol)
<commit_msg>Test Vector2.isclose with invalid values<commit_after> | from ppb_vector import Vector2
from pytest import raises # type: ignore
from utils import vectors
from hypothesis import assume, given, note, example
from hypothesis.strategies import floats
@given(x=vectors(), abs_tol=floats(min_value=0), rel_tol=floats(min_value=0))
def test_isclose_to_self(x, abs_tol, rel_tol):
assert x.isclose(x, abs_tol=abs_tol, rel_tol=rel_tol)
def test_isclose_negative_tolerances():
zero = Vector2(0, 0)
with raises(ValueError):
zero.isclose(zero, abs_tol=-1)
with raises(ValueError):
zero.isclose(zero, rel_tol=-1)
| from ppb_vector import Vector2
from utils import vectors
from hypothesis import assume, given, note, example
from hypothesis.strategies import floats
@given(x=vectors(), abs_tol=floats(min_value=0), rel_tol=floats(min_value=0))
def test_isclose_to_self(x, abs_tol, rel_tol):
assert x.isclose(x, abs_tol=abs_tol, rel_tol=rel_tol)
Test Vector2.isclose with invalid valuesfrom ppb_vector import Vector2
from pytest import raises # type: ignore
from utils import vectors
from hypothesis import assume, given, note, example
from hypothesis.strategies import floats
@given(x=vectors(), abs_tol=floats(min_value=0), rel_tol=floats(min_value=0))
def test_isclose_to_self(x, abs_tol, rel_tol):
assert x.isclose(x, abs_tol=abs_tol, rel_tol=rel_tol)
def test_isclose_negative_tolerances():
zero = Vector2(0, 0)
with raises(ValueError):
zero.isclose(zero, abs_tol=-1)
with raises(ValueError):
zero.isclose(zero, rel_tol=-1)
| <commit_before>from ppb_vector import Vector2
from utils import vectors
from hypothesis import assume, given, note, example
from hypothesis.strategies import floats
@given(x=vectors(), abs_tol=floats(min_value=0), rel_tol=floats(min_value=0))
def test_isclose_to_self(x, abs_tol, rel_tol):
assert x.isclose(x, abs_tol=abs_tol, rel_tol=rel_tol)
<commit_msg>Test Vector2.isclose with invalid values<commit_after>from ppb_vector import Vector2
from pytest import raises # type: ignore
from utils import vectors
from hypothesis import assume, given, note, example
from hypothesis.strategies import floats
@given(x=vectors(), abs_tol=floats(min_value=0), rel_tol=floats(min_value=0))
def test_isclose_to_self(x, abs_tol, rel_tol):
assert x.isclose(x, abs_tol=abs_tol, rel_tol=rel_tol)
def test_isclose_negative_tolerances():
zero = Vector2(0, 0)
with raises(ValueError):
zero.isclose(zero, abs_tol=-1)
with raises(ValueError):
zero.isclose(zero, rel_tol=-1)
|
e39c247f7ba97bdc2e2394f696437a298bd734d5 | api/models.py | api/models.py | from django.db import models
from rest_framework import serializers
class Question(models.Model):
version = models.CharField(primary_key=True, max_length=8)
text = models.TextField()
created_on = models.DateTimeField(auto_now_add=True)
updated_on = models.DateTimeField(auto_now=True)
class Choice(models.Model):
text = models.TextField()
version = models.ForeignKey(Question, on_delete=models.CASCADE)
created_on = models.DateTimeField(auto_now_add=True)
updated_on = models.DateTimeField(auto_now=True)
class Answer(models.Model):
choice = models.ForeignKey(Choice, on_delete=models.CASCADE)
user_id = models.TextField()
created_on = models.DateTimeField(auto_now_add=True)
class ChoiceSerializer(serializers.ModelSerializer):
class Meta:
model = Choice
fields = ('id', 'text', 'version', 'created_on', 'updated_on',)
class QuestionSerializer(serializers.ModelSerializer):
# TODO: create a serializer that returns list of choices for the question
class Meta:
model = Question
fields = ('text', 'version', 'created_on', 'updated_on',)
class AnswerSerializer(serializers.ModelSerializer):
class Meta:
model = Answer
fields = ('id', 'choice_id', 'user_id', 'created_on',)
| from django.db import models
from rest_framework import serializers
class Question(models.Model):
version = models.CharField(primary_key=True, max_length=8)
text = models.TextField()
created_on = models.DateTimeField(auto_now_add=True)
updated_on = models.DateTimeField(auto_now=True)
class Choice(models.Model):
text = models.TextField()
question_id = models.ForeignKey(Question, on_delete=models.CASCADE)
created_on = models.DateTimeField(auto_now_add=True)
updated_on = models.DateTimeField(auto_now=True)
class Answer(models.Model):
choice = models.ForeignKey(Choice, on_delete=models.CASCADE)
user_id = models.TextField()
created_on = models.DateTimeField(auto_now_add=True)
class ChoiceSerializer(serializers.ModelSerializer):
class Meta:
model = Choice
fields = ('id', 'text', 'version', 'created_on', 'updated_on',)
class QuestionSerializer(serializers.ModelSerializer):
# TODO: create a serializer that returns list of choices for the question
class Meta:
model = Question
fields = ('text', 'version', 'created_on', 'updated_on',)
class AnswerSerializer(serializers.ModelSerializer):
class Meta:
model = Answer
fields = ('id', 'choice_id', 'user_id', 'created_on',)
| Rename version field in Choice to question_id | Rename version field in Choice to question_id
| Python | mit | holycattle/pysqueak-api,holycattle/pysqueak-api | from django.db import models
from rest_framework import serializers
class Question(models.Model):
version = models.CharField(primary_key=True, max_length=8)
text = models.TextField()
created_on = models.DateTimeField(auto_now_add=True)
updated_on = models.DateTimeField(auto_now=True)
class Choice(models.Model):
text = models.TextField()
version = models.ForeignKey(Question, on_delete=models.CASCADE)
created_on = models.DateTimeField(auto_now_add=True)
updated_on = models.DateTimeField(auto_now=True)
class Answer(models.Model):
choice = models.ForeignKey(Choice, on_delete=models.CASCADE)
user_id = models.TextField()
created_on = models.DateTimeField(auto_now_add=True)
class ChoiceSerializer(serializers.ModelSerializer):
class Meta:
model = Choice
fields = ('id', 'text', 'version', 'created_on', 'updated_on',)
class QuestionSerializer(serializers.ModelSerializer):
# TODO: create a serializer that returns list of choices for the question
class Meta:
model = Question
fields = ('text', 'version', 'created_on', 'updated_on',)
class AnswerSerializer(serializers.ModelSerializer):
class Meta:
model = Answer
fields = ('id', 'choice_id', 'user_id', 'created_on',)
Rename version field in Choice to question_id | from django.db import models
from rest_framework import serializers
class Question(models.Model):
version = models.CharField(primary_key=True, max_length=8)
text = models.TextField()
created_on = models.DateTimeField(auto_now_add=True)
updated_on = models.DateTimeField(auto_now=True)
class Choice(models.Model):
text = models.TextField()
question_id = models.ForeignKey(Question, on_delete=models.CASCADE)
created_on = models.DateTimeField(auto_now_add=True)
updated_on = models.DateTimeField(auto_now=True)
class Answer(models.Model):
choice = models.ForeignKey(Choice, on_delete=models.CASCADE)
user_id = models.TextField()
created_on = models.DateTimeField(auto_now_add=True)
class ChoiceSerializer(serializers.ModelSerializer):
class Meta:
model = Choice
fields = ('id', 'text', 'version', 'created_on', 'updated_on',)
class QuestionSerializer(serializers.ModelSerializer):
# TODO: create a serializer that returns list of choices for the question
class Meta:
model = Question
fields = ('text', 'version', 'created_on', 'updated_on',)
class AnswerSerializer(serializers.ModelSerializer):
class Meta:
model = Answer
fields = ('id', 'choice_id', 'user_id', 'created_on',)
| <commit_before>from django.db import models
from rest_framework import serializers
class Question(models.Model):
version = models.CharField(primary_key=True, max_length=8)
text = models.TextField()
created_on = models.DateTimeField(auto_now_add=True)
updated_on = models.DateTimeField(auto_now=True)
class Choice(models.Model):
text = models.TextField()
version = models.ForeignKey(Question, on_delete=models.CASCADE)
created_on = models.DateTimeField(auto_now_add=True)
updated_on = models.DateTimeField(auto_now=True)
class Answer(models.Model):
choice = models.ForeignKey(Choice, on_delete=models.CASCADE)
user_id = models.TextField()
created_on = models.DateTimeField(auto_now_add=True)
class ChoiceSerializer(serializers.ModelSerializer):
class Meta:
model = Choice
fields = ('id', 'text', 'version', 'created_on', 'updated_on',)
class QuestionSerializer(serializers.ModelSerializer):
# TODO: create a serializer that returns list of choices for the question
class Meta:
model = Question
fields = ('text', 'version', 'created_on', 'updated_on',)
class AnswerSerializer(serializers.ModelSerializer):
class Meta:
model = Answer
fields = ('id', 'choice_id', 'user_id', 'created_on',)
<commit_msg>Rename version field in Choice to question_id<commit_after> | from django.db import models
from rest_framework import serializers
class Question(models.Model):
version = models.CharField(primary_key=True, max_length=8)
text = models.TextField()
created_on = models.DateTimeField(auto_now_add=True)
updated_on = models.DateTimeField(auto_now=True)
class Choice(models.Model):
text = models.TextField()
question_id = models.ForeignKey(Question, on_delete=models.CASCADE)
created_on = models.DateTimeField(auto_now_add=True)
updated_on = models.DateTimeField(auto_now=True)
class Answer(models.Model):
choice = models.ForeignKey(Choice, on_delete=models.CASCADE)
user_id = models.TextField()
created_on = models.DateTimeField(auto_now_add=True)
class ChoiceSerializer(serializers.ModelSerializer):
class Meta:
model = Choice
fields = ('id', 'text', 'version', 'created_on', 'updated_on',)
class QuestionSerializer(serializers.ModelSerializer):
# TODO: create a serializer that returns list of choices for the question
class Meta:
model = Question
fields = ('text', 'version', 'created_on', 'updated_on',)
class AnswerSerializer(serializers.ModelSerializer):
class Meta:
model = Answer
fields = ('id', 'choice_id', 'user_id', 'created_on',)
| from django.db import models
from rest_framework import serializers
class Question(models.Model):
version = models.CharField(primary_key=True, max_length=8)
text = models.TextField()
created_on = models.DateTimeField(auto_now_add=True)
updated_on = models.DateTimeField(auto_now=True)
class Choice(models.Model):
text = models.TextField()
version = models.ForeignKey(Question, on_delete=models.CASCADE)
created_on = models.DateTimeField(auto_now_add=True)
updated_on = models.DateTimeField(auto_now=True)
class Answer(models.Model):
choice = models.ForeignKey(Choice, on_delete=models.CASCADE)
user_id = models.TextField()
created_on = models.DateTimeField(auto_now_add=True)
class ChoiceSerializer(serializers.ModelSerializer):
class Meta:
model = Choice
fields = ('id', 'text', 'version', 'created_on', 'updated_on',)
class QuestionSerializer(serializers.ModelSerializer):
# TODO: create a serializer that returns list of choices for the question
class Meta:
model = Question
fields = ('text', 'version', 'created_on', 'updated_on',)
class AnswerSerializer(serializers.ModelSerializer):
class Meta:
model = Answer
fields = ('id', 'choice_id', 'user_id', 'created_on',)
Rename version field in Choice to question_idfrom django.db import models
from rest_framework import serializers
class Question(models.Model):
version = models.CharField(primary_key=True, max_length=8)
text = models.TextField()
created_on = models.DateTimeField(auto_now_add=True)
updated_on = models.DateTimeField(auto_now=True)
class Choice(models.Model):
text = models.TextField()
question_id = models.ForeignKey(Question, on_delete=models.CASCADE)
created_on = models.DateTimeField(auto_now_add=True)
updated_on = models.DateTimeField(auto_now=True)
class Answer(models.Model):
choice = models.ForeignKey(Choice, on_delete=models.CASCADE)
user_id = models.TextField()
created_on = models.DateTimeField(auto_now_add=True)
class ChoiceSerializer(serializers.ModelSerializer):
class Meta:
model = Choice
fields = ('id', 'text', 'version', 'created_on', 'updated_on',)
class QuestionSerializer(serializers.ModelSerializer):
# TODO: create a serializer that returns list of choices for the question
class Meta:
model = Question
fields = ('text', 'version', 'created_on', 'updated_on',)
class AnswerSerializer(serializers.ModelSerializer):
class Meta:
model = Answer
fields = ('id', 'choice_id', 'user_id', 'created_on',)
| <commit_before>from django.db import models
from rest_framework import serializers
class Question(models.Model):
version = models.CharField(primary_key=True, max_length=8)
text = models.TextField()
created_on = models.DateTimeField(auto_now_add=True)
updated_on = models.DateTimeField(auto_now=True)
class Choice(models.Model):
text = models.TextField()
version = models.ForeignKey(Question, on_delete=models.CASCADE)
created_on = models.DateTimeField(auto_now_add=True)
updated_on = models.DateTimeField(auto_now=True)
class Answer(models.Model):
choice = models.ForeignKey(Choice, on_delete=models.CASCADE)
user_id = models.TextField()
created_on = models.DateTimeField(auto_now_add=True)
class ChoiceSerializer(serializers.ModelSerializer):
class Meta:
model = Choice
fields = ('id', 'text', 'version', 'created_on', 'updated_on',)
class QuestionSerializer(serializers.ModelSerializer):
# TODO: create a serializer that returns list of choices for the question
class Meta:
model = Question
fields = ('text', 'version', 'created_on', 'updated_on',)
class AnswerSerializer(serializers.ModelSerializer):
class Meta:
model = Answer
fields = ('id', 'choice_id', 'user_id', 'created_on',)
<commit_msg>Rename version field in Choice to question_id<commit_after>from django.db import models
from rest_framework import serializers
class Question(models.Model):
version = models.CharField(primary_key=True, max_length=8)
text = models.TextField()
created_on = models.DateTimeField(auto_now_add=True)
updated_on = models.DateTimeField(auto_now=True)
class Choice(models.Model):
text = models.TextField()
question_id = models.ForeignKey(Question, on_delete=models.CASCADE)
created_on = models.DateTimeField(auto_now_add=True)
updated_on = models.DateTimeField(auto_now=True)
class Answer(models.Model):
choice = models.ForeignKey(Choice, on_delete=models.CASCADE)
user_id = models.TextField()
created_on = models.DateTimeField(auto_now_add=True)
class ChoiceSerializer(serializers.ModelSerializer):
class Meta:
model = Choice
fields = ('id', 'text', 'version', 'created_on', 'updated_on',)
class QuestionSerializer(serializers.ModelSerializer):
# TODO: create a serializer that returns list of choices for the question
class Meta:
model = Question
fields = ('text', 'version', 'created_on', 'updated_on',)
class AnswerSerializer(serializers.ModelSerializer):
class Meta:
model = Answer
fields = ('id', 'choice_id', 'user_id', 'created_on',)
|
acbd2db59024f6b6847193329ca5b83ace9f1202 | refactor/tests/dnstamper.py | refactor/tests/dnstamper.py | from dns import resolver
import gevent
import os
def lookup(hostname, ns):
res = resolver.Resolver(configure=False)
res.nameservers = [ns]
answer = res.query(hostname)
ret = []
for data in answer:
ret.append(data.address)
return ret
def compare_lookups(address):
exp = lookup(address, '8.8.8.8')
control = lookup(address, '208.67.222.222')
print address
if len(set(exp) & set(control)) > 0:
print "No tampering"
else:
print "Tampering"
print exp
print control
def run(ooni):
config = ooni.config
urls = []
f = open(os.path.join(config.main.assetdir, config.tests.dns_experiment))
i = 0
ooni.logger.info("reading file")
for line in f.readlines():
urls.append(line.strip())
if i % 100 == 0:
jobs = [gevent.spawn(compare_lookups, url) for url in urls]
gevent.joinall(jobs, timeout=2)
[job.value for job in jobs]
urls = []
ooni.logger.info("finished")
f.close()
| from dns import resolver
import gevent
import os
def lookup(hostname, ns):
res = resolver.Resolver(configure=False)
res.nameservers = [ns]
answer = res.query(hostname)
ret = []
for data in answer:
ret.append(data.address)
return ret
def compare_lookups(args):
# this is just a dirty hack
address = args[0]
ooni = args[1]
ns = args[2]
exp = lookup(address, ns)
control = lookup(address, ooni.config.dns_control_server)
print address
if len(set(exp) & set(control)) > 0:
print "No tampering"
else:
print "Tampering"
print exp
print control
def run(ooni):
"""Run the test
"""
config = ooni.config
urls = []
f = open(os.path.join(config.main.assetdir, config.tests.dns_experiment))
nsf = open(os.path.join(config.main.assetdir, config.tests.dns_experiment_dns))
nss = [x.strip() for x in nsf.readlines()]
i = 0
# XXX Clean up this code
ooni.logger.info("reading file")
for url in f.readlines():
jobs = [gevent.spawn(compare_lookups, (url, ooni, ns)) for ns in nss]
gevent.joinall(jobs, timeout=2)
[job.value for job in jobs]
ooni.logger.info("finished")
f.close()
| Clean up the DNS test | Clean up the DNS test
| Python | bsd-2-clause | 0xPoly/ooni-probe,kdmurray91/ooni-probe,lordappsec/ooni-probe,0xPoly/ooni-probe,juga0/ooni-probe,0xPoly/ooni-probe,Karthikeyan-kkk/ooni-probe,kdmurray91/ooni-probe,lordappsec/ooni-probe,0xPoly/ooni-probe,Karthikeyan-kkk/ooni-probe,hackerberry/ooni-probe,juga0/ooni-probe,juga0/ooni-probe,Karthikeyan-kkk/ooni-probe,kdmurray91/ooni-probe,Karthikeyan-kkk/ooni-probe,lordappsec/ooni-probe,lordappsec/ooni-probe,hackerberry/ooni-probe,kdmurray91/ooni-probe,juga0/ooni-probe | from dns import resolver
import gevent
import os
def lookup(hostname, ns):
res = resolver.Resolver(configure=False)
res.nameservers = [ns]
answer = res.query(hostname)
ret = []
for data in answer:
ret.append(data.address)
return ret
def compare_lookups(address):
exp = lookup(address, '8.8.8.8')
control = lookup(address, '208.67.222.222')
print address
if len(set(exp) & set(control)) > 0:
print "No tampering"
else:
print "Tampering"
print exp
print control
def run(ooni):
config = ooni.config
urls = []
f = open(os.path.join(config.main.assetdir, config.tests.dns_experiment))
i = 0
ooni.logger.info("reading file")
for line in f.readlines():
urls.append(line.strip())
if i % 100 == 0:
jobs = [gevent.spawn(compare_lookups, url) for url in urls]
gevent.joinall(jobs, timeout=2)
[job.value for job in jobs]
urls = []
ooni.logger.info("finished")
f.close()
Clean up the DNS test | from dns import resolver
import gevent
import os
def lookup(hostname, ns):
res = resolver.Resolver(configure=False)
res.nameservers = [ns]
answer = res.query(hostname)
ret = []
for data in answer:
ret.append(data.address)
return ret
def compare_lookups(args):
# this is just a dirty hack
address = args[0]
ooni = args[1]
ns = args[2]
exp = lookup(address, ns)
control = lookup(address, ooni.config.dns_control_server)
print address
if len(set(exp) & set(control)) > 0:
print "No tampering"
else:
print "Tampering"
print exp
print control
def run(ooni):
"""Run the test
"""
config = ooni.config
urls = []
f = open(os.path.join(config.main.assetdir, config.tests.dns_experiment))
nsf = open(os.path.join(config.main.assetdir, config.tests.dns_experiment_dns))
nss = [x.strip() for x in nsf.readlines()]
i = 0
# XXX Clean up this code
ooni.logger.info("reading file")
for url in f.readlines():
jobs = [gevent.spawn(compare_lookups, (url, ooni, ns)) for ns in nss]
gevent.joinall(jobs, timeout=2)
[job.value for job in jobs]
ooni.logger.info("finished")
f.close()
| <commit_before>from dns import resolver
import gevent
import os
def lookup(hostname, ns):
res = resolver.Resolver(configure=False)
res.nameservers = [ns]
answer = res.query(hostname)
ret = []
for data in answer:
ret.append(data.address)
return ret
def compare_lookups(address):
exp = lookup(address, '8.8.8.8')
control = lookup(address, '208.67.222.222')
print address
if len(set(exp) & set(control)) > 0:
print "No tampering"
else:
print "Tampering"
print exp
print control
def run(ooni):
config = ooni.config
urls = []
f = open(os.path.join(config.main.assetdir, config.tests.dns_experiment))
i = 0
ooni.logger.info("reading file")
for line in f.readlines():
urls.append(line.strip())
if i % 100 == 0:
jobs = [gevent.spawn(compare_lookups, url) for url in urls]
gevent.joinall(jobs, timeout=2)
[job.value for job in jobs]
urls = []
ooni.logger.info("finished")
f.close()
<commit_msg>Clean up the DNS test<commit_after> | from dns import resolver
import gevent
import os
def lookup(hostname, ns):
res = resolver.Resolver(configure=False)
res.nameservers = [ns]
answer = res.query(hostname)
ret = []
for data in answer:
ret.append(data.address)
return ret
def compare_lookups(args):
# this is just a dirty hack
address = args[0]
ooni = args[1]
ns = args[2]
exp = lookup(address, ns)
control = lookup(address, ooni.config.dns_control_server)
print address
if len(set(exp) & set(control)) > 0:
print "No tampering"
else:
print "Tampering"
print exp
print control
def run(ooni):
"""Run the test
"""
config = ooni.config
urls = []
f = open(os.path.join(config.main.assetdir, config.tests.dns_experiment))
nsf = open(os.path.join(config.main.assetdir, config.tests.dns_experiment_dns))
nss = [x.strip() for x in nsf.readlines()]
i = 0
# XXX Clean up this code
ooni.logger.info("reading file")
for url in f.readlines():
jobs = [gevent.spawn(compare_lookups, (url, ooni, ns)) for ns in nss]
gevent.joinall(jobs, timeout=2)
[job.value for job in jobs]
ooni.logger.info("finished")
f.close()
| from dns import resolver
import gevent
import os
def lookup(hostname, ns):
res = resolver.Resolver(configure=False)
res.nameservers = [ns]
answer = res.query(hostname)
ret = []
for data in answer:
ret.append(data.address)
return ret
def compare_lookups(address):
exp = lookup(address, '8.8.8.8')
control = lookup(address, '208.67.222.222')
print address
if len(set(exp) & set(control)) > 0:
print "No tampering"
else:
print "Tampering"
print exp
print control
def run(ooni):
config = ooni.config
urls = []
f = open(os.path.join(config.main.assetdir, config.tests.dns_experiment))
i = 0
ooni.logger.info("reading file")
for line in f.readlines():
urls.append(line.strip())
if i % 100 == 0:
jobs = [gevent.spawn(compare_lookups, url) for url in urls]
gevent.joinall(jobs, timeout=2)
[job.value for job in jobs]
urls = []
ooni.logger.info("finished")
f.close()
Clean up the DNS testfrom dns import resolver
import gevent
import os
def lookup(hostname, ns):
res = resolver.Resolver(configure=False)
res.nameservers = [ns]
answer = res.query(hostname)
ret = []
for data in answer:
ret.append(data.address)
return ret
def compare_lookups(args):
# this is just a dirty hack
address = args[0]
ooni = args[1]
ns = args[2]
exp = lookup(address, ns)
control = lookup(address, ooni.config.dns_control_server)
print address
if len(set(exp) & set(control)) > 0:
print "No tampering"
else:
print "Tampering"
print exp
print control
def run(ooni):
"""Run the test
"""
config = ooni.config
urls = []
f = open(os.path.join(config.main.assetdir, config.tests.dns_experiment))
nsf = open(os.path.join(config.main.assetdir, config.tests.dns_experiment_dns))
nss = [x.strip() for x in nsf.readlines()]
i = 0
# XXX Clean up this code
ooni.logger.info("reading file")
for url in f.readlines():
jobs = [gevent.spawn(compare_lookups, (url, ooni, ns)) for ns in nss]
gevent.joinall(jobs, timeout=2)
[job.value for job in jobs]
ooni.logger.info("finished")
f.close()
| <commit_before>from dns import resolver
import gevent
import os
def lookup(hostname, ns):
res = resolver.Resolver(configure=False)
res.nameservers = [ns]
answer = res.query(hostname)
ret = []
for data in answer:
ret.append(data.address)
return ret
def compare_lookups(address):
exp = lookup(address, '8.8.8.8')
control = lookup(address, '208.67.222.222')
print address
if len(set(exp) & set(control)) > 0:
print "No tampering"
else:
print "Tampering"
print exp
print control
def run(ooni):
config = ooni.config
urls = []
f = open(os.path.join(config.main.assetdir, config.tests.dns_experiment))
i = 0
ooni.logger.info("reading file")
for line in f.readlines():
urls.append(line.strip())
if i % 100 == 0:
jobs = [gevent.spawn(compare_lookups, url) for url in urls]
gevent.joinall(jobs, timeout=2)
[job.value for job in jobs]
urls = []
ooni.logger.info("finished")
f.close()
<commit_msg>Clean up the DNS test<commit_after>from dns import resolver
import gevent
import os
def lookup(hostname, ns):
res = resolver.Resolver(configure=False)
res.nameservers = [ns]
answer = res.query(hostname)
ret = []
for data in answer:
ret.append(data.address)
return ret
def compare_lookups(args):
# this is just a dirty hack
address = args[0]
ooni = args[1]
ns = args[2]
exp = lookup(address, ns)
control = lookup(address, ooni.config.dns_control_server)
print address
if len(set(exp) & set(control)) > 0:
print "No tampering"
else:
print "Tampering"
print exp
print control
def run(ooni):
"""Run the test
"""
config = ooni.config
urls = []
f = open(os.path.join(config.main.assetdir, config.tests.dns_experiment))
nsf = open(os.path.join(config.main.assetdir, config.tests.dns_experiment_dns))
nss = [x.strip() for x in nsf.readlines()]
i = 0
# XXX Clean up this code
ooni.logger.info("reading file")
for url in f.readlines():
jobs = [gevent.spawn(compare_lookups, (url, ooni, ns)) for ns in nss]
gevent.joinall(jobs, timeout=2)
[job.value for job in jobs]
ooni.logger.info("finished")
f.close()
|
4f59ac92fe9aaff621898ee61e76a434c1511e17 | django_wysiwyg/__init__.py | django_wysiwyg/__init__.py | __author__ = 'Daniel Greenfeld, Chris Adams'
VERSION = (0, 5, 1)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
return version
__version__ = get_version()
def clean_html():
raise ImportError("clean_html requires html5lib or pytidylib")
def sanitize_html():
raise ImportError("sanitize_html requires html5lib")
try:
import html5lib
from utils import clean_html5lib as clean_html
from utils import sanitize_html5lib as sanitize_html
except ImportError:
try:
import tidylib
from utils import clean_tidylib as clean_html
except ImportError:
pass | __author__ = 'Daniel Greenfeld, Chris Adams'
VERSION = (0, 5, 1)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
return version
__version__ = get_version()
def clean_html():
raise ImportError("clean_html requires html5lib or pytidylib")
def sanitize_html():
raise ImportError("sanitize_html requires html5lib")
try:
import html5lib
from .utils import clean_html5lib as clean_html
from .utils import sanitize_html5lib as sanitize_html
except ImportError:
try:
import tidylib
from .utils import clean_tidylib as clean_html
except ImportError:
pass
| Use relative imports, Python 2.6 style | Use relative imports, Python 2.6 style
| Python | mit | saydulk/django-wysiwyg,pydanny/django-wysiwyg,saydulk/django-wysiwyg,pydanny/django-wysiwyg | __author__ = 'Daniel Greenfeld, Chris Adams'
VERSION = (0, 5, 1)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
return version
__version__ = get_version()
def clean_html():
raise ImportError("clean_html requires html5lib or pytidylib")
def sanitize_html():
raise ImportError("sanitize_html requires html5lib")
try:
import html5lib
from utils import clean_html5lib as clean_html
from utils import sanitize_html5lib as sanitize_html
except ImportError:
try:
import tidylib
from utils import clean_tidylib as clean_html
except ImportError:
passUse relative imports, Python 2.6 style | __author__ = 'Daniel Greenfeld, Chris Adams'
VERSION = (0, 5, 1)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
return version
__version__ = get_version()
def clean_html():
raise ImportError("clean_html requires html5lib or pytidylib")
def sanitize_html():
raise ImportError("sanitize_html requires html5lib")
try:
import html5lib
from .utils import clean_html5lib as clean_html
from .utils import sanitize_html5lib as sanitize_html
except ImportError:
try:
import tidylib
from .utils import clean_tidylib as clean_html
except ImportError:
pass
| <commit_before>__author__ = 'Daniel Greenfeld, Chris Adams'
VERSION = (0, 5, 1)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
return version
__version__ = get_version()
def clean_html():
raise ImportError("clean_html requires html5lib or pytidylib")
def sanitize_html():
raise ImportError("sanitize_html requires html5lib")
try:
import html5lib
from utils import clean_html5lib as clean_html
from utils import sanitize_html5lib as sanitize_html
except ImportError:
try:
import tidylib
from utils import clean_tidylib as clean_html
except ImportError:
pass<commit_msg>Use relative imports, Python 2.6 style<commit_after> | __author__ = 'Daniel Greenfeld, Chris Adams'
VERSION = (0, 5, 1)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
return version
__version__ = get_version()
def clean_html():
raise ImportError("clean_html requires html5lib or pytidylib")
def sanitize_html():
raise ImportError("sanitize_html requires html5lib")
try:
import html5lib
from .utils import clean_html5lib as clean_html
from .utils import sanitize_html5lib as sanitize_html
except ImportError:
try:
import tidylib
from .utils import clean_tidylib as clean_html
except ImportError:
pass
| __author__ = 'Daniel Greenfeld, Chris Adams'
VERSION = (0, 5, 1)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
return version
__version__ = get_version()
def clean_html():
raise ImportError("clean_html requires html5lib or pytidylib")
def sanitize_html():
raise ImportError("sanitize_html requires html5lib")
try:
import html5lib
from utils import clean_html5lib as clean_html
from utils import sanitize_html5lib as sanitize_html
except ImportError:
try:
import tidylib
from utils import clean_tidylib as clean_html
except ImportError:
passUse relative imports, Python 2.6 style__author__ = 'Daniel Greenfeld, Chris Adams'
VERSION = (0, 5, 1)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
return version
__version__ = get_version()
def clean_html():
raise ImportError("clean_html requires html5lib or pytidylib")
def sanitize_html():
raise ImportError("sanitize_html requires html5lib")
try:
import html5lib
from .utils import clean_html5lib as clean_html
from .utils import sanitize_html5lib as sanitize_html
except ImportError:
try:
import tidylib
from .utils import clean_tidylib as clean_html
except ImportError:
pass
| <commit_before>__author__ = 'Daniel Greenfeld, Chris Adams'
VERSION = (0, 5, 1)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
return version
__version__ = get_version()
def clean_html():
raise ImportError("clean_html requires html5lib or pytidylib")
def sanitize_html():
raise ImportError("sanitize_html requires html5lib")
try:
import html5lib
from utils import clean_html5lib as clean_html
from utils import sanitize_html5lib as sanitize_html
except ImportError:
try:
import tidylib
from utils import clean_tidylib as clean_html
except ImportError:
pass<commit_msg>Use relative imports, Python 2.6 style<commit_after>__author__ = 'Daniel Greenfeld, Chris Adams'
VERSION = (0, 5, 1)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
return version
__version__ = get_version()
def clean_html():
raise ImportError("clean_html requires html5lib or pytidylib")
def sanitize_html():
raise ImportError("sanitize_html requires html5lib")
try:
import html5lib
from .utils import clean_html5lib as clean_html
from .utils import sanitize_html5lib as sanitize_html
except ImportError:
try:
import tidylib
from .utils import clean_tidylib as clean_html
except ImportError:
pass
|
e5c1cccaa08b519a19b6900db1376f2b75113668 | admin/urls.py | admin/urls.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from .views import AdminHandler, CubeHandler, ConnectionHandler
INCLUDE_URLS = [
(r"/admin", AdminHandler),
(r"/admin/connection", ConnectionHandler),
(r"/admin/cube/?(?P<slug>[\w-]+)?", CubeHandler),
]
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from .views import AdminHandler, CubeHandler, ConnectionHandler
from .views import ElementHandler
INCLUDE_URLS = [
(r"/admin", AdminHandler),
(r"/admin/connection", ConnectionHandler),
(r"/admin/cube/?(?P<slug>[\w-]+)?", CubeHandler),
(r"/admin/element/?(?P<slug>[\w-]+)?", ElementHandler),
]
| Add Element admin view in admin url | Add Element admin view in admin url
| Python | mit | jgabriellima/mining,AndrzejR/mining,mining/mining,mlgruby/mining,seagoat/mining,mlgruby/mining,jgabriellima/mining,mlgruby/mining,avelino/mining,chrisdamba/mining,seagoat/mining,AndrzejR/mining,chrisdamba/mining,avelino/mining,mining/mining | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from .views import AdminHandler, CubeHandler, ConnectionHandler
INCLUDE_URLS = [
(r"/admin", AdminHandler),
(r"/admin/connection", ConnectionHandler),
(r"/admin/cube/?(?P<slug>[\w-]+)?", CubeHandler),
]
Add Element admin view in admin url | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from .views import AdminHandler, CubeHandler, ConnectionHandler
from .views import ElementHandler
INCLUDE_URLS = [
(r"/admin", AdminHandler),
(r"/admin/connection", ConnectionHandler),
(r"/admin/cube/?(?P<slug>[\w-]+)?", CubeHandler),
(r"/admin/element/?(?P<slug>[\w-]+)?", ElementHandler),
]
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .views import AdminHandler, CubeHandler, ConnectionHandler
INCLUDE_URLS = [
(r"/admin", AdminHandler),
(r"/admin/connection", ConnectionHandler),
(r"/admin/cube/?(?P<slug>[\w-]+)?", CubeHandler),
]
<commit_msg>Add Element admin view in admin url<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from .views import AdminHandler, CubeHandler, ConnectionHandler
from .views import ElementHandler
INCLUDE_URLS = [
(r"/admin", AdminHandler),
(r"/admin/connection", ConnectionHandler),
(r"/admin/cube/?(?P<slug>[\w-]+)?", CubeHandler),
(r"/admin/element/?(?P<slug>[\w-]+)?", ElementHandler),
]
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from .views import AdminHandler, CubeHandler, ConnectionHandler
INCLUDE_URLS = [
(r"/admin", AdminHandler),
(r"/admin/connection", ConnectionHandler),
(r"/admin/cube/?(?P<slug>[\w-]+)?", CubeHandler),
]
Add Element admin view in admin url#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .views import AdminHandler, CubeHandler, ConnectionHandler
from .views import ElementHandler
INCLUDE_URLS = [
(r"/admin", AdminHandler),
(r"/admin/connection", ConnectionHandler),
(r"/admin/cube/?(?P<slug>[\w-]+)?", CubeHandler),
(r"/admin/element/?(?P<slug>[\w-]+)?", ElementHandler),
]
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .views import AdminHandler, CubeHandler, ConnectionHandler
INCLUDE_URLS = [
(r"/admin", AdminHandler),
(r"/admin/connection", ConnectionHandler),
(r"/admin/cube/?(?P<slug>[\w-]+)?", CubeHandler),
]
<commit_msg>Add Element admin view in admin url<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .views import AdminHandler, CubeHandler, ConnectionHandler
from .views import ElementHandler
INCLUDE_URLS = [
(r"/admin", AdminHandler),
(r"/admin/connection", ConnectionHandler),
(r"/admin/cube/?(?P<slug>[\w-]+)?", CubeHandler),
(r"/admin/element/?(?P<slug>[\w-]+)?", ElementHandler),
]
|
afb97310cba3c32d28f3b21b0895c53183914326 | interface.py | interface.py | import npyscreen
class App(npyscreen.NPSApp):
def main(self):
form = npyscreen.FormBaseNew(name='EKOiE')
form.add_widget(npyscreen.TitleSelectOne, name='Track number', values=[1, 2, 3, 4, 5])
form.edit()
if __name__ == '__main__':
app = App()
app.run()
| from contextlib import contextmanager
import os
import sys
import npyscreen
@contextmanager
def use_xterm():
"""Helper setting proper TERM value
Required for colors to work under 16-color tmux.
"""
old_value = os.environ.get('TERM')
os.environ['TERM'] = 'xterm'
yield
if old_value is not None:
os.environ['TERM'] = old_value
def quit():
"""Close application gracefully"""
sys.exit(0)
class QuitPopup(npyscreen.ActionPopup):
"""Popup used for exiting the app"""
def on_ok(self):
self.parentApp.setNextForm(None)
def on_cancel(self):
self.parentApp.switchFormPrevious()
class MyForm(npyscreen.FormBaseNew):
def h_quit(self, key):
self.parentApp.switchForm('quit_popup')
def set_up_handlers(self):
super(MyForm, self).set_up_handlers()
keys = {
'q': self.h_quit,
}
# Make upperkeys available, too!
for key, func in list(keys.items()):
keys[key.upper()] = func
self.handlers.update(keys)
class App(npyscreen.NPSAppManaged):
def onStart(self):
form = self.addForm('MAIN', MyForm, name='EKOiE')
form.add_widget(
npyscreen.TitleSelectOne,
name='Track number',
values=[1, 2, 3, 4, 5],
)
quit_popup = self.addForm(
'quit_popup',
QuitPopup,
name='Really quit?',
lines=5,
)
quit_popup.show_atx = 40
quit_popup.show_aty = 20
self.setNextForm('MAIN')
if __name__ == '__main__':
with use_xterm():
app = App()
try:
app.run()
except KeyboardInterrupt:
quit()
| Quit popup, NPSAppManaged and TERM | Quit popup, NPSAppManaged and TERM
Main class (App) now extends NPSAppManaged, and forms are added in
kosher way. Quit popup is also implemented and available upon
pressing `q` key (`Q` key too!).
Additionally, application sets TERM env variable to proper value
(`xterm`) in order for colors to properly show up under tmux.
| Python | mit | modrzew/ekoie | import npyscreen
class App(npyscreen.NPSApp):
def main(self):
form = npyscreen.FormBaseNew(name='EKOiE')
form.add_widget(npyscreen.TitleSelectOne, name='Track number', values=[1, 2, 3, 4, 5])
form.edit()
if __name__ == '__main__':
app = App()
app.run()
Quit popup, NPSAppManaged and TERM
Main class (App) now extends NPSAppManaged, and forms are added in
kosher way. Quit popup is also implemented and available upon
pressing `q` key (`Q` key too!).
Additionally, application sets TERM env variable to proper value
(`xterm`) in order for colors to properly show up under tmux. | from contextlib import contextmanager
import os
import sys
import npyscreen
@contextmanager
def use_xterm():
"""Helper setting proper TERM value
Required for colors to work under 16-color tmux.
"""
old_value = os.environ.get('TERM')
os.environ['TERM'] = 'xterm'
yield
if old_value is not None:
os.environ['TERM'] = old_value
def quit():
"""Close application gracefully"""
sys.exit(0)
class QuitPopup(npyscreen.ActionPopup):
"""Popup used for exiting the app"""
def on_ok(self):
self.parentApp.setNextForm(None)
def on_cancel(self):
self.parentApp.switchFormPrevious()
class MyForm(npyscreen.FormBaseNew):
def h_quit(self, key):
self.parentApp.switchForm('quit_popup')
def set_up_handlers(self):
super(MyForm, self).set_up_handlers()
keys = {
'q': self.h_quit,
}
# Make upperkeys available, too!
for key, func in list(keys.items()):
keys[key.upper()] = func
self.handlers.update(keys)
class App(npyscreen.NPSAppManaged):
def onStart(self):
form = self.addForm('MAIN', MyForm, name='EKOiE')
form.add_widget(
npyscreen.TitleSelectOne,
name='Track number',
values=[1, 2, 3, 4, 5],
)
quit_popup = self.addForm(
'quit_popup',
QuitPopup,
name='Really quit?',
lines=5,
)
quit_popup.show_atx = 40
quit_popup.show_aty = 20
self.setNextForm('MAIN')
if __name__ == '__main__':
with use_xterm():
app = App()
try:
app.run()
except KeyboardInterrupt:
quit()
| <commit_before>import npyscreen
class App(npyscreen.NPSApp):
def main(self):
form = npyscreen.FormBaseNew(name='EKOiE')
form.add_widget(npyscreen.TitleSelectOne, name='Track number', values=[1, 2, 3, 4, 5])
form.edit()
if __name__ == '__main__':
app = App()
app.run()
<commit_msg>Quit popup, NPSAppManaged and TERM
Main class (App) now extends NPSAppManaged, and forms are added in
kosher way. Quit popup is also implemented and available upon
pressing `q` key (`Q` key too!).
Additionally, application sets TERM env variable to proper value
(`xterm`) in order for colors to properly show up under tmux.<commit_after> | from contextlib import contextmanager
import os
import sys
import npyscreen
@contextmanager
def use_xterm():
"""Helper setting proper TERM value
Required for colors to work under 16-color tmux.
"""
old_value = os.environ.get('TERM')
os.environ['TERM'] = 'xterm'
yield
if old_value is not None:
os.environ['TERM'] = old_value
def quit():
"""Close application gracefully"""
sys.exit(0)
class QuitPopup(npyscreen.ActionPopup):
"""Popup used for exiting the app"""
def on_ok(self):
self.parentApp.setNextForm(None)
def on_cancel(self):
self.parentApp.switchFormPrevious()
class MyForm(npyscreen.FormBaseNew):
def h_quit(self, key):
self.parentApp.switchForm('quit_popup')
def set_up_handlers(self):
super(MyForm, self).set_up_handlers()
keys = {
'q': self.h_quit,
}
# Make upperkeys available, too!
for key, func in list(keys.items()):
keys[key.upper()] = func
self.handlers.update(keys)
class App(npyscreen.NPSAppManaged):
def onStart(self):
form = self.addForm('MAIN', MyForm, name='EKOiE')
form.add_widget(
npyscreen.TitleSelectOne,
name='Track number',
values=[1, 2, 3, 4, 5],
)
quit_popup = self.addForm(
'quit_popup',
QuitPopup,
name='Really quit?',
lines=5,
)
quit_popup.show_atx = 40
quit_popup.show_aty = 20
self.setNextForm('MAIN')
if __name__ == '__main__':
with use_xterm():
app = App()
try:
app.run()
except KeyboardInterrupt:
quit()
| import npyscreen
class App(npyscreen.NPSApp):
def main(self):
form = npyscreen.FormBaseNew(name='EKOiE')
form.add_widget(npyscreen.TitleSelectOne, name='Track number', values=[1, 2, 3, 4, 5])
form.edit()
if __name__ == '__main__':
app = App()
app.run()
Quit popup, NPSAppManaged and TERM
Main class (App) now extends NPSAppManaged, and forms are added in
kosher way. Quit popup is also implemented and available upon
pressing `q` key (`Q` key too!).
Additionally, application sets TERM env variable to proper value
(`xterm`) in order for colors to properly show up under tmux.from contextlib import contextmanager
import os
import sys
import npyscreen
@contextmanager
def use_xterm():
"""Helper setting proper TERM value
Required for colors to work under 16-color tmux.
"""
old_value = os.environ.get('TERM')
os.environ['TERM'] = 'xterm'
yield
if old_value is not None:
os.environ['TERM'] = old_value
def quit():
"""Close application gracefully"""
sys.exit(0)
class QuitPopup(npyscreen.ActionPopup):
"""Popup used for exiting the app"""
def on_ok(self):
self.parentApp.setNextForm(None)
def on_cancel(self):
self.parentApp.switchFormPrevious()
class MyForm(npyscreen.FormBaseNew):
def h_quit(self, key):
self.parentApp.switchForm('quit_popup')
def set_up_handlers(self):
super(MyForm, self).set_up_handlers()
keys = {
'q': self.h_quit,
}
# Make upperkeys available, too!
for key, func in list(keys.items()):
keys[key.upper()] = func
self.handlers.update(keys)
class App(npyscreen.NPSAppManaged):
def onStart(self):
form = self.addForm('MAIN', MyForm, name='EKOiE')
form.add_widget(
npyscreen.TitleSelectOne,
name='Track number',
values=[1, 2, 3, 4, 5],
)
quit_popup = self.addForm(
'quit_popup',
QuitPopup,
name='Really quit?',
lines=5,
)
quit_popup.show_atx = 40
quit_popup.show_aty = 20
self.setNextForm('MAIN')
if __name__ == '__main__':
with use_xterm():
app = App()
try:
app.run()
except KeyboardInterrupt:
quit()
| <commit_before>import npyscreen
class App(npyscreen.NPSApp):
def main(self):
form = npyscreen.FormBaseNew(name='EKOiE')
form.add_widget(npyscreen.TitleSelectOne, name='Track number', values=[1, 2, 3, 4, 5])
form.edit()
if __name__ == '__main__':
app = App()
app.run()
<commit_msg>Quit popup, NPSAppManaged and TERM
Main class (App) now extends NPSAppManaged, and forms are added in
kosher way. Quit popup is also implemented and available upon
pressing `q` key (`Q` key too!).
Additionally, application sets TERM env variable to proper value
(`xterm`) in order for colors to properly show up under tmux.<commit_after>from contextlib import contextmanager
import os
import sys
import npyscreen
@contextmanager
def use_xterm():
"""Helper setting proper TERM value
Required for colors to work under 16-color tmux.
"""
old_value = os.environ.get('TERM')
os.environ['TERM'] = 'xterm'
yield
if old_value is not None:
os.environ['TERM'] = old_value
def quit():
"""Close application gracefully"""
sys.exit(0)
class QuitPopup(npyscreen.ActionPopup):
"""Popup used for exiting the app"""
def on_ok(self):
self.parentApp.setNextForm(None)
def on_cancel(self):
self.parentApp.switchFormPrevious()
class MyForm(npyscreen.FormBaseNew):
def h_quit(self, key):
self.parentApp.switchForm('quit_popup')
def set_up_handlers(self):
super(MyForm, self).set_up_handlers()
keys = {
'q': self.h_quit,
}
# Make upperkeys available, too!
for key, func in list(keys.items()):
keys[key.upper()] = func
self.handlers.update(keys)
class App(npyscreen.NPSAppManaged):
def onStart(self):
form = self.addForm('MAIN', MyForm, name='EKOiE')
form.add_widget(
npyscreen.TitleSelectOne,
name='Track number',
values=[1, 2, 3, 4, 5],
)
quit_popup = self.addForm(
'quit_popup',
QuitPopup,
name='Really quit?',
lines=5,
)
quit_popup.show_atx = 40
quit_popup.show_aty = 20
self.setNextForm('MAIN')
if __name__ == '__main__':
with use_xterm():
app = App()
try:
app.run()
except KeyboardInterrupt:
quit()
|
71b2f82d99ffeda9d9435d279c7512fcbaaf108f | trackpy/tests/test_misc.py | trackpy/tests/test_misc.py | from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import os
import unittest
import warnings
import trackpy
import trackpy.diag
path, _ = os.path.split(os.path.abspath(__file__))
class DiagTests(unittest.TestCase):
def test_performance_report(self):
trackpy.diag.performance_report()
def test_dependencies(self):
trackpy.diag.dependencies()
class APITests(unittest.TestCase):
def test_pims_deprecation(self):
with warnings.catch_warnings(True) as w:
warnings.simplefilter('always')
_ = trackpy.ImageSequence(os.path.join(path, 'video/image_sequence/*.png'))
assert len(w) == 1
| from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import os
import unittest
import warnings
import pims
import trackpy
import trackpy.diag
path, _ = os.path.split(os.path.abspath(__file__))
class DiagTests(unittest.TestCase):
def test_performance_report(self):
trackpy.diag.performance_report()
def test_dependencies(self):
trackpy.diag.dependencies()
class APITests(unittest.TestCase):
def test_pims_deprecation(self):
"""Using a pims class should work, but generate a warning.
The inclusion of these classes (and therefore this test) in
trackpy is deprecated as of v0.3 and will be removed in a future
version."""
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always', UserWarning)
imseq = trackpy.ImageSequence(os.path.join(path, 'video/image_sequence/*.png'))
assert isinstance(imseq, pims.ImageSequence)
assert len(w) == 1
| Fix pims warning test under Py3 | TST: Fix pims warning test under Py3
| Python | bsd-3-clause | daniorerio/trackpy,daniorerio/trackpy | from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import os
import unittest
import warnings
import trackpy
import trackpy.diag
path, _ = os.path.split(os.path.abspath(__file__))
class DiagTests(unittest.TestCase):
def test_performance_report(self):
trackpy.diag.performance_report()
def test_dependencies(self):
trackpy.diag.dependencies()
class APITests(unittest.TestCase):
def test_pims_deprecation(self):
with warnings.catch_warnings(True) as w:
warnings.simplefilter('always')
_ = trackpy.ImageSequence(os.path.join(path, 'video/image_sequence/*.png'))
assert len(w) == 1
TST: Fix pims warning test under Py3 | from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import os
import unittest
import warnings
import pims
import trackpy
import trackpy.diag
path, _ = os.path.split(os.path.abspath(__file__))
class DiagTests(unittest.TestCase):
def test_performance_report(self):
trackpy.diag.performance_report()
def test_dependencies(self):
trackpy.diag.dependencies()
class APITests(unittest.TestCase):
def test_pims_deprecation(self):
"""Using a pims class should work, but generate a warning.
The inclusion of these classes (and therefore this test) in
trackpy is deprecated as of v0.3 and will be removed in a future
version."""
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always', UserWarning)
imseq = trackpy.ImageSequence(os.path.join(path, 'video/image_sequence/*.png'))
assert isinstance(imseq, pims.ImageSequence)
assert len(w) == 1
| <commit_before>from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import os
import unittest
import warnings
import trackpy
import trackpy.diag
path, _ = os.path.split(os.path.abspath(__file__))
class DiagTests(unittest.TestCase):
def test_performance_report(self):
trackpy.diag.performance_report()
def test_dependencies(self):
trackpy.diag.dependencies()
class APITests(unittest.TestCase):
def test_pims_deprecation(self):
with warnings.catch_warnings(True) as w:
warnings.simplefilter('always')
_ = trackpy.ImageSequence(os.path.join(path, 'video/image_sequence/*.png'))
assert len(w) == 1
<commit_msg>TST: Fix pims warning test under Py3<commit_after> | from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import os
import unittest
import warnings
import pims
import trackpy
import trackpy.diag
path, _ = os.path.split(os.path.abspath(__file__))
class DiagTests(unittest.TestCase):
def test_performance_report(self):
trackpy.diag.performance_report()
def test_dependencies(self):
trackpy.diag.dependencies()
class APITests(unittest.TestCase):
def test_pims_deprecation(self):
"""Using a pims class should work, but generate a warning.
The inclusion of these classes (and therefore this test) in
trackpy is deprecated as of v0.3 and will be removed in a future
version."""
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always', UserWarning)
imseq = trackpy.ImageSequence(os.path.join(path, 'video/image_sequence/*.png'))
assert isinstance(imseq, pims.ImageSequence)
assert len(w) == 1
| from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import os
import unittest
import warnings
import trackpy
import trackpy.diag
path, _ = os.path.split(os.path.abspath(__file__))
class DiagTests(unittest.TestCase):
def test_performance_report(self):
trackpy.diag.performance_report()
def test_dependencies(self):
trackpy.diag.dependencies()
class APITests(unittest.TestCase):
def test_pims_deprecation(self):
with warnings.catch_warnings(True) as w:
warnings.simplefilter('always')
_ = trackpy.ImageSequence(os.path.join(path, 'video/image_sequence/*.png'))
assert len(w) == 1
TST: Fix pims warning test under Py3from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import os
import unittest
import warnings
import pims
import trackpy
import trackpy.diag
path, _ = os.path.split(os.path.abspath(__file__))
class DiagTests(unittest.TestCase):
def test_performance_report(self):
trackpy.diag.performance_report()
def test_dependencies(self):
trackpy.diag.dependencies()
class APITests(unittest.TestCase):
def test_pims_deprecation(self):
"""Using a pims class should work, but generate a warning.
The inclusion of these classes (and therefore this test) in
trackpy is deprecated as of v0.3 and will be removed in a future
version."""
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always', UserWarning)
imseq = trackpy.ImageSequence(os.path.join(path, 'video/image_sequence/*.png'))
assert isinstance(imseq, pims.ImageSequence)
assert len(w) == 1
| <commit_before>from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import os
import unittest
import warnings
import trackpy
import trackpy.diag
path, _ = os.path.split(os.path.abspath(__file__))
class DiagTests(unittest.TestCase):
def test_performance_report(self):
trackpy.diag.performance_report()
def test_dependencies(self):
trackpy.diag.dependencies()
class APITests(unittest.TestCase):
def test_pims_deprecation(self):
with warnings.catch_warnings(True) as w:
warnings.simplefilter('always')
_ = trackpy.ImageSequence(os.path.join(path, 'video/image_sequence/*.png'))
assert len(w) == 1
<commit_msg>TST: Fix pims warning test under Py3<commit_after>from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import os
import unittest
import warnings
import pims
import trackpy
import trackpy.diag
path, _ = os.path.split(os.path.abspath(__file__))
class DiagTests(unittest.TestCase):
def test_performance_report(self):
trackpy.diag.performance_report()
def test_dependencies(self):
trackpy.diag.dependencies()
class APITests(unittest.TestCase):
def test_pims_deprecation(self):
"""Using a pims class should work, but generate a warning.
The inclusion of these classes (and therefore this test) in
trackpy is deprecated as of v0.3 and will be removed in a future
version."""
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always', UserWarning)
imseq = trackpy.ImageSequence(os.path.join(path, 'video/image_sequence/*.png'))
assert isinstance(imseq, pims.ImageSequence)
assert len(w) == 1
|
6a40aab945e28c509e24ede6a48b7ac1f3b89ce2 | product_isp/__manifest__.py | product_isp/__manifest__.py | # Copyright (C) 2019 - TODAY, Open Source Integrators
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': 'Product ISP',
'version': '12.0.1.0.0',
'license': 'AGPL-3',
'summary': 'Assign ISPs to Products',
'author': 'Open Source Integrators, Odoo Community Association (OCA)',
"website": 'https://github.com/OCA/vertical-isp',
'depends': [
'stock',
'base_phone_rate'
],
'data': [
'views/product_product.xml',
],
'installable': True,
'development_status': 'Beta',
'maintainers': [
'wolfhall',
'max3903',
'osi-scampbell',
],
}
| # Copyright (C) 2019 - TODAY, Open Source Integrators
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': 'Product ISP',
'version': '12.0.1.0.0',
'license': 'AGPL-3',
'summary': 'Assign ISPs to Products',
'author': 'Open Source Integrators, Odoo Community Association (OCA)',
"website": 'https://github.com/OCA/vertical-isp',
'depends': [
'product',
'base_phone_rate'
],
'data': [
'views/product_product.xml',
],
'installable': True,
'development_status': 'Beta',
'maintainers': [
'wolfhall',
'max3903',
'osi-scampbell',
],
}
| Remove unneeded dependency on Inventory | [IMP] Remove unneeded dependency on Inventory
| Python | agpl-3.0 | OCA/vertical-isp,OCA/vertical-isp | # Copyright (C) 2019 - TODAY, Open Source Integrators
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': 'Product ISP',
'version': '12.0.1.0.0',
'license': 'AGPL-3',
'summary': 'Assign ISPs to Products',
'author': 'Open Source Integrators, Odoo Community Association (OCA)',
"website": 'https://github.com/OCA/vertical-isp',
'depends': [
'stock',
'base_phone_rate'
],
'data': [
'views/product_product.xml',
],
'installable': True,
'development_status': 'Beta',
'maintainers': [
'wolfhall',
'max3903',
'osi-scampbell',
],
}
[IMP] Remove unneeded dependency on Inventory | # Copyright (C) 2019 - TODAY, Open Source Integrators
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': 'Product ISP',
'version': '12.0.1.0.0',
'license': 'AGPL-3',
'summary': 'Assign ISPs to Products',
'author': 'Open Source Integrators, Odoo Community Association (OCA)',
"website": 'https://github.com/OCA/vertical-isp',
'depends': [
'product',
'base_phone_rate'
],
'data': [
'views/product_product.xml',
],
'installable': True,
'development_status': 'Beta',
'maintainers': [
'wolfhall',
'max3903',
'osi-scampbell',
],
}
| <commit_before># Copyright (C) 2019 - TODAY, Open Source Integrators
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': 'Product ISP',
'version': '12.0.1.0.0',
'license': 'AGPL-3',
'summary': 'Assign ISPs to Products',
'author': 'Open Source Integrators, Odoo Community Association (OCA)',
"website": 'https://github.com/OCA/vertical-isp',
'depends': [
'stock',
'base_phone_rate'
],
'data': [
'views/product_product.xml',
],
'installable': True,
'development_status': 'Beta',
'maintainers': [
'wolfhall',
'max3903',
'osi-scampbell',
],
}
<commit_msg>[IMP] Remove unneeded dependency on Inventory<commit_after> | # Copyright (C) 2019 - TODAY, Open Source Integrators
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': 'Product ISP',
'version': '12.0.1.0.0',
'license': 'AGPL-3',
'summary': 'Assign ISPs to Products',
'author': 'Open Source Integrators, Odoo Community Association (OCA)',
"website": 'https://github.com/OCA/vertical-isp',
'depends': [
'product',
'base_phone_rate'
],
'data': [
'views/product_product.xml',
],
'installable': True,
'development_status': 'Beta',
'maintainers': [
'wolfhall',
'max3903',
'osi-scampbell',
],
}
| # Copyright (C) 2019 - TODAY, Open Source Integrators
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': 'Product ISP',
'version': '12.0.1.0.0',
'license': 'AGPL-3',
'summary': 'Assign ISPs to Products',
'author': 'Open Source Integrators, Odoo Community Association (OCA)',
"website": 'https://github.com/OCA/vertical-isp',
'depends': [
'stock',
'base_phone_rate'
],
'data': [
'views/product_product.xml',
],
'installable': True,
'development_status': 'Beta',
'maintainers': [
'wolfhall',
'max3903',
'osi-scampbell',
],
}
[IMP] Remove unneeded dependency on Inventory# Copyright (C) 2019 - TODAY, Open Source Integrators
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': 'Product ISP',
'version': '12.0.1.0.0',
'license': 'AGPL-3',
'summary': 'Assign ISPs to Products',
'author': 'Open Source Integrators, Odoo Community Association (OCA)',
"website": 'https://github.com/OCA/vertical-isp',
'depends': [
'product',
'base_phone_rate'
],
'data': [
'views/product_product.xml',
],
'installable': True,
'development_status': 'Beta',
'maintainers': [
'wolfhall',
'max3903',
'osi-scampbell',
],
}
| <commit_before># Copyright (C) 2019 - TODAY, Open Source Integrators
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': 'Product ISP',
'version': '12.0.1.0.0',
'license': 'AGPL-3',
'summary': 'Assign ISPs to Products',
'author': 'Open Source Integrators, Odoo Community Association (OCA)',
"website": 'https://github.com/OCA/vertical-isp',
'depends': [
'stock',
'base_phone_rate'
],
'data': [
'views/product_product.xml',
],
'installable': True,
'development_status': 'Beta',
'maintainers': [
'wolfhall',
'max3903',
'osi-scampbell',
],
}
<commit_msg>[IMP] Remove unneeded dependency on Inventory<commit_after># Copyright (C) 2019 - TODAY, Open Source Integrators
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': 'Product ISP',
'version': '12.0.1.0.0',
'license': 'AGPL-3',
'summary': 'Assign ISPs to Products',
'author': 'Open Source Integrators, Odoo Community Association (OCA)',
"website": 'https://github.com/OCA/vertical-isp',
'depends': [
'product',
'base_phone_rate'
],
'data': [
'views/product_product.xml',
],
'installable': True,
'development_status': 'Beta',
'maintainers': [
'wolfhall',
'max3903',
'osi-scampbell',
],
}
|
f49276f78b1b303df5fed063e226ee78165baff5 | spam_lists/exceptions.py | spam_lists/exceptions.py | # -*- coding: utf-8 -*-
'''
This module contains all classes of exceptions raised
by the library
'''
from __future__ import unicode_literals
class SpamListsError(Exception):
'''There was an error during testing a url or host'''
class SpamListsValueError(SpamListsError, ValueError):
'''An inapropriate value was used in SpamLists library '''
class UnknownCodeError(SpamListsError, KeyError):
'''The classification code from the service was not recognized'''
class UnathorizedAPIKeyError(SpamListsError, ValueError):
'''The API key used to query the service was not authorized'''
class InvalidHostError(SpamListsError, ValueError):
'''The value is not a valid host'''
class InvalidIPError(InvalidHostError):
''' The value is not a valid IP address'''
class InvalidIPv4Error(InvalidIPError):
'''The value is not a valid IPv4 address'''
class InvalidIPv6Error(InvalidIPError):
'''The value is not a valid IPv6 address'''
class InvalidHostnameError(InvalidHostError):
'''The value is not a valid hostname'''
class InvalidURLError(SpamListsError, ValueError):
'''The value is not a valid url'''
| # -*- coding: utf-8 -*-
'''
This module contains all classes of exceptions raised
by the library
'''
from __future__ import unicode_literals
class SpamListsError(Exception):
'''There was an error during testing a url or host'''
class SpamListsValueError(SpamListsError, ValueError):
'''An inapropriate value was used in SpamLists library '''
class UnknownCodeError(SpamListsError, KeyError):
'''The classification code from the service was not recognized'''
class UnathorizedAPIKeyError(SpamListsValueError):
'''The API key used to query the service was not authorized'''
class InvalidHostError(SpamListsValueError):
'''The value is not a valid host'''
class InvalidIPError(InvalidHostError):
''' The value is not a valid IP address'''
class InvalidIPv4Error(InvalidIPError):
'''The value is not a valid IPv4 address'''
class InvalidIPv6Error(InvalidIPError):
'''The value is not a valid IPv6 address'''
class InvalidHostnameError(InvalidHostError):
'''The value is not a valid hostname'''
class InvalidURLError(SpamListsValueError):
'''The value is not a valid url'''
| Change bases of exception classes extending SpamListsError and ValueError | Change bases of exception classes extending SpamListsError and ValueError
This commit removes SpamListsError and ValueError as direct base classes
of other exception classes (except SpamListsValueError), and replaces
them with SpamListsValueError.
| Python | mit | piotr-rusin/spam-lists | # -*- coding: utf-8 -*-
'''
This module contains all classes of exceptions raised
by the library
'''
from __future__ import unicode_literals
class SpamListsError(Exception):
'''There was an error during testing a url or host'''
class SpamListsValueError(SpamListsError, ValueError):
'''An inapropriate value was used in SpamLists library '''
class UnknownCodeError(SpamListsError, KeyError):
'''The classification code from the service was not recognized'''
class UnathorizedAPIKeyError(SpamListsError, ValueError):
'''The API key used to query the service was not authorized'''
class InvalidHostError(SpamListsError, ValueError):
'''The value is not a valid host'''
class InvalidIPError(InvalidHostError):
''' The value is not a valid IP address'''
class InvalidIPv4Error(InvalidIPError):
'''The value is not a valid IPv4 address'''
class InvalidIPv6Error(InvalidIPError):
'''The value is not a valid IPv6 address'''
class InvalidHostnameError(InvalidHostError):
'''The value is not a valid hostname'''
class InvalidURLError(SpamListsError, ValueError):
'''The value is not a valid url'''
Change bases of exception classes extending SpamListsError and ValueError
This commit removes SpamListsError and ValueError as direct base classes
of other exception classes (except SpamListsValueError), and replaces
them with SpamListsValueError. | # -*- coding: utf-8 -*-
'''
This module contains all classes of exceptions raised
by the library
'''
from __future__ import unicode_literals
class SpamListsError(Exception):
'''There was an error during testing a url or host'''
class SpamListsValueError(SpamListsError, ValueError):
'''An inapropriate value was used in SpamLists library '''
class UnknownCodeError(SpamListsError, KeyError):
'''The classification code from the service was not recognized'''
class UnathorizedAPIKeyError(SpamListsValueError):
'''The API key used to query the service was not authorized'''
class InvalidHostError(SpamListsValueError):
'''The value is not a valid host'''
class InvalidIPError(InvalidHostError):
''' The value is not a valid IP address'''
class InvalidIPv4Error(InvalidIPError):
'''The value is not a valid IPv4 address'''
class InvalidIPv6Error(InvalidIPError):
'''The value is not a valid IPv6 address'''
class InvalidHostnameError(InvalidHostError):
'''The value is not a valid hostname'''
class InvalidURLError(SpamListsValueError):
'''The value is not a valid url'''
| <commit_before># -*- coding: utf-8 -*-
'''
This module contains all classes of exceptions raised
by the library
'''
from __future__ import unicode_literals
class SpamListsError(Exception):
'''There was an error during testing a url or host'''
class SpamListsValueError(SpamListsError, ValueError):
'''An inapropriate value was used in SpamLists library '''
class UnknownCodeError(SpamListsError, KeyError):
'''The classification code from the service was not recognized'''
class UnathorizedAPIKeyError(SpamListsError, ValueError):
'''The API key used to query the service was not authorized'''
class InvalidHostError(SpamListsError, ValueError):
'''The value is not a valid host'''
class InvalidIPError(InvalidHostError):
''' The value is not a valid IP address'''
class InvalidIPv4Error(InvalidIPError):
'''The value is not a valid IPv4 address'''
class InvalidIPv6Error(InvalidIPError):
'''The value is not a valid IPv6 address'''
class InvalidHostnameError(InvalidHostError):
'''The value is not a valid hostname'''
class InvalidURLError(SpamListsError, ValueError):
'''The value is not a valid url'''
<commit_msg>Change bases of exception classes extending SpamListsError and ValueError
This commit removes SpamListsError and ValueError as direct base classes
of other exception classes (except SpamListsValueError), and replaces
them with SpamListsValueError.<commit_after> | # -*- coding: utf-8 -*-
'''
This module contains all classes of exceptions raised
by the library
'''
from __future__ import unicode_literals
class SpamListsError(Exception):
'''There was an error during testing a url or host'''
class SpamListsValueError(SpamListsError, ValueError):
'''An inapropriate value was used in SpamLists library '''
class UnknownCodeError(SpamListsError, KeyError):
'''The classification code from the service was not recognized'''
class UnathorizedAPIKeyError(SpamListsValueError):
'''The API key used to query the service was not authorized'''
class InvalidHostError(SpamListsValueError):
'''The value is not a valid host'''
class InvalidIPError(InvalidHostError):
''' The value is not a valid IP address'''
class InvalidIPv4Error(InvalidIPError):
'''The value is not a valid IPv4 address'''
class InvalidIPv6Error(InvalidIPError):
'''The value is not a valid IPv6 address'''
class InvalidHostnameError(InvalidHostError):
'''The value is not a valid hostname'''
class InvalidURLError(SpamListsValueError):
'''The value is not a valid url'''
| # -*- coding: utf-8 -*-
'''
This module contains all classes of exceptions raised
by the library
'''
from __future__ import unicode_literals
class SpamListsError(Exception):
'''There was an error during testing a url or host'''
class SpamListsValueError(SpamListsError, ValueError):
'''An inapropriate value was used in SpamLists library '''
class UnknownCodeError(SpamListsError, KeyError):
'''The classification code from the service was not recognized'''
class UnathorizedAPIKeyError(SpamListsError, ValueError):
'''The API key used to query the service was not authorized'''
class InvalidHostError(SpamListsError, ValueError):
'''The value is not a valid host'''
class InvalidIPError(InvalidHostError):
''' The value is not a valid IP address'''
class InvalidIPv4Error(InvalidIPError):
'''The value is not a valid IPv4 address'''
class InvalidIPv6Error(InvalidIPError):
'''The value is not a valid IPv6 address'''
class InvalidHostnameError(InvalidHostError):
'''The value is not a valid hostname'''
class InvalidURLError(SpamListsError, ValueError):
'''The value is not a valid url'''
Change bases of exception classes extending SpamListsError and ValueError
This commit removes SpamListsError and ValueError as direct base classes
of other exception classes (except SpamListsValueError), and replaces
them with SpamListsValueError.# -*- coding: utf-8 -*-
'''
This module contains all classes of exceptions raised
by the library
'''
from __future__ import unicode_literals
class SpamListsError(Exception):
'''There was an error during testing a url or host'''
class SpamListsValueError(SpamListsError, ValueError):
'''An inapropriate value was used in SpamLists library '''
class UnknownCodeError(SpamListsError, KeyError):
'''The classification code from the service was not recognized'''
class UnathorizedAPIKeyError(SpamListsValueError):
'''The API key used to query the service was not authorized'''
class InvalidHostError(SpamListsValueError):
'''The value is not a valid host'''
class InvalidIPError(InvalidHostError):
''' The value is not a valid IP address'''
class InvalidIPv4Error(InvalidIPError):
'''The value is not a valid IPv4 address'''
class InvalidIPv6Error(InvalidIPError):
'''The value is not a valid IPv6 address'''
class InvalidHostnameError(InvalidHostError):
'''The value is not a valid hostname'''
class InvalidURLError(SpamListsValueError):
'''The value is not a valid url'''
| <commit_before># -*- coding: utf-8 -*-
'''
This module contains all classes of exceptions raised
by the library
'''
from __future__ import unicode_literals
class SpamListsError(Exception):
'''There was an error during testing a url or host'''
class SpamListsValueError(SpamListsError, ValueError):
'''An inapropriate value was used in SpamLists library '''
class UnknownCodeError(SpamListsError, KeyError):
'''The classification code from the service was not recognized'''
class UnathorizedAPIKeyError(SpamListsError, ValueError):
'''The API key used to query the service was not authorized'''
class InvalidHostError(SpamListsError, ValueError):
'''The value is not a valid host'''
class InvalidIPError(InvalidHostError):
''' The value is not a valid IP address'''
class InvalidIPv4Error(InvalidIPError):
'''The value is not a valid IPv4 address'''
class InvalidIPv6Error(InvalidIPError):
'''The value is not a valid IPv6 address'''
class InvalidHostnameError(InvalidHostError):
'''The value is not a valid hostname'''
class InvalidURLError(SpamListsError, ValueError):
'''The value is not a valid url'''
<commit_msg>Change bases of exception classes extending SpamListsError and ValueError
This commit removes SpamListsError and ValueError as direct base classes
of other exception classes (except SpamListsValueError), and replaces
them with SpamListsValueError.<commit_after># -*- coding: utf-8 -*-
'''
This module contains all classes of exceptions raised
by the library
'''
from __future__ import unicode_literals
class SpamListsError(Exception):
'''There was an error during testing a url or host'''
class SpamListsValueError(SpamListsError, ValueError):
'''An inapropriate value was used in SpamLists library '''
class UnknownCodeError(SpamListsError, KeyError):
'''The classification code from the service was not recognized'''
class UnathorizedAPIKeyError(SpamListsValueError):
'''The API key used to query the service was not authorized'''
class InvalidHostError(SpamListsValueError):
'''The value is not a valid host'''
class InvalidIPError(InvalidHostError):
''' The value is not a valid IP address'''
class InvalidIPv4Error(InvalidIPError):
'''The value is not a valid IPv4 address'''
class InvalidIPv6Error(InvalidIPError):
'''The value is not a valid IPv6 address'''
class InvalidHostnameError(InvalidHostError):
'''The value is not a valid hostname'''
class InvalidURLError(SpamListsValueError):
'''The value is not a valid url'''
|
ba5efae80cf9e1e26399f1e6d9b9c41106a1c2ec | conanfile.py | conanfile.py | from conans import ConanFile
from conans.tools import download, unzip
import os
VERSION = "0.0.1"
class ToolingCMakeUtilConan(ConanFile):
name = "tooling-find-pkg-util"
version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
generators = "cmake"
requires = ("cmake-include-guard/master@smspillaz/cmake-include-guard",
"cmake-unit/master@smspillaz/cmake-unit")
url = "http://github.com/polysquare/tooling-find-pkg-util"
license = "MIT"
def source(self):
zip_name = "tooling-find-pkg-util.zip"
download("https://github.com/polysquare/"
"tooling-find-pkg-util/archive/{version}.zip"
"".format(version="v" + VERSION),
zip_name)
unzip(zip_name)
os.unlink(zip_name)
def package(self):
self.copy(pattern="*.cmake",
dst="cmake/tooling-find-pkg-util",
src="tooling-find-pkg-util-" + VERSION,
keep_path=True)
| from conans import ConanFile
from conans.tools import download, unzip
import os
VERSION = "0.0.2"
class ToolingCMakeUtilConan(ConanFile):
name = "tooling-find-pkg-util"
version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
generators = "cmake"
requires = ("cmake-include-guard/master@smspillaz/cmake-include-guard",
"cmake-unit/master@smspillaz/cmake-unit")
url = "http://github.com/polysquare/tooling-find-pkg-util"
license = "MIT"
def source(self):
zip_name = "tooling-find-pkg-util.zip"
download("https://github.com/polysquare/"
"tooling-find-pkg-util/archive/{version}.zip"
"".format(version="v" + VERSION),
zip_name)
unzip(zip_name)
os.unlink(zip_name)
def package(self):
self.copy(pattern="*.cmake",
dst="cmake/tooling-find-pkg-util",
src="tooling-find-pkg-util-" + VERSION,
keep_path=True)
| Bump version: 0.0.1 -> 0.0.2 | Bump version: 0.0.1 -> 0.0.2
[ci skip]
| Python | mit | polysquare/tooling-find-package-cmake-util,polysquare/tooling-find-package-cmake-util,polysquare/tooling-find-pkg-util,polysquare/tooling-find-pkg-util | from conans import ConanFile
from conans.tools import download, unzip
import os
VERSION = "0.0.1"
class ToolingCMakeUtilConan(ConanFile):
name = "tooling-find-pkg-util"
version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
generators = "cmake"
requires = ("cmake-include-guard/master@smspillaz/cmake-include-guard",
"cmake-unit/master@smspillaz/cmake-unit")
url = "http://github.com/polysquare/tooling-find-pkg-util"
license = "MIT"
def source(self):
zip_name = "tooling-find-pkg-util.zip"
download("https://github.com/polysquare/"
"tooling-find-pkg-util/archive/{version}.zip"
"".format(version="v" + VERSION),
zip_name)
unzip(zip_name)
os.unlink(zip_name)
def package(self):
self.copy(pattern="*.cmake",
dst="cmake/tooling-find-pkg-util",
src="tooling-find-pkg-util-" + VERSION,
keep_path=True)
Bump version: 0.0.1 -> 0.0.2
[ci skip] | from conans import ConanFile
from conans.tools import download, unzip
import os
VERSION = "0.0.2"
class ToolingCMakeUtilConan(ConanFile):
name = "tooling-find-pkg-util"
version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
generators = "cmake"
requires = ("cmake-include-guard/master@smspillaz/cmake-include-guard",
"cmake-unit/master@smspillaz/cmake-unit")
url = "http://github.com/polysquare/tooling-find-pkg-util"
license = "MIT"
def source(self):
zip_name = "tooling-find-pkg-util.zip"
download("https://github.com/polysquare/"
"tooling-find-pkg-util/archive/{version}.zip"
"".format(version="v" + VERSION),
zip_name)
unzip(zip_name)
os.unlink(zip_name)
def package(self):
self.copy(pattern="*.cmake",
dst="cmake/tooling-find-pkg-util",
src="tooling-find-pkg-util-" + VERSION,
keep_path=True)
| <commit_before>from conans import ConanFile
from conans.tools import download, unzip
import os
VERSION = "0.0.1"
class ToolingCMakeUtilConan(ConanFile):
name = "tooling-find-pkg-util"
version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
generators = "cmake"
requires = ("cmake-include-guard/master@smspillaz/cmake-include-guard",
"cmake-unit/master@smspillaz/cmake-unit")
url = "http://github.com/polysquare/tooling-find-pkg-util"
license = "MIT"
def source(self):
zip_name = "tooling-find-pkg-util.zip"
download("https://github.com/polysquare/"
"tooling-find-pkg-util/archive/{version}.zip"
"".format(version="v" + VERSION),
zip_name)
unzip(zip_name)
os.unlink(zip_name)
def package(self):
self.copy(pattern="*.cmake",
dst="cmake/tooling-find-pkg-util",
src="tooling-find-pkg-util-" + VERSION,
keep_path=True)
<commit_msg>Bump version: 0.0.1 -> 0.0.2
[ci skip]<commit_after> | from conans import ConanFile
from conans.tools import download, unzip
import os
VERSION = "0.0.2"
class ToolingCMakeUtilConan(ConanFile):
name = "tooling-find-pkg-util"
version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
generators = "cmake"
requires = ("cmake-include-guard/master@smspillaz/cmake-include-guard",
"cmake-unit/master@smspillaz/cmake-unit")
url = "http://github.com/polysquare/tooling-find-pkg-util"
license = "MIT"
def source(self):
zip_name = "tooling-find-pkg-util.zip"
download("https://github.com/polysquare/"
"tooling-find-pkg-util/archive/{version}.zip"
"".format(version="v" + VERSION),
zip_name)
unzip(zip_name)
os.unlink(zip_name)
def package(self):
self.copy(pattern="*.cmake",
dst="cmake/tooling-find-pkg-util",
src="tooling-find-pkg-util-" + VERSION,
keep_path=True)
| from conans import ConanFile
from conans.tools import download, unzip
import os
VERSION = "0.0.1"
class ToolingCMakeUtilConan(ConanFile):
name = "tooling-find-pkg-util"
version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
generators = "cmake"
requires = ("cmake-include-guard/master@smspillaz/cmake-include-guard",
"cmake-unit/master@smspillaz/cmake-unit")
url = "http://github.com/polysquare/tooling-find-pkg-util"
license = "MIT"
def source(self):
zip_name = "tooling-find-pkg-util.zip"
download("https://github.com/polysquare/"
"tooling-find-pkg-util/archive/{version}.zip"
"".format(version="v" + VERSION),
zip_name)
unzip(zip_name)
os.unlink(zip_name)
def package(self):
self.copy(pattern="*.cmake",
dst="cmake/tooling-find-pkg-util",
src="tooling-find-pkg-util-" + VERSION,
keep_path=True)
Bump version: 0.0.1 -> 0.0.2
[ci skip]from conans import ConanFile
from conans.tools import download, unzip
import os
VERSION = "0.0.2"
class ToolingCMakeUtilConan(ConanFile):
name = "tooling-find-pkg-util"
version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
generators = "cmake"
requires = ("cmake-include-guard/master@smspillaz/cmake-include-guard",
"cmake-unit/master@smspillaz/cmake-unit")
url = "http://github.com/polysquare/tooling-find-pkg-util"
license = "MIT"
def source(self):
zip_name = "tooling-find-pkg-util.zip"
download("https://github.com/polysquare/"
"tooling-find-pkg-util/archive/{version}.zip"
"".format(version="v" + VERSION),
zip_name)
unzip(zip_name)
os.unlink(zip_name)
def package(self):
self.copy(pattern="*.cmake",
dst="cmake/tooling-find-pkg-util",
src="tooling-find-pkg-util-" + VERSION,
keep_path=True)
| <commit_before>from conans import ConanFile
from conans.tools import download, unzip
import os
VERSION = "0.0.1"
class ToolingCMakeUtilConan(ConanFile):
name = "tooling-find-pkg-util"
version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
generators = "cmake"
requires = ("cmake-include-guard/master@smspillaz/cmake-include-guard",
"cmake-unit/master@smspillaz/cmake-unit")
url = "http://github.com/polysquare/tooling-find-pkg-util"
license = "MIT"
def source(self):
zip_name = "tooling-find-pkg-util.zip"
download("https://github.com/polysquare/"
"tooling-find-pkg-util/archive/{version}.zip"
"".format(version="v" + VERSION),
zip_name)
unzip(zip_name)
os.unlink(zip_name)
def package(self):
self.copy(pattern="*.cmake",
dst="cmake/tooling-find-pkg-util",
src="tooling-find-pkg-util-" + VERSION,
keep_path=True)
<commit_msg>Bump version: 0.0.1 -> 0.0.2
[ci skip]<commit_after>from conans import ConanFile
from conans.tools import download, unzip
import os
VERSION = "0.0.2"
class ToolingCMakeUtilConan(ConanFile):
name = "tooling-find-pkg-util"
version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
generators = "cmake"
requires = ("cmake-include-guard/master@smspillaz/cmake-include-guard",
"cmake-unit/master@smspillaz/cmake-unit")
url = "http://github.com/polysquare/tooling-find-pkg-util"
license = "MIT"
def source(self):
zip_name = "tooling-find-pkg-util.zip"
download("https://github.com/polysquare/"
"tooling-find-pkg-util/archive/{version}.zip"
"".format(version="v" + VERSION),
zip_name)
unzip(zip_name)
os.unlink(zip_name)
def package(self):
self.copy(pattern="*.cmake",
dst="cmake/tooling-find-pkg-util",
src="tooling-find-pkg-util-" + VERSION,
keep_path=True)
|
b0c2262bb50fb51bcc2f5eadb86b353cc9eb38a3 | bin/Bullet.py | bin/Bullet.py | from tkinter import Label
import threading
class Bullet(Label):
def __init__(self, x, y, space):
self.space = space
self.bullet_timer = 0.01
self.bullet_indicator = "'"
self.damage = -100
Label.__init__(self, text=self.bullet_indicator)
self.pack()
self._x = x
self._y = y
self._observers = []
def start(self):
process = threading.Thread(target=self.place_bullet)
process.start()
def place_bullet(self):
if self._y > 0:
self.set_y(-1)
self.place(x=self._x, y=self._y)
process = threading.Timer(self.bullet_timer, self.place_bullet, [])
process.start()
else:
self.set_y(self.space.height)
self.place(x=self._x, y=self._y)
process = threading.Timer(self.bullet_timer, self.place_bullet, [])
process.start()
def get_y(self):
return self._y
def set_y(self, value):
self._y += value
for callback in self._observers:
callback(x=self._x, y=self._y, thing=self)
y = property(get_y, set_y)
def bind_to(self, callback):
self._observers.append(callback)
| from tkinter import Label
import threading
class Bullet(Label):
def __init__(self, x, y, space):
self.space = space
self.bullet_timer = 0.01
self.bullet_indicator = "'"
self.damage = -100
Label.__init__(self, text=self.bullet_indicator)
self.pack()
self._x = x
self._y = y
self._observers = []
def start(self):
process = threading.Thread(target=self.place_bullet)
process.start()
def place_bullet(self):
if self._y > 0:
self.set_y(-1)
self.place(x=self._x, y=self._y)
process = threading.Timer(self.bullet_timer, self.place_bullet, [])
process.start()
else:
self.set_y(self.space.height)
self.place(x=self._x, y=self._y)
process = threading.Timer(self.bullet_timer, self.place_bullet, [])
process.start()
def get_y(self):
return self._y
def set_y(self, value):
self._y += value
for callback in self._observers:
callback(thing=self)
y = property(get_y, set_y)
def bind_to(self, callback):
self._observers.append(callback)
def hit(self):
self.destroy()
| Add bullet hit function to destroy self. | Add bullet hit function to destroy self.
| Python | mit | emreeroglu/DummyShip | from tkinter import Label
import threading
class Bullet(Label):
def __init__(self, x, y, space):
self.space = space
self.bullet_timer = 0.01
self.bullet_indicator = "'"
self.damage = -100
Label.__init__(self, text=self.bullet_indicator)
self.pack()
self._x = x
self._y = y
self._observers = []
def start(self):
process = threading.Thread(target=self.place_bullet)
process.start()
def place_bullet(self):
if self._y > 0:
self.set_y(-1)
self.place(x=self._x, y=self._y)
process = threading.Timer(self.bullet_timer, self.place_bullet, [])
process.start()
else:
self.set_y(self.space.height)
self.place(x=self._x, y=self._y)
process = threading.Timer(self.bullet_timer, self.place_bullet, [])
process.start()
def get_y(self):
return self._y
def set_y(self, value):
self._y += value
for callback in self._observers:
callback(x=self._x, y=self._y, thing=self)
y = property(get_y, set_y)
def bind_to(self, callback):
self._observers.append(callback)
Add bullet hit function to destroy self. | from tkinter import Label
import threading
class Bullet(Label):
def __init__(self, x, y, space):
self.space = space
self.bullet_timer = 0.01
self.bullet_indicator = "'"
self.damage = -100
Label.__init__(self, text=self.bullet_indicator)
self.pack()
self._x = x
self._y = y
self._observers = []
def start(self):
process = threading.Thread(target=self.place_bullet)
process.start()
def place_bullet(self):
if self._y > 0:
self.set_y(-1)
self.place(x=self._x, y=self._y)
process = threading.Timer(self.bullet_timer, self.place_bullet, [])
process.start()
else:
self.set_y(self.space.height)
self.place(x=self._x, y=self._y)
process = threading.Timer(self.bullet_timer, self.place_bullet, [])
process.start()
def get_y(self):
return self._y
def set_y(self, value):
self._y += value
for callback in self._observers:
callback(thing=self)
y = property(get_y, set_y)
def bind_to(self, callback):
self._observers.append(callback)
def hit(self):
self.destroy()
| <commit_before>from tkinter import Label
import threading
class Bullet(Label):
def __init__(self, x, y, space):
self.space = space
self.bullet_timer = 0.01
self.bullet_indicator = "'"
self.damage = -100
Label.__init__(self, text=self.bullet_indicator)
self.pack()
self._x = x
self._y = y
self._observers = []
def start(self):
process = threading.Thread(target=self.place_bullet)
process.start()
def place_bullet(self):
if self._y > 0:
self.set_y(-1)
self.place(x=self._x, y=self._y)
process = threading.Timer(self.bullet_timer, self.place_bullet, [])
process.start()
else:
self.set_y(self.space.height)
self.place(x=self._x, y=self._y)
process = threading.Timer(self.bullet_timer, self.place_bullet, [])
process.start()
def get_y(self):
return self._y
def set_y(self, value):
self._y += value
for callback in self._observers:
callback(x=self._x, y=self._y, thing=self)
y = property(get_y, set_y)
def bind_to(self, callback):
self._observers.append(callback)
<commit_msg>Add bullet hit function to destroy self.<commit_after> | from tkinter import Label
import threading
class Bullet(Label):
def __init__(self, x, y, space):
self.space = space
self.bullet_timer = 0.01
self.bullet_indicator = "'"
self.damage = -100
Label.__init__(self, text=self.bullet_indicator)
self.pack()
self._x = x
self._y = y
self._observers = []
def start(self):
process = threading.Thread(target=self.place_bullet)
process.start()
def place_bullet(self):
if self._y > 0:
self.set_y(-1)
self.place(x=self._x, y=self._y)
process = threading.Timer(self.bullet_timer, self.place_bullet, [])
process.start()
else:
self.set_y(self.space.height)
self.place(x=self._x, y=self._y)
process = threading.Timer(self.bullet_timer, self.place_bullet, [])
process.start()
def get_y(self):
return self._y
def set_y(self, value):
self._y += value
for callback in self._observers:
callback(thing=self)
y = property(get_y, set_y)
def bind_to(self, callback):
self._observers.append(callback)
def hit(self):
self.destroy()
| from tkinter import Label
import threading
class Bullet(Label):
def __init__(self, x, y, space):
self.space = space
self.bullet_timer = 0.01
self.bullet_indicator = "'"
self.damage = -100
Label.__init__(self, text=self.bullet_indicator)
self.pack()
self._x = x
self._y = y
self._observers = []
def start(self):
process = threading.Thread(target=self.place_bullet)
process.start()
def place_bullet(self):
if self._y > 0:
self.set_y(-1)
self.place(x=self._x, y=self._y)
process = threading.Timer(self.bullet_timer, self.place_bullet, [])
process.start()
else:
self.set_y(self.space.height)
self.place(x=self._x, y=self._y)
process = threading.Timer(self.bullet_timer, self.place_bullet, [])
process.start()
def get_y(self):
return self._y
def set_y(self, value):
self._y += value
for callback in self._observers:
callback(x=self._x, y=self._y, thing=self)
y = property(get_y, set_y)
def bind_to(self, callback):
self._observers.append(callback)
Add bullet hit function to destroy self.from tkinter import Label
import threading
class Bullet(Label):
def __init__(self, x, y, space):
self.space = space
self.bullet_timer = 0.01
self.bullet_indicator = "'"
self.damage = -100
Label.__init__(self, text=self.bullet_indicator)
self.pack()
self._x = x
self._y = y
self._observers = []
def start(self):
process = threading.Thread(target=self.place_bullet)
process.start()
def place_bullet(self):
if self._y > 0:
self.set_y(-1)
self.place(x=self._x, y=self._y)
process = threading.Timer(self.bullet_timer, self.place_bullet, [])
process.start()
else:
self.set_y(self.space.height)
self.place(x=self._x, y=self._y)
process = threading.Timer(self.bullet_timer, self.place_bullet, [])
process.start()
def get_y(self):
return self._y
def set_y(self, value):
self._y += value
for callback in self._observers:
callback(thing=self)
y = property(get_y, set_y)
def bind_to(self, callback):
self._observers.append(callback)
def hit(self):
self.destroy()
| <commit_before>from tkinter import Label
import threading
class Bullet(Label):
def __init__(self, x, y, space):
self.space = space
self.bullet_timer = 0.01
self.bullet_indicator = "'"
self.damage = -100
Label.__init__(self, text=self.bullet_indicator)
self.pack()
self._x = x
self._y = y
self._observers = []
def start(self):
process = threading.Thread(target=self.place_bullet)
process.start()
def place_bullet(self):
if self._y > 0:
self.set_y(-1)
self.place(x=self._x, y=self._y)
process = threading.Timer(self.bullet_timer, self.place_bullet, [])
process.start()
else:
self.set_y(self.space.height)
self.place(x=self._x, y=self._y)
process = threading.Timer(self.bullet_timer, self.place_bullet, [])
process.start()
def get_y(self):
return self._y
def set_y(self, value):
self._y += value
for callback in self._observers:
callback(x=self._x, y=self._y, thing=self)
y = property(get_y, set_y)
def bind_to(self, callback):
self._observers.append(callback)
<commit_msg>Add bullet hit function to destroy self.<commit_after>from tkinter import Label
import threading
class Bullet(Label):
def __init__(self, x, y, space):
self.space = space
self.bullet_timer = 0.01
self.bullet_indicator = "'"
self.damage = -100
Label.__init__(self, text=self.bullet_indicator)
self.pack()
self._x = x
self._y = y
self._observers = []
def start(self):
process = threading.Thread(target=self.place_bullet)
process.start()
def place_bullet(self):
if self._y > 0:
self.set_y(-1)
self.place(x=self._x, y=self._y)
process = threading.Timer(self.bullet_timer, self.place_bullet, [])
process.start()
else:
self.set_y(self.space.height)
self.place(x=self._x, y=self._y)
process = threading.Timer(self.bullet_timer, self.place_bullet, [])
process.start()
def get_y(self):
return self._y
def set_y(self, value):
self._y += value
for callback in self._observers:
callback(thing=self)
y = property(get_y, set_y)
def bind_to(self, callback):
self._observers.append(callback)
def hit(self):
self.destroy()
|
59cd1879efd790d664989d9f506ebdb72f8b6ed3 | portainer/util/fs.py | portainer/util/fs.py |
import os.path
import errno
def touch(path, times=None):
"""Mimics the behavior of the `touch` UNIX command line tool, to create
empty files.
"""
with open(path):
os.utime(path, times)
def mkdir_p(path):
"""Mimics the behavior of the `mkdir -p` UNIX command line took, creating
directories recursively, ignoring them if they already exist.
"""
try:
os.makedirs(path)
except OSError, e:
if e.errno != errno.EEXIST:
raise
|
import os.path
import errno
def touch(path, times=None):
"""Mimics the behavior of the `touch` UNIX command line tool, to create
empty files.
"""
with open(path, "a"):
os.utime(path, times)
def mkdir_p(path):
"""Mimics the behavior of the `mkdir -p` UNIX command line took, creating
directories recursively, ignoring them if they already exist.
"""
try:
os.makedirs(path)
except OSError, e:
if e.errno != errno.EEXIST:
raise
| Handle touching files that don't yet exist | Handle touching files that don't yet exist
| Python | mit | duedil-ltd/portainer,duedil-ltd/portainer |
import os.path
import errno
def touch(path, times=None):
"""Mimics the behavior of the `touch` UNIX command line tool, to create
empty files.
"""
with open(path):
os.utime(path, times)
def mkdir_p(path):
"""Mimics the behavior of the `mkdir -p` UNIX command line took, creating
directories recursively, ignoring them if they already exist.
"""
try:
os.makedirs(path)
except OSError, e:
if e.errno != errno.EEXIST:
raise
Handle touching files that don't yet exist |
import os.path
import errno
def touch(path, times=None):
"""Mimics the behavior of the `touch` UNIX command line tool, to create
empty files.
"""
with open(path, "a"):
os.utime(path, times)
def mkdir_p(path):
"""Mimics the behavior of the `mkdir -p` UNIX command line took, creating
directories recursively, ignoring them if they already exist.
"""
try:
os.makedirs(path)
except OSError, e:
if e.errno != errno.EEXIST:
raise
| <commit_before>
import os.path
import errno
def touch(path, times=None):
"""Mimics the behavior of the `touch` UNIX command line tool, to create
empty files.
"""
with open(path):
os.utime(path, times)
def mkdir_p(path):
"""Mimics the behavior of the `mkdir -p` UNIX command line took, creating
directories recursively, ignoring them if they already exist.
"""
try:
os.makedirs(path)
except OSError, e:
if e.errno != errno.EEXIST:
raise
<commit_msg>Handle touching files that don't yet exist<commit_after> |
import os.path
import errno
def touch(path, times=None):
"""Mimics the behavior of the `touch` UNIX command line tool, to create
empty files.
"""
with open(path, "a"):
os.utime(path, times)
def mkdir_p(path):
"""Mimics the behavior of the `mkdir -p` UNIX command line took, creating
directories recursively, ignoring them if they already exist.
"""
try:
os.makedirs(path)
except OSError, e:
if e.errno != errno.EEXIST:
raise
|
import os.path
import errno
def touch(path, times=None):
"""Mimics the behavior of the `touch` UNIX command line tool, to create
empty files.
"""
with open(path):
os.utime(path, times)
def mkdir_p(path):
"""Mimics the behavior of the `mkdir -p` UNIX command line took, creating
directories recursively, ignoring them if they already exist.
"""
try:
os.makedirs(path)
except OSError, e:
if e.errno != errno.EEXIST:
raise
Handle touching files that don't yet exist
import os.path
import errno
def touch(path, times=None):
"""Mimics the behavior of the `touch` UNIX command line tool, to create
empty files.
"""
with open(path, "a"):
os.utime(path, times)
def mkdir_p(path):
"""Mimics the behavior of the `mkdir -p` UNIX command line took, creating
directories recursively, ignoring them if they already exist.
"""
try:
os.makedirs(path)
except OSError, e:
if e.errno != errno.EEXIST:
raise
| <commit_before>
import os.path
import errno
def touch(path, times=None):
"""Mimics the behavior of the `touch` UNIX command line tool, to create
empty files.
"""
with open(path):
os.utime(path, times)
def mkdir_p(path):
"""Mimics the behavior of the `mkdir -p` UNIX command line took, creating
directories recursively, ignoring them if they already exist.
"""
try:
os.makedirs(path)
except OSError, e:
if e.errno != errno.EEXIST:
raise
<commit_msg>Handle touching files that don't yet exist<commit_after>
import os.path
import errno
def touch(path, times=None):
"""Mimics the behavior of the `touch` UNIX command line tool, to create
empty files.
"""
with open(path, "a"):
os.utime(path, times)
def mkdir_p(path):
"""Mimics the behavior of the `mkdir -p` UNIX command line took, creating
directories recursively, ignoring them if they already exist.
"""
try:
os.makedirs(path)
except OSError, e:
if e.errno != errno.EEXIST:
raise
|
6de094b3b08751a8585c5a492bf009807ff3e1e1 | stringinfo.py | stringinfo.py | #!/usr/bin/env python3
# -*- coding: utf8 -*-
"""
Usage:
stringinfo STRING...
Options:
STRING One or more strings for which you want information
"""
from docopt import docopt
import plugins
__author__ = 'peter'
def main():
args = docopt(__doc__)
# Find plugins
ps = plugins.get_plugins()
# For each plugin, check if it's applicable and if so, run it
for p in ps:
plugin = p(args)
if plugin.sentinel():
print(plugin.handle())
if __name__ == '__main__':
main() | #!/usr/bin/env python3
# -*- coding: utf8 -*-
"""
Usage:
stringinfo STRING...
Options:
STRING One or more strings for which you want information
"""
import colorama
from docopt import docopt
import plugins
__author__ = 'peter'
def main():
args = docopt(__doc__)
# Find plugins
ps = plugins.get_plugins()
# Initialize colorama
colorama.init()
# For each plugin, check if it's applicable and if so, run it
for p in ps:
plugin = p(args)
if plugin.sentinel():
print(plugin.short_description)
print(plugin.handle())
if __name__ == '__main__':
main() | Use colorama to print colors and print a plugin's short_description after running | Use colorama to print colors and print a plugin's short_description after running
| Python | mit | Sakartu/stringinfo | #!/usr/bin/env python3
# -*- coding: utf8 -*-
"""
Usage:
stringinfo STRING...
Options:
STRING One or more strings for which you want information
"""
from docopt import docopt
import plugins
__author__ = 'peter'
def main():
args = docopt(__doc__)
# Find plugins
ps = plugins.get_plugins()
# For each plugin, check if it's applicable and if so, run it
for p in ps:
plugin = p(args)
if plugin.sentinel():
print(plugin.handle())
if __name__ == '__main__':
main()Use colorama to print colors and print a plugin's short_description after running | #!/usr/bin/env python3
# -*- coding: utf8 -*-
"""
Usage:
stringinfo STRING...
Options:
STRING One or more strings for which you want information
"""
import colorama
from docopt import docopt
import plugins
__author__ = 'peter'
def main():
args = docopt(__doc__)
# Find plugins
ps = plugins.get_plugins()
# Initialize colorama
colorama.init()
# For each plugin, check if it's applicable and if so, run it
for p in ps:
plugin = p(args)
if plugin.sentinel():
print(plugin.short_description)
print(plugin.handle())
if __name__ == '__main__':
main() | <commit_before>#!/usr/bin/env python3
# -*- coding: utf8 -*-
"""
Usage:
stringinfo STRING...
Options:
STRING One or more strings for which you want information
"""
from docopt import docopt
import plugins
__author__ = 'peter'
def main():
args = docopt(__doc__)
# Find plugins
ps = plugins.get_plugins()
# For each plugin, check if it's applicable and if so, run it
for p in ps:
plugin = p(args)
if plugin.sentinel():
print(plugin.handle())
if __name__ == '__main__':
main()<commit_msg>Use colorama to print colors and print a plugin's short_description after running<commit_after> | #!/usr/bin/env python3
# -*- coding: utf8 -*-
"""
Usage:
stringinfo STRING...
Options:
STRING One or more strings for which you want information
"""
import colorama
from docopt import docopt
import plugins
__author__ = 'peter'
def main():
args = docopt(__doc__)
# Find plugins
ps = plugins.get_plugins()
# Initialize colorama
colorama.init()
# For each plugin, check if it's applicable and if so, run it
for p in ps:
plugin = p(args)
if plugin.sentinel():
print(plugin.short_description)
print(plugin.handle())
if __name__ == '__main__':
main() | #!/usr/bin/env python3
# -*- coding: utf8 -*-
"""
Usage:
stringinfo STRING...
Options:
STRING One or more strings for which you want information
"""
from docopt import docopt
import plugins
__author__ = 'peter'
def main():
args = docopt(__doc__)
# Find plugins
ps = plugins.get_plugins()
# For each plugin, check if it's applicable and if so, run it
for p in ps:
plugin = p(args)
if plugin.sentinel():
print(plugin.handle())
if __name__ == '__main__':
main()Use colorama to print colors and print a plugin's short_description after running#!/usr/bin/env python3
# -*- coding: utf8 -*-
"""
Usage:
stringinfo STRING...
Options:
STRING One or more strings for which you want information
"""
import colorama
from docopt import docopt
import plugins
__author__ = 'peter'
def main():
args = docopt(__doc__)
# Find plugins
ps = plugins.get_plugins()
# Initialize colorama
colorama.init()
# For each plugin, check if it's applicable and if so, run it
for p in ps:
plugin = p(args)
if plugin.sentinel():
print(plugin.short_description)
print(plugin.handle())
if __name__ == '__main__':
main() | <commit_before>#!/usr/bin/env python3
# -*- coding: utf8 -*-
"""
Usage:
stringinfo STRING...
Options:
STRING One or more strings for which you want information
"""
from docopt import docopt
import plugins
__author__ = 'peter'
def main():
args = docopt(__doc__)
# Find plugins
ps = plugins.get_plugins()
# For each plugin, check if it's applicable and if so, run it
for p in ps:
plugin = p(args)
if plugin.sentinel():
print(plugin.handle())
if __name__ == '__main__':
main()<commit_msg>Use colorama to print colors and print a plugin's short_description after running<commit_after>#!/usr/bin/env python3
# -*- coding: utf8 -*-
"""
Usage:
stringinfo STRING...
Options:
STRING One or more strings for which you want information
"""
import colorama
from docopt import docopt
import plugins
__author__ = 'peter'
def main():
args = docopt(__doc__)
# Find plugins
ps = plugins.get_plugins()
# Initialize colorama
colorama.init()
# For each plugin, check if it's applicable and if so, run it
for p in ps:
plugin = p(args)
if plugin.sentinel():
print(plugin.short_description)
print(plugin.handle())
if __name__ == '__main__':
main() |
6fa6ef07dd18794b75d63ffa2a5be83e2ec9b674 | bit/count_ones.py | bit/count_ones.py | """
Write a function that takes an unsigned integer and
returns the number of ’1' bits it has
(also known as the Hamming weight).
For example, the 32-bit integer ’11' has binary
representation 00000000000000000000000000001011,
so the function should return 3.
"""
def count_ones(n):
"""
:type n: int
:rtype: int
"""
counter = 0
while n:
counter += n & 1
n >>= 1
return counter
| """
Write a function that takes an unsigned integer and
returns the number of ’1' bits it has
(also known as the Hamming weight).
For example, the 32-bit integer ’11' has binary
representation 00000000000000000000000000001011,
so the function should return 3.
"""
def count_ones(n):
"""
:type n: int
:rtype: int
"""
if n < 0:
return
counter = 0
while n:
counter += n & 1
n >>= 1
return counter
| Check if the input is negative | Check if the input is negative
As the comments mention, the code would work only for unsigned integers.
If a negative integer is provided as input, then the code runs into an
infinite loop. To avoid this, we are checking if the input is negative.
If yes, then return control before loop is entered.
| Python | mit | amaozhao/algorithms,keon/algorithms | """
Write a function that takes an unsigned integer and
returns the number of ’1' bits it has
(also known as the Hamming weight).
For example, the 32-bit integer ’11' has binary
representation 00000000000000000000000000001011,
so the function should return 3.
"""
def count_ones(n):
"""
:type n: int
:rtype: int
"""
counter = 0
while n:
counter += n & 1
n >>= 1
return counter
Check if the input is negative
As the comments mention, the code would work only for unsigned integers.
If a negative integer is provided as input, then the code runs into an
infinite loop. To avoid this, we are checking if the input is negative.
If yes, then return control before loop is entered. | """
Write a function that takes an unsigned integer and
returns the number of ’1' bits it has
(also known as the Hamming weight).
For example, the 32-bit integer ’11' has binary
representation 00000000000000000000000000001011,
so the function should return 3.
"""
def count_ones(n):
"""
:type n: int
:rtype: int
"""
if n < 0:
return
counter = 0
while n:
counter += n & 1
n >>= 1
return counter
| <commit_before>"""
Write a function that takes an unsigned integer and
returns the number of ’1' bits it has
(also known as the Hamming weight).
For example, the 32-bit integer ’11' has binary
representation 00000000000000000000000000001011,
so the function should return 3.
"""
def count_ones(n):
"""
:type n: int
:rtype: int
"""
counter = 0
while n:
counter += n & 1
n >>= 1
return counter
<commit_msg>Check if the input is negative
As the comments mention, the code would work only for unsigned integers.
If a negative integer is provided as input, then the code runs into an
infinite loop. To avoid this, we are checking if the input is negative.
If yes, then return control before loop is entered.<commit_after> | """
Write a function that takes an unsigned integer and
returns the number of ’1' bits it has
(also known as the Hamming weight).
For example, the 32-bit integer ’11' has binary
representation 00000000000000000000000000001011,
so the function should return 3.
"""
def count_ones(n):
"""
:type n: int
:rtype: int
"""
if n < 0:
return
counter = 0
while n:
counter += n & 1
n >>= 1
return counter
| """
Write a function that takes an unsigned integer and
returns the number of ’1' bits it has
(also known as the Hamming weight).
For example, the 32-bit integer ’11' has binary
representation 00000000000000000000000000001011,
so the function should return 3.
"""
def count_ones(n):
"""
:type n: int
:rtype: int
"""
counter = 0
while n:
counter += n & 1
n >>= 1
return counter
Check if the input is negative
As the comments mention, the code would work only for unsigned integers.
If a negative integer is provided as input, then the code runs into an
infinite loop. To avoid this, we are checking if the input is negative.
If yes, then return control before loop is entered."""
Write a function that takes an unsigned integer and
returns the number of ’1' bits it has
(also known as the Hamming weight).
For example, the 32-bit integer ’11' has binary
representation 00000000000000000000000000001011,
so the function should return 3.
"""
def count_ones(n):
"""
:type n: int
:rtype: int
"""
if n < 0:
return
counter = 0
while n:
counter += n & 1
n >>= 1
return counter
| <commit_before>"""
Write a function that takes an unsigned integer and
returns the number of ’1' bits it has
(also known as the Hamming weight).
For example, the 32-bit integer ’11' has binary
representation 00000000000000000000000000001011,
so the function should return 3.
"""
def count_ones(n):
"""
:type n: int
:rtype: int
"""
counter = 0
while n:
counter += n & 1
n >>= 1
return counter
<commit_msg>Check if the input is negative
As the comments mention, the code would work only for unsigned integers.
If a negative integer is provided as input, then the code runs into an
infinite loop. To avoid this, we are checking if the input is negative.
If yes, then return control before loop is entered.<commit_after>"""
Write a function that takes an unsigned integer and
returns the number of ’1' bits it has
(also known as the Hamming weight).
For example, the 32-bit integer ’11' has binary
representation 00000000000000000000000000001011,
so the function should return 3.
"""
def count_ones(n):
"""
:type n: int
:rtype: int
"""
if n < 0:
return
counter = 0
while n:
counter += n & 1
n >>= 1
return counter
|
cdc63148c00a38ebbfd74879da8d646427627d1f | rasterio/rio/main.py | rasterio/rio/main.py | #!/usr/bin/env python
# main: loader of all the command entry points.
from pkg_resources import iter_entry_points
for entry_point in iter_entry_points('rasterio.rio_commands'):
entry_point.load()
| #!/usr/bin/env python
# main: loader of all the command entry points.
from pkg_resources import iter_entry_points
from rasterio.rio.cli import cli
for entry_point in iter_entry_points('rasterio.rio_commands'):
entry_point.load()
| Add back import of cli. | Add back import of cli.
| Python | bsd-3-clause | youngpm/rasterio,brendan-ward/rasterio,brendan-ward/rasterio,njwilson23/rasterio,kapadia/rasterio,brendan-ward/rasterio,perrygeo/rasterio,johanvdw/rasterio,perrygeo/rasterio,johanvdw/rasterio,johanvdw/rasterio,kapadia/rasterio,njwilson23/rasterio,youngpm/rasterio,youngpm/rasterio,perrygeo/rasterio,njwilson23/rasterio,clembou/rasterio,clembou/rasterio,kapadia/rasterio,clembou/rasterio | #!/usr/bin/env python
# main: loader of all the command entry points.
from pkg_resources import iter_entry_points
for entry_point in iter_entry_points('rasterio.rio_commands'):
entry_point.load()
Add back import of cli. | #!/usr/bin/env python
# main: loader of all the command entry points.
from pkg_resources import iter_entry_points
from rasterio.rio.cli import cli
for entry_point in iter_entry_points('rasterio.rio_commands'):
entry_point.load()
| <commit_before>#!/usr/bin/env python
# main: loader of all the command entry points.
from pkg_resources import iter_entry_points
for entry_point in iter_entry_points('rasterio.rio_commands'):
entry_point.load()
<commit_msg>Add back import of cli.<commit_after> | #!/usr/bin/env python
# main: loader of all the command entry points.
from pkg_resources import iter_entry_points
from rasterio.rio.cli import cli
for entry_point in iter_entry_points('rasterio.rio_commands'):
entry_point.load()
| #!/usr/bin/env python
# main: loader of all the command entry points.
from pkg_resources import iter_entry_points
for entry_point in iter_entry_points('rasterio.rio_commands'):
entry_point.load()
Add back import of cli.#!/usr/bin/env python
# main: loader of all the command entry points.
from pkg_resources import iter_entry_points
from rasterio.rio.cli import cli
for entry_point in iter_entry_points('rasterio.rio_commands'):
entry_point.load()
| <commit_before>#!/usr/bin/env python
# main: loader of all the command entry points.
from pkg_resources import iter_entry_points
for entry_point in iter_entry_points('rasterio.rio_commands'):
entry_point.load()
<commit_msg>Add back import of cli.<commit_after>#!/usr/bin/env python
# main: loader of all the command entry points.
from pkg_resources import iter_entry_points
from rasterio.rio.cli import cli
for entry_point in iter_entry_points('rasterio.rio_commands'):
entry_point.load()
|
28a457926921ef5e7f57c086d6e6b77a0221348c | carnifex/utils.py | carnifex/utils.py | """
@author: Geir Sporsheim
@license: see LICENCE for details
"""
def attr_string(filterKeys=(), filterValues=(), **kwargs):
return ', '.join([str(k)+'='+repr(v) for k, v in kwargs.iteritems() if k not in filterKeys and v not in filterValues])
| """
@author: Geir Sporsheim
@license: see LICENCE for details
"""
def attr_string(filterKeys=(), filterValues=(), **kwargs):
"""Build a string consisting of 'key=value' substrings for each keyword
argument in :kwargs:
@param filterKeys: list of key names to ignore
@param filterValues: list of values to ignore (e.g. None will ignore all
key=value pairs that has that value.
"""
return ', '.join([str(k)+'='+repr(v) for k, v in kwargs.iteritems()
if k not in filterKeys and v not in filterValues])
| Add documentation and touch up formatting of attr_string util method | Add documentation and touch up formatting of attr_string util method
| Python | mit | sporsh/carnifex | """
@author: Geir Sporsheim
@license: see LICENCE for details
"""
def attr_string(filterKeys=(), filterValues=(), **kwargs):
return ', '.join([str(k)+'='+repr(v) for k, v in kwargs.iteritems() if k not in filterKeys and v not in filterValues])
Add documentation and touch up formatting of attr_string util method | """
@author: Geir Sporsheim
@license: see LICENCE for details
"""
def attr_string(filterKeys=(), filterValues=(), **kwargs):
"""Build a string consisting of 'key=value' substrings for each keyword
argument in :kwargs:
@param filterKeys: list of key names to ignore
@param filterValues: list of values to ignore (e.g. None will ignore all
key=value pairs that has that value.
"""
return ', '.join([str(k)+'='+repr(v) for k, v in kwargs.iteritems()
if k not in filterKeys and v not in filterValues])
| <commit_before>"""
@author: Geir Sporsheim
@license: see LICENCE for details
"""
def attr_string(filterKeys=(), filterValues=(), **kwargs):
return ', '.join([str(k)+'='+repr(v) for k, v in kwargs.iteritems() if k not in filterKeys and v not in filterValues])
<commit_msg>Add documentation and touch up formatting of attr_string util method<commit_after> | """
@author: Geir Sporsheim
@license: see LICENCE for details
"""
def attr_string(filterKeys=(), filterValues=(), **kwargs):
"""Build a string consisting of 'key=value' substrings for each keyword
argument in :kwargs:
@param filterKeys: list of key names to ignore
@param filterValues: list of values to ignore (e.g. None will ignore all
key=value pairs that has that value.
"""
return ', '.join([str(k)+'='+repr(v) for k, v in kwargs.iteritems()
if k not in filterKeys and v not in filterValues])
| """
@author: Geir Sporsheim
@license: see LICENCE for details
"""
def attr_string(filterKeys=(), filterValues=(), **kwargs):
return ', '.join([str(k)+'='+repr(v) for k, v in kwargs.iteritems() if k not in filterKeys and v not in filterValues])
Add documentation and touch up formatting of attr_string util method"""
@author: Geir Sporsheim
@license: see LICENCE for details
"""
def attr_string(filterKeys=(), filterValues=(), **kwargs):
"""Build a string consisting of 'key=value' substrings for each keyword
argument in :kwargs:
@param filterKeys: list of key names to ignore
@param filterValues: list of values to ignore (e.g. None will ignore all
key=value pairs that has that value.
"""
return ', '.join([str(k)+'='+repr(v) for k, v in kwargs.iteritems()
if k not in filterKeys and v not in filterValues])
| <commit_before>"""
@author: Geir Sporsheim
@license: see LICENCE for details
"""
def attr_string(filterKeys=(), filterValues=(), **kwargs):
return ', '.join([str(k)+'='+repr(v) for k, v in kwargs.iteritems() if k not in filterKeys and v not in filterValues])
<commit_msg>Add documentation and touch up formatting of attr_string util method<commit_after>"""
@author: Geir Sporsheim
@license: see LICENCE for details
"""
def attr_string(filterKeys=(), filterValues=(), **kwargs):
"""Build a string consisting of 'key=value' substrings for each keyword
argument in :kwargs:
@param filterKeys: list of key names to ignore
@param filterValues: list of values to ignore (e.g. None will ignore all
key=value pairs that has that value.
"""
return ', '.join([str(k)+'='+repr(v) for k, v in kwargs.iteritems()
if k not in filterKeys and v not in filterValues])
|
b330afbcdc907343ab609a5b000f08e69671116e | sobotka/lib/ssh_config_util.py | sobotka/lib/ssh_config_util.py | from storm.parsers.ssh_config_parser import ConfigParser as StormParser
from os.path import expanduser
def add_host(name, user, hostname, key_file):
sconfig = StormParser(expanduser("~/.ssh/config"))
sconfig.load()
sconfig.add_host(name, {
'user': user,
'hostname': hostname,
'IdentityFile': key_file
})
sconfig.write_to_ssh_config()
def remove_host(name):
sconfig = StormParser(expanduser("~/.ssh/config"))
sconfig.load()
sconfig.delete_host(name)
sconfig.write_to_ssh_config()
| from storm.parsers.ssh_config_parser import ConfigParser as StormParser
from os.path import expanduser
def add_host(name, user, hostname, key_file):
sconfig = StormParser(expanduser("~/.ssh/config"))
sconfig.load()
sconfig.add_host(name, {
'user': user,
'hostname': hostname,
'IdentityFile': key_file,
"StrictHostKeyChecking": "no"
})
sconfig.write_to_ssh_config()
def remove_host(name):
sconfig = StormParser(expanduser("~/.ssh/config"))
sconfig.load()
sconfig.delete_host(name)
sconfig.write_to_ssh_config()
| Disable strict host checking for ssh | Disable strict host checking for ssh
| Python | mit | looneym/sobotka,looneym/sobotka | from storm.parsers.ssh_config_parser import ConfigParser as StormParser
from os.path import expanduser
def add_host(name, user, hostname, key_file):
sconfig = StormParser(expanduser("~/.ssh/config"))
sconfig.load()
sconfig.add_host(name, {
'user': user,
'hostname': hostname,
'IdentityFile': key_file
})
sconfig.write_to_ssh_config()
def remove_host(name):
sconfig = StormParser(expanduser("~/.ssh/config"))
sconfig.load()
sconfig.delete_host(name)
sconfig.write_to_ssh_config()
Disable strict host checking for ssh | from storm.parsers.ssh_config_parser import ConfigParser as StormParser
from os.path import expanduser
def add_host(name, user, hostname, key_file):
sconfig = StormParser(expanduser("~/.ssh/config"))
sconfig.load()
sconfig.add_host(name, {
'user': user,
'hostname': hostname,
'IdentityFile': key_file,
"StrictHostKeyChecking": "no"
})
sconfig.write_to_ssh_config()
def remove_host(name):
sconfig = StormParser(expanduser("~/.ssh/config"))
sconfig.load()
sconfig.delete_host(name)
sconfig.write_to_ssh_config()
| <commit_before>from storm.parsers.ssh_config_parser import ConfigParser as StormParser
from os.path import expanduser
def add_host(name, user, hostname, key_file):
sconfig = StormParser(expanduser("~/.ssh/config"))
sconfig.load()
sconfig.add_host(name, {
'user': user,
'hostname': hostname,
'IdentityFile': key_file
})
sconfig.write_to_ssh_config()
def remove_host(name):
sconfig = StormParser(expanduser("~/.ssh/config"))
sconfig.load()
sconfig.delete_host(name)
sconfig.write_to_ssh_config()
<commit_msg>Disable strict host checking for ssh<commit_after> | from storm.parsers.ssh_config_parser import ConfigParser as StormParser
from os.path import expanduser
def add_host(name, user, hostname, key_file):
sconfig = StormParser(expanduser("~/.ssh/config"))
sconfig.load()
sconfig.add_host(name, {
'user': user,
'hostname': hostname,
'IdentityFile': key_file,
"StrictHostKeyChecking": "no"
})
sconfig.write_to_ssh_config()
def remove_host(name):
sconfig = StormParser(expanduser("~/.ssh/config"))
sconfig.load()
sconfig.delete_host(name)
sconfig.write_to_ssh_config()
| from storm.parsers.ssh_config_parser import ConfigParser as StormParser
from os.path import expanduser
def add_host(name, user, hostname, key_file):
sconfig = StormParser(expanduser("~/.ssh/config"))
sconfig.load()
sconfig.add_host(name, {
'user': user,
'hostname': hostname,
'IdentityFile': key_file
})
sconfig.write_to_ssh_config()
def remove_host(name):
sconfig = StormParser(expanduser("~/.ssh/config"))
sconfig.load()
sconfig.delete_host(name)
sconfig.write_to_ssh_config()
Disable strict host checking for sshfrom storm.parsers.ssh_config_parser import ConfigParser as StormParser
from os.path import expanduser
def add_host(name, user, hostname, key_file):
sconfig = StormParser(expanduser("~/.ssh/config"))
sconfig.load()
sconfig.add_host(name, {
'user': user,
'hostname': hostname,
'IdentityFile': key_file,
"StrictHostKeyChecking": "no"
})
sconfig.write_to_ssh_config()
def remove_host(name):
sconfig = StormParser(expanduser("~/.ssh/config"))
sconfig.load()
sconfig.delete_host(name)
sconfig.write_to_ssh_config()
| <commit_before>from storm.parsers.ssh_config_parser import ConfigParser as StormParser
from os.path import expanduser
def add_host(name, user, hostname, key_file):
sconfig = StormParser(expanduser("~/.ssh/config"))
sconfig.load()
sconfig.add_host(name, {
'user': user,
'hostname': hostname,
'IdentityFile': key_file
})
sconfig.write_to_ssh_config()
def remove_host(name):
sconfig = StormParser(expanduser("~/.ssh/config"))
sconfig.load()
sconfig.delete_host(name)
sconfig.write_to_ssh_config()
<commit_msg>Disable strict host checking for ssh<commit_after>from storm.parsers.ssh_config_parser import ConfigParser as StormParser
from os.path import expanduser
def add_host(name, user, hostname, key_file):
sconfig = StormParser(expanduser("~/.ssh/config"))
sconfig.load()
sconfig.add_host(name, {
'user': user,
'hostname': hostname,
'IdentityFile': key_file,
"StrictHostKeyChecking": "no"
})
sconfig.write_to_ssh_config()
def remove_host(name):
sconfig = StormParser(expanduser("~/.ssh/config"))
sconfig.load()
sconfig.delete_host(name)
sconfig.write_to_ssh_config()
|
f529198d385e63ea657c33c166eb05d43bdcf14a | sensor/core/models/event.py | sensor/core/models/event.py | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from django.db import models
VALUE_MAX_LEN = 128
class GenericEvent(models.Model):
"""Represents a measurement event abstracting away what exactly is
measured.
"""
sensor = models.ForeignKey('core.GenericSensor')
datetime = models.DateTimeField()
value = models.CharField(max_length=VALUE_MAX_LEN)
class Event(models.Model):
"""Base class for sensor-specific event types"""
generic_event = models.OneToOneField('core.GenericEvent')
def value_to_string(self):
"""Event.value_to_string() -> unicode
Returns a string representation of the
"""
raise NotImplementedError(self.__class__.value_to_string)
class Meta:
abstract = True | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from django.db import models
VALUE_MAX_LEN = 128
class GenericEvent(models.Model):
"""Represents a measurement event abstracting away what exactly is
measured.
"""
sensor = models.ForeignKey('core.GenericSensor')
datetime = models.DateTimeField()
value = models.CharField(max_length=VALUE_MAX_LEN)
class Event(models.Model):
"""Base class for sensor-specific event types"""
generic_event = models.OneToOneField('core.GenericEvent')
datetime = models.DateTimeField()
def value_to_string(self):
"""Event.value_to_string() -> unicode
Returns a string representation of the
"""
raise NotImplementedError(self.__class__.value_to_string)
class Meta:
abstract = True | Add datetime field to Event model in sensor.core | Add datetime field to Event model in sensor.core
| Python | mpl-2.0 | HeisenbergPeople/weather-station-site,HeisenbergPeople/weather-station-site,HeisenbergPeople/weather-station-site | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from django.db import models
VALUE_MAX_LEN = 128
class GenericEvent(models.Model):
"""Represents a measurement event abstracting away what exactly is
measured.
"""
sensor = models.ForeignKey('core.GenericSensor')
datetime = models.DateTimeField()
value = models.CharField(max_length=VALUE_MAX_LEN)
class Event(models.Model):
"""Base class for sensor-specific event types"""
generic_event = models.OneToOneField('core.GenericEvent')
def value_to_string(self):
"""Event.value_to_string() -> unicode
Returns a string representation of the
"""
raise NotImplementedError(self.__class__.value_to_string)
class Meta:
abstract = TrueAdd datetime field to Event model in sensor.core | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from django.db import models
VALUE_MAX_LEN = 128
class GenericEvent(models.Model):
"""Represents a measurement event abstracting away what exactly is
measured.
"""
sensor = models.ForeignKey('core.GenericSensor')
datetime = models.DateTimeField()
value = models.CharField(max_length=VALUE_MAX_LEN)
class Event(models.Model):
"""Base class for sensor-specific event types"""
generic_event = models.OneToOneField('core.GenericEvent')
datetime = models.DateTimeField()
def value_to_string(self):
"""Event.value_to_string() -> unicode
Returns a string representation of the
"""
raise NotImplementedError(self.__class__.value_to_string)
class Meta:
abstract = True | <commit_before># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from django.db import models
VALUE_MAX_LEN = 128
class GenericEvent(models.Model):
"""Represents a measurement event abstracting away what exactly is
measured.
"""
sensor = models.ForeignKey('core.GenericSensor')
datetime = models.DateTimeField()
value = models.CharField(max_length=VALUE_MAX_LEN)
class Event(models.Model):
"""Base class for sensor-specific event types"""
generic_event = models.OneToOneField('core.GenericEvent')
def value_to_string(self):
"""Event.value_to_string() -> unicode
Returns a string representation of the
"""
raise NotImplementedError(self.__class__.value_to_string)
class Meta:
abstract = True<commit_msg>Add datetime field to Event model in sensor.core<commit_after> | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from django.db import models
VALUE_MAX_LEN = 128
class GenericEvent(models.Model):
"""Represents a measurement event abstracting away what exactly is
measured.
"""
sensor = models.ForeignKey('core.GenericSensor')
datetime = models.DateTimeField()
value = models.CharField(max_length=VALUE_MAX_LEN)
class Event(models.Model):
"""Base class for sensor-specific event types"""
generic_event = models.OneToOneField('core.GenericEvent')
datetime = models.DateTimeField()
def value_to_string(self):
"""Event.value_to_string() -> unicode
Returns a string representation of the
"""
raise NotImplementedError(self.__class__.value_to_string)
class Meta:
abstract = True | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from django.db import models
VALUE_MAX_LEN = 128
class GenericEvent(models.Model):
"""Represents a measurement event abstracting away what exactly is
measured.
"""
sensor = models.ForeignKey('core.GenericSensor')
datetime = models.DateTimeField()
value = models.CharField(max_length=VALUE_MAX_LEN)
class Event(models.Model):
"""Base class for sensor-specific event types"""
generic_event = models.OneToOneField('core.GenericEvent')
def value_to_string(self):
"""Event.value_to_string() -> unicode
Returns a string representation of the
"""
raise NotImplementedError(self.__class__.value_to_string)
class Meta:
abstract = TrueAdd datetime field to Event model in sensor.core# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from django.db import models
VALUE_MAX_LEN = 128
class GenericEvent(models.Model):
"""Represents a measurement event abstracting away what exactly is
measured.
"""
sensor = models.ForeignKey('core.GenericSensor')
datetime = models.DateTimeField()
value = models.CharField(max_length=VALUE_MAX_LEN)
class Event(models.Model):
"""Base class for sensor-specific event types"""
generic_event = models.OneToOneField('core.GenericEvent')
datetime = models.DateTimeField()
def value_to_string(self):
"""Event.value_to_string() -> unicode
Returns a string representation of the
"""
raise NotImplementedError(self.__class__.value_to_string)
class Meta:
abstract = True | <commit_before># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from django.db import models
VALUE_MAX_LEN = 128
class GenericEvent(models.Model):
"""Represents a measurement event abstracting away what exactly is
measured.
"""
sensor = models.ForeignKey('core.GenericSensor')
datetime = models.DateTimeField()
value = models.CharField(max_length=VALUE_MAX_LEN)
class Event(models.Model):
"""Base class for sensor-specific event types"""
generic_event = models.OneToOneField('core.GenericEvent')
def value_to_string(self):
"""Event.value_to_string() -> unicode
Returns a string representation of the
"""
raise NotImplementedError(self.__class__.value_to_string)
class Meta:
abstract = True<commit_msg>Add datetime field to Event model in sensor.core<commit_after># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from django.db import models
VALUE_MAX_LEN = 128
class GenericEvent(models.Model):
"""Represents a measurement event abstracting away what exactly is
measured.
"""
sensor = models.ForeignKey('core.GenericSensor')
datetime = models.DateTimeField()
value = models.CharField(max_length=VALUE_MAX_LEN)
class Event(models.Model):
"""Base class for sensor-specific event types"""
generic_event = models.OneToOneField('core.GenericEvent')
datetime = models.DateTimeField()
def value_to_string(self):
"""Event.value_to_string() -> unicode
Returns a string representation of the
"""
raise NotImplementedError(self.__class__.value_to_string)
class Meta:
abstract = True |
db20368ba2531af3d027a25f0b9e71389a8ea7fd | rszio/templatetags/rszio.py | rszio/templatetags/rszio.py | from django import template
from django.urls import reverse
register = template.Library()
@register.simple_tag
def image(img, **kwargs):
# Convert the kwargs to an appropriate string
options = []
for key, value in kwargs.items():
options.append('{key},{value}'.format(key=key, value=value))
return reverse('rszio_image', kwargs={
'options': ';'.join(options),
'path': img,
})
| from django import template
from django.conf import settings
from django.urls import reverse
register = template.Library()
@register.simple_tag
def image(img, **kwargs):
if not img:
if settings.DEBUG:
raise ValueError('No image path defined')
return ''
# Convert the kwargs to an appropriate string
options = []
for key, value in kwargs.items():
options.append('{key},{value}'.format(key=key, value=value))
return reverse('rszio_image', kwargs={
'options': ';'.join(options),
'path': img,
})
| Raise ValueError when no path and DEBUG | Raise ValueError when no path and DEBUG
Fails silently when DEBUG is False
| Python | mit | g3rd/django-rszio | from django import template
from django.urls import reverse
register = template.Library()
@register.simple_tag
def image(img, **kwargs):
# Convert the kwargs to an appropriate string
options = []
for key, value in kwargs.items():
options.append('{key},{value}'.format(key=key, value=value))
return reverse('rszio_image', kwargs={
'options': ';'.join(options),
'path': img,
})
Raise ValueError when no path and DEBUG
Fails silently when DEBUG is False | from django import template
from django.conf import settings
from django.urls import reverse
register = template.Library()
@register.simple_tag
def image(img, **kwargs):
if not img:
if settings.DEBUG:
raise ValueError('No image path defined')
return ''
# Convert the kwargs to an appropriate string
options = []
for key, value in kwargs.items():
options.append('{key},{value}'.format(key=key, value=value))
return reverse('rszio_image', kwargs={
'options': ';'.join(options),
'path': img,
})
| <commit_before>from django import template
from django.urls import reverse
register = template.Library()
@register.simple_tag
def image(img, **kwargs):
# Convert the kwargs to an appropriate string
options = []
for key, value in kwargs.items():
options.append('{key},{value}'.format(key=key, value=value))
return reverse('rszio_image', kwargs={
'options': ';'.join(options),
'path': img,
})
<commit_msg>Raise ValueError when no path and DEBUG
Fails silently when DEBUG is False<commit_after> | from django import template
from django.conf import settings
from django.urls import reverse
register = template.Library()
@register.simple_tag
def image(img, **kwargs):
if not img:
if settings.DEBUG:
raise ValueError('No image path defined')
return ''
# Convert the kwargs to an appropriate string
options = []
for key, value in kwargs.items():
options.append('{key},{value}'.format(key=key, value=value))
return reverse('rszio_image', kwargs={
'options': ';'.join(options),
'path': img,
})
| from django import template
from django.urls import reverse
register = template.Library()
@register.simple_tag
def image(img, **kwargs):
# Convert the kwargs to an appropriate string
options = []
for key, value in kwargs.items():
options.append('{key},{value}'.format(key=key, value=value))
return reverse('rszio_image', kwargs={
'options': ';'.join(options),
'path': img,
})
Raise ValueError when no path and DEBUG
Fails silently when DEBUG is Falsefrom django import template
from django.conf import settings
from django.urls import reverse
register = template.Library()
@register.simple_tag
def image(img, **kwargs):
if not img:
if settings.DEBUG:
raise ValueError('No image path defined')
return ''
# Convert the kwargs to an appropriate string
options = []
for key, value in kwargs.items():
options.append('{key},{value}'.format(key=key, value=value))
return reverse('rszio_image', kwargs={
'options': ';'.join(options),
'path': img,
})
| <commit_before>from django import template
from django.urls import reverse
register = template.Library()
@register.simple_tag
def image(img, **kwargs):
# Convert the kwargs to an appropriate string
options = []
for key, value in kwargs.items():
options.append('{key},{value}'.format(key=key, value=value))
return reverse('rszio_image', kwargs={
'options': ';'.join(options),
'path': img,
})
<commit_msg>Raise ValueError when no path and DEBUG
Fails silently when DEBUG is False<commit_after>from django import template
from django.conf import settings
from django.urls import reverse
register = template.Library()
@register.simple_tag
def image(img, **kwargs):
if not img:
if settings.DEBUG:
raise ValueError('No image path defined')
return ''
# Convert the kwargs to an appropriate string
options = []
for key, value in kwargs.items():
options.append('{key},{value}'.format(key=key, value=value))
return reverse('rszio_image', kwargs={
'options': ';'.join(options),
'path': img,
})
|
d0ea4b585ef9523eac528c5a4fba4b0af653cad3 | tests/loginput/test_loginput_index.py | tests/loginput/test_loginput_index.py | from loginput_test_suite import LoginputTestSuite
class TestTestRoute(LoginputTestSuite):
routes = ['/test', '/test/']
status_code = 200
body = ''
# Routes left need to have unit tests written for:
# @route('/veris')
# @route('/veris/')
# @post('/blockip', methods=['POST'])
# @post('/blockip/', methods=['POST'])
# @post('/ipwhois', methods=['POST'])
# @post('/ipwhois/', methods=['POST'])
# @post('/ipintel', methods=['POST'])
# @post('/ipintel/', methods=['POST'])
# @post('/ipcifquery', methods=['POST'])
# @post('/ipcifquery/', methods=['POST'])
# @post('/ipdshieldquery', methods=['POST'])
# @post('/ipdshieldquery/', methods=['POST'])
# @route('/plugins', methods=['GET'])
# @route('/plugins/', methods=['GET'])
# @route('/plugins/<endpoint>', methods=['GET'])
# @post('/incident', methods=['POST'])
# @post('/incident/', methods=['POST'])
| from loginput_test_suite import LoginputTestSuite
class TestTestRoute(LoginputTestSuite):
routes = ['/test', '/test/']
status_code = 200
body = ''
# Routes left need to have unit tests written for:
# @route('/_bulk',method='POST')
# @route('/_bulk/',method='POST')
# @route('/_status')
# @route('/_status/')
# @route('/nxlog/', method=['POST','PUT'])
# @route('/nxlog', method=['POST','PUT'])
# @route('/events/',method=['POST','PUT'])
# @route('/events', method=['POST','PUT'])
# @route('/cef', method=['POST','PUT'])
# @route('/cef/',method=['POST','PUT'])
# @route('/custom/<application>',method=['POST','PUT'])
| Update comments for loginput tests | Update comments for loginput tests
Signed-off-by: Brandon Myers <9cda508be11a1ae7ceef912b85c196946f0ec5f3@mozilla.com>
| Python | mpl-2.0 | jeffbryner/MozDef,mpurzynski/MozDef,Phrozyn/MozDef,Phrozyn/MozDef,jeffbryner/MozDef,gdestuynder/MozDef,ameihm0912/MozDef,gdestuynder/MozDef,mozilla/MozDef,ameihm0912/MozDef,gdestuynder/MozDef,mozilla/MozDef,ameihm0912/MozDef,mpurzynski/MozDef,mozilla/MozDef,mpurzynski/MozDef,mpurzynski/MozDef,mozilla/MozDef,jeffbryner/MozDef,Phrozyn/MozDef,gdestuynder/MozDef,jeffbryner/MozDef,Phrozyn/MozDef,ameihm0912/MozDef | from loginput_test_suite import LoginputTestSuite
class TestTestRoute(LoginputTestSuite):
routes = ['/test', '/test/']
status_code = 200
body = ''
# Routes left need to have unit tests written for:
# @route('/veris')
# @route('/veris/')
# @post('/blockip', methods=['POST'])
# @post('/blockip/', methods=['POST'])
# @post('/ipwhois', methods=['POST'])
# @post('/ipwhois/', methods=['POST'])
# @post('/ipintel', methods=['POST'])
# @post('/ipintel/', methods=['POST'])
# @post('/ipcifquery', methods=['POST'])
# @post('/ipcifquery/', methods=['POST'])
# @post('/ipdshieldquery', methods=['POST'])
# @post('/ipdshieldquery/', methods=['POST'])
# @route('/plugins', methods=['GET'])
# @route('/plugins/', methods=['GET'])
# @route('/plugins/<endpoint>', methods=['GET'])
# @post('/incident', methods=['POST'])
# @post('/incident/', methods=['POST'])
Update comments for loginput tests
Signed-off-by: Brandon Myers <9cda508be11a1ae7ceef912b85c196946f0ec5f3@mozilla.com> | from loginput_test_suite import LoginputTestSuite
class TestTestRoute(LoginputTestSuite):
routes = ['/test', '/test/']
status_code = 200
body = ''
# Routes left need to have unit tests written for:
# @route('/_bulk',method='POST')
# @route('/_bulk/',method='POST')
# @route('/_status')
# @route('/_status/')
# @route('/nxlog/', method=['POST','PUT'])
# @route('/nxlog', method=['POST','PUT'])
# @route('/events/',method=['POST','PUT'])
# @route('/events', method=['POST','PUT'])
# @route('/cef', method=['POST','PUT'])
# @route('/cef/',method=['POST','PUT'])
# @route('/custom/<application>',method=['POST','PUT'])
| <commit_before>from loginput_test_suite import LoginputTestSuite
class TestTestRoute(LoginputTestSuite):
routes = ['/test', '/test/']
status_code = 200
body = ''
# Routes left need to have unit tests written for:
# @route('/veris')
# @route('/veris/')
# @post('/blockip', methods=['POST'])
# @post('/blockip/', methods=['POST'])
# @post('/ipwhois', methods=['POST'])
# @post('/ipwhois/', methods=['POST'])
# @post('/ipintel', methods=['POST'])
# @post('/ipintel/', methods=['POST'])
# @post('/ipcifquery', methods=['POST'])
# @post('/ipcifquery/', methods=['POST'])
# @post('/ipdshieldquery', methods=['POST'])
# @post('/ipdshieldquery/', methods=['POST'])
# @route('/plugins', methods=['GET'])
# @route('/plugins/', methods=['GET'])
# @route('/plugins/<endpoint>', methods=['GET'])
# @post('/incident', methods=['POST'])
# @post('/incident/', methods=['POST'])
<commit_msg>Update comments for loginput tests
Signed-off-by: Brandon Myers <9cda508be11a1ae7ceef912b85c196946f0ec5f3@mozilla.com><commit_after> | from loginput_test_suite import LoginputTestSuite
class TestTestRoute(LoginputTestSuite):
routes = ['/test', '/test/']
status_code = 200
body = ''
# Routes left need to have unit tests written for:
# @route('/_bulk',method='POST')
# @route('/_bulk/',method='POST')
# @route('/_status')
# @route('/_status/')
# @route('/nxlog/', method=['POST','PUT'])
# @route('/nxlog', method=['POST','PUT'])
# @route('/events/',method=['POST','PUT'])
# @route('/events', method=['POST','PUT'])
# @route('/cef', method=['POST','PUT'])
# @route('/cef/',method=['POST','PUT'])
# @route('/custom/<application>',method=['POST','PUT'])
| from loginput_test_suite import LoginputTestSuite
class TestTestRoute(LoginputTestSuite):
routes = ['/test', '/test/']
status_code = 200
body = ''
# Routes left need to have unit tests written for:
# @route('/veris')
# @route('/veris/')
# @post('/blockip', methods=['POST'])
# @post('/blockip/', methods=['POST'])
# @post('/ipwhois', methods=['POST'])
# @post('/ipwhois/', methods=['POST'])
# @post('/ipintel', methods=['POST'])
# @post('/ipintel/', methods=['POST'])
# @post('/ipcifquery', methods=['POST'])
# @post('/ipcifquery/', methods=['POST'])
# @post('/ipdshieldquery', methods=['POST'])
# @post('/ipdshieldquery/', methods=['POST'])
# @route('/plugins', methods=['GET'])
# @route('/plugins/', methods=['GET'])
# @route('/plugins/<endpoint>', methods=['GET'])
# @post('/incident', methods=['POST'])
# @post('/incident/', methods=['POST'])
Update comments for loginput tests
Signed-off-by: Brandon Myers <9cda508be11a1ae7ceef912b85c196946f0ec5f3@mozilla.com>from loginput_test_suite import LoginputTestSuite
class TestTestRoute(LoginputTestSuite):
routes = ['/test', '/test/']
status_code = 200
body = ''
# Routes left need to have unit tests written for:
# @route('/_bulk',method='POST')
# @route('/_bulk/',method='POST')
# @route('/_status')
# @route('/_status/')
# @route('/nxlog/', method=['POST','PUT'])
# @route('/nxlog', method=['POST','PUT'])
# @route('/events/',method=['POST','PUT'])
# @route('/events', method=['POST','PUT'])
# @route('/cef', method=['POST','PUT'])
# @route('/cef/',method=['POST','PUT'])
# @route('/custom/<application>',method=['POST','PUT'])
| <commit_before>from loginput_test_suite import LoginputTestSuite
class TestTestRoute(LoginputTestSuite):
routes = ['/test', '/test/']
status_code = 200
body = ''
# Routes left need to have unit tests written for:
# @route('/veris')
# @route('/veris/')
# @post('/blockip', methods=['POST'])
# @post('/blockip/', methods=['POST'])
# @post('/ipwhois', methods=['POST'])
# @post('/ipwhois/', methods=['POST'])
# @post('/ipintel', methods=['POST'])
# @post('/ipintel/', methods=['POST'])
# @post('/ipcifquery', methods=['POST'])
# @post('/ipcifquery/', methods=['POST'])
# @post('/ipdshieldquery', methods=['POST'])
# @post('/ipdshieldquery/', methods=['POST'])
# @route('/plugins', methods=['GET'])
# @route('/plugins/', methods=['GET'])
# @route('/plugins/<endpoint>', methods=['GET'])
# @post('/incident', methods=['POST'])
# @post('/incident/', methods=['POST'])
<commit_msg>Update comments for loginput tests
Signed-off-by: Brandon Myers <9cda508be11a1ae7ceef912b85c196946f0ec5f3@mozilla.com><commit_after>from loginput_test_suite import LoginputTestSuite
class TestTestRoute(LoginputTestSuite):
routes = ['/test', '/test/']
status_code = 200
body = ''
# Routes left need to have unit tests written for:
# @route('/_bulk',method='POST')
# @route('/_bulk/',method='POST')
# @route('/_status')
# @route('/_status/')
# @route('/nxlog/', method=['POST','PUT'])
# @route('/nxlog', method=['POST','PUT'])
# @route('/events/',method=['POST','PUT'])
# @route('/events', method=['POST','PUT'])
# @route('/cef', method=['POST','PUT'])
# @route('/cef/',method=['POST','PUT'])
# @route('/custom/<application>',method=['POST','PUT'])
|
4a37433c43ffda2443f80cc93c99f9cd76aa6475 | examples/miniapps/movie_lister/movies/__init__.py | examples/miniapps/movie_lister/movies/__init__.py | """Movies package.
Top-level package of movies library. This package contains IoC container of
movies module component providers - ``MoviesModule``. It is recommended to use
movies library functionality by fetching required instances from
``MoviesModule`` providers.
``MoviesModule.finder`` is a factory that provides abstract component
``finders.MovieFinder``. This provider should be overridden by provider of
concrete finder implementation in terms of library configuration.
Each of ``MoviesModule`` providers could be overridden.
"""
import movies.finders
import movies.listers
import movies.models
import dependency_injector.containers as containers
import dependency_injector.providers as providers
class MoviesModule(containers.DeclarativeContainer):
"""IoC container of movies module component providers."""
models_factory = providers.Factory(movies.models.Movie)
finder = providers.AbstractFactory(movies.finders.MovieFinder,
movie_model=models_factory.delegate())
lister = providers.Factory(movies.listers.MovieLister,
movie_finder=finder)
| """Movies package.
Top-level package of movies library. This package contains IoC container of
movies module component providers - ``MoviesModule``. It is recommended to use
movies library functionality by fetching required instances from
``MoviesModule`` providers.
``MoviesModule.finder`` is a factory that provides abstract component
``finders.MovieFinder``. This provider should be overridden by provider of
concrete finder implementation in terms of library configuration.
Each of ``MoviesModule`` providers could be overridden.
"""
import movies.finders
import movies.listers
import movies.models
import dependency_injector.containers as containers
import dependency_injector.providers as providers
class MoviesModule(containers.DeclarativeContainer):
"""IoC container of movies module component providers."""
movie = providers.Factory(movies.models.Movie)
finder = providers.AbstractFactory(movies.finders.MovieFinder,
movie_model=movie.provider)
lister = providers.Factory(movies.listers.MovieLister,
movie_finder=finder)
| Add minor fixes to movie lister miniapp | Add minor fixes to movie lister miniapp
| Python | bsd-3-clause | rmk135/objects,ets-labs/python-dependency-injector,ets-labs/dependency_injector,rmk135/dependency_injector | """Movies package.
Top-level package of movies library. This package contains IoC container of
movies module component providers - ``MoviesModule``. It is recommended to use
movies library functionality by fetching required instances from
``MoviesModule`` providers.
``MoviesModule.finder`` is a factory that provides abstract component
``finders.MovieFinder``. This provider should be overridden by provider of
concrete finder implementation in terms of library configuration.
Each of ``MoviesModule`` providers could be overridden.
"""
import movies.finders
import movies.listers
import movies.models
import dependency_injector.containers as containers
import dependency_injector.providers as providers
class MoviesModule(containers.DeclarativeContainer):
"""IoC container of movies module component providers."""
models_factory = providers.Factory(movies.models.Movie)
finder = providers.AbstractFactory(movies.finders.MovieFinder,
movie_model=models_factory.delegate())
lister = providers.Factory(movies.listers.MovieLister,
movie_finder=finder)
Add minor fixes to movie lister miniapp | """Movies package.
Top-level package of movies library. This package contains IoC container of
movies module component providers - ``MoviesModule``. It is recommended to use
movies library functionality by fetching required instances from
``MoviesModule`` providers.
``MoviesModule.finder`` is a factory that provides abstract component
``finders.MovieFinder``. This provider should be overridden by provider of
concrete finder implementation in terms of library configuration.
Each of ``MoviesModule`` providers could be overridden.
"""
import movies.finders
import movies.listers
import movies.models
import dependency_injector.containers as containers
import dependency_injector.providers as providers
class MoviesModule(containers.DeclarativeContainer):
"""IoC container of movies module component providers."""
movie = providers.Factory(movies.models.Movie)
finder = providers.AbstractFactory(movies.finders.MovieFinder,
movie_model=movie.provider)
lister = providers.Factory(movies.listers.MovieLister,
movie_finder=finder)
| <commit_before>"""Movies package.
Top-level package of movies library. This package contains IoC container of
movies module component providers - ``MoviesModule``. It is recommended to use
movies library functionality by fetching required instances from
``MoviesModule`` providers.
``MoviesModule.finder`` is a factory that provides abstract component
``finders.MovieFinder``. This provider should be overridden by provider of
concrete finder implementation in terms of library configuration.
Each of ``MoviesModule`` providers could be overridden.
"""
import movies.finders
import movies.listers
import movies.models
import dependency_injector.containers as containers
import dependency_injector.providers as providers
class MoviesModule(containers.DeclarativeContainer):
"""IoC container of movies module component providers."""
models_factory = providers.Factory(movies.models.Movie)
finder = providers.AbstractFactory(movies.finders.MovieFinder,
movie_model=models_factory.delegate())
lister = providers.Factory(movies.listers.MovieLister,
movie_finder=finder)
<commit_msg>Add minor fixes to movie lister miniapp<commit_after> | """Movies package.
Top-level package of movies library. This package contains IoC container of
movies module component providers - ``MoviesModule``. It is recommended to use
movies library functionality by fetching required instances from
``MoviesModule`` providers.
``MoviesModule.finder`` is a factory that provides abstract component
``finders.MovieFinder``. This provider should be overridden by provider of
concrete finder implementation in terms of library configuration.
Each of ``MoviesModule`` providers could be overridden.
"""
import movies.finders
import movies.listers
import movies.models
import dependency_injector.containers as containers
import dependency_injector.providers as providers
class MoviesModule(containers.DeclarativeContainer):
"""IoC container of movies module component providers."""
movie = providers.Factory(movies.models.Movie)
finder = providers.AbstractFactory(movies.finders.MovieFinder,
movie_model=movie.provider)
lister = providers.Factory(movies.listers.MovieLister,
movie_finder=finder)
| """Movies package.
Top-level package of movies library. This package contains IoC container of
movies module component providers - ``MoviesModule``. It is recommended to use
movies library functionality by fetching required instances from
``MoviesModule`` providers.
``MoviesModule.finder`` is a factory that provides abstract component
``finders.MovieFinder``. This provider should be overridden by provider of
concrete finder implementation in terms of library configuration.
Each of ``MoviesModule`` providers could be overridden.
"""
import movies.finders
import movies.listers
import movies.models
import dependency_injector.containers as containers
import dependency_injector.providers as providers
class MoviesModule(containers.DeclarativeContainer):
"""IoC container of movies module component providers."""
models_factory = providers.Factory(movies.models.Movie)
finder = providers.AbstractFactory(movies.finders.MovieFinder,
movie_model=models_factory.delegate())
lister = providers.Factory(movies.listers.MovieLister,
movie_finder=finder)
Add minor fixes to movie lister miniapp"""Movies package.
Top-level package of movies library. This package contains IoC container of
movies module component providers - ``MoviesModule``. It is recommended to use
movies library functionality by fetching required instances from
``MoviesModule`` providers.
``MoviesModule.finder`` is a factory that provides abstract component
``finders.MovieFinder``. This provider should be overridden by provider of
concrete finder implementation in terms of library configuration.
Each of ``MoviesModule`` providers could be overridden.
"""
import movies.finders
import movies.listers
import movies.models
import dependency_injector.containers as containers
import dependency_injector.providers as providers
class MoviesModule(containers.DeclarativeContainer):
"""IoC container of movies module component providers."""
movie = providers.Factory(movies.models.Movie)
finder = providers.AbstractFactory(movies.finders.MovieFinder,
movie_model=movie.provider)
lister = providers.Factory(movies.listers.MovieLister,
movie_finder=finder)
| <commit_before>"""Movies package.
Top-level package of movies library. This package contains IoC container of
movies module component providers - ``MoviesModule``. It is recommended to use
movies library functionality by fetching required instances from
``MoviesModule`` providers.
``MoviesModule.finder`` is a factory that provides abstract component
``finders.MovieFinder``. This provider should be overridden by provider of
concrete finder implementation in terms of library configuration.
Each of ``MoviesModule`` providers could be overridden.
"""
import movies.finders
import movies.listers
import movies.models
import dependency_injector.containers as containers
import dependency_injector.providers as providers
class MoviesModule(containers.DeclarativeContainer):
"""IoC container of movies module component providers."""
models_factory = providers.Factory(movies.models.Movie)
finder = providers.AbstractFactory(movies.finders.MovieFinder,
movie_model=models_factory.delegate())
lister = providers.Factory(movies.listers.MovieLister,
movie_finder=finder)
<commit_msg>Add minor fixes to movie lister miniapp<commit_after>"""Movies package.
Top-level package of movies library. This package contains IoC container of
movies module component providers - ``MoviesModule``. It is recommended to use
movies library functionality by fetching required instances from
``MoviesModule`` providers.
``MoviesModule.finder`` is a factory that provides abstract component
``finders.MovieFinder``. This provider should be overridden by provider of
concrete finder implementation in terms of library configuration.
Each of ``MoviesModule`` providers could be overridden.
"""
import movies.finders
import movies.listers
import movies.models
import dependency_injector.containers as containers
import dependency_injector.providers as providers
class MoviesModule(containers.DeclarativeContainer):
"""IoC container of movies module component providers."""
movie = providers.Factory(movies.models.Movie)
finder = providers.AbstractFactory(movies.finders.MovieFinder,
movie_model=movie.provider)
lister = providers.Factory(movies.listers.MovieLister,
movie_finder=finder)
|
68b7786c83eb16d3e752b4c5ed22c44e0a1aeddb | examples/status_watcher.py | examples/status_watcher.py | import logging
from twisted.internet import reactor
from flist.core import FList_Account
def log_status(data):
logging.debug("{character} is {status}: {statusmsg}".format(**data))
def on_disconnect():
reactor.callLater(60, connect)
def connect():
account = FList_Account('account', 'password')
char = account.characters['character']
chat = char.start_chat()
chat.websocket.add_op_callback('STA', log_status)
return chat
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
connect()
reactor.run() | import logging
from twisted.internet import reactor
import flist
def log_status(data):
    """Write a character's status change to the debug log.

    ``data`` must provide ``character``, ``status`` and ``statusmsg`` keys.
    """
    message = "{character} is {status}: {statusmsg}".format(**data)
    logging.debug(message)
def on_disconnect():
    """Schedule a reconnect attempt 60 seconds from now."""
    # Defers connect() via the reactor instead of calling it inline, so the
    # retry happens from the event loop after a cool-down period.
    # NOTE(review): nothing in this file registers this handler with the
    # chat connection -- presumably wired up by the caller; verify.
    reactor.callLater(60, connect)
def connect():
    """Log in, open a chat session for the character and hook status events.

    Returns the chat session object so the caller can hold a reference.
    """
    acct = flist.Account('account', 'password')
    character = acct.characters['character']
    session = character.start_chat(dev_chat=True)
    # Forward every STATUS op from the websocket to log_status().
    session.websocket.add_op_callback(flist.fchat.STATUS, log_status)
    return session
if __name__ == '__main__':
    # DEBUG level so the status lines emitted by log_status() are visible.
    logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
    connect()
    # Start the Twisted event loop; blocks until the reactor is stopped.
    reactor.run()
| Update example to refactored code | Update example to refactored code
| Python | bsd-2-clause | StormyDragon/python-flist | import logging
from twisted.internet import reactor
from flist.core import FList_Account
def log_status(data):
logging.debug("{character} is {status}: {statusmsg}".format(**data))
def on_disconnect():
reactor.callLater(60, connect)
def connect():
account = FList_Account('account', 'password')
char = account.characters['character']
chat = char.start_chat()
chat.websocket.add_op_callback('STA', log_status)
return chat
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
connect()
reactor.run()Update example to refactored code | import logging
from twisted.internet import reactor
import flist
def log_status(data):
logging.debug("{character} is {status}: {statusmsg}".format(**data))
def on_disconnect():
reactor.callLater(60, connect)
def connect():
account = flist.Account('account', 'password')
char = account.characters['character']
chat = char.start_chat(dev_chat=True)
chat.websocket.add_op_callback(flist.fchat.STATUS, log_status)
return chat
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
connect()
reactor.run()
| <commit_before>import logging
from twisted.internet import reactor
from flist.core import FList_Account
def log_status(data):
logging.debug("{character} is {status}: {statusmsg}".format(**data))
def on_disconnect():
reactor.callLater(60, connect)
def connect():
account = FList_Account('account', 'password')
char = account.characters['character']
chat = char.start_chat()
chat.websocket.add_op_callback('STA', log_status)
return chat
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
connect()
reactor.run()<commit_msg>Update example to refactored code<commit_after> | import logging
from twisted.internet import reactor
import flist
def log_status(data):
logging.debug("{character} is {status}: {statusmsg}".format(**data))
def on_disconnect():
reactor.callLater(60, connect)
def connect():
account = flist.Account('account', 'password')
char = account.characters['character']
chat = char.start_chat(dev_chat=True)
chat.websocket.add_op_callback(flist.fchat.STATUS, log_status)
return chat
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
connect()
reactor.run()
| import logging
from twisted.internet import reactor
from flist.core import FList_Account
def log_status(data):
logging.debug("{character} is {status}: {statusmsg}".format(**data))
def on_disconnect():
reactor.callLater(60, connect)
def connect():
account = FList_Account('account', 'password')
char = account.characters['character']
chat = char.start_chat()
chat.websocket.add_op_callback('STA', log_status)
return chat
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
connect()
reactor.run()Update example to refactored codeimport logging
from twisted.internet import reactor
import flist
def log_status(data):
logging.debug("{character} is {status}: {statusmsg}".format(**data))
def on_disconnect():
reactor.callLater(60, connect)
def connect():
account = flist.Account('account', 'password')
char = account.characters['character']
chat = char.start_chat(dev_chat=True)
chat.websocket.add_op_callback(flist.fchat.STATUS, log_status)
return chat
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
connect()
reactor.run()
| <commit_before>import logging
from twisted.internet import reactor
from flist.core import FList_Account
def log_status(data):
logging.debug("{character} is {status}: {statusmsg}".format(**data))
def on_disconnect():
reactor.callLater(60, connect)
def connect():
account = FList_Account('account', 'password')
char = account.characters['character']
chat = char.start_chat()
chat.websocket.add_op_callback('STA', log_status)
return chat
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
connect()
reactor.run()<commit_msg>Update example to refactored code<commit_after>import logging
from twisted.internet import reactor
import flist
def log_status(data):
logging.debug("{character} is {status}: {statusmsg}".format(**data))
def on_disconnect():
reactor.callLater(60, connect)
def connect():
account = flist.Account('account', 'password')
char = account.characters['character']
chat = char.start_chat(dev_chat=True)
chat.websocket.add_op_callback(flist.fchat.STATUS, log_status)
return chat
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
connect()
reactor.run()
|
3aa8734ac9790a8869e01d5d56498dfaf697fe28 | cea/interfaces/dashboard/api/dashboard.py | cea/interfaces/dashboard/api/dashboard.py | from flask_restplus import Namespace, Resource, fields, abort
import cea.config
import cea.plots.cache
api = Namespace('Dashboard', description='Dashboard plots')
LAYOUTS = ['row', 'grid', 'map']
CATEGORIES = {c.name: {'label': c.label, 'plots': [{'id': p.id(), 'name': p.name} for p in c.plots]}
for c in cea.plots.categories.list_categories()}
@api.route('/')
class Dashboard(Resource):
def get(self):
"""
Get Dashboards from yaml file
"""
config = cea.config.Configuration()
plot_cache = cea.plots.cache.PlotCache(config)
dashboards = cea.plots.read_dashboards(config, plot_cache)
return [{'name': d.name, 'description': d.description, 'layout': d.layout if d.layout in LAYOUTS else 'row',
'plots': [{'title': plot.title, 'scenario':
plot.parameters['scenario-name'] if 'scenario-name' in plot.parameters.keys() else None}
for plot in d.plots]} for d in dashboards]
| from flask_restplus import Namespace, Resource, fields, abort
import cea.config
import cea.plots.cache
api = Namespace('Dashboard', description='Dashboard plots')

# Layout identifiers the dashboard frontend understands.
LAYOUTS = ['row', 'grid', 'map']

# Category name -> {'label', 'plots': [{'id', 'name'}, ...]} lookup, built
# once at import time from the registered plot categories.
# NOTE(review): ``cea.plots.categories`` is never imported explicitly here --
# presumably pulled in as a side effect of ``import cea.plots.cache``; verify.
CATEGORIES = {c.name: {'label': c.label, 'plots': [{'id': p.id(), 'name': p.name} for p in c.plots]}
              for c in cea.plots.categories.list_categories()}
@api.route('/')
class Dashboard(Resource):
    def get(self):
        """Return the dashboards read from the dashboard YAML file.

        Each dashboard is serialized via ``to_dict()`` and then enriched
        with each plot's computed ``title``, which the serialized form
        does not include.
        """
        config = cea.config.Configuration()
        plot_cache = cea.plots.cache.PlotCache(config)
        dashboards = cea.plots.read_dashboards(config, plot_cache)

        out = []
        for dashboard in dashboards:
            serialized = dashboard.to_dict()
            # Pair serialized plot dicts with their live plot objects
            # instead of indexing both by position (the enumerate loop
            # variable was unused in the original).  Assumes to_dict()
            # lists plots in the same order as ``dashboard.plots``, exactly
            # as the original positional indexing did.
            for plot_dict, plot in zip(serialized['plots'], dashboard.plots):
                plot_dict['title'] = plot.title
            out.append(serialized)
        return out
| Include plot title to plots | Include plot title to plots
| Python | mit | architecture-building-systems/CityEnergyAnalyst,architecture-building-systems/CityEnergyAnalyst,architecture-building-systems/CityEnergyAnalyst | from flask_restplus import Namespace, Resource, fields, abort
import cea.config
import cea.plots.cache
api = Namespace('Dashboard', description='Dashboard plots')
LAYOUTS = ['row', 'grid', 'map']
CATEGORIES = {c.name: {'label': c.label, 'plots': [{'id': p.id(), 'name': p.name} for p in c.plots]}
for c in cea.plots.categories.list_categories()}
@api.route('/')
class Dashboard(Resource):
def get(self):
"""
Get Dashboards from yaml file
"""
config = cea.config.Configuration()
plot_cache = cea.plots.cache.PlotCache(config)
dashboards = cea.plots.read_dashboards(config, plot_cache)
return [{'name': d.name, 'description': d.description, 'layout': d.layout if d.layout in LAYOUTS else 'row',
'plots': [{'title': plot.title, 'scenario':
plot.parameters['scenario-name'] if 'scenario-name' in plot.parameters.keys() else None}
for plot in d.plots]} for d in dashboards]
Include plot title to plots | from flask_restplus import Namespace, Resource, fields, abort
import cea.config
import cea.plots.cache
api = Namespace('Dashboard', description='Dashboard plots')
LAYOUTS = ['row', 'grid', 'map']
CATEGORIES = {c.name: {'label': c.label, 'plots': [{'id': p.id(), 'name': p.name} for p in c.plots]}
for c in cea.plots.categories.list_categories()}
@api.route('/')
class Dashboard(Resource):
def get(self):
"""
Get Dashboards from yaml file
"""
config = cea.config.Configuration()
plot_cache = cea.plots.cache.PlotCache(config)
dashboards = cea.plots.read_dashboards(config, plot_cache)
out = []
for d in dashboards:
dashboard = d.to_dict()
for i, plot in enumerate(dashboard['plots']):
dashboard['plots'][i]['title'] = d.plots[i].title
out.append(dashboard)
return out
| <commit_before>from flask_restplus import Namespace, Resource, fields, abort
import cea.config
import cea.plots.cache
api = Namespace('Dashboard', description='Dashboard plots')
LAYOUTS = ['row', 'grid', 'map']
CATEGORIES = {c.name: {'label': c.label, 'plots': [{'id': p.id(), 'name': p.name} for p in c.plots]}
for c in cea.plots.categories.list_categories()}
@api.route('/')
class Dashboard(Resource):
def get(self):
"""
Get Dashboards from yaml file
"""
config = cea.config.Configuration()
plot_cache = cea.plots.cache.PlotCache(config)
dashboards = cea.plots.read_dashboards(config, plot_cache)
return [{'name': d.name, 'description': d.description, 'layout': d.layout if d.layout in LAYOUTS else 'row',
'plots': [{'title': plot.title, 'scenario':
plot.parameters['scenario-name'] if 'scenario-name' in plot.parameters.keys() else None}
for plot in d.plots]} for d in dashboards]
<commit_msg>Include plot title to plots<commit_after> | from flask_restplus import Namespace, Resource, fields, abort
import cea.config
import cea.plots.cache
api = Namespace('Dashboard', description='Dashboard plots')
LAYOUTS = ['row', 'grid', 'map']
CATEGORIES = {c.name: {'label': c.label, 'plots': [{'id': p.id(), 'name': p.name} for p in c.plots]}
for c in cea.plots.categories.list_categories()}
@api.route('/')
class Dashboard(Resource):
def get(self):
"""
Get Dashboards from yaml file
"""
config = cea.config.Configuration()
plot_cache = cea.plots.cache.PlotCache(config)
dashboards = cea.plots.read_dashboards(config, plot_cache)
out = []
for d in dashboards:
dashboard = d.to_dict()
for i, plot in enumerate(dashboard['plots']):
dashboard['plots'][i]['title'] = d.plots[i].title
out.append(dashboard)
return out
| from flask_restplus import Namespace, Resource, fields, abort
import cea.config
import cea.plots.cache
api = Namespace('Dashboard', description='Dashboard plots')
LAYOUTS = ['row', 'grid', 'map']
CATEGORIES = {c.name: {'label': c.label, 'plots': [{'id': p.id(), 'name': p.name} for p in c.plots]}
for c in cea.plots.categories.list_categories()}
@api.route('/')
class Dashboard(Resource):
def get(self):
"""
Get Dashboards from yaml file
"""
config = cea.config.Configuration()
plot_cache = cea.plots.cache.PlotCache(config)
dashboards = cea.plots.read_dashboards(config, plot_cache)
return [{'name': d.name, 'description': d.description, 'layout': d.layout if d.layout in LAYOUTS else 'row',
'plots': [{'title': plot.title, 'scenario':
plot.parameters['scenario-name'] if 'scenario-name' in plot.parameters.keys() else None}
for plot in d.plots]} for d in dashboards]
Include plot title to plotsfrom flask_restplus import Namespace, Resource, fields, abort
import cea.config
import cea.plots.cache
api = Namespace('Dashboard', description='Dashboard plots')
LAYOUTS = ['row', 'grid', 'map']
CATEGORIES = {c.name: {'label': c.label, 'plots': [{'id': p.id(), 'name': p.name} for p in c.plots]}
for c in cea.plots.categories.list_categories()}
@api.route('/')
class Dashboard(Resource):
def get(self):
"""
Get Dashboards from yaml file
"""
config = cea.config.Configuration()
plot_cache = cea.plots.cache.PlotCache(config)
dashboards = cea.plots.read_dashboards(config, plot_cache)
out = []
for d in dashboards:
dashboard = d.to_dict()
for i, plot in enumerate(dashboard['plots']):
dashboard['plots'][i]['title'] = d.plots[i].title
out.append(dashboard)
return out
| <commit_before>from flask_restplus import Namespace, Resource, fields, abort
import cea.config
import cea.plots.cache
api = Namespace('Dashboard', description='Dashboard plots')
LAYOUTS = ['row', 'grid', 'map']
CATEGORIES = {c.name: {'label': c.label, 'plots': [{'id': p.id(), 'name': p.name} for p in c.plots]}
for c in cea.plots.categories.list_categories()}
@api.route('/')
class Dashboard(Resource):
def get(self):
"""
Get Dashboards from yaml file
"""
config = cea.config.Configuration()
plot_cache = cea.plots.cache.PlotCache(config)
dashboards = cea.plots.read_dashboards(config, plot_cache)
return [{'name': d.name, 'description': d.description, 'layout': d.layout if d.layout in LAYOUTS else 'row',
'plots': [{'title': plot.title, 'scenario':
plot.parameters['scenario-name'] if 'scenario-name' in plot.parameters.keys() else None}
for plot in d.plots]} for d in dashboards]
<commit_msg>Include plot title to plots<commit_after>from flask_restplus import Namespace, Resource, fields, abort
import cea.config
import cea.plots.cache
api = Namespace('Dashboard', description='Dashboard plots')
LAYOUTS = ['row', 'grid', 'map']
CATEGORIES = {c.name: {'label': c.label, 'plots': [{'id': p.id(), 'name': p.name} for p in c.plots]}
for c in cea.plots.categories.list_categories()}
@api.route('/')
class Dashboard(Resource):
def get(self):
"""
Get Dashboards from yaml file
"""
config = cea.config.Configuration()
plot_cache = cea.plots.cache.PlotCache(config)
dashboards = cea.plots.read_dashboards(config, plot_cache)
out = []
for d in dashboards:
dashboard = d.to_dict()
for i, plot in enumerate(dashboard['plots']):
dashboard['plots'][i]['title'] = d.plots[i].title
out.append(dashboard)
return out
|
8440ffcfd87814e04188fe4077717e132f285cb2 | ckanext/requestdata/tests/test_helpers.py | ckanext/requestdata/tests/test_helpers.py | # encoding: utf-8
import nose
from datetime import datetime, timedelta
from ckanext.requestdata import helpers as h
import ckan.plugins as p
from ckan.tests import helpers, factories
from ckan import logic
ok_ = nose.tools.ok_
eq_ = nose.tools.eq_
raises = nose.tools.raises
class ActionBase(object):
@classmethod
def setup_class(self):
self.app = helpers._get_test_app()
if not p.plugin_loaded('requestdata'):
p.load('requestdata')
def setup(self):
helpers.reset_db()
@classmethod
def teardown_class(self):
if p.plugin_loaded('requestdata'):
p.unload('requestdata')
class TestHelpers(ActionBase):
def test_time_ago_from_datetime_valid(self):
d = datetime.today() - timedelta(days=1)
eq_(h.time_ago_from_datetime(d), '1 day ago')
def test_time_ago_from_datetime_valid_string_result(self):
d = datetime.today() - timedelta(days=2)
assert isinstance(h.time_ago_from_datetime(d), str)
| # encoding: utf-8
import nose
from datetime import datetime, timedelta
from ckanext.requestdata import helpers as h
import ckan.plugins as p
from ckan.tests import helpers, factories
from ckan import logic
ok_ = nose.tools.ok_
eq_ = nose.tools.eq_
raises = nose.tools.raises
class ActionBase(object):
    # Shared fixture base: provides a CKAN test app and makes sure the
    # requestdata plugin is loaded for the duration of each test class.

    @classmethod
    def setup_class(self):
        # Build the test WSGI app once per class and load the plugin under
        # test only if it is not already active.
        self.app = helpers._get_test_app()
        if not p.plugin_loaded('requestdata'):
            p.load('requestdata')

    def setup(self):
        # Reset the database before every individual test method.
        helpers.reset_db()

    @classmethod
    def teardown_class(self):
        # Unload the plugin so subsequent test classes start clean.
        if p.plugin_loaded('requestdata'):
            p.unload('requestdata')
class TestHelpers(ActionBase):
    """Unit tests for ckanext.requestdata template helper functions."""

    def test_time_ago_from_datetime_valid(self):
        # One day back must render as the exact human-readable string.
        d = datetime.today() - timedelta(days=1)
        eq_(h.time_ago_from_datetime(d), '1 day ago')

    def test_time_ago_from_datetime_valid_string_result(self):
        # Whatever the wording, the helper must always return a string.
        d = datetime.today() - timedelta(days=2)
        assert isinstance(h.time_ago_from_datetime(d), str)

    def test_convert_id_to_emails_valid(self):
        # Compare against the factory user's actual email rather than the
        # hard-coded 'test_user_05@ckan.org': the factory generates
        # sequential user names, so a literal address only matches when
        # this test happens to create the fifth user of the run.  The
        # unused ``users`` list from the original is dropped.
        user = factories.User()
        response = h.convert_id_to_email(user['id'])
        assert user['email'] == response
| Add tests for valid email converter | Add tests for valid email converter
| Python | agpl-3.0 | ViderumGlobal/ckanext-requestdata,ViderumGlobal/ckanext-requestdata,ViderumGlobal/ckanext-requestdata,ViderumGlobal/ckanext-requestdata | # encoding: utf-8
import nose
from datetime import datetime, timedelta
from ckanext.requestdata import helpers as h
import ckan.plugins as p
from ckan.tests import helpers, factories
from ckan import logic
ok_ = nose.tools.ok_
eq_ = nose.tools.eq_
raises = nose.tools.raises
class ActionBase(object):
@classmethod
def setup_class(self):
self.app = helpers._get_test_app()
if not p.plugin_loaded('requestdata'):
p.load('requestdata')
def setup(self):
helpers.reset_db()
@classmethod
def teardown_class(self):
if p.plugin_loaded('requestdata'):
p.unload('requestdata')
class TestHelpers(ActionBase):
def test_time_ago_from_datetime_valid(self):
d = datetime.today() - timedelta(days=1)
eq_(h.time_ago_from_datetime(d), '1 day ago')
def test_time_ago_from_datetime_valid_string_result(self):
d = datetime.today() - timedelta(days=2)
assert isinstance(h.time_ago_from_datetime(d), str)
Add tests for valid email converter | # encoding: utf-8
import nose
from datetime import datetime, timedelta
from ckanext.requestdata import helpers as h
import ckan.plugins as p
from ckan.tests import helpers, factories
from ckan import logic
ok_ = nose.tools.ok_
eq_ = nose.tools.eq_
raises = nose.tools.raises
class ActionBase(object):
@classmethod
def setup_class(self):
self.app = helpers._get_test_app()
if not p.plugin_loaded('requestdata'):
p.load('requestdata')
def setup(self):
helpers.reset_db()
@classmethod
def teardown_class(self):
if p.plugin_loaded('requestdata'):
p.unload('requestdata')
class TestHelpers(ActionBase):
def test_time_ago_from_datetime_valid(self):
d = datetime.today() - timedelta(days=1)
eq_(h.time_ago_from_datetime(d), '1 day ago')
def test_time_ago_from_datetime_valid_string_result(self):
d = datetime.today() - timedelta(days=2)
assert isinstance(h.time_ago_from_datetime(d), str)
def test_convert_id_to_emails_valid(self):
user = factories.User()
users = [{'name': user['name']}]
ids = user['id']
response = h.convert_id_to_email(ids)
email = 'test_user_05@ckan.org'
assert email == response
| <commit_before># encoding: utf-8
import nose
from datetime import datetime, timedelta
from ckanext.requestdata import helpers as h
import ckan.plugins as p
from ckan.tests import helpers, factories
from ckan import logic
ok_ = nose.tools.ok_
eq_ = nose.tools.eq_
raises = nose.tools.raises
class ActionBase(object):
@classmethod
def setup_class(self):
self.app = helpers._get_test_app()
if not p.plugin_loaded('requestdata'):
p.load('requestdata')
def setup(self):
helpers.reset_db()
@classmethod
def teardown_class(self):
if p.plugin_loaded('requestdata'):
p.unload('requestdata')
class TestHelpers(ActionBase):
def test_time_ago_from_datetime_valid(self):
d = datetime.today() - timedelta(days=1)
eq_(h.time_ago_from_datetime(d), '1 day ago')
def test_time_ago_from_datetime_valid_string_result(self):
d = datetime.today() - timedelta(days=2)
assert isinstance(h.time_ago_from_datetime(d), str)
<commit_msg>Add tests for valid email converter<commit_after> | # encoding: utf-8
import nose
from datetime import datetime, timedelta
from ckanext.requestdata import helpers as h
import ckan.plugins as p
from ckan.tests import helpers, factories
from ckan import logic
ok_ = nose.tools.ok_
eq_ = nose.tools.eq_
raises = nose.tools.raises
class ActionBase(object):
@classmethod
def setup_class(self):
self.app = helpers._get_test_app()
if not p.plugin_loaded('requestdata'):
p.load('requestdata')
def setup(self):
helpers.reset_db()
@classmethod
def teardown_class(self):
if p.plugin_loaded('requestdata'):
p.unload('requestdata')
class TestHelpers(ActionBase):
def test_time_ago_from_datetime_valid(self):
d = datetime.today() - timedelta(days=1)
eq_(h.time_ago_from_datetime(d), '1 day ago')
def test_time_ago_from_datetime_valid_string_result(self):
d = datetime.today() - timedelta(days=2)
assert isinstance(h.time_ago_from_datetime(d), str)
def test_convert_id_to_emails_valid(self):
user = factories.User()
users = [{'name': user['name']}]
ids = user['id']
response = h.convert_id_to_email(ids)
email = 'test_user_05@ckan.org'
assert email == response
| # encoding: utf-8
import nose
from datetime import datetime, timedelta
from ckanext.requestdata import helpers as h
import ckan.plugins as p
from ckan.tests import helpers, factories
from ckan import logic
ok_ = nose.tools.ok_
eq_ = nose.tools.eq_
raises = nose.tools.raises
class ActionBase(object):
@classmethod
def setup_class(self):
self.app = helpers._get_test_app()
if not p.plugin_loaded('requestdata'):
p.load('requestdata')
def setup(self):
helpers.reset_db()
@classmethod
def teardown_class(self):
if p.plugin_loaded('requestdata'):
p.unload('requestdata')
class TestHelpers(ActionBase):
def test_time_ago_from_datetime_valid(self):
d = datetime.today() - timedelta(days=1)
eq_(h.time_ago_from_datetime(d), '1 day ago')
def test_time_ago_from_datetime_valid_string_result(self):
d = datetime.today() - timedelta(days=2)
assert isinstance(h.time_ago_from_datetime(d), str)
Add tests for valid email converter# encoding: utf-8
import nose
from datetime import datetime, timedelta
from ckanext.requestdata import helpers as h
import ckan.plugins as p
from ckan.tests import helpers, factories
from ckan import logic
ok_ = nose.tools.ok_
eq_ = nose.tools.eq_
raises = nose.tools.raises
class ActionBase(object):
@classmethod
def setup_class(self):
self.app = helpers._get_test_app()
if not p.plugin_loaded('requestdata'):
p.load('requestdata')
def setup(self):
helpers.reset_db()
@classmethod
def teardown_class(self):
if p.plugin_loaded('requestdata'):
p.unload('requestdata')
class TestHelpers(ActionBase):
def test_time_ago_from_datetime_valid(self):
d = datetime.today() - timedelta(days=1)
eq_(h.time_ago_from_datetime(d), '1 day ago')
def test_time_ago_from_datetime_valid_string_result(self):
d = datetime.today() - timedelta(days=2)
assert isinstance(h.time_ago_from_datetime(d), str)
def test_convert_id_to_emails_valid(self):
user = factories.User()
users = [{'name': user['name']}]
ids = user['id']
response = h.convert_id_to_email(ids)
email = 'test_user_05@ckan.org'
assert email == response
| <commit_before># encoding: utf-8
import nose
from datetime import datetime, timedelta
from ckanext.requestdata import helpers as h
import ckan.plugins as p
from ckan.tests import helpers, factories
from ckan import logic
ok_ = nose.tools.ok_
eq_ = nose.tools.eq_
raises = nose.tools.raises
class ActionBase(object):
@classmethod
def setup_class(self):
self.app = helpers._get_test_app()
if not p.plugin_loaded('requestdata'):
p.load('requestdata')
def setup(self):
helpers.reset_db()
@classmethod
def teardown_class(self):
if p.plugin_loaded('requestdata'):
p.unload('requestdata')
class TestHelpers(ActionBase):
def test_time_ago_from_datetime_valid(self):
d = datetime.today() - timedelta(days=1)
eq_(h.time_ago_from_datetime(d), '1 day ago')
def test_time_ago_from_datetime_valid_string_result(self):
d = datetime.today() - timedelta(days=2)
assert isinstance(h.time_ago_from_datetime(d), str)
<commit_msg>Add tests for valid email converter<commit_after># encoding: utf-8
import nose
from datetime import datetime, timedelta
from ckanext.requestdata import helpers as h
import ckan.plugins as p
from ckan.tests import helpers, factories
from ckan import logic
ok_ = nose.tools.ok_
eq_ = nose.tools.eq_
raises = nose.tools.raises
class ActionBase(object):
@classmethod
def setup_class(self):
self.app = helpers._get_test_app()
if not p.plugin_loaded('requestdata'):
p.load('requestdata')
def setup(self):
helpers.reset_db()
@classmethod
def teardown_class(self):
if p.plugin_loaded('requestdata'):
p.unload('requestdata')
class TestHelpers(ActionBase):
def test_time_ago_from_datetime_valid(self):
d = datetime.today() - timedelta(days=1)
eq_(h.time_ago_from_datetime(d), '1 day ago')
def test_time_ago_from_datetime_valid_string_result(self):
d = datetime.today() - timedelta(days=2)
assert isinstance(h.time_ago_from_datetime(d), str)
def test_convert_id_to_emails_valid(self):
user = factories.User()
users = [{'name': user['name']}]
ids = user['id']
response = h.convert_id_to_email(ids)
email = 'test_user_05@ckan.org'
assert email == response
|
d4ffa7a0dd5462ad7d2b7eaf720bc8056ca1b859 | medical_medication_us/models/medical_medicament.py | medical_medication_us/models/medical_medicament.py | # -*- coding: utf-8 -*-
# © 2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import fields, models
class MedicalMedicament(models.Model):
_inherit = 'medical.medicament'
ndc = fields.Char(
string='NDC',
help='National Drug Code for medication'
)
control_code = fields.Selection([
('c1', 'C1'),
('c2', 'C2'),
('c3', 'C3'),
('c4', 'C4'),
('c5', 'C5'),
],
help='Federal drug scheduling code',
)
| # -*- coding: utf-8 -*-
# © 2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import fields, models
class MedicalMedicament(models.Model):
    """Add US-specific drug identifier fields to medical medicaments."""

    _inherit = 'medical.medicament'

    # National Drug Code (per the help text, the US drug identifier).
    ndc = fields.Char(
        string='NDC',
        help='National Drug Code for medication'
    )
    # Generic Product Identifier.
    # NOTE(review): GPIs are commonly 14-digit codes that may carry leading
    # zeros; confirm that an Integer field (rather than Char) is intended.
    gpi = fields.Integer(
        string='GPI',
        help='Generic Product Identifier',
    )
    # Generic Code Number.
    gcn = fields.Integer(
        string='GCN',
        help='Generic Code Number',
    )
    # Federal controlled-substance schedule, C1 through C5.
    control_code = fields.Selection([
        ('c1', 'C1'),
        ('c2', 'C2'),
        ('c3', 'C3'),
        ('c4', 'C4'),
        ('c5', 'C5'),
        ],
        help='Federal drug scheduling code',
    )
| Add gpi and gcn to medicament in medical_medication_us | Add gpi and gcn to medicament in medical_medication_us
| Python | agpl-3.0 | laslabs/vertical-medical,laslabs/vertical-medical | # -*- coding: utf-8 -*-
# © 2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import fields, models
class MedicalMedicament(models.Model):
_inherit = 'medical.medicament'
ndc = fields.Char(
string='NDC',
help='National Drug Code for medication'
)
control_code = fields.Selection([
('c1', 'C1'),
('c2', 'C2'),
('c3', 'C3'),
('c4', 'C4'),
('c5', 'C5'),
],
help='Federal drug scheduling code',
)
Add gpi and gcn to medicament in medical_medication_us | # -*- coding: utf-8 -*-
# © 2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import fields, models
class MedicalMedicament(models.Model):
_inherit = 'medical.medicament'
ndc = fields.Char(
string='NDC',
help='National Drug Code for medication'
)
gpi = fields.Integer(
string='GPI',
help='Generic Product Identifier',
)
gcn = fields.Integer(
string='GCN',
help='Generic Code Number',
)
control_code = fields.Selection([
('c1', 'C1'),
('c2', 'C2'),
('c3', 'C3'),
('c4', 'C4'),
('c5', 'C5'),
],
help='Federal drug scheduling code',
)
| <commit_before># -*- coding: utf-8 -*-
# © 2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import fields, models
class MedicalMedicament(models.Model):
_inherit = 'medical.medicament'
ndc = fields.Char(
string='NDC',
help='National Drug Code for medication'
)
control_code = fields.Selection([
('c1', 'C1'),
('c2', 'C2'),
('c3', 'C3'),
('c4', 'C4'),
('c5', 'C5'),
],
help='Federal drug scheduling code',
)
<commit_msg>Add gpi and gcn to medicament in medical_medication_us<commit_after> | # -*- coding: utf-8 -*-
# © 2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import fields, models
class MedicalMedicament(models.Model):
_inherit = 'medical.medicament'
ndc = fields.Char(
string='NDC',
help='National Drug Code for medication'
)
gpi = fields.Integer(
string='GPI',
help='Generic Product Identifier',
)
gcn = fields.Integer(
string='GCN',
help='Generic Code Number',
)
control_code = fields.Selection([
('c1', 'C1'),
('c2', 'C2'),
('c3', 'C3'),
('c4', 'C4'),
('c5', 'C5'),
],
help='Federal drug scheduling code',
)
| # -*- coding: utf-8 -*-
# © 2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import fields, models
class MedicalMedicament(models.Model):
_inherit = 'medical.medicament'
ndc = fields.Char(
string='NDC',
help='National Drug Code for medication'
)
control_code = fields.Selection([
('c1', 'C1'),
('c2', 'C2'),
('c3', 'C3'),
('c4', 'C4'),
('c5', 'C5'),
],
help='Federal drug scheduling code',
)
Add gpi and gcn to medicament in medical_medication_us# -*- coding: utf-8 -*-
# © 2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import fields, models
class MedicalMedicament(models.Model):
_inherit = 'medical.medicament'
ndc = fields.Char(
string='NDC',
help='National Drug Code for medication'
)
gpi = fields.Integer(
string='GPI',
help='Generic Product Identifier',
)
gcn = fields.Integer(
string='GCN',
help='Generic Code Number',
)
control_code = fields.Selection([
('c1', 'C1'),
('c2', 'C2'),
('c3', 'C3'),
('c4', 'C4'),
('c5', 'C5'),
],
help='Federal drug scheduling code',
)
| <commit_before># -*- coding: utf-8 -*-
# © 2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import fields, models
class MedicalMedicament(models.Model):
_inherit = 'medical.medicament'
ndc = fields.Char(
string='NDC',
help='National Drug Code for medication'
)
control_code = fields.Selection([
('c1', 'C1'),
('c2', 'C2'),
('c3', 'C3'),
('c4', 'C4'),
('c5', 'C5'),
],
help='Federal drug scheduling code',
)
<commit_msg>Add gpi and gcn to medicament in medical_medication_us<commit_after># -*- coding: utf-8 -*-
# © 2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import fields, models
class MedicalMedicament(models.Model):
_inherit = 'medical.medicament'
ndc = fields.Char(
string='NDC',
help='National Drug Code for medication'
)
gpi = fields.Integer(
string='GPI',
help='Generic Product Identifier',
)
gcn = fields.Integer(
string='GCN',
help='Generic Code Number',
)
control_code = fields.Selection([
('c1', 'C1'),
('c2', 'C2'),
('c3', 'C3'),
('c4', 'C4'),
('c5', 'C5'),
],
help='Federal drug scheduling code',
)
|
edecc16f83eadbf836477c46fb056cb6eba5df28 | feder/contrib/sites/migrations/0001_initial.py | feder/contrib/sites/migrations/0001_initial.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import django.contrib.sites.models
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Site',
fields=[
('id', models.AutoField(verbose_name='ID',
primary_key=True, serialize=False, auto_created=True)),
('domain', models.CharField(verbose_name='domain name', max_length=100,
validators=[django.contrib.sites.models._simple_domain_name_validator])),
('name', models.CharField(verbose_name='display name', max_length=50)),
],
options={
'verbose_name_plural': 'sites',
'verbose_name': 'site',
'db_table': 'django_site',
'ordering': ('domain',),
},
managers=[
(b'objects', django.contrib.sites.models.SiteManager()),
],
),
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import django.contrib.sites.models
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Site',
fields=[
('id', models.AutoField(verbose_name='ID',
primary_key=True, serialize=False, auto_created=True)),
('domain', models.CharField(verbose_name='domain name', max_length=100,
validators=[django.contrib.sites.models._simple_domain_name_validator])),
('name', models.CharField(verbose_name='display name', max_length=50)),
],
options={
'verbose_name_plural': 'sites',
'verbose_name': 'site',
'db_table': 'django_site',
'ordering': ('domain',),
},
managers=[
('objects', django.contrib.sites.models.SiteManager()),
],
),
]
| Fix type in contrib.sites.migrations for Django 2.x compatibility | Fix type in contrib.sites.migrations for Django 2.x compatibility
| Python | mit | watchdogpolska/feder,watchdogpolska/feder,watchdogpolska/feder,watchdogpolska/feder | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import django.contrib.sites.models
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Site',
fields=[
('id', models.AutoField(verbose_name='ID',
primary_key=True, serialize=False, auto_created=True)),
('domain', models.CharField(verbose_name='domain name', max_length=100,
validators=[django.contrib.sites.models._simple_domain_name_validator])),
('name', models.CharField(verbose_name='display name', max_length=50)),
],
options={
'verbose_name_plural': 'sites',
'verbose_name': 'site',
'db_table': 'django_site',
'ordering': ('domain',),
},
managers=[
(b'objects', django.contrib.sites.models.SiteManager()),
],
),
]
Fix type in contrib.sites.migrations for Django 2.x compatibility | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import django.contrib.sites.models
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Site',
fields=[
('id', models.AutoField(verbose_name='ID',
primary_key=True, serialize=False, auto_created=True)),
('domain', models.CharField(verbose_name='domain name', max_length=100,
validators=[django.contrib.sites.models._simple_domain_name_validator])),
('name', models.CharField(verbose_name='display name', max_length=50)),
],
options={
'verbose_name_plural': 'sites',
'verbose_name': 'site',
'db_table': 'django_site',
'ordering': ('domain',),
},
managers=[
('objects', django.contrib.sites.models.SiteManager()),
],
),
]
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
import django.contrib.sites.models
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Site',
fields=[
('id', models.AutoField(verbose_name='ID',
primary_key=True, serialize=False, auto_created=True)),
('domain', models.CharField(verbose_name='domain name', max_length=100,
validators=[django.contrib.sites.models._simple_domain_name_validator])),
('name', models.CharField(verbose_name='display name', max_length=50)),
],
options={
'verbose_name_plural': 'sites',
'verbose_name': 'site',
'db_table': 'django_site',
'ordering': ('domain',),
},
managers=[
(b'objects', django.contrib.sites.models.SiteManager()),
],
),
]
<commit_msg>Fix type in contrib.sites.migrations for Django 2.x compatibility<commit_after> | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import django.contrib.sites.models
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Site',
fields=[
('id', models.AutoField(verbose_name='ID',
primary_key=True, serialize=False, auto_created=True)),
('domain', models.CharField(verbose_name='domain name', max_length=100,
validators=[django.contrib.sites.models._simple_domain_name_validator])),
('name', models.CharField(verbose_name='display name', max_length=50)),
],
options={
'verbose_name_plural': 'sites',
'verbose_name': 'site',
'db_table': 'django_site',
'ordering': ('domain',),
},
managers=[
('objects', django.contrib.sites.models.SiteManager()),
],
),
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import django.contrib.sites.models
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Site',
fields=[
('id', models.AutoField(verbose_name='ID',
primary_key=True, serialize=False, auto_created=True)),
('domain', models.CharField(verbose_name='domain name', max_length=100,
validators=[django.contrib.sites.models._simple_domain_name_validator])),
('name', models.CharField(verbose_name='display name', max_length=50)),
],
options={
'verbose_name_plural': 'sites',
'verbose_name': 'site',
'db_table': 'django_site',
'ordering': ('domain',),
},
managers=[
(b'objects', django.contrib.sites.models.SiteManager()),
],
),
]
Fix type in contrib.sites.migrations for Django 2.x compatibility# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import django.contrib.sites.models
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Site',
fields=[
('id', models.AutoField(verbose_name='ID',
primary_key=True, serialize=False, auto_created=True)),
('domain', models.CharField(verbose_name='domain name', max_length=100,
validators=[django.contrib.sites.models._simple_domain_name_validator])),
('name', models.CharField(verbose_name='display name', max_length=50)),
],
options={
'verbose_name_plural': 'sites',
'verbose_name': 'site',
'db_table': 'django_site',
'ordering': ('domain',),
},
managers=[
('objects', django.contrib.sites.models.SiteManager()),
],
),
]
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
import django.contrib.sites.models
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Site',
fields=[
('id', models.AutoField(verbose_name='ID',
primary_key=True, serialize=False, auto_created=True)),
('domain', models.CharField(verbose_name='domain name', max_length=100,
validators=[django.contrib.sites.models._simple_domain_name_validator])),
('name', models.CharField(verbose_name='display name', max_length=50)),
],
options={
'verbose_name_plural': 'sites',
'verbose_name': 'site',
'db_table': 'django_site',
'ordering': ('domain',),
},
managers=[
(b'objects', django.contrib.sites.models.SiteManager()),
],
),
]
<commit_msg>Fix type in contrib.sites.migrations for Django 2.x compatibility<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
import django.contrib.sites.models
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Site',
fields=[
('id', models.AutoField(verbose_name='ID',
primary_key=True, serialize=False, auto_created=True)),
('domain', models.CharField(verbose_name='domain name', max_length=100,
validators=[django.contrib.sites.models._simple_domain_name_validator])),
('name', models.CharField(verbose_name='display name', max_length=50)),
],
options={
'verbose_name_plural': 'sites',
'verbose_name': 'site',
'db_table': 'django_site',
'ordering': ('domain',),
},
managers=[
('objects', django.contrib.sites.models.SiteManager()),
],
),
]
|
b7dfb33e6cf4dd18a2ab626372bedc6b7c269780 | src/adhocracy/adhocracy/catalog/__init__.py | src/adhocracy/adhocracy/catalog/__init__.py | """ Catalog utilities."""
from substanced import catalog
from substanced.interfaces import IIndexingActionProcessor
from zope.interface import Interface
@catalog.catalog_factory('adhocracy')
class AdhocracyCatalogFactory:
tag = catalog.Keyword()
def includeme(config):
"""Register catalog utilities."""
config.add_view_predicate('catalogable', catalog._CatalogablePredicate)
config.add_directive('add_catalog_factory', catalog.add_catalog_factory)
config.add_directive('add_indexview',
catalog.add_indexview,
action_wrap=False)
config.registry.registerAdapter(catalog.deferred.BasicActionProcessor,
(Interface,),
IIndexingActionProcessor)
config.scan('substanced.catalog')
config.scan('.')
| """ Catalog utilities."""
from substanced import catalog
from substanced.interfaces import IIndexingActionProcessor
from zope.interface import Interface
@catalog.catalog_factory('adhocracy')
class AdhocracyCatalogFactory:
tag = catalog.Keyword()
def includeme(config):
"""Register catalog utilities."""
config.add_view_predicate('catalogable', catalog._CatalogablePredicate)
config.add_directive('add_catalog_factory', catalog.add_catalog_factory)
config.add_directive('add_indexview',
catalog.add_indexview,
action_wrap=False)
config.registry.registerAdapter(catalog.deferred.BasicActionProcessor,
(Interface,),
IIndexingActionProcessor)
config.scan('substanced.catalog')
config.add_catalog_factory('adhocracy', AdhocracyCatalogFactory)
| Fix regression: 'import pytest' error when starting adhocracy | Fix regression: 'import pytest' error when starting adhocracy
| Python | agpl-3.0 | liqd/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,liqd/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,liqd/adhocracy3.mercator,liqd/adhocracy3.mercator,liqd/adhocracy3.mercator,fhartwig/adhocracy3.mercator,fhartwig/adhocracy3.mercator,fhartwig/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,fhartwig/adhocracy3.mercator,fhartwig/adhocracy3.mercator,fhartwig/adhocracy3.mercator,liqd/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,fhartwig/adhocracy3.mercator,liqd/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,xs2maverick/adhocracy3.mercator | """ Catalog utilities."""
from substanced import catalog
from substanced.interfaces import IIndexingActionProcessor
from zope.interface import Interface
@catalog.catalog_factory('adhocracy')
class AdhocracyCatalogFactory:
tag = catalog.Keyword()
def includeme(config):
"""Register catalog utilities."""
config.add_view_predicate('catalogable', catalog._CatalogablePredicate)
config.add_directive('add_catalog_factory', catalog.add_catalog_factory)
config.add_directive('add_indexview',
catalog.add_indexview,
action_wrap=False)
config.registry.registerAdapter(catalog.deferred.BasicActionProcessor,
(Interface,),
IIndexingActionProcessor)
config.scan('substanced.catalog')
config.scan('.')
Fix regression: 'import pytest' error when starting adhocracy | """ Catalog utilities."""
from substanced import catalog
from substanced.interfaces import IIndexingActionProcessor
from zope.interface import Interface
@catalog.catalog_factory('adhocracy')
class AdhocracyCatalogFactory:
tag = catalog.Keyword()
def includeme(config):
"""Register catalog utilities."""
config.add_view_predicate('catalogable', catalog._CatalogablePredicate)
config.add_directive('add_catalog_factory', catalog.add_catalog_factory)
config.add_directive('add_indexview',
catalog.add_indexview,
action_wrap=False)
config.registry.registerAdapter(catalog.deferred.BasicActionProcessor,
(Interface,),
IIndexingActionProcessor)
config.scan('substanced.catalog')
config.add_catalog_factory('adhocracy', AdhocracyCatalogFactory)
| <commit_before>""" Catalog utilities."""
from substanced import catalog
from substanced.interfaces import IIndexingActionProcessor
from zope.interface import Interface
@catalog.catalog_factory('adhocracy')
class AdhocracyCatalogFactory:
tag = catalog.Keyword()
def includeme(config):
"""Register catalog utilities."""
config.add_view_predicate('catalogable', catalog._CatalogablePredicate)
config.add_directive('add_catalog_factory', catalog.add_catalog_factory)
config.add_directive('add_indexview',
catalog.add_indexview,
action_wrap=False)
config.registry.registerAdapter(catalog.deferred.BasicActionProcessor,
(Interface,),
IIndexingActionProcessor)
config.scan('substanced.catalog')
config.scan('.')
<commit_msg>Fix regression: 'import pytest' error when starting adhocracy<commit_after> | """ Catalog utilities."""
from substanced import catalog
from substanced.interfaces import IIndexingActionProcessor
from zope.interface import Interface
@catalog.catalog_factory('adhocracy')
class AdhocracyCatalogFactory:
tag = catalog.Keyword()
def includeme(config):
"""Register catalog utilities."""
config.add_view_predicate('catalogable', catalog._CatalogablePredicate)
config.add_directive('add_catalog_factory', catalog.add_catalog_factory)
config.add_directive('add_indexview',
catalog.add_indexview,
action_wrap=False)
config.registry.registerAdapter(catalog.deferred.BasicActionProcessor,
(Interface,),
IIndexingActionProcessor)
config.scan('substanced.catalog')
config.add_catalog_factory('adhocracy', AdhocracyCatalogFactory)
| """ Catalog utilities."""
from substanced import catalog
from substanced.interfaces import IIndexingActionProcessor
from zope.interface import Interface
@catalog.catalog_factory('adhocracy')
class AdhocracyCatalogFactory:
tag = catalog.Keyword()
def includeme(config):
"""Register catalog utilities."""
config.add_view_predicate('catalogable', catalog._CatalogablePredicate)
config.add_directive('add_catalog_factory', catalog.add_catalog_factory)
config.add_directive('add_indexview',
catalog.add_indexview,
action_wrap=False)
config.registry.registerAdapter(catalog.deferred.BasicActionProcessor,
(Interface,),
IIndexingActionProcessor)
config.scan('substanced.catalog')
config.scan('.')
Fix regression: 'import pytest' error when starting adhocracy""" Catalog utilities."""
from substanced import catalog
from substanced.interfaces import IIndexingActionProcessor
from zope.interface import Interface
@catalog.catalog_factory('adhocracy')
class AdhocracyCatalogFactory:
tag = catalog.Keyword()
def includeme(config):
"""Register catalog utilities."""
config.add_view_predicate('catalogable', catalog._CatalogablePredicate)
config.add_directive('add_catalog_factory', catalog.add_catalog_factory)
config.add_directive('add_indexview',
catalog.add_indexview,
action_wrap=False)
config.registry.registerAdapter(catalog.deferred.BasicActionProcessor,
(Interface,),
IIndexingActionProcessor)
config.scan('substanced.catalog')
config.add_catalog_factory('adhocracy', AdhocracyCatalogFactory)
| <commit_before>""" Catalog utilities."""
from substanced import catalog
from substanced.interfaces import IIndexingActionProcessor
from zope.interface import Interface
@catalog.catalog_factory('adhocracy')
class AdhocracyCatalogFactory:
tag = catalog.Keyword()
def includeme(config):
"""Register catalog utilities."""
config.add_view_predicate('catalogable', catalog._CatalogablePredicate)
config.add_directive('add_catalog_factory', catalog.add_catalog_factory)
config.add_directive('add_indexview',
catalog.add_indexview,
action_wrap=False)
config.registry.registerAdapter(catalog.deferred.BasicActionProcessor,
(Interface,),
IIndexingActionProcessor)
config.scan('substanced.catalog')
config.scan('.')
<commit_msg>Fix regression: 'import pytest' error when starting adhocracy<commit_after>""" Catalog utilities."""
from substanced import catalog
from substanced.interfaces import IIndexingActionProcessor
from zope.interface import Interface
@catalog.catalog_factory('adhocracy')
class AdhocracyCatalogFactory:
tag = catalog.Keyword()
def includeme(config):
"""Register catalog utilities."""
config.add_view_predicate('catalogable', catalog._CatalogablePredicate)
config.add_directive('add_catalog_factory', catalog.add_catalog_factory)
config.add_directive('add_indexview',
catalog.add_indexview,
action_wrap=False)
config.registry.registerAdapter(catalog.deferred.BasicActionProcessor,
(Interface,),
IIndexingActionProcessor)
config.scan('substanced.catalog')
config.add_catalog_factory('adhocracy', AdhocracyCatalogFactory)
|
82fec866f384813118411d0485e31b12deaed9f0 | pmst/examples/isotropic_point_source/isotropic_point_source.py | pmst/examples/isotropic_point_source/isotropic_point_source.py | from pmst.source import DirectedPointSource
from pmst.microscope import Microscope
from pmst.detector import Detector
from pmst.geometry import Point
import numpy as np
s = DirectedPointSource(Point(0, 0, 0), n_rays=1e5, direction=Point(0, 0, 1), psi=np.pi/2)
center = Point(0, 0, 2)
x_edge = Point(5, 0, 2)
y_edge = Point(0, 5, 2)
n_pixels = 100
d = Detector(center, x_edge, y_edge, n_pixels, n_pixels)
m = Microscope(source=s, detector=d)
m.add_component(d)
m.simulate()
with open(__file__, 'r') as myfile:
src = myfile.readlines()
m.plot_results('isotropic_point_source.png', src=src)
| from pmst.source import DirectedPointSource
from pmst.microscope import Microscope
from pmst.detector import Detector
from pmst.geometry import Point
import numpy as np
import time; start = time.time(); print('Running...')
s = DirectedPointSource(Point(0, 0, 0), n_rays=1e5, direction=Point(0, 0, 1), psi=np.pi/2)
center = Point(0, 0, 2)
x_edge = Point(5, 0, 2)
y_edge = Point(0, 5, 2)
n_pixels = 100
d = Detector(center, x_edge, y_edge, n_pixels, n_pixels)
m = Microscope(source=s, detector=d)
m.add_component(d)
m.simulate()
with open(__file__, 'r') as myfile:
src = myfile.readlines()
m.plot_results('isotropic_point_source.png', src=src)
print('Run time:', np.round(time.time() - start, 2), 's')
| Add timing message to example | Add timing message to example
| Python | mit | talonchandler/pmst | from pmst.source import DirectedPointSource
from pmst.microscope import Microscope
from pmst.detector import Detector
from pmst.geometry import Point
import numpy as np
s = DirectedPointSource(Point(0, 0, 0), n_rays=1e5, direction=Point(0, 0, 1), psi=np.pi/2)
center = Point(0, 0, 2)
x_edge = Point(5, 0, 2)
y_edge = Point(0, 5, 2)
n_pixels = 100
d = Detector(center, x_edge, y_edge, n_pixels, n_pixels)
m = Microscope(source=s, detector=d)
m.add_component(d)
m.simulate()
with open(__file__, 'r') as myfile:
src = myfile.readlines()
m.plot_results('isotropic_point_source.png', src=src)
Add timing message to example | from pmst.source import DirectedPointSource
from pmst.microscope import Microscope
from pmst.detector import Detector
from pmst.geometry import Point
import numpy as np
import time; start = time.time(); print('Running...')
s = DirectedPointSource(Point(0, 0, 0), n_rays=1e5, direction=Point(0, 0, 1), psi=np.pi/2)
center = Point(0, 0, 2)
x_edge = Point(5, 0, 2)
y_edge = Point(0, 5, 2)
n_pixels = 100
d = Detector(center, x_edge, y_edge, n_pixels, n_pixels)
m = Microscope(source=s, detector=d)
m.add_component(d)
m.simulate()
with open(__file__, 'r') as myfile:
src = myfile.readlines()
m.plot_results('isotropic_point_source.png', src=src)
print('Run time:', np.round(time.time() - start, 2), 's')
| <commit_before>from pmst.source import DirectedPointSource
from pmst.microscope import Microscope
from pmst.detector import Detector
from pmst.geometry import Point
import numpy as np
s = DirectedPointSource(Point(0, 0, 0), n_rays=1e5, direction=Point(0, 0, 1), psi=np.pi/2)
center = Point(0, 0, 2)
x_edge = Point(5, 0, 2)
y_edge = Point(0, 5, 2)
n_pixels = 100
d = Detector(center, x_edge, y_edge, n_pixels, n_pixels)
m = Microscope(source=s, detector=d)
m.add_component(d)
m.simulate()
with open(__file__, 'r') as myfile:
src = myfile.readlines()
m.plot_results('isotropic_point_source.png', src=src)
<commit_msg>Add timing message to example<commit_after> | from pmst.source import DirectedPointSource
from pmst.microscope import Microscope
from pmst.detector import Detector
from pmst.geometry import Point
import numpy as np
import time; start = time.time(); print('Running...')
s = DirectedPointSource(Point(0, 0, 0), n_rays=1e5, direction=Point(0, 0, 1), psi=np.pi/2)
center = Point(0, 0, 2)
x_edge = Point(5, 0, 2)
y_edge = Point(0, 5, 2)
n_pixels = 100
d = Detector(center, x_edge, y_edge, n_pixels, n_pixels)
m = Microscope(source=s, detector=d)
m.add_component(d)
m.simulate()
with open(__file__, 'r') as myfile:
src = myfile.readlines()
m.plot_results('isotropic_point_source.png', src=src)
print('Run time:', np.round(time.time() - start, 2), 's')
| from pmst.source import DirectedPointSource
from pmst.microscope import Microscope
from pmst.detector import Detector
from pmst.geometry import Point
import numpy as np
s = DirectedPointSource(Point(0, 0, 0), n_rays=1e5, direction=Point(0, 0, 1), psi=np.pi/2)
center = Point(0, 0, 2)
x_edge = Point(5, 0, 2)
y_edge = Point(0, 5, 2)
n_pixels = 100
d = Detector(center, x_edge, y_edge, n_pixels, n_pixels)
m = Microscope(source=s, detector=d)
m.add_component(d)
m.simulate()
with open(__file__, 'r') as myfile:
src = myfile.readlines()
m.plot_results('isotropic_point_source.png', src=src)
Add timing message to examplefrom pmst.source import DirectedPointSource
from pmst.microscope import Microscope
from pmst.detector import Detector
from pmst.geometry import Point
import numpy as np
import time; start = time.time(); print('Running...')
s = DirectedPointSource(Point(0, 0, 0), n_rays=1e5, direction=Point(0, 0, 1), psi=np.pi/2)
center = Point(0, 0, 2)
x_edge = Point(5, 0, 2)
y_edge = Point(0, 5, 2)
n_pixels = 100
d = Detector(center, x_edge, y_edge, n_pixels, n_pixels)
m = Microscope(source=s, detector=d)
m.add_component(d)
m.simulate()
with open(__file__, 'r') as myfile:
src = myfile.readlines()
m.plot_results('isotropic_point_source.png', src=src)
print('Run time:', np.round(time.time() - start, 2), 's')
| <commit_before>from pmst.source import DirectedPointSource
from pmst.microscope import Microscope
from pmst.detector import Detector
from pmst.geometry import Point
import numpy as np
s = DirectedPointSource(Point(0, 0, 0), n_rays=1e5, direction=Point(0, 0, 1), psi=np.pi/2)
center = Point(0, 0, 2)
x_edge = Point(5, 0, 2)
y_edge = Point(0, 5, 2)
n_pixels = 100
d = Detector(center, x_edge, y_edge, n_pixels, n_pixels)
m = Microscope(source=s, detector=d)
m.add_component(d)
m.simulate()
with open(__file__, 'r') as myfile:
src = myfile.readlines()
m.plot_results('isotropic_point_source.png', src=src)
<commit_msg>Add timing message to example<commit_after>from pmst.source import DirectedPointSource
from pmst.microscope import Microscope
from pmst.detector import Detector
from pmst.geometry import Point
import numpy as np
import time; start = time.time(); print('Running...')
s = DirectedPointSource(Point(0, 0, 0), n_rays=1e5, direction=Point(0, 0, 1), psi=np.pi/2)
center = Point(0, 0, 2)
x_edge = Point(5, 0, 2)
y_edge = Point(0, 5, 2)
n_pixels = 100
d = Detector(center, x_edge, y_edge, n_pixels, n_pixels)
m = Microscope(source=s, detector=d)
m.add_component(d)
m.simulate()
with open(__file__, 'r') as myfile:
src = myfile.readlines()
m.plot_results('isotropic_point_source.png', src=src)
print('Run time:', np.round(time.time() - start, 2), 's')
|
ebfcef6ecf0acc202af081e7aa487e50fd7785af | soulmate_finder/__init__.py | soulmate_finder/__init__.py | # allow `soulmate_finder` to be imported
# FIXME: This is bad
# FIXME: WHY IS THIS CAUSING A RUNTIMEWARNING???
# from .__main__ import *
| from .core import *
| Allow package to be imported | Allow package to be imported
| Python | mit | erkghlerngm44/r-anime-soulmate-finder | # allow `soulmate_finder` to be imported
# FIXME: This is bad
# FIXME: WHY IS THIS CAUSING A RUNTIMEWARNING???
# from .__main__ import *
Allow package to be imported | from .core import *
| <commit_before># allow `soulmate_finder` to be imported
# FIXME: This is bad
# FIXME: WHY IS THIS CAUSING A RUNTIMEWARNING???
# from .__main__ import *
<commit_msg>Allow package to be imported<commit_after> | from .core import *
| # allow `soulmate_finder` to be imported
# FIXME: This is bad
# FIXME: WHY IS THIS CAUSING A RUNTIMEWARNING???
# from .__main__ import *
Allow package to be importedfrom .core import *
| <commit_before># allow `soulmate_finder` to be imported
# FIXME: This is bad
# FIXME: WHY IS THIS CAUSING A RUNTIMEWARNING???
# from .__main__ import *
<commit_msg>Allow package to be imported<commit_after>from .core import *
|
42e78f7ff795202843085daa65d241cf3fe29d08 | xoinvader/tests/test_level.py | xoinvader/tests/test_level.py | """Test xoinvader.level module."""
from xoinvader.level import Level
# pylint: disable=invalid-name,protected-access,missing-docstring
def test_wave():
e = Level()
# pylint: disable=too-few-public-methods
class MockObject(object):
def __init__(self):
self.value = 0
def add(self):
self.value += 10
a = MockObject()
b = MockObject()
e.add_event(10, a.add)
e.add_event(20, b.add)
e.speed = 10
e.update()
assert a.value == 0
assert b.value == 0
e.start()
e.update()
assert a.value == 10
assert b.value == 0
e.update()
assert a.value == 10
assert b.value == 10
assert not e.running
| """Test xoinvader.level module."""
from xoinvader.level import Level
# pylint: disable=invalid-name,protected-access,missing-docstring
def test_level():
e = Level()
# pylint: disable=too-few-public-methods
class MockObject(object):
def __init__(self):
self.value = 0
def add(self):
self.value += 10
a = MockObject()
b = MockObject()
e.add_event(10, a.add)
e.add_event(20, b.add)
e.speed = 10
assert e.speed == 10
e.update()
assert a.value == 0
assert b.value == 0
e.start()
e.update()
assert a.value == 10
assert b.value == 0
e.update()
assert a.value == 10
assert b.value == 10
assert not e.running
| Fix test name, cover speed getter | Fix test name, cover speed getter
Signed-off-by: pkulev <661f9993ccc93424ad20871d2ea98bc2860e5968@gmail.com>
| Python | mit | pkulev/xoinvader,pankshok/xoinvader | """Test xoinvader.level module."""
from xoinvader.level import Level
# pylint: disable=invalid-name,protected-access,missing-docstring
def test_wave():
e = Level()
# pylint: disable=too-few-public-methods
class MockObject(object):
def __init__(self):
self.value = 0
def add(self):
self.value += 10
a = MockObject()
b = MockObject()
e.add_event(10, a.add)
e.add_event(20, b.add)
e.speed = 10
e.update()
assert a.value == 0
assert b.value == 0
e.start()
e.update()
assert a.value == 10
assert b.value == 0
e.update()
assert a.value == 10
assert b.value == 10
assert not e.running
Fix test name, cover speed getter
Signed-off-by: pkulev <661f9993ccc93424ad20871d2ea98bc2860e5968@gmail.com> | """Test xoinvader.level module."""
from xoinvader.level import Level
# pylint: disable=invalid-name,protected-access,missing-docstring
def test_level():
e = Level()
# pylint: disable=too-few-public-methods
class MockObject(object):
def __init__(self):
self.value = 0
def add(self):
self.value += 10
a = MockObject()
b = MockObject()
e.add_event(10, a.add)
e.add_event(20, b.add)
e.speed = 10
assert e.speed == 10
e.update()
assert a.value == 0
assert b.value == 0
e.start()
e.update()
assert a.value == 10
assert b.value == 0
e.update()
assert a.value == 10
assert b.value == 10
assert not e.running
| <commit_before>"""Test xoinvader.level module."""
from xoinvader.level import Level
# pylint: disable=invalid-name,protected-access,missing-docstring
def test_wave():
    """Events fire tick by tick once the level is started.

    Also asserts the ``speed`` getter, which was previously set but
    never read back.
    """
    e = Level()

    # pylint: disable=too-few-public-methods
    class MockObject(object):
        def __init__(self):
            self.value = 0

        def add(self):
            self.value += 10

    a = MockObject()
    b = MockObject()
    e.add_event(10, a.add)
    e.add_event(20, b.add)
    e.speed = 10
    # Cover the speed getter as well as the setter.
    assert e.speed == 10
    e.update()
    # Before start(), update() must not fire any events.
    assert a.value == 0
    assert b.value == 0
    e.start()
    e.update()
    # First update after start fires only the event registered at 10.
    assert a.value == 10
    assert b.value == 0
    e.update()
    # Second update fires the event registered at 20 as well.
    assert a.value == 10
    assert b.value == 10
    # All events consumed: level is no longer running.
    assert not e.running
<commit_msg>Fix test name, cover speed getter
Signed-off-by: pkulev <661f9993ccc93424ad20871d2ea98bc2860e5968@gmail.com><commit_after> | """Test xoinvader.level module."""
from xoinvader.level import Level
# pylint: disable=invalid-name,protected-access,missing-docstring
def test_level():
    """Exercise event scheduling, the speed property and running state."""
    level = Level()

    # pylint: disable=too-few-public-methods
    class Recorder(object):
        def __init__(self):
            self.value = 0

        def add(self):
            self.value += 10

    first = Recorder()
    second = Recorder()
    level.add_event(10, first.add)
    level.add_event(20, second.add)

    level.speed = 10
    assert level.speed == 10

    # Updates before start() are no-ops.
    level.update()
    assert first.value == 0
    assert second.value == 0

    level.start()
    level.update()
    assert first.value == 10
    assert second.value == 0

    level.update()
    assert first.value == 10
    assert second.value == 10
    assert not level.running
| """Test xoinvader.level module."""
from xoinvader.level import Level
# pylint: disable=invalid-name,protected-access,missing-docstring
def test_wave():
    """Events fire tick by tick once the level is started.

    Also asserts the ``speed`` getter, which was previously set but
    never read back.
    """
    e = Level()

    # pylint: disable=too-few-public-methods
    class MockObject(object):
        def __init__(self):
            self.value = 0

        def add(self):
            self.value += 10

    a = MockObject()
    b = MockObject()
    e.add_event(10, a.add)
    e.add_event(20, b.add)
    e.speed = 10
    # Cover the speed getter as well as the setter.
    assert e.speed == 10
    e.update()
    # Before start(), update() must not fire any events.
    assert a.value == 0
    assert b.value == 0
    e.start()
    e.update()
    # First update after start fires only the event registered at 10.
    assert a.value == 10
    assert b.value == 0
    e.update()
    # Second update fires the event registered at 20 as well.
    assert a.value == 10
    assert b.value == 10
    # All events consumed: level is no longer running.
    assert not e.running
Fix test name, cover speed getter
Signed-off-by: pkulev <661f9993ccc93424ad20871d2ea98bc2860e5968@gmail.com>"""Test xoinvader.level module."""
from xoinvader.level import Level
# pylint: disable=invalid-name,protected-access,missing-docstring
def test_level():
    """Exercise event scheduling, the speed property and running state."""
    level = Level()

    # pylint: disable=too-few-public-methods
    class Recorder(object):
        def __init__(self):
            self.value = 0

        def add(self):
            self.value += 10

    first = Recorder()
    second = Recorder()
    level.add_event(10, first.add)
    level.add_event(20, second.add)

    level.speed = 10
    assert level.speed == 10

    # Updates before start() are no-ops.
    level.update()
    assert first.value == 0
    assert second.value == 0

    level.start()
    level.update()
    assert first.value == 10
    assert second.value == 0

    level.update()
    assert first.value == 10
    assert second.value == 10
    assert not level.running
| <commit_before>"""Test xoinvader.level module."""
from xoinvader.level import Level
# pylint: disable=invalid-name,protected-access,missing-docstring
def test_wave():
    """Events fire tick by tick once the level is started.

    Also asserts the ``speed`` getter, which was previously set but
    never read back.
    """
    e = Level()

    # pylint: disable=too-few-public-methods
    class MockObject(object):
        def __init__(self):
            self.value = 0

        def add(self):
            self.value += 10

    a = MockObject()
    b = MockObject()
    e.add_event(10, a.add)
    e.add_event(20, b.add)
    e.speed = 10
    # Cover the speed getter as well as the setter.
    assert e.speed == 10
    e.update()
    # Before start(), update() must not fire any events.
    assert a.value == 0
    assert b.value == 0
    e.start()
    e.update()
    # First update after start fires only the event registered at 10.
    assert a.value == 10
    assert b.value == 0
    e.update()
    # Second update fires the event registered at 20 as well.
    assert a.value == 10
    assert b.value == 10
    # All events consumed: level is no longer running.
    assert not e.running
<commit_msg>Fix test name, cover speed getter
Signed-off-by: pkulev <661f9993ccc93424ad20871d2ea98bc2860e5968@gmail.com><commit_after>"""Test xoinvader.level module."""
from xoinvader.level import Level
# pylint: disable=invalid-name,protected-access,missing-docstring
def test_level():
    """Exercise event scheduling, the speed property and running state."""
    level = Level()

    # pylint: disable=too-few-public-methods
    class Recorder(object):
        def __init__(self):
            self.value = 0

        def add(self):
            self.value += 10

    first = Recorder()
    second = Recorder()
    level.add_event(10, first.add)
    level.add_event(20, second.add)

    level.speed = 10
    assert level.speed == 10

    # Updates before start() are no-ops.
    level.update()
    assert first.value == 0
    assert second.value == 0

    level.start()
    level.update()
    assert first.value == 10
    assert second.value == 0

    level.update()
    assert first.value == 10
    assert second.value == 10
    assert not level.running
|
0c5888aeab12c82d84ead79e4005133e0e91ef21 | dj/manage.py | dj/manage.py | #!/usr/bin/env python
###################################################################
#
# Copyright (c) 2011 Canonical Ltd.
# Copyright (c) 2013 Miing.org <samuel.miing@gmail.com>
#
# This software is licensed under the GNU Affero General Public
# License version 3 (AGPLv3), as published by the Free Software
# Foundation, and may be copied, distributed, and modified under
# those terms.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# file LICENSE for more details.
#
###################################################################
import os
import sys
import paths
# NOTE: setup_paths() must run before the django_configglue import below;
# presumably it makes the project's bundled libraries importable — confirm
# in paths.py.
paths.setup_paths()
if __name__ == '__main__':
    # Bug fix: os and sys were used below without ever being imported,
    # which raised NameError as soon as the script ran.
    # Default the Django settings module; a pre-set environment value wins.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'dj.settings')
    from django_configglue.management import execute_from_command_line
    execute_from_command_line(sys.argv)
| #!/usr/bin/env python
###################################################################
#
# Copyright (c) 2011 Canonical Ltd.
# Copyright (c) 2013 Miing.org <samuel.miing@gmail.com>
#
# This software is licensed under the GNU Affero General Public
# License version 3 (AGPLv3), as published by the Free Software
# Foundation, and may be copied, distributed, and modified under
# those terms.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# file LICENSE for more details.
#
###################################################################
import os
import sys
import paths
# NOTE: setup_paths() must run before the django_configglue import below;
# presumably it makes the project's bundled libraries importable — confirm
# in paths.py.
paths.setup_paths()
if __name__ == '__main__':
    # Default the Django settings module; a pre-set environment value wins.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'dj.settings')
    from django_configglue.management import execute_from_command_line
    execute_from_command_line(sys.argv)
| Fix 'not import from sys' | Fix 'not import from sys'
| Python | agpl-3.0 | miing/mci_migo,miing/mci_migo,miing/mci_migo | #!/usr/bin/env python
###################################################################
#
# Copyright (c) 2011 Canonical Ltd.
# Copyright (c) 2013 Miing.org <samuel.miing@gmail.com>
#
# This software is licensed under the GNU Affero General Public
# License version 3 (AGPLv3), as published by the Free Software
# Foundation, and may be copied, distributed, and modified under
# those terms.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# file LICENSE for more details.
#
###################################################################
import os
import sys
import paths
# NOTE: setup_paths() must run before the django_configglue import below;
# presumably it makes the project's bundled libraries importable — confirm
# in paths.py.
paths.setup_paths()
if __name__ == '__main__':
    # Bug fix: os and sys were used below without ever being imported,
    # which raised NameError as soon as the script ran.
    # Default the Django settings module; a pre-set environment value wins.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'dj.settings')
    from django_configglue.management import execute_from_command_line
    execute_from_command_line(sys.argv)
Fix 'not import from sys' | #!/usr/bin/env python
###################################################################
#
# Copyright (c) 2011 Canonical Ltd.
# Copyright (c) 2013 Miing.org <samuel.miing@gmail.com>
#
# This software is licensed under the GNU Affero General Public
# License version 3 (AGPLv3), as published by the Free Software
# Foundation, and may be copied, distributed, and modified under
# those terms.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# file LICENSE for more details.
#
###################################################################
import os
import sys
import paths
# NOTE: setup_paths() must run before the django_configglue import below;
# presumably it makes the project's bundled libraries importable — confirm
# in paths.py.
paths.setup_paths()
if __name__ == '__main__':
    # Default the Django settings module; a pre-set environment value wins.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'dj.settings')
    from django_configglue.management import execute_from_command_line
    execute_from_command_line(sys.argv)
| <commit_before>#!/usr/bin/env python
###################################################################
#
# Copyright (c) 2011 Canonical Ltd.
# Copyright (c) 2013 Miing.org <samuel.miing@gmail.com>
#
# This software is licensed under the GNU Affero General Public
# License version 3 (AGPLv3), as published by the Free Software
# Foundation, and may be copied, distributed, and modified under
# those terms.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# file LICENSE for more details.
#
###################################################################
import os
import sys
import paths
# NOTE: setup_paths() must run before the django_configglue import below;
# presumably it makes the project's bundled libraries importable — confirm
# in paths.py.
paths.setup_paths()
if __name__ == '__main__':
    # Bug fix: os and sys were used below without ever being imported,
    # which raised NameError as soon as the script ran.
    # Default the Django settings module; a pre-set environment value wins.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'dj.settings')
    from django_configglue.management import execute_from_command_line
    execute_from_command_line(sys.argv)
<commit_msg>Fix 'not import from sys'<commit_after> | #!/usr/bin/env python
###################################################################
#
# Copyright (c) 2011 Canonical Ltd.
# Copyright (c) 2013 Miing.org <samuel.miing@gmail.com>
#
# This software is licensed under the GNU Affero General Public
# License version 3 (AGPLv3), as published by the Free Software
# Foundation, and may be copied, distributed, and modified under
# those terms.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# file LICENSE for more details.
#
###################################################################
import os
import sys
import paths
# NOTE: setup_paths() must run before the django_configglue import below;
# presumably it makes the project's bundled libraries importable — confirm
# in paths.py.
paths.setup_paths()
if __name__ == '__main__':
    # Default the Django settings module; a pre-set environment value wins.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'dj.settings')
    from django_configglue.management import execute_from_command_line
    execute_from_command_line(sys.argv)
| #!/usr/bin/env python
###################################################################
#
# Copyright (c) 2011 Canonical Ltd.
# Copyright (c) 2013 Miing.org <samuel.miing@gmail.com>
#
# This software is licensed under the GNU Affero General Public
# License version 3 (AGPLv3), as published by the Free Software
# Foundation, and may be copied, distributed, and modified under
# those terms.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# file LICENSE for more details.
#
###################################################################
import os
import sys
import paths
# NOTE: setup_paths() must run before the django_configglue import below;
# presumably it makes the project's bundled libraries importable — confirm
# in paths.py.
paths.setup_paths()
if __name__ == '__main__':
    # Bug fix: os and sys were used below without ever being imported,
    # which raised NameError as soon as the script ran.
    # Default the Django settings module; a pre-set environment value wins.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'dj.settings')
    from django_configglue.management import execute_from_command_line
    execute_from_command_line(sys.argv)
Fix 'not import from sys'#!/usr/bin/env python
###################################################################
#
# Copyright (c) 2011 Canonical Ltd.
# Copyright (c) 2013 Miing.org <samuel.miing@gmail.com>
#
# This software is licensed under the GNU Affero General Public
# License version 3 (AGPLv3), as published by the Free Software
# Foundation, and may be copied, distributed, and modified under
# those terms.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# file LICENSE for more details.
#
###################################################################
import os
import sys
import paths
# NOTE: setup_paths() must run before the django_configglue import below;
# presumably it makes the project's bundled libraries importable — confirm
# in paths.py.
paths.setup_paths()
if __name__ == '__main__':
    # Default the Django settings module; a pre-set environment value wins.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'dj.settings')
    from django_configglue.management import execute_from_command_line
    execute_from_command_line(sys.argv)
| <commit_before>#!/usr/bin/env python
###################################################################
#
# Copyright (c) 2011 Canonical Ltd.
# Copyright (c) 2013 Miing.org <samuel.miing@gmail.com>
#
# This software is licensed under the GNU Affero General Public
# License version 3 (AGPLv3), as published by the Free Software
# Foundation, and may be copied, distributed, and modified under
# those terms.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# file LICENSE for more details.
#
###################################################################
import os
import sys
import paths
# NOTE: setup_paths() must run before the django_configglue import below;
# presumably it makes the project's bundled libraries importable — confirm
# in paths.py.
paths.setup_paths()
if __name__ == '__main__':
    # Bug fix: os and sys were used below without ever being imported,
    # which raised NameError as soon as the script ran.
    # Default the Django settings module; a pre-set environment value wins.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'dj.settings')
    from django_configglue.management import execute_from_command_line
    execute_from_command_line(sys.argv)
<commit_msg>Fix 'not import from sys'<commit_after>#!/usr/bin/env python
###################################################################
#
# Copyright (c) 2011 Canonical Ltd.
# Copyright (c) 2013 Miing.org <samuel.miing@gmail.com>
#
# This software is licensed under the GNU Affero General Public
# License version 3 (AGPLv3), as published by the Free Software
# Foundation, and may be copied, distributed, and modified under
# those terms.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# file LICENSE for more details.
#
###################################################################
import os
import sys
import paths
# NOTE: setup_paths() must run before the django_configglue import below;
# presumably it makes the project's bundled libraries importable — confirm
# in paths.py.
paths.setup_paths()
if __name__ == '__main__':
    # Default the Django settings module; a pre-set environment value wins.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'dj.settings')
    from django_configglue.management import execute_from_command_line
    execute_from_command_line(sys.argv)
|
819b0375a2d0386fe349863b11ddba7da77ec1be | node/rand.py | node/rand.py | #!/usr/bin/env python
import datetime
import random
import ephem
from nodes import Node
class Random(Node):
    """Node providing random selection and a lunar-calendar lookup."""
    char = "H"
    args = 1
    results = 1

    def random_choice(self, inp: Node.indexable):
        """Choose one in a list randomly"""
        return [random.choice(inp)]

    def randint(self, inp: int):
        """Random number between 0 and inp inclusive"""
        return random.randint(0, inp)

    def get_next_new_moon(self, time: Node.clock):
        """Gets the date of the next new moon"""
        # Bug fix: this called ephem.next_full_moon (and the docstring said
        # "full moon"), contradicting the method's name.
        new_time = datetime.datetime(*time.time_obj[:7])
        return ephem.next_new_moon(new_time)
| #!/usr/bin/env python
import datetime
import random
import ephem
from nodes import Node
class Random(Node):
    """Node exposing random selection plus a lunar-calendar lookup."""
    char = "H"
    args = 1
    results = 1

    def random_choice(self, inp: Node.indexable):
        """Pick a single random element, wrapped in a list."""
        picked = random.choice(inp)
        return [picked]

    def randint(self, inp: int):
        """Uniform random integer drawn from [0, inp]."""
        return random.randint(0, inp)

    def get_next_new_moon(self, time: Node.clock):
        """Date of the next new moon after *time*."""
        moment = datetime.datetime(*time.time_obj[:7])
        return ephem.next_new_moon(moment)
| Fix get next new moon | Fix get next new moon
| Python | mit | muddyfish/PYKE,muddyfish/PYKE | #!/usr/bin/env python
import datetime
import random
import ephem
from nodes import Node
class Random(Node):
    """Node providing random selection and a lunar-calendar lookup."""
    char = "H"
    args = 1
    results = 1

    def random_choice(self, inp: Node.indexable):
        """Choose one in a list randomly"""
        return [random.choice(inp)]

    def randint(self, inp: int):
        """Random number between 0 and inp inclusive"""
        return random.randint(0, inp)

    def get_next_new_moon(self, time: Node.clock):
        """Gets the date of the next new moon"""
        # Bug fix: this called ephem.next_full_moon (and the docstring said
        # "full moon"), contradicting the method's name.
        new_time = datetime.datetime(*time.time_obj[:7])
        return ephem.next_new_moon(new_time)
Fix get next new moon | #!/usr/bin/env python
import datetime
import random
import ephem
from nodes import Node
class Random(Node):
char = "H"
args = 1
results = 1
def random_choice(self, inp:Node.indexable):
"""Choose one in a list randomly"""
return [random.choice(inp)]
def randint(self, inp:int):
"""Random number between 0 and inp inclusive"""
return random.randint(0,inp)
def get_next_new_moon(self, time: Node.clock):
"""Gets the date of the next new moon"""
new_time = datetime.datetime(*time.time_obj[:7])
return ephem.next_new_moon(new_time)
| <commit_before>#!/usr/bin/env python
import datetime
import random
import ephem
from nodes import Node
class Random(Node):
char = "H"
args = 1
results = 1
def random_choice(self, inp:Node.indexable):
"""Choose one in a list randomly"""
return [random.choice(inp)]
def randint(self, inp:int):
"""Random number between 0 and inp inclusive"""
return random.randint(0,inp)
def get_next_new_moon(self, time: Node.clock):
"""Gets the date of the next full moon"""
new_time = datetime.datetime(*time.time_obj[:7])
return ephem.next_full_moon(new_time)
<commit_msg>Fix get next new moon<commit_after> | #!/usr/bin/env python
import datetime
import random
import ephem
from nodes import Node
class Random(Node):
char = "H"
args = 1
results = 1
def random_choice(self, inp:Node.indexable):
"""Choose one in a list randomly"""
return [random.choice(inp)]
def randint(self, inp:int):
"""Random number between 0 and inp inclusive"""
return random.randint(0,inp)
def get_next_new_moon(self, time: Node.clock):
"""Gets the date of the next new moon"""
new_time = datetime.datetime(*time.time_obj[:7])
return ephem.next_new_moon(new_time)
| #!/usr/bin/env python
import datetime
import random
import ephem
from nodes import Node
class Random(Node):
char = "H"
args = 1
results = 1
def random_choice(self, inp:Node.indexable):
"""Choose one in a list randomly"""
return [random.choice(inp)]
def randint(self, inp:int):
"""Random number between 0 and inp inclusive"""
return random.randint(0,inp)
def get_next_new_moon(self, time: Node.clock):
"""Gets the date of the next full moon"""
new_time = datetime.datetime(*time.time_obj[:7])
return ephem.next_full_moon(new_time)
Fix get next new moon#!/usr/bin/env python
import datetime
import random
import ephem
from nodes import Node
class Random(Node):
char = "H"
args = 1
results = 1
def random_choice(self, inp:Node.indexable):
"""Choose one in a list randomly"""
return [random.choice(inp)]
def randint(self, inp:int):
"""Random number between 0 and inp inclusive"""
return random.randint(0,inp)
def get_next_new_moon(self, time: Node.clock):
"""Gets the date of the next new moon"""
new_time = datetime.datetime(*time.time_obj[:7])
return ephem.next_new_moon(new_time)
| <commit_before>#!/usr/bin/env python
import datetime
import random
import ephem
from nodes import Node
class Random(Node):
char = "H"
args = 1
results = 1
def random_choice(self, inp:Node.indexable):
"""Choose one in a list randomly"""
return [random.choice(inp)]
def randint(self, inp:int):
"""Random number between 0 and inp inclusive"""
return random.randint(0,inp)
def get_next_new_moon(self, time: Node.clock):
"""Gets the date of the next full moon"""
new_time = datetime.datetime(*time.time_obj[:7])
return ephem.next_full_moon(new_time)
<commit_msg>Fix get next new moon<commit_after>#!/usr/bin/env python
import datetime
import random
import ephem
from nodes import Node
class Random(Node):
    """Node exposing random selection plus a lunar-calendar lookup."""
    char = "H"
    args = 1
    results = 1

    def random_choice(self, inp: Node.indexable):
        """Pick a single random element, wrapped in a list."""
        picked = random.choice(inp)
        return [picked]

    def randint(self, inp: int):
        """Uniform random integer drawn from [0, inp]."""
        return random.randint(0, inp)

    def get_next_new_moon(self, time: Node.clock):
        """Date of the next new moon after *time*."""
        moment = datetime.datetime(*time.time_obj[:7])
        return ephem.next_new_moon(moment)
|
d8b3fcb34761af01e87026e3bc5b929bd6b68fc2 | frappe/patches/v11_0/copy_fetch_data_from_options.py | frappe/patches/v11_0/copy_fetch_data_from_options.py | import frappe
def execute():
    """Copy dotted fetch expressions from ``options`` into ``fetch_from``.

    Fields whose ``options`` holds a dotted path (``link_field.fieldname``)
    are fetched fields; that value now belongs in the dedicated
    ``fetch_from`` column.  Migrate standard fields, custom fields and
    property setters, clearing ``options`` where it held such a path.
    """
    # Make sure the current doctype schemas (with fetch_from) are loaded.
    frappe.reload_doc("core", "doctype", "docfield", force=True)
    frappe.reload_doc("custom", "doctype", "custom_field", force=True)
    frappe.reload_doc("custom", "doctype", "customize_form_field", force=True)
    frappe.reload_doc("custom", "doctype", "property_setter", force=True)
    frappe.db.sql('''
        update `tabDocField`
        set fetch_from = options, options=''
        where options like '%.%' and (fetch_from is NULL OR fetch_from='')
        and fieldtype in ('Data', 'Read Only', 'Text', 'Small Text', 'Text Editor', 'Code', 'Link', 'Check')
        and fieldname!='naming_series'
    ''')
    frappe.db.sql('''
        update `tabCustom Field`
        set fetch_from = options, options=''
        where options like '%.%' and (fetch_from is NULL OR fetch_from='')
        and fieldtype in ('Data', 'Read Only', 'Text', 'Small Text', 'Text Editor', 'Code', 'Link', 'Check')
        and fieldname!='naming_series'
    ''')
    # Bug fix: renaming the property without rebuilding `name` left stale
    # row keys — Property Setter rows are keyed "<doctype>-<field>-<property>".
    # MySQL applies SET assignments left to right, so `property` already
    # holds "fetch_from" when concat() is evaluated.
    frappe.db.sql('''
        update `tabProperty Setter`
        set property="fetch_from", name=concat(doc_type, '-', field_name, '-', property)
        where property="options" and value like '%.%'
        and property_type in ('Data', 'Read Only', 'Text', 'Small Text', 'Text Editor', 'Code', 'Link', 'Check')
        and field_name!='naming_series'
    ''')
| import frappe
def execute():
frappe.reload_doc("core", "doctype", "docfield", force=True)
frappe.reload_doc("custom", "doctype", "custom_field", force=True)
frappe.reload_doc("custom", "doctype", "customize_form_field", force=True)
frappe.reload_doc("custom", "doctype", "property_setter", force=True)
frappe.db.sql('''
update `tabDocField`
set fetch_from = options, options=''
where options like '%.%' and (fetch_from is NULL OR fetch_from='')
and fieldtype in ('Data', 'Read Only', 'Text', 'Small Text', 'Text Editor', 'Code', 'Link', 'Check')
and fieldname!='naming_series'
''')
frappe.db.sql('''
update `tabCustom Field`
set fetch_from = options, options=''
where options like '%.%' and (fetch_from is NULL OR fetch_from='')
and fieldtype in ('Data', 'Read Only', 'Text', 'Small Text', 'Text Editor', 'Code', 'Link', 'Check')
and fieldname!='naming_series'
''')
frappe.db.sql('''
update `tabProperty Setter`
set property="fetch_from", name=concat(doc_type, '-', field_name, '-', property)
where property="options" and value like '%.%'
and property_type in ('Data', 'Read Only', 'Text', 'Small Text', 'Text Editor', 'Code', 'Link', 'Check')
and field_name!='naming_series'
''') | Rename property setter for fetch_from | [fix] Rename property setter for fetch_from
| Python | mit | mhbu50/frappe,frappe/frappe,vjFaLk/frappe,RicardoJohann/frappe,ESS-LLP/frappe,mhbu50/frappe,saurabh6790/frappe,mhbu50/frappe,vjFaLk/frappe,yashodhank/frappe,almeidapaulopt/frappe,RicardoJohann/frappe,yashodhank/frappe,adityahase/frappe,vjFaLk/frappe,ESS-LLP/frappe,yashodhank/frappe,saurabh6790/frappe,RicardoJohann/frappe,adityahase/frappe,StrellaGroup/frappe,saurabh6790/frappe,saurabh6790/frappe,mhbu50/frappe,RicardoJohann/frappe,almeidapaulopt/frappe,frappe/frappe,adityahase/frappe,ESS-LLP/frappe,yashodhank/frappe,almeidapaulopt/frappe,StrellaGroup/frappe,almeidapaulopt/frappe,frappe/frappe,vjFaLk/frappe,StrellaGroup/frappe,ESS-LLP/frappe,adityahase/frappe | import frappe
def execute():
frappe.reload_doc("core", "doctype", "docfield", force=True)
frappe.reload_doc("custom", "doctype", "custom_field", force=True)
frappe.reload_doc("custom", "doctype", "customize_form_field", force=True)
frappe.reload_doc("custom", "doctype", "property_setter", force=True)
frappe.db.sql('''
update `tabDocField`
set fetch_from = options, options=''
where options like '%.%' and (fetch_from is NULL OR fetch_from='')
and fieldtype in ('Data', 'Read Only', 'Text', 'Small Text', 'Text Editor', 'Code', 'Link', 'Check')
and fieldname!='naming_series'
''')
frappe.db.sql('''
update `tabCustom Field`
set fetch_from = options, options=''
where options like '%.%' and (fetch_from is NULL OR fetch_from='')
and fieldtype in ('Data', 'Read Only', 'Text', 'Small Text', 'Text Editor', 'Code', 'Link', 'Check')
and fieldname!='naming_series'
''')
frappe.db.sql('''
update `tabProperty Setter`
set property="fetch_from"
where property="options" and value like '%.%'
and property_type in ('Data', 'Read Only', 'Text', 'Small Text', 'Text Editor', 'Code', 'Link', 'Check')
and field_name!='naming_series'
''')
[fix] Rename property setter for fetch_from | import frappe
def execute():
frappe.reload_doc("core", "doctype", "docfield", force=True)
frappe.reload_doc("custom", "doctype", "custom_field", force=True)
frappe.reload_doc("custom", "doctype", "customize_form_field", force=True)
frappe.reload_doc("custom", "doctype", "property_setter", force=True)
frappe.db.sql('''
update `tabDocField`
set fetch_from = options, options=''
where options like '%.%' and (fetch_from is NULL OR fetch_from='')
and fieldtype in ('Data', 'Read Only', 'Text', 'Small Text', 'Text Editor', 'Code', 'Link', 'Check')
and fieldname!='naming_series'
''')
frappe.db.sql('''
update `tabCustom Field`
set fetch_from = options, options=''
where options like '%.%' and (fetch_from is NULL OR fetch_from='')
and fieldtype in ('Data', 'Read Only', 'Text', 'Small Text', 'Text Editor', 'Code', 'Link', 'Check')
and fieldname!='naming_series'
''')
frappe.db.sql('''
update `tabProperty Setter`
set property="fetch_from", name=concat(doc_type, '-', field_name, '-', property)
where property="options" and value like '%.%'
and property_type in ('Data', 'Read Only', 'Text', 'Small Text', 'Text Editor', 'Code', 'Link', 'Check')
and field_name!='naming_series'
''') | <commit_before>import frappe
def execute():
frappe.reload_doc("core", "doctype", "docfield", force=True)
frappe.reload_doc("custom", "doctype", "custom_field", force=True)
frappe.reload_doc("custom", "doctype", "customize_form_field", force=True)
frappe.reload_doc("custom", "doctype", "property_setter", force=True)
frappe.db.sql('''
update `tabDocField`
set fetch_from = options, options=''
where options like '%.%' and (fetch_from is NULL OR fetch_from='')
and fieldtype in ('Data', 'Read Only', 'Text', 'Small Text', 'Text Editor', 'Code', 'Link', 'Check')
and fieldname!='naming_series'
''')
frappe.db.sql('''
update `tabCustom Field`
set fetch_from = options, options=''
where options like '%.%' and (fetch_from is NULL OR fetch_from='')
and fieldtype in ('Data', 'Read Only', 'Text', 'Small Text', 'Text Editor', 'Code', 'Link', 'Check')
and fieldname!='naming_series'
''')
frappe.db.sql('''
update `tabProperty Setter`
set property="fetch_from"
where property="options" and value like '%.%'
and property_type in ('Data', 'Read Only', 'Text', 'Small Text', 'Text Editor', 'Code', 'Link', 'Check')
and field_name!='naming_series'
''')
<commit_msg>[fix] Rename property setter for fetch_from<commit_after> | import frappe
def execute():
frappe.reload_doc("core", "doctype", "docfield", force=True)
frappe.reload_doc("custom", "doctype", "custom_field", force=True)
frappe.reload_doc("custom", "doctype", "customize_form_field", force=True)
frappe.reload_doc("custom", "doctype", "property_setter", force=True)
frappe.db.sql('''
update `tabDocField`
set fetch_from = options, options=''
where options like '%.%' and (fetch_from is NULL OR fetch_from='')
and fieldtype in ('Data', 'Read Only', 'Text', 'Small Text', 'Text Editor', 'Code', 'Link', 'Check')
and fieldname!='naming_series'
''')
frappe.db.sql('''
update `tabCustom Field`
set fetch_from = options, options=''
where options like '%.%' and (fetch_from is NULL OR fetch_from='')
and fieldtype in ('Data', 'Read Only', 'Text', 'Small Text', 'Text Editor', 'Code', 'Link', 'Check')
and fieldname!='naming_series'
''')
frappe.db.sql('''
update `tabProperty Setter`
set property="fetch_from", name=concat(doc_type, '-', field_name, '-', property)
where property="options" and value like '%.%'
and property_type in ('Data', 'Read Only', 'Text', 'Small Text', 'Text Editor', 'Code', 'Link', 'Check')
and field_name!='naming_series'
''') | import frappe
def execute():
frappe.reload_doc("core", "doctype", "docfield", force=True)
frappe.reload_doc("custom", "doctype", "custom_field", force=True)
frappe.reload_doc("custom", "doctype", "customize_form_field", force=True)
frappe.reload_doc("custom", "doctype", "property_setter", force=True)
frappe.db.sql('''
update `tabDocField`
set fetch_from = options, options=''
where options like '%.%' and (fetch_from is NULL OR fetch_from='')
and fieldtype in ('Data', 'Read Only', 'Text', 'Small Text', 'Text Editor', 'Code', 'Link', 'Check')
and fieldname!='naming_series'
''')
frappe.db.sql('''
update `tabCustom Field`
set fetch_from = options, options=''
where options like '%.%' and (fetch_from is NULL OR fetch_from='')
and fieldtype in ('Data', 'Read Only', 'Text', 'Small Text', 'Text Editor', 'Code', 'Link', 'Check')
and fieldname!='naming_series'
''')
frappe.db.sql('''
update `tabProperty Setter`
set property="fetch_from"
where property="options" and value like '%.%'
and property_type in ('Data', 'Read Only', 'Text', 'Small Text', 'Text Editor', 'Code', 'Link', 'Check')
and field_name!='naming_series'
''')
[fix] Rename property setter for fetch_fromimport frappe
def execute():
frappe.reload_doc("core", "doctype", "docfield", force=True)
frappe.reload_doc("custom", "doctype", "custom_field", force=True)
frappe.reload_doc("custom", "doctype", "customize_form_field", force=True)
frappe.reload_doc("custom", "doctype", "property_setter", force=True)
frappe.db.sql('''
update `tabDocField`
set fetch_from = options, options=''
where options like '%.%' and (fetch_from is NULL OR fetch_from='')
and fieldtype in ('Data', 'Read Only', 'Text', 'Small Text', 'Text Editor', 'Code', 'Link', 'Check')
and fieldname!='naming_series'
''')
frappe.db.sql('''
update `tabCustom Field`
set fetch_from = options, options=''
where options like '%.%' and (fetch_from is NULL OR fetch_from='')
and fieldtype in ('Data', 'Read Only', 'Text', 'Small Text', 'Text Editor', 'Code', 'Link', 'Check')
and fieldname!='naming_series'
''')
frappe.db.sql('''
update `tabProperty Setter`
set property="fetch_from", name=concat(doc_type, '-', field_name, '-', property)
where property="options" and value like '%.%'
and property_type in ('Data', 'Read Only', 'Text', 'Small Text', 'Text Editor', 'Code', 'Link', 'Check')
and field_name!='naming_series'
''') | <commit_before>import frappe
def execute():
frappe.reload_doc("core", "doctype", "docfield", force=True)
frappe.reload_doc("custom", "doctype", "custom_field", force=True)
frappe.reload_doc("custom", "doctype", "customize_form_field", force=True)
frappe.reload_doc("custom", "doctype", "property_setter", force=True)
frappe.db.sql('''
update `tabDocField`
set fetch_from = options, options=''
where options like '%.%' and (fetch_from is NULL OR fetch_from='')
and fieldtype in ('Data', 'Read Only', 'Text', 'Small Text', 'Text Editor', 'Code', 'Link', 'Check')
and fieldname!='naming_series'
''')
frappe.db.sql('''
update `tabCustom Field`
set fetch_from = options, options=''
where options like '%.%' and (fetch_from is NULL OR fetch_from='')
and fieldtype in ('Data', 'Read Only', 'Text', 'Small Text', 'Text Editor', 'Code', 'Link', 'Check')
and fieldname!='naming_series'
''')
frappe.db.sql('''
update `tabProperty Setter`
set property="fetch_from"
where property="options" and value like '%.%'
and property_type in ('Data', 'Read Only', 'Text', 'Small Text', 'Text Editor', 'Code', 'Link', 'Check')
and field_name!='naming_series'
''')
<commit_msg>[fix] Rename property setter for fetch_from<commit_after>import frappe
def execute():
    """Copy dotted fetch expressions from ``options`` into ``fetch_from``.

    Fields whose ``options`` holds a dotted path (``link_field.fieldname``)
    are fetched fields; that value now lives in the dedicated
    ``fetch_from`` column.
    """
    # Make sure the current doctype schemas (with fetch_from) are loaded.
    frappe.reload_doc("core", "doctype", "docfield", force=True)
    frappe.reload_doc("custom", "doctype", "custom_field", force=True)
    frappe.reload_doc("custom", "doctype", "customize_form_field", force=True)
    frappe.reload_doc("custom", "doctype", "property_setter", force=True)
    # Standard fields: move the dotted path and clear options.
    frappe.db.sql('''
        update `tabDocField`
        set fetch_from = options, options=''
        where options like '%.%' and (fetch_from is NULL OR fetch_from='')
        and fieldtype in ('Data', 'Read Only', 'Text', 'Small Text', 'Text Editor', 'Code', 'Link', 'Check')
        and fieldname!='naming_series'
    ''')
    # Custom fields: same migration as above.
    frappe.db.sql('''
        update `tabCustom Field`
        set fetch_from = options, options=''
        where options like '%.%' and (fetch_from is NULL OR fetch_from='')
        and fieldtype in ('Data', 'Read Only', 'Text', 'Small Text', 'Text Editor', 'Code', 'Link', 'Check')
        and fieldname!='naming_series'
    ''')
    # Property setters: retarget the property and rebuild the row name,
    # which is keyed as "<doctype>-<field>-<property>".
    frappe.db.sql('''
        update `tabProperty Setter`
        set property="fetch_from", name=concat(doc_type, '-', field_name, '-', property)
        where property="options" and value like '%.%'
        and property_type in ('Data', 'Read Only', 'Text', 'Small Text', 'Text Editor', 'Code', 'Link', 'Check')
        and field_name!='naming_series'
    ''')
ee61414cb53dd883d9f5ab60b0148bf0ed9bf3d7 | us_ignite/people/tests/integration_tests.py | us_ignite/people/tests/integration_tests.py | from nose.tools import eq_
from django.contrib.auth.models import User
from django.test import TestCase
from django_nose.tools import assert_redirects
from us_ignite.common.tests import utils
from us_ignite.profiles.tests import fixtures
def _teardown_profiles():
for model in [User]:
model.objects.all().delete()
class TestPeopleListUnauthenticated(TestCase):
def test_people_list_requires_auth(self):
url = '/people/'
response = self.client.get(url)
assert_redirects(response, utils.get_login_url(url))
def test_people_list_is_successful(self):
user = fixtures.get_user('us-ignite')
fixtures.get_profile(user=user, name='us ignite', slug='ignite')
self.client.login(username='us-ignite', password='us-ignite')
response = self.client.get('/people/')
eq_(response.status_code, 200)
self.client.logout()
_teardown_profiles()
class TestPeopleListPage(TestCase):
def test_people_page_detail_is_successful(self):
user = fixtures.get_user('us-ignite')
fixtures.get_profile(user=user, name='us ignite', slug='ignite')
self.client.login(username='us-ignite', password='us-ignite')
response = self.client.get('/people/ignite/')
eq_(response.status_code, 200)
self.client.logout()
_teardown_profiles()
def test_people_page_detail_requires_auth(self):
user = fixtures.get_user('us-ignite')
fixtures.get_profile(user=user, name='us ignite', slug='ignite')
response = self.client.get('/people/ignite/')
assert_redirects(response, utils.get_login_url('/people/ignite/'))
_teardown_profiles()
| from nose.tools import eq_
from django.contrib.auth.models import User
from django.test import TestCase
from django_nose.tools import assert_redirects
from us_ignite.common.tests import utils
from us_ignite.profiles.tests import fixtures
def _teardown_profiles():
for model in [User]:
model.objects.all().delete()
class TestPeopleDetailPage(TestCase):
def test_people_page_detail_is_successful(self):
user = fixtures.get_user('us-ignite')
fixtures.get_profile(user=user, name='us ignite', slug='ignite')
self.client.login(username='us-ignite', password='us-ignite')
response = self.client.get('/people/ignite/')
eq_(response.status_code, 200)
self.client.logout()
_teardown_profiles()
def test_people_page_detail_requires_auth(self):
user = fixtures.get_user('us-ignite')
fixtures.get_profile(user=user, name='us ignite', slug='ignite')
response = self.client.get('/people/ignite/')
assert_redirects(response, utils.get_login_url('/people/ignite/'))
_teardown_profiles()
| Update tests to refelct the removal of the users list view. | Update tests to refelct the removal of the users list view.
| Python | bsd-3-clause | us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite | from nose.tools import eq_
from django.contrib.auth.models import User
from django.test import TestCase
from django_nose.tools import assert_redirects
from us_ignite.common.tests import utils
from us_ignite.profiles.tests import fixtures
def _teardown_profiles():
for model in [User]:
model.objects.all().delete()
class TestPeopleListUnauthenticated(TestCase):
def test_people_list_requires_auth(self):
url = '/people/'
response = self.client.get(url)
assert_redirects(response, utils.get_login_url(url))
def test_people_list_is_successful(self):
user = fixtures.get_user('us-ignite')
fixtures.get_profile(user=user, name='us ignite', slug='ignite')
self.client.login(username='us-ignite', password='us-ignite')
response = self.client.get('/people/')
eq_(response.status_code, 200)
self.client.logout()
_teardown_profiles()
class TestPeopleListPage(TestCase):
def test_people_page_detail_is_successful(self):
user = fixtures.get_user('us-ignite')
fixtures.get_profile(user=user, name='us ignite', slug='ignite')
self.client.login(username='us-ignite', password='us-ignite')
response = self.client.get('/people/ignite/')
eq_(response.status_code, 200)
self.client.logout()
_teardown_profiles()
def test_people_page_detail_requires_auth(self):
user = fixtures.get_user('us-ignite')
fixtures.get_profile(user=user, name='us ignite', slug='ignite')
response = self.client.get('/people/ignite/')
assert_redirects(response, utils.get_login_url('/people/ignite/'))
_teardown_profiles()
Update tests to refelct the removal of the users list view. | from nose.tools import eq_
from django.contrib.auth.models import User
from django.test import TestCase
from django_nose.tools import assert_redirects
from us_ignite.common.tests import utils
from us_ignite.profiles.tests import fixtures
def _teardown_profiles():
for model in [User]:
model.objects.all().delete()
class TestPeopleDetailPage(TestCase):
def test_people_page_detail_is_successful(self):
user = fixtures.get_user('us-ignite')
fixtures.get_profile(user=user, name='us ignite', slug='ignite')
self.client.login(username='us-ignite', password='us-ignite')
response = self.client.get('/people/ignite/')
eq_(response.status_code, 200)
self.client.logout()
_teardown_profiles()
def test_people_page_detail_requires_auth(self):
user = fixtures.get_user('us-ignite')
fixtures.get_profile(user=user, name='us ignite', slug='ignite')
response = self.client.get('/people/ignite/')
assert_redirects(response, utils.get_login_url('/people/ignite/'))
_teardown_profiles()
| <commit_before>from nose.tools import eq_
from django.contrib.auth.models import User
from django.test import TestCase
from django_nose.tools import assert_redirects
from us_ignite.common.tests import utils
from us_ignite.profiles.tests import fixtures
def _teardown_profiles():
for model in [User]:
model.objects.all().delete()
class TestPeopleListUnauthenticated(TestCase):
def test_people_list_requires_auth(self):
url = '/people/'
response = self.client.get(url)
assert_redirects(response, utils.get_login_url(url))
def test_people_list_is_successful(self):
user = fixtures.get_user('us-ignite')
fixtures.get_profile(user=user, name='us ignite', slug='ignite')
self.client.login(username='us-ignite', password='us-ignite')
response = self.client.get('/people/')
eq_(response.status_code, 200)
self.client.logout()
_teardown_profiles()
class TestPeopleListPage(TestCase):
def test_people_page_detail_is_successful(self):
user = fixtures.get_user('us-ignite')
fixtures.get_profile(user=user, name='us ignite', slug='ignite')
self.client.login(username='us-ignite', password='us-ignite')
response = self.client.get('/people/ignite/')
eq_(response.status_code, 200)
self.client.logout()
_teardown_profiles()
def test_people_page_detail_requires_auth(self):
user = fixtures.get_user('us-ignite')
fixtures.get_profile(user=user, name='us ignite', slug='ignite')
response = self.client.get('/people/ignite/')
assert_redirects(response, utils.get_login_url('/people/ignite/'))
_teardown_profiles()
<commit_msg>Update tests to refelct the removal of the users list view.<commit_after> | from nose.tools import eq_
from django.contrib.auth.models import User
from django.test import TestCase
from django_nose.tools import assert_redirects
from us_ignite.common.tests import utils
from us_ignite.profiles.tests import fixtures
def _teardown_profiles():
for model in [User]:
model.objects.all().delete()
class TestPeopleDetailPage(TestCase):
def test_people_page_detail_is_successful(self):
user = fixtures.get_user('us-ignite')
fixtures.get_profile(user=user, name='us ignite', slug='ignite')
self.client.login(username='us-ignite', password='us-ignite')
response = self.client.get('/people/ignite/')
eq_(response.status_code, 200)
self.client.logout()
_teardown_profiles()
def test_people_page_detail_requires_auth(self):
user = fixtures.get_user('us-ignite')
fixtures.get_profile(user=user, name='us ignite', slug='ignite')
response = self.client.get('/people/ignite/')
assert_redirects(response, utils.get_login_url('/people/ignite/'))
_teardown_profiles()
| from nose.tools import eq_
from django.contrib.auth.models import User
from django.test import TestCase
from django_nose.tools import assert_redirects
from us_ignite.common.tests import utils
from us_ignite.profiles.tests import fixtures
def _teardown_profiles():
for model in [User]:
model.objects.all().delete()
class TestPeopleListUnauthenticated(TestCase):
def test_people_list_requires_auth(self):
url = '/people/'
response = self.client.get(url)
assert_redirects(response, utils.get_login_url(url))
def test_people_list_is_successful(self):
user = fixtures.get_user('us-ignite')
fixtures.get_profile(user=user, name='us ignite', slug='ignite')
self.client.login(username='us-ignite', password='us-ignite')
response = self.client.get('/people/')
eq_(response.status_code, 200)
self.client.logout()
_teardown_profiles()
class TestPeopleListPage(TestCase):
def test_people_page_detail_is_successful(self):
user = fixtures.get_user('us-ignite')
fixtures.get_profile(user=user, name='us ignite', slug='ignite')
self.client.login(username='us-ignite', password='us-ignite')
response = self.client.get('/people/ignite/')
eq_(response.status_code, 200)
self.client.logout()
_teardown_profiles()
def test_people_page_detail_requires_auth(self):
user = fixtures.get_user('us-ignite')
fixtures.get_profile(user=user, name='us ignite', slug='ignite')
response = self.client.get('/people/ignite/')
assert_redirects(response, utils.get_login_url('/people/ignite/'))
_teardown_profiles()
Update tests to refelct the removal of the users list view.from nose.tools import eq_
from django.contrib.auth.models import User
from django.test import TestCase
from django_nose.tools import assert_redirects
from us_ignite.common.tests import utils
from us_ignite.profiles.tests import fixtures
def _teardown_profiles():
for model in [User]:
model.objects.all().delete()
class TestPeopleDetailPage(TestCase):
def test_people_page_detail_is_successful(self):
user = fixtures.get_user('us-ignite')
fixtures.get_profile(user=user, name='us ignite', slug='ignite')
self.client.login(username='us-ignite', password='us-ignite')
response = self.client.get('/people/ignite/')
eq_(response.status_code, 200)
self.client.logout()
_teardown_profiles()
def test_people_page_detail_requires_auth(self):
user = fixtures.get_user('us-ignite')
fixtures.get_profile(user=user, name='us ignite', slug='ignite')
response = self.client.get('/people/ignite/')
assert_redirects(response, utils.get_login_url('/people/ignite/'))
_teardown_profiles()
| <commit_before>from nose.tools import eq_
from django.contrib.auth.models import User
from django.test import TestCase
from django_nose.tools import assert_redirects
from us_ignite.common.tests import utils
from us_ignite.profiles.tests import fixtures
def _teardown_profiles():
for model in [User]:
model.objects.all().delete()
class TestPeopleListUnauthenticated(TestCase):
def test_people_list_requires_auth(self):
url = '/people/'
response = self.client.get(url)
assert_redirects(response, utils.get_login_url(url))
def test_people_list_is_successful(self):
user = fixtures.get_user('us-ignite')
fixtures.get_profile(user=user, name='us ignite', slug='ignite')
self.client.login(username='us-ignite', password='us-ignite')
response = self.client.get('/people/')
eq_(response.status_code, 200)
self.client.logout()
_teardown_profiles()
class TestPeopleListPage(TestCase):
def test_people_page_detail_is_successful(self):
user = fixtures.get_user('us-ignite')
fixtures.get_profile(user=user, name='us ignite', slug='ignite')
self.client.login(username='us-ignite', password='us-ignite')
response = self.client.get('/people/ignite/')
eq_(response.status_code, 200)
self.client.logout()
_teardown_profiles()
def test_people_page_detail_requires_auth(self):
user = fixtures.get_user('us-ignite')
fixtures.get_profile(user=user, name='us ignite', slug='ignite')
response = self.client.get('/people/ignite/')
assert_redirects(response, utils.get_login_url('/people/ignite/'))
_teardown_profiles()
<commit_msg>Update tests to refelct the removal of the users list view.<commit_after>from nose.tools import eq_
from django.contrib.auth.models import User
from django.test import TestCase
from django_nose.tools import assert_redirects
from us_ignite.common.tests import utils
from us_ignite.profiles.tests import fixtures
def _teardown_profiles():
for model in [User]:
model.objects.all().delete()
class TestPeopleDetailPage(TestCase):
def test_people_page_detail_is_successful(self):
user = fixtures.get_user('us-ignite')
fixtures.get_profile(user=user, name='us ignite', slug='ignite')
self.client.login(username='us-ignite', password='us-ignite')
response = self.client.get('/people/ignite/')
eq_(response.status_code, 200)
self.client.logout()
_teardown_profiles()
def test_people_page_detail_requires_auth(self):
user = fixtures.get_user('us-ignite')
fixtures.get_profile(user=user, name='us ignite', slug='ignite')
response = self.client.get('/people/ignite/')
assert_redirects(response, utils.get_login_url('/people/ignite/'))
_teardown_profiles()
|
499b7e95b10c453083d2e0438bfef8a57b330d9a | gui/status/TrayIconView.py | gui/status/TrayIconView.py | import trayjenkins
from PySide import QtGui
from pyjenkins.Event import Event
from trayjenkins.status.interfaces import IView
class TrayIconView(IView):
def __init__(self, parentWidget, delayInSecons):
"""
@type parentWidget: QtGui.QWidget
"""
self._statusRefreshEvent= Event()
self._delayInSeconds= delayInSecons
self._trayIcon= QtGui.QSystemTrayIcon(parentWidget)
self._trayIcon.show()
self._icons= {}
self._icons[trayjenkins.status.FAILING]= QtGui.QIcon('images/status/failing.png')
self._icons[trayjenkins.status.OK]= QtGui.QIcon('images/status/ok.png')
self._icons[trayjenkins.status.UNKNOWN]= QtGui.QIcon('images/status/unknown.png')
self.setStatus(trayjenkins.status.UNKNOWN)
def statusRefreshEvent(self):
"""
Event arguments: <none>
@rtype: pyjenkins.interfaces.IEvent
"""
return self._statusRefreshEvent
def setStatus(self, status):
"""
@type status: str
"""
self._trayIcon.setIcon(self._icons[status])
self._trayIcon.setToolTip(status.capitalize())
self._trayIcon.showMessage(unicode("Jenkins status change"),
unicode("Status: %s" % status.capitalize()),
QtGui.QSystemTrayIcon.Information,# icon,
self._delayInSeconds * 1000)
| import trayjenkins
from PySide import QtGui
from pyjenkins.Event import Event
from trayjenkins.status.interfaces import IView
class TrayIconView(IView):
def __init__(self, parentWidget, delayInSecons):
"""
@type parentWidget: QtGui.QWidget
"""
self._statusRefreshEvent= Event()
self._delayInSeconds= delayInSecons
self._trayIcon= QtGui.QSystemTrayIcon(parentWidget)
self._icons= {}
self._icons[trayjenkins.status.FAILING]= QtGui.QIcon('images/status/failing.png')
self._icons[trayjenkins.status.OK]= QtGui.QIcon('images/status/ok.png')
self._icons[trayjenkins.status.UNKNOWN]= QtGui.QIcon('images/status/unknown.png')
self.setStatus(trayjenkins.status.UNKNOWN)
self._trayIcon.show()
def statusRefreshEvent(self):
"""
Event arguments: <none>
@rtype: pyjenkins.interfaces.IEvent
"""
return self._statusRefreshEvent
def setStatus(self, status):
"""
@type status: str
"""
self._trayIcon.setIcon(self._icons[status])
self._trayIcon.setToolTip(status.capitalize())
self._trayIcon.showMessage(unicode("Jenkins status change"),
unicode("Status: %s" % status.capitalize()),
QtGui.QSystemTrayIcon.Information,# icon,
self._delayInSeconds * 1000)
| Set icon before calling show() to avoid warning. | Set icon before calling show() to avoid warning.
| Python | mit | coolhandmook/trayjenkins,coolhandmook/trayjenkins | import trayjenkins
from PySide import QtGui
from pyjenkins.Event import Event
from trayjenkins.status.interfaces import IView
class TrayIconView(IView):
def __init__(self, parentWidget, delayInSecons):
"""
@type parentWidget: QtGui.QWidget
"""
self._statusRefreshEvent= Event()
self._delayInSeconds= delayInSecons
self._trayIcon= QtGui.QSystemTrayIcon(parentWidget)
self._trayIcon.show()
self._icons= {}
self._icons[trayjenkins.status.FAILING]= QtGui.QIcon('images/status/failing.png')
self._icons[trayjenkins.status.OK]= QtGui.QIcon('images/status/ok.png')
self._icons[trayjenkins.status.UNKNOWN]= QtGui.QIcon('images/status/unknown.png')
self.setStatus(trayjenkins.status.UNKNOWN)
def statusRefreshEvent(self):
"""
Event arguments: <none>
@rtype: pyjenkins.interfaces.IEvent
"""
return self._statusRefreshEvent
def setStatus(self, status):
"""
@type status: str
"""
self._trayIcon.setIcon(self._icons[status])
self._trayIcon.setToolTip(status.capitalize())
self._trayIcon.showMessage(unicode("Jenkins status change"),
unicode("Status: %s" % status.capitalize()),
QtGui.QSystemTrayIcon.Information,# icon,
self._delayInSeconds * 1000)
Set icon before calling show() to avoid warning. | import trayjenkins
from PySide import QtGui
from pyjenkins.Event import Event
from trayjenkins.status.interfaces import IView
class TrayIconView(IView):
def __init__(self, parentWidget, delayInSecons):
"""
@type parentWidget: QtGui.QWidget
"""
self._statusRefreshEvent= Event()
self._delayInSeconds= delayInSecons
self._trayIcon= QtGui.QSystemTrayIcon(parentWidget)
self._icons= {}
self._icons[trayjenkins.status.FAILING]= QtGui.QIcon('images/status/failing.png')
self._icons[trayjenkins.status.OK]= QtGui.QIcon('images/status/ok.png')
self._icons[trayjenkins.status.UNKNOWN]= QtGui.QIcon('images/status/unknown.png')
self.setStatus(trayjenkins.status.UNKNOWN)
self._trayIcon.show()
def statusRefreshEvent(self):
"""
Event arguments: <none>
@rtype: pyjenkins.interfaces.IEvent
"""
return self._statusRefreshEvent
def setStatus(self, status):
"""
@type status: str
"""
self._trayIcon.setIcon(self._icons[status])
self._trayIcon.setToolTip(status.capitalize())
self._trayIcon.showMessage(unicode("Jenkins status change"),
unicode("Status: %s" % status.capitalize()),
QtGui.QSystemTrayIcon.Information,# icon,
self._delayInSeconds * 1000)
| <commit_before>import trayjenkins
from PySide import QtGui
from pyjenkins.Event import Event
from trayjenkins.status.interfaces import IView
class TrayIconView(IView):
def __init__(self, parentWidget, delayInSecons):
"""
@type parentWidget: QtGui.QWidget
"""
self._statusRefreshEvent= Event()
self._delayInSeconds= delayInSecons
self._trayIcon= QtGui.QSystemTrayIcon(parentWidget)
self._trayIcon.show()
self._icons= {}
self._icons[trayjenkins.status.FAILING]= QtGui.QIcon('images/status/failing.png')
self._icons[trayjenkins.status.OK]= QtGui.QIcon('images/status/ok.png')
self._icons[trayjenkins.status.UNKNOWN]= QtGui.QIcon('images/status/unknown.png')
self.setStatus(trayjenkins.status.UNKNOWN)
def statusRefreshEvent(self):
"""
Event arguments: <none>
@rtype: pyjenkins.interfaces.IEvent
"""
return self._statusRefreshEvent
def setStatus(self, status):
"""
@type status: str
"""
self._trayIcon.setIcon(self._icons[status])
self._trayIcon.setToolTip(status.capitalize())
self._trayIcon.showMessage(unicode("Jenkins status change"),
unicode("Status: %s" % status.capitalize()),
QtGui.QSystemTrayIcon.Information,# icon,
self._delayInSeconds * 1000)
<commit_msg>Set icon before calling show() to avoid warning.<commit_after> | import trayjenkins
from PySide import QtGui
from pyjenkins.Event import Event
from trayjenkins.status.interfaces import IView
class TrayIconView(IView):
def __init__(self, parentWidget, delayInSecons):
"""
@type parentWidget: QtGui.QWidget
"""
self._statusRefreshEvent= Event()
self._delayInSeconds= delayInSecons
self._trayIcon= QtGui.QSystemTrayIcon(parentWidget)
self._icons= {}
self._icons[trayjenkins.status.FAILING]= QtGui.QIcon('images/status/failing.png')
self._icons[trayjenkins.status.OK]= QtGui.QIcon('images/status/ok.png')
self._icons[trayjenkins.status.UNKNOWN]= QtGui.QIcon('images/status/unknown.png')
self.setStatus(trayjenkins.status.UNKNOWN)
self._trayIcon.show()
def statusRefreshEvent(self):
"""
Event arguments: <none>
@rtype: pyjenkins.interfaces.IEvent
"""
return self._statusRefreshEvent
def setStatus(self, status):
"""
@type status: str
"""
self._trayIcon.setIcon(self._icons[status])
self._trayIcon.setToolTip(status.capitalize())
self._trayIcon.showMessage(unicode("Jenkins status change"),
unicode("Status: %s" % status.capitalize()),
QtGui.QSystemTrayIcon.Information,# icon,
self._delayInSeconds * 1000)
| import trayjenkins
from PySide import QtGui
from pyjenkins.Event import Event
from trayjenkins.status.interfaces import IView
class TrayIconView(IView):
def __init__(self, parentWidget, delayInSecons):
"""
@type parentWidget: QtGui.QWidget
"""
self._statusRefreshEvent= Event()
self._delayInSeconds= delayInSecons
self._trayIcon= QtGui.QSystemTrayIcon(parentWidget)
self._trayIcon.show()
self._icons= {}
self._icons[trayjenkins.status.FAILING]= QtGui.QIcon('images/status/failing.png')
self._icons[trayjenkins.status.OK]= QtGui.QIcon('images/status/ok.png')
self._icons[trayjenkins.status.UNKNOWN]= QtGui.QIcon('images/status/unknown.png')
self.setStatus(trayjenkins.status.UNKNOWN)
def statusRefreshEvent(self):
"""
Event arguments: <none>
@rtype: pyjenkins.interfaces.IEvent
"""
return self._statusRefreshEvent
def setStatus(self, status):
"""
@type status: str
"""
self._trayIcon.setIcon(self._icons[status])
self._trayIcon.setToolTip(status.capitalize())
self._trayIcon.showMessage(unicode("Jenkins status change"),
unicode("Status: %s" % status.capitalize()),
QtGui.QSystemTrayIcon.Information,# icon,
self._delayInSeconds * 1000)
Set icon before calling show() to avoid warning.import trayjenkins
from PySide import QtGui
from pyjenkins.Event import Event
from trayjenkins.status.interfaces import IView
class TrayIconView(IView):
def __init__(self, parentWidget, delayInSecons):
"""
@type parentWidget: QtGui.QWidget
"""
self._statusRefreshEvent= Event()
self._delayInSeconds= delayInSecons
self._trayIcon= QtGui.QSystemTrayIcon(parentWidget)
self._icons= {}
self._icons[trayjenkins.status.FAILING]= QtGui.QIcon('images/status/failing.png')
self._icons[trayjenkins.status.OK]= QtGui.QIcon('images/status/ok.png')
self._icons[trayjenkins.status.UNKNOWN]= QtGui.QIcon('images/status/unknown.png')
self.setStatus(trayjenkins.status.UNKNOWN)
self._trayIcon.show()
def statusRefreshEvent(self):
"""
Event arguments: <none>
@rtype: pyjenkins.interfaces.IEvent
"""
return self._statusRefreshEvent
def setStatus(self, status):
"""
@type status: str
"""
self._trayIcon.setIcon(self._icons[status])
self._trayIcon.setToolTip(status.capitalize())
self._trayIcon.showMessage(unicode("Jenkins status change"),
unicode("Status: %s" % status.capitalize()),
QtGui.QSystemTrayIcon.Information,# icon,
self._delayInSeconds * 1000)
| <commit_before>import trayjenkins
from PySide import QtGui
from pyjenkins.Event import Event
from trayjenkins.status.interfaces import IView
class TrayIconView(IView):
def __init__(self, parentWidget, delayInSecons):
"""
@type parentWidget: QtGui.QWidget
"""
self._statusRefreshEvent= Event()
self._delayInSeconds= delayInSecons
self._trayIcon= QtGui.QSystemTrayIcon(parentWidget)
self._trayIcon.show()
self._icons= {}
self._icons[trayjenkins.status.FAILING]= QtGui.QIcon('images/status/failing.png')
self._icons[trayjenkins.status.OK]= QtGui.QIcon('images/status/ok.png')
self._icons[trayjenkins.status.UNKNOWN]= QtGui.QIcon('images/status/unknown.png')
self.setStatus(trayjenkins.status.UNKNOWN)
def statusRefreshEvent(self):
"""
Event arguments: <none>
@rtype: pyjenkins.interfaces.IEvent
"""
return self._statusRefreshEvent
def setStatus(self, status):
"""
@type status: str
"""
self._trayIcon.setIcon(self._icons[status])
self._trayIcon.setToolTip(status.capitalize())
self._trayIcon.showMessage(unicode("Jenkins status change"),
unicode("Status: %s" % status.capitalize()),
QtGui.QSystemTrayIcon.Information,# icon,
self._delayInSeconds * 1000)
<commit_msg>Set icon before calling show() to avoid warning.<commit_after>import trayjenkins
from PySide import QtGui
from pyjenkins.Event import Event
from trayjenkins.status.interfaces import IView
class TrayIconView(IView):
def __init__(self, parentWidget, delayInSecons):
"""
@type parentWidget: QtGui.QWidget
"""
self._statusRefreshEvent= Event()
self._delayInSeconds= delayInSecons
self._trayIcon= QtGui.QSystemTrayIcon(parentWidget)
self._icons= {}
self._icons[trayjenkins.status.FAILING]= QtGui.QIcon('images/status/failing.png')
self._icons[trayjenkins.status.OK]= QtGui.QIcon('images/status/ok.png')
self._icons[trayjenkins.status.UNKNOWN]= QtGui.QIcon('images/status/unknown.png')
self.setStatus(trayjenkins.status.UNKNOWN)
self._trayIcon.show()
def statusRefreshEvent(self):
"""
Event arguments: <none>
@rtype: pyjenkins.interfaces.IEvent
"""
return self._statusRefreshEvent
def setStatus(self, status):
"""
@type status: str
"""
self._trayIcon.setIcon(self._icons[status])
self._trayIcon.setToolTip(status.capitalize())
self._trayIcon.showMessage(unicode("Jenkins status change"),
unicode("Status: %s" % status.capitalize()),
QtGui.QSystemTrayIcon.Information,# icon,
self._delayInSeconds * 1000)
|
27a1d78611cef1ab23044db22bd4bf7c61582efe | src/data/Track/UploadHandlers/YoutubeUploadHandler.py | src/data/Track/UploadHandlers/YoutubeUploadHandler.py | import os
from data.Track.UploadHandler import UploadHandler
from src.data.Track.Tracks import YoutubeTrack
class YoutubeUploadHandler(UploadHandler):
def __init__(self, workingDir):
super().__init__(workingDir)
self.attributes.update({
"URL": ["string", "required", "url"]
})
def trackFromUploadedAttributes(self, attributes):
track = YoutubeTrack(
attributes["Artist"],
attributes["Album"],
attributes["Title"]
)
del attributes["Artist"]
del attributes["Album"]
del attributes["Title"]
super().autoImportAttributes(track, attributes)
super().writeTrackRecord(track)
artistPath = os.path.join(self.workingDir, track.artistName)
albumPath = os.path.join(artistPath, track.albumTitle)
recordPath = os.path.join(albumPath, track.title) + ".rec"
localFilePath = os.path.join(recordPath, "muzak.yturl")
fileToWrite = open(localFilePath, 'w+')
fileToWrite.write(track.url)
fileToWrite.close()
return track
| import os
from src.data.Track import UploadHandler
from src.data.Track.Tracks import YoutubeTrack
class YoutubeUploadHandler(UploadHandler):
def __init__(self, workingDir):
super().__init__(workingDir)
self.attributes.update({
"URL": ["string", "required", "url"]
})
def trackFromUploadedAttributes(self, attributes):
track = YoutubeTrack(
attributes["Artist"],
attributes["Album"],
attributes["Title"]
)
del attributes["Artist"]
del attributes["Album"]
del attributes["Title"]
super().autoImportAttributes(track, attributes)
super().writeTrackRecord(track)
artistPath = os.path.join(self.workingDir, track.artistName)
albumPath = os.path.join(artistPath, track.albumTitle)
recordPath = os.path.join(albumPath, track.title) + ".rec"
localFilePath = os.path.join(recordPath, "muzak.yturl")
fileToWrite = open(localFilePath, 'w+')
fileToWrite.write(track.url)
fileToWrite.close()
return track
| Fix wrong import from UploadHandler | Fix wrong import from UploadHandler
| Python | agpl-3.0 | Pynitus-Universe/Pynitus-Backend,Pynitus-Universe/Pynitus-Backend,Pynitus-Universe/Pynitus,Pynitus-Universe/Pynitus | import os
from data.Track.UploadHandler import UploadHandler
from src.data.Track.Tracks import YoutubeTrack
class YoutubeUploadHandler(UploadHandler):
def __init__(self, workingDir):
super().__init__(workingDir)
self.attributes.update({
"URL": ["string", "required", "url"]
})
def trackFromUploadedAttributes(self, attributes):
track = YoutubeTrack(
attributes["Artist"],
attributes["Album"],
attributes["Title"]
)
del attributes["Artist"]
del attributes["Album"]
del attributes["Title"]
super().autoImportAttributes(track, attributes)
super().writeTrackRecord(track)
artistPath = os.path.join(self.workingDir, track.artistName)
albumPath = os.path.join(artistPath, track.albumTitle)
recordPath = os.path.join(albumPath, track.title) + ".rec"
localFilePath = os.path.join(recordPath, "muzak.yturl")
fileToWrite = open(localFilePath, 'w+')
fileToWrite.write(track.url)
fileToWrite.close()
return track
Fix wrong import from UploadHandler | import os
from src.data.Track import UploadHandler
from src.data.Track.Tracks import YoutubeTrack
class YoutubeUploadHandler(UploadHandler):
def __init__(self, workingDir):
super().__init__(workingDir)
self.attributes.update({
"URL": ["string", "required", "url"]
})
def trackFromUploadedAttributes(self, attributes):
track = YoutubeTrack(
attributes["Artist"],
attributes["Album"],
attributes["Title"]
)
del attributes["Artist"]
del attributes["Album"]
del attributes["Title"]
super().autoImportAttributes(track, attributes)
super().writeTrackRecord(track)
artistPath = os.path.join(self.workingDir, track.artistName)
albumPath = os.path.join(artistPath, track.albumTitle)
recordPath = os.path.join(albumPath, track.title) + ".rec"
localFilePath = os.path.join(recordPath, "muzak.yturl")
fileToWrite = open(localFilePath, 'w+')
fileToWrite.write(track.url)
fileToWrite.close()
return track
| <commit_before>import os
from data.Track.UploadHandler import UploadHandler
from src.data.Track.Tracks import YoutubeTrack
class YoutubeUploadHandler(UploadHandler):
def __init__(self, workingDir):
super().__init__(workingDir)
self.attributes.update({
"URL": ["string", "required", "url"]
})
def trackFromUploadedAttributes(self, attributes):
track = YoutubeTrack(
attributes["Artist"],
attributes["Album"],
attributes["Title"]
)
del attributes["Artist"]
del attributes["Album"]
del attributes["Title"]
super().autoImportAttributes(track, attributes)
super().writeTrackRecord(track)
artistPath = os.path.join(self.workingDir, track.artistName)
albumPath = os.path.join(artistPath, track.albumTitle)
recordPath = os.path.join(albumPath, track.title) + ".rec"
localFilePath = os.path.join(recordPath, "muzak.yturl")
fileToWrite = open(localFilePath, 'w+')
fileToWrite.write(track.url)
fileToWrite.close()
return track
<commit_msg>Fix wrong import from UploadHandler<commit_after> | import os
from src.data.Track import UploadHandler
from src.data.Track.Tracks import YoutubeTrack
class YoutubeUploadHandler(UploadHandler):
def __init__(self, workingDir):
super().__init__(workingDir)
self.attributes.update({
"URL": ["string", "required", "url"]
})
def trackFromUploadedAttributes(self, attributes):
track = YoutubeTrack(
attributes["Artist"],
attributes["Album"],
attributes["Title"]
)
del attributes["Artist"]
del attributes["Album"]
del attributes["Title"]
super().autoImportAttributes(track, attributes)
super().writeTrackRecord(track)
artistPath = os.path.join(self.workingDir, track.artistName)
albumPath = os.path.join(artistPath, track.albumTitle)
recordPath = os.path.join(albumPath, track.title) + ".rec"
localFilePath = os.path.join(recordPath, "muzak.yturl")
fileToWrite = open(localFilePath, 'w+')
fileToWrite.write(track.url)
fileToWrite.close()
return track
| import os
from data.Track.UploadHandler import UploadHandler
from src.data.Track.Tracks import YoutubeTrack
class YoutubeUploadHandler(UploadHandler):
def __init__(self, workingDir):
super().__init__(workingDir)
self.attributes.update({
"URL": ["string", "required", "url"]
})
def trackFromUploadedAttributes(self, attributes):
track = YoutubeTrack(
attributes["Artist"],
attributes["Album"],
attributes["Title"]
)
del attributes["Artist"]
del attributes["Album"]
del attributes["Title"]
super().autoImportAttributes(track, attributes)
super().writeTrackRecord(track)
artistPath = os.path.join(self.workingDir, track.artistName)
albumPath = os.path.join(artistPath, track.albumTitle)
recordPath = os.path.join(albumPath, track.title) + ".rec"
localFilePath = os.path.join(recordPath, "muzak.yturl")
fileToWrite = open(localFilePath, 'w+')
fileToWrite.write(track.url)
fileToWrite.close()
return track
Fix wrong import from UploadHandlerimport os
from src.data.Track import UploadHandler
from src.data.Track.Tracks import YoutubeTrack
class YoutubeUploadHandler(UploadHandler):
def __init__(self, workingDir):
super().__init__(workingDir)
self.attributes.update({
"URL": ["string", "required", "url"]
})
def trackFromUploadedAttributes(self, attributes):
track = YoutubeTrack(
attributes["Artist"],
attributes["Album"],
attributes["Title"]
)
del attributes["Artist"]
del attributes["Album"]
del attributes["Title"]
super().autoImportAttributes(track, attributes)
super().writeTrackRecord(track)
artistPath = os.path.join(self.workingDir, track.artistName)
albumPath = os.path.join(artistPath, track.albumTitle)
recordPath = os.path.join(albumPath, track.title) + ".rec"
localFilePath = os.path.join(recordPath, "muzak.yturl")
fileToWrite = open(localFilePath, 'w+')
fileToWrite.write(track.url)
fileToWrite.close()
return track
| <commit_before>import os
from data.Track.UploadHandler import UploadHandler
from src.data.Track.Tracks import YoutubeTrack
class YoutubeUploadHandler(UploadHandler):
def __init__(self, workingDir):
super().__init__(workingDir)
self.attributes.update({
"URL": ["string", "required", "url"]
})
def trackFromUploadedAttributes(self, attributes):
track = YoutubeTrack(
attributes["Artist"],
attributes["Album"],
attributes["Title"]
)
del attributes["Artist"]
del attributes["Album"]
del attributes["Title"]
super().autoImportAttributes(track, attributes)
super().writeTrackRecord(track)
artistPath = os.path.join(self.workingDir, track.artistName)
albumPath = os.path.join(artistPath, track.albumTitle)
recordPath = os.path.join(albumPath, track.title) + ".rec"
localFilePath = os.path.join(recordPath, "muzak.yturl")
fileToWrite = open(localFilePath, 'w+')
fileToWrite.write(track.url)
fileToWrite.close()
return track
<commit_msg>Fix wrong import from UploadHandler<commit_after>import os
from src.data.Track import UploadHandler
from src.data.Track.Tracks import YoutubeTrack
class YoutubeUploadHandler(UploadHandler):
def __init__(self, workingDir):
super().__init__(workingDir)
self.attributes.update({
"URL": ["string", "required", "url"]
})
def trackFromUploadedAttributes(self, attributes):
track = YoutubeTrack(
attributes["Artist"],
attributes["Album"],
attributes["Title"]
)
del attributes["Artist"]
del attributes["Album"]
del attributes["Title"]
super().autoImportAttributes(track, attributes)
super().writeTrackRecord(track)
artistPath = os.path.join(self.workingDir, track.artistName)
albumPath = os.path.join(artistPath, track.albumTitle)
recordPath = os.path.join(albumPath, track.title) + ".rec"
localFilePath = os.path.join(recordPath, "muzak.yturl")
fileToWrite = open(localFilePath, 'w+')
fileToWrite.write(track.url)
fileToWrite.close()
return track
|
cf94fb86cab2fc892b762b66b760a80ed268e8b3 | social/accounts/__init__.py | social/accounts/__init__.py | """Import and register all account types."""
from abc import ABC, abstractmethod
class Account(ABC):
@abstractmethod
def __init__(self, *breadcrumbs):
"""
Return an Account object corresponding to the breadcrumbs.
This should only be called if "match" returned truthy about matching the
breadcrumbs. Otherwise, you're just mean.
"""
pass
@staticmethod
@abstractmethod
def match(*breadcrumbs):
"""
Return truthy if the breadcrumbs match the account.
The breadcrumbs are described below, but match functions should be
written to gracefully accept more or less keys in the breadcrumbs.
:param dict breadcrumbs: Dictionary containing at least one of the
following breadcrumbs:
- url: A URL that probably points to their profile.
- email: An email that could be used to find the profile.
- username: A username for the account.
"""
pass
@abstractmethod
def expand(self, info):
"""
Return an iterable of breadcrumb structs!
:param info: A dictionary that should contain information about the
person. It should be updated with any information you come across,
and you may want to use any info in it to help narrow down your
search.
"""
pass
| """Import and register all account types."""
from abc import ABC, abstractmethod
__all__ = ['github']
class Account(ABC):
@abstractmethod
def __init__(self, *breadcrumbs):
"""
Return an Account object corresponding to the breadcrumbs.
This should only be called if "match" returned truthy about matching the
breadcrumbs. Otherwise, you're just mean.
"""
pass
@staticmethod
@abstractmethod
def match(*breadcrumbs):
"""
Return truthy if the breadcrumbs match the account.
The breadcrumbs are described below, but match functions should be
written to gracefully accept more or less keys in the breadcrumbs.
:param dict breadcrumbs: Dictionary containing at least one of the
following breadcrumbs:
- url: A URL that probably points to their profile.
- email: An email that could be used to find the profile.
- username: A username for the account.
"""
pass
@abstractmethod
def expand(self, info):
"""
Return an iterable of breadcrumb structs!
:param info: A dictionary that should contain information about the
person. It should be updated with any information you come across,
and you may want to use any info in it to help narrow down your
search.
"""
pass
| Add github to the accounts package. | Add github to the accounts package.
| Python | bsd-3-clause | brenns10/social,brenns10/social | """Import and register all account types."""
from abc import ABC, abstractmethod
class Account(ABC):
@abstractmethod
def __init__(self, *breadcrumbs):
"""
Return an Account object corresponding to the breadcrumbs.
This should only be called if "match" returned truthy about matching the
breadcrumbs. Otherwise, you're just mean.
"""
pass
@staticmethod
@abstractmethod
def match(*breadcrumbs):
"""
Return truthy if the breadcrumbs match the account.
The breadcrumbs are described below, but match functions should be
written to gracefully accept more or less keys in the breadcrumbs.
:param dict breadcrumbs: Dictionary containing at least one of the
following breadcrumbs:
- url: A URL that probably points to their profile.
- email: An email that could be used to find the profile.
- username: A username for the account.
"""
pass
@abstractmethod
def expand(self, info):
"""
Return an iterable of breadcrumb structs!
:param info: A dictionary that should contain information about the
person. It should be updated with any information you come across,
and you may want to use any info in it to help narrow down your
search.
"""
pass
Add github to the accounts package. | """Import and register all account types."""
from abc import ABC, abstractmethod
__all__ = ['github']
class Account(ABC):
@abstractmethod
def __init__(self, *breadcrumbs):
"""
Return an Account object corresponding to the breadcrumbs.
This should only be called if "match" returned truthy about matching the
breadcrumbs. Otherwise, you're just mean.
"""
pass
@staticmethod
@abstractmethod
def match(*breadcrumbs):
"""
Return truthy if the breadcrumbs match the account.
The breadcrumbs are described below, but match functions should be
written to gracefully accept more or less keys in the breadcrumbs.
:param dict breadcrumbs: Dictionary containing at least one of the
following breadcrumbs:
- url: A URL that probably points to their profile.
- email: An email that could be used to find the profile.
- username: A username for the account.
"""
pass
@abstractmethod
def expand(self, info):
"""
Return an iterable of breadcrumb structs!
:param info: A dictionary that should contain information about the
person. It should be updated with any information you come across,
and you may want to use any info in it to help narrow down your
search.
"""
pass
| <commit_before>"""Import and register all account types."""
from abc import ABC, abstractmethod
class Account(ABC):
@abstractmethod
def __init__(self, *breadcrumbs):
"""
Return an Account object corresponding to the breadcrumbs.
This should only be called if "match" returned truthy about matching the
breadcrumbs. Otherwise, you're just mean.
"""
pass
@staticmethod
@abstractmethod
def match(*breadcrumbs):
"""
Return truthy if the breadcrumbs match the account.
The breadcrumbs are described below, but match functions should be
written to gracefully accept more or less keys in the breadcrumbs.
:param dict breadcrumbs: Dictionary containing at least one of the
following breadcrumbs:
- url: A URL that probably points to their profile.
- email: An email that could be used to find the profile.
- username: A username for the account.
"""
pass
@abstractmethod
def expand(self, info):
"""
Return an iterable of breadcrumb structs!
:param info: A dictionary that should contain information about the
person. It should be updated with any information you come across,
and you may want to use any info in it to help narrow down your
search.
"""
pass
<commit_msg>Add github to the accounts package.<commit_after> | """Import and register all account types."""
from abc import ABC, abstractmethod
__all__ = ['github']
class Account(ABC):
@abstractmethod
def __init__(self, *breadcrumbs):
"""
Return an Account object corresponding to the breadcrumbs.
This should only be called if "match" returned truthy about matching the
breadcrumbs. Otherwise, you're just mean.
"""
pass
@staticmethod
@abstractmethod
def match(*breadcrumbs):
"""
Return truthy if the breadcrumbs match the account.
The breadcrumbs are described below, but match functions should be
written to gracefully accept more or less keys in the breadcrumbs.
:param dict breadcrumbs: Dictionary containing at least one of the
following breadcrumbs:
- url: A URL that probably points to their profile.
- email: An email that could be used to find the profile.
- username: A username for the account.
"""
pass
@abstractmethod
def expand(self, info):
"""
Return an iterable of breadcrumb structs!
:param info: A dictionary that should contain information about the
person. It should be updated with any information you come across,
and you may want to use any info in it to help narrow down your
search.
"""
pass
| """Import and register all account types."""
from abc import ABC, abstractmethod
class Account(ABC):
@abstractmethod
def __init__(self, *breadcrumbs):
"""
Return an Account object corresponding to the breadcrumbs.
This should only be called if "match" returned truthy about matching the
breadcrumbs. Otherwise, you're just mean.
"""
pass
@staticmethod
@abstractmethod
def match(*breadcrumbs):
"""
Return truthy if the breadcrumbs match the account.
The breadcrumbs are described below, but match functions should be
written to gracefully accept more or less keys in the breadcrumbs.
:param dict breadcrumbs: Dictionary containing at least one of the
following breadcrumbs:
- url: A URL that probably points to their profile.
- email: An email that could be used to find the profile.
- username: A username for the account.
"""
pass
@abstractmethod
def expand(self, info):
"""
Return an iterable of breadcrumb structs!
:param info: A dictionary that should contain information about the
person. It should be updated with any information you come across,
and you may want to use any info in it to help narrow down your
search.
"""
pass
Add github to the accounts package."""Import and register all account types."""
from abc import ABC, abstractmethod
__all__ = ['github']
class Account(ABC):
@abstractmethod
def __init__(self, *breadcrumbs):
"""
Return an Account object corresponding to the breadcrumbs.
This should only be called if "match" returned truthy about matching the
breadcrumbs. Otherwise, you're just mean.
"""
pass
@staticmethod
@abstractmethod
def match(*breadcrumbs):
"""
Return truthy if the breadcrumbs match the account.
The breadcrumbs are described below, but match functions should be
written to gracefully accept more or less keys in the breadcrumbs.
:param dict breadcrumbs: Dictionary containing at least one of the
following breadcrumbs:
- url: A URL that probably points to their profile.
- email: An email that could be used to find the profile.
- username: A username for the account.
"""
pass
@abstractmethod
def expand(self, info):
"""
Return an iterable of breadcrumb structs!
:param info: A dictionary that should contain information about the
person. It should be updated with any information you come across,
and you may want to use any info in it to help narrow down your
search.
"""
pass
| <commit_before>"""Import and register all account types."""
from abc import ABC, abstractmethod
class Account(ABC):
@abstractmethod
def __init__(self, *breadcrumbs):
"""
Return an Account object corresponding to the breadcrumbs.
This should only be called if "match" returned truthy about matching the
breadcrumbs. Otherwise, you're just mean.
"""
pass
@staticmethod
@abstractmethod
def match(*breadcrumbs):
"""
Return truthy if the breadcrumbs match the account.
The breadcrumbs are described below, but match functions should be
written to gracefully accept more or less keys in the breadcrumbs.
:param dict breadcrumbs: Dictionary containing at least one of the
following breadcrumbs:
- url: A URL that probably points to their profile.
- email: An email that could be used to find the profile.
- username: A username for the account.
"""
pass
@abstractmethod
def expand(self, info):
"""
Return an iterable of breadcrumb structs!
:param info: A dictionary that should contain information about the
person. It should be updated with any information you come across,
and you may want to use any info in it to help narrow down your
search.
"""
pass
<commit_msg>Add github to the accounts package.<commit_after>"""Import and register all account types."""
from abc import ABC, abstractmethod
__all__ = ['github']
class Account(ABC):
@abstractmethod
def __init__(self, *breadcrumbs):
"""
Return an Account object corresponding to the breadcrumbs.
This should only be called if "match" returned truthy about matching the
breadcrumbs. Otherwise, you're just mean.
"""
pass
@staticmethod
@abstractmethod
def match(*breadcrumbs):
"""
Return truthy if the breadcrumbs match the account.
The breadcrumbs are described below, but match functions should be
written to gracefully accept more or less keys in the breadcrumbs.
:param dict breadcrumbs: Dictionary containing at least one of the
following breadcrumbs:
- url: A URL that probably points to their profile.
- email: An email that could be used to find the profile.
- username: A username for the account.
"""
pass
@abstractmethod
def expand(self, info):
"""
Return an iterable of breadcrumb structs!
:param info: A dictionary that should contain information about the
person. It should be updated with any information you come across,
and you may want to use any info in it to help narrow down your
search.
"""
pass
|
504bd5d8bb7ec63747318d16f90d24930e640fc6 | ipython_notebook_config.py | ipython_notebook_config.py | # See http://ipython.org/ipython-doc/1/interactive/public_server.html for more information.
# Configuration file for ipython-notebook.
c = get_config()
c.NotebookApp.ip = '0.0.0.0'
c.NotebookApp.port = 6789
c.NotebookApp.open_browser = False
c.NotebookApp.profile = u'default'
import yaml
with open('/import/conf.yaml','r') as handle:
conf = yaml.load(handle)
c.NotebookApp.base_url = '/ipython/%d/' % conf['docker_port']
c.NotebookApp.webapp_settings = {'static_url_prefix':'/ipython/%d/static/' % conf['docker_port']}
| # See http://ipython.org/ipython-doc/1/interactive/public_server.html for more information.
# Configuration file for ipython-notebook.
c = get_config()
c.NotebookApp.ip = '0.0.0.0'
c.NotebookApp.port = 6789
c.NotebookApp.open_browser = False
c.NotebookApp.profile = u'default'
import os
import yaml
config_file_path = '/import/conf.yaml'
# In case this Notebook was launched from Galaxy a config file exists in /import/
# For standalone usage we fall back to a port-less URL
if os.path.exists( config_file_path ):
with open( config_file_path ,'r') as handle:
conf = yaml.load(handle)
c.NotebookApp.base_url = '/ipython/%d/' % conf['docker_port']
c.NotebookApp.webapp_settings = {'static_url_prefix':'/ipython/%d/static/' % conf['docker_port']}
else:
c.NotebookApp.base_url = '/ipython/'
c.NotebookApp.webapp_settings = {'static_url_prefix':'/ipython/static/'}
| Implement fallback mode to make the image unsable without Galaxy | Implement fallback mode to make the image unsable without Galaxy
| Python | mit | bgruening/docker-jupyter-notebook,bgruening/docker-jupyter-notebook,bgruening/docker-ipython-notebook,bgruening/docker-ipython-notebook,bgruening/docker-jupyter-notebook,bgruening/docker-ipython-notebook | # See http://ipython.org/ipython-doc/1/interactive/public_server.html for more information.
# Configuration file for ipython-notebook.
c = get_config()
c.NotebookApp.ip = '0.0.0.0'
c.NotebookApp.port = 6789
c.NotebookApp.open_browser = False
c.NotebookApp.profile = u'default'
import yaml
with open('/import/conf.yaml','r') as handle:
conf = yaml.load(handle)
c.NotebookApp.base_url = '/ipython/%d/' % conf['docker_port']
c.NotebookApp.webapp_settings = {'static_url_prefix':'/ipython/%d/static/' % conf['docker_port']}
Implement fallback mode to make the image unsable without Galaxy | # See http://ipython.org/ipython-doc/1/interactive/public_server.html for more information.
# Configuration file for ipython-notebook.
c = get_config()
c.NotebookApp.ip = '0.0.0.0'
c.NotebookApp.port = 6789
c.NotebookApp.open_browser = False
c.NotebookApp.profile = u'default'
import os
import yaml
config_file_path = '/import/conf.yaml'
# In case this Notebook was launched from Galaxy a config file exists in /import/
# For standalone usage we fall back to a port-less URL
if os.path.exists( config_file_path ):
with open( config_file_path ,'r') as handle:
conf = yaml.load(handle)
c.NotebookApp.base_url = '/ipython/%d/' % conf['docker_port']
c.NotebookApp.webapp_settings = {'static_url_prefix':'/ipython/%d/static/' % conf['docker_port']}
else:
c.NotebookApp.base_url = '/ipython/'
c.NotebookApp.webapp_settings = {'static_url_prefix':'/ipython/static/'}
| <commit_before># See http://ipython.org/ipython-doc/1/interactive/public_server.html for more information.
# Configuration file for ipython-notebook.
c = get_config()
c.NotebookApp.ip = '0.0.0.0'
c.NotebookApp.port = 6789
c.NotebookApp.open_browser = False
c.NotebookApp.profile = u'default'
import yaml
with open('/import/conf.yaml','r') as handle:
conf = yaml.load(handle)
c.NotebookApp.base_url = '/ipython/%d/' % conf['docker_port']
c.NotebookApp.webapp_settings = {'static_url_prefix':'/ipython/%d/static/' % conf['docker_port']}
<commit_msg>Implement fallback mode to make the image unsable without Galaxy<commit_after> | # See http://ipython.org/ipython-doc/1/interactive/public_server.html for more information.
# Configuration file for ipython-notebook.
c = get_config()
c.NotebookApp.ip = '0.0.0.0'
c.NotebookApp.port = 6789
c.NotebookApp.open_browser = False
c.NotebookApp.profile = u'default'
import os
import yaml
config_file_path = '/import/conf.yaml'
# In case this Notebook was launched from Galaxy a config file exists in /import/
# For standalone usage we fall back to a port-less URL
if os.path.exists( config_file_path ):
with open( config_file_path ,'r') as handle:
conf = yaml.load(handle)
c.NotebookApp.base_url = '/ipython/%d/' % conf['docker_port']
c.NotebookApp.webapp_settings = {'static_url_prefix':'/ipython/%d/static/' % conf['docker_port']}
else:
c.NotebookApp.base_url = '/ipython/'
c.NotebookApp.webapp_settings = {'static_url_prefix':'/ipython/static/'}
| # See http://ipython.org/ipython-doc/1/interactive/public_server.html for more information.
# Configuration file for ipython-notebook.
c = get_config()
c.NotebookApp.ip = '0.0.0.0'
c.NotebookApp.port = 6789
c.NotebookApp.open_browser = False
c.NotebookApp.profile = u'default'
import yaml
with open('/import/conf.yaml','r') as handle:
conf = yaml.load(handle)
c.NotebookApp.base_url = '/ipython/%d/' % conf['docker_port']
c.NotebookApp.webapp_settings = {'static_url_prefix':'/ipython/%d/static/' % conf['docker_port']}
Implement fallback mode to make the image unsable without Galaxy# See http://ipython.org/ipython-doc/1/interactive/public_server.html for more information.
# Configuration file for ipython-notebook.
c = get_config()
c.NotebookApp.ip = '0.0.0.0'
c.NotebookApp.port = 6789
c.NotebookApp.open_browser = False
c.NotebookApp.profile = u'default'
import os
import yaml
config_file_path = '/import/conf.yaml'
# In case this Notebook was launched from Galaxy a config file exists in /import/
# For standalone usage we fall back to a port-less URL
if os.path.exists( config_file_path ):
with open( config_file_path ,'r') as handle:
conf = yaml.load(handle)
c.NotebookApp.base_url = '/ipython/%d/' % conf['docker_port']
c.NotebookApp.webapp_settings = {'static_url_prefix':'/ipython/%d/static/' % conf['docker_port']}
else:
c.NotebookApp.base_url = '/ipython/'
c.NotebookApp.webapp_settings = {'static_url_prefix':'/ipython/static/'}
| <commit_before># See http://ipython.org/ipython-doc/1/interactive/public_server.html for more information.
# Configuration file for ipython-notebook.
c = get_config()
c.NotebookApp.ip = '0.0.0.0'
c.NotebookApp.port = 6789
c.NotebookApp.open_browser = False
c.NotebookApp.profile = u'default'
import yaml
with open('/import/conf.yaml','r') as handle:
conf = yaml.load(handle)
c.NotebookApp.base_url = '/ipython/%d/' % conf['docker_port']
c.NotebookApp.webapp_settings = {'static_url_prefix':'/ipython/%d/static/' % conf['docker_port']}
<commit_msg>Implement fallback mode to make the image unsable without Galaxy<commit_after># See http://ipython.org/ipython-doc/1/interactive/public_server.html for more information.
# Configuration file for ipython-notebook.
c = get_config()
c.NotebookApp.ip = '0.0.0.0'
c.NotebookApp.port = 6789
c.NotebookApp.open_browser = False
c.NotebookApp.profile = u'default'
import os
import yaml
config_file_path = '/import/conf.yaml'
# In case this Notebook was launched from Galaxy a config file exists in /import/
# For standalone usage we fall back to a port-less URL
if os.path.exists( config_file_path ):
with open( config_file_path ,'r') as handle:
conf = yaml.load(handle)
c.NotebookApp.base_url = '/ipython/%d/' % conf['docker_port']
c.NotebookApp.webapp_settings = {'static_url_prefix':'/ipython/%d/static/' % conf['docker_port']}
else:
c.NotebookApp.base_url = '/ipython/'
c.NotebookApp.webapp_settings = {'static_url_prefix':'/ipython/static/'}
|
2e8d7952f4508e1cbf8d5d9b321a15bcd3bcf2ed | pylearn2/packaged_dependencies/theano_linear/util.py | pylearn2/packaged_dependencies/theano_linear/util.py |
_ndarray_status_fmt='%(msg)s shape=%(shape)s min=%(min)f max=%(max)f'
def ndarray_status(x, fmt=_ndarray_status_fmt, msg="", **kwargs):
kwargs.update(dict(
msg=msg,
min=x.min(),
max=x.max(),
mean=x.mean(),
var = x.var(),
shape=x.shape))
return fmt%kwargs
# XXX: copy-paste out of pylearn
try:
from pylearn.io.image_tiling import tile_slices_to_image
except ImportError:
def tile_slices_to_image(*args, **kwargs):
raise NotImplementedError()
|
from imaging import tile_slices_to_image
_ndarray_status_fmt='%(msg)s shape=%(shape)s min=%(min)f max=%(max)f'
def ndarray_status(x, fmt=_ndarray_status_fmt, msg="", **kwargs):
kwargs.update(dict(
msg=msg,
min=x.min(),
max=x.max(),
mean=x.mean(),
var = x.var(),
shape=x.shape))
return fmt%kwargs
| Remove pylearn1 dependency from packaged_dependencies/theano_linear | Remove pylearn1 dependency from packaged_dependencies/theano_linear
| Python | bsd-3-clause | caidongyun/pylearn2,hantek/pylearn2,msingh172/pylearn2,junbochen/pylearn2,sandeepkbhat/pylearn2,pkainz/pylearn2,lunyang/pylearn2,skearnes/pylearn2,cosmoharrigan/pylearn2,lunyang/pylearn2,aalmah/pylearn2,goodfeli/pylearn2,woozzu/pylearn2,mkraemer67/pylearn2,matrogers/pylearn2,goodfeli/pylearn2,skearnes/pylearn2,theoryno3/pylearn2,mclaughlin6464/pylearn2,aalmah/pylearn2,shiquanwang/pylearn2,hyqneuron/pylearn2-maxsom,fulmicoton/pylearn2,matrogers/pylearn2,pombredanne/pylearn2,hyqneuron/pylearn2-maxsom,kastnerkyle/pylearn2,msingh172/pylearn2,alexjc/pylearn2,fulmicoton/pylearn2,woozzu/pylearn2,jeremyfix/pylearn2,bartvm/pylearn2,hantek/pylearn2,lisa-lab/pylearn2,daemonmaker/pylearn2,cosmoharrigan/pylearn2,woozzu/pylearn2,kose-y/pylearn2,JesseLivezey/pylearn2,hyqneuron/pylearn2-maxsom,matrogers/pylearn2,fyffyt/pylearn2,mclaughlin6464/pylearn2,sandeepkbhat/pylearn2,CIFASIS/pylearn2,fulmicoton/pylearn2,KennethPierce/pylearnk,JesseLivezey/plankton,shiquanwang/pylearn2,lunyang/pylearn2,w1kke/pylearn2,shiquanwang/pylearn2,fishcorn/pylearn2,pkainz/pylearn2,jamessergeant/pylearn2,Refefer/pylearn2,daemonmaker/pylearn2,msingh172/pylearn2,hantek/pylearn2,junbochen/pylearn2,abergeron/pylearn2,lancezlin/pylearn2,jeremyfix/pylearn2,pombredanne/pylearn2,bartvm/pylearn2,sandeepkbhat/pylearn2,TNick/pylearn2,KennethPierce/pylearnk,TNick/pylearn2,Refefer/pylearn2,se4u/pylearn2,aalmah/pylearn2,JesseLivezey/plankton,bartvm/pylearn2,alexjc/pylearn2,JesseLivezey/pylearn2,w1kke/pylearn2,junbochen/pylearn2,fyffyt/pylearn2,woozzu/pylearn2,kastnerkyle/pylearn2,kastnerkyle/pylearn2,msingh172/pylearn2,goodfeli/pylearn2,theoryno3/pylearn2,se4u/pylearn2,chrish42/pylearn,ashhher3/pylearn2,pombredanne/pylearn2,abergeron/pylearn2,mclaughlin6464/pylearn2,Refefer/pylearn2,alexjc/pylearn2,KennethPierce/pylearnk,mkraemer67/pylearn2,fulmicoton/pylearn2,pkainz/pylearn2,lunyang/pylearn2,mkraemer67/pylearn2,nouiz/pylearn2,chrish42/pylearn,alexjc/pylearn2,kose-y/pylearn2,ddboline/pylearn2
,lisa-lab/pylearn2,CIFASIS/pylearn2,mclaughlin6464/pylearn2,chrish42/pylearn,fishcorn/pylearn2,lamblin/pylearn2,junbochen/pylearn2,lamblin/pylearn2,shiquanwang/pylearn2,lancezlin/pylearn2,KennethPierce/pylearnk,se4u/pylearn2,ddboline/pylearn2,se4u/pylearn2,fyffyt/pylearn2,theoryno3/pylearn2,daemonmaker/pylearn2,chrish42/pylearn,jamessergeant/pylearn2,ddboline/pylearn2,caidongyun/pylearn2,nouiz/pylearn2,pombredanne/pylearn2,skearnes/pylearn2,JesseLivezey/plankton,lancezlin/pylearn2,goodfeli/pylearn2,ashhher3/pylearn2,fyffyt/pylearn2,CIFASIS/pylearn2,theoryno3/pylearn2,bartvm/pylearn2,CIFASIS/pylearn2,caidongyun/pylearn2,ashhher3/pylearn2,kose-y/pylearn2,ddboline/pylearn2,ashhher3/pylearn2,TNick/pylearn2,matrogers/pylearn2,pkainz/pylearn2,abergeron/pylearn2,lisa-lab/pylearn2,fishcorn/pylearn2,lisa-lab/pylearn2,abergeron/pylearn2,jamessergeant/pylearn2,jeremyfix/pylearn2,JesseLivezey/pylearn2,cosmoharrigan/pylearn2,fishcorn/pylearn2,TNick/pylearn2,w1kke/pylearn2,aalmah/pylearn2,mkraemer67/pylearn2,hantek/pylearn2,jeremyfix/pylearn2,JesseLivezey/pylearn2,hyqneuron/pylearn2-maxsom,kose-y/pylearn2,lamblin/pylearn2,kastnerkyle/pylearn2,lancezlin/pylearn2,nouiz/pylearn2,w1kke/pylearn2,JesseLivezey/plankton,sandeepkbhat/pylearn2,caidongyun/pylearn2,Refefer/pylearn2,skearnes/pylearn2,daemonmaker/pylearn2,jamessergeant/pylearn2,nouiz/pylearn2,lamblin/pylearn2,cosmoharrigan/pylearn2 |
_ndarray_status_fmt='%(msg)s shape=%(shape)s min=%(min)f max=%(max)f'
def ndarray_status(x, fmt=_ndarray_status_fmt, msg="", **kwargs):
kwargs.update(dict(
msg=msg,
min=x.min(),
max=x.max(),
mean=x.mean(),
var = x.var(),
shape=x.shape))
return fmt%kwargs
# XXX: copy-paste out of pylearn
try:
from pylearn.io.image_tiling import tile_slices_to_image
except ImportError:
def tile_slices_to_image(*args, **kwargs):
raise NotImplementedError()
Remove pylearn1 dependency from packaged_dependencies/theano_linear |
from imaging import tile_slices_to_image
_ndarray_status_fmt='%(msg)s shape=%(shape)s min=%(min)f max=%(max)f'
def ndarray_status(x, fmt=_ndarray_status_fmt, msg="", **kwargs):
kwargs.update(dict(
msg=msg,
min=x.min(),
max=x.max(),
mean=x.mean(),
var = x.var(),
shape=x.shape))
return fmt%kwargs
| <commit_before>
_ndarray_status_fmt='%(msg)s shape=%(shape)s min=%(min)f max=%(max)f'
def ndarray_status(x, fmt=_ndarray_status_fmt, msg="", **kwargs):
kwargs.update(dict(
msg=msg,
min=x.min(),
max=x.max(),
mean=x.mean(),
var = x.var(),
shape=x.shape))
return fmt%kwargs
# XXX: copy-paste out of pylearn
try:
from pylearn.io.image_tiling import tile_slices_to_image
except ImportError:
def tile_slices_to_image(*args, **kwargs):
raise NotImplementedError()
<commit_msg>Remove pylearn1 dependency from packaged_dependencies/theano_linear<commit_after> |
from imaging import tile_slices_to_image
_ndarray_status_fmt='%(msg)s shape=%(shape)s min=%(min)f max=%(max)f'
def ndarray_status(x, fmt=_ndarray_status_fmt, msg="", **kwargs):
kwargs.update(dict(
msg=msg,
min=x.min(),
max=x.max(),
mean=x.mean(),
var = x.var(),
shape=x.shape))
return fmt%kwargs
|
_ndarray_status_fmt='%(msg)s shape=%(shape)s min=%(min)f max=%(max)f'
def ndarray_status(x, fmt=_ndarray_status_fmt, msg="", **kwargs):
kwargs.update(dict(
msg=msg,
min=x.min(),
max=x.max(),
mean=x.mean(),
var = x.var(),
shape=x.shape))
return fmt%kwargs
# XXX: copy-paste out of pylearn
try:
from pylearn.io.image_tiling import tile_slices_to_image
except ImportError:
def tile_slices_to_image(*args, **kwargs):
raise NotImplementedError()
Remove pylearn1 dependency from packaged_dependencies/theano_linear
from imaging import tile_slices_to_image
_ndarray_status_fmt='%(msg)s shape=%(shape)s min=%(min)f max=%(max)f'
def ndarray_status(x, fmt=_ndarray_status_fmt, msg="", **kwargs):
kwargs.update(dict(
msg=msg,
min=x.min(),
max=x.max(),
mean=x.mean(),
var = x.var(),
shape=x.shape))
return fmt%kwargs
| <commit_before>
_ndarray_status_fmt='%(msg)s shape=%(shape)s min=%(min)f max=%(max)f'
def ndarray_status(x, fmt=_ndarray_status_fmt, msg="", **kwargs):
kwargs.update(dict(
msg=msg,
min=x.min(),
max=x.max(),
mean=x.mean(),
var = x.var(),
shape=x.shape))
return fmt%kwargs
# XXX: copy-paste out of pylearn
try:
from pylearn.io.image_tiling import tile_slices_to_image
except ImportError:
def tile_slices_to_image(*args, **kwargs):
raise NotImplementedError()
<commit_msg>Remove pylearn1 dependency from packaged_dependencies/theano_linear<commit_after>
from imaging import tile_slices_to_image
_ndarray_status_fmt='%(msg)s shape=%(shape)s min=%(min)f max=%(max)f'
def ndarray_status(x, fmt=_ndarray_status_fmt, msg="", **kwargs):
kwargs.update(dict(
msg=msg,
min=x.min(),
max=x.max(),
mean=x.mean(),
var = x.var(),
shape=x.shape))
return fmt%kwargs
|
b52037176cd1b8a4d99ff195d72680928ba3790f | cms/djangoapps/export_course_metadata/management/commands/export_course_metadata_for_all_courses.py | cms/djangoapps/export_course_metadata/management/commands/export_course_metadata_for_all_courses.py | """
Export course metadata for all courses
"""
from django.core.management.base import BaseCommand
from xmodule.modulestore.django import modulestore
from cms.djangoapps.export_course_metadata.signals import export_course_metadata
from cms.djangoapps.export_course_metadata.tasks import export_course_metadata_task
class Command(BaseCommand):
"""
Export course metadata for all courses
"""
help = 'Export course metadata for all courses'
def handle(self, *args, **options):
"""
Execute the command
"""
export_course_metadata_for_all_courses()
def export_course_metadata_for_all_courses():
"""
Export course metadata for all courses
"""
module_store = modulestore()
courses = module_store.get_courses()
for course in courses:
export_course_metadata_task.delay(str(course.id))
| """
Export course metadata for all courses
"""
from django.core.management.base import BaseCommand
from xmodule.modulestore.django import modulestore
from cms.djangoapps.export_course_metadata.signals import export_course_metadata
from cms.djangoapps.export_course_metadata.tasks import export_course_metadata_task
class Command(BaseCommand):
"""
Export course metadata for all courses
"""
help = 'Export course metadata for all courses'
def handle(self, *args, **options):
"""
Execute the command
"""
export_course_metadata_for_all_courses()
def export_course_metadata_for_all_courses():
"""
Export course metadata for all courses
"""
courses = modulestore().get_course_summaries()
for course in courses:
export_course_metadata_task.delay(str(course.id))
| Change how we get course ids to avoid memory issues | Change how we get course ids to avoid memory issues
| Python | agpl-3.0 | angelapper/edx-platform,arbrandes/edx-platform,angelapper/edx-platform,eduNEXT/edx-platform,edx/edx-platform,EDUlib/edx-platform,arbrandes/edx-platform,eduNEXT/edunext-platform,eduNEXT/edunext-platform,eduNEXT/edx-platform,EDUlib/edx-platform,arbrandes/edx-platform,eduNEXT/edunext-platform,angelapper/edx-platform,angelapper/edx-platform,edx/edx-platform,EDUlib/edx-platform,eduNEXT/edx-platform,edx/edx-platform,eduNEXT/edx-platform,arbrandes/edx-platform,edx/edx-platform,EDUlib/edx-platform,eduNEXT/edunext-platform | """
Export course metadata for all courses
"""
from django.core.management.base import BaseCommand
from xmodule.modulestore.django import modulestore
from cms.djangoapps.export_course_metadata.signals import export_course_metadata
from cms.djangoapps.export_course_metadata.tasks import export_course_metadata_task
class Command(BaseCommand):
"""
Export course metadata for all courses
"""
help = 'Export course metadata for all courses'
def handle(self, *args, **options):
"""
Execute the command
"""
export_course_metadata_for_all_courses()
def export_course_metadata_for_all_courses():
"""
Export course metadata for all courses
"""
module_store = modulestore()
courses = module_store.get_courses()
for course in courses:
export_course_metadata_task.delay(str(course.id))
Change how we get course ids to avoid memory issues | """
Export course metadata for all courses
"""
from django.core.management.base import BaseCommand
from xmodule.modulestore.django import modulestore
from cms.djangoapps.export_course_metadata.signals import export_course_metadata
from cms.djangoapps.export_course_metadata.tasks import export_course_metadata_task
class Command(BaseCommand):
"""
Export course metadata for all courses
"""
help = 'Export course metadata for all courses'
def handle(self, *args, **options):
"""
Execute the command
"""
export_course_metadata_for_all_courses()
def export_course_metadata_for_all_courses():
"""
Export course metadata for all courses
"""
courses = modulestore().get_course_summaries()
for course in courses:
export_course_metadata_task.delay(str(course.id))
| <commit_before>"""
Export course metadata for all courses
"""
from django.core.management.base import BaseCommand
from xmodule.modulestore.django import modulestore
from cms.djangoapps.export_course_metadata.signals import export_course_metadata
from cms.djangoapps.export_course_metadata.tasks import export_course_metadata_task
class Command(BaseCommand):
"""
Export course metadata for all courses
"""
help = 'Export course metadata for all courses'
def handle(self, *args, **options):
"""
Execute the command
"""
export_course_metadata_for_all_courses()
def export_course_metadata_for_all_courses():
"""
Export course metadata for all courses
"""
module_store = modulestore()
courses = module_store.get_courses()
for course in courses:
export_course_metadata_task.delay(str(course.id))
<commit_msg>Change how we get course ids to avoid memory issues<commit_after> | """
Export course metadata for all courses
"""
from django.core.management.base import BaseCommand
from xmodule.modulestore.django import modulestore
from cms.djangoapps.export_course_metadata.signals import export_course_metadata
from cms.djangoapps.export_course_metadata.tasks import export_course_metadata_task
class Command(BaseCommand):
"""
Export course metadata for all courses
"""
help = 'Export course metadata for all courses'
def handle(self, *args, **options):
"""
Execute the command
"""
export_course_metadata_for_all_courses()
def export_course_metadata_for_all_courses():
"""
Export course metadata for all courses
"""
courses = modulestore().get_course_summaries()
for course in courses:
export_course_metadata_task.delay(str(course.id))
| """
Export course metadata for all courses
"""
from django.core.management.base import BaseCommand
from xmodule.modulestore.django import modulestore
from cms.djangoapps.export_course_metadata.signals import export_course_metadata
from cms.djangoapps.export_course_metadata.tasks import export_course_metadata_task
class Command(BaseCommand):
"""
Export course metadata for all courses
"""
help = 'Export course metadata for all courses'
def handle(self, *args, **options):
"""
Execute the command
"""
export_course_metadata_for_all_courses()
def export_course_metadata_for_all_courses():
"""
Export course metadata for all courses
"""
module_store = modulestore()
courses = module_store.get_courses()
for course in courses:
export_course_metadata_task.delay(str(course.id))
Change how we get course ids to avoid memory issues"""
Export course metadata for all courses
"""
from django.core.management.base import BaseCommand
from xmodule.modulestore.django import modulestore
from cms.djangoapps.export_course_metadata.signals import export_course_metadata
from cms.djangoapps.export_course_metadata.tasks import export_course_metadata_task
class Command(BaseCommand):
"""
Export course metadata for all courses
"""
help = 'Export course metadata for all courses'
def handle(self, *args, **options):
"""
Execute the command
"""
export_course_metadata_for_all_courses()
def export_course_metadata_for_all_courses():
"""
Export course metadata for all courses
"""
courses = modulestore().get_course_summaries()
for course in courses:
export_course_metadata_task.delay(str(course.id))
| <commit_before>"""
Export course metadata for all courses
"""
from django.core.management.base import BaseCommand
from xmodule.modulestore.django import modulestore
from cms.djangoapps.export_course_metadata.signals import export_course_metadata
from cms.djangoapps.export_course_metadata.tasks import export_course_metadata_task
class Command(BaseCommand):
"""
Export course metadata for all courses
"""
help = 'Export course metadata for all courses'
def handle(self, *args, **options):
"""
Execute the command
"""
export_course_metadata_for_all_courses()
def export_course_metadata_for_all_courses():
"""
Export course metadata for all courses
"""
module_store = modulestore()
courses = module_store.get_courses()
for course in courses:
export_course_metadata_task.delay(str(course.id))
<commit_msg>Change how we get course ids to avoid memory issues<commit_after>"""
Export course metadata for all courses
"""
from django.core.management.base import BaseCommand
from xmodule.modulestore.django import modulestore
from cms.djangoapps.export_course_metadata.signals import export_course_metadata
from cms.djangoapps.export_course_metadata.tasks import export_course_metadata_task
class Command(BaseCommand):
"""
Export course metadata for all courses
"""
help = 'Export course metadata for all courses'
def handle(self, *args, **options):
"""
Execute the command
"""
export_course_metadata_for_all_courses()
def export_course_metadata_for_all_courses():
"""
Export course metadata for all courses
"""
courses = modulestore().get_course_summaries()
for course in courses:
export_course_metadata_task.delay(str(course.id))
|
cf19c9ad305328f9ab21490e8bdaec4da72adec8 | webmanager/management/commands/keys_default/admin_pass.py | webmanager/management/commands/keys_default/admin_pass.py | default_admin_user = "richard"
default_admin_password = "richard555"
| default_admin_user = "richard"
default_admin_password = "richard666"
| Change default super user password. | Change default super user password.
| Python | bsd-3-clause | weijia/webmanager,weijia/webmanager,weijia/webmanager | default_admin_user = "richard"
default_admin_password = "richard555"
Change default super user password. | default_admin_user = "richard"
default_admin_password = "richard666"
| <commit_before>default_admin_user = "richard"
default_admin_password = "richard555"
<commit_msg>Change default super user password.<commit_after> | default_admin_user = "richard"
default_admin_password = "richard666"
| default_admin_user = "richard"
default_admin_password = "richard555"
Change default super user password.default_admin_user = "richard"
default_admin_password = "richard666"
| <commit_before>default_admin_user = "richard"
default_admin_password = "richard555"
<commit_msg>Change default super user password.<commit_after>default_admin_user = "richard"
default_admin_password = "richard666"
|
a8eff550934730b1b9289796366cc4fe23c669db | stanford/bin/send-email.py | stanford/bin/send-email.py | #!/usr/bin/env python
from email.mime.text import MIMEText
import smtplib
import sys
def send(recipient, sender, subject, body):
message = MIMEText(body, _charset='UTF-8')
message['Subject'] = subject
message['From'] = sender
message['To'] = recipient
smtp = smtplib.SMTP('localhost')
result = smtp.sendmail(sender, recipient, message.as_string())
return result
if __name__ == '__main__':
recipient = sys.argv[1]
sender = sys.argv[2]
subject = sys.argv[3]
path_file = sys.argv[4]
with open(path_file) as file_input:
body = file_input.read()
result = send(recipient, subject, body)
| #!/usr/bin/env python
from email.mime.text import MIMEText
from subprocess import call
import sys
def send(recipient, sender, sender_name, subject, body):
with open('configuration/stanford/bin/email_params.txt', 'rt') as fin:
with open('email.txt', 'wt') as fout:
for line in fin:
line = line.replace('{RECIPIENT}', recipient).replace('{SENDER}', sender).replace('{SENDER_NAME}', sender_name).replace('{SUBJECT}', subject).replace('{BODY}', body)
fout.write(line)
fout = open('email.txt')
cmd = ['openssl', 's_client', '-crlf', '-quiet', '-connect', 'email-smtp.us-east-1.amazonaws.com:465']
call(cmd, stdin=fout)
fout.close()
call(['rm', 'email.txt'])
if __name__ == '__main__':
recipient = sys.argv[1]
sender = sys.argv[2]
sender_name = sys.argv[3]
subject = sys.argv[4]
path_file = sys.argv[5]
with open(path_file) as file_input:
body = file_input.read()
result = send(recipient, sender, sender_name, subject, body)
| Use AWS SMTP server in cut release script. | Use AWS SMTP server in cut release script.
| Python | agpl-3.0 | Stanford-Online/configuration,Stanford-Online/configuration,Stanford-Online/configuration,Stanford-Online/configuration,Stanford-Online/configuration | #!/usr/bin/env python
from email.mime.text import MIMEText
import smtplib
import sys
def send(recipient, sender, subject, body):
message = MIMEText(body, _charset='UTF-8')
message['Subject'] = subject
message['From'] = sender
message['To'] = recipient
smtp = smtplib.SMTP('localhost')
result = smtp.sendmail(sender, recipient, message.as_string())
return result
if __name__ == '__main__':
recipient = sys.argv[1]
sender = sys.argv[2]
subject = sys.argv[3]
path_file = sys.argv[4]
with open(path_file) as file_input:
body = file_input.read()
result = send(recipient, subject, body)
Use AWS SMTP server in cut release script. | #!/usr/bin/env python
from email.mime.text import MIMEText
from subprocess import call
import sys
def send(recipient, sender, sender_name, subject, body):
with open('configuration/stanford/bin/email_params.txt', 'rt') as fin:
with open('email.txt', 'wt') as fout:
for line in fin:
line = line.replace('{RECIPIENT}', recipient).replace('{SENDER}', sender).replace('{SENDER_NAME}', sender_name).replace('{SUBJECT}', subject).replace('{BODY}', body)
fout.write(line)
fout = open('email.txt')
cmd = ['openssl', 's_client', '-crlf', '-quiet', '-connect', 'email-smtp.us-east-1.amazonaws.com:465']
call(cmd, stdin=fout)
fout.close()
call(['rm', 'email.txt'])
if __name__ == '__main__':
recipient = sys.argv[1]
sender = sys.argv[2]
sender_name = sys.argv[3]
subject = sys.argv[4]
path_file = sys.argv[5]
with open(path_file) as file_input:
body = file_input.read()
result = send(recipient, sender, sender_name, subject, body)
| <commit_before>#!/usr/bin/env python
from email.mime.text import MIMEText
import smtplib
import sys
def send(recipient, sender, subject, body):
message = MIMEText(body, _charset='UTF-8')
message['Subject'] = subject
message['From'] = sender
message['To'] = recipient
smtp = smtplib.SMTP('localhost')
result = smtp.sendmail(sender, recipient, message.as_string())
return result
if __name__ == '__main__':
recipient = sys.argv[1]
sender = sys.argv[2]
subject = sys.argv[3]
path_file = sys.argv[4]
with open(path_file) as file_input:
body = file_input.read()
result = send(recipient, subject, body)
<commit_msg>Use AWS SMTP server in cut release script.<commit_after> | #!/usr/bin/env python
from email.mime.text import MIMEText
from subprocess import call
import sys
def send(recipient, sender, sender_name, subject, body):
with open('configuration/stanford/bin/email_params.txt', 'rt') as fin:
with open('email.txt', 'wt') as fout:
for line in fin:
line = line.replace('{RECIPIENT}', recipient).replace('{SENDER}', sender).replace('{SENDER_NAME}', sender_name).replace('{SUBJECT}', subject).replace('{BODY}', body)
fout.write(line)
fout = open('email.txt')
cmd = ['openssl', 's_client', '-crlf', '-quiet', '-connect', 'email-smtp.us-east-1.amazonaws.com:465']
call(cmd, stdin=fout)
fout.close()
call(['rm', 'email.txt'])
if __name__ == '__main__':
recipient = sys.argv[1]
sender = sys.argv[2]
sender_name = sys.argv[3]
subject = sys.argv[4]
path_file = sys.argv[5]
with open(path_file) as file_input:
body = file_input.read()
result = send(recipient, sender, sender_name, subject, body)
| #!/usr/bin/env python
from email.mime.text import MIMEText
import smtplib
import sys
def send(recipient, sender, subject, body):
message = MIMEText(body, _charset='UTF-8')
message['Subject'] = subject
message['From'] = sender
message['To'] = recipient
smtp = smtplib.SMTP('localhost')
result = smtp.sendmail(sender, recipient, message.as_string())
return result
if __name__ == '__main__':
recipient = sys.argv[1]
sender = sys.argv[2]
subject = sys.argv[3]
path_file = sys.argv[4]
with open(path_file) as file_input:
body = file_input.read()
result = send(recipient, subject, body)
Use AWS SMTP server in cut release script.#!/usr/bin/env python
from email.mime.text import MIMEText
from subprocess import call
import sys
def send(recipient, sender, sender_name, subject, body):
with open('configuration/stanford/bin/email_params.txt', 'rt') as fin:
with open('email.txt', 'wt') as fout:
for line in fin:
line = line.replace('{RECIPIENT}', recipient).replace('{SENDER}', sender).replace('{SENDER_NAME}', sender_name).replace('{SUBJECT}', subject).replace('{BODY}', body)
fout.write(line)
fout = open('email.txt')
cmd = ['openssl', 's_client', '-crlf', '-quiet', '-connect', 'email-smtp.us-east-1.amazonaws.com:465']
call(cmd, stdin=fout)
fout.close()
call(['rm', 'email.txt'])
if __name__ == '__main__':
recipient = sys.argv[1]
sender = sys.argv[2]
sender_name = sys.argv[3]
subject = sys.argv[4]
path_file = sys.argv[5]
with open(path_file) as file_input:
body = file_input.read()
result = send(recipient, sender, sender_name, subject, body)
| <commit_before>#!/usr/bin/env python
from email.mime.text import MIMEText
import smtplib
import sys
def send(recipient, sender, subject, body):
message = MIMEText(body, _charset='UTF-8')
message['Subject'] = subject
message['From'] = sender
message['To'] = recipient
smtp = smtplib.SMTP('localhost')
result = smtp.sendmail(sender, recipient, message.as_string())
return result
if __name__ == '__main__':
recipient = sys.argv[1]
sender = sys.argv[2]
subject = sys.argv[3]
path_file = sys.argv[4]
with open(path_file) as file_input:
body = file_input.read()
result = send(recipient, subject, body)
<commit_msg>Use AWS SMTP server in cut release script.<commit_after>#!/usr/bin/env python
from email.mime.text import MIMEText
from subprocess import call
import sys
def send(recipient, sender, sender_name, subject, body):
with open('configuration/stanford/bin/email_params.txt', 'rt') as fin:
with open('email.txt', 'wt') as fout:
for line in fin:
line = line.replace('{RECIPIENT}', recipient).replace('{SENDER}', sender).replace('{SENDER_NAME}', sender_name).replace('{SUBJECT}', subject).replace('{BODY}', body)
fout.write(line)
fout = open('email.txt')
cmd = ['openssl', 's_client', '-crlf', '-quiet', '-connect', 'email-smtp.us-east-1.amazonaws.com:465']
call(cmd, stdin=fout)
fout.close()
call(['rm', 'email.txt'])
if __name__ == '__main__':
recipient = sys.argv[1]
sender = sys.argv[2]
sender_name = sys.argv[3]
subject = sys.argv[4]
path_file = sys.argv[5]
with open(path_file) as file_input:
body = file_input.read()
result = send(recipient, sender, sender_name, subject, body)
|
f8a160b91cf91a02f36bfd88316c199b914298b2 | src/nodeconductor_assembly_waldur/experts/filters.py | src/nodeconductor_assembly_waldur/experts/filters.py | import django_filters
from nodeconductor.core import filters as core_filters
from . import models
class ExpertProviderFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.ExpertProvider
fields = []
class ExpertRequestFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_expr='icontains')
project = core_filters.URLFilter(view_name='project-detail', name='project__uuid')
project_uuid = django_filters.UUIDFilter(name='project__uuid')
class Meta(object):
model = models.ExpertRequest
fields = []
class ExpertBidFilter(django_filters.FilterSet):
request = core_filters.URLFilter(view_name='expert-request-detail', name='request__uuid')
request_uuid = django_filters.UUIDFilter(name='request__uuid')
class Meta(object):
model = models.ExpertBid
fields = []
| import django_filters
from nodeconductor.core import filters as core_filters
from . import models
class ExpertProviderFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.ExpertProvider
fields = []
class ExpertRequestFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_expr='icontains')
project = core_filters.URLFilter(view_name='project-detail', name='project__uuid')
project_uuid = django_filters.UUIDFilter(name='project__uuid')
class Meta(object):
model = models.ExpertRequest
fields = ['state']
class ExpertBidFilter(django_filters.FilterSet):
request = core_filters.URLFilter(view_name='expert-request-detail', name='request__uuid')
request_uuid = django_filters.UUIDFilter(name='request__uuid')
class Meta(object):
model = models.ExpertBid
fields = []
| Allow to filter expert requests by state | Allow to filter expert requests by state [WAL-1041]
| Python | mit | opennode/nodeconductor-assembly-waldur,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/waldur-mastermind | import django_filters
from nodeconductor.core import filters as core_filters
from . import models
class ExpertProviderFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.ExpertProvider
fields = []
class ExpertRequestFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_expr='icontains')
project = core_filters.URLFilter(view_name='project-detail', name='project__uuid')
project_uuid = django_filters.UUIDFilter(name='project__uuid')
class Meta(object):
model = models.ExpertRequest
fields = []
class ExpertBidFilter(django_filters.FilterSet):
request = core_filters.URLFilter(view_name='expert-request-detail', name='request__uuid')
request_uuid = django_filters.UUIDFilter(name='request__uuid')
class Meta(object):
model = models.ExpertBid
fields = []
Allow to filter expert requests by state [WAL-1041] | import django_filters
from nodeconductor.core import filters as core_filters
from . import models
class ExpertProviderFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.ExpertProvider
fields = []
class ExpertRequestFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_expr='icontains')
project = core_filters.URLFilter(view_name='project-detail', name='project__uuid')
project_uuid = django_filters.UUIDFilter(name='project__uuid')
class Meta(object):
model = models.ExpertRequest
fields = ['state']
class ExpertBidFilter(django_filters.FilterSet):
request = core_filters.URLFilter(view_name='expert-request-detail', name='request__uuid')
request_uuid = django_filters.UUIDFilter(name='request__uuid')
class Meta(object):
model = models.ExpertBid
fields = []
| <commit_before>import django_filters
from nodeconductor.core import filters as core_filters
from . import models
class ExpertProviderFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.ExpertProvider
fields = []
class ExpertRequestFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_expr='icontains')
project = core_filters.URLFilter(view_name='project-detail', name='project__uuid')
project_uuid = django_filters.UUIDFilter(name='project__uuid')
class Meta(object):
model = models.ExpertRequest
fields = []
class ExpertBidFilter(django_filters.FilterSet):
request = core_filters.URLFilter(view_name='expert-request-detail', name='request__uuid')
request_uuid = django_filters.UUIDFilter(name='request__uuid')
class Meta(object):
model = models.ExpertBid
fields = []
<commit_msg>Allow to filter expert requests by state [WAL-1041]<commit_after> | import django_filters
from nodeconductor.core import filters as core_filters
from . import models
class ExpertProviderFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.ExpertProvider
fields = []
class ExpertRequestFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_expr='icontains')
project = core_filters.URLFilter(view_name='project-detail', name='project__uuid')
project_uuid = django_filters.UUIDFilter(name='project__uuid')
class Meta(object):
model = models.ExpertRequest
fields = ['state']
class ExpertBidFilter(django_filters.FilterSet):
request = core_filters.URLFilter(view_name='expert-request-detail', name='request__uuid')
request_uuid = django_filters.UUIDFilter(name='request__uuid')
class Meta(object):
model = models.ExpertBid
fields = []
| import django_filters
from nodeconductor.core import filters as core_filters
from . import models
class ExpertProviderFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.ExpertProvider
fields = []
class ExpertRequestFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_expr='icontains')
project = core_filters.URLFilter(view_name='project-detail', name='project__uuid')
project_uuid = django_filters.UUIDFilter(name='project__uuid')
class Meta(object):
model = models.ExpertRequest
fields = []
class ExpertBidFilter(django_filters.FilterSet):
request = core_filters.URLFilter(view_name='expert-request-detail', name='request__uuid')
request_uuid = django_filters.UUIDFilter(name='request__uuid')
class Meta(object):
model = models.ExpertBid
fields = []
Allow to filter expert requests by state [WAL-1041]import django_filters
from nodeconductor.core import filters as core_filters
from . import models
class ExpertProviderFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.ExpertProvider
fields = []
class ExpertRequestFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_expr='icontains')
project = core_filters.URLFilter(view_name='project-detail', name='project__uuid')
project_uuid = django_filters.UUIDFilter(name='project__uuid')
class Meta(object):
model = models.ExpertRequest
fields = ['state']
class ExpertBidFilter(django_filters.FilterSet):
request = core_filters.URLFilter(view_name='expert-request-detail', name='request__uuid')
request_uuid = django_filters.UUIDFilter(name='request__uuid')
class Meta(object):
model = models.ExpertBid
fields = []
| <commit_before>import django_filters
from nodeconductor.core import filters as core_filters
from . import models
class ExpertProviderFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.ExpertProvider
fields = []
class ExpertRequestFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_expr='icontains')
project = core_filters.URLFilter(view_name='project-detail', name='project__uuid')
project_uuid = django_filters.UUIDFilter(name='project__uuid')
class Meta(object):
model = models.ExpertRequest
fields = []
class ExpertBidFilter(django_filters.FilterSet):
request = core_filters.URLFilter(view_name='expert-request-detail', name='request__uuid')
request_uuid = django_filters.UUIDFilter(name='request__uuid')
class Meta(object):
model = models.ExpertBid
fields = []
<commit_msg>Allow to filter expert requests by state [WAL-1041]<commit_after>import django_filters
from nodeconductor.core import filters as core_filters
from . import models
class ExpertProviderFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.ExpertProvider
fields = []
class ExpertRequestFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_expr='icontains')
project = core_filters.URLFilter(view_name='project-detail', name='project__uuid')
project_uuid = django_filters.UUIDFilter(name='project__uuid')
class Meta(object):
model = models.ExpertRequest
fields = ['state']
class ExpertBidFilter(django_filters.FilterSet):
request = core_filters.URLFilter(view_name='expert-request-detail', name='request__uuid')
request_uuid = django_filters.UUIDFilter(name='request__uuid')
class Meta(object):
model = models.ExpertBid
fields = []
|
34fa7690fe2255d44b129c7d2397113d713342b6 | test/requests/test-website.py | test/requests/test-website.py | # Run with something like
#
# env GN2_PROFILE=/home/wrk/opt/gn-latest ./bin/genenetwork2 ./etc/default_settings.py -c ../test/requests/test-website.py http://localhost:5003
#
# Mostly to pick up the Guix GN2_PROFILE and python modules
import requests as req
import sys
print "Mechanical Rob firing up..."
if len(sys.argv)<1:
raise "Problem with arguments"
url = sys.argv[1]
print url
r = req.get(url)
print r
| # Run with something like
#
# env GN2_PROFILE=/home/wrk/opt/gn-latest ./bin/genenetwork2 ./etc/default_settings.py -c ../test/requests/test-website.py http://localhost:5003
#
# Mostly to pick up the Guix GN2_PROFILE and python modules
from __future__ import print_function
from link_checker import check_links
import argparse
print("Mechanical Rob firing up...")
def run_all(args_obj, parser):
print("")
print("Running all tests.")
check_links(args_obj, parser)
# TODO: Add other functions as they are created.
def print_help(args_obj, parser):
print(parser.format_help())
def dummy(args_obj, parser):
print("Not implemented yet.")
desc = """
This is Mechanical-Rob - an automated web server tester for
Genenetwork.org
"""
parser = argparse.ArgumentParser(description=desc)
parser.add_argument("-d", "--database", metavar="DB", type=str
, default="db_webqtl_s"
, help="Use database (default db_webqtl_s)")
parser.add_argument("host", metavar="HOST", type=str
, default="http://localhost:5003"
, help="The url to the web server")
parser.add_argument("-a", "--all", dest="accumulate", action="store_const"
, const=run_all, default=print_help
, help="Runs all tests.")
parser.add_argument("-l", "--link-checker", dest="accumulate"
, action='store_const', const=check_links, default=print_help
, help="Checks for dead links.")
# parser.add_argument("-n", "--navigation", dest="accumulate"
# , action="store_const", const=check_navigation, default=print_help
# , help="Checks for navigation.")
# parser.add_argument("-m", "--mapping", dest="accumulate"
# , action="store_const", const=check_mapping, default=print_help
# , help="Checks for mapping.")
# parser.add_argument("-s", "--skip-broken", dest="accumulate"
# , action="store_const", const=dummy, default=print_help
# , help="Skip tests that are known to be broken.")
args = parser.parse_args()
# print("The arguments object: ", args)
args.accumulate(args, parser)
| Use argparse to handle arguments | Use argparse to handle arguments
* Use argparse to handle commandline arguments.
* Create initial layout of how the code might end up - lots of the code
is currently commented out.
| Python | agpl-3.0 | genenetwork/genenetwork2,pjotrp/genenetwork2,genenetwork/genenetwork2,pjotrp/genenetwork2,DannyArends/genenetwork2,pjotrp/genenetwork2,zsloan/genenetwork2,genenetwork/genenetwork2,DannyArends/genenetwork2,pjotrp/genenetwork2,DannyArends/genenetwork2,zsloan/genenetwork2,DannyArends/genenetwork2,DannyArends/genenetwork2,pjotrp/genenetwork2,zsloan/genenetwork2,genenetwork/genenetwork2,zsloan/genenetwork2,DannyArends/genenetwork2 | # Run with something like
#
# env GN2_PROFILE=/home/wrk/opt/gn-latest ./bin/genenetwork2 ./etc/default_settings.py -c ../test/requests/test-website.py http://localhost:5003
#
# Mostly to pick up the Guix GN2_PROFILE and python modules
import requests as req
import sys
print "Mechanical Rob firing up..."
if len(sys.argv)<1:
raise "Problem with arguments"
url = sys.argv[1]
print url
r = req.get(url)
print r
Use argparse to handle arguments
* Use argparse to handle commandline arguments.
* Create initial layout of how the code might end up - lots of the code
is currently commented out. | # Run with something like
#
# env GN2_PROFILE=/home/wrk/opt/gn-latest ./bin/genenetwork2 ./etc/default_settings.py -c ../test/requests/test-website.py http://localhost:5003
#
# Mostly to pick up the Guix GN2_PROFILE and python modules
from __future__ import print_function
from link_checker import check_links
import argparse
print("Mechanical Rob firing up...")
def run_all(args_obj, parser):
print("")
print("Running all tests.")
check_links(args_obj, parser)
# TODO: Add other functions as they are created.
def print_help(args_obj, parser):
print(parser.format_help())
def dummy(args_obj, parser):
print("Not implemented yet.")
desc = """
This is Mechanical-Rob - an automated web server tester for
Genenetwork.org
"""
parser = argparse.ArgumentParser(description=desc)
parser.add_argument("-d", "--database", metavar="DB", type=str
, default="db_webqtl_s"
, help="Use database (default db_webqtl_s)")
parser.add_argument("host", metavar="HOST", type=str
, default="http://localhost:5003"
, help="The url to the web server")
parser.add_argument("-a", "--all", dest="accumulate", action="store_const"
, const=run_all, default=print_help
, help="Runs all tests.")
parser.add_argument("-l", "--link-checker", dest="accumulate"
, action='store_const', const=check_links, default=print_help
, help="Checks for dead links.")
# parser.add_argument("-n", "--navigation", dest="accumulate"
# , action="store_const", const=check_navigation, default=print_help
# , help="Checks for navigation.")
# parser.add_argument("-m", "--mapping", dest="accumulate"
# , action="store_const", const=check_mapping, default=print_help
# , help="Checks for mapping.")
# parser.add_argument("-s", "--skip-broken", dest="accumulate"
# , action="store_const", const=dummy, default=print_help
# , help="Skip tests that are known to be broken.")
args = parser.parse_args()
# print("The arguments object: ", args)
args.accumulate(args, parser)
| <commit_before># Run with something like
#
# env GN2_PROFILE=/home/wrk/opt/gn-latest ./bin/genenetwork2 ./etc/default_settings.py -c ../test/requests/test-website.py http://localhost:5003
#
# Mostly to pick up the Guix GN2_PROFILE and python modules
import requests as req
import sys
print "Mechanical Rob firing up..."
if len(sys.argv)<1:
raise "Problem with arguments"
url = sys.argv[1]
print url
r = req.get(url)
print r
<commit_msg>Use argparse to handle arguments
* Use argparse to handle commandline arguments.
* Create initial layout of how the code might end up - lots of the code
is currently commented out.<commit_after> | # Run with something like
#
# env GN2_PROFILE=/home/wrk/opt/gn-latest ./bin/genenetwork2 ./etc/default_settings.py -c ../test/requests/test-website.py http://localhost:5003
#
# Mostly to pick up the Guix GN2_PROFILE and python modules
from __future__ import print_function
from link_checker import check_links
import argparse
print("Mechanical Rob firing up...")
def run_all(args_obj, parser):
print("")
print("Running all tests.")
check_links(args_obj, parser)
# TODO: Add other functions as they are created.
def print_help(args_obj, parser):
print(parser.format_help())
def dummy(args_obj, parser):
print("Not implemented yet.")
desc = """
This is Mechanical-Rob - an automated web server tester for
Genenetwork.org
"""
parser = argparse.ArgumentParser(description=desc)
parser.add_argument("-d", "--database", metavar="DB", type=str
, default="db_webqtl_s"
, help="Use database (default db_webqtl_s)")
parser.add_argument("host", metavar="HOST", type=str
, default="http://localhost:5003"
, help="The url to the web server")
parser.add_argument("-a", "--all", dest="accumulate", action="store_const"
, const=run_all, default=print_help
, help="Runs all tests.")
parser.add_argument("-l", "--link-checker", dest="accumulate"
, action='store_const', const=check_links, default=print_help
, help="Checks for dead links.")
# parser.add_argument("-n", "--navigation", dest="accumulate"
# , action="store_const", const=check_navigation, default=print_help
# , help="Checks for navigation.")
# parser.add_argument("-m", "--mapping", dest="accumulate"
# , action="store_const", const=check_mapping, default=print_help
# , help="Checks for mapping.")
# parser.add_argument("-s", "--skip-broken", dest="accumulate"
# , action="store_const", const=dummy, default=print_help
# , help="Skip tests that are known to be broken.")
args = parser.parse_args()
# print("The arguments object: ", args)
args.accumulate(args, parser)
| # Run with something like
#
# env GN2_PROFILE=/home/wrk/opt/gn-latest ./bin/genenetwork2 ./etc/default_settings.py -c ../test/requests/test-website.py http://localhost:5003
#
# Mostly to pick up the Guix GN2_PROFILE and python modules
import requests as req
import sys
print "Mechanical Rob firing up..."
if len(sys.argv)<1:
raise "Problem with arguments"
url = sys.argv[1]
print url
r = req.get(url)
print r
Use argparse to handle arguments
* Use argparse to handle commandline arguments.
* Create initial layout of how the code might end up - lots of the code
is currently commented out.# Run with something like
#
# env GN2_PROFILE=/home/wrk/opt/gn-latest ./bin/genenetwork2 ./etc/default_settings.py -c ../test/requests/test-website.py http://localhost:5003
#
# Mostly to pick up the Guix GN2_PROFILE and python modules
from __future__ import print_function
from link_checker import check_links
import argparse
print("Mechanical Rob firing up...")
def run_all(args_obj, parser):
print("")
print("Running all tests.")
check_links(args_obj, parser)
# TODO: Add other functions as they are created.
def print_help(args_obj, parser):
print(parser.format_help())
def dummy(args_obj, parser):
print("Not implemented yet.")
desc = """
This is Mechanical-Rob - an automated web server tester for
Genenetwork.org
"""
parser = argparse.ArgumentParser(description=desc)
parser.add_argument("-d", "--database", metavar="DB", type=str
, default="db_webqtl_s"
, help="Use database (default db_webqtl_s)")
parser.add_argument("host", metavar="HOST", type=str
, default="http://localhost:5003"
, help="The url to the web server")
parser.add_argument("-a", "--all", dest="accumulate", action="store_const"
, const=run_all, default=print_help
, help="Runs all tests.")
parser.add_argument("-l", "--link-checker", dest="accumulate"
, action='store_const', const=check_links, default=print_help
, help="Checks for dead links.")
# parser.add_argument("-n", "--navigation", dest="accumulate"
# , action="store_const", const=check_navigation, default=print_help
# , help="Checks for navigation.")
# parser.add_argument("-m", "--mapping", dest="accumulate"
# , action="store_const", const=check_mapping, default=print_help
# , help="Checks for mapping.")
# parser.add_argument("-s", "--skip-broken", dest="accumulate"
# , action="store_const", const=dummy, default=print_help
# , help="Skip tests that are known to be broken.")
args = parser.parse_args()
# print("The arguments object: ", args)
args.accumulate(args, parser)
| <commit_before># Run with something like
#
# env GN2_PROFILE=/home/wrk/opt/gn-latest ./bin/genenetwork2 ./etc/default_settings.py -c ../test/requests/test-website.py http://localhost:5003
#
# Mostly to pick up the Guix GN2_PROFILE and python modules
import requests as req
import sys
print "Mechanical Rob firing up..."
if len(sys.argv)<1:
raise "Problem with arguments"
url = sys.argv[1]
print url
r = req.get(url)
print r
<commit_msg>Use argparse to handle arguments
* Use argparse to handle commandline arguments.
* Create initial layout of how the code might end up - lots of the code
is currently commented out.<commit_after># Run with something like
#
# env GN2_PROFILE=/home/wrk/opt/gn-latest ./bin/genenetwork2 ./etc/default_settings.py -c ../test/requests/test-website.py http://localhost:5003
#
# Mostly to pick up the Guix GN2_PROFILE and python modules
from __future__ import print_function
from link_checker import check_links
import argparse
print("Mechanical Rob firing up...")
def run_all(args_obj, parser):
print("")
print("Running all tests.")
check_links(args_obj, parser)
# TODO: Add other functions as they are created.
def print_help(args_obj, parser):
print(parser.format_help())
def dummy(args_obj, parser):
print("Not implemented yet.")
desc = """
This is Mechanical-Rob - an automated web server tester for
Genenetwork.org
"""
parser = argparse.ArgumentParser(description=desc)
parser.add_argument("-d", "--database", metavar="DB", type=str
, default="db_webqtl_s"
, help="Use database (default db_webqtl_s)")
parser.add_argument("host", metavar="HOST", type=str
, default="http://localhost:5003"
, help="The url to the web server")
parser.add_argument("-a", "--all", dest="accumulate", action="store_const"
, const=run_all, default=print_help
, help="Runs all tests.")
parser.add_argument("-l", "--link-checker", dest="accumulate"
, action='store_const', const=check_links, default=print_help
, help="Checks for dead links.")
# parser.add_argument("-n", "--navigation", dest="accumulate"
# , action="store_const", const=check_navigation, default=print_help
# , help="Checks for navigation.")
# parser.add_argument("-m", "--mapping", dest="accumulate"
# , action="store_const", const=check_mapping, default=print_help
# , help="Checks for mapping.")
# parser.add_argument("-s", "--skip-broken", dest="accumulate"
# , action="store_const", const=dummy, default=print_help
# , help="Skip tests that are known to be broken.")
args = parser.parse_args()
# print("The arguments object: ", args)
args.accumulate(args, parser)
|
64042be2b6febf64d601adaa6f85a542ae9b876d | sunpy/instr/iris/iris.py | sunpy/instr/iris/iris.py | """
Some very beta tools for IRIS
"""
import sunpy.io
import sunpy.time
import sunpy.map
__all__ = ['SJI_to_cube']
def SJI_to_cube(filename, start=0, stop=None):
"""
Read a SJI file and return a MapCube
..warning::
This function is a very early beta and is not stable. Further work is
on going to improve SunPy IRIS support.
Parameters
----------
filename: string
File to read
start:
Temporal axis index to create MapCube from
stop:
Temporal index to stop MapCube at
Returns
-------
iris_cube: sunpy.map.MapCube
A map cube of the SJI sequence
"""
hdus = sunpy.io.read_file(filename)
#Get the time delta
time_range = sunpy.time.TimeRange(hdus[0][1]['STARTOBS'], hdus[0][1]['ENDOBS'])
splits = time_range.split(hdus[0][0].shape[0])
if not stop:
stop = len(splits)
headers = [hdus[0][1]]*(stop-start)
datas = hdus[0][0][start:stop]
#Make the cube:
iris_cube = sunpy.map.Map(zip(datas,headers),cube=True)
#Set the date/time
for i,m in enumerate(iris_cube):
m.meta['DATE-OBS'] = splits[i].center().isoformat()
return iris_cube | """
Some very beta tools for IRIS
"""
import sunpy.io
import sunpy.time
import sunpy.map
__all__ = ['SJI_to_cube']
def SJI_to_cube(filename, start=0, stop=None, hdu=0):
"""
Read a SJI file and return a MapCube
..warning::
This function is a very early beta and is not stable. Further work is
on going to improve SunPy IRIS support.
Parameters
----------
filename: string
File to read
start:
Temporal axis index to create MapCube from
stop:
Temporal index to stop MapCube at
hdu:
Choose hdu index
Returns
-------
iris_cube: sunpy.map.MapCube
A map cube of the SJI sequence
"""
hdus = sunpy.io.read_file(filename)
#Get the time delta
time_range = sunpy.time.TimeRange(hdus[hdu][1]['STARTOBS'], hdus[hdu][1]['ENDOBS'])
splits = time_range.split(hdus[hdu][0].shape[0])
if not stop:
stop = len(splits)
headers = [hdus[hdu][1]]*(stop-start)
datas = hdus[hdu][0][start:stop]
#Make the cube:
iris_cube = sunpy.map.Map(zip(datas,headers),cube=True)
#Set the date/time
for i,m in enumerate(iris_cube):
m.meta['DATE-OBS'] = splits[i].center().isoformat()
return iris_cube
| Change hdu[0] to hdu for optional indexing | Change hdu[0] to hdu for optional indexing
| Python | bsd-2-clause | Alex-Ian-Hamilton/sunpy,dpshelio/sunpy,dpshelio/sunpy,Alex-Ian-Hamilton/sunpy,dpshelio/sunpy,Alex-Ian-Hamilton/sunpy | """
Some very beta tools for IRIS
"""
import sunpy.io
import sunpy.time
import sunpy.map
__all__ = ['SJI_to_cube']
def SJI_to_cube(filename, start=0, stop=None):
"""
Read a SJI file and return a MapCube
..warning::
This function is a very early beta and is not stable. Further work is
on going to improve SunPy IRIS support.
Parameters
----------
filename: string
File to read
start:
Temporal axis index to create MapCube from
stop:
Temporal index to stop MapCube at
Returns
-------
iris_cube: sunpy.map.MapCube
A map cube of the SJI sequence
"""
hdus = sunpy.io.read_file(filename)
#Get the time delta
time_range = sunpy.time.TimeRange(hdus[0][1]['STARTOBS'], hdus[0][1]['ENDOBS'])
splits = time_range.split(hdus[0][0].shape[0])
if not stop:
stop = len(splits)
headers = [hdus[0][1]]*(stop-start)
datas = hdus[0][0][start:stop]
#Make the cube:
iris_cube = sunpy.map.Map(zip(datas,headers),cube=True)
#Set the date/time
for i,m in enumerate(iris_cube):
m.meta['DATE-OBS'] = splits[i].center().isoformat()
return iris_cubeChange hdu[0] to hdu for optional indexing | """
Some very beta tools for IRIS
"""
import sunpy.io
import sunpy.time
import sunpy.map
__all__ = ['SJI_to_cube']
def SJI_to_cube(filename, start=0, stop=None, hdu=0):
"""
Read a SJI file and return a MapCube
..warning::
This function is a very early beta and is not stable. Further work is
on going to improve SunPy IRIS support.
Parameters
----------
filename: string
File to read
start:
Temporal axis index to create MapCube from
stop:
Temporal index to stop MapCube at
hdu:
Choose hdu index
Returns
-------
iris_cube: sunpy.map.MapCube
A map cube of the SJI sequence
"""
hdus = sunpy.io.read_file(filename)
#Get the time delta
time_range = sunpy.time.TimeRange(hdus[hdu][1]['STARTOBS'], hdus[hdu][1]['ENDOBS'])
splits = time_range.split(hdus[hdu][0].shape[0])
if not stop:
stop = len(splits)
headers = [hdus[hdu][1]]*(stop-start)
datas = hdus[hdu][0][start:stop]
#Make the cube:
iris_cube = sunpy.map.Map(zip(datas,headers),cube=True)
#Set the date/time
for i,m in enumerate(iris_cube):
m.meta['DATE-OBS'] = splits[i].center().isoformat()
return iris_cube
| <commit_before>"""
Some very beta tools for IRIS
"""
import sunpy.io
import sunpy.time
import sunpy.map
__all__ = ['SJI_to_cube']
def SJI_to_cube(filename, start=0, stop=None):
"""
Read a SJI file and return a MapCube
..warning::
This function is a very early beta and is not stable. Further work is
on going to improve SunPy IRIS support.
Parameters
----------
filename: string
File to read
start:
Temporal axis index to create MapCube from
stop:
Temporal index to stop MapCube at
Returns
-------
iris_cube: sunpy.map.MapCube
A map cube of the SJI sequence
"""
hdus = sunpy.io.read_file(filename)
#Get the time delta
time_range = sunpy.time.TimeRange(hdus[0][1]['STARTOBS'], hdus[0][1]['ENDOBS'])
splits = time_range.split(hdus[0][0].shape[0])
if not stop:
stop = len(splits)
headers = [hdus[0][1]]*(stop-start)
datas = hdus[0][0][start:stop]
#Make the cube:
iris_cube = sunpy.map.Map(zip(datas,headers),cube=True)
#Set the date/time
for i,m in enumerate(iris_cube):
m.meta['DATE-OBS'] = splits[i].center().isoformat()
return iris_cube<commit_msg>Change hdu[0] to hdu for optional indexing<commit_after> | """
Some very beta tools for IRIS
"""
import sunpy.io
import sunpy.time
import sunpy.map
__all__ = ['SJI_to_cube']
def SJI_to_cube(filename, start=0, stop=None, hdu=0):
"""
Read a SJI file and return a MapCube
..warning::
This function is a very early beta and is not stable. Further work is
on going to improve SunPy IRIS support.
Parameters
----------
filename: string
File to read
start:
Temporal axis index to create MapCube from
stop:
Temporal index to stop MapCube at
hdu:
Choose hdu index
Returns
-------
iris_cube: sunpy.map.MapCube
A map cube of the SJI sequence
"""
hdus = sunpy.io.read_file(filename)
#Get the time delta
time_range = sunpy.time.TimeRange(hdus[hdu][1]['STARTOBS'], hdus[hdu][1]['ENDOBS'])
splits = time_range.split(hdus[hdu][0].shape[0])
if not stop:
stop = len(splits)
headers = [hdus[hdu][1]]*(stop-start)
datas = hdus[hdu][0][start:stop]
#Make the cube:
iris_cube = sunpy.map.Map(zip(datas,headers),cube=True)
#Set the date/time
for i,m in enumerate(iris_cube):
m.meta['DATE-OBS'] = splits[i].center().isoformat()
return iris_cube
| """
Some very beta tools for IRIS
"""
import sunpy.io
import sunpy.time
import sunpy.map
__all__ = ['SJI_to_cube']
def SJI_to_cube(filename, start=0, stop=None):
"""
Read a SJI file and return a MapCube
..warning::
This function is a very early beta and is not stable. Further work is
on going to improve SunPy IRIS support.
Parameters
----------
filename: string
File to read
start:
Temporal axis index to create MapCube from
stop:
Temporal index to stop MapCube at
Returns
-------
iris_cube: sunpy.map.MapCube
A map cube of the SJI sequence
"""
hdus = sunpy.io.read_file(filename)
#Get the time delta
time_range = sunpy.time.TimeRange(hdus[0][1]['STARTOBS'], hdus[0][1]['ENDOBS'])
splits = time_range.split(hdus[0][0].shape[0])
if not stop:
stop = len(splits)
headers = [hdus[0][1]]*(stop-start)
datas = hdus[0][0][start:stop]
#Make the cube:
iris_cube = sunpy.map.Map(zip(datas,headers),cube=True)
#Set the date/time
for i,m in enumerate(iris_cube):
m.meta['DATE-OBS'] = splits[i].center().isoformat()
return iris_cubeChange hdu[0] to hdu for optional indexing"""
Some very beta tools for IRIS
"""
import sunpy.io
import sunpy.time
import sunpy.map
__all__ = ['SJI_to_cube']
def SJI_to_cube(filename, start=0, stop=None, hdu=0):
"""
Read a SJI file and return a MapCube
..warning::
This function is a very early beta and is not stable. Further work is
on going to improve SunPy IRIS support.
Parameters
----------
filename: string
File to read
start:
Temporal axis index to create MapCube from
stop:
Temporal index to stop MapCube at
hdu:
Choose hdu index
Returns
-------
iris_cube: sunpy.map.MapCube
A map cube of the SJI sequence
"""
hdus = sunpy.io.read_file(filename)
#Get the time delta
time_range = sunpy.time.TimeRange(hdus[hdu][1]['STARTOBS'], hdus[hdu][1]['ENDOBS'])
splits = time_range.split(hdus[hdu][0].shape[0])
if not stop:
stop = len(splits)
headers = [hdus[hdu][1]]*(stop-start)
datas = hdus[hdu][0][start:stop]
#Make the cube:
iris_cube = sunpy.map.Map(zip(datas,headers),cube=True)
#Set the date/time
for i,m in enumerate(iris_cube):
m.meta['DATE-OBS'] = splits[i].center().isoformat()
return iris_cube
| <commit_before>"""
Some very beta tools for IRIS
"""
import sunpy.io
import sunpy.time
import sunpy.map
__all__ = ['SJI_to_cube']
def SJI_to_cube(filename, start=0, stop=None):
"""
Read a SJI file and return a MapCube
..warning::
This function is a very early beta and is not stable. Further work is
on going to improve SunPy IRIS support.
Parameters
----------
filename: string
File to read
start:
Temporal axis index to create MapCube from
stop:
Temporal index to stop MapCube at
Returns
-------
iris_cube: sunpy.map.MapCube
A map cube of the SJI sequence
"""
hdus = sunpy.io.read_file(filename)
#Get the time delta
time_range = sunpy.time.TimeRange(hdus[0][1]['STARTOBS'], hdus[0][1]['ENDOBS'])
splits = time_range.split(hdus[0][0].shape[0])
if not stop:
stop = len(splits)
headers = [hdus[0][1]]*(stop-start)
datas = hdus[0][0][start:stop]
#Make the cube:
iris_cube = sunpy.map.Map(zip(datas,headers),cube=True)
#Set the date/time
for i,m in enumerate(iris_cube):
m.meta['DATE-OBS'] = splits[i].center().isoformat()
return iris_cube<commit_msg>Change hdu[0] to hdu for optional indexing<commit_after>"""
Some very beta tools for IRIS
"""
import sunpy.io
import sunpy.time
import sunpy.map
__all__ = ['SJI_to_cube']
def SJI_to_cube(filename, start=0, stop=None, hdu=0):
"""
Read a SJI file and return a MapCube
..warning::
This function is a very early beta and is not stable. Further work is
on going to improve SunPy IRIS support.
Parameters
----------
filename: string
File to read
start:
Temporal axis index to create MapCube from
stop:
Temporal index to stop MapCube at
hdu:
Choose hdu index
Returns
-------
iris_cube: sunpy.map.MapCube
A map cube of the SJI sequence
"""
hdus = sunpy.io.read_file(filename)
#Get the time delta
time_range = sunpy.time.TimeRange(hdus[hdu][1]['STARTOBS'], hdus[hdu][1]['ENDOBS'])
splits = time_range.split(hdus[hdu][0].shape[0])
if not stop:
stop = len(splits)
headers = [hdus[hdu][1]]*(stop-start)
datas = hdus[hdu][0][start:stop]
#Make the cube:
iris_cube = sunpy.map.Map(zip(datas,headers),cube=True)
#Set the date/time
for i,m in enumerate(iris_cube):
m.meta['DATE-OBS'] = splits[i].center().isoformat()
return iris_cube
|
2d5c5a1bf693f428b53f8d4a6e788f7be864aa9e | image_site_app/forms.py | image_site_app/forms.py | from django import forms
class SignupForm(forms.Form):
field_order = ['username', 'first_name', 'last_name', 'email', 'password', 'password2']
first_name = forms.CharField(max_length=30, label='First name (optional)', required=False)
last_name = forms.CharField(max_length=30, label='Last name (optional)', required=False)
def signup(self, request, user):
user.first_name = self.cleaned_data['first_name']
user.last_name = self.cleaned_data['last_name']
user.save()
| from django import forms
class SignupForm(forms.Form):
field_order = ['username', 'first_name', 'last_name', 'email', 'password', 'password2']
first_name = forms.CharField(max_length=30,
label='First name (optional)',
required=False,
widget=forms.TextInput(attrs={
'placeholder': 'First name'
}))
last_name = forms.CharField(max_length=30,
label='Last name (optional)',
required=False,
widget=forms.TextInput(attrs={
'placeholder': 'Last name'
}))
def signup(self, request, user):
user.first_name = self.cleaned_data['first_name']
user.last_name = self.cleaned_data['last_name']
user.save()
| Add placeholder to first_name and last_name fields in signup form | Add placeholder to first_name and last_name fields in signup form
| Python | mit | frostblooded/kanq,frostblooded/kanq,frostblooded/kanq,frostblooded/kanq,frostblooded/kanq | from django import forms
class SignupForm(forms.Form):
field_order = ['username', 'first_name', 'last_name', 'email', 'password', 'password2']
first_name = forms.CharField(max_length=30, label='First name (optional)', required=False)
last_name = forms.CharField(max_length=30, label='Last name (optional)', required=False)
def signup(self, request, user):
user.first_name = self.cleaned_data['first_name']
user.last_name = self.cleaned_data['last_name']
user.save()
Add placeholder to first_name and last_name fields in signup form | from django import forms
class SignupForm(forms.Form):
field_order = ['username', 'first_name', 'last_name', 'email', 'password', 'password2']
first_name = forms.CharField(max_length=30,
label='First name (optional)',
required=False,
widget=forms.TextInput(attrs={
'placeholder': 'First name'
}))
last_name = forms.CharField(max_length=30,
label='Last name (optional)',
required=False,
widget=forms.TextInput(attrs={
'placeholder': 'Last name'
}))
def signup(self, request, user):
user.first_name = self.cleaned_data['first_name']
user.last_name = self.cleaned_data['last_name']
user.save()
| <commit_before>from django import forms
class SignupForm(forms.Form):
field_order = ['username', 'first_name', 'last_name', 'email', 'password', 'password2']
first_name = forms.CharField(max_length=30, label='First name (optional)', required=False)
last_name = forms.CharField(max_length=30, label='Last name (optional)', required=False)
def signup(self, request, user):
user.first_name = self.cleaned_data['first_name']
user.last_name = self.cleaned_data['last_name']
user.save()
<commit_msg>Add placeholder to first_name and last_name fields in signup form<commit_after> | from django import forms
class SignupForm(forms.Form):
field_order = ['username', 'first_name', 'last_name', 'email', 'password', 'password2']
first_name = forms.CharField(max_length=30,
label='First name (optional)',
required=False,
widget=forms.TextInput(attrs={
'placeholder': 'First name'
}))
last_name = forms.CharField(max_length=30,
label='Last name (optional)',
required=False,
widget=forms.TextInput(attrs={
'placeholder': 'Last name'
}))
def signup(self, request, user):
user.first_name = self.cleaned_data['first_name']
user.last_name = self.cleaned_data['last_name']
user.save()
| from django import forms
class SignupForm(forms.Form):
field_order = ['username', 'first_name', 'last_name', 'email', 'password', 'password2']
first_name = forms.CharField(max_length=30, label='First name (optional)', required=False)
last_name = forms.CharField(max_length=30, label='Last name (optional)', required=False)
def signup(self, request, user):
user.first_name = self.cleaned_data['first_name']
user.last_name = self.cleaned_data['last_name']
user.save()
Add placeholder to first_name and last_name fields in signup formfrom django import forms
class SignupForm(forms.Form):
field_order = ['username', 'first_name', 'last_name', 'email', 'password', 'password2']
first_name = forms.CharField(max_length=30,
label='First name (optional)',
required=False,
widget=forms.TextInput(attrs={
'placeholder': 'First name'
}))
last_name = forms.CharField(max_length=30,
label='Last name (optional)',
required=False,
widget=forms.TextInput(attrs={
'placeholder': 'Last name'
}))
def signup(self, request, user):
user.first_name = self.cleaned_data['first_name']
user.last_name = self.cleaned_data['last_name']
user.save()
| <commit_before>from django import forms
class SignupForm(forms.Form):
field_order = ['username', 'first_name', 'last_name', 'email', 'password', 'password2']
first_name = forms.CharField(max_length=30, label='First name (optional)', required=False)
last_name = forms.CharField(max_length=30, label='Last name (optional)', required=False)
def signup(self, request, user):
user.first_name = self.cleaned_data['first_name']
user.last_name = self.cleaned_data['last_name']
user.save()
<commit_msg>Add placeholder to first_name and last_name fields in signup form<commit_after>from django import forms
class SignupForm(forms.Form):
field_order = ['username', 'first_name', 'last_name', 'email', 'password', 'password2']
first_name = forms.CharField(max_length=30,
label='First name (optional)',
required=False,
widget=forms.TextInput(attrs={
'placeholder': 'First name'
}))
last_name = forms.CharField(max_length=30,
label='Last name (optional)',
required=False,
widget=forms.TextInput(attrs={
'placeholder': 'Last name'
}))
def signup(self, request, user):
user.first_name = self.cleaned_data['first_name']
user.last_name = self.cleaned_data['last_name']
user.save()
|
64bc8ff452d03c7bb026be0b2edd9a047a88b386 | foyer/forcefields/forcefields.py | foyer/forcefields/forcefields.py | import os
import glob
from pkg_resources import resource_filename
from foyer import Forcefield
def get_ff_path():
return [resource_filename('foyer', 'forcefields')]
def get_forcefield_paths(forcefield_name=None):
for dir_path in get_ff_path():
file_pattern = os.path.join(dir_path, 'xml/*.xml')
file_paths = [file_path for file_path in glob.glob(file_pattern)]
return file_paths
def get_forcefield(name=None):
if name is None:
raise ValueError('Need a force field name')
file_paths = get_forcefield_paths()
try:
ff_path = next(val for val in file_paths if name in val)
except StopIteration:
raise ValueError('Could not find force field with name {}'
' in path {}'.format(name, get_ff_path()))
return Forcefield(ff_path)
load_OPLSAA = get_forcefield(name='oplsaa')
load_TRAPPE_UA = get_forcefield(name='trappe-ua')
| import os
import glob
from pkg_resources import resource_filename
from foyer import Forcefield
def get_ff_path():
return [resource_filename('foyer', 'forcefields')]
def get_forcefield_paths(forcefield_name=None):
for dir_path in get_ff_path():
file_pattern = os.path.join(dir_path, 'xml/*.xml')
file_paths = [file_path for file_path in glob.glob(file_pattern)]
return file_paths
def get_forcefield(name=None):
if name is None:
raise ValueError('Need a force field name')
file_paths = get_forcefield_paths()
try:
ff_path = next(val for val in file_paths if name in val)
except StopIteration:
raise ValueError('Could not find force field with name {}'
' in path {}'.format(name, get_ff_path()))
return Forcefield(ff_path)
def load_OPLSAA():
return get_forcefield(name='oplsaa')
def load_TRAPPE_UA():
return get_forcefield(name='trappe-ua')
load_OPLSAA = load_OPLSAA
load_TRAPPE_UA = load_TRAPPE_UA
| Make discrete functions for each force field | Make discrete functions for each force field
| Python | mit | mosdef-hub/foyer,mosdef-hub/foyer,iModels/foyer,iModels/foyer | import os
import glob
from pkg_resources import resource_filename
from foyer import Forcefield
def get_ff_path():
return [resource_filename('foyer', 'forcefields')]
def get_forcefield_paths(forcefield_name=None):
for dir_path in get_ff_path():
file_pattern = os.path.join(dir_path, 'xml/*.xml')
file_paths = [file_path for file_path in glob.glob(file_pattern)]
return file_paths
def get_forcefield(name=None):
if name is None:
raise ValueError('Need a force field name')
file_paths = get_forcefield_paths()
try:
ff_path = next(val for val in file_paths if name in val)
except StopIteration:
raise ValueError('Could not find force field with name {}'
' in path {}'.format(name, get_ff_path()))
return Forcefield(ff_path)
load_OPLSAA = get_forcefield(name='oplsaa')
load_TRAPPE_UA = get_forcefield(name='trappe-ua')
Make discrete functions for each force field | import os
import glob
from pkg_resources import resource_filename
from foyer import Forcefield
def get_ff_path():
return [resource_filename('foyer', 'forcefields')]
def get_forcefield_paths(forcefield_name=None):
for dir_path in get_ff_path():
file_pattern = os.path.join(dir_path, 'xml/*.xml')
file_paths = [file_path for file_path in glob.glob(file_pattern)]
return file_paths
def get_forcefield(name=None):
if name is None:
raise ValueError('Need a force field name')
file_paths = get_forcefield_paths()
try:
ff_path = next(val for val in file_paths if name in val)
except StopIteration:
raise ValueError('Could not find force field with name {}'
' in path {}'.format(name, get_ff_path()))
return Forcefield(ff_path)
def load_OPLSAA():
return get_forcefield(name='oplsaa')
def load_TRAPPE_UA():
return get_forcefield(name='trappe-ua')
load_OPLSAA = load_OPLSAA
load_TRAPPE_UA = load_TRAPPE_UA
| <commit_before>import os
import glob
from pkg_resources import resource_filename
from foyer import Forcefield
def get_ff_path():
return [resource_filename('foyer', 'forcefields')]
def get_forcefield_paths(forcefield_name=None):
for dir_path in get_ff_path():
file_pattern = os.path.join(dir_path, 'xml/*.xml')
file_paths = [file_path for file_path in glob.glob(file_pattern)]
return file_paths
def get_forcefield(name=None):
if name is None:
raise ValueError('Need a force field name')
file_paths = get_forcefield_paths()
try:
ff_path = next(val for val in file_paths if name in val)
except StopIteration:
raise ValueError('Could not find force field with name {}'
' in path {}'.format(name, get_ff_path()))
return Forcefield(ff_path)
load_OPLSAA = get_forcefield(name='oplsaa')
load_TRAPPE_UA = get_forcefield(name='trappe-ua')
<commit_msg>Make discrete functions for each force field<commit_after> | import os
import glob
from pkg_resources import resource_filename
from foyer import Forcefield
def get_ff_path():
return [resource_filename('foyer', 'forcefields')]
def get_forcefield_paths(forcefield_name=None):
for dir_path in get_ff_path():
file_pattern = os.path.join(dir_path, 'xml/*.xml')
file_paths = [file_path for file_path in glob.glob(file_pattern)]
return file_paths
def get_forcefield(name=None):
if name is None:
raise ValueError('Need a force field name')
file_paths = get_forcefield_paths()
try:
ff_path = next(val for val in file_paths if name in val)
except StopIteration:
raise ValueError('Could not find force field with name {}'
' in path {}'.format(name, get_ff_path()))
return Forcefield(ff_path)
def load_OPLSAA():
return get_forcefield(name='oplsaa')
def load_TRAPPE_UA():
return get_forcefield(name='trappe-ua')
load_OPLSAA = load_OPLSAA
load_TRAPPE_UA = load_TRAPPE_UA
| import os
import glob
from pkg_resources import resource_filename
from foyer import Forcefield
def get_ff_path():
return [resource_filename('foyer', 'forcefields')]
def get_forcefield_paths(forcefield_name=None):
for dir_path in get_ff_path():
file_pattern = os.path.join(dir_path, 'xml/*.xml')
file_paths = [file_path for file_path in glob.glob(file_pattern)]
return file_paths
def get_forcefield(name=None):
if name is None:
raise ValueError('Need a force field name')
file_paths = get_forcefield_paths()
try:
ff_path = next(val for val in file_paths if name in val)
except StopIteration:
raise ValueError('Could not find force field with name {}'
' in path {}'.format(name, get_ff_path()))
return Forcefield(ff_path)
load_OPLSAA = get_forcefield(name='oplsaa')
load_TRAPPE_UA = get_forcefield(name='trappe-ua')
Make discrete functions for each force fieldimport os
import glob
from pkg_resources import resource_filename
from foyer import Forcefield
def get_ff_path():
return [resource_filename('foyer', 'forcefields')]
def get_forcefield_paths(forcefield_name=None):
for dir_path in get_ff_path():
file_pattern = os.path.join(dir_path, 'xml/*.xml')
file_paths = [file_path for file_path in glob.glob(file_pattern)]
return file_paths
def get_forcefield(name=None):
if name is None:
raise ValueError('Need a force field name')
file_paths = get_forcefield_paths()
try:
ff_path = next(val for val in file_paths if name in val)
except StopIteration:
raise ValueError('Could not find force field with name {}'
' in path {}'.format(name, get_ff_path()))
return Forcefield(ff_path)
def load_OPLSAA():
return get_forcefield(name='oplsaa')
def load_TRAPPE_UA():
return get_forcefield(name='trappe-ua')
load_OPLSAA = load_OPLSAA
load_TRAPPE_UA = load_TRAPPE_UA
| <commit_before>import os
import glob
from pkg_resources import resource_filename
from foyer import Forcefield
def get_ff_path():
return [resource_filename('foyer', 'forcefields')]
def get_forcefield_paths(forcefield_name=None):
for dir_path in get_ff_path():
file_pattern = os.path.join(dir_path, 'xml/*.xml')
file_paths = [file_path for file_path in glob.glob(file_pattern)]
return file_paths
def get_forcefield(name=None):
if name is None:
raise ValueError('Need a force field name')
file_paths = get_forcefield_paths()
try:
ff_path = next(val for val in file_paths if name in val)
except StopIteration:
raise ValueError('Could not find force field with name {}'
' in path {}'.format(name, get_ff_path()))
return Forcefield(ff_path)
load_OPLSAA = get_forcefield(name='oplsaa')
load_TRAPPE_UA = get_forcefield(name='trappe-ua')
<commit_msg>Make discrete functions for each force field<commit_after>import os
import glob
from pkg_resources import resource_filename
from foyer import Forcefield
def get_ff_path():
    """Return the list of directories holding foyer's bundled force fields."""
    forcefield_dir = resource_filename('foyer', 'forcefields')
    return [forcefield_dir]
def get_forcefield_paths(forcefield_name=None):
    """Collect the paths of every bundled force field XML file.

    Args:
        forcefield_name: unused; retained for backward compatibility with
            callers that pass it.

    Returns:
        list of str: paths of all ``xml/*.xml`` files found under every
        directory reported by ``get_ff_path()``.
    """
    # Bug fix: the original rebound the result inside the loop, so only the
    # last search directory's globs were ever returned.  Accumulate instead
    # (identical output while get_ff_path() yields a single directory).
    file_paths = []
    for dir_path in get_ff_path():
        file_pattern = os.path.join(dir_path, 'xml/*.xml')
        file_paths.extend(glob.glob(file_pattern))
    return file_paths
def get_forcefield(name=None):
    """Load the bundled force field whose file path contains ``name``.

    Raises:
        ValueError: if ``name`` is not given or matches no bundled file.
    """
    if name is None:
        raise ValueError('Need a force field name')
    # Substring match against every bundled XML path; first hit wins.
    for candidate in get_forcefield_paths():
        if name in candidate:
            return Forcefield(candidate)
    raise ValueError('Could not find force field with name {}'
                     ' in path {}'.format(name, get_ff_path()))
def load_OPLSAA():
    """Return the bundled OPLS-AA force field."""
    ff_name = 'oplsaa'
    return get_forcefield(name=ff_name)
def load_TRAPPE_UA():
    """Return the bundled TraPPE-UA force field."""
    ff_name = 'trappe-ua'
    return get_forcefield(name=ff_name)
# No-op self-rebindings: retained so the module's exported names match the
# pre-refactor API surface exactly.
load_OPLSAA = load_OPLSAA
load_TRAPPE_UA = load_TRAPPE_UA
|
3fbca600b1b90ad3499d941e178aae89d1c7df70 | regulations/generator/layers/external_citation.py | regulations/generator/layers/external_citation.py | from django.template import loader
import utils
from regulations.generator.layers.base import SearchReplaceLayer
class ExternalCitationLayer(SearchReplaceLayer):
    """Search-and-replace layer that renders external citations as markup.

    NOTE(review): this revision relies on the bare ``import utils``
    (implicit relative import), which only resolves under Python 2.
    """

    # Identifiers used to register/select this layer.
    shorthand = 'external'
    data_source = 'external-citations'

    def __init__(self, layer):
        # Raw layer payload for the regulation being rendered.
        self.layer = layer
        self.template = loader.get_template(
            'regulations/layers/external_citation.html')

    def replacements_for(self, text, data):
        # Generator protocol required by SearchReplaceLayer: yield the
        # rendered replacement for this citation datum.
        yield utils.render_template(self.template, data)
| from django.template import loader
from regulations.generator.layers import utils
from regulations.generator.layers.base import SearchReplaceLayer
class ExternalCitationLayer(SearchReplaceLayer):
    """Layer that swaps citation markers for rendered external-citation markup."""

    shorthand = 'external'
    data_source = 'external-citations'

    def __init__(self, layer):
        # Raw layer payload for the regulation being rendered.
        self.layer = layer
        template_name = 'regulations/layers/external_citation.html'
        self.template = loader.get_template(template_name)

    def replacements_for(self, text, data):
        """Yield the rendered markup replacing this citation."""
        markup = utils.render_template(self.template, data)
        yield markup
| Make external citations Python3 compatible | Make external citations Python3 compatible
| Python | cc0-1.0 | 18F/regulations-site,18F/regulations-site,tadhg-ohiggins/regulations-site,eregs/regulations-site,18F/regulations-site,tadhg-ohiggins/regulations-site,eregs/regulations-site,tadhg-ohiggins/regulations-site,eregs/regulations-site,18F/regulations-site,tadhg-ohiggins/regulations-site,eregs/regulations-site | from django.template import loader
import utils
from regulations.generator.layers.base import SearchReplaceLayer
class ExternalCitationLayer(SearchReplaceLayer):
shorthand = 'external'
data_source = 'external-citations'
def __init__(self, layer):
self.layer = layer
self.template = loader.get_template(
'regulations/layers/external_citation.html')
def replacements_for(self, text, data):
yield utils.render_template(self.template, data)
Make external citations Python3 compatible | from django.template import loader
from regulations.generator.layers import utils
from regulations.generator.layers.base import SearchReplaceLayer
class ExternalCitationLayer(SearchReplaceLayer):
shorthand = 'external'
data_source = 'external-citations'
def __init__(self, layer):
self.layer = layer
self.template = loader.get_template(
'regulations/layers/external_citation.html')
def replacements_for(self, text, data):
yield utils.render_template(self.template, data)
| <commit_before>from django.template import loader
import utils
from regulations.generator.layers.base import SearchReplaceLayer
class ExternalCitationLayer(SearchReplaceLayer):
shorthand = 'external'
data_source = 'external-citations'
def __init__(self, layer):
self.layer = layer
self.template = loader.get_template(
'regulations/layers/external_citation.html')
def replacements_for(self, text, data):
yield utils.render_template(self.template, data)
<commit_msg>Make external citations Python3 compatible<commit_after> | from django.template import loader
from regulations.generator.layers import utils
from regulations.generator.layers.base import SearchReplaceLayer
class ExternalCitationLayer(SearchReplaceLayer):
shorthand = 'external'
data_source = 'external-citations'
def __init__(self, layer):
self.layer = layer
self.template = loader.get_template(
'regulations/layers/external_citation.html')
def replacements_for(self, text, data):
yield utils.render_template(self.template, data)
| from django.template import loader
import utils
from regulations.generator.layers.base import SearchReplaceLayer
class ExternalCitationLayer(SearchReplaceLayer):
shorthand = 'external'
data_source = 'external-citations'
def __init__(self, layer):
self.layer = layer
self.template = loader.get_template(
'regulations/layers/external_citation.html')
def replacements_for(self, text, data):
yield utils.render_template(self.template, data)
Make external citations Python3 compatiblefrom django.template import loader
from regulations.generator.layers import utils
from regulations.generator.layers.base import SearchReplaceLayer
class ExternalCitationLayer(SearchReplaceLayer):
shorthand = 'external'
data_source = 'external-citations'
def __init__(self, layer):
self.layer = layer
self.template = loader.get_template(
'regulations/layers/external_citation.html')
def replacements_for(self, text, data):
yield utils.render_template(self.template, data)
| <commit_before>from django.template import loader
import utils
from regulations.generator.layers.base import SearchReplaceLayer
class ExternalCitationLayer(SearchReplaceLayer):
shorthand = 'external'
data_source = 'external-citations'
def __init__(self, layer):
self.layer = layer
self.template = loader.get_template(
'regulations/layers/external_citation.html')
def replacements_for(self, text, data):
yield utils.render_template(self.template, data)
<commit_msg>Make external citations Python3 compatible<commit_after>from django.template import loader
from regulations.generator.layers import utils
from regulations.generator.layers.base import SearchReplaceLayer
class ExternalCitationLayer(SearchReplaceLayer):
shorthand = 'external'
data_source = 'external-citations'
def __init__(self, layer):
self.layer = layer
self.template = loader.get_template(
'regulations/layers/external_citation.html')
def replacements_for(self, text, data):
yield utils.render_template(self.template, data)
|
d7f25a05622f115babacc74e05e20f8c147867c3 | accelerator_abstract/models/base_application_answer.py | accelerator_abstract/models/base_application_answer.py | # MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from __future__ import unicode_literals
import swapper
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from accelerator_abstract.models.accelerator_model import AcceleratorModel
@python_2_unicode_compatible
class BaseApplicationAnswer(AcceleratorModel):
    """Abstract model storing one answer to one application question.

    Concrete tables come from swapper; ``unique_together`` guarantees at
    most one answer per (application, question) pair.
    """

    # The application this answer belongs to.
    application = models.ForeignKey(to=swapper.get_model_name(
        AcceleratorModel.Meta.app_label, "Application"),
        on_delete=models.CASCADE)
    # The question being answered.
    application_question = models.ForeignKey(swapper.get_model_name(
        AcceleratorModel.Meta.app_label, 'ApplicationQuestion'),
        on_delete=models.CASCADE)
    # NOTE(review): CharField caps answers at 2000 characters; longer
    # submissions fail validation.
    answer_text = models.CharField(max_length=2000, blank=True)

    class Meta(AcceleratorModel.Meta):
        verbose_name_plural = 'Application Answers'
        db_table = '{}_applicationanswer'.format(
            AcceleratorModel.Meta.app_label)
        abstract = True
        unique_together = ('application', 'application_question')

    def __str__(self):
        return "Answer to question %s from %s" % (
            self.application_question.question_number,
            self.application.startup.name)
| # MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from __future__ import unicode_literals
import swapper
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from accelerator_abstract.models.accelerator_model import AcceleratorModel
@python_2_unicode_compatible
class BaseApplicationAnswer(AcceleratorModel):
    """Abstract model storing one answer to one application question.

    Concrete tables come from swapper; ``unique_together`` guarantees at
    most one answer per (application, question) pair.
    """

    # The application this answer belongs to.
    application = models.ForeignKey(to=swapper.get_model_name(
        AcceleratorModel.Meta.app_label, "Application"),
        on_delete=models.CASCADE)
    # The question being answered.
    application_question = models.ForeignKey(swapper.get_model_name(
        AcceleratorModel.Meta.app_label, 'ApplicationQuestion'),
        on_delete=models.CASCADE)
    # Free-form answer body; TextField imposes no database length cap.
    answer_text = models.TextField(blank=True)

    class Meta(AcceleratorModel.Meta):
        verbose_name_plural = 'Application Answers'
        db_table = '{}_applicationanswer'.format(
            AcceleratorModel.Meta.app_label)
        abstract = True
        unique_together = ('application', 'application_question')

    def __str__(self):
        return "Answer to question %s from %s" % (
            self.application_question.question_number,
            self.application.startup.name)
| Change field type for answer_text | [AC-8296] Change field type for answer_text
| Python | mit | masschallenge/django-accelerator,masschallenge/django-accelerator | # MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from __future__ import unicode_literals
import swapper
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from accelerator_abstract.models.accelerator_model import AcceleratorModel
@python_2_unicode_compatible
class BaseApplicationAnswer(AcceleratorModel):
application = models.ForeignKey(to=swapper.get_model_name(
AcceleratorModel.Meta.app_label, "Application"),
on_delete=models.CASCADE)
application_question = models.ForeignKey(swapper.get_model_name(
AcceleratorModel.Meta.app_label, 'ApplicationQuestion'),
on_delete=models.CASCADE)
answer_text = models.CharField(max_length=2000, blank=True)
class Meta(AcceleratorModel.Meta):
verbose_name_plural = 'Application Answers'
db_table = '{}_applicationanswer'.format(
AcceleratorModel.Meta.app_label)
abstract = True
unique_together = ('application', 'application_question')
def __str__(self):
return "Answer to question %s from %s" % (
self.application_question.question_number,
self.application.startup.name)
[AC-8296] Change field type for answer_text | # MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from __future__ import unicode_literals
import swapper
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from accelerator_abstract.models.accelerator_model import AcceleratorModel
@python_2_unicode_compatible
class BaseApplicationAnswer(AcceleratorModel):
application = models.ForeignKey(to=swapper.get_model_name(
AcceleratorModel.Meta.app_label, "Application"),
on_delete=models.CASCADE)
application_question = models.ForeignKey(swapper.get_model_name(
AcceleratorModel.Meta.app_label, 'ApplicationQuestion'),
on_delete=models.CASCADE)
answer_text = models.TextField(blank=True)
class Meta(AcceleratorModel.Meta):
verbose_name_plural = 'Application Answers'
db_table = '{}_applicationanswer'.format(
AcceleratorModel.Meta.app_label)
abstract = True
unique_together = ('application', 'application_question')
def __str__(self):
return "Answer to question %s from %s" % (
self.application_question.question_number,
self.application.startup.name)
| <commit_before># MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from __future__ import unicode_literals
import swapper
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from accelerator_abstract.models.accelerator_model import AcceleratorModel
@python_2_unicode_compatible
class BaseApplicationAnswer(AcceleratorModel):
application = models.ForeignKey(to=swapper.get_model_name(
AcceleratorModel.Meta.app_label, "Application"),
on_delete=models.CASCADE)
application_question = models.ForeignKey(swapper.get_model_name(
AcceleratorModel.Meta.app_label, 'ApplicationQuestion'),
on_delete=models.CASCADE)
answer_text = models.CharField(max_length=2000, blank=True)
class Meta(AcceleratorModel.Meta):
verbose_name_plural = 'Application Answers'
db_table = '{}_applicationanswer'.format(
AcceleratorModel.Meta.app_label)
abstract = True
unique_together = ('application', 'application_question')
def __str__(self):
return "Answer to question %s from %s" % (
self.application_question.question_number,
self.application.startup.name)
<commit_msg>[AC-8296] Change field type for answer_text<commit_after> | # MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from __future__ import unicode_literals
import swapper
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from accelerator_abstract.models.accelerator_model import AcceleratorModel
@python_2_unicode_compatible
class BaseApplicationAnswer(AcceleratorModel):
application = models.ForeignKey(to=swapper.get_model_name(
AcceleratorModel.Meta.app_label, "Application"),
on_delete=models.CASCADE)
application_question = models.ForeignKey(swapper.get_model_name(
AcceleratorModel.Meta.app_label, 'ApplicationQuestion'),
on_delete=models.CASCADE)
answer_text = models.TextField(blank=True)
class Meta(AcceleratorModel.Meta):
verbose_name_plural = 'Application Answers'
db_table = '{}_applicationanswer'.format(
AcceleratorModel.Meta.app_label)
abstract = True
unique_together = ('application', 'application_question')
def __str__(self):
return "Answer to question %s from %s" % (
self.application_question.question_number,
self.application.startup.name)
| # MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from __future__ import unicode_literals
import swapper
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from accelerator_abstract.models.accelerator_model import AcceleratorModel
@python_2_unicode_compatible
class BaseApplicationAnswer(AcceleratorModel):
application = models.ForeignKey(to=swapper.get_model_name(
AcceleratorModel.Meta.app_label, "Application"),
on_delete=models.CASCADE)
application_question = models.ForeignKey(swapper.get_model_name(
AcceleratorModel.Meta.app_label, 'ApplicationQuestion'),
on_delete=models.CASCADE)
answer_text = models.CharField(max_length=2000, blank=True)
class Meta(AcceleratorModel.Meta):
verbose_name_plural = 'Application Answers'
db_table = '{}_applicationanswer'.format(
AcceleratorModel.Meta.app_label)
abstract = True
unique_together = ('application', 'application_question')
def __str__(self):
return "Answer to question %s from %s" % (
self.application_question.question_number,
self.application.startup.name)
[AC-8296] Change field type for answer_text# MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from __future__ import unicode_literals
import swapper
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from accelerator_abstract.models.accelerator_model import AcceleratorModel
@python_2_unicode_compatible
class BaseApplicationAnswer(AcceleratorModel):
application = models.ForeignKey(to=swapper.get_model_name(
AcceleratorModel.Meta.app_label, "Application"),
on_delete=models.CASCADE)
application_question = models.ForeignKey(swapper.get_model_name(
AcceleratorModel.Meta.app_label, 'ApplicationQuestion'),
on_delete=models.CASCADE)
answer_text = models.TextField(blank=True)
class Meta(AcceleratorModel.Meta):
verbose_name_plural = 'Application Answers'
db_table = '{}_applicationanswer'.format(
AcceleratorModel.Meta.app_label)
abstract = True
unique_together = ('application', 'application_question')
def __str__(self):
return "Answer to question %s from %s" % (
self.application_question.question_number,
self.application.startup.name)
| <commit_before># MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from __future__ import unicode_literals
import swapper
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from accelerator_abstract.models.accelerator_model import AcceleratorModel
@python_2_unicode_compatible
class BaseApplicationAnswer(AcceleratorModel):
application = models.ForeignKey(to=swapper.get_model_name(
AcceleratorModel.Meta.app_label, "Application"),
on_delete=models.CASCADE)
application_question = models.ForeignKey(swapper.get_model_name(
AcceleratorModel.Meta.app_label, 'ApplicationQuestion'),
on_delete=models.CASCADE)
answer_text = models.CharField(max_length=2000, blank=True)
class Meta(AcceleratorModel.Meta):
verbose_name_plural = 'Application Answers'
db_table = '{}_applicationanswer'.format(
AcceleratorModel.Meta.app_label)
abstract = True
unique_together = ('application', 'application_question')
def __str__(self):
return "Answer to question %s from %s" % (
self.application_question.question_number,
self.application.startup.name)
<commit_msg>[AC-8296] Change field type for answer_text<commit_after># MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from __future__ import unicode_literals
import swapper
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from accelerator_abstract.models.accelerator_model import AcceleratorModel
@python_2_unicode_compatible
class BaseApplicationAnswer(AcceleratorModel):
application = models.ForeignKey(to=swapper.get_model_name(
AcceleratorModel.Meta.app_label, "Application"),
on_delete=models.CASCADE)
application_question = models.ForeignKey(swapper.get_model_name(
AcceleratorModel.Meta.app_label, 'ApplicationQuestion'),
on_delete=models.CASCADE)
answer_text = models.TextField(blank=True)
class Meta(AcceleratorModel.Meta):
verbose_name_plural = 'Application Answers'
db_table = '{}_applicationanswer'.format(
AcceleratorModel.Meta.app_label)
abstract = True
unique_together = ('application', 'application_question')
def __str__(self):
return "Answer to question %s from %s" % (
self.application_question.question_number,
self.application.startup.name)
|
6963b2d42cfee97b57c512a2776df02604da8e5f | polling_stations/apps/data_importers/management/commands/import_reading.py | polling_stations/apps/data_importers/management/commands/import_reading.py | from data_importers.management.commands import BaseXpressDemocracyClubCsvImporter
class Command(BaseXpressDemocracyClubCsvImporter):
    """Import Reading (RDG) polling station data from the Xpress TSV export."""

    council_id = "RDG"
    # The same TSV file feeds both the address and station tables.
    addresses_name = (
        "2022-05-05/2022-03-01T15:11:53.624789/Democracy_Club__05May2022.tsv"
    )
    stations_name = (
        "2022-05-05/2022-03-01T15:11:53.624789/Democracy_Club__05May2022.tsv"
    )
    elections = ["2022-05-05"]
    csv_delimiter = "\t"

    def address_record_to_dict(self, record):
        # Skip records in these postcodes entirely (presumably problematic
        # source data — confirm against importer history) before delegating
        # to the generic conversion.
        if record.addressline6 in [
            "RG30 4RX",
            "RG1 3NF",
            "RG4 8ES",
            "RG2 7PS",
        ]:
            return None
        return super().address_record_to_dict(record)
| from data_importers.management.commands import BaseXpressDemocracyClubCsvImporter
class Command(BaseXpressDemocracyClubCsvImporter):
    """Importer for Reading (RDG) polling station data, May 2022 election."""

    council_id = "RDG"
    addresses_name = (
        "2022-05-05/2022-03-01T15:11:53.624789/Democracy_Club__05May2022.tsv"
    )
    stations_name = (
        "2022-05-05/2022-03-01T15:11:53.624789/Democracy_Club__05May2022.tsv"
    )
    elections = ["2022-05-05"]
    csv_delimiter = "\t"

    def address_record_to_dict(self, record):
        """Apply Reading-specific corrections before the generic conversion."""
        # One property carries a wrong postcode in the source; patch it.
        uprn = record.property_urn.strip().lstrip("0")
        if uprn == "310088234":
            record = record._replace(addressline6="RG1 1SN")
        # Records in these postcodes are excluded from the import.
        excluded_postcodes = ("RG30 4RX", "RG4 8ES", "RG2 7PS")
        if record.addressline6 in excluded_postcodes:
            return None
        return super().address_record_to_dict(record)
| Fix an incorrect postcode for Reading | Fix an incorrect postcode for Reading
| Python | bsd-3-clause | DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations | from data_importers.management.commands import BaseXpressDemocracyClubCsvImporter
class Command(BaseXpressDemocracyClubCsvImporter):
council_id = "RDG"
addresses_name = (
"2022-05-05/2022-03-01T15:11:53.624789/Democracy_Club__05May2022.tsv"
)
stations_name = (
"2022-05-05/2022-03-01T15:11:53.624789/Democracy_Club__05May2022.tsv"
)
elections = ["2022-05-05"]
csv_delimiter = "\t"
def address_record_to_dict(self, record):
if record.addressline6 in [
"RG30 4RX",
"RG1 3NF",
"RG4 8ES",
"RG2 7PS",
]:
return None
return super().address_record_to_dict(record)
Fix an incorrect postcode for Reading | from data_importers.management.commands import BaseXpressDemocracyClubCsvImporter
class Command(BaseXpressDemocracyClubCsvImporter):
council_id = "RDG"
addresses_name = (
"2022-05-05/2022-03-01T15:11:53.624789/Democracy_Club__05May2022.tsv"
)
stations_name = (
"2022-05-05/2022-03-01T15:11:53.624789/Democracy_Club__05May2022.tsv"
)
elections = ["2022-05-05"]
csv_delimiter = "\t"
def address_record_to_dict(self, record):
if record.property_urn.strip().lstrip("0") == "310088234":
record = record._replace(addressline6="RG1 1SN")
if record.addressline6 in [
"RG30 4RX",
"RG4 8ES",
"RG2 7PS",
]:
return None
return super().address_record_to_dict(record)
| <commit_before>from data_importers.management.commands import BaseXpressDemocracyClubCsvImporter
class Command(BaseXpressDemocracyClubCsvImporter):
council_id = "RDG"
addresses_name = (
"2022-05-05/2022-03-01T15:11:53.624789/Democracy_Club__05May2022.tsv"
)
stations_name = (
"2022-05-05/2022-03-01T15:11:53.624789/Democracy_Club__05May2022.tsv"
)
elections = ["2022-05-05"]
csv_delimiter = "\t"
def address_record_to_dict(self, record):
if record.addressline6 in [
"RG30 4RX",
"RG1 3NF",
"RG4 8ES",
"RG2 7PS",
]:
return None
return super().address_record_to_dict(record)
<commit_msg>Fix an incorrect postcode for Reading<commit_after> | from data_importers.management.commands import BaseXpressDemocracyClubCsvImporter
class Command(BaseXpressDemocracyClubCsvImporter):
council_id = "RDG"
addresses_name = (
"2022-05-05/2022-03-01T15:11:53.624789/Democracy_Club__05May2022.tsv"
)
stations_name = (
"2022-05-05/2022-03-01T15:11:53.624789/Democracy_Club__05May2022.tsv"
)
elections = ["2022-05-05"]
csv_delimiter = "\t"
def address_record_to_dict(self, record):
if record.property_urn.strip().lstrip("0") == "310088234":
record = record._replace(addressline6="RG1 1SN")
if record.addressline6 in [
"RG30 4RX",
"RG4 8ES",
"RG2 7PS",
]:
return None
return super().address_record_to_dict(record)
| from data_importers.management.commands import BaseXpressDemocracyClubCsvImporter
class Command(BaseXpressDemocracyClubCsvImporter):
council_id = "RDG"
addresses_name = (
"2022-05-05/2022-03-01T15:11:53.624789/Democracy_Club__05May2022.tsv"
)
stations_name = (
"2022-05-05/2022-03-01T15:11:53.624789/Democracy_Club__05May2022.tsv"
)
elections = ["2022-05-05"]
csv_delimiter = "\t"
def address_record_to_dict(self, record):
if record.addressline6 in [
"RG30 4RX",
"RG1 3NF",
"RG4 8ES",
"RG2 7PS",
]:
return None
return super().address_record_to_dict(record)
Fix an incorrect postcode for Readingfrom data_importers.management.commands import BaseXpressDemocracyClubCsvImporter
class Command(BaseXpressDemocracyClubCsvImporter):
council_id = "RDG"
addresses_name = (
"2022-05-05/2022-03-01T15:11:53.624789/Democracy_Club__05May2022.tsv"
)
stations_name = (
"2022-05-05/2022-03-01T15:11:53.624789/Democracy_Club__05May2022.tsv"
)
elections = ["2022-05-05"]
csv_delimiter = "\t"
def address_record_to_dict(self, record):
if record.property_urn.strip().lstrip("0") == "310088234":
record = record._replace(addressline6="RG1 1SN")
if record.addressline6 in [
"RG30 4RX",
"RG4 8ES",
"RG2 7PS",
]:
return None
return super().address_record_to_dict(record)
| <commit_before>from data_importers.management.commands import BaseXpressDemocracyClubCsvImporter
class Command(BaseXpressDemocracyClubCsvImporter):
council_id = "RDG"
addresses_name = (
"2022-05-05/2022-03-01T15:11:53.624789/Democracy_Club__05May2022.tsv"
)
stations_name = (
"2022-05-05/2022-03-01T15:11:53.624789/Democracy_Club__05May2022.tsv"
)
elections = ["2022-05-05"]
csv_delimiter = "\t"
def address_record_to_dict(self, record):
if record.addressline6 in [
"RG30 4RX",
"RG1 3NF",
"RG4 8ES",
"RG2 7PS",
]:
return None
return super().address_record_to_dict(record)
<commit_msg>Fix an incorrect postcode for Reading<commit_after>from data_importers.management.commands import BaseXpressDemocracyClubCsvImporter
class Command(BaseXpressDemocracyClubCsvImporter):
council_id = "RDG"
addresses_name = (
"2022-05-05/2022-03-01T15:11:53.624789/Democracy_Club__05May2022.tsv"
)
stations_name = (
"2022-05-05/2022-03-01T15:11:53.624789/Democracy_Club__05May2022.tsv"
)
elections = ["2022-05-05"]
csv_delimiter = "\t"
def address_record_to_dict(self, record):
if record.property_urn.strip().lstrip("0") == "310088234":
record = record._replace(addressline6="RG1 1SN")
if record.addressline6 in [
"RG30 4RX",
"RG4 8ES",
"RG2 7PS",
]:
return None
return super().address_record_to_dict(record)
|
d994294d2c5297635fa36bf7911fbaef6e8e0345 | admin/base/urls.py | admin/base/urls.py | from django.conf.urls import include, url, patterns
from django.contrib import admin
from settings import ADMIN_BASE
from . import views
# URL prefix under which the whole admin app is mounted.
base_pattern = '^{}'.format(ADMIN_BASE)

urlpatterns = [
    ### ADMIN ###
    url(base_pattern,
        include(patterns('',
            url(r'^$', views.home, name='home'),
            url(r'^django_admin/', include(admin.site.urls)),
            url(r'^spam/', include('admin.spam.urls', namespace='spam')),
            url(r'^auth/', include('admin.common_auth.urls', namespace='auth')),
            url(r'^prereg/', include('admin.pre_reg.urls', namespace='pre_reg')),
            # Password-reset confirmation link: uidb64 + token from the email.
            url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',
                views.password_reset_confirm_custom, name='password_reset_confirm'),
            url(r'^reset/done/$', views.password_reset_done,
                name='password_reset_complete'),
            )
        )
    )
]
| from django.conf.urls import include, url, patterns
from django.contrib import admin
from django.views.generic import RedirectView
from settings import ADMIN_BASE
from . import views
# URL prefix under which the whole admin app is mounted.
base_pattern = '^{}'.format(ADMIN_BASE)

urlpatterns = [
    ### ADMIN ###
    url(base_pattern,
        include(patterns('',
            url(r'^$', views.home, name='home'),
            url(r'^django_admin/', include(admin.site.urls)),
            url(r'^spam/', include('admin.spam.urls', namespace='spam')),
            url(r'^auth/', include('admin.common_auth.urls', namespace='auth')),
            url(r'^prereg/', include('admin.pre_reg.urls', namespace='pre_reg')),
            # Password-reset confirmation link: uidb64 + token from the email.
            url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',
                views.password_reset_confirm_custom, name='password_reset_confirm'),
            url(r'^reset/done/$', views.password_reset_done,
                name='password_reset_complete'),
            )
        )
    ),
    # Bare root URL redirects into the admin app.
    url(r'^$', RedirectView.as_view(url='/admin/')),
]
| Add redirect to /admin/ on Admin app w/ empty URL | Add redirect to /admin/ on Admin app w/ empty URL
| Python | apache-2.0 | crcresearch/osf.io,jnayak1/osf.io,felliott/osf.io,zachjanicki/osf.io,icereval/osf.io,kwierman/osf.io,zamattiac/osf.io,wearpants/osf.io,cwisecarver/osf.io,leb2dg/osf.io,RomanZWang/osf.io,caseyrollins/osf.io,brianjgeiger/osf.io,saradbowman/osf.io,CenterForOpenScience/osf.io,chrisseto/osf.io,asanfilippo7/osf.io,brandonPurvis/osf.io,KAsante95/osf.io,mfraezz/osf.io,DanielSBrown/osf.io,SSJohns/osf.io,samchrisinger/osf.io,TomHeatwole/osf.io,zachjanicki/osf.io,acshi/osf.io,Johnetordoff/osf.io,monikagrabowska/osf.io,brianjgeiger/osf.io,pattisdr/osf.io,RomanZWang/osf.io,felliott/osf.io,felliott/osf.io,monikagrabowska/osf.io,caneruguz/osf.io,samchrisinger/osf.io,adlius/osf.io,adlius/osf.io,kwierman/osf.io,alexschiller/osf.io,rdhyee/osf.io,doublebits/osf.io,caseyrollins/osf.io,mattclark/osf.io,monikagrabowska/osf.io,TomBaxter/osf.io,baylee-d/osf.io,amyshi188/osf.io,amyshi188/osf.io,KAsante95/osf.io,wearpants/osf.io,Ghalko/osf.io,billyhunt/osf.io,KAsante95/osf.io,emetsger/osf.io,samchrisinger/osf.io,emetsger/osf.io,cslzchen/osf.io,emetsger/osf.io,aaxelb/osf.io,aaxelb/osf.io,KAsante95/osf.io,RomanZWang/osf.io,baylee-d/osf.io,kwierman/osf.io,acshi/osf.io,adlius/osf.io,Johnetordoff/osf.io,samchrisinger/osf.io,amyshi188/osf.io,HalcyonChimera/osf.io,GageGaskins/osf.io,chrisseto/osf.io,mattclark/osf.io,chrisseto/osf.io,caneruguz/osf.io,monikagrabowska/osf.io,chennan47/osf.io,Ghalko/osf.io,pattisdr/osf.io,cslzchen/osf.io,doublebits/osf.io,SSJohns/osf.io,Johnetordoff/osf.io,hmoco/osf.io,jnayak1/osf.io,caseyrollins/osf.io,binoculars/osf.io,HalcyonChimera/osf.io,binoculars/osf.io,TomBaxter/osf.io,kch8qx/osf.io,mluke93/osf.io,chennan47/osf.io,Nesiehr/osf.io,acshi/osf.io,mluo613/osf.io,Nesiehr/osf.io,billyhunt/osf.io,zachjanicki/osf.io,CenterForOpenScience/osf.io,erinspace/osf.io,amyshi188/osf.io,asanfilippo7/osf.io,alexschiller/osf.io,kch8qx/osf.io,monikagrabowska/osf.io,chrisseto/osf.io,acshi/osf.io,caneruguz/osf.io,asanfilippo7/osf.io,aaxelb/osf.io,CenterForOpenSc
ience/osf.io,SSJohns/osf.io,erinspace/osf.io,crcresearch/osf.io,acshi/osf.io,rdhyee/osf.io,alexschiller/osf.io,TomHeatwole/osf.io,brianjgeiger/osf.io,GageGaskins/osf.io,cwisecarver/osf.io,laurenrevere/osf.io,caneruguz/osf.io,abought/osf.io,adlius/osf.io,alexschiller/osf.io,rdhyee/osf.io,billyhunt/osf.io,CenterForOpenScience/osf.io,brianjgeiger/osf.io,KAsante95/osf.io,brandonPurvis/osf.io,mfraezz/osf.io,leb2dg/osf.io,DanielSBrown/osf.io,mluo613/osf.io,abought/osf.io,cwisecarver/osf.io,wearpants/osf.io,sloria/osf.io,erinspace/osf.io,kwierman/osf.io,icereval/osf.io,Johnetordoff/osf.io,abought/osf.io,TomBaxter/osf.io,crcresearch/osf.io,aaxelb/osf.io,wearpants/osf.io,billyhunt/osf.io,abought/osf.io,mluo613/osf.io,doublebits/osf.io,zamattiac/osf.io,GageGaskins/osf.io,sloria/osf.io,GageGaskins/osf.io,Nesiehr/osf.io,felliott/osf.io,zachjanicki/osf.io,saradbowman/osf.io,Nesiehr/osf.io,baylee-d/osf.io,RomanZWang/osf.io,TomHeatwole/osf.io,leb2dg/osf.io,TomHeatwole/osf.io,laurenrevere/osf.io,kch8qx/osf.io,alexschiller/osf.io,kch8qx/osf.io,binoculars/osf.io,cwisecarver/osf.io,RomanZWang/osf.io,mfraezz/osf.io,doublebits/osf.io,zamattiac/osf.io,sloria/osf.io,brandonPurvis/osf.io,rdhyee/osf.io,DanielSBrown/osf.io,zamattiac/osf.io,cslzchen/osf.io,GageGaskins/osf.io,leb2dg/osf.io,jnayak1/osf.io,laurenrevere/osf.io,jnayak1/osf.io,emetsger/osf.io,kch8qx/osf.io,SSJohns/osf.io,HalcyonChimera/osf.io,hmoco/osf.io,Ghalko/osf.io,mluo613/osf.io,HalcyonChimera/osf.io,hmoco/osf.io,Ghalko/osf.io,billyhunt/osf.io,hmoco/osf.io,cslzchen/osf.io,mattclark/osf.io,mluo613/osf.io,icereval/osf.io,DanielSBrown/osf.io,brandonPurvis/osf.io,mluke93/osf.io,mluke93/osf.io,brandonPurvis/osf.io,mluke93/osf.io,chennan47/osf.io,doublebits/osf.io,mfraezz/osf.io,asanfilippo7/osf.io,pattisdr/osf.io | from django.conf.urls import include, url, patterns
from django.contrib import admin
from settings import ADMIN_BASE
from . import views
base_pattern = '^{}'.format(ADMIN_BASE)
urlpatterns = [
### ADMIN ###
url(base_pattern,
include(patterns('',
url(r'^$', views.home, name='home'),
url(r'^django_admin/', include(admin.site.urls)),
url(r'^spam/', include('admin.spam.urls', namespace='spam')),
url(r'^auth/', include('admin.common_auth.urls', namespace='auth')),
url(r'^prereg/', include('admin.pre_reg.urls', namespace='pre_reg')),
url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',
views.password_reset_confirm_custom, name='password_reset_confirm'),
url(r'^reset/done/$', views.password_reset_done,
name='password_reset_complete'),
)
)
)
]
Add redirect to /admin/ on Admin app w/ empty URL | from django.conf.urls import include, url, patterns
from django.contrib import admin
from django.views.generic import RedirectView
from settings import ADMIN_BASE
from . import views
base_pattern = '^{}'.format(ADMIN_BASE)
urlpatterns = [
### ADMIN ###
url(base_pattern,
include(patterns('',
url(r'^$', views.home, name='home'),
url(r'^django_admin/', include(admin.site.urls)),
url(r'^spam/', include('admin.spam.urls', namespace='spam')),
url(r'^auth/', include('admin.common_auth.urls', namespace='auth')),
url(r'^prereg/', include('admin.pre_reg.urls', namespace='pre_reg')),
url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',
views.password_reset_confirm_custom, name='password_reset_confirm'),
url(r'^reset/done/$', views.password_reset_done,
name='password_reset_complete'),
)
)
),
url(r'^$', RedirectView.as_view(url='/admin/')),
]
| <commit_before>from django.conf.urls import include, url, patterns
from django.contrib import admin
from settings import ADMIN_BASE
from . import views
base_pattern = '^{}'.format(ADMIN_BASE)
urlpatterns = [
### ADMIN ###
url(base_pattern,
include(patterns('',
url(r'^$', views.home, name='home'),
url(r'^django_admin/', include(admin.site.urls)),
url(r'^spam/', include('admin.spam.urls', namespace='spam')),
url(r'^auth/', include('admin.common_auth.urls', namespace='auth')),
url(r'^prereg/', include('admin.pre_reg.urls', namespace='pre_reg')),
url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',
views.password_reset_confirm_custom, name='password_reset_confirm'),
url(r'^reset/done/$', views.password_reset_done,
name='password_reset_complete'),
)
)
)
]
<commit_msg>Add redirect to /admin/ on Admin app w/ empty URL<commit_after> | from django.conf.urls import include, url, patterns
from django.contrib import admin
from django.views.generic import RedirectView
from settings import ADMIN_BASE
from . import views
base_pattern = '^{}'.format(ADMIN_BASE)
urlpatterns = [
### ADMIN ###
url(base_pattern,
include(patterns('',
url(r'^$', views.home, name='home'),
url(r'^django_admin/', include(admin.site.urls)),
url(r'^spam/', include('admin.spam.urls', namespace='spam')),
url(r'^auth/', include('admin.common_auth.urls', namespace='auth')),
url(r'^prereg/', include('admin.pre_reg.urls', namespace='pre_reg')),
url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',
views.password_reset_confirm_custom, name='password_reset_confirm'),
url(r'^reset/done/$', views.password_reset_done,
name='password_reset_complete'),
)
)
),
url(r'^$', RedirectView.as_view(url='/admin/')),
]
| from django.conf.urls import include, url, patterns
from django.contrib import admin
from settings import ADMIN_BASE
from . import views
base_pattern = '^{}'.format(ADMIN_BASE)
urlpatterns = [
### ADMIN ###
url(base_pattern,
include(patterns('',
url(r'^$', views.home, name='home'),
url(r'^django_admin/', include(admin.site.urls)),
url(r'^spam/', include('admin.spam.urls', namespace='spam')),
url(r'^auth/', include('admin.common_auth.urls', namespace='auth')),
url(r'^prereg/', include('admin.pre_reg.urls', namespace='pre_reg')),
url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',
views.password_reset_confirm_custom, name='password_reset_confirm'),
url(r'^reset/done/$', views.password_reset_done,
name='password_reset_complete'),
)
)
)
]
Add redirect to /admin/ on Admin app w/ empty URLfrom django.conf.urls import include, url, patterns
from django.contrib import admin
from django.views.generic import RedirectView
from settings import ADMIN_BASE
from . import views
base_pattern = '^{}'.format(ADMIN_BASE)
urlpatterns = [
### ADMIN ###
url(base_pattern,
include(patterns('',
url(r'^$', views.home, name='home'),
url(r'^django_admin/', include(admin.site.urls)),
url(r'^spam/', include('admin.spam.urls', namespace='spam')),
url(r'^auth/', include('admin.common_auth.urls', namespace='auth')),
url(r'^prereg/', include('admin.pre_reg.urls', namespace='pre_reg')),
url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',
views.password_reset_confirm_custom, name='password_reset_confirm'),
url(r'^reset/done/$', views.password_reset_done,
name='password_reset_complete'),
)
)
),
url(r'^$', RedirectView.as_view(url='/admin/')),
]
| <commit_before>from django.conf.urls import include, url, patterns
from django.contrib import admin
from settings import ADMIN_BASE
from . import views
base_pattern = '^{}'.format(ADMIN_BASE)
urlpatterns = [
### ADMIN ###
url(base_pattern,
include(patterns('',
url(r'^$', views.home, name='home'),
url(r'^django_admin/', include(admin.site.urls)),
url(r'^spam/', include('admin.spam.urls', namespace='spam')),
url(r'^auth/', include('admin.common_auth.urls', namespace='auth')),
url(r'^prereg/', include('admin.pre_reg.urls', namespace='pre_reg')),
url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',
views.password_reset_confirm_custom, name='password_reset_confirm'),
url(r'^reset/done/$', views.password_reset_done,
name='password_reset_complete'),
)
)
)
]
<commit_msg>Add redirect to /admin/ on Admin app w/ empty URL<commit_after>from django.conf.urls import include, url, patterns
from django.contrib import admin
from django.views.generic import RedirectView
from settings import ADMIN_BASE
from . import views
base_pattern = '^{}'.format(ADMIN_BASE)
urlpatterns = [
### ADMIN ###
url(base_pattern,
include(patterns('',
url(r'^$', views.home, name='home'),
url(r'^django_admin/', include(admin.site.urls)),
url(r'^spam/', include('admin.spam.urls', namespace='spam')),
url(r'^auth/', include('admin.common_auth.urls', namespace='auth')),
url(r'^prereg/', include('admin.pre_reg.urls', namespace='pre_reg')),
url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',
views.password_reset_confirm_custom, name='password_reset_confirm'),
url(r'^reset/done/$', views.password_reset_done,
name='password_reset_complete'),
)
)
),
url(r'^$', RedirectView.as_view(url='/admin/')),
]
|
34061c55be17a19846833148e2cf6e015918efae | frameworks/C/onion/setup.py | frameworks/C/onion/setup.py | import subprocess
import sys
import os
import setup_util
def start(args, logfile, errfile):
setup_util.replace_text("onion/hello.c", "mysql_real_connect\(data.db\[i\], \".*\",", "mysql_real_connect(data.db[i], \"" + args.database_host + "\",")
subprocess.call("rm *.o", cwd="onion", shell=True, stderr=errfile, stdout=logfile)
subprocess.call("cp -R $IROOT/onion/* onion/onion", shell=True, stderr=errfile, stdout=logfile)
subprocess.call("rm CMakeCache.txt", shell=True, cwd="onion/onion/build", stderr=errfile, stdout=logfile)
subprocess.Popen("make && ./hello", shell=True, cwd="onion", stderr=errfile, stdout=logfile)
return 0
def stop(logfile, errfile):
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'hello' in line:
pid = int(line.split(None, 2)[1])
os.kill(pid, 15)
return 0
| import subprocess
import sys
import os
import setup_util
def start(args, logfile, errfile):
setup_util.replace_text("onion/hello.c", "mysql_real_connect\(data.db\[i\], \".*\",", "mysql_real_connect(data.db[i], \"" + args.database_host + "\",")
subprocess.call("rm -f *.o", cwd="onion", shell=True, stderr=errfile, stdout=logfile)
subprocess.call("cp -R $IROOT/onion/ onion/onion", shell=True, stderr=errfile, stdout=logfile)
subprocess.call("rm CMakeCache.txt", shell=True, cwd="onion/onion/build", stderr=errfile, stdout=logfile)
subprocess.Popen("make && ./hello", shell=True, cwd="onion", stderr=errfile, stdout=logfile)
return 0
def stop(logfile, errfile):
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'hello' in line:
pid = int(line.split(None, 2)[1])
os.kill(pid, 15)
return 0
| Remove minor errors in onion | Remove minor errors in onion
| Python | bsd-3-clause | alubbe/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,sxend/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,jamming/FrameworkBenchmarks,jamming/FrameworkBenchmarks,torhve/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,methane/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,Verber/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,joshk/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,zapov/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,zloster/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,torhve/FrameworkBenchmarks,sxend/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,sgml/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,sgml/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,Verber/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,valyala/FrameworkBenchmarks,sgml/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,jamming/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,Verber/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,xitru
m-framework/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,zapov/FrameworkBenchmarks,zapov/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,herloct/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,zapov/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,zloster/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,zapov/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,doom369/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,zloster/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,actframework/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,herloct/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,herloct/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,sgml/FrameworkBenchmarks,herloct/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,Rayne/Framew
orkBenchmarks,yunspace/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,zloster/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,doom369/FrameworkBenchmarks,Verber/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,sgml/FrameworkBenchmarks,Verber/FrameworkBenchmarks,sgml/FrameworkBenchmarks,doom369/FrameworkBenchmarks,valyala/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,doom369/FrameworkBenchmarks,testn/FrameworkBenchmarks,Verber/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,jamming/FrameworkBenchmarks,methane/FrameworkBenchmarks,sgml/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,sxend/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,joshk/FrameworkBenchmarks,denkab/FrameworkBenchmarks,doom369/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,valyala/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,denkab/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,
nbrady-techempower/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,testn/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,testn/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,sxend/FrameworkBenchmarks,grob/FrameworkBenchmarks,jamming/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,doom369/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,joshk/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,sgml/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,torhve/FrameworkBenchmarks,torhve/FrameworkBenchmarks,denkab/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,actframework/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,testn/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,khellang/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,joshk/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,khellang/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,jamming/FrameworkBenchmarks,zloster/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,youprofit/F
rameworkBenchmarks,jetty-project/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,doom369/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,grob/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,actframework/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,sxend/FrameworkBenchmarks,doom369/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,denkab/FrameworkBenchmarks,actframework/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,testn/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,sgml/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,jaguililla/Fr
ameworkBenchmarks,yunspace/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,denkab/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,methane/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,denkab/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,actframework/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,joshk/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,joshk/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,Verber/FrameworkBenchmarks,jamming/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,zloster/FrameworkBenchmarks,methane/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,methane/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,zloster/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,Eyepea/F
rameworkBenchmarks,jeevatkm/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,denkab/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,zapov/FrameworkBenchmarks,torhve/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,grob/FrameworkBenchmarks,zloster/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,herloct/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,Verber/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,sxend/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,Verber/FrameworkBenchmarks,zloster/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,zapov/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,methane/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,valyala/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,torhve/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,herloct/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,actframework/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,sxend/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,doom369/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,sxend/FrameworkBenchmarks,zhuochen
KIDD/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,sxend/FrameworkBenchmarks,grob/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,valyala/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,jamming/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,sxend/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,khellang/FrameworkBenchmarks,jamming/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,joshk/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,zloster/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,khellang/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,torhve/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,valyala/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,kh
ellang/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,valyala/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,denkab/FrameworkBenchmarks,testn/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,jamming/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,testn/FrameworkBenchmarks,testn/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,denkab/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,zapov/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,methane/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,valyala/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,sxend/FrameworkBenchmarks,actframework/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,doom369/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,actframework/FrameworkBenchmarks,zapov/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,sgml/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmark
s,sanjoydesk/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,sxend/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,herloct/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,khellang/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,herloct/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,torhve/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,grob/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,khellang/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,doom369/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,torhve/FrameworkBenchmarks,sxend/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,herloct/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,grob/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,zloster/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,sgml/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,methane/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,grob/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,methane/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,s
agenschneider/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,jamming/FrameworkBenchmarks,doom369/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,denkab/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,doom369/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,Verber/FrameworkBenchmarks,sgml/FrameworkBenchmarks,joshk/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,zapov/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,actframework/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,khellang/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,grob/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,doom369/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,herloct/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,joshk/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,khellang/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,sgml/FrameworkBenc
hmarks,kbrock/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,zloster/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,sxend/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,actframework/FrameworkBenchmarks,zapov/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,valyala/FrameworkBenchmarks,zapov/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,actframework/FrameworkBenchmarks,testn/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,doom369/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,khellang/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,khellang/FrameworkBenchmarks,grob/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,jamming/FrameworkBenchmarks,jamming/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,k-r-g/FrameworkBe
nchmarks,mfirry/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,khellang/FrameworkBenchmarks,actframework/FrameworkBenchmarks,torhve/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,zloster/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,valyala/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,denkab/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,sxend/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,zapov/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,denkab/FrameworkBenchmarks,denkab/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,valyala/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,testn/FrameworkBenchmarks,zapov/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,zhuochenKIDD/Fram
eworkBenchmarks,diablonhn/FrameworkBenchmarks,grob/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,joshk/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,testn/FrameworkBenchmarks,doom369/FrameworkBenchmarks,grob/FrameworkBenchmarks,zloster/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,denkab/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,Verber/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,actframework/FrameworkBenchmarks,methane/FrameworkBenchmarks,torhve/FrameworkBenchmarks,actframework/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,joshk/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,joshk/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,grob/FrameworkBenchmarks,torhve/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,valyala/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,sanjoydesk/FrameworkBench
marks,marko-asplund/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,zapov/FrameworkBenchmarks,herloct/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,zloster/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,testn/FrameworkBenchmarks,zloster/FrameworkBenchmarks,grob/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,methane/FrameworkBenchmarks,khellang/FrameworkBenchmarks,doom369/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,herloct/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,khellang/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,zloster/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,zloster/FrameworkBenchmarks,grob/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,valyala/FrameworkBenchmarks,herloct/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,testn/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,actframework/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,F3Community/FrameworkBe
nchmarks,hperadin/FrameworkBenchmarks,methane/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,Verber/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,actframework/FrameworkBenchmarks,sgml/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,joshk/FrameworkBenchmarks,Verber/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,Verber/FrameworkBenchmarks,methane/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,herloct/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,jamming/FrameworkBenchmarks,valyala/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,zapov/FrameworkBenchmarks,sxend/FrameworkBenchmarks,sxend/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,methane/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,joshk/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,
herloct/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,testn/FrameworkBenchmarks,sxend/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks | import subprocess
import sys
import os
import setup_util
def start(args, logfile, errfile):
setup_util.replace_text("onion/hello.c", "mysql_real_connect\(data.db\[i\], \".*\",", "mysql_real_connect(data.db[i], \"" + args.database_host + "\",")
subprocess.call("rm *.o", cwd="onion", shell=True, stderr=errfile, stdout=logfile)
subprocess.call("cp -R $IROOT/onion/* onion/onion", shell=True, stderr=errfile, stdout=logfile)
subprocess.call("rm CMakeCache.txt", shell=True, cwd="onion/onion/build", stderr=errfile, stdout=logfile)
subprocess.Popen("make && ./hello", shell=True, cwd="onion", stderr=errfile, stdout=logfile)
return 0
def stop(logfile, errfile):
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'hello' in line:
pid = int(line.split(None, 2)[1])
os.kill(pid, 15)
return 0
Remove minor errors in onion | import subprocess
import sys
import os
import setup_util
def start(args, logfile, errfile):
setup_util.replace_text("onion/hello.c", "mysql_real_connect\(data.db\[i\], \".*\",", "mysql_real_connect(data.db[i], \"" + args.database_host + "\",")
subprocess.call("rm -f *.o", cwd="onion", shell=True, stderr=errfile, stdout=logfile)
subprocess.call("cp -R $IROOT/onion/ onion/onion", shell=True, stderr=errfile, stdout=logfile)
subprocess.call("rm CMakeCache.txt", shell=True, cwd="onion/onion/build", stderr=errfile, stdout=logfile)
subprocess.Popen("make && ./hello", shell=True, cwd="onion", stderr=errfile, stdout=logfile)
return 0
def stop(logfile, errfile):
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'hello' in line:
pid = int(line.split(None, 2)[1])
os.kill(pid, 15)
return 0
| <commit_before>import subprocess
import sys
import os
import setup_util
def start(args, logfile, errfile):
setup_util.replace_text("onion/hello.c", "mysql_real_connect\(data.db\[i\], \".*\",", "mysql_real_connect(data.db[i], \"" + args.database_host + "\",")
subprocess.call("rm *.o", cwd="onion", shell=True, stderr=errfile, stdout=logfile)
subprocess.call("cp -R $IROOT/onion/* onion/onion", shell=True, stderr=errfile, stdout=logfile)
subprocess.call("rm CMakeCache.txt", shell=True, cwd="onion/onion/build", stderr=errfile, stdout=logfile)
subprocess.Popen("make && ./hello", shell=True, cwd="onion", stderr=errfile, stdout=logfile)
return 0
def stop(logfile, errfile):
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'hello' in line:
pid = int(line.split(None, 2)[1])
os.kill(pid, 15)
return 0
<commit_msg>Remove minor errors in onion<commit_after> | import subprocess
import sys
import os
import setup_util
def start(args, logfile, errfile):
setup_util.replace_text("onion/hello.c", "mysql_real_connect\(data.db\[i\], \".*\",", "mysql_real_connect(data.db[i], \"" + args.database_host + "\",")
subprocess.call("rm -f *.o", cwd="onion", shell=True, stderr=errfile, stdout=logfile)
subprocess.call("cp -R $IROOT/onion/ onion/onion", shell=True, stderr=errfile, stdout=logfile)
subprocess.call("rm CMakeCache.txt", shell=True, cwd="onion/onion/build", stderr=errfile, stdout=logfile)
subprocess.Popen("make && ./hello", shell=True, cwd="onion", stderr=errfile, stdout=logfile)
return 0
def stop(logfile, errfile):
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'hello' in line:
pid = int(line.split(None, 2)[1])
os.kill(pid, 15)
return 0
| import subprocess
import sys
import os
import setup_util
def start(args, logfile, errfile):
setup_util.replace_text("onion/hello.c", "mysql_real_connect\(data.db\[i\], \".*\",", "mysql_real_connect(data.db[i], \"" + args.database_host + "\",")
subprocess.call("rm *.o", cwd="onion", shell=True, stderr=errfile, stdout=logfile)
subprocess.call("cp -R $IROOT/onion/* onion/onion", shell=True, stderr=errfile, stdout=logfile)
subprocess.call("rm CMakeCache.txt", shell=True, cwd="onion/onion/build", stderr=errfile, stdout=logfile)
subprocess.Popen("make && ./hello", shell=True, cwd="onion", stderr=errfile, stdout=logfile)
return 0
def stop(logfile, errfile):
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'hello' in line:
pid = int(line.split(None, 2)[1])
os.kill(pid, 15)
return 0
Remove minor errors in onionimport subprocess
import sys
import os
import setup_util
def start(args, logfile, errfile):
setup_util.replace_text("onion/hello.c", "mysql_real_connect\(data.db\[i\], \".*\",", "mysql_real_connect(data.db[i], \"" + args.database_host + "\",")
subprocess.call("rm -f *.o", cwd="onion", shell=True, stderr=errfile, stdout=logfile)
subprocess.call("cp -R $IROOT/onion/ onion/onion", shell=True, stderr=errfile, stdout=logfile)
subprocess.call("rm CMakeCache.txt", shell=True, cwd="onion/onion/build", stderr=errfile, stdout=logfile)
subprocess.Popen("make && ./hello", shell=True, cwd="onion", stderr=errfile, stdout=logfile)
return 0
def stop(logfile, errfile):
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'hello' in line:
pid = int(line.split(None, 2)[1])
os.kill(pid, 15)
return 0
| <commit_before>import subprocess
import sys
import os
import setup_util
def start(args, logfile, errfile):
setup_util.replace_text("onion/hello.c", "mysql_real_connect\(data.db\[i\], \".*\",", "mysql_real_connect(data.db[i], \"" + args.database_host + "\",")
subprocess.call("rm *.o", cwd="onion", shell=True, stderr=errfile, stdout=logfile)
subprocess.call("cp -R $IROOT/onion/* onion/onion", shell=True, stderr=errfile, stdout=logfile)
subprocess.call("rm CMakeCache.txt", shell=True, cwd="onion/onion/build", stderr=errfile, stdout=logfile)
subprocess.Popen("make && ./hello", shell=True, cwd="onion", stderr=errfile, stdout=logfile)
return 0
def stop(logfile, errfile):
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'hello' in line:
pid = int(line.split(None, 2)[1])
os.kill(pid, 15)
return 0
<commit_msg>Remove minor errors in onion<commit_after>import subprocess
import sys
import os
import setup_util
def start(args, logfile, errfile):
setup_util.replace_text("onion/hello.c", "mysql_real_connect\(data.db\[i\], \".*\",", "mysql_real_connect(data.db[i], \"" + args.database_host + "\",")
subprocess.call("rm -f *.o", cwd="onion", shell=True, stderr=errfile, stdout=logfile)
subprocess.call("cp -R $IROOT/onion/ onion/onion", shell=True, stderr=errfile, stdout=logfile)
subprocess.call("rm CMakeCache.txt", shell=True, cwd="onion/onion/build", stderr=errfile, stdout=logfile)
subprocess.Popen("make && ./hello", shell=True, cwd="onion", stderr=errfile, stdout=logfile)
return 0
def stop(logfile, errfile):
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'hello' in line:
pid = int(line.split(None, 2)[1])
os.kill(pid, 15)
return 0
|
c90f279d9555f1f21e6bc80348c7ebe7156e94a4 | wiblog/formatting.py | wiblog/formatting.py | from django.utils.safestring import mark_safe
import markdown
# Convert a markdown string into HTML5, and prevent Django from escaping it
def mdToHTML(value):
return mark_safe(markdown.markdown(value, output_format="html5"))
# Get a summary of a post
def summarize(fullBody):
firstNewline = fullBody.find("\n")
if firstNewline > 0:
return str(fullBody)[:firstNewline]
return fullBody
| from django.utils.safestring import mark_safe
import markdown
# Convert a markdown string into HTML5, and prevent Django from escaping it
def mdToHTML(value):
return mark_safe(markdown.markdown(value, output_format="html5"))
# Get a summary of a post
def summarize(fullBody):
firstNewline = fullBody.find("\n")
if firstNewline > 0:
return unicode(fullBody)[:firstNewline]
return fullBody
| Fix UTF-8 Bug In summarize Function | Fix UTF-8 Bug In summarize Function
| Python | agpl-3.0 | lo-windigo/fragdev,lo-windigo/fragdev | from django.utils.safestring import mark_safe
import markdown
# Convert a markdown string into HTML5, and prevent Django from escaping it
def mdToHTML(value):
return mark_safe(markdown.markdown(value, output_format="html5"))
# Get a summary of a post
def summarize(fullBody):
firstNewline = fullBody.find("\n")
if firstNewline > 0:
return str(fullBody)[:firstNewline]
return fullBody
Fix UTF-8 Bug In summarize Function | from django.utils.safestring import mark_safe
import markdown
# Convert a markdown string into HTML5, and prevent Django from escaping it
def mdToHTML(value):
return mark_safe(markdown.markdown(value, output_format="html5"))
# Get a summary of a post
def summarize(fullBody):
firstNewline = fullBody.find("\n")
if firstNewline > 0:
return unicode(fullBody)[:firstNewline]
return fullBody
| <commit_before>from django.utils.safestring import mark_safe
import markdown
# Convert a markdown string into HTML5, and prevent Django from escaping it
def mdToHTML(value):
return mark_safe(markdown.markdown(value, output_format="html5"))
# Get a summary of a post
def summarize(fullBody):
firstNewline = fullBody.find("\n")
if firstNewline > 0:
return str(fullBody)[:firstNewline]
return fullBody
<commit_msg>Fix UTF-8 Bug In summarize Function<commit_after> | from django.utils.safestring import mark_safe
import markdown
# Convert a markdown string into HTML5, and prevent Django from escaping it
def mdToHTML(value):
return mark_safe(markdown.markdown(value, output_format="html5"))
# Get a summary of a post
def summarize(fullBody):
firstNewline = fullBody.find("\n")
if firstNewline > 0:
return unicode(fullBody)[:firstNewline]
return fullBody
| from django.utils.safestring import mark_safe
import markdown
# Convert a markdown string into HTML5, and prevent Django from escaping it
def mdToHTML(value):
return mark_safe(markdown.markdown(value, output_format="html5"))
# Get a summary of a post
def summarize(fullBody):
firstNewline = fullBody.find("\n")
if firstNewline > 0:
return str(fullBody)[:firstNewline]
return fullBody
Fix UTF-8 Bug In summarize Functionfrom django.utils.safestring import mark_safe
import markdown
# Convert a markdown string into HTML5, and prevent Django from escaping it
def mdToHTML(value):
return mark_safe(markdown.markdown(value, output_format="html5"))
# Get a summary of a post
def summarize(fullBody):
firstNewline = fullBody.find("\n")
if firstNewline > 0:
return unicode(fullBody)[:firstNewline]
return fullBody
| <commit_before>from django.utils.safestring import mark_safe
import markdown
# Convert a markdown string into HTML5, and prevent Django from escaping it
def mdToHTML(value):
return mark_safe(markdown.markdown(value, output_format="html5"))
# Get a summary of a post
def summarize(fullBody):
firstNewline = fullBody.find("\n")
if firstNewline > 0:
return str(fullBody)[:firstNewline]
return fullBody
<commit_msg>Fix UTF-8 Bug In summarize Function<commit_after>from django.utils.safestring import mark_safe
import markdown
# Convert a markdown string into HTML5, and prevent Django from escaping it
def mdToHTML(value):
return mark_safe(markdown.markdown(value, output_format="html5"))
# Get a summary of a post
def summarize(fullBody):
firstNewline = fullBody.find("\n")
if firstNewline > 0:
return unicode(fullBody)[:firstNewline]
return fullBody
|
9a84ffde3909c74a47049c65e3b2bb5038a2cfaa | sillymap/burrows_wheeler.py | sillymap/burrows_wheeler.py |
def burrows_wheeler(text):
"""Returns the burrows wheeler transform of <text>.
The text is assumed to not contain the character $"""
text += "$"
all_permutations = []
for i in range(len(text)):
all_permutations.append(text[i:] + text[:i])
all_permutations.sort()
return "".join([w[-1] for w in all_permutations])
|
def burrows_wheeler(text):
"""Calculates the burrows wheeler transform of <text>.
returns the burrows wheeler string and the suffix array indices
The text is assumed to not contain the character $"""
text += "$"
all_permutations = []
for i in range(len(text)):
all_permutations.append((text[i:] + text[:i],i))
all_permutations.sort()
bw_l = [] # burrows wheeler as list
sa_i = [] # suffix array indices
for w,j in all_permutations:
bw_l.append(w[-1])
sa_i.append(j)
return "".join(bw_l), sa_i
| Return suffix array indices from burrows wheeler | Return suffix array indices from burrows wheeler
| Python | mit | alneberg/sillymap |
def burrows_wheeler(text):
"""Returns the burrows wheeler transform of <text>.
The text is assumed to not contain the character $"""
text += "$"
all_permutations = []
for i in range(len(text)):
all_permutations.append(text[i:] + text[:i])
all_permutations.sort()
return "".join([w[-1] for w in all_permutations])
Return suffix array indices from burrows wheeler |
def burrows_wheeler(text):
"""Calculates the burrows wheeler transform of <text>.
returns the burrows wheeler string and the suffix array indices
The text is assumed to not contain the character $"""
text += "$"
all_permutations = []
for i in range(len(text)):
all_permutations.append((text[i:] + text[:i],i))
all_permutations.sort()
bw_l = [] # burrows wheeler as list
sa_i = [] # suffix array indices
for w,j in all_permutations:
bw_l.append(w[-1])
sa_i.append(j)
return "".join(bw_l), sa_i
| <commit_before>
def burrows_wheeler(text):
"""Returns the burrows wheeler transform of <text>.
The text is assumed to not contain the character $"""
text += "$"
all_permutations = []
for i in range(len(text)):
all_permutations.append(text[i:] + text[:i])
all_permutations.sort()
return "".join([w[-1] for w in all_permutations])
<commit_msg>Return suffix array indices from burrows wheeler<commit_after> |
def burrows_wheeler(text):
"""Calculates the burrows wheeler transform of <text>.
returns the burrows wheeler string and the suffix array indices
The text is assumed to not contain the character $"""
text += "$"
all_permutations = []
for i in range(len(text)):
all_permutations.append((text[i:] + text[:i],i))
all_permutations.sort()
bw_l = [] # burrows wheeler as list
sa_i = [] # suffix array indices
for w,j in all_permutations:
bw_l.append(w[-1])
sa_i.append(j)
return "".join(bw_l), sa_i
|
def burrows_wheeler(text):
"""Returns the burrows wheeler transform of <text>.
The text is assumed to not contain the character $"""
text += "$"
all_permutations = []
for i in range(len(text)):
all_permutations.append(text[i:] + text[:i])
all_permutations.sort()
return "".join([w[-1] for w in all_permutations])
Return suffix array indices from burrows wheeler
def burrows_wheeler(text):
"""Calculates the burrows wheeler transform of <text>.
returns the burrows wheeler string and the suffix array indices
The text is assumed to not contain the character $"""
text += "$"
all_permutations = []
for i in range(len(text)):
all_permutations.append((text[i:] + text[:i],i))
all_permutations.sort()
bw_l = [] # burrows wheeler as list
sa_i = [] # suffix array indices
for w,j in all_permutations:
bw_l.append(w[-1])
sa_i.append(j)
return "".join(bw_l), sa_i
| <commit_before>
def burrows_wheeler(text):
"""Returns the burrows wheeler transform of <text>.
The text is assumed to not contain the character $"""
text += "$"
all_permutations = []
for i in range(len(text)):
all_permutations.append(text[i:] + text[:i])
all_permutations.sort()
return "".join([w[-1] for w in all_permutations])
<commit_msg>Return suffix array indices from burrows wheeler<commit_after>
def burrows_wheeler(text):
"""Calculates the burrows wheeler transform of <text>.
returns the burrows wheeler string and the suffix array indices
The text is assumed to not contain the character $"""
text += "$"
all_permutations = []
for i in range(len(text)):
all_permutations.append((text[i:] + text[:i],i))
all_permutations.sort()
bw_l = [] # burrows wheeler as list
sa_i = [] # suffix array indices
for w,j in all_permutations:
bw_l.append(w[-1])
sa_i.append(j)
return "".join(bw_l), sa_i
|
bcdcd9451de924e2fa870e20414086729369b7bd | version.py | version.py | major = 0
minor=0
patch=27
branch="master"
timestamp=1376610643.76 | major = 0
minor=0
patch=28
branch="master"
timestamp=1376705489.59 | Tag commit for v0.0.28-master generated by gitmake.py | Tag commit for v0.0.28-master generated by gitmake.py
| Python | mit | ryansturmer/gitmake | major = 0
minor=0
patch=27
branch="master"
timestamp=1376610643.76Tag commit for v0.0.28-master generated by gitmake.py | major = 0
minor=0
patch=28
branch="master"
timestamp=1376705489.59 | <commit_before>major = 0
minor=0
patch=27
branch="master"
timestamp=1376610643.76<commit_msg>Tag commit for v0.0.28-master generated by gitmake.py<commit_after> | major = 0
minor=0
patch=28
branch="master"
timestamp=1376705489.59 | major = 0
minor=0
patch=27
branch="master"
timestamp=1376610643.76Tag commit for v0.0.28-master generated by gitmake.pymajor = 0
minor=0
patch=28
branch="master"
timestamp=1376705489.59 | <commit_before>major = 0
minor=0
patch=27
branch="master"
timestamp=1376610643.76<commit_msg>Tag commit for v0.0.28-master generated by gitmake.py<commit_after>major = 0
minor=0
patch=28
branch="master"
timestamp=1376705489.59 |
a8d1812532211312287e58093ca966a7ea2050f4 | core/ModificationEntities.py | core/ModificationEntities.py | # coding: utf8
from core.ParsingEntities import Entity
class ModificationEntity(Entity):
def __add__(self, other):
if isinstance(self, ModificationEntity) and isinstance(other, ModificationEntity):
modification_operator = ModificationOperator()
return modification_operator
else:
raise TypeError("Operands have to be ModificationEntity's subclasses")
class ModificationCondition(ModificationEntity):
pass
class ModificationOperator(ModificationEntity):
pass | # coding: utf8
from core.ParsingEntities import Entity
class ModificationEntity(Entity):
def __add__(self, other):
if isinstance(self, ModificationEntity) and isinstance(other, ModificationEntity):
modification_operator = ModificationOperator()
return modification_operator
else:
raise TypeError("Operands have to be ModificationEntity's subclasses")
class ModificationOperator(ModificationEntity):
def __init__(self, operand_a, operand_b):
self.operandA = operand_a
self.operandB = operand_b
def check(self, element, ref_position):
pass
class ModificationCondition(ModificationEntity):
def __init__(self, character, operation_type, rel_position=0):
self.rel_position = rel_position
self.operationType = operation_type
self.character = character
def check(self, element, ref_position=0):
pass
| Implement ModificationOperator and ModificationCondition classes | Implement ModificationOperator and ModificationCondition classes
| Python | mit | JCH222/matriochkas | # coding: utf8
from core.ParsingEntities import Entity
class ModificationEntity(Entity):
def __add__(self, other):
if isinstance(self, ModificationEntity) and isinstance(other, ModificationEntity):
modification_operator = ModificationOperator()
return modification_operator
else:
raise TypeError("Operands have to be ModificationEntity's subclasses")
class ModificationCondition(ModificationEntity):
pass
class ModificationOperator(ModificationEntity):
passImplement ModificationOperator and ModificationCondition classes | # coding: utf8
from core.ParsingEntities import Entity
class ModificationEntity(Entity):
def __add__(self, other):
if isinstance(self, ModificationEntity) and isinstance(other, ModificationEntity):
modification_operator = ModificationOperator()
return modification_operator
else:
raise TypeError("Operands have to be ModificationEntity's subclasses")
class ModificationOperator(ModificationEntity):
def __init__(self, operand_a, operand_b):
self.operandA = operand_a
self.operandB = operand_b
def check(self, element, ref_position):
pass
class ModificationCondition(ModificationEntity):
def __init__(self, character, operation_type, rel_position=0):
self.rel_position = rel_position
self.operationType = operation_type
self.character = character
def check(self, element, ref_position=0):
pass
| <commit_before># coding: utf8
from core.ParsingEntities import Entity
class ModificationEntity(Entity):
def __add__(self, other):
if isinstance(self, ModificationEntity) and isinstance(other, ModificationEntity):
modification_operator = ModificationOperator()
return modification_operator
else:
raise TypeError("Operands have to be ModificationEntity's subclasses")
class ModificationCondition(ModificationEntity):
pass
class ModificationOperator(ModificationEntity):
pass<commit_msg>Implement ModificationOperator and ModificationCondition classes<commit_after> | # coding: utf8
from core.ParsingEntities import Entity
class ModificationEntity(Entity):
def __add__(self, other):
if isinstance(self, ModificationEntity) and isinstance(other, ModificationEntity):
modification_operator = ModificationOperator()
return modification_operator
else:
raise TypeError("Operands have to be ModificationEntity's subclasses")
class ModificationOperator(ModificationEntity):
def __init__(self, operand_a, operand_b):
self.operandA = operand_a
self.operandB = operand_b
def check(self, element, ref_position):
pass
class ModificationCondition(ModificationEntity):
def __init__(self, character, operation_type, rel_position=0):
self.rel_position = rel_position
self.operationType = operation_type
self.character = character
def check(self, element, ref_position=0):
pass
| # coding: utf8
from core.ParsingEntities import Entity
class ModificationEntity(Entity):
def __add__(self, other):
if isinstance(self, ModificationEntity) and isinstance(other, ModificationEntity):
modification_operator = ModificationOperator()
return modification_operator
else:
raise TypeError("Operands have to be ModificationEntity's subclasses")
class ModificationCondition(ModificationEntity):
pass
class ModificationOperator(ModificationEntity):
passImplement ModificationOperator and ModificationCondition classes# coding: utf8
from core.ParsingEntities import Entity
class ModificationEntity(Entity):
def __add__(self, other):
if isinstance(self, ModificationEntity) and isinstance(other, ModificationEntity):
modification_operator = ModificationOperator()
return modification_operator
else:
raise TypeError("Operands have to be ModificationEntity's subclasses")
class ModificationOperator(ModificationEntity):
def __init__(self, operand_a, operand_b):
self.operandA = operand_a
self.operandB = operand_b
def check(self, element, ref_position):
pass
class ModificationCondition(ModificationEntity):
def __init__(self, character, operation_type, rel_position=0):
self.rel_position = rel_position
self.operationType = operation_type
self.character = character
def check(self, element, ref_position=0):
pass
| <commit_before># coding: utf8
from core.ParsingEntities import Entity
class ModificationEntity(Entity):
def __add__(self, other):
if isinstance(self, ModificationEntity) and isinstance(other, ModificationEntity):
modification_operator = ModificationOperator()
return modification_operator
else:
raise TypeError("Operands have to be ModificationEntity's subclasses")
class ModificationCondition(ModificationEntity):
pass
class ModificationOperator(ModificationEntity):
pass<commit_msg>Implement ModificationOperator and ModificationCondition classes<commit_after># coding: utf8
from core.ParsingEntities import Entity
class ModificationEntity(Entity):
def __add__(self, other):
if isinstance(self, ModificationEntity) and isinstance(other, ModificationEntity):
modification_operator = ModificationOperator()
return modification_operator
else:
raise TypeError("Operands have to be ModificationEntity's subclasses")
class ModificationOperator(ModificationEntity):
def __init__(self, operand_a, operand_b):
self.operandA = operand_a
self.operandB = operand_b
def check(self, element, ref_position):
pass
class ModificationCondition(ModificationEntity):
def __init__(self, character, operation_type, rel_position=0):
self.rel_position = rel_position
self.operationType = operation_type
self.character = character
def check(self, element, ref_position=0):
pass
|
fbc4bd9a9b803a831862ca81d315aa9eb79847d5 | geokey_wegovnow/__init__.py | geokey_wegovnow/__init__.py | """Main initialization for the WeGovNow extension."""
VERSION = (3, 1, 2)
__version__ = '.'.join(map(str, VERSION))
try:
from geokey.extensions.base import register
register(
'geokey_wegovnow',
'WeGovNow',
display_admin=False,
superuser=False,
version=__version__
)
except BaseException:
print 'Please install GeoKey first'
| """Main initialization for the WeGovNow extension."""
VERSION = (3, 2, 0)
__version__ = '.'.join(map(str, VERSION))
try:
from geokey.extensions.base import register
register(
'geokey_wegovnow',
'WeGovNow',
display_admin=False,
superuser=False,
version=__version__
)
except BaseException:
print 'Please install GeoKey first'
| Increment version number ahead of release. | Increment version number ahead of release. | Python | mit | ExCiteS/geokey-wegovnow,ExCiteS/geokey-wegovnow | """Main initialization for the WeGovNow extension."""
VERSION = (3, 1, 2)
__version__ = '.'.join(map(str, VERSION))
try:
from geokey.extensions.base import register
register(
'geokey_wegovnow',
'WeGovNow',
display_admin=False,
superuser=False,
version=__version__
)
except BaseException:
print 'Please install GeoKey first'
Increment version number ahead of release. | """Main initialization for the WeGovNow extension."""
VERSION = (3, 2, 0)
__version__ = '.'.join(map(str, VERSION))
try:
from geokey.extensions.base import register
register(
'geokey_wegovnow',
'WeGovNow',
display_admin=False,
superuser=False,
version=__version__
)
except BaseException:
print 'Please install GeoKey first'
| <commit_before>"""Main initialization for the WeGovNow extension."""
VERSION = (3, 1, 2)
__version__ = '.'.join(map(str, VERSION))
try:
from geokey.extensions.base import register
register(
'geokey_wegovnow',
'WeGovNow',
display_admin=False,
superuser=False,
version=__version__
)
except BaseException:
print 'Please install GeoKey first'
<commit_msg>Increment version number ahead of release.<commit_after> | """Main initialization for the WeGovNow extension."""
VERSION = (3, 2, 0)
__version__ = '.'.join(map(str, VERSION))
try:
from geokey.extensions.base import register
register(
'geokey_wegovnow',
'WeGovNow',
display_admin=False,
superuser=False,
version=__version__
)
except BaseException:
print 'Please install GeoKey first'
| """Main initialization for the WeGovNow extension."""
VERSION = (3, 1, 2)
__version__ = '.'.join(map(str, VERSION))
try:
from geokey.extensions.base import register
register(
'geokey_wegovnow',
'WeGovNow',
display_admin=False,
superuser=False,
version=__version__
)
except BaseException:
print 'Please install GeoKey first'
Increment version number ahead of release."""Main initialization for the WeGovNow extension."""
VERSION = (3, 2, 0)
__version__ = '.'.join(map(str, VERSION))
try:
from geokey.extensions.base import register
register(
'geokey_wegovnow',
'WeGovNow',
display_admin=False,
superuser=False,
version=__version__
)
except BaseException:
print 'Please install GeoKey first'
| <commit_before>"""Main initialization for the WeGovNow extension."""
VERSION = (3, 1, 2)
__version__ = '.'.join(map(str, VERSION))
try:
from geokey.extensions.base import register
register(
'geokey_wegovnow',
'WeGovNow',
display_admin=False,
superuser=False,
version=__version__
)
except BaseException:
print 'Please install GeoKey first'
<commit_msg>Increment version number ahead of release.<commit_after>"""Main initialization for the WeGovNow extension."""
VERSION = (3, 2, 0)
__version__ = '.'.join(map(str, VERSION))
try:
from geokey.extensions.base import register
register(
'geokey_wegovnow',
'WeGovNow',
display_admin=False,
superuser=False,
version=__version__
)
except BaseException:
print 'Please install GeoKey first'
|
54f99c5c62c170b538a7a6ea4bf786c897151e5a | pylcp/url.py | pylcp/url.py | def url_path_join(url, path_part):
"""Join a path part to the end of a URL adding/removing slashes as necessary."""
result = url + '/' if url[-1] != '/' else url
index = 1 if path_part[0] == '/' else 0
return result + path_part[index:]
| def url_path_join(url, path_part):
"""Join a path part to the end of a URL adding/removing slashes as necessary."""
result = url + '/' if not url.endswith('/') else url
index = 1 if path_part.startswith('/') else 0
return result + path_part[index:]
| Use startswith/endswith instead of indices for readability. | Use startswith/endswith instead of indices for readability.
| Python | bsd-3-clause | bradsokol/PyLCP,Points/PyLCP,bradsokol/PyLCP,Points/PyLCP | def url_path_join(url, path_part):
"""Join a path part to the end of a URL adding/removing slashes as necessary."""
result = url + '/' if url[-1] != '/' else url
index = 1 if path_part[0] == '/' else 0
return result + path_part[index:]
Use startswith/endswith instead of indices for readability. | def url_path_join(url, path_part):
"""Join a path part to the end of a URL adding/removing slashes as necessary."""
result = url + '/' if not url.endswith('/') else url
index = 1 if path_part.startswith('/') else 0
return result + path_part[index:]
| <commit_before>def url_path_join(url, path_part):
"""Join a path part to the end of a URL adding/removing slashes as necessary."""
result = url + '/' if url[-1] != '/' else url
index = 1 if path_part[0] == '/' else 0
return result + path_part[index:]
<commit_msg>Use startswith/endswith instead of indices for readability.<commit_after> | def url_path_join(url, path_part):
"""Join a path part to the end of a URL adding/removing slashes as necessary."""
result = url + '/' if not url.endswith('/') else url
index = 1 if path_part.startswith('/') else 0
return result + path_part[index:]
| def url_path_join(url, path_part):
"""Join a path part to the end of a URL adding/removing slashes as necessary."""
result = url + '/' if url[-1] != '/' else url
index = 1 if path_part[0] == '/' else 0
return result + path_part[index:]
Use startswith/endswith instead of indices for readability.def url_path_join(url, path_part):
"""Join a path part to the end of a URL adding/removing slashes as necessary."""
result = url + '/' if not url.endswith('/') else url
index = 1 if path_part.startswith('/') else 0
return result + path_part[index:]
| <commit_before>def url_path_join(url, path_part):
"""Join a path part to the end of a URL adding/removing slashes as necessary."""
result = url + '/' if url[-1] != '/' else url
index = 1 if path_part[0] == '/' else 0
return result + path_part[index:]
<commit_msg>Use startswith/endswith instead of indices for readability.<commit_after>def url_path_join(url, path_part):
"""Join a path part to the end of a URL adding/removing slashes as necessary."""
result = url + '/' if not url.endswith('/') else url
index = 1 if path_part.startswith('/') else 0
return result + path_part[index:]
|
6e17e781f6cb8e29a7284beffe10463c843b86b3 | tests/test_vector2_equality.py | tests/test_vector2_equality.py | from hypothesis import assume, given
from ppb_vector import Vector2
from utils import vectors
@given(x=vectors())
def test_equal_self(x: Vector2):
assert x == x
@given(x=vectors())
def test_non_zero_equal(x: Vector2):
assume(x != (0, 0))
assert x != 1.1 * x
@given(x=vectors(), y=vectors())
def test_not_equal_equivalent(x: Vector2, y: Vector2):
assert (x != y) == (not x == y)
| from hypothesis import assume, given
from ppb_vector import Vector2
from utils import vectors
@given(x=vectors())
def test_equal_self(x: Vector2):
assert x == x
@given(x=vectors())
def test_non_zero_equal(x: Vector2):
assume(x != (0, 0))
assert x != 1.1 * x
assert x != -x
@given(x=vectors(), y=vectors())
def test_not_equal_equivalent(x: Vector2, y: Vector2):
assert (x != y) == (not x == y)
| Add another (generated) negative example | tests/equality: Add another (generated) negative example
| Python | artistic-2.0 | ppb/ppb-vector,ppb/ppb-vector | from hypothesis import assume, given
from ppb_vector import Vector2
from utils import vectors
@given(x=vectors())
def test_equal_self(x: Vector2):
assert x == x
@given(x=vectors())
def test_non_zero_equal(x: Vector2):
assume(x != (0, 0))
assert x != 1.1 * x
@given(x=vectors(), y=vectors())
def test_not_equal_equivalent(x: Vector2, y: Vector2):
assert (x != y) == (not x == y)
tests/equality: Add another (generated) negative example | from hypothesis import assume, given
from ppb_vector import Vector2
from utils import vectors
@given(x=vectors())
def test_equal_self(x: Vector2):
assert x == x
@given(x=vectors())
def test_non_zero_equal(x: Vector2):
assume(x != (0, 0))
assert x != 1.1 * x
assert x != -x
@given(x=vectors(), y=vectors())
def test_not_equal_equivalent(x: Vector2, y: Vector2):
assert (x != y) == (not x == y)
| <commit_before>from hypothesis import assume, given
from ppb_vector import Vector2
from utils import vectors
@given(x=vectors())
def test_equal_self(x: Vector2):
assert x == x
@given(x=vectors())
def test_non_zero_equal(x: Vector2):
assume(x != (0, 0))
assert x != 1.1 * x
@given(x=vectors(), y=vectors())
def test_not_equal_equivalent(x: Vector2, y: Vector2):
assert (x != y) == (not x == y)
<commit_msg>tests/equality: Add another (generated) negative example<commit_after> | from hypothesis import assume, given
from ppb_vector import Vector2
from utils import vectors
@given(x=vectors())
def test_equal_self(x: Vector2):
assert x == x
@given(x=vectors())
def test_non_zero_equal(x: Vector2):
assume(x != (0, 0))
assert x != 1.1 * x
assert x != -x
@given(x=vectors(), y=vectors())
def test_not_equal_equivalent(x: Vector2, y: Vector2):
assert (x != y) == (not x == y)
| from hypothesis import assume, given
from ppb_vector import Vector2
from utils import vectors
@given(x=vectors())
def test_equal_self(x: Vector2):
assert x == x
@given(x=vectors())
def test_non_zero_equal(x: Vector2):
assume(x != (0, 0))
assert x != 1.1 * x
@given(x=vectors(), y=vectors())
def test_not_equal_equivalent(x: Vector2, y: Vector2):
assert (x != y) == (not x == y)
tests/equality: Add another (generated) negative examplefrom hypothesis import assume, given
from ppb_vector import Vector2
from utils import vectors
@given(x=vectors())
def test_equal_self(x: Vector2):
assert x == x
@given(x=vectors())
def test_non_zero_equal(x: Vector2):
assume(x != (0, 0))
assert x != 1.1 * x
assert x != -x
@given(x=vectors(), y=vectors())
def test_not_equal_equivalent(x: Vector2, y: Vector2):
assert (x != y) == (not x == y)
| <commit_before>from hypothesis import assume, given
from ppb_vector import Vector2
from utils import vectors
@given(x=vectors())
def test_equal_self(x: Vector2):
assert x == x
@given(x=vectors())
def test_non_zero_equal(x: Vector2):
assume(x != (0, 0))
assert x != 1.1 * x
@given(x=vectors(), y=vectors())
def test_not_equal_equivalent(x: Vector2, y: Vector2):
assert (x != y) == (not x == y)
<commit_msg>tests/equality: Add another (generated) negative example<commit_after>from hypothesis import assume, given
from ppb_vector import Vector2
from utils import vectors
@given(x=vectors())
def test_equal_self(x: Vector2):
assert x == x
@given(x=vectors())
def test_non_zero_equal(x: Vector2):
assume(x != (0, 0))
assert x != 1.1 * x
assert x != -x
@given(x=vectors(), y=vectors())
def test_not_equal_equivalent(x: Vector2, y: Vector2):
assert (x != y) == (not x == y)
|
132a10f38c6c5d29c38a388af7d50e7ceb71e8fa | zipline_extension/calendars/exchange_calendar_forex.py | zipline_extension/calendars/exchange_calendar_forex.py | import pytz
from datetime import time
from zipline.utils.calendars import TradingCalendar
class ForexCalendar(TradingCalendar):
@property
def name(self):
return "forex"
@property
def tz(self):
return pytz.UTC
@property
def open_time(self):
return time(0, 0)
@property
def close_time(self):
return time(23, 59)
| import pytz
import pandas as pd
from datetime import time
from zipline.utils.calendars import TradingCalendar
class ForexCalendar(TradingCalendar):
NYT_5PM = time(9)
@property
def name(self):
return "forex"
@property
def tz(self):
return pytz.UTC
@property
def open_time(self):
return time(0, 0)
@property
def close_time(self):
return time(23, 59)
def special_opens_adhoc(self):
return [
(self.NYT_5PM, self._sunday_dates())
]
def special_closes_adhoc(self):
return [
(self.NYT_5PM, self._friday_dates())
]
def _friday_dates(self):
return pd.date_range(start=self.schedule.index[0],
end=self.schedule.idnex[-1],
freq='W-FRI')
def _sunday_dates(self):
return pd.date_range(start=self.schedule.index[0],
end=self.schedule.idnex[-1],
freq='W-SUN')
| Add weekend close and open times | Add weekend close and open times
| Python | mit | bernoullio/toolbox | import pytz
from datetime import time
from zipline.utils.calendars import TradingCalendar
class ForexCalendar(TradingCalendar):
@property
def name(self):
return "forex"
@property
def tz(self):
return pytz.UTC
@property
def open_time(self):
return time(0, 0)
@property
def close_time(self):
return time(23, 59)
Add weekend close and open times | import pytz
import pandas as pd
from datetime import time
from zipline.utils.calendars import TradingCalendar
class ForexCalendar(TradingCalendar):
NYT_5PM = time(9)
@property
def name(self):
return "forex"
@property
def tz(self):
return pytz.UTC
@property
def open_time(self):
return time(0, 0)
@property
def close_time(self):
return time(23, 59)
def special_opens_adhoc(self):
return [
(self.NYT_5PM, self._sunday_dates())
]
def special_closes_adhoc(self):
return [
(self.NYT_5PM, self._friday_dates())
]
def _friday_dates(self):
return pd.date_range(start=self.schedule.index[0],
end=self.schedule.idnex[-1],
freq='W-FRI')
def _sunday_dates(self):
return pd.date_range(start=self.schedule.index[0],
end=self.schedule.idnex[-1],
freq='W-SUN')
| <commit_before>import pytz
from datetime import time
from zipline.utils.calendars import TradingCalendar
class ForexCalendar(TradingCalendar):
@property
def name(self):
return "forex"
@property
def tz(self):
return pytz.UTC
@property
def open_time(self):
return time(0, 0)
@property
def close_time(self):
return time(23, 59)
<commit_msg>Add weekend close and open times<commit_after> | import pytz
import pandas as pd
from datetime import time
from zipline.utils.calendars import TradingCalendar
class ForexCalendar(TradingCalendar):
NYT_5PM = time(9)
@property
def name(self):
return "forex"
@property
def tz(self):
return pytz.UTC
@property
def open_time(self):
return time(0, 0)
@property
def close_time(self):
return time(23, 59)
def special_opens_adhoc(self):
return [
(self.NYT_5PM, self._sunday_dates())
]
def special_closes_adhoc(self):
return [
(self.NYT_5PM, self._friday_dates())
]
def _friday_dates(self):
return pd.date_range(start=self.schedule.index[0],
end=self.schedule.idnex[-1],
freq='W-FRI')
def _sunday_dates(self):
return pd.date_range(start=self.schedule.index[0],
end=self.schedule.idnex[-1],
freq='W-SUN')
| import pytz
from datetime import time
from zipline.utils.calendars import TradingCalendar
class ForexCalendar(TradingCalendar):
@property
def name(self):
return "forex"
@property
def tz(self):
return pytz.UTC
@property
def open_time(self):
return time(0, 0)
@property
def close_time(self):
return time(23, 59)
Add weekend close and open timesimport pytz
import pandas as pd
from datetime import time
from zipline.utils.calendars import TradingCalendar
class ForexCalendar(TradingCalendar):
NYT_5PM = time(9)
@property
def name(self):
return "forex"
@property
def tz(self):
return pytz.UTC
@property
def open_time(self):
return time(0, 0)
@property
def close_time(self):
return time(23, 59)
def special_opens_adhoc(self):
return [
(self.NYT_5PM, self._sunday_dates())
]
def special_closes_adhoc(self):
return [
(self.NYT_5PM, self._friday_dates())
]
def _friday_dates(self):
return pd.date_range(start=self.schedule.index[0],
end=self.schedule.idnex[-1],
freq='W-FRI')
def _sunday_dates(self):
return pd.date_range(start=self.schedule.index[0],
end=self.schedule.idnex[-1],
freq='W-SUN')
| <commit_before>import pytz
from datetime import time
from zipline.utils.calendars import TradingCalendar
class ForexCalendar(TradingCalendar):
@property
def name(self):
return "forex"
@property
def tz(self):
return pytz.UTC
@property
def open_time(self):
return time(0, 0)
@property
def close_time(self):
return time(23, 59)
<commit_msg>Add weekend close and open times<commit_after>import pytz
import pandas as pd
from datetime import time
from zipline.utils.calendars import TradingCalendar
class ForexCalendar(TradingCalendar):
NYT_5PM = time(9)
@property
def name(self):
return "forex"
@property
def tz(self):
return pytz.UTC
@property
def open_time(self):
return time(0, 0)
@property
def close_time(self):
return time(23, 59)
def special_opens_adhoc(self):
return [
(self.NYT_5PM, self._sunday_dates())
]
def special_closes_adhoc(self):
return [
(self.NYT_5PM, self._friday_dates())
]
def _friday_dates(self):
return pd.date_range(start=self.schedule.index[0],
end=self.schedule.idnex[-1],
freq='W-FRI')
def _sunday_dates(self):
return pd.date_range(start=self.schedule.index[0],
end=self.schedule.idnex[-1],
freq='W-SUN')
|
01657e6991197c999180c2fec367fc75e59fba15 | var/spack/repos/builtin/packages/suite-sparse/package.py | var/spack/repos/builtin/packages/suite-sparse/package.py | from spack import *
class SuiteSparse(Package):
"""
SuiteSparse is a suite of sparse matrix algorithms
"""
homepage = 'http://faculty.cse.tamu.edu/davis/suitesparse.html'
url = 'http://faculty.cse.tamu.edu/davis/SuiteSparse/SuiteSparse-4.5.1.tar.gz'
version('4.5.1', 'f0ea9aad8d2d1ffec66a5b6bfeff5319')
depends_on('blas')
depends_on('lapack')
depends_on('metis@5.1.0', when='@4.5.1')
def install(self, spec, prefix):
# The build system of SuiteSparse is quite old-fashioned
# It's basically a plain Makefile which include an header (SuiteSparse_config/SuiteSparse_config.mk)
# with a lot of convoluted logic in it.
# Any kind of customization will need to go through filtering of that file
# FIXME : this actually uses the current workaround
# FIXME : (blas / lapack always provide libblas and liblapack as aliases)
make('install', 'INSTALL=%s' % prefix, 'BLAS=-lblas', 'LAPACK=-llapack')
| from spack import *
class SuiteSparse(Package):
"""
SuiteSparse is a suite of sparse matrix algorithms
"""
homepage = 'http://faculty.cse.tamu.edu/davis/suitesparse.html'
url = 'http://faculty.cse.tamu.edu/davis/SuiteSparse/SuiteSparse-4.5.1.tar.gz'
version('4.5.1', 'f0ea9aad8d2d1ffec66a5b6bfeff5319')
depends_on('blas')
depends_on('lapack')
depends_on('metis@5.1.0', when='@4.5.1')
def install(self, spec, prefix):
# The build system of SuiteSparse is quite old-fashioned
# It's basically a plain Makefile which include an header (SuiteSparse_config/SuiteSparse_config.mk)
# with a lot of convoluted logic in it.
# Any kind of customization will need to go through filtering of that file
# FIXME : this actually uses the current workaround
# FIXME : (blas / lapack always provide libblas and liblapack as aliases)
make('install', 'INSTALL=%s' % prefix,
# inject Spack compiler wrappers
'AUTOCC=no',
'CC=cc',
'CXX=c++',
'F77=f77',
# BLAS arguments require path to libraries
'BLAS=-lblas',
'LAPACK=-llapack')
| Make suite-sparse use spack compilers. | Make suite-sparse use spack compilers.
| Python | lgpl-2.1 | lgarren/spack,TheTimmy/spack,mfherbst/spack,lgarren/spack,iulian787/spack,LLNL/spack,skosukhin/spack,mfherbst/spack,lgarren/spack,krafczyk/spack,mfherbst/spack,krafczyk/spack,lgarren/spack,TheTimmy/spack,TheTimmy/spack,tmerrick1/spack,LLNL/spack,krafczyk/spack,matthiasdiener/spack,skosukhin/spack,TheTimmy/spack,iulian787/spack,skosukhin/spack,LLNL/spack,tmerrick1/spack,lgarren/spack,iulian787/spack,krafczyk/spack,matthiasdiener/spack,EmreAtes/spack,matthiasdiener/spack,LLNL/spack,tmerrick1/spack,TheTimmy/spack,EmreAtes/spack,mfherbst/spack,LLNL/spack,EmreAtes/spack,iulian787/spack,matthiasdiener/spack,iulian787/spack,skosukhin/spack,mfherbst/spack,skosukhin/spack,tmerrick1/spack,krafczyk/spack,EmreAtes/spack,matthiasdiener/spack,EmreAtes/spack,tmerrick1/spack | from spack import *
class SuiteSparse(Package):
"""
SuiteSparse is a suite of sparse matrix algorithms
"""
homepage = 'http://faculty.cse.tamu.edu/davis/suitesparse.html'
url = 'http://faculty.cse.tamu.edu/davis/SuiteSparse/SuiteSparse-4.5.1.tar.gz'
version('4.5.1', 'f0ea9aad8d2d1ffec66a5b6bfeff5319')
depends_on('blas')
depends_on('lapack')
depends_on('metis@5.1.0', when='@4.5.1')
def install(self, spec, prefix):
# The build system of SuiteSparse is quite old-fashioned
# It's basically a plain Makefile which include an header (SuiteSparse_config/SuiteSparse_config.mk)
# with a lot of convoluted logic in it.
# Any kind of customization will need to go through filtering of that file
# FIXME : this actually uses the current workaround
# FIXME : (blas / lapack always provide libblas and liblapack as aliases)
make('install', 'INSTALL=%s' % prefix, 'BLAS=-lblas', 'LAPACK=-llapack')
Make suite-sparse use spack compilers. | from spack import *
class SuiteSparse(Package):
"""
SuiteSparse is a suite of sparse matrix algorithms
"""
homepage = 'http://faculty.cse.tamu.edu/davis/suitesparse.html'
url = 'http://faculty.cse.tamu.edu/davis/SuiteSparse/SuiteSparse-4.5.1.tar.gz'
version('4.5.1', 'f0ea9aad8d2d1ffec66a5b6bfeff5319')
depends_on('blas')
depends_on('lapack')
depends_on('metis@5.1.0', when='@4.5.1')
def install(self, spec, prefix):
# The build system of SuiteSparse is quite old-fashioned
# It's basically a plain Makefile which include an header (SuiteSparse_config/SuiteSparse_config.mk)
# with a lot of convoluted logic in it.
# Any kind of customization will need to go through filtering of that file
# FIXME : this actually uses the current workaround
# FIXME : (blas / lapack always provide libblas and liblapack as aliases)
make('install', 'INSTALL=%s' % prefix,
# inject Spack compiler wrappers
'AUTOCC=no',
'CC=cc',
'CXX=c++',
'F77=f77',
# BLAS arguments require path to libraries
'BLAS=-lblas',
'LAPACK=-llapack')
| <commit_before>from spack import *
class SuiteSparse(Package):
"""
SuiteSparse is a suite of sparse matrix algorithms
"""
homepage = 'http://faculty.cse.tamu.edu/davis/suitesparse.html'
url = 'http://faculty.cse.tamu.edu/davis/SuiteSparse/SuiteSparse-4.5.1.tar.gz'
version('4.5.1', 'f0ea9aad8d2d1ffec66a5b6bfeff5319')
depends_on('blas')
depends_on('lapack')
depends_on('metis@5.1.0', when='@4.5.1')
def install(self, spec, prefix):
# The build system of SuiteSparse is quite old-fashioned
# It's basically a plain Makefile which include an header (SuiteSparse_config/SuiteSparse_config.mk)
# with a lot of convoluted logic in it.
# Any kind of customization will need to go through filtering of that file
# FIXME : this actually uses the current workaround
# FIXME : (blas / lapack always provide libblas and liblapack as aliases)
make('install', 'INSTALL=%s' % prefix, 'BLAS=-lblas', 'LAPACK=-llapack')
<commit_msg>Make suite-sparse use spack compilers.<commit_after> | from spack import *
class SuiteSparse(Package):
"""
SuiteSparse is a suite of sparse matrix algorithms
"""
homepage = 'http://faculty.cse.tamu.edu/davis/suitesparse.html'
url = 'http://faculty.cse.tamu.edu/davis/SuiteSparse/SuiteSparse-4.5.1.tar.gz'
version('4.5.1', 'f0ea9aad8d2d1ffec66a5b6bfeff5319')
depends_on('blas')
depends_on('lapack')
depends_on('metis@5.1.0', when='@4.5.1')
def install(self, spec, prefix):
# The build system of SuiteSparse is quite old-fashioned
# It's basically a plain Makefile which include an header (SuiteSparse_config/SuiteSparse_config.mk)
# with a lot of convoluted logic in it.
# Any kind of customization will need to go through filtering of that file
# FIXME : this actually uses the current workaround
# FIXME : (blas / lapack always provide libblas and liblapack as aliases)
make('install', 'INSTALL=%s' % prefix,
# inject Spack compiler wrappers
'AUTOCC=no',
'CC=cc',
'CXX=c++',
'F77=f77',
# BLAS arguments require path to libraries
'BLAS=-lblas',
'LAPACK=-llapack')
| from spack import *
class SuiteSparse(Package):
"""
SuiteSparse is a suite of sparse matrix algorithms
"""
homepage = 'http://faculty.cse.tamu.edu/davis/suitesparse.html'
url = 'http://faculty.cse.tamu.edu/davis/SuiteSparse/SuiteSparse-4.5.1.tar.gz'
version('4.5.1', 'f0ea9aad8d2d1ffec66a5b6bfeff5319')
depends_on('blas')
depends_on('lapack')
depends_on('metis@5.1.0', when='@4.5.1')
def install(self, spec, prefix):
# The build system of SuiteSparse is quite old-fashioned
# It's basically a plain Makefile which include an header (SuiteSparse_config/SuiteSparse_config.mk)
# with a lot of convoluted logic in it.
# Any kind of customization will need to go through filtering of that file
# FIXME : this actually uses the current workaround
# FIXME : (blas / lapack always provide libblas and liblapack as aliases)
make('install', 'INSTALL=%s' % prefix, 'BLAS=-lblas', 'LAPACK=-llapack')
Make suite-sparse use spack compilers.from spack import *
class SuiteSparse(Package):
"""
SuiteSparse is a suite of sparse matrix algorithms
"""
homepage = 'http://faculty.cse.tamu.edu/davis/suitesparse.html'
url = 'http://faculty.cse.tamu.edu/davis/SuiteSparse/SuiteSparse-4.5.1.tar.gz'
version('4.5.1', 'f0ea9aad8d2d1ffec66a5b6bfeff5319')
depends_on('blas')
depends_on('lapack')
depends_on('metis@5.1.0', when='@4.5.1')
def install(self, spec, prefix):
# The build system of SuiteSparse is quite old-fashioned
# It's basically a plain Makefile which include an header (SuiteSparse_config/SuiteSparse_config.mk)
# with a lot of convoluted logic in it.
# Any kind of customization will need to go through filtering of that file
# FIXME : this actually uses the current workaround
# FIXME : (blas / lapack always provide libblas and liblapack as aliases)
make('install', 'INSTALL=%s' % prefix,
# inject Spack compiler wrappers
'AUTOCC=no',
'CC=cc',
'CXX=c++',
'F77=f77',
# BLAS arguments require path to libraries
'BLAS=-lblas',
'LAPACK=-llapack')
| <commit_before>from spack import *
class SuiteSparse(Package):
"""
SuiteSparse is a suite of sparse matrix algorithms
"""
homepage = 'http://faculty.cse.tamu.edu/davis/suitesparse.html'
url = 'http://faculty.cse.tamu.edu/davis/SuiteSparse/SuiteSparse-4.5.1.tar.gz'
version('4.5.1', 'f0ea9aad8d2d1ffec66a5b6bfeff5319')
depends_on('blas')
depends_on('lapack')
depends_on('metis@5.1.0', when='@4.5.1')
def install(self, spec, prefix):
# The build system of SuiteSparse is quite old-fashioned
# It's basically a plain Makefile which include an header (SuiteSparse_config/SuiteSparse_config.mk)
# with a lot of convoluted logic in it.
# Any kind of customization will need to go through filtering of that file
# FIXME : this actually uses the current workaround
# FIXME : (blas / lapack always provide libblas and liblapack as aliases)
make('install', 'INSTALL=%s' % prefix, 'BLAS=-lblas', 'LAPACK=-llapack')
<commit_msg>Make suite-sparse use spack compilers.<commit_after>from spack import *
class SuiteSparse(Package):
"""
SuiteSparse is a suite of sparse matrix algorithms
"""
homepage = 'http://faculty.cse.tamu.edu/davis/suitesparse.html'
url = 'http://faculty.cse.tamu.edu/davis/SuiteSparse/SuiteSparse-4.5.1.tar.gz'
version('4.5.1', 'f0ea9aad8d2d1ffec66a5b6bfeff5319')
depends_on('blas')
depends_on('lapack')
depends_on('metis@5.1.0', when='@4.5.1')
def install(self, spec, prefix):
# The build system of SuiteSparse is quite old-fashioned
# It's basically a plain Makefile which include an header (SuiteSparse_config/SuiteSparse_config.mk)
# with a lot of convoluted logic in it.
# Any kind of customization will need to go through filtering of that file
# FIXME : this actually uses the current workaround
# FIXME : (blas / lapack always provide libblas and liblapack as aliases)
make('install', 'INSTALL=%s' % prefix,
# inject Spack compiler wrappers
'AUTOCC=no',
'CC=cc',
'CXX=c++',
'F77=f77',
# BLAS arguments require path to libraries
'BLAS=-lblas',
'LAPACK=-llapack')
|
c33f011d3ea9e1783149a9fb34d941a899f7cedc | blog/forms.py | blog/forms.py | from .models import BlogPost, Comment
from django.forms import ModelForm
class BlogPostForm(ModelForm):
class Meta:
model = BlogPost
exclude = ('user',)
class CommentForm(ModelForm):
class Meta:
model = Comment
exclude = ('post',)
| from .models import BlogPost, Comment
from django.forms import ModelForm
class BlogPostForm(ModelForm):
class Meta:
model = BlogPost
exclude = ('user',)
class CommentForm(ModelForm):
class Meta:
model = Comment
exclude = ('post', 'user', 'date',)
| Exclude fields from CommentForm that will be automatically inserted | Exclude fields from CommentForm that will be automatically inserted
| Python | mit | andreagrandi/bloggato,andreagrandi/bloggato | from .models import BlogPost, Comment
from django.forms import ModelForm
class BlogPostForm(ModelForm):
class Meta:
model = BlogPost
exclude = ('user',)
class CommentForm(ModelForm):
class Meta:
model = Comment
exclude = ('post',)
Exclude fields from CommentForm that will be automatically inserted | from .models import BlogPost, Comment
from django.forms import ModelForm
class BlogPostForm(ModelForm):
class Meta:
model = BlogPost
exclude = ('user',)
class CommentForm(ModelForm):
class Meta:
model = Comment
exclude = ('post', 'user', 'date',)
| <commit_before>from .models import BlogPost, Comment
from django.forms import ModelForm
class BlogPostForm(ModelForm):
class Meta:
model = BlogPost
exclude = ('user',)
class CommentForm(ModelForm):
class Meta:
model = Comment
exclude = ('post',)
<commit_msg>Exclude fields from CommentForm that will be automatically inserted<commit_after> | from .models import BlogPost, Comment
from django.forms import ModelForm
class BlogPostForm(ModelForm):
class Meta:
model = BlogPost
exclude = ('user',)
class CommentForm(ModelForm):
class Meta:
model = Comment
exclude = ('post', 'user', 'date',)
| from .models import BlogPost, Comment
from django.forms import ModelForm
class BlogPostForm(ModelForm):
class Meta:
model = BlogPost
exclude = ('user',)
class CommentForm(ModelForm):
class Meta:
model = Comment
exclude = ('post',)
Exclude fields from CommentForm that will be automatically insertedfrom .models import BlogPost, Comment
from django.forms import ModelForm
class BlogPostForm(ModelForm):
class Meta:
model = BlogPost
exclude = ('user',)
class CommentForm(ModelForm):
class Meta:
model = Comment
exclude = ('post', 'user', 'date',)
| <commit_before>from .models import BlogPost, Comment
from django.forms import ModelForm
class BlogPostForm(ModelForm):
class Meta:
model = BlogPost
exclude = ('user',)
class CommentForm(ModelForm):
class Meta:
model = Comment
exclude = ('post',)
<commit_msg>Exclude fields from CommentForm that will be automatically inserted<commit_after>from .models import BlogPost, Comment
from django.forms import ModelForm
class BlogPostForm(ModelForm):
class Meta:
model = BlogPost
exclude = ('user',)
class CommentForm(ModelForm):
class Meta:
model = Comment
exclude = ('post', 'user', 'date',)
|
e03426b8fd696b8794e21ef52c76a0a5140e1463 | Maths/fibonacciSeries.py | Maths/fibonacciSeries.py | # Fibonacci Sequence Using Recursion
def recur_fibo(n):
if n <= 1:
return n
else:
return(recur_fibo(n-1) + recur_fibo(n-2))
limit = int(input("How many terms to include in fibonacci series: "))
if limit <= 0:
print("Please enter a positive integer: ")
else:
print(f"The first {limit} terms of the fibonacci series are as follows")
for i in range(limit):
print(recur_fibo(i))
| # Fibonacci Sequence Using Recursion
def recur_fibo(n):
return n if n <= 1 else (recur_fibo(n-1) + recur_fibo(n-2))
def isPositiveInteger(limit):
return limit >= 0
def main():
limit = int(input("How many terms to include in fibonacci series: "))
if isPositiveInteger:
print(f"The first {limit} terms of the fibonacci series are as follows:")
print([recur_fibo(n) for n in range(limit)])
else:
print("Please enter a positive integer: ")
if __name__ == '__main__':
main()
| Improve and Refactor the fibonnaciSeries.py (Recursion) | Improve and Refactor the fibonnaciSeries.py (Recursion)
| Python | mit | TheAlgorithms/Python | # Fibonacci Sequence Using Recursion
def recur_fibo(n):
if n <= 1:
return n
else:
return(recur_fibo(n-1) + recur_fibo(n-2))
limit = int(input("How many terms to include in fibonacci series: "))
if limit <= 0:
print("Please enter a positive integer: ")
else:
print(f"The first {limit} terms of the fibonacci series are as follows")
for i in range(limit):
print(recur_fibo(i))
Improve and Refactor the fibonnaciSeries.py (Recursion) | # Fibonacci Sequence Using Recursion
def recur_fibo(n):
return n if n <= 1 else (recur_fibo(n-1) + recur_fibo(n-2))
def isPositiveInteger(limit):
return limit >= 0
def main():
limit = int(input("How many terms to include in fibonacci series: "))
if isPositiveInteger:
print(f"The first {limit} terms of the fibonacci series are as follows:")
print([recur_fibo(n) for n in range(limit)])
else:
print("Please enter a positive integer: ")
if __name__ == '__main__':
main()
| <commit_before># Fibonacci Sequence Using Recursion
def recur_fibo(n):
if n <= 1:
return n
else:
return(recur_fibo(n-1) + recur_fibo(n-2))
limit = int(input("How many terms to include in fibonacci series: "))
if limit <= 0:
print("Please enter a positive integer: ")
else:
print(f"The first {limit} terms of the fibonacci series are as follows")
for i in range(limit):
print(recur_fibo(i))
<commit_msg>Improve and Refactor the fibonnaciSeries.py (Recursion)<commit_after> | # Fibonacci Sequence Using Recursion
def recur_fibo(n):
return n if n <= 1 else (recur_fibo(n-1) + recur_fibo(n-2))
def isPositiveInteger(limit):
return limit >= 0
def main():
limit = int(input("How many terms to include in fibonacci series: "))
if isPositiveInteger:
print(f"The first {limit} terms of the fibonacci series are as follows:")
print([recur_fibo(n) for n in range(limit)])
else:
print("Please enter a positive integer: ")
if __name__ == '__main__':
main()
| # Fibonacci Sequence Using Recursion
def recur_fibo(n):
if n <= 1:
return n
else:
return(recur_fibo(n-1) + recur_fibo(n-2))
limit = int(input("How many terms to include in fibonacci series: "))
if limit <= 0:
print("Please enter a positive integer: ")
else:
print(f"The first {limit} terms of the fibonacci series are as follows")
for i in range(limit):
print(recur_fibo(i))
Improve and Refactor the fibonnaciSeries.py (Recursion)# Fibonacci Sequence Using Recursion
def recur_fibo(n):
return n if n <= 1 else (recur_fibo(n-1) + recur_fibo(n-2))
def isPositiveInteger(limit):
return limit >= 0
def main():
limit = int(input("How many terms to include in fibonacci series: "))
if isPositiveInteger:
print(f"The first {limit} terms of the fibonacci series are as follows:")
print([recur_fibo(n) for n in range(limit)])
else:
print("Please enter a positive integer: ")
if __name__ == '__main__':
main()
| <commit_before># Fibonacci Sequence Using Recursion
def recur_fibo(n):
if n <= 1:
return n
else:
return(recur_fibo(n-1) + recur_fibo(n-2))
limit = int(input("How many terms to include in fibonacci series: "))
if limit <= 0:
print("Please enter a positive integer: ")
else:
print(f"The first {limit} terms of the fibonacci series are as follows")
for i in range(limit):
print(recur_fibo(i))
<commit_msg>Improve and Refactor the fibonnaciSeries.py (Recursion)<commit_after># Fibonacci Sequence Using Recursion
def recur_fibo(n):
return n if n <= 1 else (recur_fibo(n-1) + recur_fibo(n-2))
def isPositiveInteger(limit):
return limit >= 0
def main():
limit = int(input("How many terms to include in fibonacci series: "))
if isPositiveInteger:
print(f"The first {limit} terms of the fibonacci series are as follows:")
print([recur_fibo(n) for n in range(limit)])
else:
print("Please enter a positive integer: ")
if __name__ == '__main__':
main()
|
00222bb47818ea2fdf60847e6ad42ba96c39f16b | whacked4/whacked4/dehacked/filters.py | whacked4/whacked4/dehacked/filters.py | #!/usr/bin/env python
#coding=utf8
"""
This module contains functions used by a Dehacked table entry to filter certain values when reading or writing
them.
"""
import math
import re
def filter_thing_flags_read(value, table):
    """
    Filters a thing's flags value.

    Extended patches can use mnemonics for flag names, separated by plus signs
    (commas, pipes and whitespace are also accepted as delimiters).

    @raise LookupError: if the value contains an unknown mnemonic.
    """
    # Plain numeric flag fields need no mnemonic translation.  This also
    # fixes two defects in the original guard: a numeric value (not
    # alphabetic) fell through to the mnemonic lookup and raised
    # LookupError, and a single alphabetic mnemonic was returned
    # unconverted instead of being translated to its bit value.
    if value.isdigit():
        return value

    items = re.split(r"[,+| \t\f\r]+", value)

    out = 0
    for item in items:
        item = item.strip()

        # Find the index of the flag mnemonic and convert it to a flag value.
        flag = table.flags.get(item)
        if flag is None:
            raise LookupError('Ignoring unknown thing flag {}.'.format(item))

        bit = int(math.pow(2, flag['index']))
        out += bit

    return out
def filter_thing_flags_write(value, table):
    """
    Returns a thing flags value as a string of mnemonics.

    value is the integer flags bitfield; table.flags maps each mnemonic to
    a dict holding its bit 'index'.  Returns the integer 0 when no known
    flag bits are set (preserved from the original behaviour).
    """
    bit = 1
    out = []
    for _ in range(0, 32):
        if (value & bit) == 0:
            bit *= 2
            continue

        # Look up the mnemonic whose bit index matches the current bit.
        # Bug fix: dict.items() instead of the Python 2-only iteritems()
        # so the module also runs on Python 3 (identical result on Py2).
        for key, flag in table.flags.items():
            if int(math.pow(2, flag['index'])) == bit:
                out.append(key)
                break

        bit *= 2

    if len(out) == 0:
        return 0
    else:
        return '+'.join(out)
#coding=utf8
"""
This module contains functions used by a Dehacked table entry to filter certain values when reading or writing
them.
"""
import math
import re
def filter_thing_flags_read(value, table):
"""
Filters a thing's flags value.
Extended patches can use mnemonics for flag names, separated by plus signs.
@raise LookupError: if the value contains an unknown mnemonic.
"""
if value.isdigit() == True:
return value
items = re.split(r"[,+| \t\f\r]+", value)
out = 0
for item in items:
item = item.strip()
# Find the index of the flag mnemonic and convert it to a flag value.
flag = table.flags.get(item)
if flag is None:
raise LookupError('Ignoring unknown thing flag {}.'.format(item))
bit = int(math.pow(2, flag['index']))
out += bit
return out
def filter_thing_flags_write(value, table):
"""
Returns a thing flags value as a string of mnemonics.
"""
bit = 1
out = []
for _ in range(0, 32):
if (value & bit) == 0:
bit *= 2
continue
for key, flag in table.flags.iteritems():
if int(math.pow(2, flag['index'])) == bit:
out.append(key)
break
bit *= 2
if len(out) == 0:
return 0
else:
return '+'.join(out) | Fix flag delimiter support from 30ec188. | Fix flag delimiter support from 30ec188.
| Python | bsd-2-clause | GitExl/WhackEd4,GitExl/WhackEd4 | #!/usr/bin/env python
#coding=utf8
"""
This module contains functions used by a Dehacked table entry to filter certain values when reading or writing
them.
"""
import math
import re
def filter_thing_flags_read(value, table):
"""
Filters a thing's flags value.
Extended patches can use mnemonics for flag names, separated by plus signs.
@raise LookupError: if the value contains an unknown mnemonic.
"""
items = re.split(r"[,+| \t\f\r]+", value)
if len(items) <= 1 and value.isalpha() == True:
return value
out = 0
for item in items:
item = item.strip()
# Find the index of the flag mnemonic and convert it to a flag value.
flag = table.flags.get(item)
if flag is None:
raise LookupError('Ignoring unknown thing flag {}.'.format(item))
bit = int(math.pow(2, flag['index']))
out += bit
return out
def filter_thing_flags_write(value, table):
"""
Returns a thing flags value as a string of mnemonics.
"""
bit = 1
out = []
for _ in range(0, 32):
if (value & bit) == 0:
bit *= 2
continue
for key, flag in table.flags.iteritems():
if int(math.pow(2, flag['index'])) == bit:
out.append(key)
break
bit *= 2
if len(out) == 0:
return 0
else:
return '+'.join(out)Fix flag delimiter support from 30ec188. | #!/usr/bin/env python
#coding=utf8
"""
This module contains functions used by a Dehacked table entry to filter certain values when reading or writing
them.
"""
import math
import re
def filter_thing_flags_read(value, table):
"""
Filters a thing's flags value.
Extended patches can use mnemonics for flag names, separated by plus signs.
@raise LookupError: if the value contains an unknown mnemonic.
"""
if value.isdigit() == True:
return value
items = re.split(r"[,+| \t\f\r]+", value)
out = 0
for item in items:
item = item.strip()
# Find the index of the flag mnemonic and convert it to a flag value.
flag = table.flags.get(item)
if flag is None:
raise LookupError('Ignoring unknown thing flag {}.'.format(item))
bit = int(math.pow(2, flag['index']))
out += bit
return out
def filter_thing_flags_write(value, table):
"""
Returns a thing flags value as a string of mnemonics.
"""
bit = 1
out = []
for _ in range(0, 32):
if (value & bit) == 0:
bit *= 2
continue
for key, flag in table.flags.iteritems():
if int(math.pow(2, flag['index'])) == bit:
out.append(key)
break
bit *= 2
if len(out) == 0:
return 0
else:
return '+'.join(out) | <commit_before>#!/usr/bin/env python
#coding=utf8
"""
This module contains functions used by a Dehacked table entry to filter certain values when reading or writing
them.
"""
import math
import re
def filter_thing_flags_read(value, table):
"""
Filters a thing's flags value.
Extended patches can use mnemonics for flag names, separated by plus signs.
@raise LookupError: if the value contains an unknown mnemonic.
"""
items = re.split(r"[,+| \t\f\r]+", value)
if len(items) <= 1 and value.isalpha() == True:
return value
out = 0
for item in items:
item = item.strip()
# Find the index of the flag mnemonic and convert it to a flag value.
flag = table.flags.get(item)
if flag is None:
raise LookupError('Ignoring unknown thing flag {}.'.format(item))
bit = int(math.pow(2, flag['index']))
out += bit
return out
def filter_thing_flags_write(value, table):
"""
Returns a thing flags value as a string of mnemonics.
"""
bit = 1
out = []
for _ in range(0, 32):
if (value & bit) == 0:
bit *= 2
continue
for key, flag in table.flags.iteritems():
if int(math.pow(2, flag['index'])) == bit:
out.append(key)
break
bit *= 2
if len(out) == 0:
return 0
else:
return '+'.join(out)<commit_msg>Fix flag delimiter support from 30ec188.<commit_after> | #!/usr/bin/env python
#coding=utf8
"""
This module contains functions used by a Dehacked table entry to filter certain values when reading or writing
them.
"""
import math
import re
def filter_thing_flags_read(value, table):
"""
Filters a thing's flags value.
Extended patches can use mnemonics for flag names, separated by plus signs.
@raise LookupError: if the value contains an unknown mnemonic.
"""
if value.isdigit() == True:
return value
items = re.split(r"[,+| \t\f\r]+", value)
out = 0
for item in items:
item = item.strip()
# Find the index of the flag mnemonic and convert it to a flag value.
flag = table.flags.get(item)
if flag is None:
raise LookupError('Ignoring unknown thing flag {}.'.format(item))
bit = int(math.pow(2, flag['index']))
out += bit
return out
def filter_thing_flags_write(value, table):
"""
Returns a thing flags value as a string of mnemonics.
"""
bit = 1
out = []
for _ in range(0, 32):
if (value & bit) == 0:
bit *= 2
continue
for key, flag in table.flags.iteritems():
if int(math.pow(2, flag['index'])) == bit:
out.append(key)
break
bit *= 2
if len(out) == 0:
return 0
else:
return '+'.join(out) | #!/usr/bin/env python
#coding=utf8
"""
This module contains functions used by a Dehacked table entry to filter certain values when reading or writing
them.
"""
import math
import re
def filter_thing_flags_read(value, table):
"""
Filters a thing's flags value.
Extended patches can use mnemonics for flag names, separated by plus signs.
@raise LookupError: if the value contains an unknown mnemonic.
"""
items = re.split(r"[,+| \t\f\r]+", value)
if len(items) <= 1 and value.isalpha() == True:
return value
out = 0
for item in items:
item = item.strip()
# Find the index of the flag mnemonic and convert it to a flag value.
flag = table.flags.get(item)
if flag is None:
raise LookupError('Ignoring unknown thing flag {}.'.format(item))
bit = int(math.pow(2, flag['index']))
out += bit
return out
def filter_thing_flags_write(value, table):
"""
Returns a thing flags value as a string of mnemonics.
"""
bit = 1
out = []
for _ in range(0, 32):
if (value & bit) == 0:
bit *= 2
continue
for key, flag in table.flags.iteritems():
if int(math.pow(2, flag['index'])) == bit:
out.append(key)
break
bit *= 2
if len(out) == 0:
return 0
else:
return '+'.join(out)Fix flag delimiter support from 30ec188.#!/usr/bin/env python
#coding=utf8
"""
This module contains functions used by a Dehacked table entry to filter certain values when reading or writing
them.
"""
import math
import re
def filter_thing_flags_read(value, table):
"""
Filters a thing's flags value.
Extended patches can use mnemonics for flag names, separated by plus signs.
@raise LookupError: if the value contains an unknown mnemonic.
"""
if value.isdigit() == True:
return value
items = re.split(r"[,+| \t\f\r]+", value)
out = 0
for item in items:
item = item.strip()
# Find the index of the flag mnemonic and convert it to a flag value.
flag = table.flags.get(item)
if flag is None:
raise LookupError('Ignoring unknown thing flag {}.'.format(item))
bit = int(math.pow(2, flag['index']))
out += bit
return out
def filter_thing_flags_write(value, table):
"""
Returns a thing flags value as a string of mnemonics.
"""
bit = 1
out = []
for _ in range(0, 32):
if (value & bit) == 0:
bit *= 2
continue
for key, flag in table.flags.iteritems():
if int(math.pow(2, flag['index'])) == bit:
out.append(key)
break
bit *= 2
if len(out) == 0:
return 0
else:
return '+'.join(out) | <commit_before>#!/usr/bin/env python
#coding=utf8
"""
This module contains functions used by a Dehacked table entry to filter certain values when reading or writing
them.
"""
import math
import re
def filter_thing_flags_read(value, table):
"""
Filters a thing's flags value.
Extended patches can use mnemonics for flag names, separated by plus signs.
@raise LookupError: if the value contains an unknown mnemonic.
"""
items = re.split(r"[,+| \t\f\r]+", value)
if len(items) <= 1 and value.isalpha() == True:
return value
out = 0
for item in items:
item = item.strip()
# Find the index of the flag mnemonic and convert it to a flag value.
flag = table.flags.get(item)
if flag is None:
raise LookupError('Ignoring unknown thing flag {}.'.format(item))
bit = int(math.pow(2, flag['index']))
out += bit
return out
def filter_thing_flags_write(value, table):
"""
Returns a thing flags value as a string of mnemonics.
"""
bit = 1
out = []
for _ in range(0, 32):
if (value & bit) == 0:
bit *= 2
continue
for key, flag in table.flags.iteritems():
if int(math.pow(2, flag['index'])) == bit:
out.append(key)
break
bit *= 2
if len(out) == 0:
return 0
else:
return '+'.join(out)<commit_msg>Fix flag delimiter support from 30ec188.<commit_after>#!/usr/bin/env python
#coding=utf8
"""
This module contains functions used by a Dehacked table entry to filter certain values when reading or writing
them.
"""
import math
import re
def filter_thing_flags_read(value, table):
"""
Filters a thing's flags value.
Extended patches can use mnemonics for flag names, separated by plus signs.
@raise LookupError: if the value contains an unknown mnemonic.
"""
if value.isdigit() == True:
return value
items = re.split(r"[,+| \t\f\r]+", value)
out = 0
for item in items:
item = item.strip()
# Find the index of the flag mnemonic and convert it to a flag value.
flag = table.flags.get(item)
if flag is None:
raise LookupError('Ignoring unknown thing flag {}.'.format(item))
bit = int(math.pow(2, flag['index']))
out += bit
return out
def filter_thing_flags_write(value, table):
"""
Returns a thing flags value as a string of mnemonics.
"""
bit = 1
out = []
for _ in range(0, 32):
if (value & bit) == 0:
bit *= 2
continue
for key, flag in table.flags.iteritems():
if int(math.pow(2, flag['index'])) == bit:
out.append(key)
break
bit *= 2
if len(out) == 0:
return 0
else:
return '+'.join(out) |
45e2651325ebf3d4554816d5c7bef04030d147b2 | tests/test_middleware.py | tests/test_middleware.py | from os import environ
from unittest import TestCase
environ['DJANGO_SETTINGS_MODULE'] = 'test_settings'
from incuna_auth.middleware import LoginRequiredMiddleware
class AuthenticatedUser(object):
    """Stub user whose is_authenticated() always reports a logged-in user."""
    def is_authenticated(self):
        # Method (not property) form -- presumably mirroring the user API
        # the middleware under test expects; confirm against the middleware.
        return True
class AnonymousUser(object):
    """Stub user whose is_authenticated() always reports a logged-out user."""
    def is_authenticated(self):
        return False
class Request(object):
    """Minimal stand-in for an HTTP request: a path and an HTTP method."""
    def __init__(self, path_info, method='GET'):
        # Only these two attributes (plus a `user` set by the tests) are
        # read by the middleware under test.
        self.method = method
        self.path_info = path_info
class TestLoginRequiredMiddleware(TestCase):
    """process_request() must return None (pass the request through) for
    exempt URLs, unprotected URLs, and authenticated users."""

    def setUp(self):
        self.middleware = LoginRequiredMiddleware()

    def _middleware_response(self, path, user):
        # Build a fake request for *path* owned by *user* and feed it to
        # the middleware under test.
        self.request = Request(path)
        self.request.user = user
        return self.middleware.process_request(self.request)

    def test_skip_middleware_if_url_is_exempt(self):
        response = self._middleware_response('exempt-and-protected-url/', AnonymousUser())
        self.assertEqual(response, None)

    def test_skip_middleware_if_url_is_not_protected(self):
        response = self._middleware_response('non-protected-url/', AnonymousUser())
        self.assertEqual(response, None)

    def test_skip_middleware_if_user_is_authenticated(self):
        response = self._middleware_response('protected-url/', AuthenticatedUser())
        self.assertEqual(response, None)
| from os import environ
from unittest import TestCase
environ['DJANGO_SETTINGS_MODULE'] = 'test_settings'
from incuna_auth.middleware import LoginRequiredMiddleware
class AuthenticatedUser(object):
def is_authenticated(self):
return True
class AnonymousUser(object):
def is_authenticated(self):
return False
class Request(object):
def __init__(self, path_info, method='GET'):
self.path_info = path_info
self.method = method
class TestLoginRequiredMiddleware(TestCase):
def setUp(self):
self.middleware = LoginRequiredMiddleware()
def test_skip_middleware_if_url_is_exempt(self):
self.request = Request('exempt-and-protected-url/')
self.request.user = AnonymousUser()
response = self.middleware.process_request(self.request)
self.assertEqual(response, None)
def test_skip_middleware_if_url_is_not_protected(self):
self.request = Request('non-protected-url/')
self.request.user = AnonymousUser()
response = self.middleware.process_request(self.request)
self.assertEqual(response, None)
def test_skip_middleware_if_user_is_authenticated(self):
self.request = Request('protected-url/')
self.request.user = AuthenticatedUser()
response = self.middleware.process_request(self.request)
self.assertEqual(response, None)
def test_403_result_if_non_get_request(self):
self.request = Request('protected-url/', 'POST')
self.request.user = AnonymousUser()
response = self.middleware.process_request(self.request)
self.assertEqual(response.status_code, 403)
| Add test for non-GET requests | Add test for non-GET requests
* Check that we get a 403 result.
| Python | bsd-2-clause | incuna/incuna-auth,incuna/incuna-auth,ghickman/incuna-auth,ghickman/incuna-auth | from os import environ
from unittest import TestCase
environ['DJANGO_SETTINGS_MODULE'] = 'test_settings'
from incuna_auth.middleware import LoginRequiredMiddleware
class AuthenticatedUser(object):
def is_authenticated(self):
return True
class AnonymousUser(object):
def is_authenticated(self):
return False
class Request(object):
def __init__(self, path_info, method='GET'):
self.path_info = path_info
self.method = method
class TestLoginRequiredMiddleware(TestCase):
def setUp(self):
self.middleware = LoginRequiredMiddleware()
def test_skip_middleware_if_url_is_exempt(self):
self.request = Request('exempt-and-protected-url/')
self.request.user = AnonymousUser()
response = self.middleware.process_request(self.request)
self.assertEqual(response, None)
def test_skip_middleware_if_url_is_not_protected(self):
self.request = Request('non-protected-url/')
self.request.user = AnonymousUser()
response = self.middleware.process_request(self.request)
self.assertEqual(response, None)
def test_skip_middleware_if_user_is_authenticated(self):
self.request = Request('protected-url/')
self.request.user = AuthenticatedUser()
response = self.middleware.process_request(self.request)
self.assertEqual(response, None)
Add test for non-GET requests
* Check that we get a 403 result. | from os import environ
from unittest import TestCase
environ['DJANGO_SETTINGS_MODULE'] = 'test_settings'
from incuna_auth.middleware import LoginRequiredMiddleware
class AuthenticatedUser(object):
def is_authenticated(self):
return True
class AnonymousUser(object):
def is_authenticated(self):
return False
class Request(object):
def __init__(self, path_info, method='GET'):
self.path_info = path_info
self.method = method
class TestLoginRequiredMiddleware(TestCase):
def setUp(self):
self.middleware = LoginRequiredMiddleware()
def test_skip_middleware_if_url_is_exempt(self):
self.request = Request('exempt-and-protected-url/')
self.request.user = AnonymousUser()
response = self.middleware.process_request(self.request)
self.assertEqual(response, None)
def test_skip_middleware_if_url_is_not_protected(self):
self.request = Request('non-protected-url/')
self.request.user = AnonymousUser()
response = self.middleware.process_request(self.request)
self.assertEqual(response, None)
def test_skip_middleware_if_user_is_authenticated(self):
self.request = Request('protected-url/')
self.request.user = AuthenticatedUser()
response = self.middleware.process_request(self.request)
self.assertEqual(response, None)
def test_403_result_if_non_get_request(self):
self.request = Request('protected-url/', 'POST')
self.request.user = AnonymousUser()
response = self.middleware.process_request(self.request)
self.assertEqual(response.status_code, 403)
| <commit_before>from os import environ
from unittest import TestCase
environ['DJANGO_SETTINGS_MODULE'] = 'test_settings'
from incuna_auth.middleware import LoginRequiredMiddleware
class AuthenticatedUser(object):
def is_authenticated(self):
return True
class AnonymousUser(object):
def is_authenticated(self):
return False
class Request(object):
def __init__(self, path_info, method='GET'):
self.path_info = path_info
self.method = method
class TestLoginRequiredMiddleware(TestCase):
def setUp(self):
self.middleware = LoginRequiredMiddleware()
def test_skip_middleware_if_url_is_exempt(self):
self.request = Request('exempt-and-protected-url/')
self.request.user = AnonymousUser()
response = self.middleware.process_request(self.request)
self.assertEqual(response, None)
def test_skip_middleware_if_url_is_not_protected(self):
self.request = Request('non-protected-url/')
self.request.user = AnonymousUser()
response = self.middleware.process_request(self.request)
self.assertEqual(response, None)
def test_skip_middleware_if_user_is_authenticated(self):
self.request = Request('protected-url/')
self.request.user = AuthenticatedUser()
response = self.middleware.process_request(self.request)
self.assertEqual(response, None)
<commit_msg>Add test for non-GET requests
* Check that we get a 403 result.<commit_after> | from os import environ
from unittest import TestCase
environ['DJANGO_SETTINGS_MODULE'] = 'test_settings'
from incuna_auth.middleware import LoginRequiredMiddleware
class AuthenticatedUser(object):
def is_authenticated(self):
return True
class AnonymousUser(object):
def is_authenticated(self):
return False
class Request(object):
def __init__(self, path_info, method='GET'):
self.path_info = path_info
self.method = method
class TestLoginRequiredMiddleware(TestCase):
def setUp(self):
self.middleware = LoginRequiredMiddleware()
def test_skip_middleware_if_url_is_exempt(self):
self.request = Request('exempt-and-protected-url/')
self.request.user = AnonymousUser()
response = self.middleware.process_request(self.request)
self.assertEqual(response, None)
def test_skip_middleware_if_url_is_not_protected(self):
self.request = Request('non-protected-url/')
self.request.user = AnonymousUser()
response = self.middleware.process_request(self.request)
self.assertEqual(response, None)
def test_skip_middleware_if_user_is_authenticated(self):
self.request = Request('protected-url/')
self.request.user = AuthenticatedUser()
response = self.middleware.process_request(self.request)
self.assertEqual(response, None)
def test_403_result_if_non_get_request(self):
self.request = Request('protected-url/', 'POST')
self.request.user = AnonymousUser()
response = self.middleware.process_request(self.request)
self.assertEqual(response.status_code, 403)
| from os import environ
from unittest import TestCase
environ['DJANGO_SETTINGS_MODULE'] = 'test_settings'
from incuna_auth.middleware import LoginRequiredMiddleware
class AuthenticatedUser(object):
def is_authenticated(self):
return True
class AnonymousUser(object):
def is_authenticated(self):
return False
class Request(object):
def __init__(self, path_info, method='GET'):
self.path_info = path_info
self.method = method
class TestLoginRequiredMiddleware(TestCase):
def setUp(self):
self.middleware = LoginRequiredMiddleware()
def test_skip_middleware_if_url_is_exempt(self):
self.request = Request('exempt-and-protected-url/')
self.request.user = AnonymousUser()
response = self.middleware.process_request(self.request)
self.assertEqual(response, None)
def test_skip_middleware_if_url_is_not_protected(self):
self.request = Request('non-protected-url/')
self.request.user = AnonymousUser()
response = self.middleware.process_request(self.request)
self.assertEqual(response, None)
def test_skip_middleware_if_user_is_authenticated(self):
self.request = Request('protected-url/')
self.request.user = AuthenticatedUser()
response = self.middleware.process_request(self.request)
self.assertEqual(response, None)
Add test for non-GET requests
* Check that we get a 403 result.from os import environ
from unittest import TestCase
environ['DJANGO_SETTINGS_MODULE'] = 'test_settings'
from incuna_auth.middleware import LoginRequiredMiddleware
class AuthenticatedUser(object):
def is_authenticated(self):
return True
class AnonymousUser(object):
def is_authenticated(self):
return False
class Request(object):
def __init__(self, path_info, method='GET'):
self.path_info = path_info
self.method = method
class TestLoginRequiredMiddleware(TestCase):
def setUp(self):
self.middleware = LoginRequiredMiddleware()
def test_skip_middleware_if_url_is_exempt(self):
self.request = Request('exempt-and-protected-url/')
self.request.user = AnonymousUser()
response = self.middleware.process_request(self.request)
self.assertEqual(response, None)
def test_skip_middleware_if_url_is_not_protected(self):
self.request = Request('non-protected-url/')
self.request.user = AnonymousUser()
response = self.middleware.process_request(self.request)
self.assertEqual(response, None)
def test_skip_middleware_if_user_is_authenticated(self):
self.request = Request('protected-url/')
self.request.user = AuthenticatedUser()
response = self.middleware.process_request(self.request)
self.assertEqual(response, None)
def test_403_result_if_non_get_request(self):
self.request = Request('protected-url/', 'POST')
self.request.user = AnonymousUser()
response = self.middleware.process_request(self.request)
self.assertEqual(response.status_code, 403)
| <commit_before>from os import environ
from unittest import TestCase
environ['DJANGO_SETTINGS_MODULE'] = 'test_settings'
from incuna_auth.middleware import LoginRequiredMiddleware
class AuthenticatedUser(object):
def is_authenticated(self):
return True
class AnonymousUser(object):
def is_authenticated(self):
return False
class Request(object):
def __init__(self, path_info, method='GET'):
self.path_info = path_info
self.method = method
class TestLoginRequiredMiddleware(TestCase):
def setUp(self):
self.middleware = LoginRequiredMiddleware()
def test_skip_middleware_if_url_is_exempt(self):
self.request = Request('exempt-and-protected-url/')
self.request.user = AnonymousUser()
response = self.middleware.process_request(self.request)
self.assertEqual(response, None)
def test_skip_middleware_if_url_is_not_protected(self):
self.request = Request('non-protected-url/')
self.request.user = AnonymousUser()
response = self.middleware.process_request(self.request)
self.assertEqual(response, None)
def test_skip_middleware_if_user_is_authenticated(self):
self.request = Request('protected-url/')
self.request.user = AuthenticatedUser()
response = self.middleware.process_request(self.request)
self.assertEqual(response, None)
<commit_msg>Add test for non-GET requests
* Check that we get a 403 result.<commit_after>from os import environ
from unittest import TestCase
environ['DJANGO_SETTINGS_MODULE'] = 'test_settings'
from incuna_auth.middleware import LoginRequiredMiddleware
class AuthenticatedUser(object):
def is_authenticated(self):
return True
class AnonymousUser(object):
def is_authenticated(self):
return False
class Request(object):
def __init__(self, path_info, method='GET'):
self.path_info = path_info
self.method = method
class TestLoginRequiredMiddleware(TestCase):
def setUp(self):
self.middleware = LoginRequiredMiddleware()
def test_skip_middleware_if_url_is_exempt(self):
self.request = Request('exempt-and-protected-url/')
self.request.user = AnonymousUser()
response = self.middleware.process_request(self.request)
self.assertEqual(response, None)
def test_skip_middleware_if_url_is_not_protected(self):
self.request = Request('non-protected-url/')
self.request.user = AnonymousUser()
response = self.middleware.process_request(self.request)
self.assertEqual(response, None)
def test_skip_middleware_if_user_is_authenticated(self):
self.request = Request('protected-url/')
self.request.user = AuthenticatedUser()
response = self.middleware.process_request(self.request)
self.assertEqual(response, None)
def test_403_result_if_non_get_request(self):
self.request = Request('protected-url/', 'POST')
self.request.user = AnonymousUser()
response = self.middleware.process_request(self.request)
self.assertEqual(response.status_code, 403)
|
85c509913cc9a6b22036c33eccb07277b39260e3 | pygraphc/anomaly/AnomalyScore.py | pygraphc/anomaly/AnomalyScore.py | import csv
from pygraphc.abstraction.ClusterAbstraction import ClusterAbstraction
from pygraphc.clustering.ClusterUtility import ClusterUtility
class AnomalyScore(object):
    """A class to calculate anomaly score in a cluster.
    """
    def __init__(self, graph, clusters, filename):
        """The constructor of class AnomalyScore.

        Parameters
        ----------
        graph : graph
            A graph to be analyzed for its anomaly.
        clusters : dict[list]
            Dictionary of list containing node identifier for each clusters.
        filename : str
            Base filename for the anomaly detection result.
        """
        self.graph = graph
        self.clusters = clusters
        self.filename = filename
        # Per-cluster property dict, filled by write_property().
        self.property = {}
        # Per-cluster abstraction string, filled by write_property().
        self.abstraction = {}

    def write_property(self):
        """Write cluster property to a file.

        Writes '<filename>_anomaly.csv' with one row per cluster:
        cluster id, abstraction string, then the cluster's property values.
        """
        # get cluster abstraction and its properties
        self.abstraction = ClusterAbstraction.dp_lcs(self.graph, self.clusters)
        self.property = ClusterUtility.get_cluster_property(self.graph, self.clusters)

        # Bug fix: open the csv file in a context manager so the handle is
        # always closed (the original leaked it), and use dict.items() so
        # the code also runs on Python 3 (iteritems() is Python 2 only).
        with open(self.filename + '_anomaly.csv', 'wt') as f:
            writer = csv.writer(f)

            # set header; assumes a cluster with id 0 exists -- TODO confirm
            header = ('cluster_id', 'cluster_abstraction') + tuple(self.property[0].keys())
            writer.writerow(header)

            # write data
            for cluster_id, abstract in self.abstraction.items():
                row = (cluster_id, abstract) + tuple(self.property[cluster_id].values())
                writer.writerow(row)
| import csv
from pygraphc.abstraction.ClusterAbstraction import ClusterAbstraction
from pygraphc.clustering.ClusterUtility import ClusterUtility
class AnomalyScore(object):
"""A class to calculate anomaly score in a cluster.
"""
def __init__(self, graph, clusters, filename):
"""The constructor of class AnomalyScore.
Parameters
----------
graph : graph
A graph to be analyzed for its anomaly.
clusters : dict[list]
Dictionary of list containing node identifier for each clusters.
filename : str
Filename for anomaly detection result.
"""
self.graph = graph
self.clusters = clusters
self.filename = filename
self.property = {}
self.abstraction = {}
def write_property(self):
"""Write cluster property to a file.
"""
# get cluster abstraction and its properties
self.abstraction = ClusterAbstraction.dp_lcs(self.graph, self.clusters)
self.property = ClusterUtility.get_cluster_property(self.graph, self.clusters)
# write to csv
f = open(self.filename + '_anomaly.csv', 'wt')
writer = csv.writer(f)
# set header
header = ('cluster_id', 'cluster_abstraction') + tuple(self.property[0].keys())
writer.writerow(header)
# write data
for cluster_id, abstract in self.abstraction.iteritems():
row = (cluster_id, abstract) + tuple(self.property[cluster_id].values())
writer.writerow(row)
| Add description of Parameters section in docstring | Add description of Parameters section in docstring
| Python | mit | studiawan/pygraphc | import csv
from pygraphc.abstraction.ClusterAbstraction import ClusterAbstraction
from pygraphc.clustering.ClusterUtility import ClusterUtility
class AnomalyScore(object):
"""A class to calculate anomaly score in a cluster.
"""
def __init__(self, graph, clusters, filename):
"""The constructor of class AnomalyScore.
Parameters
----------
graph : graph
clusters : dict[list]
filename : str
"""
self.graph = graph
self.clusters = clusters
self.filename = filename
self.property = {}
self.abstraction = {}
def write_property(self):
"""Write cluster property to a file.
"""
# get cluster abstraction and its properties
self.abstraction = ClusterAbstraction.dp_lcs(self.graph, self.clusters)
self.property = ClusterUtility.get_cluster_property(self.graph, self.clusters)
# write to csv
f = open(self.filename + '_anomaly.csv', 'wt')
writer = csv.writer(f)
# set header
header = ('cluster_id', 'cluster_abstraction') + tuple(self.property[0].keys())
writer.writerow(header)
# write data
for cluster_id, abstract in self.abstraction.iteritems():
row = (cluster_id, abstract) + tuple(self.property[cluster_id].values())
writer.writerow(row)
Add description of Parameters section in docstring | import csv
from pygraphc.abstraction.ClusterAbstraction import ClusterAbstraction
from pygraphc.clustering.ClusterUtility import ClusterUtility
class AnomalyScore(object):
"""A class to calculate anomaly score in a cluster.
"""
def __init__(self, graph, clusters, filename):
"""The constructor of class AnomalyScore.
Parameters
----------
graph : graph
A graph to be analyzed for its anomaly.
clusters : dict[list]
Dictionary of list containing node identifier for each clusters.
filename : str
Filename for anomaly detection result.
"""
self.graph = graph
self.clusters = clusters
self.filename = filename
self.property = {}
self.abstraction = {}
def write_property(self):
"""Write cluster property to a file.
"""
# get cluster abstraction and its properties
self.abstraction = ClusterAbstraction.dp_lcs(self.graph, self.clusters)
self.property = ClusterUtility.get_cluster_property(self.graph, self.clusters)
# write to csv
f = open(self.filename + '_anomaly.csv', 'wt')
writer = csv.writer(f)
# set header
header = ('cluster_id', 'cluster_abstraction') + tuple(self.property[0].keys())
writer.writerow(header)
# write data
for cluster_id, abstract in self.abstraction.iteritems():
row = (cluster_id, abstract) + tuple(self.property[cluster_id].values())
writer.writerow(row)
| <commit_before>import csv
from pygraphc.abstraction.ClusterAbstraction import ClusterAbstraction
from pygraphc.clustering.ClusterUtility import ClusterUtility
class AnomalyScore(object):
"""A class to calculate anomaly score in a cluster.
"""
def __init__(self, graph, clusters, filename):
"""The constructor of class AnomalyScore.
Parameters
----------
graph : graph
clusters : dict[list]
filename : str
"""
self.graph = graph
self.clusters = clusters
self.filename = filename
self.property = {}
self.abstraction = {}
def write_property(self):
"""Write cluster property to a file.
"""
# get cluster abstraction and its properties
self.abstraction = ClusterAbstraction.dp_lcs(self.graph, self.clusters)
self.property = ClusterUtility.get_cluster_property(self.graph, self.clusters)
# write to csv
f = open(self.filename + '_anomaly.csv', 'wt')
writer = csv.writer(f)
# set header
header = ('cluster_id', 'cluster_abstraction') + tuple(self.property[0].keys())
writer.writerow(header)
# write data
for cluster_id, abstract in self.abstraction.iteritems():
row = (cluster_id, abstract) + tuple(self.property[cluster_id].values())
writer.writerow(row)
<commit_msg>Add description of Parameters section in docstring<commit_after> | import csv
from pygraphc.abstraction.ClusterAbstraction import ClusterAbstraction
from pygraphc.clustering.ClusterUtility import ClusterUtility
class AnomalyScore(object):
"""A class to calculate anomaly score in a cluster.
"""
def __init__(self, graph, clusters, filename):
"""The constructor of class AnomalyScore.
Parameters
----------
graph : graph
A graph to be analyzed for its anomaly.
clusters : dict[list]
Dictionary of list containing node identifier for each clusters.
filename : str
Filename for anomaly detection result.
"""
self.graph = graph
self.clusters = clusters
self.filename = filename
self.property = {}
self.abstraction = {}
def write_property(self):
"""Write cluster property to a file.
"""
# get cluster abstraction and its properties
self.abstraction = ClusterAbstraction.dp_lcs(self.graph, self.clusters)
self.property = ClusterUtility.get_cluster_property(self.graph, self.clusters)
# write to csv
f = open(self.filename + '_anomaly.csv', 'wt')
writer = csv.writer(f)
# set header
header = ('cluster_id', 'cluster_abstraction') + tuple(self.property[0].keys())
writer.writerow(header)
# write data
for cluster_id, abstract in self.abstraction.iteritems():
row = (cluster_id, abstract) + tuple(self.property[cluster_id].values())
writer.writerow(row)
| import csv
from pygraphc.abstraction.ClusterAbstraction import ClusterAbstraction
from pygraphc.clustering.ClusterUtility import ClusterUtility
class AnomalyScore(object):
"""A class to calculate anomaly score in a cluster.
"""
def __init__(self, graph, clusters, filename):
"""The constructor of class AnomalyScore.
Parameters
----------
graph : graph
clusters : dict[list]
filename : str
"""
self.graph = graph
self.clusters = clusters
self.filename = filename
self.property = {}
self.abstraction = {}
def write_property(self):
"""Write cluster property to a file.
"""
# get cluster abstraction and its properties
self.abstraction = ClusterAbstraction.dp_lcs(self.graph, self.clusters)
self.property = ClusterUtility.get_cluster_property(self.graph, self.clusters)
# write to csv
f = open(self.filename + '_anomaly.csv', 'wt')
writer = csv.writer(f)
# set header
header = ('cluster_id', 'cluster_abstraction') + tuple(self.property[0].keys())
writer.writerow(header)
# write data
for cluster_id, abstract in self.abstraction.iteritems():
row = (cluster_id, abstract) + tuple(self.property[cluster_id].values())
writer.writerow(row)
Add description of Parameters section in docstringimport csv
from pygraphc.abstraction.ClusterAbstraction import ClusterAbstraction
from pygraphc.clustering.ClusterUtility import ClusterUtility
class AnomalyScore(object):
"""A class to calculate anomaly score in a cluster.
"""
def __init__(self, graph, clusters, filename):
"""The constructor of class AnomalyScore.
Parameters
----------
graph : graph
A graph to be analyzed for its anomaly.
clusters : dict[list]
Dictionary of list containing node identifier for each clusters.
filename : str
Filename for anomaly detection result.
"""
self.graph = graph
self.clusters = clusters
self.filename = filename
self.property = {}
self.abstraction = {}
def write_property(self):
"""Write cluster property to a file.
"""
# get cluster abstraction and its properties
self.abstraction = ClusterAbstraction.dp_lcs(self.graph, self.clusters)
self.property = ClusterUtility.get_cluster_property(self.graph, self.clusters)
# write to csv
f = open(self.filename + '_anomaly.csv', 'wt')
writer = csv.writer(f)
# set header
header = ('cluster_id', 'cluster_abstraction') + tuple(self.property[0].keys())
writer.writerow(header)
# write data
for cluster_id, abstract in self.abstraction.iteritems():
row = (cluster_id, abstract) + tuple(self.property[cluster_id].values())
writer.writerow(row)
| <commit_before>import csv
from pygraphc.abstraction.ClusterAbstraction import ClusterAbstraction
from pygraphc.clustering.ClusterUtility import ClusterUtility
class AnomalyScore(object):
"""A class to calculate anomaly score in a cluster.
"""
def __init__(self, graph, clusters, filename):
"""The constructor of class AnomalyScore.
Parameters
----------
graph : graph
clusters : dict[list]
filename : str
"""
self.graph = graph
self.clusters = clusters
self.filename = filename
self.property = {}
self.abstraction = {}
def write_property(self):
"""Write cluster property to a file.
"""
# get cluster abstraction and its properties
self.abstraction = ClusterAbstraction.dp_lcs(self.graph, self.clusters)
self.property = ClusterUtility.get_cluster_property(self.graph, self.clusters)
# write to csv
f = open(self.filename + '_anomaly.csv', 'wt')
writer = csv.writer(f)
# set header
header = ('cluster_id', 'cluster_abstraction') + tuple(self.property[0].keys())
writer.writerow(header)
# write data
for cluster_id, abstract in self.abstraction.iteritems():
row = (cluster_id, abstract) + tuple(self.property[cluster_id].values())
writer.writerow(row)
<commit_msg>Add description of Parameters section in docstring<commit_after>import csv
from pygraphc.abstraction.ClusterAbstraction import ClusterAbstraction
from pygraphc.clustering.ClusterUtility import ClusterUtility
class AnomalyScore(object):
"""A class to calculate anomaly score in a cluster.
"""
def __init__(self, graph, clusters, filename):
"""The constructor of class AnomalyScore.
Parameters
----------
graph : graph
A graph to be analyzed for its anomaly.
clusters : dict[list]
Dictionary of list containing node identifier for each clusters.
filename : str
Filename for anomaly detection result.
"""
self.graph = graph
self.clusters = clusters
self.filename = filename
self.property = {}
self.abstraction = {}
def write_property(self):
"""Write cluster property to a file.
"""
# get cluster abstraction and its properties
self.abstraction = ClusterAbstraction.dp_lcs(self.graph, self.clusters)
self.property = ClusterUtility.get_cluster_property(self.graph, self.clusters)
# write to csv
f = open(self.filename + '_anomaly.csv', 'wt')
writer = csv.writer(f)
# set header
header = ('cluster_id', 'cluster_abstraction') + tuple(self.property[0].keys())
writer.writerow(header)
# write data
for cluster_id, abstract in self.abstraction.iteritems():
row = (cluster_id, abstract) + tuple(self.property[cluster_id].values())
writer.writerow(row)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.