Dataset schema, as reported by the dataset viewer (column name, string length range or class count):

| column | type | stats |
|---|---|---|
| commit | string | lengths 40-40 |
| old_file | string | lengths 4-118 |
| new_file | string | lengths 4-118 |
| old_contents | string | lengths 0-2.94k |
| new_contents | string | lengths 1-4.43k |
| subject | string | lengths 15-444 |
| message | string | lengths 16-3.45k |
| lang | string | 1 class |
| license | string | 13 classes |
| repos | string | lengths 5-43.2k |
| prompt | string | lengths 17-4.58k |
| response | string | lengths 1-4.43k |
| prompt_tagged | string | lengths 58-4.62k |
| response_tagged | string | lengths 1-4.43k |
| text | string | lengths 132-7.29k |
| text_tagged | string | lengths 173-7.33k |

The prompt, response, text, and *_tagged columns are derived from the primary fields: prompt is old_contents followed by the subject, response equals new_contents, text concatenates both, and the tagged variants wrap the same content in <commit_before>, <commit_msg>, and <commit_after> markers. The rows below list the primary fields in that order, separated by | lines.
9be9ddd64d192240d45dea5b0c3dfbe3a2d3e261
|
gae2django/utils.py
|
gae2django/utils.py
|
#
# Copyright 2008 Andi Albrecht <albrecht.andi@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class CallableString(str):

    def __call__(self):
        return self

    def id(self):
        try:
            return int(self.split('_')[-1])
        except:
            return None
|
#
# Copyright 2008 Andi Albrecht <albrecht.andi@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class CallableString(str):

    def __call__(self):
        return str(self)

    def id(self):
        try:
            return int(self.split('_')[-1])
        except:
            return None
|
Return str(self) instead of self.
|
Return str(self) instead of self.
|
Python
|
apache-2.0
|
bubenkoff/bubenkoff-gae2django,andialbrecht/django-gae2django,andialbrecht/django-gae2django,bubenkoff/bubenkoff-gae2django
|
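A quick check of what this change does: with `__call__` returning `str(self)`, calling a `CallableString` hands back a plain `str` instead of the `CallableString` instance itself. A minimal usage sketch, assuming the class above (the value `user_1042` is an invented example):

```python
s = CallableString("user_1042")
print(type(s()))  # <class 'str'> after the fix; CallableString before it
print(s() == s)   # True either way, the text itself is unchanged
print(s.id())     # 1042, parsed from the trailing "_<n>" suffix
```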
3328a58e7c81fb86af2424b851de05e7b409ec00
|
asyncio/__init__.py
|
asyncio/__init__.py
|
"""The asyncio package, tracking PEP 3156."""
import sys
# This relies on each of the submodules having an __all__ variable.
from .futures import *
from .events import *
from .locks import *
from .transports import *
from .protocols import *
from .streams import *
from .tasks import *
if sys.platform == 'win32': # pragma: no cover
from .windows_events import *
else:
from .unix_events import * # pragma: no cover
__all__ = (futures.__all__ +
events.__all__ +
locks.__all__ +
transports.__all__ +
protocols.__all__ +
streams.__all__ +
tasks.__all__)
|
"""The asyncio package, tracking PEP 3156."""
import sys
# The selectors module is in the stdlib in Python 3.4 but not in 3.3.
# Do this first, so the other submodules can use "from . import selectors".
try:
import selectors # Will also be exported.
except ImportError:
from . import selectors
# This relies on each of the submodules having an __all__ variable.
from .futures import *
from .events import *
from .locks import *
from .transports import *
from .protocols import *
from .streams import *
from .tasks import *
if sys.platform == 'win32': # pragma: no cover
from .windows_events import *
else:
from .unix_events import * # pragma: no cover
__all__ = (futures.__all__ +
events.__all__ +
locks.__all__ +
transports.__all__ +
protocols.__all__ +
streams.__all__ +
tasks.__all__)
|
Add fakery so "from asyncio import selectors" always works.
|
Add fakery so "from asyncio import selectors" always works.
|
Python
|
apache-2.0
|
jashandeep-sohi/asyncio,gsb-eng/asyncio,ajdavis/asyncio,vxgmichel/asyncio,gvanrossum/asyncio,manipopopo/asyncio,gsb-eng/asyncio,fallen/asyncio,1st1/asyncio,Martiusweb/asyncio,vxgmichel/asyncio,ajdavis/asyncio,haypo/trollius,ajdavis/asyncio,fallen/asyncio,manipopopo/asyncio,1st1/asyncio,fallen/asyncio,Martiusweb/asyncio,Martiusweb/asyncio,1st1/asyncio,gvanrossum/asyncio,manipopopo/asyncio,jashandeep-sohi/asyncio,vxgmichel/asyncio,haypo/trollius,gsb-eng/asyncio,gvanrossum/asyncio,jashandeep-sohi/asyncio,haypo/trollius
|
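The "fakery" is a stdlib-or-vendored fallback import: prefer the standard `selectors` module (new in Python 3.4), fall back to a copy bundled inside the package on 3.3. Either branch binds the name `selectors` in the `asyncio` package namespace, which is what makes the import in the commit message resolve on both versions. A minimal usage sketch, assuming the `__init__.py` above:

```python
from asyncio import selectors  # stdlib module on 3.4+, bundled copy on 3.3

sel = selectors.DefaultSelector()  # same API either way
sel.close()
```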
09e4dd8736d6e829b779dd14b882e0e1d7f5abb9
|
tester/register/prepare_test.py
|
tester/register/prepare_test.py
|
import sys
import os
import argparse


def write_csv(filename, nb_users):
    with open(filename, "w") as csv_file:
        csv_file.write("SEQUENTIAL\n")
        for x in xrange(nb_users):
            line = "{uname};localhost;[authentication username={uname} password={uname}];\n".format(uname=str(1000+x))
            csv_file.write(line)


def write_sql(filename, nb_users):
    with open(filename, "w") as sql_file:
        header = """DROP DATABASE tests;
CREATE DATABASE tests;
USE tests;
CREATE TABLE accounts (user VARCHAR(20),password VARCHAR(20));"""
        sql_file.write(header)
        for x in xrange(nb_users):
            line = """INSERT into accounts (user, password) VALUES ("{uname}", "{uname}");\n""".format(uname=str(1000+x))
            sql_file.write(line)


def main(argv=None):
    if argv == None:
        argv = sys.argv
    argparser = argparse.ArgumentParser(description="Prepare load tests for Flexisip.")
    argparser.add_argument('-N', '--users', help="How many different users should be registering to flexisip", dest="users", default=5000)
    args, additional_args = argparser.parse_known_args()
    write_csv("users.csv", args.users)
    write_sql("users.sql", args.users)


if __name__ == '__main__':
    main()
|
import sys
import os
import argparse


def write_csv(filename, nb_users):
    with open(filename, "w") as csv_file:
        csv_file.write("SEQUENTIAL\n")
        for x in xrange(nb_users):
            line = "{uname};localhost;[authentication username={uname} password={uname}];\n".format(uname=str(1000+x))
            csv_file.write(line)


def write_sql(filename, nb_users):
    with open(filename, "w") as sql_file:
        header = """DROP DATABASE IF EXISTS tests;
CREATE DATABASE tests;
USE tests;
CREATE TABLE accounts (user VARCHAR(20),password VARCHAR(20));"""
        sql_file.write(header)
        for x in xrange(nb_users):
            line = """INSERT into accounts (user, password) VALUES ("{uname}", "{uname}");\n""".format(uname=str(1000+x))
            sql_file.write(line)


def main(argv=None):
    if argv == None:
        argv = sys.argv
    argparser = argparse.ArgumentParser(description="Prepare load tests for Flexisip.")
    argparser.add_argument('-N', '--users', help="How many different users should be registering to flexisip", dest="users", default=5000)
    args, additional_args = argparser.parse_known_args()
    write_csv("users.csv", args.users)
    write_sql("users.sql", args.users)


if __name__ == '__main__':
    main()
|
DROP DATABASE IF EXISTS in tests.
|
DROP DATABASE IF EXISTS in tests.
|
Python
|
agpl-3.0
|
BelledonneCommunications/flexisip,BelledonneCommunications/flexisip,BelledonneCommunications/flexisip,BelledonneCommunications/flexisip
|
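The change makes the generated script rerunnable: on a fresh server, a plain `DROP DATABASE tests;` fails because the database does not exist yet, while `DROP DATABASE IF EXISTS tests;` degrades to a warning. A sketch of the intended workflow (file name from the script; the mysql invocation is an assumption about how the file is consumed):

```python
write_sql("users.sql", 10)  # emits DROP DATABASE IF EXISTS as the first statement
# $ mysql < users.sql   # first run: the DROP is a warning-level no-op, not an error
# $ mysql < users.sql   # later runs: drops and recreates the "tests" database
```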
8be856ed565d9e961a4d24da74a13240e25f4ded
|
cio/plugins/base.py
|
cio/plugins/base.py
|
class BasePlugin(object):

    ext = None

    def load(self, content):
        """
        Return plugin data for content string
        """
        return content

    def save(self, data):
        """
        Persist external plugin resources and return content string for plugin data
        """
        return data

    def delete(self, data):
        """
        Delete external plugin resources
        """
        pass

    def render(self, data):
        """
        Render plugin
        """
        return data
|
from cio.conf import settings


class BasePlugin(object):

    ext = None

    @property
    def settings(self):
        return settings.get(self.ext.upper(), {})

    def load(self, content):
        """
        Return plugin data for content string
        """
        return content

    def save(self, data):
        """
        Persist external plugin resources and return content string for plugin data
        """
        return data

    def delete(self, data):
        """
        Delete external plugin resources
        """
        pass

    def render(self, data):
        """
        Render plugin
        """
        return data
|
Add support for plugin settings
|
Add support for plugin settings
|
Python
|
bsd-3-clause
|
5monkeys/content-io
|
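With the new property, each plugin reads its own section of the content-io settings, keyed by its uppercased extension. A hedged sketch of a subclass using it (the `md` extension and the `highlight` option are invented for illustration, not from the repo):

```python
class MarkdownPlugin(BasePlugin):
    ext = 'md'

    def render(self, data):
        # self.settings resolves to settings.get('MD', {})
        if self.settings.get('highlight'):
            data = '<div class="highlight">%s</div>' % data
        return data
```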
e4696a04cbc003737d7ba28d58b14775e9fc2682
|
tests/transport/test_asyncio.py
|
tests/transport/test_asyncio.py
|
from unittest import TestCase


class AsyncioTransportTestCase(TestCase):
    pass
|
from asyncio import get_event_loop
from unittest import TestCase, mock

from rfxcom.transport import AsyncioTransport
from rfxcom.protocol import RESET_PACKET, STATUS_PACKET


class AsyncioTransportTestCase(TestCase):

    def test_loop_once(self):
        loop = get_event_loop()

        def handler(*args, **kwargs):
            pass

        device = mock.MagicMock()
        AsyncioTransport(device, loop, callback=handler)
        loop._run_once()
        device.write.assert_has_call(bytearray(RESET_PACKET))
        device.write.assert_has_call(bytearray(STATUS_PACKET))
|
Add an initial ghetto asyncio test.
|
Add an initial ghetto asyncio test.
|
Python
|
bsd-3-clause
|
skimpax/python-rfxcom,kalfa/python-rfxcom,AndyA13/python-rfxcom,d0ugal-archive/python-rfxcom,kalfa/python-rfxcom,d0ugal-archive/python-rfxcom,AndyA13/python-rfxcom,skimpax/python-rfxcom
|
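"Ghetto" is apt: `assert_has_call` is not part of the `unittest.mock` API (the real methods are `assert_any_call` and `assert_has_calls`), so on mock versions of the time the typo passes silently by returning a child mock instead of asserting anything; newer versions raise AttributeError for misspelled `assert_*` names. What the test presumably meant, as a hedged sketch:

```python
# Real Mock API: passes only if write() was actually called with these payloads.
device.write.assert_any_call(bytearray(RESET_PACKET))
device.write.assert_any_call(bytearray(STATUS_PACKET))
```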
f97e1d88f0508521fb2841ef9e8c98ec77424daa
|
SettingsTemplate.py
|
SettingsTemplate.py
|
HOST = "irc.twitch.tv"
PORT = 6667
PASS = "oauth:##############################" # Your bot's oauth (https://twitchapps.com/tmi/)
IDENTITY = "my_bot" # Your bot's username. Lowercase!!
WHITELIST = ["some_authourized_account", "another one"] # People who may execute commands. Lower!!
CHANNEL = "channel" # The channel to join. Lowercase!!
JOIN_MESSAGE = "Hi, I'm a bot that just joined this channel." # Message from the bot when it joins a channel.
WOT_KEY = "" # Api key of WOT to check sites (mywot.com)
CHECK_LINKS = True # Should the bot check links via WOT?
|
HOST = "irc.twitch.tv"
PORT = 6667
PASS = "oauth:##############################" # Your bot's oauth (https://twitchapps.com/tmi/)
IDENTITY = "my_bot" # Your bot's username. Lowercase!!
WHITELIST = ["some_authourized_account", "another one"] # People who may execute commands. Lower!!
CHANNEL = "channel" # The channel to join. Lowercase!!
JOIN_MESSAGE = "Hi, I'm a bot that just joined this channel." # Message from the bot when it joins a channel.
WOT_KEY = "" # Api key of WOT to check sites (mywot.com)
CHECK_LINKS = False # Should the bot check links via WOT?
|
Set CHECK_LINKS to False for default
|
Set CHECK_LINKS to False for default
|
Python
|
apache-2.0
|
K00sKlust/K00sTwitchBot
|
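Flipping `CHECK_LINKS` to `False` makes the external WOT lookup opt-in, the safer template default given that `WOT_KEY` ships empty. A sketch of the guard the bot presumably applies before querying mywot.com (`check_link` is a hypothetical helper, not from this template):

```python
if CHECK_LINKS and WOT_KEY:
    check_link(url)  # hypothetical: query the WOT API for the site's reputation
```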
793ad28cf8bae098d12d08d971f19fd9cc29f3dd
|
colorlog/logging.py
|
colorlog/logging.py
|
"""Wrappers around the logging module"""
from __future__ import absolute_import
import functools
import logging
from colorlog.colorlog import ColoredFormatter
BASIC_FORMAT = "%(log_color)s%(levelname)s%(reset)s:%(name)s:%(message)s"
def basicConfig(**kwargs):
"""This calls basicConfig() and then overrides the formatter it creates"""
logging.basicConfig(**kwargs)
logging._acquireLock()
try:
stream = logging.root.handlers[0]
stream.setFormatter(
ColoredFormatter(kwargs.get('format', BASIC_FORMAT)))
finally:
logging._releaseLock()
def ensure_configured(func):
"""This ensures basicConfig is called if no handlers exist"""
@functools.wraps(func)
def wrapper(*args, **kwargs):
if len(logging.root.handlers) == 0:
basicConfig()
return func(*args, **kwargs)
return wrapper
root = logging.root
getLogger = logging.getLogger
debug = ensure_configured(logging.debug)
info = ensure_configured(logging.info)
warning = ensure_configured(logging.warning)
error = ensure_configured(logging.error)
critical = ensure_configured(logging.critical)
log = ensure_configured(logging.log)
exception = ensure_configured(logging.exception)
|
"""Wrappers around the logging module"""
from __future__ import absolute_import
import functools
import logging
from colorlog.colorlog import ColoredFormatter
BASIC_FORMAT = "%(log_color)s%(levelname)s%(reset)s:%(name)s:%(message)s"
def basicConfig(**kwargs):
"""This calls basicConfig() and then overrides the formatter it creates"""
logging.basicConfig(**kwargs)
logging._acquireLock()
try:
stream = logging.root.handlers[0]
stream.setFormatter(
ColoredFormatter(kwargs.get('format', BASIC_FORMAT),
datefmt=kwargs.get('datefmt')))
finally:
logging._releaseLock()
def ensure_configured(func):
"""This ensures basicConfig is called if no handlers exist"""
@functools.wraps(func)
def wrapper(*args, **kwargs):
if len(logging.root.handlers) == 0:
basicConfig()
return func(*args, **kwargs)
return wrapper
root = logging.root
getLogger = logging.getLogger
debug = ensure_configured(logging.debug)
info = ensure_configured(logging.info)
warning = ensure_configured(logging.warning)
error = ensure_configured(logging.error)
critical = ensure_configured(logging.critical)
log = ensure_configured(logging.log)
exception = ensure_configured(logging.exception)
|
Add datefmt to arguments that will be passed within decorated basicConfig
|
Add datefmt to arguments that will be passed within decorated basicConfig
|
Python
|
mit
|
borntyping/python-colorlog
|
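Before this fix, a `datefmt` keyword passed to the wrapper reached `logging.basicConfig` but was dropped when the formatter was swapped for a `ColoredFormatter`. A usage sketch of the fixed behavior, assuming the package re-exports these wrappers as `colorlog.basicConfig` and `colorlog.getLogger` (the `%(asctime)s` format string is an illustration, not the module's `BASIC_FORMAT`):

```python
import colorlog

colorlog.basicConfig(
    format="%(log_color)s%(asctime)s %(levelname)s%(reset)s:%(name)s:%(message)s",
    datefmt="%H:%M:%S",  # now forwarded to ColoredFormatter
)
colorlog.getLogger("demo").warning("timestamp honours datefmt")
```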
b2d06e068fbc7bad9ed0c8f22e751b6bb46d353d
|
dependencies/contrib/_django.py
|
dependencies/contrib/_django.py
|
from __future__ import absolute_import

from collections import OrderedDict

from dependencies import this
from django.views.generic import View


def view(injector):
    """Create Django class-based view from injector class."""
    handler = create_handler(View)
    apply_http_methods(handler, injector)
    handler.http_method_names = list(handler.http_method_names.keys())
    return injector.let(as_view=handler.as_view)


def create_handler(from_class):
    class Handler(from_class):
        http_method_names = OrderedDict.fromkeys(["head", "options"])
    return Handler


def apply_http_methods(handler, injector):
    for method in ["get", "post", "put", "patch", "delete", "head", "options", "trace"]:
        if method in injector:
            def __view(self, request, *args, **kwargs):
                return injector.let(
                    view=self,
                    request=request,
                    args=args,
                    kwargs=kwargs,
                    user=this.request.user,
                ).trace()
            handler.http_method_names[method] = None
            setattr(handler, method, __view)
|
from __future__ import absolute_import

from collections import OrderedDict

from dependencies import this
from django.views.generic import View


def view(injector):
    """Create Django class-based view from injector class."""
    handler = create_handler(View)
    apply_http_methods(handler, injector)
    finalize_http_methods(handler)
    return injector.let(as_view=handler.as_view)


def create_handler(from_class):
    class Handler(from_class):
        http_method_names = OrderedDict.fromkeys(["head", "options"])
    return Handler


def apply_http_methods(handler, injector):
    for method in ["get", "post", "put", "patch", "delete", "head", "options", "trace"]:
        if method in injector:
            def __view(self, request, *args, **kwargs):
                ns = injector.let(
                    view=self,
                    request=request,
                    args=args,
                    kwargs=kwargs,
                    user=this.request.user,
                )
                return getattr(ns, __view.method)()
            __view.method = method
            handler.http_method_names[method] = None
            setattr(handler, method, __view)


def finalize_http_methods(handler):
    handler.http_method_names = list(handler.http_method_names.keys())
|
Fix django view closure issue.
|
Fix django view closure issue.
|
Python
|
bsd-2-clause
|
proofit404/dependencies,proofit404/dependencies,proofit404/dependencies,proofit404/dependencies
|
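The closure issue being fixed is Python's late-binding gotcha: each `__view` defined inside the loop closed over the loop variable `method`, and closures resolve free variables at call time, so every verb saw the loop's final value (visible in the old body's hardcoded `.trace()` call). The commit pins the current value as an attribute on each function (`__view.method = method`); the more common pinning idiom is a default argument. A standalone sketch of the gotcha, not from the repo:

```python
# Late binding: every closure sees the loop variable's final value.
handlers = [lambda: verb for verb in ("get", "post", "trace")]
print([h() for h in handlers])        # ['trace', 'trace', 'trace']

# Pinning the value at definition time via a default argument fixes it.
handlers = [lambda verb=verb: verb for verb in ("get", "post", "trace")]
print([h() for h in handlers])        # ['get', 'post', 'trace']
```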
handler.http_method_names = list(handler.http_method_names.keys())
|
from __future__ import absolute_import
from collections import OrderedDict
from dependencies import this
from django.views.generic import View
def view(injector):
"""Create Django class-based view from injector class."""
handler = create_handler(View)
apply_http_methods(handler, injector)
handler.http_method_names = list(handler.http_method_names.keys())
return injector.let(as_view=handler.as_view)
def create_handler(from_class):
class Handler(from_class):
http_method_names = OrderedDict.fromkeys(["head", "options"])
return Handler
def apply_http_methods(handler, injector):
for method in ["get", "post", "put", "patch", "delete", "head", "options", "trace"]:
if method in injector:
def __view(self, request, *args, **kwargs):
return injector.let(
view=self,
request=request,
args=args,
kwargs=kwargs,
user=this.request.user,
).trace()
handler.http_method_names[method] = None
setattr(handler, method, __view)
Fix django view closure issue.from __future__ import absolute_import
from collections import OrderedDict
from dependencies import this
from django.views.generic import View
def view(injector):
"""Create Django class-based view from injector class."""
handler = create_handler(View)
apply_http_methods(handler, injector)
finalize_http_methods(handler)
return injector.let(as_view=handler.as_view)
def create_handler(from_class):
class Handler(from_class):
http_method_names = OrderedDict.fromkeys(["head", "options"])
return Handler
def apply_http_methods(handler, injector):
for method in ["get", "post", "put", "patch", "delete", "head", "options", "trace"]:
if method in injector:
def __view(self, request, *args, **kwargs):
ns = injector.let(
view=self,
request=request,
args=args,
kwargs=kwargs,
user=this.request.user,
)
return getattr(ns, __view.method)()
__view.method = method
handler.http_method_names[method] = None
setattr(handler, method, __view)
def finalize_http_methods(handler):
handler.http_method_names = list(handler.http_method_names.keys())
|
<commit_before>from __future__ import absolute_import
from collections import OrderedDict
from dependencies import this
from django.views.generic import View
def view(injector):
"""Create Django class-based view from injector class."""
handler = create_handler(View)
apply_http_methods(handler, injector)
handler.http_method_names = list(handler.http_method_names.keys())
return injector.let(as_view=handler.as_view)
def create_handler(from_class):
class Handler(from_class):
http_method_names = OrderedDict.fromkeys(["head", "options"])
return Handler
def apply_http_methods(handler, injector):
for method in ["get", "post", "put", "patch", "delete", "head", "options", "trace"]:
if method in injector:
def __view(self, request, *args, **kwargs):
return injector.let(
view=self,
request=request,
args=args,
kwargs=kwargs,
user=this.request.user,
).trace()
handler.http_method_names[method] = None
setattr(handler, method, __view)
<commit_msg>Fix django view closure issue.<commit_after>from __future__ import absolute_import
from collections import OrderedDict
from dependencies import this
from django.views.generic import View
def view(injector):
"""Create Django class-based view from injector class."""
handler = create_handler(View)
apply_http_methods(handler, injector)
finalize_http_methods(handler)
return injector.let(as_view=handler.as_view)
def create_handler(from_class):
class Handler(from_class):
http_method_names = OrderedDict.fromkeys(["head", "options"])
return Handler
def apply_http_methods(handler, injector):
for method in ["get", "post", "put", "patch", "delete", "head", "options", "trace"]:
if method in injector:
def __view(self, request, *args, **kwargs):
ns = injector.let(
view=self,
request=request,
args=args,
kwargs=kwargs,
user=this.request.user,
)
return getattr(ns, __view.method)()
__view.method = method
handler.http_method_names[method] = None
setattr(handler, method, __view)
def finalize_http_methods(handler):
handler.http_method_names = list(handler.http_method_names.keys())
|
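The closure issue named in the commit message is Python's late binding: a function defined in a loop looks its free variables up at call time, so every __view created above originally saw only the final value of method. A standalone sketch of the pitfall and a fix follows; it uses the common default-argument idiom, whereas the commit instead pins the name onto the function object as __view.method.

# Pitfall: all three closures share the loop variable, not its value.
handlers = []
for method in ["get", "post", "put"]:
    def handler():
        return method              # resolved when called, after the loop ended
    handlers.append(handler)
print([h() for h in handlers])     # ['put', 'put', 'put']

# Fix: bind the current value at definition time.
handlers = []
for method in ["get", "post", "put"]:
    def handler(method=method):    # default evaluated once per iteration
        return method
    handlers.append(handler)
print([h() for h in handlers])     # ['get', 'post', 'put']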
5d5cb362410896927b6216deeb9421adfc3331c4
|
hatarake/net.py
|
hatarake/net.py
|
'''
Wrappers around Python requests
This allows us to handle all the custom headers in a single place
'''
from __future__ import absolute_import
import requests
from functools import wraps
from hatarake import USER_AGENT
def add_args(func):
@wraps(func)
def wrapper(*args, **kwargs):
if 'headers' not in kwargs:
kwargs['headers'] = {}
kwargs['headers']['user-agent'] = USER_AGENT
return func(*args, **kwargs)
return wrapper
get = add_args(requests.get)
post = add_args(requests.post)
put = add_args(requests.put)
|
'''
Wrappers around Python requests
This allows us to handle all the custom headers in a single place
'''
from __future__ import absolute_import
import requests
from functools import wraps
from hatarake import USER_AGENT
def add_args(func):
@wraps(func)
def wrapper(*args, **kwargs):
try:
kwargs['headers']['user-agent'] = USER_AGENT
except KeyError:
kwargs['headers'] = {'user-agent': USER_AGENT}
if 'token' in kwargs:
token = kwargs.pop('token')
kwargs['headers']['Authorization'] = 'Token %s' % token
return func(*args, **kwargs)
return wrapper
get = add_args(requests.get)
post = add_args(requests.post)
put = add_args(requests.put)
|
Support token as an argument to our requests wrapper
|
Support token as an argument to our requests wrapper
|
Python
|
mit
|
kfdm/hatarake
|
'''
Wrappers around Python requests
This allows us to handle all the custom headers in a single place
'''
from __future__ import absolute_import
import requests
from functools import wraps
from hatarake import USER_AGENT
def add_args(func):
@wraps(func)
def wrapper(*args, **kwargs):
if 'headers' not in kwargs:
kwargs['headers'] = {}
kwargs['headers']['user-agent'] = USER_AGENT
return func(*args, **kwargs)
return wrapper
get = add_args(requests.get)
post = add_args(requests.post)
put = add_args(requests.put)
Support token as an argument to our requests wrapper
|
'''
Wrappers around Python requests
This allows us to handle all the custom headers in a single place
'''
from __future__ import absolute_import
import requests
from functools import wraps
from hatarake import USER_AGENT
def add_args(func):
@wraps(func)
def wrapper(*args, **kwargs):
try:
kwargs['headers']['user-agent'] = USER_AGENT
except KeyError:
kwargs['headers'] = {'user-agent': USER_AGENT}
if 'token' in kwargs:
token = kwargs.pop('token')
kwargs['headers']['Authorization'] = 'Token %s' % token
return func(*args, **kwargs)
return wrapper
get = add_args(requests.get)
post = add_args(requests.post)
put = add_args(requests.put)
|
<commit_before>'''
Wrappers around Python requests
This allows us to handle all the custom headers in a single place
'''
from __future__ import absolute_import
import requests
from functools import wraps
from hatarake import USER_AGENT
def add_args(func):
@wraps(func)
def wrapper(*args, **kwargs):
if 'headers' not in kwargs:
kwargs['headers'] = {}
kwargs['headers']['user-agent'] = USER_AGENT
return func(*args, **kwargs)
return wrapper
get = add_args(requests.get)
post = add_args(requests.post)
put = add_args(requests.put)
<commit_msg>Support token as an argument to our requests wrapper<commit_after>
|
'''
Wrappers around Python requests
This allows us to handle all the custom headers in a single place
'''
from __future__ import absolute_import
import requests
from functools import wraps
from hatarake import USER_AGENT
def add_args(func):
@wraps(func)
def wrapper(*args, **kwargs):
try:
kwargs['headers']['user-agent'] = USER_AGENT
except KeyError:
kwargs['headers'] = {'user-agent': USER_AGENT}
if 'token' in kwargs:
token = kwargs.pop('token')
kwargs['headers']['Authorization'] = 'Token %s' % token
return func(*args, **kwargs)
return wrapper
get = add_args(requests.get)
post = add_args(requests.post)
put = add_args(requests.put)
|
'''
Wrappers around Python requests
This allows us to handle all the custom headers in a single place
'''
from __future__ import absolute_import
import requests
from functools import wraps
from hatarake import USER_AGENT
def add_args(func):
@wraps(func)
def wrapper(*args, **kwargs):
if 'headers' not in kwargs:
kwargs['headers'] = {}
kwargs['headers']['user-agent'] = USER_AGENT
return func(*args, **kwargs)
return wrapper
get = add_args(requests.get)
post = add_args(requests.post)
put = add_args(requests.put)
Support token as an argument to our requests wrapper'''
Wrappers around Python requests
This allows us to handle all the custom headers in a single place
'''
from __future__ import absolute_import
import requests
from functools import wraps
from hatarake import USER_AGENT
def add_args(func):
@wraps(func)
def wrapper(*args, **kwargs):
try:
kwargs['headers']['user-agent'] = USER_AGENT
except KeyError:
kwargs['headers'] = {'user-agent': USER_AGENT}
if 'token' in kwargs:
token = kwargs.pop('token')
kwargs['headers']['Authorization'] = 'Token %s' % token
return func(*args, **kwargs)
return wrapper
get = add_args(requests.get)
post = add_args(requests.post)
put = add_args(requests.put)
|
<commit_before>'''
Wrappers around Python requests
This allows us to handle all the custom headers in a single place
'''
from __future__ import absolute_import
import requests
from functools import wraps
from hatarake import USER_AGENT
def add_args(func):
@wraps(func)
def wrapper(*args, **kwargs):
if 'headers' not in kwargs:
kwargs['headers'] = {}
kwargs['headers']['user-agent'] = USER_AGENT
return func(*args, **kwargs)
return wrapper
get = add_args(requests.get)
post = add_args(requests.post)
put = add_args(requests.put)
<commit_msg>Support token as an argument to our requests wrapper<commit_after>'''
Wrappers around Python requests
This allows us to handle all the custom headers in a single place
'''
from __future__ import absolute_import
import requests
from functools import wraps
from hatarake import USER_AGENT
def add_args(func):
@wraps(func)
def wrapper(*args, **kwargs):
try:
kwargs['headers']['user-agent'] = USER_AGENT
except KeyError:
kwargs['headers'] = {'user-agent': USER_AGENT}
if 'token' in kwargs:
token = kwargs.pop('token')
kwargs['headers']['Authorization'] = 'Token %s' % token
return func(*args, **kwargs)
return wrapper
get = add_args(requests.get)
post = add_args(requests.post)
put = add_args(requests.put)
|
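A condensed, self-contained restatement of the wrapper after this change, to show why token must be popped: requests.get() raises TypeError on an unexpected keyword argument, so the wrapper consumes it and rewrites it as an Authorization header. The agent string below is a stand-in, not hatarake's real constant.

import requests
from functools import wraps

USER_AGENT = 'example-agent/1.0'  # stand-in for the project's constant

def add_args(func):
    @wraps(func)
    def wrapper(*args, **kwargs):
        kwargs.setdefault('headers', {})['user-agent'] = USER_AGENT
        if 'token' in kwargs:
            # Consume the kwarg so requests never sees it.
            kwargs['headers']['Authorization'] = 'Token %s' % kwargs.pop('token')
        return func(*args, **kwargs)
    return wrapper

get = add_args(requests.get)
# get('https://example.com/api/', token='abc123')  # sends both headers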
6d6d1af248ce555cca56521bba5e7c356817c74e
|
account/forms.py
|
account/forms.py
|
from django.contrib.auth.models import User
from django import forms
from account.models import UserProfile
attributes = {"class": "required"}
class RegistrationForm(forms.Form):
username = forms.RegexField(regex=r'^[\w.@+-]+$',
max_length=30,
widget=forms.TextInput(attrs=attributes),
label="Username",
error_message={'invalid': "This value may contain only letters, numbers and @.+- characters."}
)
email = forms.EmailField()
def clean_username(self):
username = self.cleaned_data["username"]
existing = User.objects.filter(username__iexact=username)
if existing.exists():
raise forms.ValidationError("A user with that username already exists.")
else:
return self.cleaned_data["username"]
class SettingsForm(forms.Form):
email = forms.EmailField()
xsede_username = forms.CharField(max_length=50,
required=False,
label="XSEDE Username")
new_ssh_keypair = forms.BooleanField(required=False)
def clean(self):
if "password1" in self.cleaned_data and "password2" in self.cleaned_data:
if self.cleaned_data["password1"] != self.cleaned_data["password2"]:
raise forms.ValidationError("The two password fields did not match.")
return self.cleaned_data
class UserProfileForm(forms.ModelForm):
private_key = forms.CharField(widget=forms.Textarea)
public_key = forms.CharField(widget=forms.Textarea)
class Meta:
model = UserProfile
fields = ("xsede_username", "public_key", "activation_key", "password_reset_key", "reset_expires")
|
from django.contrib.auth.models import User
from django import forms
from account.models import UserProfile
attributes = {"class": "required"}
class RegistrationForm(forms.Form):
username = forms.RegexField(regex=r'^[\w.@+-]+$',
max_length=30,
widget=forms.TextInput(attrs=attributes),
label="Username",
error_message={'invalid': "This value may contain only letters, numbers and @.+- characters."}
)
email = forms.EmailField()
def clean_username(self):
username = self.cleaned_data["username"]
existing = User.objects.filter(username__iexact=username)
if existing.exists():
raise forms.ValidationError("A user with that username already exists.")
else:
return self.cleaned_data["username"]
class SettingsForm(forms.Form):
email = forms.EmailField()
xsede_username = forms.CharField(max_length=50,
required=False,
label="XSEDE Username")
new_ssh_keypair = forms.BooleanField(required=False)
class UserProfileForm(forms.ModelForm):
private_key = forms.CharField(widget=forms.Textarea)
public_key = forms.CharField(widget=forms.Textarea)
class Meta:
model = UserProfile
fields = ("xsede_username", "public_key", "activation_key", "password_reset_key", "reset_expires")
|
Remove unused section of SettingsForm
|
Remove unused section of SettingsForm
|
Python
|
mit
|
crcollins/chemtools-webapp,crcollins/chemtools-webapp,crcollins/chemtools-webapp,crcollins/chemtools-webapp,crcollins/chemtools-webapp
|
from django.contrib.auth.models import User
from django import forms
from account.models import UserProfile
attributes = {"class": "required"}
class RegistrationForm(forms.Form):
username = forms.RegexField(regex=r'^[\w.@+-]+$',
max_length=30,
widget=forms.TextInput(attrs=attributes),
label="Username",
error_message={'invalid': "This value may contain only letters, numbers and @.+- characters."}
)
email = forms.EmailField()
def clean_username(self):
username = self.cleaned_data["username"]
existing = User.objects.filter(username__iexact=username)
if existing.exists():
raise forms.ValidationError("A user with that username already exists.")
else:
return self.cleaned_data["username"]
class SettingsForm(forms.Form):
email = forms.EmailField()
xsede_username = forms.CharField(max_length=50,
required=False,
label="XSEDE Username")
new_ssh_keypair = forms.BooleanField(required=False)
def clean(self):
if "password1" in self.cleaned_data and "password2" in self.cleaned_data:
if self.cleaned_data["password1"] != self.cleaned_data["password2"]:
raise forms.ValidationError("The two password fields did not match.")
return self.cleaned_data
class UserProfileForm(forms.ModelForm):
private_key = forms.CharField(widget=forms.Textarea)
public_key = forms.CharField(widget=forms.Textarea)
class Meta:
model = UserProfile
fields = ("xsede_username", "public_key", "activation_key", "password_reset_key", "reset_expires")
Remove unused section of SettingsForm
|
from django.contrib.auth.models import User
from django import forms
from account.models import UserProfile
attributes = {"class": "required"}
class RegistrationForm(forms.Form):
username = forms.RegexField(regex=r'^[\w.@+-]+$',
max_length=30,
widget=forms.TextInput(attrs=attributes),
label="Username",
error_message={'invalid': "This value may contain only letters, numbers and @.+- characters."}
)
email = forms.EmailField()
def clean_username(self):
username = self.cleaned_data["username"]
existing = User.objects.filter(username__iexact=username)
if existing.exists():
raise forms.ValidationError("A user with that username already exists.")
else:
return self.cleaned_data["username"]
class SettingsForm(forms.Form):
email = forms.EmailField()
xsede_username = forms.CharField(max_length=50,
required=False,
label="XSEDE Username")
new_ssh_keypair = forms.BooleanField(required=False)
class UserProfileForm(forms.ModelForm):
private_key = forms.CharField(widget=forms.Textarea)
public_key = forms.CharField(widget=forms.Textarea)
class Meta:
model = UserProfile
fields = ("xsede_username", "public_key", "activation_key", "password_reset_key", "reset_expires")
|
<commit_before>from django.contrib.auth.models import User
from django import forms
from account.models import UserProfile
attributes = {"class": "required"}
class RegistrationForm(forms.Form):
username = forms.RegexField(regex=r'^[\w.@+-]+$',
max_length=30,
widget=forms.TextInput(attrs=attributes),
label="Username",
error_message={'invalid': "This value may contain only letters, numbers and @.+- characters."}
)
email = forms.EmailField()
def clean_username(self):
username = self.cleaned_data["username"]
existing = User.objects.filter(username__iexact=username)
if existing.exists():
raise forms.ValidationError("A user with that username already exists.")
else:
return self.cleaned_data["username"]
class SettingsForm(forms.Form):
email = forms.EmailField()
xsede_username = forms.CharField(max_length=50,
required=False,
label="XSEDE Username")
new_ssh_keypair = forms.BooleanField(required=False)
def clean(self):
if "password1" in self.cleaned_data and "password2" in self.cleaned_data:
if self.cleaned_data["password1"] != self.cleaned_data["password2"]:
raise forms.ValidationError("The two password fields did not match.")
return self.cleaned_data
class UserProfileForm(forms.ModelForm):
private_key = forms.CharField(widget=forms.Textarea)
public_key = forms.CharField(widget=forms.Textarea)
class Meta:
model = UserProfile
fields = ("xsede_username", "public_key", "activation_key", "password_reset_key", "reset_expires")
<commit_msg>Remove unused section of SettingsForm<commit_after>
|
from django.contrib.auth.models import User
from django import forms
from account.models import UserProfile
attributes = {"class": "required"}
class RegistrationForm(forms.Form):
username = forms.RegexField(regex=r'^[\w.@+-]+$',
max_length=30,
widget=forms.TextInput(attrs=attributes),
label="Username",
error_message={'invalid': "This value may contain only letters, numbers and @.+- characters."}
)
email = forms.EmailField()
def clean_username(self):
username = self.cleaned_data["username"]
existing = User.objects.filter(username__iexact=username)
if existing.exists():
raise forms.ValidationError("A user with that username already exists.")
else:
return self.cleaned_data["username"]
class SettingsForm(forms.Form):
email = forms.EmailField()
xsede_username = forms.CharField(max_length=50,
required=False,
label="XSEDE Username")
new_ssh_keypair = forms.BooleanField(required=False)
class UserProfileForm(forms.ModelForm):
private_key = forms.CharField(widget=forms.Textarea)
public_key = forms.CharField(widget=forms.Textarea)
class Meta:
model = UserProfile
fields = ("xsede_username", "public_key", "activation_key", "password_reset_key", "reset_expires")
|
from django.contrib.auth.models import User
from django import forms
from account.models import UserProfile
attributes = {"class": "required"}
class RegistrationForm(forms.Form):
username = forms.RegexField(regex=r'^[\w.@+-]+$',
max_length=30,
widget=forms.TextInput(attrs=attributes),
label="Username",
error_message={'invalid': "This value may contain only letters, numbers and @.+- characters."}
)
email = forms.EmailField()
def clean_username(self):
username = self.cleaned_data["username"]
existing = User.objects.filter(username__iexact=username)
if existing.exists():
raise forms.ValidationError("A user with that username already exists.")
else:
return self.cleaned_data["username"]
class SettingsForm(forms.Form):
email = forms.EmailField()
xsede_username = forms.CharField(max_length=50,
required=False,
label="XSEDE Username")
new_ssh_keypair = forms.BooleanField(required=False)
def clean(self):
if "password1" in self.cleaned_data and "password2" in self.cleaned_data:
if self.cleaned_data["password1"] != self.cleaned_data["password2"]:
raise forms.ValidationError("The two password fields did not match.")
return self.cleaned_data
class UserProfileForm(forms.ModelForm):
private_key = forms.CharField(widget=forms.Textarea)
public_key = forms.CharField(widget=forms.Textarea)
class Meta:
model = UserProfile
fields = ("xsede_username", "public_key", "activation_key", "password_reset_key", "reset_expires")
Remove unused section of SettingsFormfrom django.contrib.auth.models import User
from django import forms
from account.models import UserProfile
attributes = {"class": "required"}
class RegistrationForm(forms.Form):
username = forms.RegexField(regex=r'^[\w.@+-]+$',
max_length=30,
widget=forms.TextInput(attrs=attributes),
label="Username",
error_message={'invalid': "This value may contain only letters, numbers and @.+- characters."}
)
email = forms.EmailField()
def clean_username(self):
username = self.cleaned_data["username"]
existing = User.objects.filter(username__iexact=username)
if existing.exists():
raise forms.ValidationError("A user with that username already exists.")
else:
return self.cleaned_data["username"]
class SettingsForm(forms.Form):
email = forms.EmailField()
xsede_username = forms.CharField(max_length=50,
required=False,
label="XSEDE Username")
new_ssh_keypair = forms.BooleanField(required=False)
class UserProfileForm(forms.ModelForm):
private_key = forms.CharField(widget=forms.Textarea)
public_key = forms.CharField(widget=forms.Textarea)
class Meta:
model = UserProfile
fields = ("xsede_username", "public_key", "activation_key", "password_reset_key", "reset_expires")
|
<commit_before>from django.contrib.auth.models import User
from django import forms
from account.models import UserProfile
attributes = {"class": "required"}
class RegistrationForm(forms.Form):
username = forms.RegexField(regex=r'^[\w.@+-]+$',
max_length=30,
widget=forms.TextInput(attrs=attributes),
label="Username",
error_message={'invalid': "This value may contain only letters, numbers and @.+- characters."}
)
email = forms.EmailField()
def clean_username(self):
username = self.cleaned_data["username"]
existing = User.objects.filter(username__iexact=username)
if existing.exists():
raise forms.ValidationError("A user with that username already exists.")
else:
return self.cleaned_data["username"]
class SettingsForm(forms.Form):
email = forms.EmailField()
xsede_username = forms.CharField(max_length=50,
required=False,
label="XSEDE Username")
new_ssh_keypair = forms.BooleanField(required=False)
def clean(self):
if "password1" in self.cleaned_data and "password2" in self.cleaned_data:
if self.cleaned_data["password1"] != self.cleaned_data["password2"]:
raise forms.ValidationError("The two password fields did not match.")
return self.cleaned_data
class UserProfileForm(forms.ModelForm):
private_key = forms.CharField(widget=forms.Textarea)
public_key = forms.CharField(widget=forms.Textarea)
class Meta:
model = UserProfile
fields = ("xsede_username", "public_key", "activation_key", "password_reset_key", "reset_expires")
<commit_msg>Remove unused section of SettingsForm<commit_after>from django.contrib.auth.models import User
from django import forms
from account.models import UserProfile
attributes = {"class": "required"}
class RegistrationForm(forms.Form):
username = forms.RegexField(regex=r'^[\w.@+-]+$',
max_length=30,
widget=forms.TextInput(attrs=attributes),
label="Username",
error_message={'invalid': "This value may contain only letters, numbers and @.+- characters."}
)
email = forms.EmailField()
def clean_username(self):
username = self.cleaned_data["username"]
existing = User.objects.filter(username__iexact=username)
if existing.exists():
raise forms.ValidationError("A user with that username already exists.")
else:
return self.cleaned_data["username"]
class SettingsForm(forms.Form):
email = forms.EmailField()
xsede_username = forms.CharField(max_length=50,
required=False,
label="XSEDE Username")
new_ssh_keypair = forms.BooleanField(required=False)
class UserProfileForm(forms.ModelForm):
private_key = forms.CharField(widget=forms.Textarea)
public_key = forms.CharField(widget=forms.Textarea)
class Meta:
model = UserProfile
fields = ("xsede_username", "public_key", "activation_key", "password_reset_key", "reset_expires")
|
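The removed clean() was unreachable: SettingsForm declares no password1 or password2 fields, so neither key could ever appear in cleaned_data and the method was dead code. For contrast, a minimal sketch of a form where that cross-field check does belong (Django assumed, field names illustrative):

from django import forms

class PasswordPairForm(forms.Form):
    password1 = forms.CharField(widget=forms.PasswordInput)
    password2 = forms.CharField(widget=forms.PasswordInput)

    def clean(self):
        cleaned = super(PasswordPairForm, self).clean()
        if cleaned.get("password1") != cleaned.get("password2"):
            raise forms.ValidationError("The two password fields did not match.")
        return cleaned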
8c174388aefa3907aeb8733bb3d4c77c770eefe7
|
DataModelAdapter.py
|
DataModelAdapter.py
|
class DataModelAdapter(object) :
def __init__(self, data) :
self._data = data
self._children = set()
pass
def numChildren(self) :
return len(self._children)
def hasData(self) :
return self._data is not None
def getData(self, key) :
return self._data[key]
def addChild(self, child) :
self._children.add(child)
|
class DataModelAdapter(object) :
def __init__(self, data) :
self._data = data
self._children = set()
self._parent = None
pass
def numChildren(self) :
return len(self._children)
def hasData(self) :
return self._data is not None
def getData(self, key) :
return self._data[key]
def addChild(self, child) :
self._children.add(child)
def setParent(self, parent) :
self._parent = parent
def parent(self) :
return self._parent
|
Add _parent field with setter/getter
|
Add _parent field with setter/getter
|
Python
|
apache-2.0
|
mattdeckard/wherewithal
|
class DataModelAdapter(object) :
def __init__(self, data) :
self._data = data
self._children = set()
pass
def numChildren(self) :
return len(self._children)
def hasData(self) :
return self._data is not None
def getData(self, key) :
return self._data[key]
def addChild(self, child) :
self._children.add(child)
Add _parent field with setter/getter
|
class DataModelAdapter(object) :
def __init__(self, data) :
self._data = data
self._children = set()
self._parent = None
pass
def numChildren(self) :
return len(self._children)
def hasData(self) :
return self._data is not None
def getData(self, key) :
return self._data[key]
def addChild(self, child) :
self._children.add(child)
def setParent(self, parent) :
self._parent = parent
def parent(self) :
return self._parent
|
<commit_before>
class DataModelAdapter(object) :
def __init__(self, data) :
self._data = data
self._children = set()
pass
def numChildren(self) :
return len(self._children)
def hasData(self) :
return self._data is not None
def getData(self, key) :
return self._data[key]
def addChild(self, child) :
self._children.add(child)
<commit_msg>Add _parent field with setter/getter<commit_after>
|
class DataModelAdapter(object) :
def __init__(self, data) :
self._data = data
self._children = set()
self._parent = None
pass
def numChildren(self) :
return len(self._children)
def hasData(self) :
return self._data is not None
def getData(self, key) :
return self._data[key]
def addChild(self, child) :
self._children.add(child)
def setParent(self, parent) :
self._parent = parent
def parent(self) :
return self._parent
|
class DataModelAdapter(object) :
def __init__(self, data) :
self._data = data
self._children = set()
pass
def numChildren(self) :
return len(self._children)
def hasData(self) :
return self._data is not None
def getData(self, key) :
return self._data[key]
def addChild(self, child) :
self._children.add(child)
Add _parent field with setter/getter
class DataModelAdapter(object) :
def __init__(self, data) :
self._data = data
self._children = set()
self._parent = None
pass
def numChildren(self) :
return len(self._children)
def hasData(self) :
return self._data is not None
def getData(self, key) :
return self._data[key]
def addChild(self, child) :
self._children.add(child)
def setParent(self, parent) :
self._parent = parent
def parent(self) :
return self._parent
|
<commit_before>
class DataModelAdapter(object) :
def __init__(self, data) :
self._data = data
self._children = set()
pass
def numChildren(self) :
return len(self._children)
def hasData(self) :
return self._data is not None
def getData(self, key) :
return self._data[key]
def addChild(self, child) :
self._children.add(child)
<commit_msg>Add _parent field with setter/getter<commit_after>
class DataModelAdapter(object) :
def __init__(self, data) :
self._data = data
self._children = set()
self._parent = None
pass
def numChildren(self) :
return len(self._children)
def hasData(self) :
return self._data is not None
def getData(self, key) :
return self._data[key]
def addChild(self, child) :
self._children.add(child)
def setParent(self, parent) :
self._parent = parent
def parent(self) :
return self._parent
|
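A usage sketch against the class above (nothing else assumed): addChild and the new setParent are independent, so callers must wire both directions themselves.

root = DataModelAdapter({'name': 'root'})
child = DataModelAdapter({'name': 'child'})

root.addChild(child)
child.setParent(root)   # not done automatically by addChild

assert child.parent() is root
assert root.numChildren() == 1

Having addChild call child.setParent(self) would keep the two links consistent by construction.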
e27f04e9c8d5d74afdd9cd7d6990cad5ff6f6cb5
|
api/v330/docking_event/serializers.py
|
api/v330/docking_event/serializers.py
|
from api.v330.common.serializers import *
class SpacecraftFlightSerializerForDockingEvent(serializers.HyperlinkedModelSerializer):
spacecraft = SpacecraftSerializer(read_only=True, many=False)
class Meta:
model = SpacecraftFlight
fields = ('id', 'url', 'destination', 'splashdown', 'spacecraft')
class DockingEventSerializer(serializers.HyperlinkedModelSerializer):
flight_vehicle = SpacecraftFlightSerializerForDockingEvent(read_only=True)
docking_location = serializers.StringRelatedField(many=False, read_only=True)
class Meta:
model = DockingEvent
fields = ('id', 'url', 'docking', 'departure', 'flight_vehicle', 'docking_location')
class DockingEventDetailedSerializer(serializers.HyperlinkedModelSerializer):
flight_vehicle = SpacecraftFlightSerializerForDockingEvent(read_only=True, many=False)
docking_location = serializers.StringRelatedField(many=False, read_only=True)
class Meta:
model = DockingEvent
fields = ('id', 'url', 'docking', 'departure', 'flight_vehicle', 'docking_location')
|
from api.v330.common.serializers import *
class SpacecraftFlightSerializerForDockingEvent(serializers.HyperlinkedModelSerializer):
spacecraft = SpacecraftSerializer(read_only=True, many=False)
class Meta:
model = SpacecraftFlight
fields = ('id', 'url', 'destination', 'splashdown', 'spacecraft')
class SpaceStationSerializerForDockingEvent(serializers.HyperlinkedModelSerializer):
class Meta:
model = SpaceStation
fields = ('id', 'url', 'name', 'image_url')
class DockingEventSerializer(serializers.HyperlinkedModelSerializer):
flight_vehicle = SpacecraftFlightSerializerForDockingEvent(read_only=True)
docking_location = serializers.StringRelatedField(many=False, read_only=True)
class Meta:
model = DockingEvent
fields = ('id', 'url', 'docking', 'departure', 'flight_vehicle', 'docking_location')
class DockingEventDetailedSerializer(serializers.HyperlinkedModelSerializer):
flight_vehicle = SpacecraftFlightSerializerForDockingEvent(read_only=True, many=False)
docking_location = serializers.StringRelatedField(many=False, read_only=True)
space_station = SpaceStationSerializerForDockingEvent(many=False, read_only=True)
class Meta:
model = DockingEvent
fields = ('id', 'url', 'docking', 'departure', 'flight_vehicle', 'docking_location', 'space_station')
|
Add space_station field to detailed docking event
|
Add space_station field to detailed docking event
|
Python
|
apache-2.0
|
ItsCalebJones/SpaceLaunchNow-Server,ItsCalebJones/SpaceLaunchNow-Server,ItsCalebJones/SpaceLaunchNow-Server
|
from api.v330.common.serializers import *
class SpacecraftFlightSerializerForDockingEvent(serializers.HyperlinkedModelSerializer):
spacecraft = SpacecraftSerializer(read_only=True, many=False)
class Meta:
model = SpacecraftFlight
fields = ('id', 'url', 'destination', 'splashdown', 'spacecraft')
class DockingEventSerializer(serializers.HyperlinkedModelSerializer):
flight_vehicle = SpacecraftFlightSerializerForDockingEvent(read_only=True)
docking_location = serializers.StringRelatedField(many=False, read_only=True)
class Meta:
model = DockingEvent
fields = ('id', 'url', 'docking', 'departure', 'flight_vehicle', 'docking_location')
class DockingEventDetailedSerializer(serializers.HyperlinkedModelSerializer):
flight_vehicle = SpacecraftFlightSerializerForDockingEvent(read_only=True, many=False)
docking_location = serializers.StringRelatedField(many=False, read_only=True)
class Meta:
model = DockingEvent
fields = ('id', 'url', 'docking', 'departure', 'flight_vehicle', 'docking_location')
Add space_station field to detailed docking event
|
from api.v330.common.serializers import *
class SpacecraftFlightSerializerForDockingEvent(serializers.HyperlinkedModelSerializer):
spacecraft = SpacecraftSerializer(read_only=True, many=False)
class Meta:
model = SpacecraftFlight
fields = ('id', 'url', 'destination', 'splashdown', 'spacecraft')
class SpaceStationSerializerForDockingEvent(serializers.HyperlinkedModelSerializer):
class Meta:
model = SpaceStation
fields = ('id', 'url', 'name', 'image_url')
class DockingEventSerializer(serializers.HyperlinkedModelSerializer):
flight_vehicle = SpacecraftFlightSerializerForDockingEvent(read_only=True)
docking_location = serializers.StringRelatedField(many=False, read_only=True)
class Meta:
model = DockingEvent
fields = ('id', 'url', 'docking', 'departure', 'flight_vehicle', 'docking_location')
class DockingEventDetailedSerializer(serializers.HyperlinkedModelSerializer):
flight_vehicle = SpacecraftFlightSerializerForDockingEvent(read_only=True, many=False)
docking_location = serializers.StringRelatedField(many=False, read_only=True)
space_station = SpaceStationSerializerForDockingEvent(many=False, read_only=True)
class Meta:
model = DockingEvent
fields = ('id', 'url', 'docking', 'departure', 'flight_vehicle', 'docking_location', 'space_station')
|
<commit_before>from api.v330.common.serializers import *
class SpacecraftFlightSerializerForDockingEvent(serializers.HyperlinkedModelSerializer):
spacecraft = SpacecraftSerializer(read_only=True, many=False)
class Meta:
model = SpacecraftFlight
fields = ('id', 'url', 'destination', 'splashdown', 'spacecraft')
class DockingEventSerializer(serializers.HyperlinkedModelSerializer):
flight_vehicle = SpacecraftFlightSerializerForDockingEvent(read_only=True)
docking_location = serializers.StringRelatedField(many=False, read_only=True)
class Meta:
model = DockingEvent
fields = ('id', 'url', 'docking', 'departure', 'flight_vehicle', 'docking_location')
class DockingEventDetailedSerializer(serializers.HyperlinkedModelSerializer):
flight_vehicle = SpacecraftFlightSerializerForDockingEvent(read_only=True, many=False)
docking_location = serializers.StringRelatedField(many=False, read_only=True)
class Meta:
model = DockingEvent
fields = ('id', 'url', 'docking', 'departure', 'flight_vehicle', 'docking_location')
<commit_msg>Add space_station field to detailed docking event<commit_after>
|
from api.v330.common.serializers import *
class SpacecraftFlightSerializerForDockingEvent(serializers.HyperlinkedModelSerializer):
spacecraft = SpacecraftSerializer(read_only=True, many=False)
class Meta:
model = SpacecraftFlight
fields = ('id', 'url', 'destination', 'splashdown', 'spacecraft')
class SpaceStationSerializerForDockingEvent(serializers.HyperlinkedModelSerializer):
class Meta:
model = SpaceStation
fields = ('id', 'url', 'name', 'image_url')
class DockingEventSerializer(serializers.HyperlinkedModelSerializer):
flight_vehicle = SpacecraftFlightSerializerForDockingEvent(read_only=True)
docking_location = serializers.StringRelatedField(many=False, read_only=True)
class Meta:
model = DockingEvent
fields = ('id', 'url', 'docking', 'departure', 'flight_vehicle', 'docking_location')
class DockingEventDetailedSerializer(serializers.HyperlinkedModelSerializer):
flight_vehicle = SpacecraftFlightSerializerForDockingEvent(read_only=True, many=False)
docking_location = serializers.StringRelatedField(many=False, read_only=True)
space_station = SpaceStationSerializerForDockingEvent(many=False, read_only=True)
class Meta:
model = DockingEvent
fields = ('id', 'url', 'docking', 'departure', 'flight_vehicle', 'docking_location', 'space_station')
|
from api.v330.common.serializers import *
class SpacecraftFlightSerializerForDockingEvent(serializers.HyperlinkedModelSerializer):
spacecraft = SpacecraftSerializer(read_only=True, many=False)
class Meta:
model = SpacecraftFlight
fields = ('id', 'url', 'destination', 'splashdown', 'spacecraft')
class DockingEventSerializer(serializers.HyperlinkedModelSerializer):
flight_vehicle = SpacecraftFlightSerializerForDockingEvent(read_only=True)
docking_location = serializers.StringRelatedField(many=False, read_only=True)
class Meta:
model = DockingEvent
fields = ('id', 'url', 'docking', 'departure', 'flight_vehicle', 'docking_location')
class DockingEventDetailedSerializer(serializers.HyperlinkedModelSerializer):
flight_vehicle = SpacecraftFlightSerializerForDockingEvent(read_only=True, many=False)
docking_location = serializers.StringRelatedField(many=False, read_only=True)
class Meta:
model = DockingEvent
fields = ('id', 'url', 'docking', 'departure', 'flight_vehicle', 'docking_location')
Add space_station field to detailed docking eventfrom api.v330.common.serializers import *
class SpacecraftFlightSerializerForDockingEvent(serializers.HyperlinkedModelSerializer):
spacecraft = SpacecraftSerializer(read_only=True, many=False)
class Meta:
model = SpacecraftFlight
fields = ('id', 'url', 'destination', 'splashdown', 'spacecraft')
class SpaceStationSerializerForDockingEvent(serializers.HyperlinkedModelSerializer):
class Meta:
model = SpaceStation
fields = ('id', 'url', 'name', 'image_url')
class DockingEventSerializer(serializers.HyperlinkedModelSerializer):
flight_vehicle = SpacecraftFlightSerializerForDockingEvent(read_only=True)
docking_location = serializers.StringRelatedField(many=False, read_only=True)
class Meta:
model = DockingEvent
fields = ('id', 'url', 'docking', 'departure', 'flight_vehicle', 'docking_location')
class DockingEventDetailedSerializer(serializers.HyperlinkedModelSerializer):
flight_vehicle = SpacecraftFlightSerializerForDockingEvent(read_only=True, many=False)
docking_location = serializers.StringRelatedField(many=False, read_only=True)
space_station = SpaceStationSerializerForDockingEvent(many=False, read_only=True)
class Meta:
model = DockingEvent
fields = ('id', 'url', 'docking', 'departure', 'flight_vehicle', 'docking_location', 'space_station')
|
<commit_before>from api.v330.common.serializers import *
class SpacecraftFlightSerializerForDockingEvent(serializers.HyperlinkedModelSerializer):
spacecraft = SpacecraftSerializer(read_only=True, many=False)
class Meta:
model = SpacecraftFlight
fields = ('id', 'url', 'destination', 'splashdown', 'spacecraft')
class DockingEventSerializer(serializers.HyperlinkedModelSerializer):
flight_vehicle = SpacecraftFlightSerializerForDockingEvent(read_only=True)
docking_location = serializers.StringRelatedField(many=False, read_only=True)
class Meta:
model = DockingEvent
fields = ('id', 'url', 'docking', 'departure', 'flight_vehicle', 'docking_location')
class DockingEventDetailedSerializer(serializers.HyperlinkedModelSerializer):
flight_vehicle = SpacecraftFlightSerializerForDockingEvent(read_only=True, many=False)
docking_location = serializers.StringRelatedField(many=False, read_only=True)
class Meta:
model = DockingEvent
fields = ('id', 'url', 'docking', 'departure', 'flight_vehicle', 'docking_location')
<commit_msg>Add space_station field to detailed docking event<commit_after>from api.v330.common.serializers import *
class SpacecraftFlightSerializerForDockingEvent(serializers.HyperlinkedModelSerializer):
spacecraft = SpacecraftSerializer(read_only=True, many=False)
class Meta:
model = SpacecraftFlight
fields = ('id', 'url', 'destination', 'splashdown', 'spacecraft')
class SpaceStationSerializerForDockingEvent(serializers.HyperlinkedModelSerializer):
class Meta:
model = SpaceStation
fields = ('id', 'url', 'name', 'image_url')
class DockingEventSerializer(serializers.HyperlinkedModelSerializer):
flight_vehicle = SpacecraftFlightSerializerForDockingEvent(read_only=True)
docking_location = serializers.StringRelatedField(many=False, read_only=True)
class Meta:
model = DockingEvent
fields = ('id', 'url', 'docking', 'departure', 'flight_vehicle', 'docking_location')
class DockingEventDetailedSerializer(serializers.HyperlinkedModelSerializer):
flight_vehicle = SpacecraftFlightSerializerForDockingEvent(read_only=True, many=False)
docking_location = serializers.StringRelatedField(many=False, read_only=True)
space_station = SpaceStationSerializerForDockingEvent(many=False, read_only=True)
class Meta:
model = DockingEvent
fields = ('id', 'url', 'docking', 'departure', 'flight_vehicle', 'docking_location', 'space_station')
|
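Roughly the payload shape the detailed serializer gains from this change; every value below is illustrative, only the field names come from the serializers above. Embedding the compact read-only station block saves clients a second request.

# Illustrative DockingEventDetailedSerializer output; values are made up.
{
    "id": 1,
    "url": "https://example.com/api/v330/docking_event/1/",
    "docking": "2019-03-03T10:51:00Z",
    "departure": None,
    "flight_vehicle": {"id": 10, "url": "...", "destination": "...",
                       "splashdown": None, "spacecraft": {"...": "..."}},
    "docking_location": "Harmony forward",
    "space_station": {"id": 4, "url": "...", "name": "International Space Station",
                      "image_url": "..."},
}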
e0becdd677c06c29834ecea73c28635553e18337
|
app/main/presenters/search_results.py
|
app/main/presenters/search_results.py
|
from flask import Markup
class SearchResults(object):
"""Provides access to the search results information"""
def __init__(self, response, lots_by_slug):
self.search_results = response['services']
self._lots = lots_by_slug
self._annotate()
self.total = response['meta']['total']
if 'page' in response['meta']['query']:
self.page = response['meta']['query']['page']
def _annotate(self):
for service in self.search_results:
self._replace_lot(service)
self._add_highlighting(service)
def _replace_lot(self, service):
# replace lot slug with reference to dict containing all the relevant lot data
service['lot'] = self._lots.get(service['lot'])
def _add_highlighting(self, service):
if 'highlight' in service:
if 'serviceSummary' in service['highlight']:
service['serviceSummary'] = Markup(
''.join(service['highlight']['serviceSummary'])
)
class AggregationResults(object):
"""Provides access to the aggregation results information"""
def __init__(self, response):
self.results = response['aggregations']
self.total = response['meta']['total']
if 'page' in response['meta']['query']:
self.page = response['meta']['query']['page']
|
from flask import Markup
class SearchResults(object):
"""Provides access to the search results information"""
def __init__(self, response, lots_by_slug):
self.search_results = response['services']
self._lots = lots_by_slug
self._annotate()
self.total = response['meta']['total']
if 'page' in response['meta']['query']:
self.page = response['meta']['query']['page']
def _annotate(self):
for service in self.search_results:
self._replace_lot(service)
self._add_highlighting(service)
def _replace_lot(self, service):
# replace lot slug with reference to dict containing all the relevant lot data
service['lot'] = self._lots.get(service['lot'])
def _add_highlighting(self, service):
if 'highlight' in service:
for highlighted_field in ['serviceSummary', 'serviceDescription']:
if highlighted_field in service['highlight']:
service[highlighted_field] = Markup(
''.join(service['highlight'][highlighted_field])
)
class AggregationResults(object):
"""Provides access to the aggregation results information"""
def __init__(self, response):
self.results = response['aggregations']
self.total = response['meta']['total']
if 'page' in response['meta']['query']:
self.page = response['meta']['query']['page']
|
Add static highlighting on serviceDescription field
|
Add static highlighting on serviceDescription field
|
Python
|
mit
|
alphagov/digitalmarketplace-buyer-frontend,alphagov/digitalmarketplace-buyer-frontend,alphagov/digitalmarketplace-buyer-frontend,alphagov/digitalmarketplace-buyer-frontend
|
from flask import Markup
class SearchResults(object):
"""Provides access to the search results information"""
def __init__(self, response, lots_by_slug):
self.search_results = response['services']
self._lots = lots_by_slug
self._annotate()
self.total = response['meta']['total']
if 'page' in response['meta']['query']:
self.page = response['meta']['query']['page']
def _annotate(self):
for service in self.search_results:
self._replace_lot(service)
self._add_highlighting(service)
def _replace_lot(self, service):
# replace lot slug with reference to dict containing all the relevant lot data
service['lot'] = self._lots.get(service['lot'])
def _add_highlighting(self, service):
if 'highlight' in service:
if 'serviceSummary' in service['highlight']:
service['serviceSummary'] = Markup(
''.join(service['highlight']['serviceSummary'])
)
class AggregationResults(object):
"""Provides access to the aggregation results information"""
def __init__(self, response):
self.results = response['aggregations']
self.total = response['meta']['total']
if 'page' in response['meta']['query']:
self.page = response['meta']['query']['page']
Add static highlighting on serviceDescription field
|
from flask import Markup
class SearchResults(object):
"""Provides access to the search results information"""
def __init__(self, response, lots_by_slug):
self.search_results = response['services']
self._lots = lots_by_slug
self._annotate()
self.total = response['meta']['total']
if 'page' in response['meta']['query']:
self.page = response['meta']['query']['page']
def _annotate(self):
for service in self.search_results:
self._replace_lot(service)
self._add_highlighting(service)
def _replace_lot(self, service):
# replace lot slug with reference to dict containing all the relevant lot data
service['lot'] = self._lots.get(service['lot'])
def _add_highlighting(self, service):
if 'highlight' in service:
for highlighted_field in ['serviceSummary', 'serviceDescription']:
if highlighted_field in service['highlight']:
service[highlighted_field] = Markup(
''.join(service['highlight'][highlighted_field])
)
class AggregationResults(object):
"""Provides access to the aggregation results information"""
def __init__(self, response):
self.results = response['aggregations']
self.total = response['meta']['total']
if 'page' in response['meta']['query']:
self.page = response['meta']['query']['page']
|
<commit_before>from flask import Markup
class SearchResults(object):
"""Provides access to the search results information"""
def __init__(self, response, lots_by_slug):
self.search_results = response['services']
self._lots = lots_by_slug
self._annotate()
self.total = response['meta']['total']
if 'page' in response['meta']['query']:
self.page = response['meta']['query']['page']
def _annotate(self):
for service in self.search_results:
self._replace_lot(service)
self._add_highlighting(service)
def _replace_lot(self, service):
# replace lot slug with reference to dict containing all the relevant lot data
service['lot'] = self._lots.get(service['lot'])
def _add_highlighting(self, service):
if 'highlight' in service:
if 'serviceSummary' in service['highlight']:
service['serviceSummary'] = Markup(
''.join(service['highlight']['serviceSummary'])
)
class AggregationResults(object):
"""Provides access to the aggregation results information"""
def __init__(self, response):
self.results = response['aggregations']
self.total = response['meta']['total']
if 'page' in response['meta']['query']:
self.page = response['meta']['query']['page']
<commit_msg>Add static highlighting on serviceDescription field<commit_after>
|
from flask import Markup
class SearchResults(object):
"""Provides access to the search results information"""
def __init__(self, response, lots_by_slug):
self.search_results = response['services']
self._lots = lots_by_slug
self._annotate()
self.total = response['meta']['total']
if 'page' in response['meta']['query']:
self.page = response['meta']['query']['page']
def _annotate(self):
for service in self.search_results:
self._replace_lot(service)
self._add_highlighting(service)
def _replace_lot(self, service):
# replace lot slug with reference to dict containing all the relevant lot data
service['lot'] = self._lots.get(service['lot'])
def _add_highlighting(self, service):
if 'highlight' in service:
for highlighted_field in ['serviceSummary', 'serviceDescription']:
if highlighted_field in service['highlight']:
service[highlighted_field] = Markup(
''.join(service['highlight'][highlighted_field])
)
class AggregationResults(object):
"""Provides access to the aggregation results information"""
def __init__(self, response):
self.results = response['aggregations']
self.total = response['meta']['total']
if 'page' in response['meta']['query']:
self.page = response['meta']['query']['page']
|
from flask import Markup
class SearchResults(object):
"""Provides access to the search results information"""
def __init__(self, response, lots_by_slug):
self.search_results = response['services']
self._lots = lots_by_slug
self._annotate()
self.total = response['meta']['total']
if 'page' in response['meta']['query']:
self.page = response['meta']['query']['page']
def _annotate(self):
for service in self.search_results:
self._replace_lot(service)
self._add_highlighting(service)
def _replace_lot(self, service):
# replace lot slug with reference to dict containing all the relevant lot data
service['lot'] = self._lots.get(service['lot'])
def _add_highlighting(self, service):
if 'highlight' in service:
if 'serviceSummary' in service['highlight']:
service['serviceSummary'] = Markup(
''.join(service['highlight']['serviceSummary'])
)
class AggregationResults(object):
"""Provides access to the aggregation results information"""
def __init__(self, response):
self.results = response['aggregations']
self.total = response['meta']['total']
if 'page' in response['meta']['query']:
self.page = response['meta']['query']['page']
Add static highlighting on serviceDescription fieldfrom flask import Markup
class SearchResults(object):
"""Provides access to the search results information"""
def __init__(self, response, lots_by_slug):
self.search_results = response['services']
self._lots = lots_by_slug
self._annotate()
self.total = response['meta']['total']
if 'page' in response['meta']['query']:
self.page = response['meta']['query']['page']
def _annotate(self):
for service in self.search_results:
self._replace_lot(service)
self._add_highlighting(service)
def _replace_lot(self, service):
# replace lot slug with reference to dict containing all the relevant lot data
service['lot'] = self._lots.get(service['lot'])
def _add_highlighting(self, service):
if 'highlight' in service:
for highlighted_field in ['serviceSummary', 'serviceDescription']:
if highlighted_field in service['highlight']:
service[highlighted_field] = Markup(
''.join(service['highlight'][highlighted_field])
)
class AggregationResults(object):
"""Provides access to the aggregation results information"""
def __init__(self, response):
self.results = response['aggregations']
self.total = response['meta']['total']
if 'page' in response['meta']['query']:
self.page = response['meta']['query']['page']
|
<commit_before>from flask import Markup
class SearchResults(object):
"""Provides access to the search results information"""
def __init__(self, response, lots_by_slug):
self.search_results = response['services']
self._lots = lots_by_slug
self._annotate()
self.total = response['meta']['total']
if 'page' in response['meta']['query']:
self.page = response['meta']['query']['page']
def _annotate(self):
for service in self.search_results:
self._replace_lot(service)
self._add_highlighting(service)
def _replace_lot(self, service):
# replace lot slug with reference to dict containing all the relevant lot data
service['lot'] = self._lots.get(service['lot'])
def _add_highlighting(self, service):
if 'highlight' in service:
if 'serviceSummary' in service['highlight']:
service['serviceSummary'] = Markup(
''.join(service['highlight']['serviceSummary'])
)
class AggregationResults(object):
"""Provides access to the aggregation results information"""
def __init__(self, response):
self.results = response['aggregations']
self.total = response['meta']['total']
if 'page' in response['meta']['query']:
self.page = response['meta']['query']['page']
<commit_msg>Add static highlighting on serviceDescription field<commit_after>from flask import Markup
class SearchResults(object):
"""Provides access to the search results information"""
def __init__(self, response, lots_by_slug):
self.search_results = response['services']
self._lots = lots_by_slug
self._annotate()
self.total = response['meta']['total']
if 'page' in response['meta']['query']:
self.page = response['meta']['query']['page']
def _annotate(self):
for service in self.search_results:
self._replace_lot(service)
self._add_highlighting(service)
def _replace_lot(self, service):
# replace lot slug with reference to dict containing all the relevant lot data
service['lot'] = self._lots.get(service['lot'])
def _add_highlighting(self, service):
if 'highlight' in service:
for highlighted_field in ['serviceSummary', 'serviceDescription']:
if highlighted_field in service['highlight']:
service[highlighted_field] = Markup(
''.join(service['highlight'][highlighted_field])
)
class AggregationResults(object):
"""Provides access to the aggregation results information"""
def __init__(self, response):
self.results = response['aggregations']
self.total = response['meta']['total']
if 'page' in response['meta']['query']:
self.page = response['meta']['query']['page']
|
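A standalone sketch of what the generalized loop does: search backends return highlights as a list of fragments per field, and joining them inside Markup marks the string safe so the embedded emphasis tags survive template escaping. Sample data here is illustrative.

from markupsafe import Markup  # re-exported as flask.Markup at the time of the code above

service = {
    'serviceDescription': 'plain, unhighlighted text',
    'highlight': {'serviceDescription': ['A <em>cloud</em> ', 'hosting service']},
}

for field in ['serviceSummary', 'serviceDescription']:
    if field in service.get('highlight', {}):
        service[field] = Markup(''.join(service['highlight'][field]))

print(service['serviceDescription'])  # A <em>cloud</em> hosting service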
df6339ad776aa989362089f54f3a1f675a86bfb0
|
adhocracy/lib/tiles/badge_tiles.py
|
adhocracy/lib/tiles/badge_tiles.py
|
def badge(badge):
from adhocracy.lib.templating import render_def
return render_def('/badge/tiles.html', 'badge', badge=badge,
cached=True)
def badges(badges):
from adhocracy.lib.templating import render_def
return render_def('/badge/tiles.html', 'badges', badges=badges,
cached=True)
def badge_styles():
'''
Render a <style>-block with dynamic badge styles
'''
from adhocracy.lib.templating import render_def
from adhocracy.model import Badge
badges = Badge.all()
return render_def('/badge/tiles.html', 'badge_styles', badges=badges,
cached=True)
|
def badge(badge):
from adhocracy.lib.templating import render_def
return render_def('/badge/tiles.html', 'badge', badge=badge,
cached=True)
def badges(badges):
from adhocracy.lib.templating import render_def
return render_def('/badge/tiles.html', 'badges', badges=badges,
cached=True)
def badge_styles():
'''
Render a <style>-block with dynamic badge styles
'''
from adhocracy.lib.templating import render_def
from adhocracy.model import Badge
badges = Badge.all_q().all()
return render_def('/badge/tiles.html', 'badge_styles', badges=badges,
cached=True)
|
Include style for all badges
|
badges: Include style for all badges
Not only global badges.
|
Python
|
agpl-3.0
|
phihag/adhocracy,DanielNeugebauer/adhocracy,liqd/adhocracy,DanielNeugebauer/adhocracy,alkadis/vcv,liqd/adhocracy,alkadis/vcv,DanielNeugebauer/adhocracy,alkadis/vcv,alkadis/vcv,alkadis/vcv,liqd/adhocracy,SysTheron/adhocracy,DanielNeugebauer/adhocracy,phihag/adhocracy,SysTheron/adhocracy,phihag/adhocracy,SysTheron/adhocracy,liqd/adhocracy,phihag/adhocracy,DanielNeugebauer/adhocracy,phihag/adhocracy
|
def badge(badge):
from adhocracy.lib.templating import render_def
return render_def('/badge/tiles.html', 'badge', badge=badge,
cached=True)
def badges(badges):
from adhocracy.lib.templating import render_def
return render_def('/badge/tiles.html', 'badges', badges=badges,
cached=True)
def badge_styles():
'''
    Render a <style>-block with dynamic badge styles
'''
from adhocracy.lib.templating import render_def
from adhocracy.model import Badge
badges = Badge.all()
return render_def('/badge/tiles.html', 'badge_styles', badges=badges,
cached=True)
badges: Include style for all badges
Not only global badges.
|
def badge(badge):
from adhocracy.lib.templating import render_def
return render_def('/badge/tiles.html', 'badge', badge=badge,
cached=True)
def badges(badges):
from adhocracy.lib.templating import render_def
return render_def('/badge/tiles.html', 'badges', badges=badges,
cached=True)
def badge_styles():
'''
    Render a <style>-block with dynamic badge styles
'''
from adhocracy.lib.templating import render_def
from adhocracy.model import Badge
badges = Badge.all_q().all()
return render_def('/badge/tiles.html', 'badge_styles', badges=badges,
cached=True)
|
<commit_before>def badge(badge):
from adhocracy.lib.templating import render_def
return render_def('/badge/tiles.html', 'badge', badge=badge,
cached=True)
def badges(badges):
from adhocracy.lib.templating import render_def
return render_def('/badge/tiles.html', 'badges', badges=badges,
cached=True)
def badge_styles():
'''
    Render a <style>-block with dynamic badge styles
'''
from adhocracy.lib.templating import render_def
from adhocracy.model import Badge
badges = Badge.all()
return render_def('/badge/tiles.html', 'badge_styles', badges=badges,
cached=True)
<commit_msg>badges: Include style for all badges
Not only global badges.<commit_after>
|
def badge(badge):
from adhocracy.lib.templating import render_def
return render_def('/badge/tiles.html', 'badge', badge=badge,
cached=True)
def badges(badges):
from adhocracy.lib.templating import render_def
return render_def('/badge/tiles.html', 'badges', badges=badges,
cached=True)
def badge_styles():
'''
    Render a <style>-block with dynamic badge styles
'''
from adhocracy.lib.templating import render_def
from adhocracy.model import Badge
badges = Badge.all_q().all()
return render_def('/badge/tiles.html', 'badge_styles', badges=badges,
cached=True)
|
def badge(badge):
from adhocracy.lib.templating import render_def
return render_def('/badge/tiles.html', 'badge', badge=badge,
cached=True)
def badges(badges):
from adhocracy.lib.templating import render_def
return render_def('/badge/tiles.html', 'badges', badges=badges,
cached=True)
def badge_styles():
'''
    Render a <style>-block with dynamic badge styles
'''
from adhocracy.lib.templating import render_def
from adhocracy.model import Badge
badges = Badge.all()
return render_def('/badge/tiles.html', 'badge_styles', badges=badges,
cached=True)
badges: Include style for all badges
Not only global badges.def badge(badge):
from adhocracy.lib.templating import render_def
return render_def('/badge/tiles.html', 'badge', badge=badge,
cached=True)
def badges(badges):
from adhocracy.lib.templating import render_def
return render_def('/badge/tiles.html', 'badges', badges=badges,
cached=True)
def badge_styles():
'''
    Render a <style>-block with dynamic badge styles
'''
from adhocracy.lib.templating import render_def
from adhocracy.model import Badge
badges = Badge.all_q().all()
return render_def('/badge/tiles.html', 'badge_styles', badges=badges,
cached=True)
|
<commit_before>def badge(badge):
from adhocracy.lib.templating import render_def
return render_def('/badge/tiles.html', 'badge', badge=badge,
cached=True)
def badges(badges):
from adhocracy.lib.templating import render_def
return render_def('/badge/tiles.html', 'badges', badges=badges,
cached=True)
def badge_styles():
'''
    Render a <style>-block with dynamic badge styles
'''
from adhocracy.lib.templating import render_def
from adhocracy.model import Badge
badges = Badge.all()
return render_def('/badge/tiles.html', 'badge_styles', badges=badges,
cached=True)
<commit_msg>badges: Include style for all badges
Not only global badges.<commit_after>def badge(badge):
from adhocracy.lib.templating import render_def
return render_def('/badge/tiles.html', 'badge', badge=badge,
cached=True)
def badges(badges):
from adhocracy.lib.templating import render_def
return render_def('/badge/tiles.html', 'badges', badges=badges,
cached=True)
def badge_styles():
'''
    Render a <style>-block with dynamic badge styles
'''
from adhocracy.lib.templating import render_def
from adhocracy.model import Badge
badges = Badge.all_q().all()
return render_def('/badge/tiles.html', 'badge_styles', badges=badges,
cached=True)
|
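The fix hinges on Badge.all() being narrower than Badge.all_q(): the former filters (e.g. to global badges) while the latter is the raw query over every row. A rough sketch of that accessor pattern — a hypothetical simplified model, not adhocracy's actual code:

class Badge(object):
    @classmethod
    def all_q(cls):
        # unfiltered SQLAlchemy-style query over every badge row;
        # `meta.Session` stands in for the app's session registry (assumption)
        return meta.Session.query(cls)

    @classmethod
    def all(cls, instance=None):
        # narrowed view: only badges scoped to one instance (or global ones)
        return cls.all_q().filter_by(instance=instance).all()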
dbba6f10c867e64031ae07adb3d21becfe4a4e5a
|
law/contrib/cms/__init__.py
|
law/contrib/cms/__init__.py
|
# coding: utf-8
"""
CMS-related contrib package. https://home.cern/about/experiments/cms
"""
__all__ = ["CMSJobDashboard", "BundleCMSSW"]
# provisioning imports
from law.contrib.cms.job import CMSJobDashboard
from law.contrib.cms.tasks import BundleCMSSW
|
# coding: utf-8
"""
CMS-related contrib package. https://home.cern/about/experiments/cms
"""
__all__ = ["CMSJobDashboard", "BundleCMSSW", "Site", "lfn_to_pfn"]
# provisioning imports
from law.contrib.cms.job import CMSJobDashboard
from law.contrib.cms.tasks import BundleCMSSW
from law.contrib.cms.util import Site, lfn_to_pfn
|
Load utils in cms contrib package.
|
Load utils in cms contrib package.
|
Python
|
bsd-3-clause
|
riga/law,riga/law
|
# coding: utf-8
"""
CMS-related contrib package. https://home.cern/about/experiments/cms
"""
__all__ = ["CMSJobDashboard", "BundleCMSSW"]
# provisioning imports
from law.contrib.cms.job import CMSJobDashboard
from law.contrib.cms.tasks import BundleCMSSW
Load utils in cms contrib package.
|
# coding: utf-8
"""
CMS-related contrib package. https://home.cern/about/experiments/cms
"""
__all__ = ["CMSJobDashboard", "BundleCMSSW", "Site", "lfn_to_pfn"]
# provisioning imports
from law.contrib.cms.job import CMSJobDashboard
from law.contrib.cms.tasks import BundleCMSSW
from law.contrib.cms.util import Site, lfn_to_pfn
|
<commit_before># coding: utf-8
"""
CMS-related contrib package. https://home.cern/about/experiments/cms
"""
__all__ = ["CMSJobDashboard", "BundleCMSSW"]
# provisioning imports
from law.contrib.cms.job import CMSJobDashboard
from law.contrib.cms.tasks import BundleCMSSW
<commit_msg>Load utils in cms contrib package.<commit_after>
|
# coding: utf-8
"""
CMS-related contrib package. https://home.cern/about/experiments/cms
"""
__all__ = ["CMSJobDashboard", "BundleCMSSW", "Site", "lfn_to_pfn"]
# provisioning imports
from law.contrib.cms.job import CMSJobDashboard
from law.contrib.cms.tasks import BundleCMSSW
from law.contrib.cms.util import Site, lfn_to_pfn
|
# coding: utf-8
"""
CMS-related contrib package. https://home.cern/about/experiments/cms
"""
__all__ = ["CMSJobDashboard", "BundleCMSSW"]
# provisioning imports
from law.contrib.cms.job import CMSJobDashboard
from law.contrib.cms.tasks import BundleCMSSW
Load utils in cms contrib package.# coding: utf-8
"""
CMS-related contrib package. https://home.cern/about/experiments/cms
"""
__all__ = ["CMSJobDashboard", "BundleCMSSW", "Site", "lfn_to_pfn"]
# provisioning imports
from law.contrib.cms.job import CMSJobDashboard
from law.contrib.cms.tasks import BundleCMSSW
from law.contrib.cms.util import Site, lfn_to_pfn
|
<commit_before># coding: utf-8
"""
CMS-related contrib package. https://home.cern/about/experiments/cms
"""
__all__ = ["CMSJobDashboard", "BundleCMSSW"]
# provisioning imports
from law.contrib.cms.job import CMSJobDashboard
from law.contrib.cms.tasks import BundleCMSSW
<commit_msg>Load utils in cms contrib package.<commit_after># coding: utf-8
"""
CMS-related contrib package. https://home.cern/about/experiments/cms
"""
__all__ = ["CMSJobDashboard", "BundleCMSSW", "Site", "lfn_to_pfn"]
# provisioning imports
from law.contrib.cms.job import CMSJobDashboard
from law.contrib.cms.tasks import BundleCMSSW
from law.contrib.cms.util import Site, lfn_to_pfn
|
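The provisioning imports are a plain re-export idiom: submodule names are imported at package level and listed in __all__ so callers never need to know the defining module. A generic sketch of the same pattern (hypothetical package layout, not law itself):

# pkg/__init__.py
__all__ = ["Site", "lfn_to_pfn"]

from pkg.util import Site, lfn_to_pfn

# client code
from pkg import lfn_to_pfn  # works without importing pkg.util directly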
372a1cab73dad91daab5640f472eda4552be0adb
|
chatterbot/__init__.py
|
chatterbot/__init__.py
|
"""
ChatterBot is a machine learning, conversational dialog engine.
"""
from .chatterbot import ChatBot
__version__ = '1.0.0a1'
__author__ = 'Gunther Cox'
__email__ = 'gunthercx@gmail.com'
__url__ = 'https://github.com/gunthercox/ChatterBot'
__all__ = (
'ChatBot',
)
|
"""
ChatterBot is a machine learning, conversational dialog engine.
"""
from .chatterbot import ChatBot
__version__ = '1.0.0a2'
__author__ = 'Gunther Cox'
__email__ = 'gunthercx@gmail.com'
__url__ = 'https://github.com/gunthercox/ChatterBot'
__all__ = (
'ChatBot',
)
|
Set release version to 1.0.0a2
|
Set release version to 1.0.0a2
|
Python
|
bsd-3-clause
|
gunthercox/ChatterBot,vkosuri/ChatterBot
|
"""
ChatterBot is a machine learning, conversational dialog engine.
"""
from .chatterbot import ChatBot
__version__ = '1.0.0a1'
__author__ = 'Gunther Cox'
__email__ = 'gunthercx@gmail.com'
__url__ = 'https://github.com/gunthercox/ChatterBot'
__all__ = (
'ChatBot',
)
Set release version to 1.0.0a2
|
"""
ChatterBot is a machine learning, conversational dialog engine.
"""
from .chatterbot import ChatBot
__version__ = '1.0.0a2'
__author__ = 'Gunther Cox'
__email__ = 'gunthercx@gmail.com'
__url__ = 'https://github.com/gunthercox/ChatterBot'
__all__ = (
'ChatBot',
)
|
<commit_before>"""
ChatterBot is a machine learning, conversational dialog engine.
"""
from .chatterbot import ChatBot
__version__ = '1.0.0a1'
__author__ = 'Gunther Cox'
__email__ = 'gunthercx@gmail.com'
__url__ = 'https://github.com/gunthercox/ChatterBot'
__all__ = (
'ChatBot',
)
<commit_msg>Set release version to 1.0.0a2<commit_after>
|
"""
ChatterBot is a machine learning, conversational dialog engine.
"""
from .chatterbot import ChatBot
__version__ = '1.0.0a2'
__author__ = 'Gunther Cox'
__email__ = 'gunthercx@gmail.com'
__url__ = 'https://github.com/gunthercox/ChatterBot'
__all__ = (
'ChatBot',
)
|
"""
ChatterBot is a machine learning, conversational dialog engine.
"""
from .chatterbot import ChatBot
__version__ = '1.0.0a1'
__author__ = 'Gunther Cox'
__email__ = 'gunthercx@gmail.com'
__url__ = 'https://github.com/gunthercox/ChatterBot'
__all__ = (
'ChatBot',
)
Set release version to 1.0.0a2"""
ChatterBot is a machine learning, conversational dialog engine.
"""
from .chatterbot import ChatBot
__version__ = '1.0.0a2'
__author__ = 'Gunther Cox'
__email__ = 'gunthercx@gmail.com'
__url__ = 'https://github.com/gunthercox/ChatterBot'
__all__ = (
'ChatBot',
)
|
<commit_before>"""
ChatterBot is a machine learning, conversational dialog engine.
"""
from .chatterbot import ChatBot
__version__ = '1.0.0a1'
__author__ = 'Gunther Cox'
__email__ = 'gunthercx@gmail.com'
__url__ = 'https://github.com/gunthercox/ChatterBot'
__all__ = (
'ChatBot',
)
<commit_msg>Set release version to 1.0.0a2<commit_after>"""
ChatterBot is a machine learning, conversational dialog engine.
"""
from .chatterbot import ChatBot
__version__ = '1.0.0a2'
__author__ = 'Gunther Cox'
__email__ = 'gunthercx@gmail.com'
__url__ = 'https://github.com/gunthercox/ChatterBot'
__all__ = (
'ChatBot',
)
|
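Because the version string is single-sourced in chatterbot/__init__.py, tooling can read it at runtime instead of duplicating it; a small sketch (assuming the package is importable):

import chatterbot

# e.g. gate a release script on the expected pre-release tag
assert chatterbot.__version__ == '1.0.0a2'
print(chatterbot.__version__.split('.'))  # ['1', '0', '0a2']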
3c1357627bf1921fdee114b60f96f42c328120b4
|
caramel/__init__.py
|
caramel/__init__.py
|
#! /usr/bin/env python
# vim: expandtab shiftwidth=4 softtabstop=4 tabstop=17 filetype=python :
from pyramid.config import Configurator
from sqlalchemy import engine_from_config
from .models import (
init_session,
)
def main(global_config, **settings):
"""This function returns a Pyramid WSGI application."""
engine = engine_from_config(settings, "sqlalchemy.")
init_session(engine)
config = Configurator(settings=settings)
config.add_route("ca", "/root.crt", request_method="GET")
config.add_route("cabundle", "/bundle.crt", request_method="GET")
config.add_route("csr", "/{sha256:[0-9a-f]{64}}", request_method="POST")
config.add_route("cert", "/{sha256:[0-9a-f]{64}}", request_method="GET")
config.scan()
return config.make_wsgi_app()
|
#! /usr/bin/env python
# vim: expandtab shiftwidth=4 softtabstop=4 tabstop=17 filetype=python :
from pyramid.config import Configurator
from sqlalchemy import engine_from_config
from .models import (
init_session,
)
def main(global_config, **settings):
"""This function returns a Pyramid WSGI application."""
engine = engine_from_config(settings, "sqlalchemy.")
init_session(engine)
config = Configurator(settings=settings)
config.include("pyramid_tm")
config.add_route("ca", "/root.crt", request_method="GET")
config.add_route("cabundle", "/bundle.crt", request_method="GET")
config.add_route("csr", "/{sha256:[0-9a-f]{64}}", request_method="POST")
config.add_route("cert", "/{sha256:[0-9a-f]{64}}", request_method="GET")
config.scan()
return config.make_wsgi_app()
|
Move pyramid_tm include to caramel.main
|
Caramel: Move pyramid_tm include to caramel.main
Move the setting to include pyramid_tm to caramel.main from ini files.
This is a vital setting that should never be changed by the user.
|
Python
|
agpl-3.0
|
ModioAB/caramel,ModioAB/caramel
|
#! /usr/bin/env python
# vim: expandtab shiftwidth=4 softtabstop=4 tabstop=17 filetype=python :
from pyramid.config import Configurator
from sqlalchemy import engine_from_config
from .models import (
init_session,
)
def main(global_config, **settings):
"""This function returns a Pyramid WSGI application."""
engine = engine_from_config(settings, "sqlalchemy.")
init_session(engine)
config = Configurator(settings=settings)
config.add_route("ca", "/root.crt", request_method="GET")
config.add_route("cabundle", "/bundle.crt", request_method="GET")
config.add_route("csr", "/{sha256:[0-9a-f]{64}}", request_method="POST")
config.add_route("cert", "/{sha256:[0-9a-f]{64}}", request_method="GET")
config.scan()
return config.make_wsgi_app()
Caramel: Move pyramid_tm include to caramel.main
Move the setting to include pyramid_tm to caramel.main from ini files.
This is a vital setting that should never be changed by the user.
|
#! /usr/bin/env python
# vim: expandtab shiftwidth=4 softtabstop=4 tabstop=17 filetype=python :
from pyramid.config import Configurator
from sqlalchemy import engine_from_config
from .models import (
init_session,
)
def main(global_config, **settings):
"""This function returns a Pyramid WSGI application."""
engine = engine_from_config(settings, "sqlalchemy.")
init_session(engine)
config = Configurator(settings=settings)
config.include("pyramid_tm")
config.add_route("ca", "/root.crt", request_method="GET")
config.add_route("cabundle", "/bundle.crt", request_method="GET")
config.add_route("csr", "/{sha256:[0-9a-f]{64}}", request_method="POST")
config.add_route("cert", "/{sha256:[0-9a-f]{64}}", request_method="GET")
config.scan()
return config.make_wsgi_app()
|
<commit_before>#! /usr/bin/env python
# vim: expandtab shiftwidth=4 softtabstop=4 tabstop=17 filetype=python :
from pyramid.config import Configurator
from sqlalchemy import engine_from_config
from .models import (
init_session,
)
def main(global_config, **settings):
"""This function returns a Pyramid WSGI application."""
engine = engine_from_config(settings, "sqlalchemy.")
init_session(engine)
config = Configurator(settings=settings)
config.add_route("ca", "/root.crt", request_method="GET")
config.add_route("cabundle", "/bundle.crt", request_method="GET")
config.add_route("csr", "/{sha256:[0-9a-f]{64}}", request_method="POST")
config.add_route("cert", "/{sha256:[0-9a-f]{64}}", request_method="GET")
config.scan()
return config.make_wsgi_app()
<commit_msg>Caramel: Move pyramid_tm include to caramel.main
Move the setting to include pyramid_tm to caramel.main from ini files.
This is a vital setting that should never be changed by the user.<commit_after>
|
#! /usr/bin/env python
# vim: expandtab shiftwidth=4 softtabstop=4 tabstop=17 filetype=python :
from pyramid.config import Configurator
from sqlalchemy import engine_from_config
from .models import (
init_session,
)
def main(global_config, **settings):
"""This function returns a Pyramid WSGI application."""
engine = engine_from_config(settings, "sqlalchemy.")
init_session(engine)
config = Configurator(settings=settings)
config.include("pyramid_tm")
config.add_route("ca", "/root.crt", request_method="GET")
config.add_route("cabundle", "/bundle.crt", request_method="GET")
config.add_route("csr", "/{sha256:[0-9a-f]{64}}", request_method="POST")
config.add_route("cert", "/{sha256:[0-9a-f]{64}}", request_method="GET")
config.scan()
return config.make_wsgi_app()
|
#! /usr/bin/env python
# vim: expandtab shiftwidth=4 softtabstop=4 tabstop=17 filetype=python :
from pyramid.config import Configurator
from sqlalchemy import engine_from_config
from .models import (
init_session,
)
def main(global_config, **settings):
"""This function returns a Pyramid WSGI application."""
engine = engine_from_config(settings, "sqlalchemy.")
init_session(engine)
config = Configurator(settings=settings)
config.add_route("ca", "/root.crt", request_method="GET")
config.add_route("cabundle", "/bundle.crt", request_method="GET")
config.add_route("csr", "/{sha256:[0-9a-f]{64}}", request_method="POST")
config.add_route("cert", "/{sha256:[0-9a-f]{64}}", request_method="GET")
config.scan()
return config.make_wsgi_app()
Caramel: Move pyramid_tm include to caramel.main
Move the setting to include pyramid_tm to caramel.main from ini files.
This is a vital setting that should never be changed by the user.#! /usr/bin/env python
# vim: expandtab shiftwidth=4 softtabstop=4 tabstop=17 filetype=python :
from pyramid.config import Configurator
from sqlalchemy import engine_from_config
from .models import (
init_session,
)
def main(global_config, **settings):
"""This function returns a Pyramid WSGI application."""
engine = engine_from_config(settings, "sqlalchemy.")
init_session(engine)
config = Configurator(settings=settings)
config.include("pyramid_tm")
config.add_route("ca", "/root.crt", request_method="GET")
config.add_route("cabundle", "/bundle.crt", request_method="GET")
config.add_route("csr", "/{sha256:[0-9a-f]{64}}", request_method="POST")
config.add_route("cert", "/{sha256:[0-9a-f]{64}}", request_method="GET")
config.scan()
return config.make_wsgi_app()
|
<commit_before>#! /usr/bin/env python
# vim: expandtab shiftwidth=4 softtabstop=4 tabstop=17 filetype=python :
from pyramid.config import Configurator
from sqlalchemy import engine_from_config
from .models import (
init_session,
)
def main(global_config, **settings):
"""This function returns a Pyramid WSGI application."""
engine = engine_from_config(settings, "sqlalchemy.")
init_session(engine)
config = Configurator(settings=settings)
config.add_route("ca", "/root.crt", request_method="GET")
config.add_route("cabundle", "/bundle.crt", request_method="GET")
config.add_route("csr", "/{sha256:[0-9a-f]{64}}", request_method="POST")
config.add_route("cert", "/{sha256:[0-9a-f]{64}}", request_method="GET")
config.scan()
return config.make_wsgi_app()
<commit_msg>Caramel: Move pyramid_tm include to caramel.main
Move the setting to include pyramid_tm to caramel.main from ini files.
This is a vital setting that should never be changed by the user.<commit_after>#! /usr/bin/env python
# vim: expandtab shiftwidth=4 softtabstop=4 tabstop=17 filetype=python :
from pyramid.config import Configurator
from sqlalchemy import engine_from_config
from .models import (
init_session,
)
def main(global_config, **settings):
"""This function returns a Pyramid WSGI application."""
engine = engine_from_config(settings, "sqlalchemy.")
init_session(engine)
config = Configurator(settings=settings)
config.include("pyramid_tm")
config.add_route("ca", "/root.crt", request_method="GET")
config.add_route("cabundle", "/bundle.crt", request_method="GET")
config.add_route("csr", "/{sha256:[0-9a-f]{64}}", request_method="POST")
config.add_route("cert", "/{sha256:[0-9a-f]{64}}", request_method="GET")
config.scan()
return config.make_wsgi_app()
|
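Pinning the include in main() means a deployment ini no longer has to (and can no longer forget to) pull in the transaction manager; a sketch from the configuration side, with an illustrative ini fragment:

from pyramid.config import Configurator

# before: every ini had to carry the include
#   [app:main]
#   pyramid.includes = pyramid_tm
#
# after: main() activates it unconditionally
settings = {}  # even an empty settings dict now gets transaction management
config = Configurator(settings=settings)
config.include("pyramid_tm")  # always on, regardless of ini contents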
409ad4af8a4ad933667d91709822a04dbbda77ac
|
km3pipe/cmd.py
|
km3pipe/cmd.py
|
# coding=utf-8
# Filename: cmd.py
"""
KM3Pipe command line utility.
Usage:
km3pipe test
km3pipe tohdf5 [-s] -i FILE -o FILE
km3pipe (-h | --help)
km3pipe --version
Options:
-h --help Show this screen.
-i FILE Input file.
-o FILE Output file.
-s Write each event in a separate dataset.
"""
from __future__ import division, absolute_import, print_function
from km3pipe import version
def tohdf5(input_file, output_file, separate_events=False):
"""Convert ROOT file to HDF5 file"""
from km3pipe import Pipeline # noqa
from km3pipe.pumps import AanetPump, HDF5Sink, HDF5SinkLegacy # noqa
sink = HDF5Sink if separate_events else HDF5SinkLegacy
pipe = Pipeline()
pipe.attach(AanetPump, filename=input_file)
pipe.attach(sink,
filename=output_file,
separate_events=separate_events)
pipe.drain()
def main():
from docopt import docopt
arguments = docopt(__doc__, version=version)
if arguments['tohdf5']:
tohdf5(arguments['-i'], arguments['-o'], arguments['-s'])
|
# coding=utf-8
# Filename: cmd.py
"""
KM3Pipe command line utility.
Usage:
km3pipe test
km3pipe tohdf5 [-s] -i FILE -o FILE
km3pipe (-h | --help)
km3pipe --version
Options:
-h --help Show this screen.
-i FILE Input file.
-o FILE Output file.
-s Write each event in a separate dataset.
"""
from __future__ import division, absolute_import, print_function
from km3pipe import version
def tohdf5(input_file, output_file, use_tables=True):
"""Convert ROOT file to HDF5 file"""
from km3pipe import Pipeline # noqa
from km3pipe.pumps import AanetPump, HDF5Sink, HDF5TableSink # noqa
sink = HDF5TableSink if use_tables else HDF5Sink
pipe = Pipeline()
pipe.attach(AanetPump, filename=input_file)
pipe.attach(sink,
filename=output_file,
separate_events=separate_events)
pipe.drain()
def main():
from docopt import docopt
arguments = docopt(__doc__, version=version)
if arguments['tohdf5']:
tohdf5(arguments['-i'], arguments['-o'], arguments['-s'])
|
Set HDF5TableSink as default sink
|
Set HDF5TableSink as default sink
|
Python
|
mit
|
tamasgal/km3pipe,tamasgal/km3pipe
|
# coding=utf-8
# Filename: cmd.py
"""
KM3Pipe command line utility.
Usage:
km3pipe test
km3pipe tohdf5 [-s] -i FILE -o FILE
km3pipe (-h | --help)
km3pipe --version
Options:
-h --help Show this screen.
-i FILE Input file.
-o FILE Output file.
-s Write each event in a separate dataset.
"""
from __future__ import division, absolute_import, print_function
from km3pipe import version
def tohdf5(input_file, output_file, separate_events=False):
"""Convert ROOT file to HDF5 file"""
from km3pipe import Pipeline # noqa
from km3pipe.pumps import AanetPump, HDF5Sink, HDF5SinkLegacy # noqa
sink = HDF5Sink if separate_events else HDF5SinkLegacy
pipe = Pipeline()
pipe.attach(AanetPump, filename=input_file)
pipe.attach(sink,
filename=output_file,
separate_events=separate_events)
pipe.drain()
def main():
from docopt import docopt
arguments = docopt(__doc__, version=version)
if arguments['tohdf5']:
tohdf5(arguments['-i'], arguments['-o'], arguments['-s'])
Set HDF5TableSink as default sink
|
# coding=utf-8
# Filename: cmd.py
"""
KM3Pipe command line utility.
Usage:
km3pipe test
km3pipe tohdf5 [-s] -i FILE -o FILE
km3pipe (-h | --help)
km3pipe --version
Options:
-h --help Show this screen.
-i FILE Input file.
-o FILE Output file.
-s Write each event in a separate dataset.
"""
from __future__ import division, absolute_import, print_function
from km3pipe import version
def tohdf5(input_file, output_file, use_tables=True):
"""Convert ROOT file to HDF5 file"""
from km3pipe import Pipeline # noqa
from km3pipe.pumps import AanetPump, HDF5Sink, HDF5TableSink # noqa
sink = HDF5TableSink if use_tables else HDF5Sink
pipe = Pipeline()
pipe.attach(AanetPump, filename=input_file)
pipe.attach(sink,
filename=output_file,
separate_events=separate_events)
pipe.drain()
def main():
from docopt import docopt
arguments = docopt(__doc__, version=version)
if arguments['tohdf5']:
tohdf5(arguments['-i'], arguments['-o'], arguments['-s'])
|
<commit_before># coding=utf-8
# Filename: cmd.py
"""
KM3Pipe command line utility.
Usage:
km3pipe test
km3pipe tohdf5 [-s] -i FILE -o FILE
km3pipe (-h | --help)
km3pipe --version
Options:
-h --help Show this screen.
-i FILE Input file.
-o FILE Output file.
-s Write each event in a separate dataset.
"""
from __future__ import division, absolute_import, print_function
from km3pipe import version
def tohdf5(input_file, output_file, separate_events=False):
"""Convert ROOT file to HDF5 file"""
from km3pipe import Pipeline # noqa
from km3pipe.pumps import AanetPump, HDF5Sink, HDF5SinkLegacy # noqa
sink = HDF5Sink if separate_events else HDF5SinkLegacy
pipe = Pipeline()
pipe.attach(AanetPump, filename=input_file)
pipe.attach(sink,
filename=output_file,
separate_events=separate_events)
pipe.drain()
def main():
from docopt import docopt
arguments = docopt(__doc__, version=version)
if arguments['tohdf5']:
tohdf5(arguments['-i'], arguments['-o'], arguments['-s'])
<commit_msg>Set HDF5TableSink as default sink<commit_after>
|
# coding=utf-8
# Filename: cmd.py
"""
KM3Pipe command line utility.
Usage:
km3pipe test
km3pipe tohdf5 [-s] -i FILE -o FILE
km3pipe (-h | --help)
km3pipe --version
Options:
-h --help Show this screen.
-i FILE Input file.
-o FILE Output file.
-s Write each event in a separate dataset.
"""
from __future__ import division, absolute_import, print_function
from km3pipe import version
def tohdf5(input_file, output_file, use_tables=True):
"""Convert ROOT file to HDF5 file"""
from km3pipe import Pipeline # noqa
from km3pipe.pumps import AanetPump, HDF5Sink, HDF5TableSink # noqa
sink = HDF5TableSink if use_tables else HDF5Sink
pipe = Pipeline()
pipe.attach(AanetPump, filename=input_file)
pipe.attach(sink,
filename=output_file,
separate_events=separate_events)
pipe.drain()
def main():
from docopt import docopt
arguments = docopt(__doc__, version=version)
if arguments['tohdf5']:
tohdf5(arguments['-i'], arguments['-o'], arguments['-s'])
|
# coding=utf-8
# Filename: cmd.py
"""
KM3Pipe command line utility.
Usage:
km3pipe test
km3pipe tohdf5 [-s] -i FILE -o FILE
km3pipe (-h | --help)
km3pipe --version
Options:
-h --help Show this screen.
-i FILE Input file.
-o FILE Output file.
-s Write each event in a separate dataset.
"""
from __future__ import division, absolute_import, print_function
from km3pipe import version
def tohdf5(input_file, output_file, separate_events=False):
"""Convert ROOT file to HDF5 file"""
from km3pipe import Pipeline # noqa
from km3pipe.pumps import AanetPump, HDF5Sink, HDF5SinkLegacy # noqa
sink = HDF5Sink if separate_events else HDF5SinkLegacy
pipe = Pipeline()
pipe.attach(AanetPump, filename=input_file)
pipe.attach(sink,
filename=output_file,
separate_events=separate_events)
pipe.drain()
def main():
from docopt import docopt
arguments = docopt(__doc__, version=version)
if arguments['tohdf5']:
tohdf5(arguments['-i'], arguments['-o'], arguments['-s'])
Set HDF5TableSink as default sink# coding=utf-8
# Filename: cmd.py
"""
KM3Pipe command line utility.
Usage:
km3pipe test
km3pipe tohdf5 [-s] -i FILE -o FILE
km3pipe (-h | --help)
km3pipe --version
Options:
-h --help Show this screen.
-i FILE Input file.
-o FILE Output file.
-s Write each event in a separate dataset.
"""
from __future__ import division, absolute_import, print_function
from km3pipe import version
def tohdf5(input_file, output_file, use_tables=True):
"""Convert ROOT file to HDF5 file"""
from km3pipe import Pipeline # noqa
from km3pipe.pumps import AanetPump, HDF5Sink, HDF5TableSink # noqa
sink = HDF5TableSink if use_tables else HDF5Sink
pipe = Pipeline()
pipe.attach(AanetPump, filename=input_file)
pipe.attach(sink,
filename=output_file,
separate_events=separate_events)
pipe.drain()
def main():
from docopt import docopt
arguments = docopt(__doc__, version=version)
if arguments['tohdf5']:
tohdf5(arguments['-i'], arguments['-o'], arguments['-s'])
|
<commit_before># coding=utf-8
# Filename: cmd.py
"""
KM3Pipe command line utility.
Usage:
km3pipe test
km3pipe tohdf5 [-s] -i FILE -o FILE
km3pipe (-h | --help)
km3pipe --version
Options:
-h --help Show this screen.
-i FILE Input file.
-o FILE Output file.
-s Write each event in a separate dataset.
"""
from __future__ import division, absolute_import, print_function
from km3pipe import version
def tohdf5(input_file, output_file, separate_events=False):
"""Convert ROOT file to HDF5 file"""
from km3pipe import Pipeline # noqa
from km3pipe.pumps import AanetPump, HDF5Sink, HDF5SinkLegacy # noqa
sink = HDF5Sink if separate_events else HDF5SinkLegacy
pipe = Pipeline()
pipe.attach(AanetPump, filename=input_file)
pipe.attach(sink,
filename=output_file,
separate_events=separate_events)
pipe.drain()
def main():
from docopt import docopt
arguments = docopt(__doc__, version=version)
if arguments['tohdf5']:
tohdf5(arguments['-i'], arguments['-o'], arguments['-s'])
<commit_msg>Set HDF5TableSink as default sink<commit_after># coding=utf-8
# Filename: cmd.py
"""
KM3Pipe command line utility.
Usage:
km3pipe test
km3pipe tohdf5 [-s] -i FILE -o FILE
km3pipe (-h | --help)
km3pipe --version
Options:
-h --help Show this screen.
-i FILE Input file.
-o FILE Output file.
-s Write each event in a separate dataset.
"""
from __future__ import division, absolute_import, print_function
from km3pipe import version
def tohdf5(input_file, output_file, use_tables=True):
"""Convert ROOT file to HDF5 file"""
from km3pipe import Pipeline # noqa
from km3pipe.pumps import AanetPump, HDF5Sink, HDF5TableSink # noqa
sink = HDF5TableSink if use_tables else HDF5Sink
pipe = Pipeline()
pipe.attach(AanetPump, filename=input_file)
pipe.attach(sink,
filename=output_file,
separate_events=separate_events)
pipe.drain()
def main():
from docopt import docopt
arguments = docopt(__doc__, version=version)
if arguments['tohdf5']:
tohdf5(arguments['-i'], arguments['-o'], arguments['-s'])
|
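One leftover in the updated tohdf5(): the keyword was renamed to use_tables, but the attach call still passes separate_events=separate_events, a name no longer defined in the function, so invoking it would raise NameError. A hedged sketch of what the call presumably intends — our reading, not the recorded commit:

def tohdf5(input_file, output_file, use_tables=True):
    """Convert ROOT file to HDF5 file"""
    from km3pipe import Pipeline
    from km3pipe.pumps import AanetPump, HDF5Sink, HDF5TableSink

    sink = HDF5TableSink if use_tables else HDF5Sink
    pipe = Pipeline()
    pipe.attach(AanetPump, filename=input_file)
    pipe.attach(sink, filename=output_file)  # drop the stale separate_events kwarg
    pipe.drain()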
16f531cb7e9d067725a4c25a4321773aada9616d
|
api/v2/views/tag.py
|
api/v2/views/tag.py
|
from core.models import Tag
from api.permissions import CloudAdminRequired
from api.v2.serializers.summaries import TagSummarySerializer
from api.v2.views.base import AuthReadOnlyViewSet
class TagViewSet(AuthReadOnlyViewSet):
"""
API endpoint that allows tags to be viewed or edited.
"""
queryset = Tag.objects.all()
serializer_class = TagSummarySerializer
max_paginate_by = 1000
def perform_create(self, serializer):
serializer.save(user=self.request.user)
def get_permissions(self):
if self.request.method in ["POST", "PUT", "DELETE"]:
self.permission_classes = (CloudAdminRequired,)
return super(TagViewSet, self).get_permissions()
|
from threepio import logger
from core.models import Tag
from api.permissions import ApiAuthRequired, CloudAdminRequired,\
InMaintenance
from api.v2.serializers.summaries import TagSummarySerializer
from api.v2.views.base import AuthOptionalViewSet
class TagViewSet(AuthOptionalViewSet):
"""
API endpoint that allows tags to be viewed or edited.
"""
queryset = Tag.objects.all()
serializer_class = TagSummarySerializer
max_paginate_by = 1000
def perform_create(self, serializer):
same_name_tags = Tag.objects.filter(
name__iexact=serializer.validated_data.get("name"))
if same_name_tags:
raise serializers.ValidationError(
"A tag with this name already exists: %s" %
same_name_tags.first().name)
serializer.save(user=self.request.user)
def get_permissions(self):
if self.request.method is "":
self.permission_classes = (ApiAuthRequired,
InMaintenance,)
if self.request.method in ["PUT", "DELETE"]:
self.permission_classes = (CloudAdminRequired,
InMaintenance,)
return super(TagViewSet, self).get_permissions()
|
Address @jchansen's requests. No dupes. POST for authorized users, PUT DELETE for cloud admins and staff.
|
Address @jchansen's requests. No dupes. POST for authorized users, PUT DELETE for cloud admins and staff.
modified: api/v2/views/tag.py
|
Python
|
apache-2.0
|
CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend
|
from core.models import Tag
from api.permissions import CloudAdminRequired
from api.v2.serializers.summaries import TagSummarySerializer
from api.v2.views.base import AuthReadOnlyViewSet
class TagViewSet(AuthReadOnlyViewSet):
"""
API endpoint that allows tags to be viewed or edited.
"""
queryset = Tag.objects.all()
serializer_class = TagSummarySerializer
max_paginate_by = 1000
def perform_create(self, serializer):
serializer.save(user=self.request.user)
def get_permissions(self):
if self.request.method in ["POST", "PUT", "DELETE"]:
self.permission_classes = (CloudAdminRequired,)
return super(TagViewSet, self).get_permissions()
Address @jchansen's requests. No dupes. POST for authorized users, PUT DELETE for cloud admins and staff.
modified: api/v2/views/tag.py
|
from threepio import logger
from core.models import Tag
from api.permissions import ApiAuthRequired, CloudAdminRequired,\
InMaintenance
from api.v2.serializers.summaries import TagSummarySerializer
from api.v2.views.base import AuthOptionalViewSet
class TagViewSet(AuthOptionalViewSet):
"""
API endpoint that allows tags to be viewed or edited.
"""
queryset = Tag.objects.all()
serializer_class = TagSummarySerializer
max_paginate_by = 1000
def perform_create(self, serializer):
same_name_tags = Tag.objects.filter(
name__iexact=serializer.validated_data.get("name"))
if same_name_tags:
raise serializers.ValidationError(
"A tag with this name already exists: %s" %
same_name_tags.first().name)
serializer.save(user=self.request.user)
def get_permissions(self):
if self.request.method is "":
self.permission_classes = (ApiAuthRequired,
InMaintenance,)
if self.request.method in ["PUT", "DELETE"]:
self.permission_classes = (CloudAdminRequired,
InMaintenance,)
return super(TagViewSet, self).get_permissions()
|
<commit_before>from core.models import Tag
from api.permissions import CloudAdminRequired
from api.v2.serializers.summaries import TagSummarySerializer
from api.v2.views.base import AuthReadOnlyViewSet
class TagViewSet(AuthReadOnlyViewSet):
"""
API endpoint that allows tags to be viewed or edited.
"""
queryset = Tag.objects.all()
serializer_class = TagSummarySerializer
max_paginate_by = 1000
def perform_create(self, serializer):
serializer.save(user=self.request.user)
def get_permissions(self):
if self.request.method in ["POST", "PUT", "DELETE"]:
self.permission_classes = (CloudAdminRequired,)
return super(TagViewSet, self).get_permissions()
<commit_msg>Address @jchansen's requests. No dupes. POST for authorized users, PUT DELETE for cloud admins and staff.
modified: api/v2/views/tag.py<commit_after>
|
from threepio import logger
from core.models import Tag
from api.permissions import ApiAuthRequired, CloudAdminRequired,\
InMaintenance
from api.v2.serializers.summaries import TagSummarySerializer
from api.v2.views.base import AuthOptionalViewSet
class TagViewSet(AuthOptionalViewSet):
"""
API endpoint that allows tags to be viewed or edited.
"""
queryset = Tag.objects.all()
serializer_class = TagSummarySerializer
max_paginate_by = 1000
def perform_create(self, serializer):
same_name_tags = Tag.objects.filter(
name__iexact=serializer.validated_data.get("name"))
if same_name_tags:
raise serializers.ValidationError(
"A tag with this name already exists: %s" %
same_name_tags.first().name)
serializer.save(user=self.request.user)
def get_permissions(self):
if self.request.method is "":
self.permission_classes = (ApiAuthRequired,
InMaintenance,)
if self.request.method in ["PUT", "DELETE"]:
self.permission_classes = (CloudAdminRequired,
InMaintenance,)
return super(TagViewSet, self).get_permissions()
|
from core.models import Tag
from api.permissions import CloudAdminRequired
from api.v2.serializers.summaries import TagSummarySerializer
from api.v2.views.base import AuthReadOnlyViewSet
class TagViewSet(AuthReadOnlyViewSet):
"""
API endpoint that allows tags to be viewed or edited.
"""
queryset = Tag.objects.all()
serializer_class = TagSummarySerializer
max_paginate_by = 1000
def perform_create(self, serializer):
serializer.save(user=self.request.user)
def get_permissions(self):
if self.request.method in ["POST", "PUT", "DELETE"]:
self.permission_classes = (CloudAdminRequired,)
return super(TagViewSet, self).get_permissions()
Address @jchansen's requests. No dupes. POST for authorized users, PUT DELETE for cloud admins and staff.
modified: api/v2/views/tag.pyfrom threepio import logger
from core.models import Tag
from api.permissions import ApiAuthRequired, CloudAdminRequired,\
InMaintenance
from api.v2.serializers.summaries import TagSummarySerializer
from api.v2.views.base import AuthOptionalViewSet
class TagViewSet(AuthOptionalViewSet):
"""
API endpoint that allows tags to be viewed or edited.
"""
queryset = Tag.objects.all()
serializer_class = TagSummarySerializer
max_paginate_by = 1000
def perform_create(self, serializer):
same_name_tags = Tag.objects.filter(
name__iexact=serializer.validated_data.get("name"))
if same_name_tags:
raise serializers.ValidationError(
"A tag with this name already exists: %s" %
same_name_tags.first().name)
serializer.save(user=self.request.user)
def get_permissions(self):
if self.request.method is "":
self.permission_classes = (ApiAuthRequired,
InMaintenance,)
if self.request.method in ["PUT", "DELETE"]:
self.permission_classes = (CloudAdminRequired,
InMaintenance,)
return super(TagViewSet, self).get_permissions()
|
<commit_before>from core.models import Tag
from api.permissions import CloudAdminRequired
from api.v2.serializers.summaries import TagSummarySerializer
from api.v2.views.base import AuthReadOnlyViewSet
class TagViewSet(AuthReadOnlyViewSet):
"""
API endpoint that allows tags to be viewed or edited.
"""
queryset = Tag.objects.all()
serializer_class = TagSummarySerializer
max_paginate_by = 1000
def perform_create(self, serializer):
serializer.save(user=self.request.user)
def get_permissions(self):
if self.request.method in ["POST", "PUT", "DELETE"]:
self.permission_classes = (CloudAdminRequired,)
return super(TagViewSet, self).get_permissions()
<commit_msg>Address @jchansen's requests. No dupes. POST for authorized users, PUT DELETE for cloud admins and staff.
modified: api/v2/views/tag.py<commit_after>from threepio import logger
from core.models import Tag
from api.permissions import ApiAuthRequired, CloudAdminRequired,\
InMaintenance
from api.v2.serializers.summaries import TagSummarySerializer
from api.v2.views.base import AuthOptionalViewSet
class TagViewSet(AuthOptionalViewSet):
"""
API endpoint that allows tags to be viewed or edited.
"""
queryset = Tag.objects.all()
serializer_class = TagSummarySerializer
max_paginate_by = 1000
def perform_create(self, serializer):
same_name_tags = Tag.objects.filter(
name__iexact=serializer.validated_data.get("name"))
if same_name_tags:
raise serializers.ValidationError(
"A tag with this name already exists: %s" %
same_name_tags.first().name)
serializer.save(user=self.request.user)
def get_permissions(self):
if self.request.method is "":
self.permission_classes = (ApiAuthRequired,
InMaintenance,)
if self.request.method in ["PUT", "DELETE"]:
self.permission_classes = (CloudAdminRequired,
InMaintenance,)
return super(TagViewSet, self).get_permissions()
|
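Two details in the updated TagViewSet would surface at runtime: perform_create references serializers without importing it, and `self.request.method is ""` compares by identity against an empty string, so that branch can never match an HTTP method. Given the commit message (POST for authorized users, PUT/DELETE for cloud admins), the intended logic is presumably — our reading, not the recorded commit:

from rest_framework import serializers  # needed by perform_create

def get_permissions(self):
    if self.request.method == "POST":
        self.permission_classes = (ApiAuthRequired, InMaintenance,)
    if self.request.method in ["PUT", "DELETE"]:
        self.permission_classes = (CloudAdminRequired, InMaintenance,)
    return super(TagViewSet, self).get_permissions()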
00a497b21b9c788cb38da6c92a985e1b5c22801a
|
apps/survey/urls.py
|
apps/survey/urls.py
|
from django.conf.urls.defaults import *
from . import views
urlpatterns = patterns('',
url(r'^profile/$', views.profile_index, name='survey_profile'),
url(r'^profile/electric/$', views.profile_electric, name='survey_profile_electric'),
#url(r'^profile/intake/$', views.survey_intake, name='survey_profile_intake'),
url(r'^profile/surveys/$', views.survey_management, name='survey_management'),
url(r'^main/$', views.main_index),
url(r'^survey_management/$', views.survey_management, name='survey_management'),
#url(r'^survey_data/(?P<survey_shortname>.+)/(?P<id>\d+)/$', views.survey_data, name='survey_data'),
url(r'^intake/$', views.survey_data, name='survey_data'),
url(r'^monthly/(?P<id>\d+)/$', views.survey_data_monthly ,name='survey_data_monthly'),
url(r'^thanks_profile/$', views.thanks_profile, name='profile_thanks'),
#url(r'^select/$', views.select_user, name='survey_select_user'),
url(r'^$', views.index, name='survey_index'),
)
|
from django.conf.urls.defaults import *
from . import views
urlpatterns = patterns('',
url(r'^profile/$', views.profile_index, name='survey_profile'),
url(r'^profile/electric/$', views.profile_electric, name='survey_profile_electric'),
url(r'^profile/surveys/$', views.survey_management, name='survey_management'),
url(r'^main/$', views.main_index),
url(r'^survey_management/$', views.survey_management, name='survey_management'),
url(r'^intake/view/$', views.survey_intake_view, name='survey_intake_view'),
url(r'^intake/update/$', views.survey_intake_update, name='survey_intake_update'),
url(r'^monthly/(?P<id>\d+)/$', views.survey_monthly ,name='survey_monthly'),
url(r'^monthly/(?P<id>\d+)/update/$', views.survey_monthly_update ,name='survey_monthly_update'),
url(r'^thanks_profile/$', views.thanks_profile, name='profile_thanks'),
#url(r'^select/$', views.select_user, name='survey_select_user'),
url(r'^$', views.index, name='survey_index'),
)
|
Add view and update decorators
|
Add view and update decorators
|
Python
|
agpl-3.0
|
chispita/epiwork,chispita/epiwork,chispita/epiwork,chispita/epiwork,chispita/epiwork,chispita/epiwork,chispita/epiwork
|
from django.conf.urls.defaults import *
from . import views
urlpatterns = patterns('',
url(r'^profile/$', views.profile_index, name='survey_profile'),
url(r'^profile/electric/$', views.profile_electric, name='survey_profile_electric'),
#url(r'^profile/intake/$', views.survey_intake, name='survey_profile_intake'),
url(r'^profile/surveys/$', views.survey_management, name='survey_management'),
url(r'^main/$', views.main_index),
url(r'^survey_management/$', views.survey_management, name='survey_management'),
#url(r'^survey_data/(?P<survey_shortname>.+)/(?P<id>\d+)/$', views.survey_data, name='survey_data'),
url(r'^intake/$', views.survey_data, name='survey_data'),
url(r'^monthly/(?P<id>\d+)/$', views.survey_data_monthly ,name='survey_data_monthly'),
url(r'^thanks_profile/$', views.thanks_profile, name='profile_thanks'),
#url(r'^select/$', views.select_user, name='survey_select_user'),
url(r'^$', views.index, name='survey_index'),
)
Add view and update decorators
|
from django.conf.urls.defaults import *
from . import views
urlpatterns = patterns('',
url(r'^profile/$', views.profile_index, name='survey_profile'),
url(r'^profile/electric/$', views.profile_electric, name='survey_profile_electric'),
url(r'^profile/surveys/$', views.survey_management, name='survey_management'),
url(r'^main/$', views.main_index),
url(r'^survey_management/$', views.survey_management, name='survey_management'),
url(r'^intake/view/$', views.survey_intake_view, name='survey_intake_view'),
url(r'^intake/update/$', views.survey_intake_update, name='survey_intake_update'),
url(r'^monthly/(?P<id>\d+)/$', views.survey_monthly ,name='survey_monthly'),
url(r'^monthly/(?P<id>\d+)/update/$', views.survey_monthly_update ,name='survey_monthly_update'),
url(r'^thanks_profile/$', views.thanks_profile, name='profile_thanks'),
#url(r'^select/$', views.select_user, name='survey_select_user'),
url(r'^$', views.index, name='survey_index'),
)
|
<commit_before>from django.conf.urls.defaults import *
from . import views
urlpatterns = patterns('',
url(r'^profile/$', views.profile_index, name='survey_profile'),
url(r'^profile/electric/$', views.profile_electric, name='survey_profile_electric'),
#url(r'^profile/intake/$', views.survey_intake, name='survey_profile_intake'),
url(r'^profile/surveys/$', views.survey_management, name='survey_management'),
url(r'^main/$', views.main_index),
url(r'^survey_management/$', views.survey_management, name='survey_management'),
#url(r'^survey_data/(?P<survey_shortname>.+)/(?P<id>\d+)/$', views.survey_data, name='survey_data'),
url(r'^intake/$', views.survey_data, name='survey_data'),
url(r'^monthly/(?P<id>\d+)/$', views.survey_data_monthly ,name='survey_data_monthly'),
url(r'^thanks_profile/$', views.thanks_profile, name='profile_thanks'),
#url(r'^select/$', views.select_user, name='survey_select_user'),
url(r'^$', views.index, name='survey_index'),
)
<commit_msg>Add view and update decorators<commit_after>
|
from django.conf.urls.defaults import *
from . import views
urlpatterns = patterns('',
url(r'^profile/$', views.profile_index, name='survey_profile'),
url(r'^profile/electric/$', views.profile_electric, name='survey_profile_electric'),
url(r'^profile/surveys/$', views.survey_management, name='survey_management'),
url(r'^main/$', views.main_index),
url(r'^survey_management/$', views.survey_management, name='survey_management'),
url(r'^intake/view/$', views.survey_intake_view, name='survey_intake_view'),
url(r'^intake/update/$', views.survey_intake_update, name='survey_intake_update'),
url(r'^monthly/(?P<id>\d+)/$', views.survey_monthly ,name='survey_monthly'),
url(r'^monthly/(?P<id>\d+)/update/$', views.survey_monthly_update ,name='survey_monthly_update'),
url(r'^thanks_profile/$', views.thanks_profile, name='profile_thanks'),
#url(r'^select/$', views.select_user, name='survey_select_user'),
url(r'^$', views.index, name='survey_index'),
)
|
from django.conf.urls.defaults import *
from . import views
urlpatterns = patterns('',
url(r'^profile/$', views.profile_index, name='survey_profile'),
url(r'^profile/electric/$', views.profile_electric, name='survey_profile_electric'),
#url(r'^profile/intake/$', views.survey_intake, name='survey_profile_intake'),
url(r'^profile/surveys/$', views.survey_management, name='survey_management'),
url(r'^main/$', views.main_index),
url(r'^survey_management/$', views.survey_management, name='survey_management'),
#url(r'^survey_data/(?P<survey_shortname>.+)/(?P<id>\d+)/$', views.survey_data, name='survey_data'),
url(r'^intake/$', views.survey_data, name='survey_data'),
url(r'^monthly/(?P<id>\d+)/$', views.survey_data_monthly ,name='survey_data_monthly'),
url(r'^thanks_profile/$', views.thanks_profile, name='profile_thanks'),
#url(r'^select/$', views.select_user, name='survey_select_user'),
url(r'^$', views.index, name='survey_index'),
)
Add view and update decoratorsfrom django.conf.urls.defaults import *
from . import views
urlpatterns = patterns('',
url(r'^profile/$', views.profile_index, name='survey_profile'),
url(r'^profile/electric/$', views.profile_electric, name='survey_profile_electric'),
url(r'^profile/surveys/$', views.survey_management, name='survey_management'),
url(r'^main/$', views.main_index),
url(r'^survey_management/$', views.survey_management, name='survey_management'),
url(r'^intake/view/$', views.survey_intake_view, name='survey_intake_view'),
url(r'^intake/update/$', views.survey_intake_update, name='survey_intake_update'),
url(r'^monthly/(?P<id>\d+)/$', views.survey_monthly ,name='survey_monthly'),
url(r'^monthly/(?P<id>\d+)/update/$', views.survey_monthly_update ,name='survey_monthly_update'),
url(r'^thanks_profile/$', views.thanks_profile, name='profile_thanks'),
#url(r'^select/$', views.select_user, name='survey_select_user'),
url(r'^$', views.index, name='survey_index'),
)
|
<commit_before>from django.conf.urls.defaults import *
from . import views
urlpatterns = patterns('',
url(r'^profile/$', views.profile_index, name='survey_profile'),
url(r'^profile/electric/$', views.profile_electric, name='survey_profile_electric'),
#url(r'^profile/intake/$', views.survey_intake, name='survey_profile_intake'),
url(r'^profile/surveys/$', views.survey_management, name='survey_management'),
url(r'^main/$', views.main_index),
url(r'^survey_management/$', views.survey_management, name='survey_management'),
#url(r'^survey_data/(?P<survey_shortname>.+)/(?P<id>\d+)/$', views.survey_data, name='survey_data'),
url(r'^intake/$', views.survey_data, name='survey_data'),
url(r'^monthly/(?P<id>\d+)/$', views.survey_data_monthly ,name='survey_data_monthly'),
url(r'^thanks_profile/$', views.thanks_profile, name='profile_thanks'),
#url(r'^select/$', views.select_user, name='survey_select_user'),
url(r'^$', views.index, name='survey_index'),
)
<commit_msg>Add view and update decorators<commit_after>from django.conf.urls.defaults import *
from . import views
urlpatterns = patterns('',
url(r'^profile/$', views.profile_index, name='survey_profile'),
url(r'^profile/electric/$', views.profile_electric, name='survey_profile_electric'),
url(r'^profile/surveys/$', views.survey_management, name='survey_management'),
url(r'^main/$', views.main_index),
url(r'^survey_management/$', views.survey_management, name='survey_management'),
url(r'^intake/view/$', views.survey_intake_view, name='survey_intake_view'),
url(r'^intake/update/$', views.survey_intake_update, name='survey_intake_update'),
url(r'^monthly/(?P<id>\d+)/$', views.survey_monthly ,name='survey_monthly'),
url(r'^monthly/(?P<id>\d+)/update/$', views.survey_monthly_update ,name='survey_monthly_update'),
url(r'^thanks_profile/$', views.thanks_profile, name='profile_thanks'),
#url(r'^select/$', views.select_user, name='survey_select_user'),
url(r'^$', views.index, name='survey_index'),
)
|
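With the view/update split, callers address each step by route name; a small sketch using the defaults-era reverse() (paths shown assume this urlconf is mounted at the site root):

from django.core.urlresolvers import reverse

reverse('survey_intake_view')                        # -> '/intake/view/'
reverse('survey_monthly_update', kwargs={'id': 12})  # -> '/monthly/12/update/'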
5a27329e32942523e73d2d01b43ba75ecd281622
|
dask/array/__init__.py
|
dask/array/__init__.py
|
from __future__ import absolute_import, division, print_function
from ..utils import ignoring
from .core import (Array, stack, concatenate, tensordot, transpose, from_array,
choose, coarsen, constant, fromfunction, compute, unique, store)
from .core import (arccos, arcsin, arctan, arctanh, arccosh, arcsinh, arctan2,
ceil, copysign, cos, cosh, degrees, exp, expm1, fabs, floor, fmod,
frexp, hypot, isinf, isnan, ldexp, log, log10, log1p, modf, radians,
sin, sinh, sqrt, tan, tanh, trunc, isnull, notnull)
from .reductions import (sum, prod, mean, std, var, any, all, min, max, vnorm,
argmin, argmax,
nansum, nanmean, nanstd, nanvar, nanmin,
nanmax, nanargmin, nanargmax)
from .percentile import percentile
with ignoring(ImportError):
from .reductions import nanprod
from . import random, linalg, ghost
from .wrap import ones, zeros, empty
from ..context import set_options
|
from __future__ import absolute_import, division, print_function
from ..utils import ignoring
from .core import (Array, stack, concatenate, tensordot, transpose, from_array,
choose, where, coarsen, constant, fromfunction, compute, unique, store)
from .core import (arccos, arcsin, arctan, arctanh, arccosh, arcsinh, arctan2,
ceil, copysign, cos, cosh, degrees, exp, expm1, fabs, floor, fmod,
frexp, hypot, isinf, isnan, ldexp, log, log10, log1p, modf, radians,
sin, sinh, sqrt, tan, tanh, trunc, isnull, notnull)
from .reductions import (sum, prod, mean, std, var, any, all, min, max, vnorm,
argmin, argmax,
nansum, nanmean, nanstd, nanvar, nanmin,
nanmax, nanargmin, nanargmax)
from .percentile import percentile
with ignoring(ImportError):
from .reductions import nanprod
from . import random, linalg, ghost
from .wrap import ones, zeros, empty
from ..context import set_options
|
Add "where" to top level dask.array module
|
Add "where" to top level dask.array module
|
Python
|
bsd-3-clause
|
vikhyat/dask,minrk/dask,esc/dask,gameduell/dask,ssanderson/dask,vikhyat/dask,jayhetee/dask,mrocklin/dask,wiso/dask,dask/dask,ContinuumIO/dask,mrocklin/dask,jcrist/dask,jayhetee/dask,blaze/dask,marianotepper/dask,ContinuumIO/dask,jakirkham/dask,pombredanne/dask,chrisbarber/dask,wiso/dask,PhE/dask,freeman-lab/dask,jcrist/dask,cpcloud/dask,pombredanne/dask,PhE/dask,ssanderson/dask,mraspaud/dask,clarkfitzg/dask,marianotepper/dask,hainm/dask,mikegraham/dask,esc/dask,minrk/dask,simudream/dask,dask/dask,jakirkham/dask,cowlicks/dask,simudream/dask,mraspaud/dask,blaze/dask,clarkfitzg/dask,hainm/dask,freeman-lab/dask
|
from __future__ import absolute_import, division, print_function
from ..utils import ignoring
from .core import (Array, stack, concatenate, tensordot, transpose, from_array,
choose, coarsen, constant, fromfunction, compute, unique, store)
from .core import (arccos, arcsin, arctan, arctanh, arccosh, arcsinh, arctan2,
ceil, copysign, cos, cosh, degrees, exp, expm1, fabs, floor, fmod,
frexp, hypot, isinf, isnan, ldexp, log, log10, log1p, modf, radians,
sin, sinh, sqrt, tan, tanh, trunc, isnull, notnull)
from .reductions import (sum, prod, mean, std, var, any, all, min, max, vnorm,
argmin, argmax,
nansum, nanmean, nanstd, nanvar, nanmin,
nanmax, nanargmin, nanargmax)
from .percentile import percentile
with ignoring(ImportError):
from .reductions import nanprod
from . import random, linalg, ghost
from .wrap import ones, zeros, empty
from ..context import set_options
Add "where" to top level dask.array module
|
from __future__ import absolute_import, division, print_function
from ..utils import ignoring
from .core import (Array, stack, concatenate, tensordot, transpose, from_array,
choose, where, coarsen, constant, fromfunction, compute, unique, store)
from .core import (arccos, arcsin, arctan, arctanh, arccosh, arcsinh, arctan2,
ceil, copysign, cos, cosh, degrees, exp, expm1, fabs, floor, fmod,
frexp, hypot, isinf, isnan, ldexp, log, log10, log1p, modf, radians,
sin, sinh, sqrt, tan, tanh, trunc, isnull, notnull)
from .reductions import (sum, prod, mean, std, var, any, all, min, max, vnorm,
argmin, argmax,
nansum, nanmean, nanstd, nanvar, nanmin,
nanmax, nanargmin, nanargmax)
from .percentile import percentile
with ignoring(ImportError):
from .reductions import nanprod
from . import random, linalg, ghost
from .wrap import ones, zeros, empty
from ..context import set_options
|
<commit_before>from __future__ import absolute_import, division, print_function
from ..utils import ignoring
from .core import (Array, stack, concatenate, tensordot, transpose, from_array,
choose, coarsen, constant, fromfunction, compute, unique, store)
from .core import (arccos, arcsin, arctan, arctanh, arccosh, arcsinh, arctan2,
ceil, copysign, cos, cosh, degrees, exp, expm1, fabs, floor, fmod,
frexp, hypot, isinf, isnan, ldexp, log, log10, log1p, modf, radians,
sin, sinh, sqrt, tan, tanh, trunc, isnull, notnull)
from .reductions import (sum, prod, mean, std, var, any, all, min, max, vnorm,
argmin, argmax,
nansum, nanmean, nanstd, nanvar, nanmin,
nanmax, nanargmin, nanargmax)
from .percentile import percentile
with ignoring(ImportError):
from .reductions import nanprod
from . import random, linalg, ghost
from .wrap import ones, zeros, empty
from ..context import set_options
<commit_msg>Add "where" to top level dask.array module<commit_after>
|
from __future__ import absolute_import, division, print_function
from ..utils import ignoring
from .core import (Array, stack, concatenate, tensordot, transpose, from_array,
choose, where, coarsen, constant, fromfunction, compute, unique, store)
from .core import (arccos, arcsin, arctan, arctanh, arccosh, arcsinh, arctan2,
ceil, copysign, cos, cosh, degrees, exp, expm1, fabs, floor, fmod,
frexp, hypot, isinf, isnan, ldexp, log, log10, log1p, modf, radians,
sin, sinh, sqrt, tan, tanh, trunc, isnull, notnull)
from .reductions import (sum, prod, mean, std, var, any, all, min, max, vnorm,
argmin, argmax,
nansum, nanmean, nanstd, nanvar, nanmin,
nanmax, nanargmin, nanargmax)
from .percentile import percentile
with ignoring(ImportError):
from .reductions import nanprod
from . import random, linalg, ghost
from .wrap import ones, zeros, empty
from ..context import set_options
|
from __future__ import absolute_import, division, print_function
from ..utils import ignoring
from .core import (Array, stack, concatenate, tensordot, transpose, from_array,
choose, coarsen, constant, fromfunction, compute, unique, store)
from .core import (arccos, arcsin, arctan, arctanh, arccosh, arcsinh, arctan2,
ceil, copysign, cos, cosh, degrees, exp, expm1, fabs, floor, fmod,
frexp, hypot, isinf, isnan, ldexp, log, log10, log1p, modf, radians,
sin, sinh, sqrt, tan, tanh, trunc, isnull, notnull)
from .reductions import (sum, prod, mean, std, var, any, all, min, max, vnorm,
argmin, argmax,
nansum, nanmean, nanstd, nanvar, nanmin,
nanmax, nanargmin, nanargmax)
from .percentile import percentile
with ignoring(ImportError):
from .reductions import nanprod
from . import random, linalg, ghost
from .wrap import ones, zeros, empty
from ..context import set_options
Add "where" to top level dask.array modulefrom __future__ import absolute_import, division, print_function
from ..utils import ignoring
from .core import (Array, stack, concatenate, tensordot, transpose, from_array,
choose, where, coarsen, constant, fromfunction, compute, unique, store)
from .core import (arccos, arcsin, arctan, arctanh, arccosh, arcsinh, arctan2,
ceil, copysign, cos, cosh, degrees, exp, expm1, fabs, floor, fmod,
frexp, hypot, isinf, isnan, ldexp, log, log10, log1p, modf, radians,
sin, sinh, sqrt, tan, tanh, trunc, isnull, notnull)
from .reductions import (sum, prod, mean, std, var, any, all, min, max, vnorm,
argmin, argmax,
nansum, nanmean, nanstd, nanvar, nanmin,
nanmax, nanargmin, nanargmax)
from .percentile import percentile
with ignoring(ImportError):
from .reductions import nanprod
from . import random, linalg, ghost
from .wrap import ones, zeros, empty
from ..context import set_options
|
<commit_before>from __future__ import absolute_import, division, print_function
from ..utils import ignoring
from .core import (Array, stack, concatenate, tensordot, transpose, from_array,
choose, coarsen, constant, fromfunction, compute, unique, store)
from .core import (arccos, arcsin, arctan, arctanh, arccosh, arcsinh, arctan2,
ceil, copysign, cos, cosh, degrees, exp, expm1, fabs, floor, fmod,
frexp, hypot, isinf, isnan, ldexp, log, log10, log1p, modf, radians,
sin, sinh, sqrt, tan, tanh, trunc, isnull, notnull)
from .reductions import (sum, prod, mean, std, var, any, all, min, max, vnorm,
argmin, argmax,
nansum, nanmean, nanstd, nanvar, nanmin,
nanmax, nanargmin, nanargmax)
from .percentile import percentile
with ignoring(ImportError):
from .reductions import nanprod
from . import random, linalg, ghost
from .wrap import ones, zeros, empty
from ..context import set_options
<commit_msg>Add "where" to top level dask.array module<commit_after>from __future__ import absolute_import, division, print_function
from ..utils import ignoring
from .core import (Array, stack, concatenate, tensordot, transpose, from_array,
choose, where, coarsen, constant, fromfunction, compute, unique, store)
from .core import (arccos, arcsin, arctan, arctanh, arccosh, arcsinh, arctan2,
ceil, copysign, cos, cosh, degrees, exp, expm1, fabs, floor, fmod,
frexp, hypot, isinf, isnan, ldexp, log, log10, log1p, modf, radians,
sin, sinh, sqrt, tan, tanh, trunc, isnull, notnull)
from .reductions import (sum, prod, mean, std, var, any, all, min, max, vnorm,
argmin, argmax,
nansum, nanmean, nanstd, nanvar, nanmin,
nanmax, nanargmin, nanargmax)
from .percentile import percentile
with ignoring(ImportError):
from .reductions import nanprod
from . import random, linalg, ghost
from .wrap import ones, zeros, empty
from ..context import set_options
|
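The change above only re-exports where from .core, but that is what makes it reachable through the public namespace. A minimal sketch of the call it enables; the array values are illustrative, not taken from the dask test suite:

import numpy as np
import dask.array as da

# Build a small chunked array; da.where(cond, a, b) then selects
# elementwise between a and b, lazily, chunk by chunk.
x = da.from_array(np.array([[1, -2], [-3, 4]]), chunks=(1, 2))
result = da.where(x > 0, x, 0)
print(result.compute())  # [[1 0]
                         #  [0 4]]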
eef03e6c4eb6d80dd04ccbbea6b530d5679a8142
|
sydent/http/servlets/pubkeyservlets.py
|
sydent/http/servlets/pubkeyservlets.py
|
# -*- coding: utf-8 -*-
# Copyright 2014 matrix.org
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.web.resource import Resource
import json
import nacl.encoding
class Ed25519Servlet(Resource):
isLeaf = True
def __init__(self, syd):
self.sydent = syd
def render_GET(self, request):
pubKey = self.sydent.signers.ed25519.signing_key.verify_key
pubKeyHex = pubKey.encode(encoder=nacl.encoding.HexEncoder)
return json.dumps({'public_key':pubKeyHex})
|
# -*- coding: utf-8 -*-
# Copyright 2014 matrix.org
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.web.resource import Resource
import json
import nacl.encoding
class Ed25519Servlet(Resource):
isLeaf = True
def __init__(self, syd):
self.sydent = syd
def render_GET(self, request):
pubKey = self.sydent.keyring.ed25519.verify_key
pubKeyHex = pubKey.encode(encoder=nacl.encoding.HexEncoder)
return json.dumps({'public_key':pubKeyHex})
|
Update pubkey servlet to s/signers/keyring
|
Update pubkey servlet to s/signers/keyring
|
Python
|
apache-2.0
|
matrix-org/sydent,matrix-org/sydent,matrix-org/sydent
|
# -*- coding: utf-8 -*-
# Copyright 2014 matrix.org
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.web.resource import Resource
import json
import nacl.encoding
class Ed25519Servlet(Resource):
isLeaf = True
def __init__(self, syd):
self.sydent = syd
def render_GET(self, request):
pubKey = self.sydent.signers.ed25519.signing_key.verify_key
pubKeyHex = pubKey.encode(encoder=nacl.encoding.HexEncoder)
return json.dumps({'public_key':pubKeyHex})
Update pubkey servlet to s/signers/keyring
|
# -*- coding: utf-8 -*-
# Copyright 2014 matrix.org
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.web.resource import Resource
import json
import nacl.encoding
class Ed25519Servlet(Resource):
isLeaf = True
def __init__(self, syd):
self.sydent = syd
def render_GET(self, request):
pubKey = self.sydent.keyring.ed25519.verify_key
pubKeyHex = pubKey.encode(encoder=nacl.encoding.HexEncoder)
return json.dumps({'public_key':pubKeyHex})
|
<commit_before># -*- coding: utf-8 -*-
# Copyright 2014 matrix.org
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.web.resource import Resource
import json
import nacl.encoding
class Ed25519Servlet(Resource):
isLeaf = True
def __init__(self, syd):
self.sydent = syd
def render_GET(self, request):
pubKey = self.sydent.signers.ed25519.signing_key.verify_key
pubKeyHex = pubKey.encode(encoder=nacl.encoding.HexEncoder)
return json.dumps({'public_key':pubKeyHex})
<commit_msg>Update pubkey servlet to s/signers/keyring<commit_after>
|
# -*- coding: utf-8 -*-
# Copyright 2014 matrix.org
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.web.resource import Resource
import json
import nacl.encoding
class Ed25519Servlet(Resource):
isLeaf = True
def __init__(self, syd):
self.sydent = syd
def render_GET(self, request):
pubKey = self.sydent.keyring.ed25519.verify_key
pubKeyHex = pubKey.encode(encoder=nacl.encoding.HexEncoder)
return json.dumps({'public_key':pubKeyHex})
|
# -*- coding: utf-8 -*-
# Copyright 2014 matrix.org
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.web.resource import Resource
import json
import nacl.encoding
class Ed25519Servlet(Resource):
isLeaf = True
def __init__(self, syd):
self.sydent = syd
def render_GET(self, request):
pubKey = self.sydent.signers.ed25519.signing_key.verify_key
pubKeyHex = pubKey.encode(encoder=nacl.encoding.HexEncoder)
return json.dumps({'public_key':pubKeyHex})
Update pubkey servlet to s/signers/keyring# -*- coding: utf-8 -*-
# Copyright 2014 matrix.org
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.web.resource import Resource
import json
import nacl.encoding
class Ed25519Servlet(Resource):
isLeaf = True
def __init__(self, syd):
self.sydent = syd
def render_GET(self, request):
pubKey = self.sydent.keyring.ed25519.verify_key
pubKeyHex = pubKey.encode(encoder=nacl.encoding.HexEncoder)
return json.dumps({'public_key':pubKeyHex})
|
<commit_before># -*- coding: utf-8 -*-
# Copyright 2014 matrix.org
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.web.resource import Resource
import json
import nacl.encoding
class Ed25519Servlet(Resource):
isLeaf = True
def __init__(self, syd):
self.sydent = syd
def render_GET(self, request):
pubKey = self.sydent.signers.ed25519.signing_key.verify_key
pubKeyHex = pubKey.encode(encoder=nacl.encoding.HexEncoder)
return json.dumps({'public_key':pubKeyHex})
<commit_msg>Update pubkey servlet to s/signers/keyring<commit_after># -*- coding: utf-8 -*-
# Copyright 2014 matrix.org
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.web.resource import Resource
import json
import nacl.encoding
class Ed25519Servlet(Resource):
isLeaf = True
def __init__(self, syd):
self.sydent = syd
def render_GET(self, request):
pubKey = self.sydent.keyring.ed25519.verify_key
pubKeyHex = pubKey.encode(encoder=nacl.encoding.HexEncoder)
return json.dumps({'public_key':pubKeyHex})
|
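The refactor above swaps self.sydent.signers.ed25519.signing_key.verify_key for the shorter self.sydent.keyring.ed25519.verify_key; either way the servlet serves the hex-encoded public half of an ed25519 signing key. A hedged sketch of that key handling with PyNaCl, using a freshly generated key rather than sydent's configured one:

import nacl.signing
import nacl.encoding

signing_key = nacl.signing.SigningKey.generate()
verify_key = signing_key.verify_key  # public half, as held on the keyring
pub_hex = verify_key.encode(encoder=nacl.encoding.HexEncoder)
print(pub_hex)  # hex bytes, the value placed in the servlet's JSON body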
e6f19cc58f32b855fc1f71086dac0ad56b697ed3
|
opps/articles/urls.py
|
opps/articles/urls.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
from django.conf.urls import patterns, url
from .views import OppsDetail, OppsList, Search
urlpatterns = patterns(
'',
url(r'^$', OppsList.as_view(), name='home'),
url(r'^search/', Search(), name='search'),
url(r'^(?P<channel__long_slug>[\w//-]+)/(?P<slug>[\w-]+)$',
OppsDetail.as_view(), name='open'),
url(r'^(?P<channel__long_slug>[\w\b//-]+)/$', OppsList.as_view(),
name='channel'),
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, url
from django.views.decorators.cache import cache_page
from .views import OppsDetail, OppsList, Search
urlpatterns = patterns(
'',
url(r'^$', cache_page(60 * 2)(OppsList.as_view()), name='home'),
url(r'^search/', Search(), name='search'),
url(r'^(?P<channel__long_slug>[\w//-]+)/(?P<slug>[\w-]+)$',
cache_page(60 * 15)(OppsDetail.as_view()), name='open'),
url(r'^(?P<channel__long_slug>[\w\b//-]+)/$',
cache_page(60 * 2)(OppsList.as_view()), name='channel'),
)
|
Add cache on article page (via url)
|
Add cache on article page (via url)
|
Python
|
mit
|
opps/opps,YACOWS/opps,YACOWS/opps,williamroot/opps,opps/opps,jeanmask/opps,jeanmask/opps,jeanmask/opps,opps/opps,jeanmask/opps,YACOWS/opps,YACOWS/opps,williamroot/opps,opps/opps,williamroot/opps,williamroot/opps
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
from django.conf.urls import patterns, url
from .views import OppsDetail, OppsList, Search
urlpatterns = patterns(
'',
url(r'^$', OppsList.as_view(), name='home'),
url(r'^search/', Search(), name='search'),
url(r'^(?P<channel__long_slug>[\w//-]+)/(?P<slug>[\w-]+)$',
OppsDetail.as_view(), name='open'),
url(r'^(?P<channel__long_slug>[\w\b//-]+)/$', OppsList.as_view(),
name='channel'),
)
Add cache on article page (via url)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, url
from django.views.decorators.cache import cache_page
from .views import OppsDetail, OppsList, Search
urlpatterns = patterns(
'',
url(r'^$', cache_page(60 * 2)(OppsList.as_view()), name='home'),
url(r'^search/', Search(), name='search'),
url(r'^(?P<channel__long_slug>[\w//-]+)/(?P<slug>[\w-]+)$',
cache_page(60 * 15)(OppsDetail.as_view()), name='open'),
url(r'^(?P<channel__long_slug>[\w\b//-]+)/$',
cache_page(60 * 2)(OppsList.as_view()), name='channel'),
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
from django.conf.urls import patterns, url
from .views import OppsDetail, OppsList, Search
urlpatterns = patterns(
'',
url(r'^$', OppsList.as_view(), name='home'),
url(r'^search/', Search(), name='search'),
url(r'^(?P<channel__long_slug>[\w//-]+)/(?P<slug>[\w-]+)$',
OppsDetail.as_view(), name='open'),
url(r'^(?P<channel__long_slug>[\w\b//-]+)/$', OppsList.as_view(),
name='channel'),
)
<commit_msg>Add cache on article page (via url)<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, url
from django.views.decorators.cache import cache_page
from .views import OppsDetail, OppsList, Search
urlpatterns = patterns(
'',
url(r'^$', cache_page(60 * 2)(OppsList.as_view()), name='home'),
url(r'^search/', Search(), name='search'),
url(r'^(?P<channel__long_slug>[\w//-]+)/(?P<slug>[\w-]+)$',
cache_page(60 * 15)(OppsDetail.as_view()), name='open'),
url(r'^(?P<channel__long_slug>[\w\b//-]+)/$',
cache_page(60 * 2)(OppsList.as_view()), name='channel'),
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
from django.conf.urls import patterns, url
from .views import OppsDetail, OppsList, Search
urlpatterns = patterns(
'',
url(r'^$', OppsList.as_view(), name='home'),
url(r'^search/', Search(), name='search'),
url(r'^(?P<channel__long_slug>[\w//-]+)/(?P<slug>[\w-]+)$',
OppsDetail.as_view(), name='open'),
url(r'^(?P<channel__long_slug>[\w\b//-]+)/$', OppsList.as_view(),
name='channel'),
)
Add cache on article page (via url)#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, url
from django.views.decorators.cache import cache_page
from .views import OppsDetail, OppsList, Search
urlpatterns = patterns(
'',
url(r'^$', cache_page(60 * 2)(OppsList.as_view()), name='home'),
url(r'^search/', Search(), name='search'),
url(r'^(?P<channel__long_slug>[\w//-]+)/(?P<slug>[\w-]+)$',
cache_page(60 * 15)(OppsDetail.as_view()), name='open'),
url(r'^(?P<channel__long_slug>[\w\b//-]+)/$',
cache_page(60 * 2)(OppsList.as_view()), name='channel'),
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
from django.conf.urls import patterns, url
from .views import OppsDetail, OppsList, Search
urlpatterns = patterns(
'',
url(r'^$', OppsList.as_view(), name='home'),
url(r'^search/', Search(), name='search'),
url(r'^(?P<channel__long_slug>[\w//-]+)/(?P<slug>[\w-]+)$',
OppsDetail.as_view(), name='open'),
url(r'^(?P<channel__long_slug>[\w\b//-]+)/$', OppsList.as_view(),
name='channel'),
)
<commit_msg>Add cache on article page (via url)<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, url
from django.views.decorators.cache import cache_page
from .views import OppsDetail, OppsList, Search
urlpatterns = patterns(
'',
url(r'^$', cache_page(60 * 2)(OppsList.as_view()), name='home'),
url(r'^search/', Search(), name='search'),
url(r'^(?P<channel__long_slug>[\w//-]+)/(?P<slug>[\w-]+)$',
cache_page(60 * 15)(OppsDetail.as_view()), name='open'),
url(r'^(?P<channel__long_slug>[\w\b//-]+)/$',
cache_page(60 * 2)(OppsList.as_view()), name='channel'),
)
|
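The new URLconf wraps each view callable in Django's cache_page directly in the url() call, which lets the timeout differ per route (2 minutes for list pages, 15 minutes for article detail). A self-contained sketch of the same pattern using the modern path() API; the view and template names are placeholders, not part of the opps codebase:

from django.urls import path
from django.views.decorators.cache import cache_page
from django.views.generic import TemplateView

home = TemplateView.as_view(template_name='home.html')

urlpatterns = [
    # cache_page takes a timeout in seconds and returns a wrapped view;
    # within that window the cached response is served without re-running
    # the view.
    path('', cache_page(60 * 2)(home), name='home'),
]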
cfa22fada64882a20f2daec4c8b83488920e8c3a
|
models/log_entry.py
|
models/log_entry.py
|
from database import db
from conversions import datetime_from_str
class LogEntry(db.Model):
id = db.Column(db.Integer, primary_key=True)
timestamp = db.Column(db.DateTime, index=True)
server = db.Column(db.String(100), index=True)
log_name = db.Column(db.String(760), index=True)
message = db.Column(db.Text(), nullable=True)
def __init__(self, timestamp, server, log_name, message):
self.timestamp = datetime_from_str(timestamp)
self.server = server
self.log_name = log_name
self.message = message
def to_dict(self):
return {
'timestamp': self.timestamp,
'server': self.server,
'log_name': self.log_name,
'message': self.message,
}
|
from database import db
from conversions import datetime_from_str
class LogEntry(db.Model):
id = db.Column(db.Integer, primary_key=True)
timestamp = db.Column(db.DateTime, index=True)
server = db.Column(db.String(100), index=True)
log_name = db.Column(db.String(255), index=True)
message = db.Column(db.Text(), nullable=True)
def __init__(self, timestamp, server, log_name, message):
self.timestamp = datetime_from_str(timestamp)
self.server = server
self.log_name = log_name
self.message = message
def to_dict(self):
return {
'timestamp': self.timestamp,
'server': self.server,
'log_name': self.log_name,
'message': self.message,
}
|
Reduce the size of log_name so it fits within mysql's limit.
|
Reduce the size of log_name so it fits within mysql's limit.
|
Python
|
agpl-3.0
|
izrik/sawmill,izrik/sawmill,izrik/sawmill
|
from database import db
from conversions import datetime_from_str
class LogEntry(db.Model):
id = db.Column(db.Integer, primary_key=True)
timestamp = db.Column(db.DateTime, index=True)
server = db.Column(db.String(100), index=True)
log_name = db.Column(db.String(760), index=True)
message = db.Column(db.Text(), nullable=True)
def __init__(self, timestamp, server, log_name, message):
self.timestamp = datetime_from_str(timestamp)
self.server = server
self.log_name = log_name
self.message = message
def to_dict(self):
return {
'timestamp': self.timestamp,
'server': self.server,
'log_name': self.log_name,
'message': self.message,
}
Reduce the size of log_name so it fits within mysql's limit.
|
from database import db
from conversions import datetime_from_str
class LogEntry(db.Model):
id = db.Column(db.Integer, primary_key=True)
timestamp = db.Column(db.DateTime, index=True)
server = db.Column(db.String(100), index=True)
log_name = db.Column(db.String(255), index=True)
message = db.Column(db.Text(), nullable=True)
def __init__(self, timestamp, server, log_name, message):
self.timestamp = datetime_from_str(timestamp)
self.server = server
self.log_name = log_name
self.message = message
def to_dict(self):
return {
'timestamp': self.timestamp,
'server': self.server,
'log_name': self.log_name,
'message': self.message,
}
|
<commit_before>
from database import db
from conversions import datetime_from_str
class LogEntry(db.Model):
id = db.Column(db.Integer, primary_key=True)
timestamp = db.Column(db.DateTime, index=True)
server = db.Column(db.String(100), index=True)
log_name = db.Column(db.String(760), index=True)
message = db.Column(db.Text(), nullable=True)
def __init__(self, timestamp, server, log_name, message):
self.timestamp = datetime_from_str(timestamp)
self.server = server
self.log_name = log_name
self.message = message
def to_dict(self):
return {
'timestamp': self.timestamp,
'server': self.server,
'log_name': self.log_name,
'message': self.message,
}
<commit_msg>Reduce the size of log_name so it fits within mysql's limit.<commit_after>
|
from database import db
from conversions import datetime_from_str
class LogEntry(db.Model):
id = db.Column(db.Integer, primary_key=True)
timestamp = db.Column(db.DateTime, index=True)
server = db.Column(db.String(100), index=True)
log_name = db.Column(db.String(255), index=True)
message = db.Column(db.Text(), nullable=True)
def __init__(self, timestamp, server, log_name, message):
self.timestamp = datetime_from_str(timestamp)
self.server = server
self.log_name = log_name
self.message = message
def to_dict(self):
return {
'timestamp': self.timestamp,
'server': self.server,
'log_name': self.log_name,
'message': self.message,
}
|
from database import db
from conversions import datetime_from_str
class LogEntry(db.Model):
id = db.Column(db.Integer, primary_key=True)
timestamp = db.Column(db.DateTime, index=True)
server = db.Column(db.String(100), index=True)
log_name = db.Column(db.String(760), index=True)
message = db.Column(db.Text(), nullable=True)
def __init__(self, timestamp, server, log_name, message):
self.timestamp = datetime_from_str(timestamp)
self.server = server
self.log_name = log_name
self.message = message
def to_dict(self):
return {
'timestamp': self.timestamp,
'server': self.server,
'log_name': self.log_name,
'message': self.message,
}
Reduce the size of log_name so it fits within mysql's limit.
from database import db
from conversions import datetime_from_str
class LogEntry(db.Model):
id = db.Column(db.Integer, primary_key=True)
timestamp = db.Column(db.DateTime, index=True)
server = db.Column(db.String(100), index=True)
log_name = db.Column(db.String(255), index=True)
message = db.Column(db.Text(), nullable=True)
def __init__(self, timestamp, server, log_name, message):
self.timestamp = datetime_from_str(timestamp)
self.server = server
self.log_name = log_name
self.message = message
def to_dict(self):
return {
'timestamp': self.timestamp,
'server': self.server,
'log_name': self.log_name,
'message': self.message,
}
|
<commit_before>
from database import db
from conversions import datetime_from_str
class LogEntry(db.Model):
id = db.Column(db.Integer, primary_key=True)
timestamp = db.Column(db.DateTime, index=True)
server = db.Column(db.String(100), index=True)
log_name = db.Column(db.String(760), index=True)
message = db.Column(db.Text(), nullable=True)
def __init__(self, timestamp, server, log_name, message):
self.timestamp = datetime_from_str(timestamp)
self.server = server
self.log_name = log_name
self.message = message
def to_dict(self):
return {
'timestamp': self.timestamp,
'server': self.server,
'log_name': self.log_name,
'message': self.message,
}
<commit_msg>Reduce the size of log_name so it fits within mysql's limit.<commit_after>
from database import db
from conversions import datetime_from_str
class LogEntry(db.Model):
id = db.Column(db.Integer, primary_key=True)
timestamp = db.Column(db.DateTime, index=True)
server = db.Column(db.String(100), index=True)
log_name = db.Column(db.String(255), index=True)
message = db.Column(db.Text(), nullable=True)
def __init__(self, timestamp, server, log_name, message):
self.timestamp = datetime_from_str(timestamp)
self.server = server
self.log_name = log_name
self.message = message
def to_dict(self):
return {
'timestamp': self.timestamp,
'server': self.server,
'log_name': self.log_name,
'message': self.message,
}
|
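The shrink from 760 to 255 characters is driven by InnoDB's default 767-byte cap on index key prefixes: under MySQL's 3-byte utf8 (utf8mb3), an indexed VARCHAR(760) can need up to 2280 bytes, while VARCHAR(255) tops out at 765 bytes and fits. A back-of-the-envelope check; the constants are the commonly cited InnoDB defaults, not values read from a live server:

BYTES_PER_CHAR = 3    # MySQL's 3-byte utf8 (utf8mb3)
INDEX_LIMIT = 767     # InnoDB's default index key prefix limit, in bytes

for length in (760, 255):
    worst_case = length * BYTES_PER_CHAR
    print(length, worst_case, 'fits' if worst_case <= INDEX_LIMIT else 'too long')
# 760 -> 2280 bytes, too long; 255 -> 765 bytes, fits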
fb019fa4c277cf988d479333c6cba08a637f948a
|
flexget/plugins/output/dump_config.py
|
flexget/plugins/output/dump_config.py
|
from argparse import SUPPRESS
from loguru import logger
from rich.syntax import Syntax
from flexget import options, plugin
from flexget.event import event
from flexget.terminal import console
logger = logger.bind(name='dump_config')
class OutputDumpConfig:
"""
Dumps task config in STDOUT in yaml at exit or abort event.
"""
@plugin.priority(plugin.PRIORITY_LAST)
def on_task_start(self, task, config):
if task.options.dump_config:
import yaml
console.rule(f'config from task: {task.name}')
syntax = Syntax(yaml.safe_dump(task.config).strip(), 'yaml')
console(syntax)
console.rule()
task.abort(silent=True)
if task.options.dump_config_python:
console(task.config)
task.abort(silent=True)
@event('plugin.register')
def register_plugin():
plugin.register(OutputDumpConfig, 'dump_config', debug=True, builtin=True, api_ver=2)
@event('options.register')
def register_parser_arguments():
exec_parser = options.get_parser('execute')
exec_parser.add_argument(
'--dump-config',
action='store_true',
dest='dump_config',
default=False,
help='display the config of each feed after template merging/config generation occurs',
)
exec_parser.add_argument(
'--dump-config-python',
action='store_true',
dest='dump_config_python',
default=False,
help=SUPPRESS,
)
|
from argparse import SUPPRESS
import yaml
from loguru import logger
from rich.syntax import Syntax
from flexget import options, plugin
from flexget.event import event
from flexget.terminal import console
logger = logger.bind(name='dump_config')
class OutputDumpConfig:
"""
Dumps task config in STDOUT in yaml at exit or abort event.
"""
@plugin.priority(plugin.PRIORITY_LAST)
def on_task_start(self, task, config):
if task.options.dump_config:
console.rule(f'config from task: {task.name}')
syntax = Syntax(yaml.safe_dump(task.config).strip(), 'yaml+jinja', theme='native')
console(syntax)
console.rule()
task.abort(silent=True)
if task.options.dump_config_python:
console(task.config)
task.abort(silent=True)
@event('plugin.register')
def register_plugin():
plugin.register(OutputDumpConfig, 'dump_config', debug=True, builtin=True, api_ver=2)
@event('options.register')
def register_parser_arguments():
exec_parser = options.get_parser('execute')
exec_parser.add_argument(
'--dump-config',
action='store_true',
dest='dump_config',
default=False,
help='display the config of each feed after template merging/config generation occurs',
)
exec_parser.add_argument(
'--dump-config-python',
action='store_true',
dest='dump_config_python',
default=False,
help=SUPPRESS,
)
|
Adjust --dump-config color theme for better readability Add jinja highlighting to --dump-config
|
Adjust --dump-config color theme for better readability
Add jinja highlighting to --dump-config
|
Python
|
mit
|
crawln45/Flexget,Flexget/Flexget,Flexget/Flexget,Flexget/Flexget,Flexget/Flexget,crawln45/Flexget,crawln45/Flexget,crawln45/Flexget
|
from argparse import SUPPRESS
from loguru import logger
from rich.syntax import Syntax
from flexget import options, plugin
from flexget.event import event
from flexget.terminal import console
logger = logger.bind(name='dump_config')
class OutputDumpConfig:
"""
Dumps task config in STDOUT in yaml at exit or abort event.
"""
@plugin.priority(plugin.PRIORITY_LAST)
def on_task_start(self, task, config):
if task.options.dump_config:
import yaml
console.rule(f'config from task: {task.name}')
syntax = Syntax(yaml.safe_dump(task.config).strip(), 'yaml')
console(syntax)
console.rule()
task.abort(silent=True)
if task.options.dump_config_python:
console(task.config)
task.abort(silent=True)
@event('plugin.register')
def register_plugin():
plugin.register(OutputDumpConfig, 'dump_config', debug=True, builtin=True, api_ver=2)
@event('options.register')
def register_parser_arguments():
exec_parser = options.get_parser('execute')
exec_parser.add_argument(
'--dump-config',
action='store_true',
dest='dump_config',
default=False,
help='display the config of each feed after template merging/config generation occurs',
)
exec_parser.add_argument(
'--dump-config-python',
action='store_true',
dest='dump_config_python',
default=False,
help=SUPPRESS,
)
Adjust --dump-config color theme for better readability
Add jinja highlighting to --dump-config
|
from argparse import SUPPRESS
import yaml
from loguru import logger
from rich.syntax import Syntax
from flexget import options, plugin
from flexget.event import event
from flexget.terminal import console
logger = logger.bind(name='dump_config')
class OutputDumpConfig:
"""
Dumps task config in STDOUT in yaml at exit or abort event.
"""
@plugin.priority(plugin.PRIORITY_LAST)
def on_task_start(self, task, config):
if task.options.dump_config:
console.rule(f'config from task: {task.name}')
syntax = Syntax(yaml.safe_dump(task.config).strip(), 'yaml+jinja', theme='native')
console(syntax)
console.rule()
task.abort(silent=True)
if task.options.dump_config_python:
console(task.config)
task.abort(silent=True)
@event('plugin.register')
def register_plugin():
plugin.register(OutputDumpConfig, 'dump_config', debug=True, builtin=True, api_ver=2)
@event('options.register')
def register_parser_arguments():
exec_parser = options.get_parser('execute')
exec_parser.add_argument(
'--dump-config',
action='store_true',
dest='dump_config',
default=False,
help='display the config of each feed after template merging/config generation occurs',
)
exec_parser.add_argument(
'--dump-config-python',
action='store_true',
dest='dump_config_python',
default=False,
help=SUPPRESS,
)
|
<commit_before>from argparse import SUPPRESS
from loguru import logger
from rich.syntax import Syntax
from flexget import options, plugin
from flexget.event import event
from flexget.terminal import console
logger = logger.bind(name='dump_config')
class OutputDumpConfig:
"""
Dumps task config in STDOUT in yaml at exit or abort event.
"""
@plugin.priority(plugin.PRIORITY_LAST)
def on_task_start(self, task, config):
if task.options.dump_config:
import yaml
console.rule(f'config from task: {task.name}')
syntax = Syntax(yaml.safe_dump(task.config).strip(), 'yaml')
console(syntax)
console.rule()
task.abort(silent=True)
if task.options.dump_config_python:
console(task.config)
task.abort(silent=True)
@event('plugin.register')
def register_plugin():
plugin.register(OutputDumpConfig, 'dump_config', debug=True, builtin=True, api_ver=2)
@event('options.register')
def register_parser_arguments():
exec_parser = options.get_parser('execute')
exec_parser.add_argument(
'--dump-config',
action='store_true',
dest='dump_config',
default=False,
help='display the config of each feed after template merging/config generation occurs',
)
exec_parser.add_argument(
'--dump-config-python',
action='store_true',
dest='dump_config_python',
default=False,
help=SUPPRESS,
)
<commit_msg>Adjust --dump-config color theme for better readability
Add jinja highlighting to --dump-config<commit_after>
|
from argparse import SUPPRESS
import yaml
from loguru import logger
from rich.syntax import Syntax
from flexget import options, plugin
from flexget.event import event
from flexget.terminal import console
logger = logger.bind(name='dump_config')
class OutputDumpConfig:
"""
Dumps task config in STDOUT in yaml at exit or abort event.
"""
@plugin.priority(plugin.PRIORITY_LAST)
def on_task_start(self, task, config):
if task.options.dump_config:
console.rule(f'config from task: {task.name}')
syntax = Syntax(yaml.safe_dump(task.config).strip(), 'yaml+jinja', theme='native')
console(syntax)
console.rule()
task.abort(silent=True)
if task.options.dump_config_python:
console(task.config)
task.abort(silent=True)
@event('plugin.register')
def register_plugin():
plugin.register(OutputDumpConfig, 'dump_config', debug=True, builtin=True, api_ver=2)
@event('options.register')
def register_parser_arguments():
exec_parser = options.get_parser('execute')
exec_parser.add_argument(
'--dump-config',
action='store_true',
dest='dump_config',
default=False,
help='display the config of each feed after template merging/config generation occurs',
)
exec_parser.add_argument(
'--dump-config-python',
action='store_true',
dest='dump_config_python',
default=False,
help=SUPPRESS,
)
|
from argparse import SUPPRESS
from loguru import logger
from rich.syntax import Syntax
from flexget import options, plugin
from flexget.event import event
from flexget.terminal import console
logger = logger.bind(name='dump_config')
class OutputDumpConfig:
"""
Dumps task config in STDOUT in yaml at exit or abort event.
"""
@plugin.priority(plugin.PRIORITY_LAST)
def on_task_start(self, task, config):
if task.options.dump_config:
import yaml
console.rule(f'config from task: {task.name}')
syntax = Syntax(yaml.safe_dump(task.config).strip(), 'yaml')
console(syntax)
console.rule()
task.abort(silent=True)
if task.options.dump_config_python:
console(task.config)
task.abort(silent=True)
@event('plugin.register')
def register_plugin():
plugin.register(OutputDumpConfig, 'dump_config', debug=True, builtin=True, api_ver=2)
@event('options.register')
def register_parser_arguments():
exec_parser = options.get_parser('execute')
exec_parser.add_argument(
'--dump-config',
action='store_true',
dest='dump_config',
default=False,
help='display the config of each feed after template merging/config generation occurs',
)
exec_parser.add_argument(
'--dump-config-python',
action='store_true',
dest='dump_config_python',
default=False,
help=SUPPRESS,
)
Adjust --dump-config color theme for better readability
Add jinja highlighting to --dump-configfrom argparse import SUPPRESS
import yaml
from loguru import logger
from rich.syntax import Syntax
from flexget import options, plugin
from flexget.event import event
from flexget.terminal import console
logger = logger.bind(name='dump_config')
class OutputDumpConfig:
"""
Dumps task config in STDOUT in yaml at exit or abort event.
"""
@plugin.priority(plugin.PRIORITY_LAST)
def on_task_start(self, task, config):
if task.options.dump_config:
console.rule(f'config from task: {task.name}')
syntax = Syntax(yaml.safe_dump(task.config).strip(), 'yaml+jinja', theme='native')
console(syntax)
console.rule()
task.abort(silent=True)
if task.options.dump_config_python:
console(task.config)
task.abort(silent=True)
@event('plugin.register')
def register_plugin():
plugin.register(OutputDumpConfig, 'dump_config', debug=True, builtin=True, api_ver=2)
@event('options.register')
def register_parser_arguments():
exec_parser = options.get_parser('execute')
exec_parser.add_argument(
'--dump-config',
action='store_true',
dest='dump_config',
default=False,
help='display the config of each feed after template merging/config generation occurs',
)
exec_parser.add_argument(
'--dump-config-python',
action='store_true',
dest='dump_config_python',
default=False,
help=SUPPRESS,
)
|
<commit_before>from argparse import SUPPRESS
from loguru import logger
from rich.syntax import Syntax
from flexget import options, plugin
from flexget.event import event
from flexget.terminal import console
logger = logger.bind(name='dump_config')
class OutputDumpConfig:
"""
Dumps task config in STDOUT in yaml at exit or abort event.
"""
@plugin.priority(plugin.PRIORITY_LAST)
def on_task_start(self, task, config):
if task.options.dump_config:
import yaml
console.rule(f'config from task: {task.name}')
syntax = Syntax(yaml.safe_dump(task.config).strip(), 'yaml')
console(syntax)
console.rule()
task.abort(silent=True)
if task.options.dump_config_python:
console(task.config)
task.abort(silent=True)
@event('plugin.register')
def register_plugin():
plugin.register(OutputDumpConfig, 'dump_config', debug=True, builtin=True, api_ver=2)
@event('options.register')
def register_parser_arguments():
exec_parser = options.get_parser('execute')
exec_parser.add_argument(
'--dump-config',
action='store_true',
dest='dump_config',
default=False,
help='display the config of each feed after template merging/config generation occurs',
)
exec_parser.add_argument(
'--dump-config-python',
action='store_true',
dest='dump_config_python',
default=False,
help=SUPPRESS,
)
<commit_msg>Adjust --dump-config color theme for better readability
Add jinja highlighting to --dump-config<commit_after>from argparse import SUPPRESS
import yaml
from loguru import logger
from rich.syntax import Syntax
from flexget import options, plugin
from flexget.event import event
from flexget.terminal import console
logger = logger.bind(name='dump_config')
class OutputDumpConfig:
"""
Dumps task config in STDOUT in yaml at exit or abort event.
"""
@plugin.priority(plugin.PRIORITY_LAST)
def on_task_start(self, task, config):
if task.options.dump_config:
console.rule(f'config from task: {task.name}')
syntax = Syntax(yaml.safe_dump(task.config).strip(), 'yaml+jinja', theme='native')
console(syntax)
console.rule()
task.abort(silent=True)
if task.options.dump_config_python:
console(task.config)
task.abort(silent=True)
@event('plugin.register')
def register_plugin():
plugin.register(OutputDumpConfig, 'dump_config', debug=True, builtin=True, api_ver=2)
@event('options.register')
def register_parser_arguments():
exec_parser = options.get_parser('execute')
exec_parser.add_argument(
'--dump-config',
action='store_true',
dest='dump_config',
default=False,
help='display the config of each feed after template merging/config generation occurs',
)
exec_parser.add_argument(
'--dump-config-python',
action='store_true',
dest='dump_config_python',
default=False,
help=SUPPRESS,
)
|
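Two details changed above: the yaml import moved to module level, and the Syntax call gained the yaml+jinja lexer plus the native theme. A standalone sketch of that rendering using plain rich (flexget's console is its own terminal wrapper); the config dict is invented for illustration:

import yaml
from rich.console import Console
from rich.syntax import Syntax

config = {'mock': [{'title': '{{ series_name }}'}]}  # jinja inside yaml
console = Console()
console.rule('config from task: example')
# 'yaml+jinja' highlights the YAML structure and the embedded jinja tags;
# the 'native' theme is the readability tweak named in the commit message.
console.print(Syntax(yaml.safe_dump(config).strip(), 'yaml+jinja', theme='native'))
console.rule()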
fbc4247fc7b7d36286c3f25e6ae71dfb7ebb2d39
|
example/__init__.py
|
example/__init__.py
|
from pupa.scrape import Jurisdiction
from .people import PersonScraper
class Example(Jurisdiction):
jurisdiction_id = 'ocd-jurisdiction/country:us/state:ex/place:example'
def get_metadata(self):
return {
'name': 'Example Legislature',
'url': 'http://example.com',
'terms': [{
'name': '2013-2014',
'sessions': ['2013'],
'start_year': 2013,
'end_year': 2014
}],
'provides': ['people'],
'parties': [
{'name': 'Independent' },
{'name': 'Green' },
{'name': 'Bull-Moose'}
],
'session_details': {
'2013': {'_scraped_name': '2013'}
},
'feature_flags': [],
}
def get_scraper(self, term, session, scraper_type):
if scraper_type == 'people':
return PersonScraper
def scrape_session_list(self):
return ['2013']
|
from pupa.scrape import Jurisdiction
from .people import PersonScraper
class Example(Jurisdiction):
jurisdiction_id = 'ocd-jurisdiction/country:us/state:ex/place:example'
name = 'Example Legislature'
url = 'http://example.com'
terms = [{
'name': '2013-2014',
'sessions': ['2013'],
'start_year': 2013,
'end_year': 2014
}]
    provides = ['people']
parties = [
{'name': 'Independent' },
{'name': 'Green' },
{'name': 'Bull-Moose'}
]
session_details = {
'2013': {'_scraped_name': '2013'}
}
def get_scraper(self, term, session, scraper_type):
if scraper_type == 'people':
return PersonScraper
def scrape_session_list(self):
return ['2013']
|
Use new-style metadata in example
|
Use new-style metadata in example
|
Python
|
bsd-3-clause
|
rshorey/pupa,rshorey/pupa,mileswwatkins/pupa,opencivicdata/pupa,datamade/pupa,mileswwatkins/pupa,influence-usa/pupa,influence-usa/pupa,datamade/pupa,opencivicdata/pupa
|
from pupa.scrape import Jurisdiction
from .people import PersonScraper
class Example(Jurisdiction):
jurisdiction_id = 'ocd-jurisdiction/country:us/state:ex/place:example'
def get_metadata(self):
return {
'name': 'Example Legislature',
'url': 'http://example.com',
'terms': [{
'name': '2013-2014',
'sessions': ['2013'],
'start_year': 2013,
'end_year': 2014
}],
'provides': ['people'],
'parties': [
{'name': 'Independent' },
{'name': 'Green' },
{'name': 'Bull-Moose'}
],
'session_details': {
'2013': {'_scraped_name': '2013'}
},
'feature_flags': [],
}
def get_scraper(self, term, session, scraper_type):
if scraper_type == 'people':
return PersonScraper
def scrape_session_list(self):
return ['2013']
Use new-style metadata in example
|
from pupa.scrape import Jurisdiction
from .people import PersonScraper
class Example(Jurisdiction):
jurisdiction_id = 'ocd-jurisdiction/country:us/state:ex/place:example'
name = 'Example Legislature'
url = 'http://example.com'
terms = [{
'name': '2013-2014',
'sessions': ['2013'],
'start_year': 2013,
'end_year': 2014
}]
provides = ['people'],
parties = [
{'name': 'Independent' },
{'name': 'Green' },
{'name': 'Bull-Moose'}
]
session_details = {
'2013': {'_scraped_name': '2013'}
}
def get_scraper(self, term, session, scraper_type):
if scraper_type == 'people':
return PersonScraper
def scrape_session_list(self):
return ['2013']
|
<commit_before>from pupa.scrape import Jurisdiction
from .people import PersonScraper
class Example(Jurisdiction):
jurisdiction_id = 'ocd-jurisdiction/country:us/state:ex/place:example'
def get_metadata(self):
return {
'name': 'Example Legislature',
'url': 'http://example.com',
'terms': [{
'name': '2013-2014',
'sessions': ['2013'],
'start_year': 2013,
'end_year': 2014
}],
'provides': ['people'],
'parties': [
{'name': 'Independent' },
{'name': 'Green' },
{'name': 'Bull-Moose'}
],
'session_details': {
'2013': {'_scraped_name': '2013'}
},
'feature_flags': [],
}
def get_scraper(self, term, session, scraper_type):
if scraper_type == 'people':
return PersonScraper
def scrape_session_list(self):
return ['2013']
<commit_msg>Use new-style metadata in example<commit_after>
|
from pupa.scrape import Jurisdiction
from .people import PersonScraper
class Example(Jurisdiction):
jurisdiction_id = 'ocd-jurisdiction/country:us/state:ex/place:example'
name = 'Example Legislature'
url = 'http://example.com'
terms = [{
'name': '2013-2014',
'sessions': ['2013'],
'start_year': 2013,
'end_year': 2014
}]
    provides = ['people']
parties = [
{'name': 'Independent' },
{'name': 'Green' },
{'name': 'Bull-Moose'}
]
session_details = {
'2013': {'_scraped_name': '2013'}
}
def get_scraper(self, term, session, scraper_type):
if scraper_type == 'people':
return PersonScraper
def scrape_session_list(self):
return ['2013']
|
from pupa.scrape import Jurisdiction
from .people import PersonScraper
class Example(Jurisdiction):
jurisdiction_id = 'ocd-jurisdiction/country:us/state:ex/place:example'
def get_metadata(self):
return {
'name': 'Example Legislature',
'url': 'http://example.com',
'terms': [{
'name': '2013-2014',
'sessions': ['2013'],
'start_year': 2013,
'end_year': 2014
}],
'provides': ['people'],
'parties': [
{'name': 'Independent' },
{'name': 'Green' },
{'name': 'Bull-Moose'}
],
'session_details': {
'2013': {'_scraped_name': '2013'}
},
'feature_flags': [],
}
def get_scraper(self, term, session, scraper_type):
if scraper_type == 'people':
return PersonScraper
def scrape_session_list(self):
return ['2013']
Use new-style metadata in examplefrom pupa.scrape import Jurisdiction
from .people import PersonScraper
class Example(Jurisdiction):
jurisdiction_id = 'ocd-jurisdiction/country:us/state:ex/place:example'
name = 'Example Legislature'
url = 'http://example.com'
terms = [{
'name': '2013-2014',
'sessions': ['2013'],
'start_year': 2013,
'end_year': 2014
}]
    provides = ['people']
parties = [
{'name': 'Independent' },
{'name': 'Green' },
{'name': 'Bull-Moose'}
]
session_details = {
'2013': {'_scraped_name': '2013'}
}
def get_scraper(self, term, session, scraper_type):
if scraper_type == 'people':
return PersonScraper
def scrape_session_list(self):
return ['2013']
|
<commit_before>from pupa.scrape import Jurisdiction
from .people import PersonScraper
class Example(Jurisdiction):
jurisdiction_id = 'ocd-jurisdiction/country:us/state:ex/place:example'
def get_metadata(self):
return {
'name': 'Example Legislature',
'url': 'http://example.com',
'terms': [{
'name': '2013-2014',
'sessions': ['2013'],
'start_year': 2013,
'end_year': 2014
}],
'provides': ['people'],
'parties': [
{'name': 'Independent' },
{'name': 'Green' },
{'name': 'Bull-Moose'}
],
'session_details': {
'2013': {'_scraped_name': '2013'}
},
'feature_flags': [],
}
def get_scraper(self, term, session, scraper_type):
if scraper_type == 'people':
return PersonScraper
def scrape_session_list(self):
return ['2013']
<commit_msg>Use new-style metadata in example<commit_after>from pupa.scrape import Jurisdiction
from .people import PersonScraper
class Example(Jurisdiction):
jurisdiction_id = 'ocd-jurisdiction/country:us/state:ex/place:example'
name = 'Example Legislature'
url = 'http://example.com'
terms = [{
'name': '2013-2014',
'sessions': ['2013'],
'start_year': 2013,
'end_year': 2014
}]
    provides = ['people']
parties = [
{'name': 'Independent' },
{'name': 'Green' },
{'name': 'Bull-Moose'}
]
session_details = {
'2013': {'_scraped_name': '2013'}
}
def get_scraper(self, term, session, scraper_type):
if scraper_type == 'people':
return PersonScraper
def scrape_session_list(self):
return ['2013']
|
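The new style above replaces a get_metadata() method with plain class attributes, so callers can read the metadata without instantiating the jurisdiction or calling anything. A generic sketch of the pattern; the base class is a stand-in, not pupa's Jurisdiction:

class Base:
    name = None
    url = None

class Example(Base):
    name = 'Example Legislature'
    url = 'http://example.com'

# Attributes are introspectable directly on the class:
print(Example.name, Example.url)  # Example Legislature http://example.com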
07da63a9ac95a054332297638df17fcf00ac4291
|
core/components/security/factor.py
|
core/components/security/factor.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
from u2flib_server.u2f import (begin_registration,
begin_authentication,
complete_registration,
complete_authentication)
from components.eternity import config
facet = config.rss['link']
async def enroll(user):
enroll = begin_registration(facet, user.get('_u2f_devices_', []))
user['_u2f_enroll_'] = enroll.json
return user, json.dumps(enroll.data_for_client)
async def bind(user, data):
response = data['tokenResponse']
enroll = user.pop('_u2f_enroll_')
device, cert = complete_registration(enroll, response, [facet])
patch = device
patch['deviceName'] = data['deviceName']
patch['registerDate'] = data['date']
user.setdefault('_u2f_devices_', []).append(json.dumps(patch))
# cert = x509.load_der_x509_certificate(cert, default_backend())
return user, True
async def sign(user):
challenge = begin_authentication(facet, user.get('_u2f_devices_', []))
user['_u2f_challenge_'] = challenge.json
return user, json.dumps(challenge.data_for_client)
async def verify(user, data):
challenge = user.pop('_u2f_challenge_')
try:
complete_authentication(challenge, data, [facet])
except AttributeError:
return user, False
return user, True
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
from u2flib_server.u2f import (begin_registration,
begin_authentication,
complete_registration,
complete_authentication)
from components.eternity import config
facet = config.rss['link']
async def enroll(user):
enroll = begin_registration(facet, user.get('_u2f_devices_', []))
user['_u2f_enroll_'] = enroll.json
return user, json.dumps(enroll.data_for_client)
async def bind(user, data):
response = data['tokenResponse']
enroll = user.pop('_u2f_enroll_')
device, cert = complete_registration(enroll, response, [facet])
patch = device
patch['deviceName'] = data['deviceName']
patch['registerDate'] = data['date']
user.setdefault('_u2f_devices_', []).append(json.dumps(patch))
# cert = x509.load_der_x509_certificate(cert, default_backend())
return user, True
async def sign(user):
challenge = begin_authentication(facet, user.get('_u2f_devices_', []))
user['_u2f_challenge_'] = challenge.json
return user, json.dumps(challenge.data_for_client)
async def verify(user, data):
print(user)
challenge = user.pop('_u2f_challenge_')
try:
complete_authentication(challenge, data, [facet])
except AttributeError:
return user, False
return user, True
|
Fix server error when logging in with u2f
|
Fix server error when logging in with u2f
|
Python
|
mit
|
chiaki64/Windless,chiaki64/Windless
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
from u2flib_server.u2f import (begin_registration,
begin_authentication,
complete_registration,
complete_authentication)
from components.eternity import config
facet = config.rss['link']
async def enroll(user):
enroll = begin_registration(facet, user.get('_u2f_devices_', []))
user['_u2f_enroll_'] = enroll.json
return user, json.dumps(enroll.data_for_client)
async def bind(user, data):
response = data['tokenResponse']
enroll = user.pop('_u2f_enroll_')
device, cert = complete_registration(enroll, response, [facet])
patch = device
patch['deviceName'] = data['deviceName']
patch['registerDate'] = data['date']
user.setdefault('_u2f_devices_', []).append(json.dumps(patch))
# cert = x509.load_der_x509_certificate(cert, default_backend())
return user, True
async def sign(user):
challenge = begin_authentication(facet, user.get('_u2f_devices_', []))
user['_u2f_challenge_'] = challenge.json
return user, json.dumps(challenge.data_for_client)
async def verify(user, data):
challenge = user.pop('_u2f_challenge_')
try:
complete_authentication(challenge, data, [facet])
except AttributeError:
return user, False
return user, True
Fix server error when logging in with u2f
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
from u2flib_server.u2f import (begin_registration,
begin_authentication,
complete_registration,
complete_authentication)
from components.eternity import config
facet = config.rss['link']
async def enroll(user):
enroll = begin_registration(facet, user.get('_u2f_devices_', []))
user['_u2f_enroll_'] = enroll.json
return user, json.dumps(enroll.data_for_client)
async def bind(user, data):
response = data['tokenResponse']
enroll = user.pop('_u2f_enroll_')
device, cert = complete_registration(enroll, response, [facet])
patch = device
patch['deviceName'] = data['deviceName']
patch['registerDate'] = data['date']
user.setdefault('_u2f_devices_', []).append(json.dumps(patch))
# cert = x509.load_der_x509_certificate(cert, default_backend())
return user, True
async def sign(user):
challenge = begin_authentication(facet, user.get('_u2f_devices_', []))
user['_u2f_challenge_'] = challenge.json
return user, json.dumps(challenge.data_for_client)
async def verify(user, data):
print(user)
challenge = user.pop('_u2f_challenge_')
try:
complete_authentication(challenge, data, [facet])
except AttributeError:
return user, False
return user, True
|
<commit_before>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
from u2flib_server.u2f import (begin_registration,
begin_authentication,
complete_registration,
complete_authentication)
from components.eternity import config
facet = config.rss['link']
async def enroll(user):
enroll = begin_registration(facet, user.get('_u2f_devices_', []))
user['_u2f_enroll_'] = enroll.json
return user, json.dumps(enroll.data_for_client)
async def bind(user, data):
response = data['tokenResponse']
enroll = user.pop('_u2f_enroll_')
device, cert = complete_registration(enroll, response, [facet])
patch = device
patch['deviceName'] = data['deviceName']
patch['registerDate'] = data['date']
user.setdefault('_u2f_devices_', []).append(json.dumps(patch))
# cert = x509.load_der_x509_certificate(cert, default_backend())
return user, True
async def sign(user):
challenge = begin_authentication(facet, user.get('_u2f_devices_', []))
user['_u2f_challenge_'] = challenge.json
return user, json.dumps(challenge.data_for_client)
async def verify(user, data):
challenge = user.pop('_u2f_challenge_')
try:
complete_authentication(challenge, data, [facet])
except AttributeError:
return user, False
return user, True
<commit_msg>Fix server error when logging in with u2f<commit_after>
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
from u2flib_server.u2f import (begin_registration,
begin_authentication,
complete_registration,
complete_authentication)
from components.eternity import config
facet = config.rss['link']
async def enroll(user):
enroll = begin_registration(facet, user.get('_u2f_devices_', []))
user['_u2f_enroll_'] = enroll.json
return user, json.dumps(enroll.data_for_client)
async def bind(user, data):
response = data['tokenResponse']
enroll = user.pop('_u2f_enroll_')
device, cert = complete_registration(enroll, response, [facet])
patch = device
patch['deviceName'] = data['deviceName']
patch['registerDate'] = data['date']
user.setdefault('_u2f_devices_', []).append(json.dumps(patch))
# cert = x509.load_der_x509_certificate(cert, default_backend())
return user, True
async def sign(user):
challenge = begin_authentication(facet, user.get('_u2f_devices_', []))
user['_u2f_challenge_'] = challenge.json
return user, json.dumps(challenge.data_for_client)
async def verify(user, data):
print(user)
challenge = user.pop('_u2f_challenge_')
try:
complete_authentication(challenge, data, [facet])
except AttributeError:
return user, False
return user, True
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
from u2flib_server.u2f import (begin_registration,
begin_authentication,
complete_registration,
complete_authentication)
from components.eternity import config
facet = config.rss['link']
async def enroll(user):
enroll = begin_registration(facet, user.get('_u2f_devices_', []))
user['_u2f_enroll_'] = enroll.json
return user, json.dumps(enroll.data_for_client)
async def bind(user, data):
response = data['tokenResponse']
enroll = user.pop('_u2f_enroll_')
device, cert = complete_registration(enroll, response, [facet])
patch = device
patch['deviceName'] = data['deviceName']
patch['registerDate'] = data['date']
user.setdefault('_u2f_devices_', []).append(json.dumps(patch))
# cert = x509.load_der_x509_certificate(cert, default_backend())
return user, True
async def sign(user):
challenge = begin_authentication(facet, user.get('_u2f_devices_', []))
user['_u2f_challenge_'] = challenge.json
return user, json.dumps(challenge.data_for_client)
async def verify(user, data):
challenge = user.pop('_u2f_challenge_')
try:
complete_authentication(challenge, data, [facet])
except AttributeError:
return user, False
return user, True
Fix server error when logging in with u2f#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
from u2flib_server.u2f import (begin_registration,
begin_authentication,
complete_registration,
complete_authentication)
from components.eternity import config
facet = config.rss['link']
async def enroll(user):
enroll = begin_registration(facet, user.get('_u2f_devices_', []))
user['_u2f_enroll_'] = enroll.json
return user, json.dumps(enroll.data_for_client)
async def bind(user, data):
response = data['tokenResponse']
enroll = user.pop('_u2f_enroll_')
device, cert = complete_registration(enroll, response, [facet])
patch = device
patch['deviceName'] = data['deviceName']
patch['registerDate'] = data['date']
user.setdefault('_u2f_devices_', []).append(json.dumps(patch))
# cert = x509.load_der_x509_certificate(cert, default_backend())
return user, True
async def sign(user):
challenge = begin_authentication(facet, user.get('_u2f_devices_', []))
user['_u2f_challenge_'] = challenge.json
return user, json.dumps(challenge.data_for_client)
async def verify(user, data):
print(user)
challenge = user.pop('_u2f_challenge_')
try:
complete_authentication(challenge, data, [facet])
except AttributeError:
return user, False
return user, True
|
<commit_before>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
from u2flib_server.u2f import (begin_registration,
begin_authentication,
complete_registration,
complete_authentication)
from components.eternity import config
facet = config.rss['link']
async def enroll(user):
enroll = begin_registration(facet, user.get('_u2f_devices_', []))
user['_u2f_enroll_'] = enroll.json
return user, json.dumps(enroll.data_for_client)
async def bind(user, data):
response = data['tokenResponse']
enroll = user.pop('_u2f_enroll_')
device, cert = complete_registration(enroll, response, [facet])
patch = device
patch['deviceName'] = data['deviceName']
patch['registerDate'] = data['date']
user.setdefault('_u2f_devices_', []).append(json.dumps(patch))
# cert = x509.load_der_x509_certificate(cert, default_backend())
return user, True
async def sign(user):
challenge = begin_authentication(facet, user.get('_u2f_devices_', []))
user['_u2f_challenge_'] = challenge.json
return user, json.dumps(challenge.data_for_client)
async def verify(user, data):
challenge = user.pop('_u2f_challenge_')
try:
complete_authentication(challenge, data, [facet])
except AttributeError:
return user, False
return user, True
<commit_msg>Fix server error when logging in with u2f<commit_after>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
from u2flib_server.u2f import (begin_registration,
begin_authentication,
complete_registration,
complete_authentication)
from components.eternity import config
facet = config.rss['link']
async def enroll(user):
enroll = begin_registration(facet, user.get('_u2f_devices_', []))
user['_u2f_enroll_'] = enroll.json
return user, json.dumps(enroll.data_for_client)
async def bind(user, data):
response = data['tokenResponse']
enroll = user.pop('_u2f_enroll_')
device, cert = complete_registration(enroll, response, [facet])
patch = device
patch['deviceName'] = data['deviceName']
patch['registerDate'] = data['date']
user.setdefault('_u2f_devices_', []).append(json.dumps(patch))
# cert = x509.load_der_x509_certificate(cert, default_backend())
return user, True
async def sign(user):
challenge = begin_authentication(facet, user.get('_u2f_devices_', []))
user['_u2f_challenge_'] = challenge.json
return user, json.dumps(challenge.data_for_client)
async def verify(user, data):
print(user)
challenge = user.pop('_u2f_challenge_')
try:
complete_authentication(challenge, data, [facet])
except AttributeError:
return user, False
return user, True
|
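The U2F record above follows python-u2flib-server's two-step ceremony: each begin_* call produces a challenge that must be stashed on the user between requests, and the matching complete_* call verifies the browser's response against it. A minimal sketch of the sign/verify round-trip against an in-memory user dict (APP_ID, users, and the breadth of the except clause are illustrative assumptions, not taken from the repo):

import json
from u2flib_server.u2f import begin_authentication, complete_authentication

APP_ID = 'https://example.com'                # hypothetical facet / app id
users = {'alice': {'_u2f_devices_': []}}      # device JSON blobs land here after enrollment

def start_sign(username):
    user = users[username]
    challenge = begin_authentication(APP_ID, user.get('_u2f_devices_', []))
    user['_u2f_challenge_'] = challenge.json          # persist the pending challenge between requests
    return json.dumps(challenge.data_for_client)      # hand this to the browser's U2F API

def finish_sign(username, token_response):
    user = users[username]
    challenge = user.pop('_u2f_challenge_')           # pop, not get: a challenge is single-use
    try:
        complete_authentication(challenge, token_response, [APP_ID])
    except Exception:                                 # the library raises on bad signatures/counters
        return False
    return True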
4c484a29480ec9d85a87ac7c2aaf09ced7d15457
|
nn/file/__init__.py
|
nn/file/__init__.py
|
import functools
import tensorflow as tf
from . import cnn_dailymail_rc
from ..flags import FLAGS
READERS = { "cnn_dailymail_rc": cnn_dailymail_rc.read_files }
def read_files(file_pattern, file_format):
return READERS[file_format](_file_pattern_to_names(file_pattern))
def _file_pattern_to_names(pattern):
return tf.train.string_input_producer(tf.train.match_filenames_once(pattern),
num_epochs=FLAGS.num_epochs,
capacity=FLAGS.filename_queue_capacity)
|
import functools
import tensorflow as tf
from . import cnn_dailymail_rc
from .. import collections
from ..flags import FLAGS
from ..util import func_scope
READERS = { "cnn_dailymail_rc": cnn_dailymail_rc.read_files }
@func_scope
def read_files(file_pattern, file_format):
return monitored_batch_queue(
*READERS[file_format](_file_pattern_to_names(file_pattern)))
@func_scope
def _file_pattern_to_names(pattern):
return tf.train.string_input_producer(tf.train.match_filenames_once(pattern),
num_epochs=FLAGS.num_epochs,
capacity=FLAGS.filename_queue_capacity)
@func_scope
def monitored_batch_queue(*tensors):
queue = tf.FIFOQueue(FLAGS.batch_queue_capacity,
[tensor.dtype for tensor in tensors])
collections.add_metric(queue.size(), "batches_in_queue")
tf.train.add_queue_runner(
tf.train.QueueRunner(queue, [queue.enqueue(tensors)]))
results = queue.dequeue()
for tensor, result in zip(tensors, results):
result.set_shape(tensor.get_shape())
return results
|
Monitor number of batches in an input batch queue
|
Monitor number of batches in an input batch queue
|
Python
|
unlicense
|
raviqqe/tensorflow-extenteten,raviqqe/tensorflow-extenteten
|
import functools
import tensorflow as tf
from . import cnn_dailymail_rc
from ..flags import FLAGS
READERS = { "cnn_dailymail_rc": cnn_dailymail_rc.read_files }
def read_files(file_pattern, file_format):
return READERS[file_format](_file_pattern_to_names(file_pattern))
def _file_pattern_to_names(pattern):
return tf.train.string_input_producer(tf.train.match_filenames_once(pattern),
num_epochs=FLAGS.num_epochs,
capacity=FLAGS.filename_queue_capacity)
Monitor number of batches in an input batch queue
|
import functools
import tensorflow as tf
from . import cnn_dailymail_rc
from .. import collections
from ..flags import FLAGS
from ..util import func_scope
READERS = { "cnn_dailymail_rc": cnn_dailymail_rc.read_files }
@func_scope
def read_files(file_pattern, file_format):
return monitored_batch_queue(
*READERS[file_format](_file_pattern_to_names(file_pattern)))
@func_scope
def _file_pattern_to_names(pattern):
return tf.train.string_input_producer(tf.train.match_filenames_once(pattern),
num_epochs=FLAGS.num_epochs,
capacity=FLAGS.filename_queue_capacity)
@func_scope
def monitored_batch_queue(*tensors):
queue = tf.FIFOQueue(FLAGS.batch_queue_capacity,
[tensor.dtype for tensor in tensors])
collections.add_metric(queue.size(), "batches_in_queue")
tf.train.add_queue_runner(
tf.train.QueueRunner(queue, [queue.enqueue(tensors)]))
results = queue.dequeue()
for tensor, result in zip(tensors, results):
result.set_shape(tensor.get_shape())
return results
|
<commit_before>import functools
import tensorflow as tf
from . import cnn_dailymail_rc
from ..flags import FLAGS
READERS = { "cnn_dailymail_rc": cnn_dailymail_rc.read_files }
def read_files(file_pattern, file_format):
return READERS[file_format](_file_pattern_to_names(file_pattern))
def _file_pattern_to_names(pattern):
return tf.train.string_input_producer(tf.train.match_filenames_once(pattern),
num_epochs=FLAGS.num_epochs,
capacity=FLAGS.filename_queue_capacity)
<commit_msg>Monitor number of batches in an input batch queue<commit_after>
|
import functools
import tensorflow as tf
from . import cnn_dailymail_rc
from .. import collections
from ..flags import FLAGS
from ..util import func_scope
READERS = { "cnn_dailymail_rc": cnn_dailymail_rc.read_files }
@func_scope
def read_files(file_pattern, file_format):
return monitored_batch_queue(
*READERS[file_format](_file_pattern_to_names(file_pattern)))
@func_scope
def _file_pattern_to_names(pattern):
return tf.train.string_input_producer(tf.train.match_filenames_once(pattern),
num_epochs=FLAGS.num_epochs,
capacity=FLAGS.filename_queue_capacity)
@func_scope
def monitored_batch_queue(*tensors):
queue = tf.FIFOQueue(FLAGS.batch_queue_capacity,
[tensor.dtype for tensor in tensors])
collections.add_metric(queue.size(), "batches_in_queue")
tf.train.add_queue_runner(
tf.train.QueueRunner(queue, [queue.enqueue(tensors)]))
results = queue.dequeue()
for tensor, result in zip(tensors, results):
result.set_shape(tensor.get_shape())
return results
|
import functools
import tensorflow as tf
from . import cnn_dailymail_rc
from ..flags import FLAGS
READERS = { "cnn_dailymail_rc": cnn_dailymail_rc.read_files }
def read_files(file_pattern, file_format):
return READERS[file_format](_file_pattern_to_names(file_pattern))
def _file_pattern_to_names(pattern):
return tf.train.string_input_producer(tf.train.match_filenames_once(pattern),
num_epochs=FLAGS.num_epochs,
capacity=FLAGS.filename_queue_capacity)
Monitor number of batches in an input batch queue
import functools
import tensorflow as tf
from . import cnn_dailymail_rc
from .. import collections
from ..flags import FLAGS
from ..util import func_scope
READERS = { "cnn_dailymail_rc": cnn_dailymail_rc.read_files }
@func_scope
def read_files(file_pattern, file_format):
return monitored_batch_queue(
*READERS[file_format](_file_pattern_to_names(file_pattern)))
@func_scope
def _file_pattern_to_names(pattern):
return tf.train.string_input_producer(tf.train.match_filenames_once(pattern),
num_epochs=FLAGS.num_epochs,
capacity=FLAGS.filename_queue_capacity)
@func_scope
def monitored_batch_queue(*tensors):
queue = tf.FIFOQueue(FLAGS.batch_queue_capacity,
[tensor.dtype for tensor in tensors])
collections.add_metric(queue.size(), "batches_in_queue")
tf.train.add_queue_runner(
tf.train.QueueRunner(queue, [queue.enqueue(tensors)]))
results = queue.dequeue()
for tensor, result in zip(tensors, results):
result.set_shape(tensor.get_shape())
return results
|
<commit_before>import functools
import tensorflow as tf
from . import cnn_dailymail_rc
from ..flags import FLAGS
READERS = { "cnn_dailymail_rc": cnn_dailymail_rc.read_files }
def read_files(file_pattern, file_format):
return READERS[file_format](_file_pattern_to_names(file_pattern))
def _file_pattern_to_names(pattern):
return tf.train.string_input_producer(tf.train.match_filenames_once(pattern),
num_epochs=FLAGS.num_epochs,
capacity=FLAGS.filename_queue_capacity)
<commit_msg>Monitor number of batches in an input batch queue<commit_after>import functools
import tensorflow as tf
from . import cnn_dailymail_rc
from .. import collections
from ..flags import FLAGS
from ..util import func_scope
READERS = { "cnn_dailymail_rc": cnn_dailymail_rc.read_files }
@func_scope
def read_files(file_pattern, file_format):
return monitored_batch_queue(
*READERS[file_format](_file_pattern_to_names(file_pattern)))
@func_scope
def _file_pattern_to_names(pattern):
return tf.train.string_input_producer(tf.train.match_filenames_once(pattern),
num_epochs=FLAGS.num_epochs,
capacity=FLAGS.filename_queue_capacity)
@func_scope
def monitored_batch_queue(*tensors):
queue = tf.FIFOQueue(FLAGS.batch_queue_capacity,
[tensor.dtype for tensor in tensors])
collections.add_metric(queue.size(), "batches_in_queue")
tf.train.add_queue_runner(
tf.train.QueueRunner(queue, [queue.enqueue(tensors)]))
results = queue.dequeue()
for tensor, result in zip(tensors, results):
result.set_shape(tensor.get_shape())
return results
|
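The queue-monitoring pattern in this record is TF1's queue-runner machinery: a FIFOQueue fed by a background QueueRunner, with queue.size() exported as a metric so a dashboard can spot input starvation. A self-contained sketch under that assumption — tf.summary.scalar stands in for the repo's collections.add_metric helper, whose body isn't shown:

import tensorflow as tf  # TF1-style graph API

def monitored_batch_queue(tensors, capacity=64):
    queue = tf.FIFOQueue(capacity, [t.dtype for t in tensors])
    tf.summary.scalar('batches_in_queue', queue.size())  # watchable queue depth
    tf.train.add_queue_runner(
        tf.train.QueueRunner(queue, [queue.enqueue(tensors)]))
    results = queue.dequeue()
    for tensor, result in zip(tensors, results):
        result.set_shape(tensor.get_shape())  # dequeue drops static shapes; restore them
    return results

A queue that stays near empty during training usually means the input pipeline, not the model, is the bottleneck.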
c0c59a9c5d3aa2d7ed50e8e895f1a3e02a4ae380
|
Basic-Number-Guessing-Game-Challenge.py
|
Basic-Number-Guessing-Game-Challenge.py
|
import random
attempts = 1
number = str(random.randint(1, 100))
while True:
print number
if raw_input("Guess (1 - 100): ") == number:
print "Correct!"
print "It Only Took You", attempts, "Attempts!" if attempts > 1 else "Attempt!"
break
else:
print "Incorrect, Guess Again!"
attempts += 1
|
import random
attempts = 1
number = random.randint(1, 100)
while True:
guess = raw_input("Guess (1 - 100): ")
if guess.isdigit():
guess = int(guess)
if 1 <= guess and guess <= 100:
if guess == number:
print "Correct!"
print "It Only Took You", attempts, "Attempts!" if attempts > 1 else "Attempt!"
break
elif guess > number:
print "That Guess Is Too High!"
elif guess < number:
print "That Guess Is Too Low!"
else:
print "Guesses Must Be Between 1 And 100!"
else:
print "That's Not A Number!"
attempts += 1
|
Verify that the guess is a number, that it is between 1 and 100, and tell the user if the guessed number is too high or low.
|
Verify that the guess is a number, that it is between 1 and 100, and tell the user if the guessed number is too high or low.
|
Python
|
mit
|
RascalTwo/Basic-Number-Guessing-Game-Challenge
|
import random
attempts = 1
number = str(random.randint(1, 100))
while True:
print number
if raw_input("Guess (1 - 100): ") == number:
print "Correct!"
print "It Only Took You", attempts, "Attempts!" if attempts > 1 else "Attempt!"
break
else:
print "Incorrect, Guess Again!"
attempts += 1
Verify that the guess is a number, that it is between 1 and 100, and tell the user if the guessed number is too high or low.
|
import random
attempts = 1
number = random.randint(1, 100)
while True:
guess = raw_input("Guess (1 - 100): ")
if guess.isdigit():
guess = int(guess)
if 1 <= guess and guess <= 100:
if guess == number:
print "Correct!"
print "It Only Took You", attempts, "Attempts!" if attempts > 1 else "Attempt!"
break
elif guess > number:
print "That Guess Is Too High!"
elif guess < number:
print "That Guess Is Too Low!"
else:
print "Guesses Must Be Between 1 And 100!"
else:
print "That's Not A Number!"
attempts += 1
|
<commit_before>import random
attempts = 1
number = str(random.randint(1, 100))
while True:
print number
if raw_input("Guess (1 - 100): ") == number:
print "Correct!"
print "It Only Took You", attempts, "Attempts!" if attempts > 1 else "Attempt!"
break
else:
print "Incorrect, Guess Again!"
attempts += 1
<commit_msg>Verify that the guess is a number, that it is between 1 and 100, and tell the user if the guessed number is too high or low.<commit_after>
|
import random
attempts = 1
number = random.randint(1, 100)
while True:
guess = raw_input("Guess (1 - 100): ")
if guess.isdigit():
guess = int(guess)
if 1 <= guess and guess <= 100:
if guess == number:
print "Correct!"
print "It Only Took You", attempts, "Attempts!" if attempts > 1 else "Attempt!"
break
elif guess > number:
print "That Guess Is Too High!"
elif guess < number:
print "That Guess Is Too Low!"
else:
print "Guesses Must Be Between 1 And 100!"
else:
print "That's Not A Number!"
attempts += 1
|
import random
attempts = 1
number = str(random.randint(1, 100))
while True:
print number
if raw_input("Guess (1 - 100): ") == number:
print "Correct!"
print "It Only Took You", attempts, "Attempts!" if attempts > 1 else "Attempt!"
break
else:
print "Incorrect, Guess Again!"
attempts += 1
Verify that the guess is a number, that it is between 1 and 100, and tell the user if the guessed number is too high or low.
import random
attempts = 1
number = random.randint(1, 100)
while True:
guess = raw_input("Guess (1 - 100): ")
if guess.isdigit():
guess = int(guess)
if 1 <= guess and guess <= 100:
if guess == number:
print "Correct!"
print "It Only Took You", attempts, "Attempts!" if attempts > 1 else "Attempt!"
break
elif guess > number:
print "That Guess Is Too High!"
elif guess < number:
print "That Guess Is Too Low!"
else:
print "Guesses Must Be Between 1 And 100!"
else:
print "That's Not A Number!"
attempts += 1
|
<commit_before>import random
attempts = 1
number = str(random.randint(1, 100))
while True:
print number
if raw_input("Guess (1 - 100): ") == number:
print "Correct!"
print "It Only Took You", attempts, "Attempts!" if attempts > 1 else "Attempt!"
break
else:
print "Incorrect, Guess Again!"
attempts += 1
<commit_msg>Verify that the guess is a number, that it is between 1 and 100, and tell the user if the guessed number is too high or low.<commit_after>import random
attempts = 1
number = random.randint(1, 100)
while True:
guess = raw_input("Guess (1 - 100): ")
if guess.isdigit():
guess = int(guess)
if 1 <= guess and guess <= 100:
if guess == number:
print "Correct!"
print "It Only Took You", attempts, "Attempts!" if attempts > 1 else "Attempt!"
break
elif guess > number:
print "That Guess Is Too High!"
elif guess < number:
print "That Guess Is Too Low!"
else:
print "Guesses Must Be Between 1 And 100!"
else:
print "That's Not A Number!"
attempts += 1
|
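The validation added in the guessing-game record leans on str.isdigit(), which quietly rejects negative numbers and padded input before the range check ever runs. An alternative sketch in the same Python 2 style, using int() with try/except instead (the function name and messages are illustrative):

def read_guess(prompt="Guess (1 - 100): "):
    raw = raw_input(prompt)
    try:
        value = int(raw)      # unlike str.isdigit(), this accepts '-5' and ' 7 ' too
    except ValueError:
        print "That's Not A Number!"
        return None
    if not 1 <= value <= 100:
        print "Guesses Must Be Between 1 And 100!"
        return None
    return value

The chained comparison 1 <= value <= 100 is the idiomatic spelling of the record's 1 <= guess and guess <= 100.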
21e3eac740f54194954d01d517aca0eb841ca1b3
|
wagtail/search/apps.py
|
wagtail/search/apps.py
|
from django.apps import AppConfig
from django.core.checks import Tags, Warning, register
from django.db import connection
from django.utils.translation import gettext_lazy as _
from wagtail.search.signal_handlers import register_signal_handlers
class WagtailSearchAppConfig(AppConfig):
name = 'wagtail.search'
label = 'wagtailsearch'
verbose_name = _("Wagtail search")
default_auto_field = 'django.db.models.AutoField'
def ready(self):
register_signal_handlers()
if connection.vendor == 'postgresql':
# Only PostgreSQL has support for tsvector weights
from wagtail.search.backends.database.postgres.weights import set_weights
set_weights()
from wagtail.search.models import IndexEntry
IndexEntry.add_generic_relations()
@register(Tags.compatibility, Tags.database)
def check_if_sqlite_version_is_supported(app_configs, **kwargs):
if connection.vendor == 'sqlite':
import sqlite3
if sqlite3.sqlite_version_info < (3, 19, 0):
return [Warning('Your SQLite version is older than 3.19.0. A fallback search backend will be used instead.', hint='Upgrade your SQLite version to at least 3.19.0', id='wagtailsearch.W002', obj=WagtailSearchAppConfig)]
return []
|
from django.apps import AppConfig
from django.core.checks import Tags, Warning, register
from django.db import connection
from django.utils.translation import gettext_lazy as _
from wagtail.search.signal_handlers import register_signal_handlers
class WagtailSearchAppConfig(AppConfig):
name = 'wagtail.search'
label = 'wagtailsearch'
verbose_name = _("Wagtail search")
default_auto_field = 'django.db.models.AutoField'
def ready(self):
register_signal_handlers()
if connection.vendor == 'postgresql':
# Only PostgreSQL has support for tsvector weights
from wagtail.search.backends.database.postgres.weights import set_weights
set_weights()
from wagtail.search.models import IndexEntry
IndexEntry.add_generic_relations()
@register(Tags.compatibility, Tags.database)
def check_if_sqlite_version_is_supported(app_configs, **kwargs):
if connection.vendor == 'sqlite':
import sqlite3
from wagtail.search.backends.database.sqlite.utils import fts5_available
if sqlite3.sqlite_version_info < (3, 19, 0):
return [Warning('Your SQLite version is older than 3.19.0. A fallback search backend will be used instead.', hint='Upgrade your SQLite version to at least 3.19.0', id='wagtailsearch.W002', obj=WagtailSearchAppConfig)]
elif not fts5_available():
return [Warning('Your SQLite installation is missing the fts5 extension. A fallback search backend will be used instead.', hint='Upgrade your SQLite installation to a version with fts5 enabled', id='wagtailsearch.W003', obj=WagtailSearchAppConfig)]
return []
|
Add alternative warning if sqlite is >=3.19 but is missing fts5 support
|
Add alternative warning if sqlite is >=3.19 but is missing fts5 support
|
Python
|
bsd-3-clause
|
torchbox/wagtail,torchbox/wagtail,torchbox/wagtail,torchbox/wagtail
|
from django.apps import AppConfig
from django.core.checks import Tags, Warning, register
from django.db import connection
from django.utils.translation import gettext_lazy as _
from wagtail.search.signal_handlers import register_signal_handlers
class WagtailSearchAppConfig(AppConfig):
name = 'wagtail.search'
label = 'wagtailsearch'
verbose_name = _("Wagtail search")
default_auto_field = 'django.db.models.AutoField'
def ready(self):
register_signal_handlers()
if connection.vendor == 'postgresql':
# Only PostgreSQL has support for tsvector weights
from wagtail.search.backends.database.postgres.weights import set_weights
set_weights()
from wagtail.search.models import IndexEntry
IndexEntry.add_generic_relations()
@register(Tags.compatibility, Tags.database)
def check_if_sqlite_version_is_supported(app_configs, **kwargs):
if connection.vendor == 'sqlite':
import sqlite3
if sqlite3.sqlite_version_info < (3, 19, 0):
return [Warning('Your SQLite version is older than 3.19.0. A fallback search backend will be used instead.', hint='Upgrade your SQLite version to at least 3.19.0', id='wagtailsearch.W002', obj=WagtailSearchAppConfig)]
return []
Add alternative warning if sqlite is >=3.19 but is missing fts5 support
|
from django.apps import AppConfig
from django.core.checks import Tags, Warning, register
from django.db import connection
from django.utils.translation import gettext_lazy as _
from wagtail.search.signal_handlers import register_signal_handlers
class WagtailSearchAppConfig(AppConfig):
name = 'wagtail.search'
label = 'wagtailsearch'
verbose_name = _("Wagtail search")
default_auto_field = 'django.db.models.AutoField'
def ready(self):
register_signal_handlers()
if connection.vendor == 'postgresql':
# Only PostgreSQL has support for tsvector weights
from wagtail.search.backends.database.postgres.weights import set_weights
set_weights()
from wagtail.search.models import IndexEntry
IndexEntry.add_generic_relations()
@register(Tags.compatibility, Tags.database)
def check_if_sqlite_version_is_supported(app_configs, **kwargs):
if connection.vendor == 'sqlite':
import sqlite3
from wagtail.search.backends.database.sqlite.utils import fts5_available
if sqlite3.sqlite_version_info < (3, 19, 0):
return [Warning('Your SQLite version is older than 3.19.0. A fallback search backend will be used instead.', hint='Upgrade your SQLite version to at least 3.19.0', id='wagtailsearch.W002', obj=WagtailSearchAppConfig)]
elif not fts5_available():
return [Warning('Your SQLite installation is missing the fts5 extension. A fallback search backend will be used instead.', hint='Upgrade your SQLite installation to a version with fts5 enabled', id='wagtailsearch.W003', obj=WagtailSearchAppConfig)]
return []
|
<commit_before>from django.apps import AppConfig
from django.core.checks import Tags, Warning, register
from django.db import connection
from django.utils.translation import gettext_lazy as _
from wagtail.search.signal_handlers import register_signal_handlers
class WagtailSearchAppConfig(AppConfig):
name = 'wagtail.search'
label = 'wagtailsearch'
verbose_name = _("Wagtail search")
default_auto_field = 'django.db.models.AutoField'
def ready(self):
register_signal_handlers()
if connection.vendor == 'postgresql':
# Only PostgreSQL has support for tsvector weights
from wagtail.search.backends.database.postgres.weights import set_weights
set_weights()
from wagtail.search.models import IndexEntry
IndexEntry.add_generic_relations()
@register(Tags.compatibility, Tags.database)
def check_if_sqlite_version_is_supported(app_configs, **kwargs):
if connection.vendor == 'sqlite':
import sqlite3
if sqlite3.sqlite_version_info < (3, 19, 0):
return [Warning('Your SQLite version is older than 3.19.0. A fallback search backend will be used instead.', hint='Upgrade your SQLite version to at least 3.19.0', id='wagtailsearch.W002', obj=WagtailSearchAppConfig)]
return []
<commit_msg>Add alternative warning if sqlite is >=3.19 but is missing fts5 support<commit_after>
|
from django.apps import AppConfig
from django.core.checks import Tags, Warning, register
from django.db import connection
from django.utils.translation import gettext_lazy as _
from wagtail.search.signal_handlers import register_signal_handlers
class WagtailSearchAppConfig(AppConfig):
name = 'wagtail.search'
label = 'wagtailsearch'
verbose_name = _("Wagtail search")
default_auto_field = 'django.db.models.AutoField'
def ready(self):
register_signal_handlers()
if connection.vendor == 'postgresql':
# Only PostgreSQL has support for tsvector weights
from wagtail.search.backends.database.postgres.weights import set_weights
set_weights()
from wagtail.search.models import IndexEntry
IndexEntry.add_generic_relations()
@register(Tags.compatibility, Tags.database)
def check_if_sqlite_version_is_supported(app_configs, **kwargs):
if connection.vendor == 'sqlite':
import sqlite3
from wagtail.search.backends.database.sqlite.utils import fts5_available
if sqlite3.sqlite_version_info < (3, 19, 0):
return [Warning('Your SQLite version is older than 3.19.0. A fallback search backend will be used instead.', hint='Upgrade your SQLite version to at least 3.19.0', id='wagtailsearch.W002', obj=WagtailSearchAppConfig)]
elif not fts5_available():
return [Warning('Your SQLite installation is missing the fts5 extension. A fallback search backend will be used instead.', hint='Upgrade your SQLite installation to a version with fts5 enabled', id='wagtailsearch.W003', obj=WagtailSearchAppConfig)]
return []
|
from django.apps import AppConfig
from django.core.checks import Tags, Warning, register
from django.db import connection
from django.utils.translation import gettext_lazy as _
from wagtail.search.signal_handlers import register_signal_handlers
class WagtailSearchAppConfig(AppConfig):
name = 'wagtail.search'
label = 'wagtailsearch'
verbose_name = _("Wagtail search")
default_auto_field = 'django.db.models.AutoField'
def ready(self):
register_signal_handlers()
if connection.vendor == 'postgresql':
# Only PostgreSQL has support for tsvector weights
from wagtail.search.backends.database.postgres.weights import set_weights
set_weights()
from wagtail.search.models import IndexEntry
IndexEntry.add_generic_relations()
@register(Tags.compatibility, Tags.database)
def check_if_sqlite_version_is_supported(app_configs, **kwargs):
if connection.vendor == 'sqlite':
import sqlite3
if sqlite3.sqlite_version_info < (3, 19, 0):
return [Warning('Your SQLite version is older than 3.19.0. A fallback search backend will be used instead.', hint='Upgrade your SQLite version to at least 3.19.0', id='wagtailsearch.W002', obj=WagtailSearchAppConfig)]
return []
Add alternative warning if sqlite is >=3.19 but is missing fts5 support
from django.apps import AppConfig
from django.core.checks import Tags, Warning, register
from django.db import connection
from django.utils.translation import gettext_lazy as _
from wagtail.search.signal_handlers import register_signal_handlers
class WagtailSearchAppConfig(AppConfig):
name = 'wagtail.search'
label = 'wagtailsearch'
verbose_name = _("Wagtail search")
default_auto_field = 'django.db.models.AutoField'
def ready(self):
register_signal_handlers()
if connection.vendor == 'postgresql':
# Only PostgreSQL has support for tsvector weights
from wagtail.search.backends.database.postgres.weights import set_weights
set_weights()
from wagtail.search.models import IndexEntry
IndexEntry.add_generic_relations()
@register(Tags.compatibility, Tags.database)
def check_if_sqlite_version_is_supported(app_configs, **kwargs):
if connection.vendor == 'sqlite':
import sqlite3
from wagtail.search.backends.database.sqlite.utils import fts5_available
if sqlite3.sqlite_version_info < (3, 19, 0):
return [Warning('Your SQLite version is older than 3.19.0. A fallback search backend will be used instead.', hint='Upgrade your SQLite version to at least 3.19.0', id='wagtailsearch.W002', obj=WagtailSearchAppConfig)]
elif not fts5_available():
return [Warning('Your SQLite installation is missing the fts5 extension. A fallback search backend will be used instead.', hint='Upgrade your SQLite installation to a version with fts5 enabled', id='wagtailsearch.W003', obj=WagtailSearchAppConfig)]
return []
|
<commit_before>from django.apps import AppConfig
from django.core.checks import Tags, Warning, register
from django.db import connection
from django.utils.translation import gettext_lazy as _
from wagtail.search.signal_handlers import register_signal_handlers
class WagtailSearchAppConfig(AppConfig):
name = 'wagtail.search'
label = 'wagtailsearch'
verbose_name = _("Wagtail search")
default_auto_field = 'django.db.models.AutoField'
def ready(self):
register_signal_handlers()
if connection.vendor == 'postgresql':
# Only PostgreSQL has support for tsvector weights
from wagtail.search.backends.database.postgres.weights import set_weights
set_weights()
from wagtail.search.models import IndexEntry
IndexEntry.add_generic_relations()
@register(Tags.compatibility, Tags.database)
def check_if_sqlite_version_is_supported(app_configs, **kwargs):
if connection.vendor == 'sqlite':
import sqlite3
if sqlite3.sqlite_version_info < (3, 19, 0):
return [Warning('Your SQLite version is older than 3.19.0. A fallback search backend will be used instead.', hint='Upgrade your SQLite version to at least 3.19.0', id='wagtailsearch.W002', obj=WagtailSearchAppConfig)]
return []
<commit_msg>Add alternative warning if sqlite is >=3.19 but is missing fts5 support<commit_after>from django.apps import AppConfig
from django.core.checks import Tags, Warning, register
from django.db import connection
from django.utils.translation import gettext_lazy as _
from wagtail.search.signal_handlers import register_signal_handlers
class WagtailSearchAppConfig(AppConfig):
name = 'wagtail.search'
label = 'wagtailsearch'
verbose_name = _("Wagtail search")
default_auto_field = 'django.db.models.AutoField'
def ready(self):
register_signal_handlers()
if connection.vendor == 'postgresql':
# Only PostgreSQL has support for tsvector weights
from wagtail.search.backends.database.postgres.weights import set_weights
set_weights()
from wagtail.search.models import IndexEntry
IndexEntry.add_generic_relations()
@register(Tags.compatibility, Tags.database)
def check_if_sqlite_version_is_supported(app_configs, **kwargs):
if connection.vendor == 'sqlite':
import sqlite3
from wagtail.search.backends.database.sqlite.utils import fts5_available
if sqlite3.sqlite_version_info < (3, 19, 0):
return [Warning('Your SQLite version is older than 3.19.0. A fallback search backend will be used instead.', hint='Upgrade your SQLite version to at least 3.19.0', id='wagtailsearch.W002', obj=WagtailSearchAppConfig)]
elif not fts5_available():
return [Warning('Your SQLite installation is missing the fts5 extension. A fallback search backend will be used instead.', hint='Upgrade your SQLite installation to a version with fts5 enabled', id='wagtailsearch.W003', obj=WagtailSearchAppConfig)]
return []
|
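The Wagtail record imports an fts5_available() helper whose body isn't part of the diff. One common way to implement such a probe — create a throwaway FTS5 virtual table in an in-memory database — is sketched below; this is an assumption about the technique, not Wagtail's actual code:

import sqlite3

def fts5_available():
    conn = sqlite3.connect(':memory:')
    try:
        # Creating an FTS5 virtual table fails with OperationalError
        # when the extension was compiled out of SQLite.
        conn.execute('CREATE VIRTUAL TABLE fts5_probe USING fts5(content)')
    except sqlite3.OperationalError:
        return False
    finally:
        conn.close()
    return True

Both warnings are advisory: per their own text, Wagtail falls back to a simpler search backend rather than failing hard.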
cdfeac780643ddd2502c17f4cd7d949018de8b06
|
warehouse/__about__.py
|
warehouse/__about__.py
|
# Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
# This file is automatically generated, do not edit it
__all__ = [
"__title__", "__summary__", "__uri__", "__version__",
"__author__", "__email__", "__license__", "__copyright__",
]
__title__ = "warehouse"
__summary__ = "Next Generation Python Package Repository"
__uri__ = "https://github.com/dstufft/warehouse"
__version__ = "13.10.10"
__build__ = "<development>"
__author__ = "Donald Stufft"
__email__ = "donald@stufft.io"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2013 Donald Stufft"
|
# Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
# This file is automatically generated, do not edit it
__all__ = [
"__title__", "__summary__", "__uri__", "__version__",
"__author__", "__email__", "__license__", "__copyright__",
]
__title__ = "warehouse"
__summary__ = "Next Generation Python Package Repository"
__uri__ = "https://github.com/dstufft/warehouse"
__version__ = "13.10.10.dev0"
__build__ = "<development>"
__author__ = "Donald Stufft"
__email__ = "donald@stufft.io"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2013 Donald Stufft"
|
Add back the development marker
|
Add back the development marker
|
Python
|
apache-2.0
|
mattrobenolt/warehouse,robhudson/warehouse,techtonik/warehouse,robhudson/warehouse,mattrobenolt/warehouse,techtonik/warehouse,mattrobenolt/warehouse
|
# Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
# This file is automatically generated, do not edit it
__all__ = [
"__title__", "__summary__", "__uri__", "__version__",
"__author__", "__email__", "__license__", "__copyright__",
]
__title__ = "warehouse"
__summary__ = "Next Generation Python Package Repository"
__uri__ = "https://github.com/dstufft/warehouse"
__version__ = "13.10.10"
__build__ = "<development>"
__author__ = "Donald Stufft"
__email__ = "donald@stufft.io"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2013 Donald Stufft"
Add back the development marker
|
# Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
# This file is automatically generated, do not edit it
__all__ = [
"__title__", "__summary__", "__uri__", "__version__",
"__author__", "__email__", "__license__", "__copyright__",
]
__title__ = "warehouse"
__summary__ = "Next Generation Python Package Repository"
__uri__ = "https://github.com/dstufft/warehouse"
__version__ = "13.10.10.dev0"
__build__ = "<development>"
__author__ = "Donald Stufft"
__email__ = "donald@stufft.io"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2013 Donald Stufft"
|
<commit_before># Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
# This file is automatically generated, do not edit it
__all__ = [
"__title__", "__summary__", "__uri__", "__version__",
"__author__", "__email__", "__license__", "__copyright__",
]
__title__ = "warehouse"
__summary__ = "Next Generation Python Package Repository"
__uri__ = "https://github.com/dstufft/warehouse"
__version__ = "13.10.10"
__build__ = "<development>"
__author__ = "Donald Stufft"
__email__ = "donald@stufft.io"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2013 Donald Stufft"
<commit_msg>Add back the development marker<commit_after>
|
# Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
# This file is automatically generated, do not edit it
__all__ = [
"__title__", "__summary__", "__uri__", "__version__",
"__author__", "__email__", "__license__", "__copyright__",
]
__title__ = "warehouse"
__summary__ = "Next Generation Python Package Repository"
__uri__ = "https://github.com/dstufft/warehouse"
__version__ = "13.10.10.dev0"
__build__ = "<development>"
__author__ = "Donald Stufft"
__email__ = "donald@stufft.io"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2013 Donald Stufft"
|
# Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
# This file is automatically generated, do not edit it
__all__ = [
"__title__", "__summary__", "__uri__", "__version__",
"__author__", "__email__", "__license__", "__copyright__",
]
__title__ = "warehouse"
__summary__ = "Next Generation Python Package Repository"
__uri__ = "https://github.com/dstufft/warehouse"
__version__ = "13.10.10"
__build__ = "<development>"
__author__ = "Donald Stufft"
__email__ = "donald@stufft.io"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2013 Donald Stufft"
Add back the development marker
# Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
# This file is automatically generated, do not edit it
__all__ = [
"__title__", "__summary__", "__uri__", "__version__",
"__author__", "__email__", "__license__", "__copyright__",
]
__title__ = "warehouse"
__summary__ = "Next Generation Python Package Repository"
__uri__ = "https://github.com/dstufft/warehouse"
__version__ = "13.10.10.dev0"
__build__ = "<development>"
__author__ = "Donald Stufft"
__email__ = "donald@stufft.io"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2013 Donald Stufft"
|
<commit_before># Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
# This file is automatically generated, do not edit it
__all__ = [
"__title__", "__summary__", "__uri__", "__version__",
"__author__", "__email__", "__license__", "__copyright__",
]
__title__ = "warehouse"
__summary__ = "Next Generation Python Package Repository"
__uri__ = "https://github.com/dstufft/warehouse"
__version__ = "13.10.10"
__build__ = "<development>"
__author__ = "Donald Stufft"
__email__ = "donald@stufft.io"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2013 Donald Stufft"
<commit_msg>Add back the development marker<commit_after># Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
# This file is automatically generated, do not edit it
__all__ = [
"__title__", "__summary__", "__uri__", "__version__",
"__author__", "__email__", "__license__", "__copyright__",
]
__title__ = "warehouse"
__summary__ = "Next Generation Python Package Repository"
__uri__ = "https://github.com/dstufft/warehouse"
__version__ = "13.10.10.dev0"
__build__ = "<development>"
__author__ = "Donald Stufft"
__email__ = "donald@stufft.io"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2013 Donald Stufft"
|
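The only change in the warehouse record is the version string: 13.10.10 becomes 13.10.10.dev0. Under PEP 440, a .devN suffix marks a development release that sorts before the final release of the same number, which is why the marker is restored right after tagging. A quick check using the third-party packaging library (not used by the repo here, purely illustrative):

from packaging.version import Version

dev = Version('13.10.10.dev0')
final = Version('13.10.10')
assert dev < final          # PEP 440: dev releases precede the final release
assert dev.is_devrelease    # packaging exposes the marker directly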
7473a70893a0f31ba717e26a3f508d0adc5026f9
|
osbrain/__init__.py
|
osbrain/__init__.py
|
import Pyro4
Pyro4.config.SERIALIZERS_ACCEPTED.add('pickle')
Pyro4.config.SERIALIZER = 'pickle'
Pyro4.config.THREADPOOL_SIZE = 16
Pyro4.config.SERVERTYPE = 'multiplex'
# TODO: should we set COMMTIMEOUT as well?
Pyro4.config.DETAILED_TRACEBACK = True
__version__ = '0.2.1'
from .core import BaseAgent, Agent, run_agent
from .nameserver import random_nameserver, run_nameserver
from .proxy import Proxy, NSProxy
from .address import SocketAddress, AgentAddress
from .logging import Logger, run_logger
|
import Pyro4
Pyro4.config.SERIALIZERS_ACCEPTED.add('pickle')
Pyro4.config.SERIALIZER = 'pickle'
Pyro4.config.THREADPOOL_SIZE = 16
Pyro4.config.SERVERTYPE = 'multiplex'
Pyro4.config.REQUIRE_EXPOSE = False
# TODO: should we set COMMTIMEOUT as well?
Pyro4.config.DETAILED_TRACEBACK = True
__version__ = '0.2.1'
from .core import BaseAgent, Agent, run_agent
from .nameserver import random_nameserver, run_nameserver
from .proxy import Proxy, NSProxy
from .address import SocketAddress, AgentAddress
from .logging import Logger, run_logger
|
Make osBrain compatible with latest Pyro4 changes
|
Make osBrain compatible with latest Pyro4 changes
|
Python
|
apache-2.0
|
opensistemas-hub/osbrain
|
import Pyro4
Pyro4.config.SERIALIZERS_ACCEPTED.add('pickle')
Pyro4.config.SERIALIZER = 'pickle'
Pyro4.config.THREADPOOL_SIZE = 16
Pyro4.config.SERVERTYPE = 'multiplex'
# TODO: should we set COMMTIMEOUT as well?
Pyro4.config.DETAILED_TRACEBACK = True
__version__ = '0.2.1'
from .core import BaseAgent, Agent, run_agent
from .nameserver import random_nameserver, run_nameserver
from .proxy import Proxy, NSProxy
from .address import SocketAddress, AgentAddress
from .logging import Logger, run_logger
Make osBrain compatible with latest Pyro4 changes
|
import Pyro4
Pyro4.config.SERIALIZERS_ACCEPTED.add('pickle')
Pyro4.config.SERIALIZER = 'pickle'
Pyro4.config.THREADPOOL_SIZE = 16
Pyro4.config.SERVERTYPE = 'multiplex'
Pyro4.config.REQUIRE_EXPOSE = False
# TODO: should we set COMMTIMEOUT as well?
Pyro4.config.DETAILED_TRACEBACK = True
__version__ = '0.2.1'
from .core import BaseAgent, Agent, run_agent
from .nameserver import random_nameserver, run_nameserver
from .proxy import Proxy, NSProxy
from .address import SocketAddress, AgentAddress
from .logging import Logger, run_logger
|
<commit_before>import Pyro4
Pyro4.config.SERIALIZERS_ACCEPTED.add('pickle')
Pyro4.config.SERIALIZER = 'pickle'
Pyro4.config.THREADPOOL_SIZE = 16
Pyro4.config.SERVERTYPE = 'multiplex'
# TODO: should we set COMMTIMEOUT as well?
Pyro4.config.DETAILED_TRACEBACK = True
__version__ = '0.2.1'
from .core import BaseAgent, Agent, run_agent
from .nameserver import random_nameserver, run_nameserver
from .proxy import Proxy, NSProxy
from .address import SocketAddress, AgentAddress
from .logging import Logger, run_logger
<commit_msg>Make osBrain compatible with latest Pyro4 changes<commit_after>
|
import Pyro4
Pyro4.config.SERIALIZERS_ACCEPTED.add('pickle')
Pyro4.config.SERIALIZER = 'pickle'
Pyro4.config.THREADPOOL_SIZE = 16
Pyro4.config.SERVERTYPE = 'multiplex'
Pyro4.config.REQUIRE_EXPOSE = False
# TODO: should we set COMMTIMEOUT as well?
Pyro4.config.DETAILED_TRACEBACK = True
__version__ = '0.2.1'
from .core import BaseAgent, Agent, run_agent
from .nameserver import random_nameserver, run_nameserver
from .proxy import Proxy, NSProxy
from .address import SocketAddress, AgentAddress
from .logging import Logger, run_logger
|
import Pyro4
Pyro4.config.SERIALIZERS_ACCEPTED.add('pickle')
Pyro4.config.SERIALIZER = 'pickle'
Pyro4.config.THREADPOOL_SIZE = 16
Pyro4.config.SERVERTYPE = 'multiplex'
# TODO: should we set COMMTIMEOUT as well?
Pyro4.config.DETAILED_TRACEBACK = True
__version__ = '0.2.1'
from .core import BaseAgent, Agent, run_agent
from .nameserver import random_nameserver, run_nameserver
from .proxy import Proxy, NSProxy
from .address import SocketAddress, AgentAddress
from .logging import Logger, run_logger
Make osBrain compatible with latest Pyro4 changes
import Pyro4
Pyro4.config.SERIALIZERS_ACCEPTED.add('pickle')
Pyro4.config.SERIALIZER = 'pickle'
Pyro4.config.THREADPOOL_SIZE = 16
Pyro4.config.SERVERTYPE = 'multiplex'
Pyro4.config.REQUIRE_EXPOSE = False
# TODO: should we set COMMTIMEOUT as well?
Pyro4.config.DETAILED_TRACEBACK = True
__version__ = '0.2.1'
from .core import BaseAgent, Agent, run_agent
from .nameserver import random_nameserver, run_nameserver
from .proxy import Proxy, NSProxy
from .address import SocketAddress, AgentAddress
from .logging import Logger, run_logger
|
<commit_before>import Pyro4
Pyro4.config.SERIALIZERS_ACCEPTED.add('pickle')
Pyro4.config.SERIALIZER = 'pickle'
Pyro4.config.THREADPOOL_SIZE = 16
Pyro4.config.SERVERTYPE = 'multiplex'
# TODO: should we set COMMTIMEOUT as well?
Pyro4.config.DETAILED_TRACEBACK = True
__version__ = '0.2.1'
from .core import BaseAgent, Agent, run_agent
from .nameserver import random_nameserver, run_nameserver
from .proxy import Proxy, NSProxy
from .address import SocketAddress, AgentAddress
from .logging import Logger, run_logger
<commit_msg>Make osBrain compatible with latest Pyro4 changes<commit_after>import Pyro4
Pyro4.config.SERIALIZERS_ACCEPTED.add('pickle')
Pyro4.config.SERIALIZER = 'pickle'
Pyro4.config.THREADPOOL_SIZE = 16
Pyro4.config.SERVERTYPE = 'multiplex'
Pyro4.config.REQUIRE_EXPOSE = False
# TODO: should we set COMMTIMEOUT as well?
Pyro4.config.DETAILED_TRACEBACK = True
__version__ = '0.2.1'
from .core import BaseAgent, Agent, run_agent
from .nameserver import random_nameserver, run_nameserver
from .proxy import Proxy, NSProxy
from .address import SocketAddress, AgentAddress
from .logging import Logger, run_logger
|
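The osBrain record sets Pyro4.config.REQUIRE_EXPOSE = False, the global opt-out added after Pyro4 began requiring methods to be explicitly exposed before remote calls may reach them. The per-class opt-in route looks like the sketch below (server name and method are made up; this shows the decorator alternative, not what the repo does):

import Pyro4

@Pyro4.expose                    # explicit opt-in, instead of REQUIRE_EXPOSE = False
class EchoServer(object):
    def echo(self, message):
        return message

daemon = Pyro4.Daemon()
uri = daemon.register(EchoServer)
print(uri)                       # clients connect via Pyro4.Proxy(uri)
daemon.requestLoop()

Flipping the config flag keeps every existing agent method remotely callable without touching each class, at the cost of exposing everything by default.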
e12de19ae37a6f3fa0335ecfd0db00b18badf730
|
website/files/utils.py
|
website/files/utils.py
|
def copy_files(src, target_node, parent=None, name=None):
"""Copy the files from src to the target node
:param Folder src: The source to copy children from
:param Node target_node: The node to copy files to
    :param Folder parent: The parent to attach the clone of src to, if applicable
"""
assert not parent or not parent.is_file, 'Parent must be a folder'
cloned = src.clone()
cloned.parent = parent
cloned.target = target_node
cloned.name = name or cloned.name
cloned.copied_from = src
cloned.save()
if src.is_file and src.versions.exists():
fileversions = src.versions.select_related('region').order_by('-created')
most_recent_fileversion = fileversions.first()
if most_recent_fileversion.region != target_node.osfstorage_region:
# add all original version except the most recent
cloned.versions.add(*fileversions[1:])
# create a new most recent version and update the region before adding
new_fileversion = most_recent_fileversion.clone()
new_fileversion.region = target_node.osfstorage_region
new_fileversion.save()
cloned.versions.add(new_fileversion)
else:
cloned.versions.add(*src.versions.all())
if not src.is_file:
for child in src.children:
copy_files(child, target_node, parent=cloned)
return cloned
|
def copy_files(src, target_node, parent=None, name=None):
"""Copy the files from src to the target node
:param Folder src: The source to copy children from
:param Node target_node: The node to copy files to
    :param Folder parent: The parent to attach the clone of src to, if applicable
"""
assert not parent or not parent.is_file, 'Parent must be a folder'
cloned = src.clone()
cloned.parent = parent
cloned.target = target_node
cloned.name = name or cloned.name
cloned.copied_from = src
cloned.save()
if src.is_file and src.versions.exists():
fileversions = src.versions.select_related('region').order_by('-created')
most_recent_fileversion = fileversions.first()
if most_recent_fileversion.region and most_recent_fileversion.region != target_node.osfstorage_region:
# add all original version except the most recent
cloned.versions.add(*fileversions[1:])
# create a new most recent version and update the region before adding
new_fileversion = most_recent_fileversion.clone()
new_fileversion.region = target_node.osfstorage_region
new_fileversion.save()
cloned.versions.add(new_fileversion)
else:
cloned.versions.add(*src.versions.all())
if not src.is_file:
for child in src.children:
copy_files(child, target_node, parent=cloned)
return cloned
|
Check for a fileversion region before copying them to the source region
|
Check for a fileversion region before copying them to the source region
This is mainly for test fileversions that are created without regions by default
|
Python
|
apache-2.0
|
aaxelb/osf.io,felliott/osf.io,brianjgeiger/osf.io,HalcyonChimera/osf.io,HalcyonChimera/osf.io,baylee-d/osf.io,mattclark/osf.io,saradbowman/osf.io,Johnetordoff/osf.io,brianjgeiger/osf.io,cslzchen/osf.io,cslzchen/osf.io,mfraezz/osf.io,HalcyonChimera/osf.io,adlius/osf.io,felliott/osf.io,Johnetordoff/osf.io,caseyrollins/osf.io,mattclark/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,adlius/osf.io,felliott/osf.io,adlius/osf.io,brianjgeiger/osf.io,mfraezz/osf.io,baylee-d/osf.io,mattclark/osf.io,felliott/osf.io,HalcyonChimera/osf.io,baylee-d/osf.io,mfraezz/osf.io,caseyrollins/osf.io,pattisdr/osf.io,Johnetordoff/osf.io,brianjgeiger/osf.io,aaxelb/osf.io,aaxelb/osf.io,pattisdr/osf.io,pattisdr/osf.io,saradbowman/osf.io,cslzchen/osf.io,adlius/osf.io,CenterForOpenScience/osf.io,cslzchen/osf.io,caseyrollins/osf.io,CenterForOpenScience/osf.io,CenterForOpenScience/osf.io,aaxelb/osf.io,Johnetordoff/osf.io
|
def copy_files(src, target_node, parent=None, name=None):
"""Copy the files from src to the target node
:param Folder src: The source to copy children from
:param Node target_node: The node to copy files to
    :param Folder parent: The parent to attach the clone of src to, if applicable
"""
assert not parent or not parent.is_file, 'Parent must be a folder'
cloned = src.clone()
cloned.parent = parent
cloned.target = target_node
cloned.name = name or cloned.name
cloned.copied_from = src
cloned.save()
if src.is_file and src.versions.exists():
fileversions = src.versions.select_related('region').order_by('-created')
most_recent_fileversion = fileversions.first()
if most_recent_fileversion.region != target_node.osfstorage_region:
# add all original version except the most recent
cloned.versions.add(*fileversions[1:])
# create a new most recent version and update the region before adding
new_fileversion = most_recent_fileversion.clone()
new_fileversion.region = target_node.osfstorage_region
new_fileversion.save()
cloned.versions.add(new_fileversion)
else:
cloned.versions.add(*src.versions.all())
if not src.is_file:
for child in src.children:
copy_files(child, target_node, parent=cloned)
return cloned
Check for a fileversion region before copying them to the source region
This is mainly for test fileversions that are created without regions by default
|
def copy_files(src, target_node, parent=None, name=None):
"""Copy the files from src to the target node
:param Folder src: The source to copy children from
:param Node target_node: The node to copy files to
    :param Folder parent: The parent to attach the clone of src to, if applicable
"""
assert not parent or not parent.is_file, 'Parent must be a folder'
cloned = src.clone()
cloned.parent = parent
cloned.target = target_node
cloned.name = name or cloned.name
cloned.copied_from = src
cloned.save()
if src.is_file and src.versions.exists():
fileversions = src.versions.select_related('region').order_by('-created')
most_recent_fileversion = fileversions.first()
if most_recent_fileversion.region and most_recent_fileversion.region != target_node.osfstorage_region:
# add all original version except the most recent
cloned.versions.add(*fileversions[1:])
# create a new most recent version and update the region before adding
new_fileversion = most_recent_fileversion.clone()
new_fileversion.region = target_node.osfstorage_region
new_fileversion.save()
cloned.versions.add(new_fileversion)
else:
cloned.versions.add(*src.versions.all())
if not src.is_file:
for child in src.children:
copy_files(child, target_node, parent=cloned)
return cloned
|
<commit_before>
def copy_files(src, target_node, parent=None, name=None):
"""Copy the files from src to the target node
:param Folder src: The source to copy children from
:param Node target_node: The node to copy files to
    :param Folder parent: The parent to attach the clone of src to, if applicable
"""
assert not parent or not parent.is_file, 'Parent must be a folder'
cloned = src.clone()
cloned.parent = parent
cloned.target = target_node
cloned.name = name or cloned.name
cloned.copied_from = src
cloned.save()
if src.is_file and src.versions.exists():
fileversions = src.versions.select_related('region').order_by('-created')
most_recent_fileversion = fileversions.first()
if most_recent_fileversion.region != target_node.osfstorage_region:
# add all original version except the most recent
cloned.versions.add(*fileversions[1:])
# create a new most recent version and update the region before adding
new_fileversion = most_recent_fileversion.clone()
new_fileversion.region = target_node.osfstorage_region
new_fileversion.save()
cloned.versions.add(new_fileversion)
else:
cloned.versions.add(*src.versions.all())
if not src.is_file:
for child in src.children:
copy_files(child, target_node, parent=cloned)
return cloned
<commit_msg>Check for a fileversion region before copying them to the source region
This is mainly for test fileversions that are created without regions by default<commit_after>
|
def copy_files(src, target_node, parent=None, name=None):
"""Copy the files from src to the target node
:param Folder src: The source to copy children from
:param Node target_node: The node to copy files to
    :param Folder parent: The parent to attach the clone of src to, if applicable
"""
assert not parent or not parent.is_file, 'Parent must be a folder'
cloned = src.clone()
cloned.parent = parent
cloned.target = target_node
cloned.name = name or cloned.name
cloned.copied_from = src
cloned.save()
if src.is_file and src.versions.exists():
fileversions = src.versions.select_related('region').order_by('-created')
most_recent_fileversion = fileversions.first()
if most_recent_fileversion.region and most_recent_fileversion.region != target_node.osfstorage_region:
# add all original version except the most recent
cloned.versions.add(*fileversions[1:])
# create a new most recent version and update the region before adding
new_fileversion = most_recent_fileversion.clone()
new_fileversion.region = target_node.osfstorage_region
new_fileversion.save()
cloned.versions.add(new_fileversion)
else:
cloned.versions.add(*src.versions.all())
if not src.is_file:
for child in src.children:
copy_files(child, target_node, parent=cloned)
return cloned
|
def copy_files(src, target_node, parent=None, name=None):
"""Copy the files from src to the target node
:param Folder src: The source to copy children from
:param Node target_node: The node to copy files to
    :param Folder parent: The parent to attach the clone of src to, if applicable
"""
assert not parent or not parent.is_file, 'Parent must be a folder'
cloned = src.clone()
cloned.parent = parent
cloned.target = target_node
cloned.name = name or cloned.name
cloned.copied_from = src
cloned.save()
if src.is_file and src.versions.exists():
fileversions = src.versions.select_related('region').order_by('-created')
most_recent_fileversion = fileversions.first()
if most_recent_fileversion.region != target_node.osfstorage_region:
# add all original version except the most recent
cloned.versions.add(*fileversions[1:])
# create a new most recent version and update the region before adding
new_fileversion = most_recent_fileversion.clone()
new_fileversion.region = target_node.osfstorage_region
new_fileversion.save()
cloned.versions.add(new_fileversion)
else:
cloned.versions.add(*src.versions.all())
if not src.is_file:
for child in src.children:
copy_files(child, target_node, parent=cloned)
return cloned
Check for a fileversion region before copying them to the source region
This is mainly for test fileversions that are created without regions by default
def copy_files(src, target_node, parent=None, name=None):
"""Copy the files from src to the target node
:param Folder src: The source to copy children from
:param Node target_node: The node to copy files to
:param Folder parent: The parent to attach the clone of src to, if applicable
"""
assert not parent or not parent.is_file, 'Parent must be a folder'
cloned = src.clone()
cloned.parent = parent
cloned.target = target_node
cloned.name = name or cloned.name
cloned.copied_from = src
cloned.save()
if src.is_file and src.versions.exists():
fileversions = src.versions.select_related('region').order_by('-created')
most_recent_fileversion = fileversions.first()
if most_recent_fileversion.region and most_recent_fileversion.region != target_node.osfstorage_region:
# add all original versions except the most recent
cloned.versions.add(*fileversions[1:])
# create a new most recent version and update the region before adding
new_fileversion = most_recent_fileversion.clone()
new_fileversion.region = target_node.osfstorage_region
new_fileversion.save()
cloned.versions.add(new_fileversion)
else:
cloned.versions.add(*src.versions.all())
if not src.is_file:
for child in src.children:
copy_files(child, target_node, parent=cloned)
return cloned
|
<commit_before>
def copy_files(src, target_node, parent=None, name=None):
"""Copy the files from src to the target node
:param Folder src: The source to copy children from
:param Node target_node: The node to copy files to
:param Folder parent: The parent to attach the clone of src to, if applicable
"""
assert not parent or not parent.is_file, 'Parent must be a folder'
cloned = src.clone()
cloned.parent = parent
cloned.target = target_node
cloned.name = name or cloned.name
cloned.copied_from = src
cloned.save()
if src.is_file and src.versions.exists():
fileversions = src.versions.select_related('region').order_by('-created')
most_recent_fileversion = fileversions.first()
if most_recent_fileversion.region != target_node.osfstorage_region:
# add all original versions except the most recent
cloned.versions.add(*fileversions[1:])
# create a new most recent version and update the region before adding
new_fileversion = most_recent_fileversion.clone()
new_fileversion.region = target_node.osfstorage_region
new_fileversion.save()
cloned.versions.add(new_fileversion)
else:
cloned.versions.add(*src.versions.all())
if not src.is_file:
for child in src.children:
copy_files(child, target_node, parent=cloned)
return cloned
<commit_msg>Check for a fileversion region before copying them to the source region
This is mainly for test fileversions that are created without regions by default<commit_after>
def copy_files(src, target_node, parent=None, name=None):
"""Copy the files from src to the target node
:param Folder src: The source to copy children from
:param Node target_node: The node to copy files to
:param Folder parent: The parent to attach the clone of src to, if applicable
"""
assert not parent or not parent.is_file, 'Parent must be a folder'
cloned = src.clone()
cloned.parent = parent
cloned.target = target_node
cloned.name = name or cloned.name
cloned.copied_from = src
cloned.save()
if src.is_file and src.versions.exists():
fileversions = src.versions.select_related('region').order_by('-created')
most_recent_fileversion = fileversions.first()
if most_recent_fileversion.region and most_recent_fileversion.region != target_node.osfstorage_region:
# add all original versions except the most recent
cloned.versions.add(*fileversions[1:])
# create a new most recent version and update the region before adding
new_fileversion = most_recent_fileversion.clone()
new_fileversion.region = target_node.osfstorage_region
new_fileversion.save()
cloned.versions.add(new_fileversion)
else:
cloned.versions.add(*src.versions.all())
if not src.is_file:
for child in src.children:
copy_files(child, target_node, parent=cloned)
return cloned
|
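Editor's note: a minimal sketch of the guard this commit introduces. The class and region names below are illustrative stand-ins, not OSF models; the point is that the added `and` short-circuits on a missing region, so unregioned test fileversions are copied verbatim instead of being re-cloned.

class FakeVersion:
    def __init__(self, region=None):
        self.region = region

def needs_region_swap(most_recent, target_region):
    # Mirrors the fixed condition: a falsy (None) region never triggers a swap.
    return bool(most_recent.region and most_recent.region != target_region)

assert needs_region_swap(FakeVersion("eu-west"), "us-east")
assert not needs_region_swap(FakeVersion(None), "us-east")      # test-fixture case
assert not needs_region_swap(FakeVersion("us-east"), "us-east")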
7e046e4999959e0cfa3527780ca04e581698b328
|
cbagent/collectors/libstats/psstats.py
|
cbagent/collectors/libstats/psstats.py
|
from fabric.api import run
from cbagent.collectors.libstats.remotestats import (
RemoteStats, multi_node_task)
class PSStats(RemoteStats):
METRICS = (
("rss", 1024), # kB -> B
("vsize", 1024),
)
def __init__(self, hosts, user, password):
super(PSStats, self).__init__(hosts, user, password)
self.ps_cmd = "ps -eo pid,rss,vsize,comm | " \
"grep {} | grep -v grep | sort -n -k 2 | tail -n 1"
self.top_cmd = "top -bn2d1 -p {} | grep {}"
@multi_node_task
def get_samples(self, process):
samples = {}
stdout = run(self.ps_cmd.format(process))
if stdout:
for i, value in enumerate(stdout.split()[1:1+len(self.METRICS)]):
metric, multiplier = self.METRICS[i]
title = "{}_{}".format(process, metric)
samples[title] = float(value) * multiplier
pid = stdout.split()[0]
else:
return samples
stdout = run(self.top_cmd.format(pid, process))
if stdout:
title = "{}_cpu".format(process)
samples[title] = float(stdout.split()[8])
return samples
|
from fabric.api import run
from cbagent.collectors.libstats.remotestats import (
RemoteStats, multi_node_task)
class PSStats(RemoteStats):
METRICS = (
("rss", 1024), # kB -> B
("vsize", 1024),
)
def __init__(self, hosts, user, password):
super(PSStats, self).__init__(hosts, user, password)
self.ps_cmd = "ps -eo pid,rss,vsize,comm | " \
"grep {} | grep -v grep | sort -n -k 2 | tail -n 1"
self.top_cmd = "top -bn2 -d1 -p {} | grep {}"
@multi_node_task
def get_samples(self, process):
samples = {}
stdout = run(self.ps_cmd.format(process))
if stdout:
for i, value in enumerate(stdout.split()[1:1+len(self.METRICS)]):
metric, multiplier = self.METRICS[i]
title = "{}_{}".format(process, metric)
samples[title] = float(value) * multiplier
pid = stdout.split()[0]
else:
return samples
stdout = run(self.top_cmd.format(pid, process))
if stdout:
title = "{}_cpu".format(process)
samples[title] = float(stdout.split()[8])
return samples
|
Make `top` work on Ubuntu 12.04
|
MB-13234: Make `top` work on Ubuntu 12.04
The `top` in Ubuntu 12.04 seems to do different command line parsing
than the one on CentOS. Separating the parameters should work on
both.
Change-Id: I8f9ec022bcb8e0158316fdaac226acbfb0d9d004
Reviewed-on: http://review.couchbase.org/50126
Reviewed-by: Dave Rigby <a09264da4832c7ff1d3bf1608a19f4b870f93750@couchbase.com>
Reviewed-by: Michael Wiederhold <a17fed27eaa842282862ff7c1b9c8395a26ac320@couchbase.com>
Tested-by: Volker Mische <fb414f8ac0dbbf87663550ae4ef5fc95b1041941@gmail.com>
|
Python
|
apache-2.0
|
mikewied/cbagent,couchbase/cbagent
|
from fabric.api import run
from cbagent.collectors.libstats.remotestats import (
RemoteStats, multi_node_task)
class PSStats(RemoteStats):
METRICS = (
("rss", 1024), # kB -> B
("vsize", 1024),
)
def __init__(self, hosts, user, password):
super(PSStats, self).__init__(hosts, user, password)
self.ps_cmd = "ps -eo pid,rss,vsize,comm | " \
"grep {} | grep -v grep | sort -n -k 2 | tail -n 1"
self.top_cmd = "top -bn2d1 -p {} | grep {}"
@multi_node_task
def get_samples(self, process):
samples = {}
stdout = run(self.ps_cmd.format(process))
if stdout:
for i, value in enumerate(stdout.split()[1:1+len(self.METRICS)]):
metric, multiplier = self.METRICS[i]
title = "{}_{}".format(process, metric)
samples[title] = float(value) * multiplier
pid = stdout.split()[0]
else:
return samples
stdout = run(self.top_cmd.format(pid, process))
if stdout:
title = "{}_cpu".format(process)
samples[title] = float(stdout.split()[8])
return samples
MB-13234: Make `top` work on Ubuntu 12.04
The `top` in Ubuntu 12.04 seems to do different command line parsing
than the one on CentOS. Separating the parameters should work on
both.
Change-Id: I8f9ec022bcb8e0158316fdaac226acbfb0d9d004
Reviewed-on: http://review.couchbase.org/50126
Reviewed-by: Dave Rigby <a09264da4832c7ff1d3bf1608a19f4b870f93750@couchbase.com>
Reviewed-by: Michael Wiederhold <a17fed27eaa842282862ff7c1b9c8395a26ac320@couchbase.com>
Tested-by: Volker Mische <fb414f8ac0dbbf87663550ae4ef5fc95b1041941@gmail.com>
|
from fabric.api import run
from cbagent.collectors.libstats.remotestats import (
RemoteStats, multi_node_task)
class PSStats(RemoteStats):
METRICS = (
("rss", 1024), # kB -> B
("vsize", 1024),
)
def __init__(self, hosts, user, password):
super(PSStats, self).__init__(hosts, user, password)
self.ps_cmd = "ps -eo pid,rss,vsize,comm | " \
"grep {} | grep -v grep | sort -n -k 2 | tail -n 1"
self.top_cmd = "top -bn2 -d1 -p {} | grep {}"
@multi_node_task
def get_samples(self, process):
samples = {}
stdout = run(self.ps_cmd.format(process))
if stdout:
for i, value in enumerate(stdout.split()[1:1+len(self.METRICS)]):
metric, multiplier = self.METRICS[i]
title = "{}_{}".format(process, metric)
samples[title] = float(value) * multiplier
pid = stdout.split()[0]
else:
return samples
stdout = run(self.top_cmd.format(pid, process))
if stdout:
title = "{}_cpu".format(process)
samples[title] = float(stdout.split()[8])
return samples
|
<commit_before>from fabric.api import run
from cbagent.collectors.libstats.remotestats import (
RemoteStats, multi_node_task)
class PSStats(RemoteStats):
METRICS = (
("rss", 1024), # kB -> B
("vsize", 1024),
)
def __init__(self, hosts, user, password):
super(PSStats, self).__init__(hosts, user, password)
self.ps_cmd = "ps -eo pid,rss,vsize,comm | " \
"grep {} | grep -v grep | sort -n -k 2 | tail -n 1"
self.top_cmd = "top -bn2d1 -p {} | grep {}"
@multi_node_task
def get_samples(self, process):
samples = {}
stdout = run(self.ps_cmd.format(process))
if stdout:
for i, value in enumerate(stdout.split()[1:1+len(self.METRICS)]):
metric, multiplier = self.METRICS[i]
title = "{}_{}".format(process, metric)
samples[title] = float(value) * multiplier
pid = stdout.split()[0]
else:
return samples
stdout = run(self.top_cmd.format(pid, process))
if stdout:
title = "{}_cpu".format(process)
samples[title] = float(stdout.split()[8])
return samples
<commit_msg>MB-13234: Make `top` work on Ubuntu 12.04
The `top` in Ubuntu 12.04 seems to do different command line parsing
than the one on CentOS. Separating the parameters should work on
both.
Change-Id: I8f9ec022bcb8e0158316fdaac226acbfb0d9d004
Reviewed-on: http://review.couchbase.org/50126
Reviewed-by: Dave Rigby <a09264da4832c7ff1d3bf1608a19f4b870f93750@couchbase.com>
Reviewed-by: Michael Wiederhold <a17fed27eaa842282862ff7c1b9c8395a26ac320@couchbase.com>
Tested-by: Volker Mische <fb414f8ac0dbbf87663550ae4ef5fc95b1041941@gmail.com><commit_after>
|
from fabric.api import run
from cbagent.collectors.libstats.remotestats import (
RemoteStats, multi_node_task)
class PSStats(RemoteStats):
METRICS = (
("rss", 1024), # kB -> B
("vsize", 1024),
)
def __init__(self, hosts, user, password):
super(PSStats, self).__init__(hosts, user, password)
self.ps_cmd = "ps -eo pid,rss,vsize,comm | " \
"grep {} | grep -v grep | sort -n -k 2 | tail -n 1"
self.top_cmd = "top -bn2 -d1 -p {} | grep {}"
@multi_node_task
def get_samples(self, process):
samples = {}
stdout = run(self.ps_cmd.format(process))
if stdout:
for i, value in enumerate(stdout.split()[1:1+len(self.METRICS)]):
metric, multiplier = self.METRICS[i]
title = "{}_{}".format(process, metric)
samples[title] = float(value) * multiplier
pid = stdout.split()[0]
else:
return samples
stdout = run(self.top_cmd.format(pid, process))
if stdout:
title = "{}_cpu".format(process)
samples[title] = float(stdout.split()[8])
return samples
|
from fabric.api import run
from cbagent.collectors.libstats.remotestats import (
RemoteStats, multi_node_task)
class PSStats(RemoteStats):
METRICS = (
("rss", 1024), # kB -> B
("vsize", 1024),
)
def __init__(self, hosts, user, password):
super(PSStats, self).__init__(hosts, user, password)
self.ps_cmd = "ps -eo pid,rss,vsize,comm | " \
"grep {} | grep -v grep | sort -n -k 2 | tail -n 1"
self.top_cmd = "top -bn2d1 -p {} | grep {}"
@multi_node_task
def get_samples(self, process):
samples = {}
stdout = run(self.ps_cmd.format(process))
if stdout:
for i, value in enumerate(stdout.split()[1:1+len(self.METRICS)]):
metric, multiplier = self.METRICS[i]
title = "{}_{}".format(process, metric)
samples[title] = float(value) * multiplier
pid = stdout.split()[0]
else:
return samples
stdout = run(self.top_cmd.format(pid, process))
if stdout:
title = "{}_cpu".format(process)
samples[title] = float(stdout.split()[8])
return samples
MB-13234: Make `top` work on Ubuntu 12.04
The `top` in Ubuntu 12.04 seems to do different command line parsing
than the one on CentOS. Separating the parameters should work on
both.
Change-Id: I8f9ec022bcb8e0158316fdaac226acbfb0d9d004
Reviewed-on: http://review.couchbase.org/50126
Reviewed-by: Dave Rigby <a09264da4832c7ff1d3bf1608a19f4b870f93750@couchbase.com>
Reviewed-by: Michael Wiederhold <a17fed27eaa842282862ff7c1b9c8395a26ac320@couchbase.com>
Tested-by: Volker Mische <fb414f8ac0dbbf87663550ae4ef5fc95b1041941@gmail.com>from fabric.api import run
from cbagent.collectors.libstats.remotestats import (
RemoteStats, multi_node_task)
class PSStats(RemoteStats):
METRICS = (
("rss", 1024), # kB -> B
("vsize", 1024),
)
def __init__(self, hosts, user, password):
super(PSStats, self).__init__(hosts, user, password)
self.ps_cmd = "ps -eo pid,rss,vsize,comm | " \
"grep {} | grep -v grep | sort -n -k 2 | tail -n 1"
self.top_cmd = "top -bn2 -d1 -p {} | grep {}"
@multi_node_task
def get_samples(self, process):
samples = {}
stdout = run(self.ps_cmd.format(process))
if stdout:
for i, value in enumerate(stdout.split()[1:1+len(self.METRICS)]):
metric, multiplier = self.METRICS[i]
title = "{}_{}".format(process, metric)
samples[title] = float(value) * multiplier
pid = stdout.split()[0]
else:
return samples
stdout = run(self.top_cmd.format(pid, process))
if stdout:
title = "{}_cpu".format(process)
samples[title] = float(stdout.split()[8])
return samples
|
<commit_before>from fabric.api import run
from cbagent.collectors.libstats.remotestats import (
RemoteStats, multi_node_task)
class PSStats(RemoteStats):
METRICS = (
("rss", 1024), # kB -> B
("vsize", 1024),
)
def __init__(self, hosts, user, password):
super(PSStats, self).__init__(hosts, user, password)
self.ps_cmd = "ps -eo pid,rss,vsize,comm | " \
"grep {} | grep -v grep | sort -n -k 2 | tail -n 1"
self.top_cmd = "top -bn2d1 -p {} | grep {}"
@multi_node_task
def get_samples(self, process):
samples = {}
stdout = run(self.ps_cmd.format(process))
if stdout:
for i, value in enumerate(stdout.split()[1:1+len(self.METRICS)]):
metric, multiplier = self.METRICS[i]
title = "{}_{}".format(process, metric)
samples[title] = float(value) * multiplier
pid = stdout.split()[0]
else:
return samples
stdout = run(self.top_cmd.format(pid, process))
if stdout:
title = "{}_cpu".format(process)
samples[title] = float(stdout.split()[8])
return samples
<commit_msg>MB-13234: Make `top` work on Ubuntu 12.04
The `top` in Ubuntu 12.04 seems to do different command line parsing
than the one on CentOS. Separating the parameters should work on
both.
Change-Id: I8f9ec022bcb8e0158316fdaac226acbfb0d9d004
Reviewed-on: http://review.couchbase.org/50126
Reviewed-by: Dave Rigby <a09264da4832c7ff1d3bf1608a19f4b870f93750@couchbase.com>
Reviewed-by: Michael Wiederhold <a17fed27eaa842282862ff7c1b9c8395a26ac320@couchbase.com>
Tested-by: Volker Mische <fb414f8ac0dbbf87663550ae4ef5fc95b1041941@gmail.com><commit_after>from fabric.api import run
from cbagent.collectors.libstats.remotestats import (
RemoteStats, multi_node_task)
class PSStats(RemoteStats):
METRICS = (
("rss", 1024), # kB -> B
("vsize", 1024),
)
def __init__(self, hosts, user, password):
super(PSStats, self).__init__(hosts, user, password)
self.ps_cmd = "ps -eo pid,rss,vsize,comm | " \
"grep {} | grep -v grep | sort -n -k 2 | tail -n 1"
self.top_cmd = "top -bn2 -d1 -p {} | grep {}"
@multi_node_task
def get_samples(self, process):
samples = {}
stdout = run(self.ps_cmd.format(process))
if stdout:
for i, value in enumerate(stdout.split()[1:1+len(self.METRICS)]):
metric, multiplier = self.METRICS[i]
title = "{}_{}".format(process, metric)
samples[title] = float(value) * multiplier
pid = stdout.split()[0]
else:
return samples
stdout = run(self.top_cmd.format(pid, process))
if stdout:
title = "{}_cpu".format(process)
samples[title] = float(stdout.split()[8])
return samples
|
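Editor's note: an illustrative check of the separated flags; the PID and process name below are made up. Bundled short options like `-bn2d1` are parsed differently by the two `top` implementations, while `-bn2 -d1` is accepted on both CentOS and Ubuntu 12.04. The %CPU figure is then read from the ninth whitespace-separated field, exactly as `get_samples()` does with `split()[8]`.

top_cmd = "top -bn2 -d1 -p {} | grep {}"
print(top_cmd.format(1234, "beam.smp"))     # top -bn2 -d1 -p 1234 | grep beam.smp

# A typical top(1) process line and the column get_samples() extracts:
line = "1234 couchbase 20 0 1000m 500m 10m S 42.0 12.5 1:23.45 beam.smp"
assert float(line.split()[8]) == 42.0       # %CPU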
b4482c257c5333f902569b40bf4e61c8003dbacc
|
www/config/__init__.py
|
www/config/__init__.py
|
from __future__ import unicode_literals
try:
from local import *
except ImportError:
try:
from dev import *
except ImportError:
pass
try:
DEBUG
TEMPLATE_DEBUG
DATABASES['default']
CELERY_BROKER
except NameError:
raise NameError('Required config values not found. Abort !')
|
from __future__ import unicode_literals, absolute_import
try:
from .local import *
except ImportError:
try:
from dev import *
except ImportError:
pass
try:
DEBUG
TEMPLATE_DEBUG
DATABASES['default']
CELERY_BROKER
except NameError:
raise NameError('Required config values not found. Abort !')
|
Use absolute import to correctly import local.py config file
|
Use absolute import to correctly import local.py config file
|
Python
|
agpl-3.0
|
UrLab/beta402,UrLab/beta402,UrLab/DocHub,UrLab/DocHub,UrLab/beta402,UrLab/DocHub,UrLab/DocHub
|
from __future__ import unicode_literals
try:
from local import *
except ImportError:
try:
from dev import *
except ImportError:
pass
try:
DEBUG
TEMPLATE_DEBUG
DATABASES['default']
CELERY_BROKER
except NameError:
raise NameError('Required config values not found. Abort !')Use absolute import to correctly import local.py config file
|
from __future__ import unicode_literals, absolute_import
try:
from .local import *
except ImportError:
try:
from dev import *
except ImportError:
pass
try:
DEBUG
TEMPLATE_DEBUG
DATABASES['default']
CELERY_BROKER
except NameError:
raise NameError('Required config values not found. Abort !')
|
<commit_before>from __future__ import unicode_literals
try:
from local import *
except ImportError:
try:
from dev import *
except ImportError:
pass
try:
DEBUG
TEMPLATE_DEBUG
DATABASES['default']
CELERY_BROKER
except NameError:
raise NameError('Required config values not found. Abort !')<commit_msg>Use absolute import to correctly import local.py config file<commit_after>
|
from __future__ import unicode_literals, absolute_import
try:
from .local import *
except ImportError:
try:
from dev import *
except ImportError:
pass
try:
DEBUG
TEMPLATE_DEBUG
DATABASES['default']
CELERY_BROKER
except NameError:
raise NameError('Required config values not found. Abort !')
|
from __future__ import unicode_literals
try:
from local import *
except ImportError:
try:
from dev import *
except ImportError:
pass
try:
DEBUG
TEMPLATE_DEBUG
DATABASES['default']
CELERY_BROKER
except NameError:
raise NameError('Required config values not found. Abort !')Use absolute import to correctly import local.py config filefrom __future__ import unicode_literals, absolute_import
try:
from .local import *
except ImportError:
try:
from dev import *
except ImportError:
pass
try:
DEBUG
TEMPLATE_DEBUG
DATABASES['default']
CELERY_BROKER
except NameError:
raise NameError('Required config values not found. Abort !')
|
<commit_before>from __future__ import unicode_literals
try:
from local import *
except ImportError:
try:
from dev import *
except ImportError:
pass
try:
DEBUG
TEMPLATE_DEBUG
DATABASES['default']
CELERY_BROKER
except NameError:
raise NameError('Required config values not found. Abort !')<commit_msg>Use absolute import to correctly import local.py config file<commit_after>from __future__ import unicode_literals, absolute_import
try:
from .local import *
except ImportError:
try:
from dev import *
except ImportError:
pass
try:
DEBUG
TEMPLATE_DEBUG
DATABASES['default']
CELERY_BROKER
except NameError:
raise NameError('Required config values not found. Abort !')
|
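Editor's note: a small reproduction of the failure this commit fixes, assuming the module lives in a `config` package next to a `local.py`. Under `absolute_import` (and always on Python 3), the bare form searches for a top-level `local` module, raises ImportError, and the except clause silently skips the local settings.

from __future__ import absolute_import

# Broken (pre-commit) form: looks for a top-level "local" module,
# so the local settings were silently skipped by the except clause.
#     from local import *
# Fixed form: the explicit relative import resolves to config/local.py.
from .local import *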
bf7626df74d78f2811f20173fb21c36a96cc9500
|
packages/gtk-doc.py
|
packages/gtk-doc.py
|
GnomePackage ('gtk-doc', version_major = '1', version_minor = '17', sources = [
'http://ftp.gnome.org/pub/gnome/sources/%{name}/%{version}/%{name}-%{version}.tar.bz2'
])
|
GnomePackage ('gtk-doc', version_major = '1', version_minor = '17', configure_flags = ['--with-xml-catalog="%{prefix}/etc/xml/catalog"'], sources = [
'http://ftp.gnome.org/pub/gnome/sources/%{name}/%{version}/%{name}-%{version}.tar.bz2'
])
|
Set xml catalog to the right prefix.
|
Set xml catalog to the right prefix.
|
Python
|
mit
|
BansheeMediaPlayer/bockbuild,mono/bockbuild,mono/bockbuild,BansheeMediaPlayer/bockbuild,BansheeMediaPlayer/bockbuild
|
GnomePackage ('gtk-doc', version_major = '1', version_minor = '17', sources = [
'http://ftp.gnome.org/pub/gnome/sources/%{name}/%{version}/%{name}-%{version}.tar.bz2'
])
Set xml catalog to the right prefix.
|
GnomePackage ('gtk-doc', version_major = '1', version_minor = '17', configure_flags = ['--with-xml-catalog="%{prefix}/etc/xml/catalog"'], sources = [
'http://ftp.gnome.org/pub/gnome/sources/%{name}/%{version}/%{name}-%{version}.tar.bz2'
])
|
<commit_before>GnomePackage ('gtk-doc', version_major = '1', version_minor = '17', sources = [
'http://ftp.gnome.org/pub/gnome/sources/%{name}/%{version}/%{name}-%{version}.tar.bz2'
])
<commit_msg>Set xml catalog to the right prefix.<commit_after>
|
GnomePackage ('gtk-doc', version_major = '1', version_minor = '17', configure_flags = ['--with-xml-catalog="%{prefix}/etc/xml/catalog"'], sources = [
'http://ftp.gnome.org/pub/gnome/sources/%{name}/%{version}/%{name}-%{version}.tar.bz2'
])
|
GnomePackage ('gtk-doc', version_major = '1', version_minor = '17', sources = [
'http://ftp.gnome.org/pub/gnome/sources/%{name}/%{version}/%{name}-%{version}.tar.bz2'
])
Set xml catalog to the right prefix.GnomePackage ('gtk-doc', version_major = '1', version_minor = '17', configure_flags = ['--with-xml-catalog="%{prefix}/etc/xml/catalog"'], sources = [
'http://ftp.gnome.org/pub/gnome/sources/%{name}/%{version}/%{name}-%{version}.tar.bz2'
])
|
<commit_before>GnomePackage ('gtk-doc', version_major = '1', version_minor = '17', sources = [
'http://ftp.gnome.org/pub/gnome/sources/%{name}/%{version}/%{name}-%{version}.tar.bz2'
])
<commit_msg>Set xml catalog to the right prefix.<commit_after>GnomePackage ('gtk-doc', version_major = '1', version_minor = '17', configure_flags = ['--with-xml-catalog="%{prefix}/etc/xml/catalog"'], sources = [
'http://ftp.gnome.org/pub/gnome/sources/%{name}/%{version}/%{name}-%{version}.tar.bz2'
])
|
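Editor's note: the `%{...}` tokens in the new flag are bockbuild template variables. The sketch below is a naive regex stand-in for that substitution (the real expansion logic lives inside bockbuild and may differ), just to show the intended result.

import re

def expand(template, variables):
    # Replace %{name} placeholders with values from the variable table.
    return re.sub(r"%\{(\w+)\}", lambda m: str(variables[m.group(1)]), template)

flag = '--with-xml-catalog="%{prefix}/etc/xml/catalog"'
print(expand(flag, {"prefix": "/usr/local"}))
# -> --with-xml-catalog="/usr/local/etc/xml/catalog"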
5be3d07995803d81e6238a561c772e856b81a367
|
icekit/templatetags/search_tags.py
|
icekit/templatetags/search_tags.py
|
from django.template import Library, Node
from django.test.client import RequestFactory
register = Library()
factory = RequestFactory()
class FakeRequestNode(Node):
def render(self, context):
req = factory.get('/')
req.notifications = []
context['request'] = req
return ''
@register.tag
def fake_request(parser, token):
"""
Create a fake request object in the context
"""
return FakeRequestNode()
|
from django.contrib.auth.models import AnonymousUser
from django.template import Library, Node
from django.test.client import RequestFactory
register = Library()
factory = RequestFactory()
class FakeRequestNode(Node):
def render(self, context):
req = factory.get('/')
req.notifications = []
req.user = AnonymousUser()
context['request'] = req
return ''
@register.tag
def fake_request(parser, token):
"""
Create a fake request object in the context
"""
return FakeRequestNode()
|
Add anonymous user to fake request object.
|
Add anonymous user to fake request object.
|
Python
|
mit
|
ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit
|
from django.template import Library, Node
from django.test.client import RequestFactory
register = Library()
factory = RequestFactory()
class FakeRequestNode(Node):
def render(self, context):
req = factory.get('/')
req.notifications = []
context['request'] = req
return ''
@register.tag
def fake_request(parser, token):
"""
Create a fake request object in the context
"""
return FakeRequestNode()
Add anonymous user to fake request object.
|
from django.contrib.auth.models import AnonymousUser
from django.template import Library, Node
from django.test.client import RequestFactory
register = Library()
factory = RequestFactory()
class FakeRequestNode(Node):
def render(self, context):
req = factory.get('/')
req.notifications = []
req.user = AnonymousUser()
context['request'] = req
return ''
@register.tag
def fake_request(parser, token):
"""
Create a fake request object in the context
"""
return FakeRequestNode()
|
<commit_before>from django.template import Library, Node
from django.test.client import RequestFactory
register = Library()
factory = RequestFactory()
class FakeRequestNode(Node):
def render(self, context):
req = factory.get('/')
req.notifications = []
context['request'] = req
return ''
@register.tag
def fake_request(parser, token):
"""
Create a fake request object in the context
"""
return FakeRequestNode()
<commit_msg>Add anonymous user to fake request object.<commit_after>
|
from django.contrib.auth.models import AnonymousUser
from django.template import Library, Node
from django.test.client import RequestFactory
register = Library()
factory = RequestFactory()
class FakeRequestNode(Node):
def render(self, context):
req = factory.get('/')
req.notifications = []
req.user = AnonymousUser()
context['request'] = req
return ''
@register.tag
def fake_request(parser, token):
"""
Create a fake request object in the context
"""
return FakeRequestNode()
|
from django.template import Library, Node
from django.test.client import RequestFactory
register = Library()
factory = RequestFactory()
class FakeRequestNode(Node):
def render(self, context):
req = factory.get('/')
req.notifications = []
context['request'] = req
return ''
@register.tag
def fake_request(parser, token):
"""
Create a fake request object in the context
"""
return FakeRequestNode()
Add anonymous user to fake request object.from django.contrib.auth.models import AnonymousUser
from django.template import Library, Node
from django.test.client import RequestFactory
register = Library()
factory = RequestFactory()
class FakeRequestNode(Node):
def render(self, context):
req = factory.get('/')
req.notifications = []
req.user = AnonymousUser()
context['request'] = req
return ''
@register.tag
def fake_request(parser, token):
"""
Create a fake request object in the context
"""
return FakeRequestNode()
|
<commit_before>from django.template import Library, Node
from django.test.client import RequestFactory
register = Library()
factory = RequestFactory()
class FakeRequestNode(Node):
def render(self, context):
req = factory.get('/')
req.notifications = []
context['request'] = req
return ''
@register.tag
def fake_request(parser, token):
"""
Create a fake request object in the context
"""
return FakeRequestNode()
<commit_msg>Add anonymous user to fake request object.<commit_after>from django.contrib.auth.models import AnonymousUser
from django.template import Library, Node
from django.test.client import RequestFactory
register = Library()
factory = RequestFactory()
class FakeRequestNode(Node):
def render(self, context):
req = factory.get('/')
req.notifications = []
req.user = AnonymousUser()
context['request'] = req
return ''
@register.tag
def fake_request(parser, token):
"""
Create a fake request object in the context
"""
return FakeRequestNode()
|
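Editor's note: a hedged usage sketch. With `AnonymousUser` attached, templates rendered behind `{% fake_request %}` (e.g. during search indexing) can safely dereference `request.user`. The template string is illustrative and assumes Django settings are already configured.

from django.template import Context, Template

tpl = Template(
    "{% load search_tags %}{% fake_request %}"
    "{{ request.user.is_authenticated }}"
)
print(tpl.render(Context({})))   # -> False: AnonymousUser instead of an AttributeError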
94b6b97dc1e706a6560092aa29cbe4e21f052924
|
froide/account/apps.py
|
froide/account/apps.py
|
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class AccountConfig(AppConfig):
name = 'froide.account'
verbose_name = _("Account")
def ready(self):
from froide.bounce.signals import user_email_bounced
user_email_bounced.connect(deactivate_user_after_bounce)
def deactivate_user_after_bounce(sender, bounce, should_deactivate=False, **kwargs):
if not should_deactivate:
return
if not bounce.user:
return
bounce.user.deactivate()
|
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
from django.urls import reverse
from .menu import menu_registry, MenuItem
class AccountConfig(AppConfig):
name = 'froide.account'
verbose_name = _("Account")
def ready(self):
from froide.bounce.signals import user_email_bounced
user_email_bounced.connect(deactivate_user_after_bounce)
menu_registry.register(get_settings_menu_item)
menu_registry.register(get_request_menu_item)
def deactivate_user_after_bounce(sender, bounce, should_deactivate=False, **kwargs):
if not should_deactivate:
return
if not bounce.user:
return
bounce.user.deactivate()
def get_request_menu_item(request):
return MenuItem(
section='before_request', order=999,
url=reverse('account-show'),
label=_('My requests')
)
def get_settings_menu_item(request):
return MenuItem(
section='after_settings', order=-1,
url=reverse('account-settings'),
label=_('Settings')
)
|
Make settings and requests menu items
|
Make settings and requests menu items
|
Python
|
mit
|
fin/froide,fin/froide,fin/froide,stefanw/froide,stefanw/froide,fin/froide,stefanw/froide,stefanw/froide,stefanw/froide
|
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class AccountConfig(AppConfig):
name = 'froide.account'
verbose_name = _("Account")
def ready(self):
from froide.bounce.signals import user_email_bounced
user_email_bounced.connect(deactivate_user_after_bounce)
def deactivate_user_after_bounce(sender, bounce, should_deactivate=False, **kwargs):
if not should_deactivate:
return
if not bounce.user:
return
bounce.user.deactivate()
Make settings and requests menu items
|
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
from django.urls import reverse
from .menu import menu_registry, MenuItem
class AccountConfig(AppConfig):
name = 'froide.account'
verbose_name = _("Account")
def ready(self):
from froide.bounce.signals import user_email_bounced
user_email_bounced.connect(deactivate_user_after_bounce)
menu_registry.register(get_settings_menu_item)
menu_registry.register(get_request_menu_item)
def deactivate_user_after_bounce(sender, bounce, should_deactivate=False, **kwargs):
if not should_deactivate:
return
if not bounce.user:
return
bounce.user.deactivate()
def get_request_menu_item(request):
return MenuItem(
section='before_request', order=999,
url=reverse('account-show'),
label=_('My requests')
)
def get_settings_menu_item(request):
return MenuItem(
section='after_settings', order=-1,
url=reverse('account-settings'),
label=_('Settings')
)
|
<commit_before>from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class AccountConfig(AppConfig):
name = 'froide.account'
verbose_name = _("Account")
def ready(self):
from froide.bounce.signals import user_email_bounced
user_email_bounced.connect(deactivate_user_after_bounce)
def deactivate_user_after_bounce(sender, bounce, should_deactivate=False, **kwargs):
if not should_deactivate:
return
if not bounce.user:
return
bounce.user.deactivate()
<commit_msg>Make settings and requests menu items<commit_after>
|
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
from django.urls import reverse
from .menu import menu_registry, MenuItem
class AccountConfig(AppConfig):
name = 'froide.account'
verbose_name = _("Account")
def ready(self):
from froide.bounce.signals import user_email_bounced
user_email_bounced.connect(deactivate_user_after_bounce)
menu_registry.register(get_settings_menu_item)
menu_registry.register(get_request_menu_item)
def deactivate_user_after_bounce(sender, bounce, should_deactivate=False, **kwargs):
if not should_deactivate:
return
if not bounce.user:
return
bounce.user.deactivate()
def get_request_menu_item(request):
return MenuItem(
section='before_request', order=999,
url=reverse('account-show'),
label=_('My requests')
)
def get_settings_menu_item(request):
return MenuItem(
section='after_settings', order=-1,
url=reverse('account-settings'),
label=_('Settings')
)
|
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class AccountConfig(AppConfig):
name = 'froide.account'
verbose_name = _("Account")
def ready(self):
from froide.bounce.signals import user_email_bounced
user_email_bounced.connect(deactivate_user_after_bounce)
def deactivate_user_after_bounce(sender, bounce, should_deactivate=False, **kwargs):
if not should_deactivate:
return
if not bounce.user:
return
bounce.user.deactivate()
Make settings and requests menu itemsfrom django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
from django.urls import reverse
from .menu import menu_registry, MenuItem
class AccountConfig(AppConfig):
name = 'froide.account'
verbose_name = _("Account")
def ready(self):
from froide.bounce.signals import user_email_bounced
user_email_bounced.connect(deactivate_user_after_bounce)
menu_registry.register(get_settings_menu_item)
menu_registry.register(get_request_menu_item)
def deactivate_user_after_bounce(sender, bounce, should_deactivate=False, **kwargs):
if not should_deactivate:
return
if not bounce.user:
return
bounce.user.deactivate()
def get_request_menu_item(request):
return MenuItem(
section='before_request', order=999,
url=reverse('account-show'),
label=_('My requests')
)
def get_settings_menu_item(request):
return MenuItem(
section='after_settings', order=-1,
url=reverse('account-settings'),
label=_('Settings')
)
|
<commit_before>from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class AccountConfig(AppConfig):
name = 'froide.account'
verbose_name = _("Account")
def ready(self):
from froide.bounce.signals import user_email_bounced
user_email_bounced.connect(deactivate_user_after_bounce)
def deactivate_user_after_bounce(sender, bounce, should_deactivate=False, **kwargs):
if not should_deactivate:
return
if not bounce.user:
return
bounce.user.deactivate()
<commit_msg>Make settings and requests menu items<commit_after>from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
from django.urls import reverse
from .menu import menu_registry, MenuItem
class AccountConfig(AppConfig):
name = 'froide.account'
verbose_name = _("Account")
def ready(self):
from froide.bounce.signals import user_email_bounced
user_email_bounced.connect(deactivate_user_after_bounce)
menu_registry.register(get_settings_menu_item)
menu_registry.register(get_request_menu_item)
def deactivate_user_after_bounce(sender, bounce, should_deactivate=False, **kwargs):
if not should_deactivate:
return
if not bounce.user:
return
bounce.user.deactivate()
def get_request_menu_item(request):
return MenuItem(
section='before_request', order=999,
url=reverse('account-show'),
label=_('My requests')
)
def get_settings_menu_item(request):
return MenuItem(
section='after_settings', order=-1,
url=reverse('account-settings'),
label=_('Settings')
)
|
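Editor's note: the registry API is only visible here through its use; below is a minimal sketch of the implied protocol (the real `froide.account.menu` module may differ). Factories take the current request and return a `MenuItem`, and the registry sorts the items for a section at render time.

class MenuItem:
    def __init__(self, section, order, url, label):
        self.section, self.order, self.url, self.label = section, order, url, label

class MenuRegistry:
    def __init__(self):
        self._factories = []

    def register(self, factory):
        # factory: callable(request) -> MenuItem
        self._factories.append(factory)

    def get_menu_items(self, request, section):
        items = (f(request) for f in self._factories)
        return sorted((i for i in items if i.section == section),
                      key=lambda i: i.order)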
2538ca440620de8ed08510e0ae902c82184a9daa
|
consts/auth_type.py
|
consts/auth_type.py
|
class AuthType(object):
"""
An auth type defines what write privileges an authenticated agent has.
"""
MATCH_VIDEO = 0
EVENT_TEAMS = 1
EVENT_MATCHES = 2
EVENT_RANKINGS = 3
EVENT_ALLIANCES = 4
EVENT_AWARDS = 5
type_names = {
MATCH_VIDEO: "match video",
EVENT_TEAMS: "event teams",
EVENT_MATCHES: "event matches",
EVENT_RANKINGS: "event rankings",
EVENT_ALLIANCES: "event alliances",
EVENT_AWARDS: "event awrads"
}
|
class AuthType(object):
"""
An auth type defines what write privileges an authenticated agent has.
"""
EVENT_DATA = 0
MATCH_VIDEO = 1
EVENT_TEAMS = 2
EVENT_MATCHES = 3
EVENT_RANKINGS = 4
EVENT_ALLIANCES = 5
EVENT_AWARDS = 6
type_names = {
EVENT_DATA: "event data",
MATCH_VIDEO: "match video",
EVENT_TEAMS: "event teams",
EVENT_MATCHES: "event matches",
EVENT_RANKINGS: "event rankings",
EVENT_ALLIANCES: "event alliances",
EVENT_AWARDS: "event awrads"
}
|
Revert "Remove AuthType.EVENT_DATA and renumber"
|
Revert "Remove AuthType.EVENT_DATA and renumber"
This reverts commit 38248941eb47a04b82fe52e1adca7387dafcf7f3.
|
Python
|
mit
|
phil-lopreiato/the-blue-alliance,fangeugene/the-blue-alliance,bdaroz/the-blue-alliance,bvisness/the-blue-alliance,tsteward/the-blue-alliance,josephbisch/the-blue-alliance,jaredhasenklein/the-blue-alliance,verycumbersome/the-blue-alliance,phil-lopreiato/the-blue-alliance,the-blue-alliance/the-blue-alliance,bdaroz/the-blue-alliance,bdaroz/the-blue-alliance,bdaroz/the-blue-alliance,josephbisch/the-blue-alliance,synth3tk/the-blue-alliance,jaredhasenklein/the-blue-alliance,josephbisch/the-blue-alliance,jaredhasenklein/the-blue-alliance,nwalters512/the-blue-alliance,bvisness/the-blue-alliance,synth3tk/the-blue-alliance,tsteward/the-blue-alliance,jaredhasenklein/the-blue-alliance,nwalters512/the-blue-alliance,fangeugene/the-blue-alliance,verycumbersome/the-blue-alliance,bvisness/the-blue-alliance,tsteward/the-blue-alliance,fangeugene/the-blue-alliance,synth3tk/the-blue-alliance,fangeugene/the-blue-alliance,phil-lopreiato/the-blue-alliance,bvisness/the-blue-alliance,tsteward/the-blue-alliance,fangeugene/the-blue-alliance,fangeugene/the-blue-alliance,tsteward/the-blue-alliance,josephbisch/the-blue-alliance,synth3tk/the-blue-alliance,synth3tk/the-blue-alliance,bdaroz/the-blue-alliance,the-blue-alliance/the-blue-alliance,nwalters512/the-blue-alliance,bvisness/the-blue-alliance,jaredhasenklein/the-blue-alliance,nwalters512/the-blue-alliance,the-blue-alliance/the-blue-alliance,the-blue-alliance/the-blue-alliance,verycumbersome/the-blue-alliance,bvisness/the-blue-alliance,verycumbersome/the-blue-alliance,synth3tk/the-blue-alliance,nwalters512/the-blue-alliance,phil-lopreiato/the-blue-alliance,phil-lopreiato/the-blue-alliance,jaredhasenklein/the-blue-alliance,nwalters512/the-blue-alliance,tsteward/the-blue-alliance,phil-lopreiato/the-blue-alliance,josephbisch/the-blue-alliance,the-blue-alliance/the-blue-alliance,bdaroz/the-blue-alliance,the-blue-alliance/the-blue-alliance,verycumbersome/the-blue-alliance,josephbisch/the-blue-alliance,verycumbersome/the-blue-alliance
|
class AuthType(object):
"""
An auth type defines what write privileges an authenticated agent has.
"""
MATCH_VIDEO = 0
EVENT_TEAMS = 1
EVENT_MATCHES = 2
EVENT_RANKINGS = 3
EVENT_ALLIANCES = 4
EVENT_AWARDS = 5
type_names = {
MATCH_VIDEO: "match video",
EVENT_TEAMS: "event teams",
EVENT_MATCHES: "event matches",
EVENT_RANKINGS: "event rankings",
EVENT_ALLIANCES: "event alliances",
EVENT_AWARDS: "event awrads"
}
Revert "Remove AuthType.EVENT_DATA and renumber"
This reverts commit 38248941eb47a04b82fe52e1adca7387dafcf7f3.
|
class AuthType(object):
"""
An auth type defines what write privileges an authenticated agent has.
"""
EVENT_DATA = 0
MATCH_VIDEO = 1
EVENT_TEAMS = 2
EVENT_MATCHES = 3
EVENT_RANKINGS = 4
EVENT_ALLIANCES = 5
EVENT_AWARDS = 6
type_names = {
EVENT_DATA: "event data",
MATCH_VIDEO: "match video",
EVENT_TEAMS: "event teams",
EVENT_MATCHES: "event matches",
EVENT_RANKINGS: "event rankings",
EVENT_ALLIANCES: "event alliances",
EVENT_AWARDS: "event awrads"
}
|
<commit_before>class AuthType(object):
"""
An auth type defines what write privileges an authenticated agent has.
"""
MATCH_VIDEO = 0
EVENT_TEAMS = 1
EVENT_MATCHES = 2
EVENT_RANKINGS = 3
EVENT_ALLIANCES = 4
EVENT_AWARDS = 5
type_names = {
MATCH_VIDEO: "match video",
EVENT_TEAMS: "event teams",
EVENT_MATCHES: "event matches",
EVENT_RANKINGS: "event rankings",
EVENT_ALLIANCES: "event alliances",
EVENT_AWARDS: "event awrads"
}
<commit_msg>Revert "Remove AuthType.EVENT_DATA and renumber"
This reverts commit 38248941eb47a04b82fe52e1adca7387dafcf7f3.<commit_after>
|
class AuthType(object):
"""
An auth type defines what write privileges an authenticated agent has.
"""
EVENT_DATA = 0
MATCH_VIDEO = 1
EVENT_TEAMS = 2
EVENT_MATCHES = 3
EVENT_RANKINGS = 4
EVENT_ALLIANCES = 5
EVENT_AWARDS = 6
type_names = {
EVENT_DATA: "event data",
MATCH_VIDEO: "match video",
EVENT_TEAMS: "event teams",
EVENT_MATCHES: "event matches",
EVENT_RANKINGS: "event rankings",
EVENT_ALLIANCES: "event alliances",
EVENT_AWARDS: "event awrads"
}
|
class AuthType(object):
"""
An auth type defines what write privileges an authenticated agent has.
"""
MATCH_VIDEO = 0
EVENT_TEAMS = 1
EVENT_MATCHES = 2
EVENT_RANKINGS = 3
EVENT_ALLIANCES = 4
EVENT_AWARDS = 5
type_names = {
MATCH_VIDEO: "match video",
EVENT_TEAMS: "event teams",
EVENT_MATCHES: "event matches",
EVENT_RANKINGS: "event rankings",
EVENT_ALLIANCES: "event alliances",
EVENT_AWARDS: "event awrads"
}
Revert "Remove AuthType.EVENT_DATA and renumber"
This reverts commit 38248941eb47a04b82fe52e1adca7387dafcf7f3.class AuthType(object):
"""
An auth type defines what write privileges an authenticated agent has.
"""
EVENT_DATA = 0
MATCH_VIDEO = 1
EVENT_TEAMS = 2
EVENT_MATCHES = 3
EVENT_RANKINGS = 4
EVENT_ALLIANCES = 5
EVENT_AWARDS = 6
type_names = {
EVENT_DATA: "event data",
MATCH_VIDEO: "match video",
EVENT_TEAMS: "event teams",
EVENT_MATCHES: "event matches",
EVENT_RANKINGS: "event rankings",
EVENT_ALLIANCES: "event alliances",
EVENT_AWARDS: "event awrads"
}
|
<commit_before>class AuthType(object):
"""
An auth type defines what write privileges an authenticated agent has.
"""
MATCH_VIDEO = 0
EVENT_TEAMS = 1
EVENT_MATCHES = 2
EVENT_RANKINGS = 3
EVENT_ALLIANCES = 4
EVENT_AWARDS = 5
type_names = {
MATCH_VIDEO: "match video",
EVENT_TEAMS: "event teams",
EVENT_MATCHES: "event matches",
EVENT_RANKINGS: "event rankings",
EVENT_ALLIANCES: "event alliances",
EVENT_AWARDS: "event awrads"
}
<commit_msg>Revert "Remove AuthType.EVENT_DATA and renumber"
This reverts commit 38248941eb47a04b82fe52e1adca7387dafcf7f3.<commit_after>class AuthType(object):
"""
An auth type defines what write privileges an authenticated agent has.
"""
EVENT_DATA = 0
MATCH_VIDEO = 1
EVENT_TEAMS = 2
EVENT_MATCHES = 3
EVENT_RANKINGS = 4
EVENT_ALLIANCES = 5
EVENT_AWARDS = 6
type_names = {
EVENT_DATA: "event data",
MATCH_VIDEO: "match video",
EVENT_TEAMS: "event teams",
EVENT_MATCHES: "event matches",
EVENT_RANKINGS: "event rankings",
EVENT_ALLIANCES: "event alliances",
EVENT_AWARDS: "event awrads"
}
|
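Editor's note: an illustration of why the renumbering was reverted. Auth types are persisted as plain integers, so rows written under one numbering are silently reinterpreted under another; the mappings below are abbreviated.

RESTORED = {0: "event data", 1: "match video", 2: "event teams"}
RENUMBERED = {0: "match video", 1: "event teams", 2: "event matches"}

stored = 1                   # persisted while EVENT_DATA = 0 was live
print(RESTORED[stored])      # match video  (intended meaning)
print(RENUMBERED[stored])    # event teams  (silent reinterpretation)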
e7df1e7f3e9d8afd5cf1892df2f136751b276136
|
aio_pika/transaction.py
|
aio_pika/transaction.py
|
import asyncio
from enum import Enum
import aiormq
class TransactionStates(Enum):
created = "created"
commited = "commited"
rolled_back = "rolled back"
started = "started"
class Transaction:
def __str__(self):
return self.state.value
def __init__(self, channel):
self.loop = channel.loop
self._channel = channel
self.state = TransactionStates.created # type: TransactionStates
@property
def channel(self) -> aiormq.Channel:
if self._channel is None:
raise RuntimeError("Channel not opened")
if self._channel.is_closed:
raise RuntimeError("Closed channel")
return self._channel
async def select(self, timeout=None) -> aiormq.spec.Tx.SelectOk:
result = await asyncio.wait_for(
self.channel.tx_select(), timeout=timeout,
)
self.state = TransactionStates.started
return result
async def rollback(self, timeout=None):
result = await asyncio.wait_for(
self.channel.tx_rollback(), timeout=timeout,
)
self.state = TransactionStates.rolled_back
return result
async def commit(self, timeout=None):
result = await asyncio.wait_for(
self.channel.tx_commit(), timeout=timeout,
)
self.state = TransactionStates.commited
return result
async def __aenter__(self):
return await self.select()
async def __aexit__(self, exc_type, exc_val, exc_tb):
if exc_type:
await self.rollback()
else:
await self.commit()
|
import asyncio
from enum import Enum
import aiormq
class TransactionStates(Enum):
created = "created"
commited = "commited"
rolled_back = "rolled back"
started = "started"
class Transaction:
def __str__(self):
return self.state.value
def __init__(self, channel):
self.loop = channel.loop
self._channel = channel
self.state = TransactionStates.created # type: TransactionStates
@property
def channel(self) -> aiormq.Channel:
if self._channel is None:
raise RuntimeError("Channel not opened")
if self._channel.is_closed:
raise RuntimeError("Closed channel")
return self._channel
async def select(self, timeout=None) -> aiormq.spec.Tx.SelectOk:
result = await asyncio.wait_for(
self.channel.tx_select(), timeout=timeout,
)
self.state = TransactionStates.started
return result
async def rollback(self, timeout=None):
result = await asyncio.wait_for(
self.channel.tx_rollback(), timeout=timeout,
)
self.state = TransactionStates.rolled_back
return result
async def commit(self, timeout=None):
result = await asyncio.wait_for(
self.channel.tx_commit(), timeout=timeout,
)
self.state = TransactionStates.commited
return result
async def __aenter__(self):
await self.select()
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
if exc_type:
await self.rollback()
else:
await self.commit()
|
Return self instead of select result in __aenter__
|
Return self instead of select result in __aenter__
|
Python
|
apache-2.0
|
mosquito/aio-pika
|
import asyncio
from enum import Enum
import aiormq
class TransactionStates(Enum):
created = "created"
commited = "commited"
rolled_back = "rolled back"
started = "started"
class Transaction:
def __str__(self):
return self.state.value
def __init__(self, channel):
self.loop = channel.loop
self._channel = channel
self.state = TransactionStates.created # type: TransactionStates
@property
def channel(self) -> aiormq.Channel:
if self._channel is None:
raise RuntimeError("Channel not opened")
if self._channel.is_closed:
raise RuntimeError("Closed channel")
return self._channel
async def select(self, timeout=None) -> aiormq.spec.Tx.SelectOk:
result = await asyncio.wait_for(
self.channel.tx_select(), timeout=timeout,
)
self.state = TransactionStates.started
return result
async def rollback(self, timeout=None):
result = await asyncio.wait_for(
self.channel.tx_rollback(), timeout=timeout,
)
self.state = TransactionStates.rolled_back
return result
async def commit(self, timeout=None):
result = await asyncio.wait_for(
self.channel.tx_commit(), timeout=timeout,
)
self.state = TransactionStates.commited
return result
async def __aenter__(self):
return await self.select()
async def __aexit__(self, exc_type, exc_val, exc_tb):
if exc_type:
await self.rollback()
else:
await self.commit()
Return self instead of select result in __aenter__
|
import asyncio
from enum import Enum
import aiormq
class TransactionStates(Enum):
created = "created"
commited = "commited"
rolled_back = "rolled back"
started = "started"
class Transaction:
def __str__(self):
return self.state.value
def __init__(self, channel):
self.loop = channel.loop
self._channel = channel
self.state = TransactionStates.created # type: TransactionStates
@property
def channel(self) -> aiormq.Channel:
if self._channel is None:
raise RuntimeError("Channel not opened")
if self._channel.is_closed:
raise RuntimeError("Closed channel")
return self._channel
async def select(self, timeout=None) -> aiormq.spec.Tx.SelectOk:
result = await asyncio.wait_for(
self.channel.tx_select(), timeout=timeout,
)
self.state = TransactionStates.started
return result
async def rollback(self, timeout=None):
result = await asyncio.wait_for(
self.channel.tx_rollback(), timeout=timeout,
)
self.state = TransactionStates.rolled_back
return result
async def commit(self, timeout=None):
result = await asyncio.wait_for(
self.channel.tx_commit(), timeout=timeout,
)
self.state = TransactionStates.commited
return result
async def __aenter__(self):
await self.select()
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
if exc_type:
await self.rollback()
else:
await self.commit()
|
<commit_before>import asyncio
from enum import Enum
import aiormq
class TransactionStates(Enum):
created = "created"
commited = "commited"
rolled_back = "rolled back"
started = "started"
class Transaction:
def __str__(self):
return self.state.value
def __init__(self, channel):
self.loop = channel.loop
self._channel = channel
self.state = TransactionStates.created # type: TransactionStates
@property
def channel(self) -> aiormq.Channel:
if self._channel is None:
raise RuntimeError("Channel not opened")
if self._channel.is_closed:
raise RuntimeError("Closed channel")
return self._channel
async def select(self, timeout=None) -> aiormq.spec.Tx.SelectOk:
result = await asyncio.wait_for(
self.channel.tx_select(), timeout=timeout,
)
self.state = TransactionStates.started
return result
async def rollback(self, timeout=None):
result = await asyncio.wait_for(
self.channel.tx_rollback(), timeout=timeout,
)
self.state = TransactionStates.rolled_back
return result
async def commit(self, timeout=None):
result = await asyncio.wait_for(
self.channel.tx_commit(), timeout=timeout,
)
self.state = TransactionStates.commited
return result
async def __aenter__(self):
return await self.select()
async def __aexit__(self, exc_type, exc_val, exc_tb):
if exc_type:
await self.rollback()
else:
await self.commit()
<commit_msg>Return self instead of select result in __aenter__<commit_after>
|
import asyncio
from enum import Enum
import aiormq
class TransactionStates(Enum):
created = "created"
commited = "commited"
rolled_back = "rolled back"
started = "started"
class Transaction:
def __str__(self):
return self.state.value
def __init__(self, channel):
self.loop = channel.loop
self._channel = channel
self.state = TransactionStates.created # type: TransactionStates
@property
def channel(self) -> aiormq.Channel:
if self._channel is None:
raise RuntimeError("Channel not opened")
if self._channel.is_closed:
raise RuntimeError("Closed channel")
return self._channel
async def select(self, timeout=None) -> aiormq.spec.Tx.SelectOk:
result = await asyncio.wait_for(
self.channel.tx_select(), timeout=timeout,
)
self.state = TransactionStates.started
return result
async def rollback(self, timeout=None):
result = await asyncio.wait_for(
self.channel.tx_rollback(), timeout=timeout,
)
self.state = TransactionStates.rolled_back
return result
async def commit(self, timeout=None):
result = await asyncio.wait_for(
self.channel.tx_commit(), timeout=timeout,
)
self.state = TransactionStates.commited
return result
async def __aenter__(self):
await self.select()
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
if exc_type:
await self.rollback()
else:
await self.commit()
|
import asyncio
from enum import Enum
import aiormq
class TransactionStates(Enum):
created = "created"
commited = "commited"
rolled_back = "rolled back"
started = "started"
class Transaction:
def __str__(self):
return self.state.value
def __init__(self, channel):
self.loop = channel.loop
self._channel = channel
self.state = TransactionStates.created # type: TransactionStates
@property
def channel(self) -> aiormq.Channel:
if self._channel is None:
raise RuntimeError("Channel not opened")
if self._channel.is_closed:
raise RuntimeError("Closed channel")
return self._channel
async def select(self, timeout=None) -> aiormq.spec.Tx.SelectOk:
result = await asyncio.wait_for(
self.channel.tx_select(), timeout=timeout,
)
self.state = TransactionStates.started
return result
async def rollback(self, timeout=None):
result = await asyncio.wait_for(
self.channel.tx_rollback(), timeout=timeout,
)
self.state = TransactionStates.rolled_back
return result
async def commit(self, timeout=None):
result = await asyncio.wait_for(
self.channel.tx_commit(), timeout=timeout,
)
self.state = TransactionStates.commited
return result
async def __aenter__(self):
return await self.select()
async def __aexit__(self, exc_type, exc_val, exc_tb):
if exc_type:
await self.rollback()
else:
await self.commit()
Return self instead of select result in __aenter__import asyncio
from enum import Enum
import aiormq
class TransactionStates(Enum):
created = "created"
commited = "commited"
rolled_back = "rolled back"
started = "started"
class Transaction:
def __str__(self):
return self.state.value
def __init__(self, channel):
self.loop = channel.loop
self._channel = channel
self.state = TransactionStates.created # type: TransactionStates
@property
def channel(self) -> aiormq.Channel:
if self._channel is None:
raise RuntimeError("Channel not opened")
if self._channel.is_closed:
raise RuntimeError("Closed channel")
return self._channel
async def select(self, timeout=None) -> aiormq.spec.Tx.SelectOk:
result = await asyncio.wait_for(
self.channel.tx_select(), timeout=timeout,
)
self.state = TransactionStates.started
return result
async def rollback(self, timeout=None):
result = await asyncio.wait_for(
self.channel.tx_rollback(), timeout=timeout,
)
self.state = TransactionStates.rolled_back
return result
async def commit(self, timeout=None):
result = await asyncio.wait_for(
self.channel.tx_commit(), timeout=timeout,
)
self.state = TransactionStates.commited
return result
async def __aenter__(self):
await self.select()
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
if exc_type:
await self.rollback()
else:
await self.commit()
|
<commit_before>import asyncio
from enum import Enum
import aiormq
class TransactionStates(Enum):
created = "created"
commited = "commited"
rolled_back = "rolled back"
started = "started"
class Transaction:
def __str__(self):
return self.state.value
def __init__(self, channel):
self.loop = channel.loop
self._channel = channel
self.state = TransactionStates.created # type: TransactionStates
@property
def channel(self) -> aiormq.Channel:
if self._channel is None:
raise RuntimeError("Channel not opened")
if self._channel.is_closed:
raise RuntimeError("Closed channel")
return self._channel
async def select(self, timeout=None) -> aiormq.spec.Tx.SelectOk:
result = await asyncio.wait_for(
self.channel.tx_select(), timeout=timeout,
)
self.state = TransactionStates.started
return result
async def rollback(self, timeout=None):
result = await asyncio.wait_for(
self.channel.tx_rollback(), timeout=timeout,
)
self.state = TransactionStates.rolled_back
return result
async def commit(self, timeout=None):
result = await asyncio.wait_for(
self.channel.tx_commit(), timeout=timeout,
)
self.state = TransactionStates.commited
return result
async def __aenter__(self):
return await self.select()
async def __aexit__(self, exc_type, exc_val, exc_tb):
if exc_type:
await self.rollback()
else:
await self.commit()
<commit_msg>Return self instead of select result in __aenter__<commit_after>import asyncio
from enum import Enum
import aiormq
class TransactionStates(Enum):
created = "created"
commited = "commited"
rolled_back = "rolled back"
started = "started"
class Transaction:
def __str__(self):
return self.state.value
def __init__(self, channel):
self.loop = channel.loop
self._channel = channel
self.state = TransactionStates.created # type: TransactionStates
@property
def channel(self) -> aiormq.Channel:
if self._channel is None:
raise RuntimeError("Channel not opened")
if self._channel.is_closed:
raise RuntimeError("Closed channel")
return self._channel
async def select(self, timeout=None) -> aiormq.spec.Tx.SelectOk:
result = await asyncio.wait_for(
self.channel.tx_select(), timeout=timeout,
)
self.state = TransactionStates.started
return result
async def rollback(self, timeout=None):
result = await asyncio.wait_for(
self.channel.tx_rollback(), timeout=timeout,
)
self.state = TransactionStates.rolled_back
return result
async def commit(self, timeout=None):
result = await asyncio.wait_for(
self.channel.tx_commit(), timeout=timeout,
)
self.state = TransactionStates.commited
return result
async def __aenter__(self):
await self.select()
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
if exc_type:
await self.rollback()
else:
await self.commit()
|
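Editor's note: a usage sketch enabled by the change; the broker URL is a placeholder. Because `__aenter__` now returns the `Transaction` itself rather than the `Tx.SelectOk` frame, the name bound by `async with` is actually usable, while commit and rollback still happen in `__aexit__`.

import asyncio
import aio_pika

async def main():
    connection = await aio_pika.connect("amqp://guest:guest@localhost/")
    async with connection:
        # Transactions require publisher confirms to be disabled.
        channel = await connection.channel(publisher_confirms=False)
        async with channel.transaction() as tx:
            print(tx.state)   # TransactionStates.started (was: a SelectOk frame)
            # ... publish messages inside the transaction ...
        # committed on a clean exit, rolled back if the block raised

asyncio.run(main())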
e0990fcdb7e5e1c90762a71ced7492e28f24c903
|
raven/utils/compat.py
|
raven/utils/compat.py
|
"""
raven.utils.compat
~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
try:
from urllib.error import HTTPError
except ImportError:
from urllib2 import HTTPError
try:
from urllib.request import Request, urlopen
except ImportError:
from urllib2 import Request, urlopen
try:
from urllib import quote as urllib_quote
except ImportError:
from urllib.parse import quote as urllib_quote
try:
from queue import Queue
except ImportError:
from Queue import Queue
try:
import urlparse as _urlparse
except ImportError:
from urllib import parse as _urlparse
try:
from unittest import TestCase, skipIf
except ImportError:
from unittest2 import TestCase, skipIf
|
"""
raven.utils.compat
~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
try:
from urllib.error import HTTPError
except ImportError:
from urllib2 import HTTPError
try:
from urllib.request import Request, urlopen
except ImportError:
from urllib2 import Request, urlopen
try:
from urllib import quote as urllib_quote
except ImportError:
from urllib.parse import quote as urllib_quote
try:
from queue import Queue
except ImportError:
from Queue import Queue
try:
import urlparse as _urlparse
except ImportError:
from urllib import parse as _urlparse
try:
from unittest2 import TestCase, skipIf
except ImportError:
from unittest import TestCase, skipIf
|
Use unittest2 if available and gracefully fall back to unittest.
|
Use unittest2 if available and gracefully fall back to unittest.
|
Python
|
bsd-3-clause
|
getsentry/raven-python,lepture/raven-python,recht/raven-python,nikolas/raven-python,nikolas/raven-python,inspirehep/raven-python,dbravender/raven-python,smarkets/raven-python,percipient/raven-python,Photonomie/raven-python,ronaldevers/raven-python,danriti/raven-python,someonehan/raven-python,smarkets/raven-python,Photonomie/raven-python,recht/raven-python,icereval/raven-python,akalipetis/raven-python,jbarbuto/raven-python,inspirehep/raven-python,danriti/raven-python,jmagnusson/raven-python,icereval/raven-python,hzy/raven-python,dbravender/raven-python,percipient/raven-python,jbarbuto/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,akalipetis/raven-python,ewdurbin/raven-python,recht/raven-python,jmp0xf/raven-python,akheron/raven-python,someonehan/raven-python,someonehan/raven-python,lepture/raven-python,ewdurbin/raven-python,johansteffner/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,nikolas/raven-python,hzy/raven-python,arthurlogilab/raven-python,nikolas/raven-python,ronaldevers/raven-python,danriti/raven-python,arthurlogilab/raven-python,Photonomie/raven-python,jbarbuto/raven-python,arthurlogilab/raven-python,getsentry/raven-python,arthurlogilab/raven-python,inspirehep/raven-python,smarkets/raven-python,icereval/raven-python,akheron/raven-python,lepture/raven-python,icereval/raven-python,jmagnusson/raven-python,jmagnusson/raven-python,akheron/raven-python,jmp0xf/raven-python,ronaldevers/raven-python,smarkets/raven-python,ewdurbin/raven-python,johansteffner/raven-python,inspirehep/raven-python,percipient/raven-python,jbarbuto/raven-python,getsentry/raven-python,johansteffner/raven-python,jmp0xf/raven-python,akalipetis/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,dbravender/raven-python,hzy/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python
|
"""
raven.utils.compat
~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
try:
from urllib.error import HTTPError
except ImportError:
from urllib2 import HTTPError
try:
from urllib.request import Request, urlopen
except ImportError:
from urllib2 import Request, urlopen
try:
from urllib import quote as urllib_quote
except ImportError:
from urllib.parse import quote as urllib_quote
try:
from queue import Queue
except ImportError:
from Queue import Queue
try:
import urlparse as _urlparse
except ImportError:
from urllib import parse as _urlparse
try:
from unittest import TestCase, skipIf
except ImportError:
from unittest2 import TestCase, skipIf
Use unittest2 if available and gracefully fall back to unittest.
|
"""
raven.utils.compat
~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
try:
from urllib.error import HTTPError
except ImportError:
from urllib2 import HTTPError
try:
from urllib.request import Request, urlopen
except ImportError:
from urllib2 import Request, urlopen
try:
from urllib import quote as urllib_quote
except ImportError:
from urllib.parse import quote as urllib_quote
try:
from queue import Queue
except ImportError:
from Queue import Queue
try:
import urlparse as _urlparse
except ImportError:
from urllib import parse as _urlparse
try:
from unittest2 import TestCase, skipIf
except ImportError:
from unittest import TestCase, skipIf
|
<commit_before>"""
raven.utils.compat
~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
try:
from urllib.error import HTTPError
except ImportError:
from urllib2 import HTTPError
try:
from urllib.request import Request, urlopen
except ImportError:
from urllib2 import Request, urlopen
try:
from urllib import quote as urllib_quote
except ImportError:
from urllib.parse import quote as urllib_quote
try:
from queue import Queue
except ImportError:
from Queue import Queue
try:
import urlparse as _urlparse
except ImportError:
from urllib import parse as _urlparse
try:
from unittest import TestCase, skipIf
except ImportError:
from unittest2 import TestCase, skipIf
<commit_msg>Use unittest2 if available and gracefully fall back to unittest.<commit_after>
|
"""
raven.utils.compat
~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
try:
from urllib.error import HTTPError
except ImportError:
from urllib2 import HTTPError
try:
from urllib.request import Request, urlopen
except ImportError:
from urllib2 import Request, urlopen
try:
from urllib import quote as urllib_quote
except ImportError:
from urllib.parse import quote as urllib_quote
try:
from queue import Queue
except ImportError:
from Queue import Queue
try:
import urlparse as _urlparse
except ImportError:
from urllib import parse as _urlparse
try:
from unittest2 import TestCase, skipIf
except ImportError:
from unittest import TestCase, skipIf
|
"""
raven.utils.compat
~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
try:
from urllib.error import HTTPError
except ImportError:
from urllib2 import HTTPError
try:
from urllib.request import Request, urlopen
except ImportError:
from urllib2 import Request, urlopen
try:
from urllib import quote as urllib_quote
except ImportError:
from urllib.parse import quote as urllib_quote
try:
from queue import Queue
except ImportError:
from Queue import Queue
try:
import urlparse as _urlparse
except ImportError:
from urllib import parse as _urlparse
try:
from unittest import TestCase, skipIf
except ImportError:
from unittest2 import TestCase, skipIf
Use unittest2 if available and gracefully fall back to unittest."""
raven.utils.compat
~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
try:
from urllib.error import HTTPError
except ImportError:
from urllib2 import HTTPError
try:
from urllib.request import Request, urlopen
except ImportError:
from urllib2 import Request, urlopen
try:
from urllib import quote as urllib_quote
except ImportError:
from urllib.parse import quote as urllib_quote
try:
from queue import Queue
except ImportError:
from Queue import Queue
try:
import urlparse as _urlparse
except ImportError:
from urllib import parse as _urlparse
try:
from unittest2 import TestCase, skipIf
except ImportError:
from unittest import TestCase, skipIf
|
<commit_before>"""
raven.utils.compat
~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
try:
from urllib.error import HTTPError
except ImportError:
from urllib2 import HTTPError
try:
from urllib.request import Request, urlopen
except ImportError:
from urllib2 import Request, urlopen
try:
from urllib import quote as urllib_quote
except ImportError:
from urllib.parse import quote as urllib_quote
try:
from queue import Queue
except ImportError:
from Queue import Queue
try:
import urlparse as _urlparse
except ImportError:
from urllib import parse as _urlparse
try:
from unittest import TestCase, skipIf
except ImportError:
from unittest2 import TestCase, skipIf
<commit_msg>Use unittest2 if available and gracefully fall back to unittest.<commit_after>"""
raven.utils.compat
~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
try:
from urllib.error import HTTPError
except ImportError:
from urllib2 import HTTPError
try:
from urllib.request import Request, urlopen
except ImportError:
from urllib2 import Request, urlopen
try:
from urllib import quote as urllib_quote
except ImportError:
from urllib.parse import quote as urllib_quote
try:
from queue import Queue
except ImportError:
from Queue import Queue
try:
import urlparse as _urlparse
except ImportError:
from urllib import parse as _urlparse
try:
from unittest2 import TestCase, skipIf
except ImportError:
from unittest import TestCase, skipIf
|
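Note on the change above: the diff only swaps the arms of the final try/except import, putting the preferred module in the `try` arm and the stdlib in `except ImportError`. A runnable sketch of the same idiom with a hypothetical module choice (simplejson stands in for any optional enhanced package):

try:
    import simplejson as json  # preferred third-party implementation, if installed
except ImportError:
    import json                # stdlib fallback keeps the code working anywhere

print(json.dumps({"fallback_works": True}))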
cbb0ed5ed66571feba22413472a5fe1a20824dbd
|
shared_export.py
|
shared_export.py
|
import bpy
def find_seqs(scene, select_marker):
sequences = {}
sequence_flags = {}
for marker in scene.timeline_markers:
if ":" not in marker.name or (select_marker and not marker.select):
continue
name, what = marker.name.rsplit(":", 1)
if name not in sequences:
sequences[name] = {}
if what in sequences[name]:
print("Warning: Got duplicate '{}' marker for sequence '{}' at frame {} (first was at frame {}), ignoring".format(what, name, marker.frame, sequences[name][what].frame))
continue
sequences[name][what] = marker
if "Sequences" in bpy.data.texts:
for line in bpy.data.texts["Sequences"].as_string().split("\n"):
line = line.strip()
if not line:
continue
if ":" not in line:
print("Invalid line in 'Sequences':", line)
continue
name, flags = line.split(":", 1)
if flags.lstrip():
flags = tuple(map(lambda f: f.strip(), flags.split(",")))
else:
flags = ()
sequence_flags[name] = flags
return sequences, sequence_flags
|
import bpy
def find_seqs(scene, select_marker):
sequences = {}
sequence_flags = {}
for marker in scene.timeline_markers:
if ":" not in marker.name or (select_marker and not marker.select):
continue
name, what = marker.name.rsplit(":", 1)
what = what.lower()
if name not in sequences:
sequences[name] = {}
if what in sequences[name]:
print("Warning: Got duplicate '{}' marker for sequence '{}' at frame {} (first was at frame {}), ignoring".format(what, name, marker.frame, sequences[name][what].frame))
continue
sequences[name][what] = marker
if "Sequences" in bpy.data.texts:
for line in bpy.data.texts["Sequences"].as_string().split("\n"):
line = line.strip()
if not line:
continue
if ":" not in line:
print("Invalid line in 'Sequences':", line)
continue
name, flags = line.split(":", 1)
if flags.lstrip():
flags = tuple(map(lambda f: f.strip(), flags.split(",")))
else:
flags = ()
sequence_flags[name] = flags
return sequences, sequence_flags
|
Make sequence marker types (:type) case insensitive
|
Make sequence marker types (:type) case insensitive
|
Python
|
mit
|
qoh/io_scene_dts,portify/io_scene_dts
|
import bpy
def find_seqs(scene, select_marker):
sequences = {}
sequence_flags = {}
for marker in scene.timeline_markers:
if ":" not in marker.name or (select_marker and not marker.select):
continue
name, what = marker.name.rsplit(":", 1)
if name not in sequences:
sequences[name] = {}
if what in sequences[name]:
print("Warning: Got duplicate '{}' marker for sequence '{}' at frame {} (first was at frame {}), ignoring".format(what, name, marker.frame, sequences[name][what].frame))
continue
sequences[name][what] = marker
if "Sequences" in bpy.data.texts:
for line in bpy.data.texts["Sequences"].as_string().split("\n"):
line = line.strip()
if not line:
continue
if ":" not in line:
print("Invalid line in 'Sequences':", line)
continue
name, flags = line.split(":", 1)
if flags.lstrip():
flags = tuple(map(lambda f: f.strip(), flags.split(",")))
else:
flags = ()
sequence_flags[name] = flags
return sequences, sequence_flagsMake sequence marker types (:type) case insensitive
|
import bpy
def find_seqs(scene, select_marker):
sequences = {}
sequence_flags = {}
for marker in scene.timeline_markers:
if ":" not in marker.name or (select_marker and not marker.select):
continue
name, what = marker.name.rsplit(":", 1)
what = what.lower()
if name not in sequences:
sequences[name] = {}
if what in sequences[name]:
print("Warning: Got duplicate '{}' marker for sequence '{}' at frame {} (first was at frame {}), ignoring".format(what, name, marker.frame, sequences[name][what].frame))
continue
sequences[name][what] = marker
if "Sequences" in bpy.data.texts:
for line in bpy.data.texts["Sequences"].as_string().split("\n"):
line = line.strip()
if not line:
continue
if ":" not in line:
print("Invalid line in 'Sequences':", line)
continue
name, flags = line.split(":", 1)
if flags.lstrip():
flags = tuple(map(lambda f: f.strip(), flags.split(",")))
else:
flags = ()
sequence_flags[name] = flags
return sequences, sequence_flags
|
<commit_before>import bpy
def find_seqs(scene, select_marker):
sequences = {}
sequence_flags = {}
for marker in scene.timeline_markers:
if ":" not in marker.name or (select_marker and not marker.select):
continue
name, what = marker.name.rsplit(":", 1)
if name not in sequences:
sequences[name] = {}
if what in sequences[name]:
print("Warning: Got duplicate '{}' marker for sequence '{}' at frame {} (first was at frame {}), ignoring".format(what, name, marker.frame, sequences[name][what].frame))
continue
sequences[name][what] = marker
if "Sequences" in bpy.data.texts:
for line in bpy.data.texts["Sequences"].as_string().split("\n"):
line = line.strip()
if not line:
continue
if ":" not in line:
print("Invalid line in 'Sequences':", line)
continue
name, flags = line.split(":", 1)
if flags.lstrip():
flags = tuple(map(lambda f: f.strip(), flags.split(",")))
else:
flags = ()
sequence_flags[name] = flags
return sequences, sequence_flags<commit_msg>Make sequence marker types (:type) case insensitive<commit_after>
|
import bpy
def find_seqs(scene, select_marker):
sequences = {}
sequence_flags = {}
for marker in scene.timeline_markers:
if ":" not in marker.name or (select_marker and not marker.select):
continue
name, what = marker.name.rsplit(":", 1)
what = what.lower()
if name not in sequences:
sequences[name] = {}
if what in sequences[name]:
print("Warning: Got duplicate '{}' marker for sequence '{}' at frame {} (first was at frame {}), ignoring".format(what, name, marker.frame, sequences[name][what].frame))
continue
sequences[name][what] = marker
if "Sequences" in bpy.data.texts:
for line in bpy.data.texts["Sequences"].as_string().split("\n"):
line = line.strip()
if not line:
continue
if ":" not in line:
print("Invalid line in 'Sequences':", line)
continue
name, flags = line.split(":", 1)
if flags.lstrip():
flags = tuple(map(lambda f: f.strip(), flags.split(",")))
else:
flags = ()
sequence_flags[name] = flags
return sequences, sequence_flags
|
import bpy
def find_seqs(scene, select_marker):
sequences = {}
sequence_flags = {}
for marker in scene.timeline_markers:
if ":" not in marker.name or (select_marker and not marker.select):
continue
name, what = marker.name.rsplit(":", 1)
if name not in sequences:
sequences[name] = {}
if what in sequences[name]:
print("Warning: Got duplicate '{}' marker for sequence '{}' at frame {} (first was at frame {}), ignoring".format(what, name, marker.frame, sequences[name][what].frame))
continue
sequences[name][what] = marker
if "Sequences" in bpy.data.texts:
for line in bpy.data.texts["Sequences"].as_string().split("\n"):
line = line.strip()
if not line:
continue
if ":" not in line:
print("Invalid line in 'Sequences':", line)
continue
name, flags = line.split(":", 1)
if flags.lstrip():
flags = tuple(map(lambda f: f.strip(), flags.split(",")))
else:
flags = ()
sequence_flags[name] = flags
return sequences, sequence_flagsMake sequence marker types (:type) case insensitiveimport bpy
def find_seqs(scene, select_marker):
sequences = {}
sequence_flags = {}
for marker in scene.timeline_markers:
if ":" not in marker.name or (select_marker and not marker.select):
continue
name, what = marker.name.rsplit(":", 1)
what = what.lower()
if name not in sequences:
sequences[name] = {}
if what in sequences[name]:
print("Warning: Got duplicate '{}' marker for sequence '{}' at frame {} (first was at frame {}), ignoring".format(what, name, marker.frame, sequences[name][what].frame))
continue
sequences[name][what] = marker
if "Sequences" in bpy.data.texts:
for line in bpy.data.texts["Sequences"].as_string().split("\n"):
line = line.strip()
if not line:
continue
if ":" not in line:
print("Invalid line in 'Sequences':", line)
continue
name, flags = line.split(":", 1)
if flags.lstrip():
flags = tuple(map(lambda f: f.strip(), flags.split(",")))
else:
flags = ()
sequence_flags[name] = flags
return sequences, sequence_flags
|
<commit_before>import bpy
def find_seqs(scene, select_marker):
sequences = {}
sequence_flags = {}
for marker in scene.timeline_markers:
if ":" not in marker.name or (select_marker and not marker.select):
continue
name, what = marker.name.rsplit(":", 1)
if name not in sequences:
sequences[name] = {}
if what in sequences[name]:
print("Warning: Got duplicate '{}' marker for sequence '{}' at frame {} (first was at frame {}), ignoring".format(what, name, marker.frame, sequences[name][what].frame))
continue
sequences[name][what] = marker
if "Sequences" in bpy.data.texts:
for line in bpy.data.texts["Sequences"].as_string().split("\n"):
line = line.strip()
if not line:
continue
if ":" not in line:
print("Invalid line in 'Sequences':", line)
continue
name, flags = line.split(":", 1)
if flags.lstrip():
flags = tuple(map(lambda f: f.strip(), flags.split(",")))
else:
flags = ()
sequence_flags[name] = flags
return sequences, sequence_flags<commit_msg>Make sequence marker types (:type) case insensitive<commit_after>import bpy
def find_seqs(scene, select_marker):
sequences = {}
sequence_flags = {}
for marker in scene.timeline_markers:
if ":" not in marker.name or (select_marker and not marker.select):
continue
name, what = marker.name.rsplit(":", 1)
what = what.lower()
if name not in sequences:
sequences[name] = {}
if what in sequences[name]:
print("Warning: Got duplicate '{}' marker for sequence '{}' at frame {} (first was at frame {}), ignoring".format(what, name, marker.frame, sequences[name][what].frame))
continue
sequences[name][what] = marker
if "Sequences" in bpy.data.texts:
for line in bpy.data.texts["Sequences"].as_string().split("\n"):
line = line.strip()
if not line:
continue
if ":" not in line:
print("Invalid line in 'Sequences':", line)
continue
name, flags = line.split(":", 1)
if flags.lstrip():
flags = tuple(map(lambda f: f.strip(), flags.split(",")))
else:
flags = ()
sequence_flags[name] = flags
return sequences, sequence_flags
|
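Note on the change above: the single added line, `what = what.lower()`, normalizes the marker suffix once at parse time, so `Walk:Start` and `walk:START` style spellings land on the same key. A self-contained sketch of that normalization (marker strings are made up):

markers = ["walk:Start", "walk:END", "run:start"]
sequences = {}
for m in markers:
    name, what = m.rsplit(":", 1)
    what = what.lower()                    # 'Start', 'END', 'start' all fold to lower case
    sequences.setdefault(name, {})[what] = m

print(sequences)  # {'walk': {'start': 'walk:Start', 'end': 'walk:END'}, 'run': {'start': 'run:start'}}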
7ceba1f2b83628a2b89ffbdd30e435970e8c5e91
|
tests/test_kafka_streams.py
|
tests/test_kafka_streams.py
|
"""
Test the top-level Kafka Streams class
"""
import pytest
from winton_kafka_streams import kafka_config
from winton_kafka_streams.errors.kafka_streams_error import KafkaStreamsError
from winton_kafka_streams.kafka_streams import KafkaStreams
from winton_kafka_streams.processor.processor import BaseProcessor
from winton_kafka_streams.processor.topology import TopologyBuilder
class MyTestProcessor(BaseProcessor):
pass
def test_Given_StreamAlreadyStarted_When_CallStartAgain_Then_RaiseError():
kafka_config.NUM_STREAM_THREADS = 0
topology_builder = TopologyBuilder()
topology_builder.source('my-source', ['my-input-topic-1'])
topology_builder.processor('my-processor', MyTestProcessor, 'my-source')
topology_builder.sink('my-sink', 'my-output-topic-1', 'my-processor')
topology = topology_builder.build()
kafka_streams = KafkaStreams(topology, kafka_config)
kafka_streams.start()
with pytest.raises(KafkaStreamsError, message='KafkaStreams already started.'):
kafka_streams.start()
|
"""
Test the top-level Kafka Streams class
"""
import pytest
from winton_kafka_streams import kafka_config
from winton_kafka_streams.errors.kafka_streams_error import KafkaStreamsError
from winton_kafka_streams.kafka_streams import KafkaStreams
from winton_kafka_streams.processor.processor import BaseProcessor
from winton_kafka_streams.processor.topology import TopologyBuilder
class MyTestProcessor(BaseProcessor):
pass
def test__given__stream_already_started__when__call_start_again__then__raise_error():
kafka_config.NUM_STREAM_THREADS = 0
topology_builder = TopologyBuilder()
topology_builder.source('my-source', ['my-input-topic-1'])
topology_builder.processor('my-processor', MyTestProcessor, 'my-source')
topology_builder.sink('my-sink', 'my-output-topic-1', 'my-processor')
topology = topology_builder.build()
kafka_streams = KafkaStreams(topology, kafka_config)
kafka_streams.start()
with pytest.raises(KafkaStreamsError, message='KafkaStreams already started.'):
kafka_streams.start()
|
Use more Pythonic name for test.
|
Use more Pythonic name for test.
|
Python
|
apache-2.0
|
wintoncode/winton-kafka-streams
|
"""
Test the top-level Kafka Streams class
"""
import pytest
from winton_kafka_streams import kafka_config
from winton_kafka_streams.errors.kafka_streams_error import KafkaStreamsError
from winton_kafka_streams.kafka_streams import KafkaStreams
from winton_kafka_streams.processor.processor import BaseProcessor
from winton_kafka_streams.processor.topology import TopologyBuilder
class MyTestProcessor(BaseProcessor):
pass
def test_Given_StreamAlreadyStarted_When_CallStartAgain_Then_RaiseError():
kafka_config.NUM_STREAM_THREADS = 0
topology_builder = TopologyBuilder()
topology_builder.source('my-source', ['my-input-topic-1'])
topology_builder.processor('my-processor', MyTestProcessor, 'my-source')
topology_builder.sink('my-sink', 'my-output-topic-1', 'my-processor')
topology = topology_builder.build()
kafka_streams = KafkaStreams(topology, kafka_config)
kafka_streams.start()
with pytest.raises(KafkaStreamsError, message='KafkaStreams already started.'):
kafka_streams.start()
Use more Pythonic name for test.
|
"""
Test the top-level Kafka Streams class
"""
import pytest
from winton_kafka_streams import kafka_config
from winton_kafka_streams.errors.kafka_streams_error import KafkaStreamsError
from winton_kafka_streams.kafka_streams import KafkaStreams
from winton_kafka_streams.processor.processor import BaseProcessor
from winton_kafka_streams.processor.topology import TopologyBuilder
class MyTestProcessor(BaseProcessor):
pass
def test__given__stream_already_started__when__call_start_again__then__raise_error():
kafka_config.NUM_STREAM_THREADS = 0
topology_builder = TopologyBuilder()
topology_builder.source('my-source', ['my-input-topic-1'])
topology_builder.processor('my-processor', MyTestProcessor, 'my-source')
topology_builder.sink('my-sink', 'my-output-topic-1', 'my-processor')
topology = topology_builder.build()
kafka_streams = KafkaStreams(topology, kafka_config)
kafka_streams.start()
with pytest.raises(KafkaStreamsError, message='KafkaStreams already started.'):
kafka_streams.start()
|
<commit_before>"""
Test the top-level Kafka Streams class
"""
import pytest
from winton_kafka_streams import kafka_config
from winton_kafka_streams.errors.kafka_streams_error import KafkaStreamsError
from winton_kafka_streams.kafka_streams import KafkaStreams
from winton_kafka_streams.processor.processor import BaseProcessor
from winton_kafka_streams.processor.topology import TopologyBuilder
class MyTestProcessor(BaseProcessor):
pass
def test_Given_StreamAlreadyStarted_When_CallStartAgain_Then_RaiseError():
kafka_config.NUM_STREAM_THREADS = 0
topology_builder = TopologyBuilder()
topology_builder.source('my-source', ['my-input-topic-1'])
topology_builder.processor('my-processor', MyTestProcessor, 'my-source')
topology_builder.sink('my-sink', 'my-output-topic-1', 'my-processor')
topology = topology_builder.build()
kafka_streams = KafkaStreams(topology, kafka_config)
kafka_streams.start()
with pytest.raises(KafkaStreamsError, message='KafkaStreams already started.'):
kafka_streams.start()
<commit_msg>Use more Pythonic name for test.<commit_after>
|
"""
Test the top-level Kafka Streams class
"""
import pytest
from winton_kafka_streams import kafka_config
from winton_kafka_streams.errors.kafka_streams_error import KafkaStreamsError
from winton_kafka_streams.kafka_streams import KafkaStreams
from winton_kafka_streams.processor.processor import BaseProcessor
from winton_kafka_streams.processor.topology import TopologyBuilder
class MyTestProcessor(BaseProcessor):
pass
def test__given__stream_already_started__when__call_start_again__then__raise_error():
kafka_config.NUM_STREAM_THREADS = 0
topology_builder = TopologyBuilder()
topology_builder.source('my-source', ['my-input-topic-1'])
topology_builder.processor('my-processor', MyTestProcessor, 'my-source')
topology_builder.sink('my-sink', 'my-output-topic-1', 'my-processor')
topology = topology_builder.build()
kafka_streams = KafkaStreams(topology, kafka_config)
kafka_streams.start()
with pytest.raises(KafkaStreamsError, message='KafkaStreams already started.'):
kafka_streams.start()
|
"""
Test the top-level Kafka Streams class
"""
import pytest
from winton_kafka_streams import kafka_config
from winton_kafka_streams.errors.kafka_streams_error import KafkaStreamsError
from winton_kafka_streams.kafka_streams import KafkaStreams
from winton_kafka_streams.processor.processor import BaseProcessor
from winton_kafka_streams.processor.topology import TopologyBuilder
class MyTestProcessor(BaseProcessor):
pass
def test_Given_StreamAlreadyStarted_When_CallStartAgain_Then_RaiseError():
kafka_config.NUM_STREAM_THREADS = 0
topology_builder = TopologyBuilder()
topology_builder.source('my-source', ['my-input-topic-1'])
topology_builder.processor('my-processor', MyTestProcessor, 'my-source')
topology_builder.sink('my-sink', 'my-output-topic-1', 'my-processor')
topology = topology_builder.build()
kafka_streams = KafkaStreams(topology, kafka_config)
kafka_streams.start()
with pytest.raises(KafkaStreamsError, message='KafkaStreams already started.'):
kafka_streams.start()
Use more Pythonic name for test."""
Test the top-level Kafka Streams class
"""
import pytest
from winton_kafka_streams import kafka_config
from winton_kafka_streams.errors.kafka_streams_error import KafkaStreamsError
from winton_kafka_streams.kafka_streams import KafkaStreams
from winton_kafka_streams.processor.processor import BaseProcessor
from winton_kafka_streams.processor.topology import TopologyBuilder
class MyTestProcessor(BaseProcessor):
pass
def test__given__stream_already_started__when__call_start_again__then__raise_error():
kafka_config.NUM_STREAM_THREADS = 0
topology_builder = TopologyBuilder()
topology_builder.source('my-source', ['my-input-topic-1'])
topology_builder.processor('my-processor', MyTestProcessor, 'my-source')
topology_builder.sink('my-sink', 'my-output-topic-1', 'my-processor')
topology = topology_builder.build()
kafka_streams = KafkaStreams(topology, kafka_config)
kafka_streams.start()
with pytest.raises(KafkaStreamsError, message='KafkaStreams already started.'):
kafka_streams.start()
|
<commit_before>"""
Test the top-level Kafka Streams class
"""
import pytest
from winton_kafka_streams import kafka_config
from winton_kafka_streams.errors.kafka_streams_error import KafkaStreamsError
from winton_kafka_streams.kafka_streams import KafkaStreams
from winton_kafka_streams.processor.processor import BaseProcessor
from winton_kafka_streams.processor.topology import TopologyBuilder
class MyTestProcessor(BaseProcessor):
pass
def test_Given_StreamAlreadyStarted_When_CallStartAgain_Then_RaiseError():
kafka_config.NUM_STREAM_THREADS = 0
topology_builder = TopologyBuilder()
topology_builder.source('my-source', ['my-input-topic-1'])
topology_builder.processor('my-processor', MyTestProcessor, 'my-source')
topology_builder.sink('my-sink', 'my-output-topic-1', 'my-processor')
topology = topology_builder.build()
kafka_streams = KafkaStreams(topology, kafka_config)
kafka_streams.start()
with pytest.raises(KafkaStreamsError, message='KafkaStreams already started.'):
kafka_streams.start()
<commit_msg>Use more Pythonic name for test.<commit_after>"""
Test the top-level Kafka Streams class
"""
import pytest
from winton_kafka_streams import kafka_config
from winton_kafka_streams.errors.kafka_streams_error import KafkaStreamsError
from winton_kafka_streams.kafka_streams import KafkaStreams
from winton_kafka_streams.processor.processor import BaseProcessor
from winton_kafka_streams.processor.topology import TopologyBuilder
class MyTestProcessor(BaseProcessor):
pass
def test__given__stream_already_started__when__call_start_again__then__raise_error():
kafka_config.NUM_STREAM_THREADS = 0
topology_builder = TopologyBuilder()
topology_builder.source('my-source', ['my-input-topic-1'])
topology_builder.processor('my-processor', MyTestProcessor, 'my-source')
topology_builder.sink('my-sink', 'my-output-topic-1', 'my-processor')
topology = topology_builder.build()
kafka_streams = KafkaStreams(topology, kafka_config)
kafka_streams.start()
with pytest.raises(KafkaStreamsError, message='KafkaStreams already started.'):
kafka_streams.start()
|
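Note on the change above: the rename is purely stylistic; pytest collects any function whose name starts with `test`, so both spellings run, but PEP 8 prefers all-lowercase snake_case. A toy comparison (both would be collected):

def test_Given_Stream_When_Start_Then_Ok():        # legal, but mixed-case segments
    assert True

def test__given__stream__when__start__then__ok():  # the snake_case style adopted above
    assert True

if __name__ == "__main__":
    test_Given_Stream_When_Start_Then_Ok()
    test__given__stream__when__start__then__ok()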
3fb1800548ad421520bf3f2845aad4f51f6f5839
|
rapidsms_multimodem/tests/__init__.py
|
rapidsms_multimodem/tests/__init__.py
|
from test_utils import * # noqa
from test_views import * # noqa
|
from test_outgoing import * # noqa
from test_utils import * # noqa
from test_views import * # noqa
|
Add import for older versions of Django
|
Add import for older versions of Django
|
Python
|
bsd-3-clause
|
caktus/rapidsms-multimodem
|
from test_utils import * # noqa
from test_views import * # noqa
Add import for older versions of Django
|
from test_outgoing import * # noqa
from test_utils import * # noqa
from test_views import * # noqa
|
<commit_before>from test_utils import * # noqa
from test_views import * # noqa
<commit_msg>Add import for older versions of Django<commit_after>
|
from test_outgoing import * # noqa
from test_utils import * # noqa
from test_views import * # noqa
|
from test_utils import * # noqa
from test_views import * # noqa
Add import for older versions of Djangofrom test_outgoing import * # noqa
from test_utils import * # noqa
from test_views import * # noqa
|
<commit_before>from test_utils import * # noqa
from test_views import * # noqa
<commit_msg>Add import for older versions of Django<commit_after>from test_outgoing import * # noqa
from test_utils import * # noqa
from test_views import * # noqa
|
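Note on the change above: older Django test runners discovered tests only through the app's `tests` package, so each test module had to be star-imported from `__init__.py`; the `# noqa` markers silence the star-import lint warnings. A runnable analogue of that re-export pattern using a stdlib module instead of the project's test modules:

from math import *  # noqa: F401,F403 -- pulls every public name into this namespace

# sqrt and pi are now visible unqualified, which is exactly what the old
# package-level test discovery relied on for the test-case classes.
print(sqrt(pi) > 1)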
075b11aa830c9a5961e9ee63e42484192990f7d3
|
tools/misc/python/test-data-in-out.py
|
tools/misc/python/test-data-in-out.py
|
# TOOL test-data-in-out.py: "Test data input and output in Python" (Data input output test.)
# INPUT input TYPE GENERIC
# OUTPUT output
# OUTPUT OPTIONAL missing_output.txt
import shutil
shutil.copyfile('input', 'output')
|
# TOOL test-data-in-out.py: "Test data input and output in Python" (Data input output test.)
# INPUT input TYPE GENERIC
# OUTPUT output
# PARAMETER delay: Delay TYPE INTEGER FROM 0 TO 1000 DEFAULT 1 (Delay in seconds)
import shutil
import time
time.sleep(delay)
shutil.copyfile('input', 'output')
|
Add delay to input-output test
|
Add delay to input-output test
|
Python
|
mit
|
chipster/chipster-tools,chipster/chipster-tools,chipster/chipster-tools,chipster/chipster-tools
|
# TOOL test-data-in-out.py: "Test data input and output in Python" (Data input output test.)
# INPUT input TYPE GENERIC
# OUTPUT output
# OUTPUT OPTIONAL missing_output.txt
import shutil
shutil.copyfile('input', 'output')
Add delay to input-output test
|
# TOOL test-data-in-out.py: "Test data input and output in Python" (Data input output test.)
# INPUT input TYPE GENERIC
# OUTPUT output
# PARAMETER delay: Delay TYPE INTEGER FROM 0 TO 1000 DEFAULT 1 (Delay in seconds)
import shutil
import time
time.sleep(delay)
shutil.copyfile('input', 'output')
|
<commit_before># TOOL test-data-in-out.py: "Test data input and output in Python" (Data input output test.)
# INPUT input TYPE GENERIC
# OUTPUT output
# OUTPUT OPTIONAL missing_output.txt
import shutil
shutil.copyfile('input', 'output')
<commit_msg>Add delay to input-output test<commit_after>
|
# TOOL test-data-in-out.py: "Test data input and output in Python" (Data input output test.)
# INPUT input TYPE GENERIC
# OUTPUT output
# PARAMETER delay: Delay TYPE INTEGER FROM 0 TO 1000 DEFAULT 1 (Delay in seconds)
import shutil
import time
time.sleep(delay)
shutil.copyfile('input', 'output')
|
# TOOL test-data-in-out.py: "Test data input and output in Python" (Data input output test.)
# INPUT input TYPE GENERIC
# OUTPUT output
# OUTPUT OPTIONAL missing_output.txt
import shutil
shutil.copyfile('input', 'output')
Add delay to input-output test# TOOL test-data-in-out.py: "Test data input and output in Python" (Data input output test.)
# INPUT input TYPE GENERIC
# OUTPUT output
# PARAMETER delay: Delay TYPE INTEGER FROM 0 TO 1000 DEFAULT 1 (Delay in seconds)
import shutil
import time
time.sleep(delay)
shutil.copyfile('input', 'output')
|
<commit_before># TOOL test-data-in-out.py: "Test data input and output in Python" (Data input output test.)
# INPUT input TYPE GENERIC
# OUTPUT output
# OUTPUT OPTIONAL missing_output.txt
import shutil
shutil.copyfile('input', 'output')
<commit_msg>Add delay to input-output test<commit_after># TOOL test-data-in-out.py: "Test data input and output in Python" (Data input output test.)
# INPUT input TYPE GENERIC
# OUTPUT output
# PARAMETER delay: Delay TYPE INTEGER FROM 0 TO 1000 DEFAULT 1 (Delay in seconds)
import shutil
import time
time.sleep(delay)
shutil.copyfile('input', 'output')
|
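Note on the change above: the `# PARAMETER` comment declares a value that the Chipster runtime injects as a Python variable before the script body runs, which is why `delay` is used without ever being assigned in the diff. A standalone sketch that supplies the variable itself (file names here are temporary and illustrative):

import shutil
import tempfile
import time

delay = 1  # in the real tool this is injected from the PARAMETER declaration

src = tempfile.NamedTemporaryFile(delete=False)
src.write(b"payload")
src.close()

time.sleep(delay)                              # the behaviour the commit adds
shutil.copyfile(src.name, src.name + ".out")
print("copied after", delay, "second(s)")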
943d2648c17facb9dbfd4f26d335beef341e9c49
|
fabfile.py
|
fabfile.py
|
from fabric.api import local
__author__ = 'derek'
def deploy(version):
local('python runtests.py')
local("git tag -a %s -m %s" % (version, version))
local('python setup.py sdist upload')
|
from fabric.api import local
__author__ = 'derek'
def deploy(version):
local('python runtests.py')
local("git tag -a %s -m %s" % (version, version))
local("git push origin --tags")
local('python setup.py sdist upload')
|
Make sure to push the tags
|
Make sure to push the tags
|
Python
|
mit
|
winfieldco/django-mail-queue,Goury/django-mail-queue,Goury/django-mail-queue,dstegelman/django-mail-queue,dstegelman/django-mail-queue,styrmis/django-mail-queue
|
from fabric.api import local
__author__ = 'derek'
def deploy(version):
local('python runtests.py')
local("git tag -a %s -m %s" % (version, version))
local('python setup.py sdist upload')Make sure to push the tags
|
from fabric.api import local
__author__ = 'derek'
def deploy(version):
local('python runtests.py')
local("git tag -a %s -m %s" % (version, version))
local("git push origin --tags")
local('python setup.py sdist upload')
|
<commit_before>from fabric.api import local
__author__ = 'derek'
def deploy(version):
local('python runtests.py')
local("git tag -a %s -m %s" % (version, version))
local('python setup.py sdist upload')<commit_msg>Make sure to push the tags<commit_after>
|
from fabric.api import local
__author__ = 'derek'
def deploy(version):
local('python runtests.py')
local("git tag -a %s -m %s" % (version, version))
local("git push origin --tags")
local('python setup.py sdist upload')
|
from fabric.api import local
__author__ = 'derek'
def deploy(version):
local('python runtests.py')
local("git tag -a %s -m %s" % (version, version))
local('python setup.py sdist upload')Make sure to push the tagsfrom fabric.api import local
__author__ = 'derek'
def deploy(version):
local('python runtests.py')
local("git tag -a %s -m %s" % (version, version))
local("git push origin --tags")
local('python setup.py sdist upload')
|
<commit_before>from fabric.api import local
__author__ = 'derek'
def deploy(version):
local('python runtests.py')
local("git tag -a %s -m %s" % (version, version))
local('python setup.py sdist upload')<commit_msg>Make sure to push the tags<commit_after>from fabric.api import local
__author__ = 'derek'
def deploy(version):
local('python runtests.py')
local("git tag -a %s -m %s" % (version, version))
local("git push origin --tags")
local('python setup.py sdist upload')
|
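Note on the change above: `git tag` only creates the tag in the local repository, so without the added `git push origin --tags` releases were tagged locally but never published. A sketch of the full ordered flow (version string illustrative; commands printed rather than executed):

release_steps = [
    "python runtests.py",              # gate the release on the test suite
    "git tag -a 1.2.3 -m 1.2.3",       # annotated tag, local only at this point
    "git push origin --tags",          # the step the commit adds: publish the tags
    "python setup.py sdist upload",    # then ship the package
]
print("\n".join(release_steps))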
3e23d60857461b7806f3616cf41b2cd9c812fa7b
|
fabfile.py
|
fabfile.py
|
from fabric.api import cd, sudo, env
import os
expected_vars = [
'PROJECT',
]
for var in expected_vars:
if var not in os.environ:
raise Exception('Please specify %s environment variable' % (
var,))
PROJECT = os.environ['PROJECT']
USER = os.environ.get('USER', 'jmbo')
env.path = os.path.join('var', 'praekelt', PROJECT)
def restart():
sudo('/etc/init.d/nginx restart')
sudo('supervisorctl reload')
def deploy():
with cd(env.path):
sudo('git pull', user=USER)
sudo('ve/bin/python manage.py syncdb --migrate --noinput',
user=USER)
sudo('ve/bin/python manage.py collectstatic --noinput',
user=USER)
def install_packages(force=False):
with cd(env.path):
sudo('ve/bin/pip install %s -r requirements.pip' % (
'--upgrade' if force else '',), user=USER)
|
from fabric.api import cd, sudo, env
import os
expected_vars = [
'PROJECT',
]
for var in expected_vars:
if var not in os.environ:
raise Exception('Please specify %s environment variable' % (
var,))
PROJECT = os.environ['PROJECT']
USER = os.environ.get('USER', 'jmbo')
env.path = os.path.join('/', 'var', 'praekelt', PROJECT)
def restart():
sudo('/etc/init.d/nginx restart')
sudo('supervisorctl reload')
def deploy():
with cd(env.path):
sudo('git pull', user=USER)
sudo('ve/bin/python manage.py syncdb --migrate --noinput',
user=USER)
sudo('ve/bin/python manage.py collectstatic --noinput',
user=USER)
def install_packages(force=False):
with cd(env.path):
sudo('ve/bin/pip install %s -r requirements.pip' % (
'--upgrade' if force else '',), user=USER)
|
Make env.path (/var/praekelt/<PROJECT>) an absolute path.
|
Make env.path (/var/praekelt/<PROJECT>) an absolute path.
|
Python
|
bsd-3-clause
|
praekelt/go-rts-zambia
|
from fabric.api import cd, sudo, env
import os
expected_vars = [
'PROJECT',
]
for var in expected_vars:
if var not in os.environ:
raise Exception('Please specify %s environment variable' % (
var,))
PROJECT = os.environ['PROJECT']
USER = os.environ.get('USER', 'jmbo')
env.path = os.path.join('var', 'praekelt', PROJECT)
def restart():
sudo('/etc/init.d/nginx restart')
sudo('supervisorctl reload')
def deploy():
with cd(env.path):
sudo('git pull', user=USER)
sudo('ve/bin/python manage.py syncdb --migrate --noinput',
user=USER)
sudo('ve/bin/python manage.py collectstatic --noinput',
user=USER)
def install_packages(force=False):
with cd(env.path):
sudo('ve/bin/pip install %s -r requirements.pip' % (
'--upgrade' if force else '',), user=USER)
Make env.path (/var/praekelt/<PROJECT>) an absolute path.
|
from fabric.api import cd, sudo, env
import os
expected_vars = [
'PROJECT',
]
for var in expected_vars:
if var not in os.environ:
raise Exception('Please specify %s environment variable' % (
var,))
PROJECT = os.environ['PROJECT']
USER = os.environ.get('USER', 'jmbo')
env.path = os.path.join('/', 'var', 'praekelt', PROJECT)
def restart():
sudo('/etc/init.d/nginx restart')
sudo('supervisorctl reload')
def deploy():
with cd(env.path):
sudo('git pull', user=USER)
sudo('ve/bin/python manage.py syncdb --migrate --noinput',
user=USER)
sudo('ve/bin/python manage.py collectstatic --noinput',
user=USER)
def install_packages(force=False):
with cd(env.path):
sudo('ve/bin/pip install %s -r requirements.pip' % (
'--upgrade' if force else '',), user=USER)
|
<commit_before>from fabric.api import cd, sudo, env
import os
expected_vars = [
'PROJECT',
]
for var in expected_vars:
if var not in os.environ:
raise Exception('Please specify %s environment variable' % (
var,))
PROJECT = os.environ['PROJECT']
USER = os.environ.get('USER', 'jmbo')
env.path = os.path.join('var', 'praekelt', PROJECT)
def restart():
sudo('/etc/init.d/nginx restart')
sudo('supervisorctl reload')
def deploy():
with cd(env.path):
sudo('git pull', user=USER)
sudo('ve/bin/python manage.py syncdb --migrate --noinput',
user=USER)
sudo('ve/bin/python manage.py collectstatic --noinput',
user=USER)
def install_packages(force=False):
with cd(env.path):
sudo('ve/bin/pip install %s -r requirements.pip' % (
'--upgrade' if force else '',), user=USER)
<commit_msg>Make env.path (/var/praekelt/<PROJECT>) an absolute path.<commit_after>
|
from fabric.api import cd, sudo, env
import os
expected_vars = [
'PROJECT',
]
for var in expected_vars:
if var not in os.environ:
raise Exception('Please specify %s environment variable' % (
var,))
PROJECT = os.environ['PROJECT']
USER = os.environ.get('USER', 'jmbo')
env.path = os.path.join('/', 'var', 'praekelt', PROJECT)
def restart():
sudo('/etc/init.d/nginx restart')
sudo('supervisorctl reload')
def deploy():
with cd(env.path):
sudo('git pull', user=USER)
sudo('ve/bin/python manage.py syncdb --migrate --noinput',
user=USER)
sudo('ve/bin/python manage.py collectstatic --noinput',
user=USER)
def install_packages(force=False):
with cd(env.path):
sudo('ve/bin/pip install %s -r requirements.pip' % (
'--upgrade' if force else '',), user=USER)
|
from fabric.api import cd, sudo, env
import os
expected_vars = [
'PROJECT',
]
for var in expected_vars:
if var not in os.environ:
raise Exception('Please specify %s environment variable' % (
var,))
PROJECT = os.environ['PROJECT']
USER = os.environ.get('USER', 'jmbo')
env.path = os.path.join('var', 'praekelt', PROJECT)
def restart():
sudo('/etc/init.d/nginx restart')
sudo('supervisorctl reload')
def deploy():
with cd(env.path):
sudo('git pull', user=USER)
sudo('ve/bin/python manage.py syncdb --migrate --noinput',
user=USER)
sudo('ve/bin/python manage.py collectstatic --noinput',
user=USER)
def install_packages(force=False):
with cd(env.path):
sudo('ve/bin/pip install %s -r requirements.pip' % (
'--upgrade' if force else '',), user=USER)
Make env.path (/var/praekelt/<PROJECT>) an absolute path.from fabric.api import cd, sudo, env
import os
expected_vars = [
'PROJECT',
]
for var in expected_vars:
if var not in os.environ:
raise Exception('Please specify %s environment variable' % (
var,))
PROJECT = os.environ['PROJECT']
USER = os.environ.get('USER', 'jmbo')
env.path = os.path.join('/', 'var', 'praekelt', PROJECT)
def restart():
sudo('/etc/init.d/nginx restart')
sudo('supervisorctl reload')
def deploy():
with cd(env.path):
sudo('git pull', user=USER)
sudo('ve/bin/python manage.py syncdb --migrate --noinput',
user=USER)
sudo('ve/bin/python manage.py collectstatic --noinput',
user=USER)
def install_packages(force=False):
with cd(env.path):
sudo('ve/bin/pip install %s -r requirements.pip' % (
'--upgrade' if force else '',), user=USER)
|
<commit_before>from fabric.api import cd, sudo, env
import os
expected_vars = [
'PROJECT',
]
for var in expected_vars:
if var not in os.environ:
raise Exception('Please specify %s environment variable' % (
var,))
PROJECT = os.environ['PROJECT']
USER = os.environ.get('USER', 'jmbo')
env.path = os.path.join('var', 'praekelt', PROJECT)
def restart():
sudo('/etc/init.d/nginx restart')
sudo('supervisorctl reload')
def deploy():
with cd(env.path):
sudo('git pull', user=USER)
sudo('ve/bin/python manage.py syncdb --migrate --noinput',
user=USER)
sudo('ve/bin/python manage.py collectstatic --noinput',
user=USER)
def install_packages(force=False):
with cd(env.path):
sudo('ve/bin/pip install %s -r requirements.pip' % (
'--upgrade' if force else '',), user=USER)
<commit_msg>Make env.path (/var/praekelt/<PROJECT>) an absolute path.<commit_after>from fabric.api import cd, sudo, env
import os
expected_vars = [
'PROJECT',
]
for var in expected_vars:
if var not in os.environ:
raise Exception('Please specify %s environment variable' % (
var,))
PROJECT = os.environ['PROJECT']
USER = os.environ.get('USER', 'jmbo')
env.path = os.path.join('/', 'var', 'praekelt', PROJECT)
def restart():
sudo('/etc/init.d/nginx restart')
sudo('supervisorctl reload')
def deploy():
with cd(env.path):
sudo('git pull', user=USER)
sudo('ve/bin/python manage.py syncdb --migrate --noinput',
user=USER)
sudo('ve/bin/python manage.py collectstatic --noinput',
user=USER)
def install_packages(force=False):
with cd(env.path):
sudo('ve/bin/pip install %s -r requirements.pip' % (
'--upgrade' if force else '',), user=USER)
|
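Note on the change above: `os.path.join('var', 'praekelt', PROJECT)` yields a relative path, so every `cd(env.path)` resolved against the remote user's working directory. `os.path.join` only produces an absolute path when some component is absolute, which adding `'/'` as the first component guarantees:

import os

print(os.path.join("var", "praekelt", "proj"))       # var/praekelt/proj  (relative)
print(os.path.join("/", "var", "praekelt", "proj"))  # /var/praekelt/proj (absolute)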
e660d8e05e54adbd0ea199a02cc188dc7007089a
|
fabfile.py
|
fabfile.py
|
from fabric.api import cd, sudo, env
import os
expected_vars = [
'PROJECT',
]
for var in expected_vars:
if var not in os.environ:
raise Exception('Please specify %s environment variable' % (
var,))
PROJECT = os.environ['PROJECT']
USER = os.environ.get('USER', 'jmbo')
env.path = os.path.join('/', 'var', 'praekelt', PROJECT)
def restart():
sudo('/etc/init.d/nginx restart')
sudo('supervisorctl reload')
def deploy():
with cd(env.path):
sudo('git pull', user=USER)
sudo('ve/bin/python manage.py syncdb --migrate --noinput',
user=USER)
sudo('ve/bin/python manage.py collectstatic --noinput',
user=USER)
def install_packages(force=False):
with cd(env.path):
sudo('ve/bin/pip install %s -r requirements.pip' % (
'--upgrade' if force else '',), user=USER)
|
from fabric.api import cd, sudo, env
import os
PROJECT = os.environ.get('PROJECT', 'go-rts-zambia')
DEPLOY_USER = os.environ.get('DEPLOY_USER', 'jmbo')
env.path = os.path.join('/', 'var', 'praekelt', PROJECT)
def restart():
sudo('/etc/init.d/nginx restart')
sudo('supervisorctl reload')
def deploy():
with cd(env.path):
sudo('git pull', user=DEPLOY_USER)
sudo('ve/bin/python manage.py syncdb --migrate --noinput',
user=DEPLOY_USER)
sudo('ve/bin/python manage.py collectstatic --noinput',
user=DEPLOY_USER)
def install_packages(force=False):
with cd(env.path):
sudo('ve/bin/pip install %s -r requirements.pip' % (
'--upgrade' if force else '',), user=DEPLOY_USER)
|
Fix USER in fabric file (the deploy user and ssh user aren't necessarily the same). Set default value for PROJECT (this is the go-rts-zambia repo after all).
|
Fix USER in fabric file (the deploy user and ssh user aren't necessarily the same). Set default value for PROJECT (this is the go-rts-zambia repo after all).
|
Python
|
bsd-3-clause
|
praekelt/go-rts-zambia
|
from fabric.api import cd, sudo, env
import os
expected_vars = [
'PROJECT',
]
for var in expected_vars:
if var not in os.environ:
raise Exception('Please specify %s environment variable' % (
var,))
PROJECT = os.environ['PROJECT']
USER = os.environ.get('USER', 'jmbo')
env.path = os.path.join('/', 'var', 'praekelt', PROJECT)
def restart():
sudo('/etc/init.d/nginx restart')
sudo('supervisorctl reload')
def deploy():
with cd(env.path):
sudo('git pull', user=USER)
sudo('ve/bin/python manage.py syncdb --migrate --noinput',
user=USER)
sudo('ve/bin/python manage.py collectstatic --noinput',
user=USER)
def install_packages(force=False):
with cd(env.path):
sudo('ve/bin/pip install %s -r requirements.pip' % (
'--upgrade' if force else '',), user=USER)
Fix USER in fabric file (the deploy user and ssh user aren't necessarily the same). Set default value for PROJECT (this is the go-rts-zambia repo after all).
|
from fabric.api import cd, sudo, env
import os
PROJECT = os.environ.get('PROJECT', 'go-rts-zambia')
DEPLOY_USER = os.environ.get('DEPLOY_USER', 'jmbo')
env.path = os.path.join('/', 'var', 'praekelt', PROJECT)
def restart():
sudo('/etc/init.d/nginx restart')
sudo('supervisorctl reload')
def deploy():
with cd(env.path):
sudo('git pull', user=DEPLOY_USER)
sudo('ve/bin/python manage.py syncdb --migrate --noinput',
user=DEPLOY_USER)
sudo('ve/bin/python manage.py collectstatic --noinput',
user=DEPLOY_USER)
def install_packages(force=False):
with cd(env.path):
sudo('ve/bin/pip install %s -r requirements.pip' % (
'--upgrade' if force else '',), user=DEPLOY_USER)
|
<commit_before>from fabric.api import cd, sudo, env
import os
expected_vars = [
'PROJECT',
]
for var in expected_vars:
if var not in os.environ:
raise Exception('Please specify %s environment variable' % (
var,))
PROJECT = os.environ['PROJECT']
USER = os.environ.get('USER', 'jmbo')
env.path = os.path.join('/', 'var', 'praekelt', PROJECT)
def restart():
sudo('/etc/init.d/nginx restart')
sudo('supervisorctl reload')
def deploy():
with cd(env.path):
sudo('git pull', user=USER)
sudo('ve/bin/python manage.py syncdb --migrate --noinput',
user=USER)
sudo('ve/bin/python manage.py collectstatic --noinput',
user=USER)
def install_packages(force=False):
with cd(env.path):
sudo('ve/bin/pip install %s -r requirements.pip' % (
'--upgrade' if force else '',), user=USER)
<commit_msg>Fix USER in fabric file (the deploy user and ssh user aren't necessarily the same). Set default value for PROJECT (this is the go-rts-zambia repo after all).<commit_after>
|
from fabric.api import cd, sudo, env
import os
PROJECT = os.environ.get('PROJECT', 'go-rts-zambia')
DEPLOY_USER = os.environ.get('DEPLOY_USER', 'jmbo')
env.path = os.path.join('/', 'var', 'praekelt', PROJECT)
def restart():
sudo('/etc/init.d/nginx restart')
sudo('supervisorctl reload')
def deploy():
with cd(env.path):
sudo('git pull', user=DEPLOY_USER)
sudo('ve/bin/python manage.py syncdb --migrate --noinput',
user=DEPLOY_USER)
sudo('ve/bin/python manage.py collectstatic --noinput',
user=DEPLOY_USER)
def install_packages(force=False):
with cd(env.path):
sudo('ve/bin/pip install %s -r requirements.pip' % (
'--upgrade' if force else '',), user=DEPLOY_USER)
|
from fabric.api import cd, sudo, env
import os
expected_vars = [
'PROJECT',
]
for var in expected_vars:
if var not in os.environ:
raise Exception('Please specify %s environment variable' % (
var,))
PROJECT = os.environ['PROJECT']
USER = os.environ.get('USER', 'jmbo')
env.path = os.path.join('/', 'var', 'praekelt', PROJECT)
def restart():
sudo('/etc/init.d/nginx restart')
sudo('supervisorctl reload')
def deploy():
with cd(env.path):
sudo('git pull', user=USER)
sudo('ve/bin/python manage.py syncdb --migrate --noinput',
user=USER)
sudo('ve/bin/python manage.py collectstatic --noinput',
user=USER)
def install_packages(force=False):
with cd(env.path):
sudo('ve/bin/pip install %s -r requirements.pip' % (
'--upgrade' if force else '',), user=USER)
Fix USER in fabric file (the deploy user and ssh user aren't necessarily the same). Set default value for PROJECT (this is the go-rts-zambia repo after all).from fabric.api import cd, sudo, env
import os
PROJECT = os.environ.get('PROJECT', 'go-rts-zambia')
DEPLOY_USER = os.environ.get('DEPLOY_USER', 'jmbo')
env.path = os.path.join('/', 'var', 'praekelt', PROJECT)
def restart():
sudo('/etc/init.d/nginx restart')
sudo('supervisorctl reload')
def deploy():
with cd(env.path):
sudo('git pull', user=DEPLOY_USER)
sudo('ve/bin/python manage.py syncdb --migrate --noinput',
user=DEPLOY_USER)
sudo('ve/bin/python manage.py collectstatic --noinput',
user=DEPLOY_USER)
def install_packages(force=False):
with cd(env.path):
sudo('ve/bin/pip install %s -r requirements.pip' % (
'--upgrade' if force else '',), user=DEPLOY_USER)
|
<commit_before>from fabric.api import cd, sudo, env
import os
expected_vars = [
'PROJECT',
]
for var in expected_vars:
if var not in os.environ:
raise Exception('Please specify %s environment variable' % (
var,))
PROJECT = os.environ['PROJECT']
USER = os.environ.get('USER', 'jmbo')
env.path = os.path.join('/', 'var', 'praekelt', PROJECT)
def restart():
sudo('/etc/init.d/nginx restart')
sudo('supervisorctl reload')
def deploy():
with cd(env.path):
sudo('git pull', user=USER)
sudo('ve/bin/python manage.py syncdb --migrate --noinput',
user=USER)
sudo('ve/bin/python manage.py collectstatic --noinput',
user=USER)
def install_packages(force=False):
with cd(env.path):
sudo('ve/bin/pip install %s -r requirements.pip' % (
'--upgrade' if force else '',), user=USER)
<commit_msg>Fix USER in fabric file (the deploy user and ssh user aren't necessarily the same). Set default value for PROJECT (this is the go-rts-zambia repo after all).<commit_after>from fabric.api import cd, sudo, env
import os
PROJECT = os.environ.get('PROJECT', 'go-rts-zambia')
DEPLOY_USER = os.environ.get('DEPLOY_USER', 'jmbo')
env.path = os.path.join('/', 'var', 'praekelt', PROJECT)
def restart():
sudo('/etc/init.d/nginx restart')
sudo('supervisorctl reload')
def deploy():
with cd(env.path):
sudo('git pull', user=DEPLOY_USER)
sudo('ve/bin/python manage.py syncdb --migrate --noinput',
user=DEPLOY_USER)
sudo('ve/bin/python manage.py collectstatic --noinput',
user=DEPLOY_USER)
def install_packages(force=False):
with cd(env.path):
sudo('ve/bin/pip install %s -r requirements.pip' % (
'--upgrade' if force else '',), user=DEPLOY_USER)
|
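Note on the change above: login shells export `USER` for whoever runs the command, so `os.environ.get('USER', 'jmbo')` effectively never reached its fallback and deploys ran as the SSH user. Renaming the knob to `DEPLOY_USER` keeps the two identities separate, as this small check shows:

import os

ssh_user = os.environ.get("USER", "<unset>")          # set by the login shell
deploy_user = os.environ.get("DEPLOY_USER", "jmbo")   # dedicated, collision-free knob
print("running as:", ssh_user, "| deploying as:", deploy_user)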
04e8206a8610666c6027fc0f4be5e786e4bd5513
|
fabfile.py
|
fabfile.py
|
set(
fab_hosts = ['startthedark.com'],
fab_user = 'startthedark',
)
def unlink_nginx():
'Un-link nginx rules for startthedark.'
sudo('rm -f /etc/nginx/sites-enabled/startthedark.com')
sudo('/etc/init.d/nginx reload')
def link_nginx():
'Link nginx rules for startthedark'
sudo('ln -s /etc/nginx/sites-available/startthedark.com /etc/nginx/sites-enabled/startthedark.com')
sudo('/etc/init.d/nginx reload')
def deploy():
'Deploy startthedark.'
local('bash make_prod_css.sh')
set(fab_fail = 'ignore')
local('git commit -a -m "Rebuilt Prod CSS For Commit"')
local('git push origin master')
set(fab_fail = 'abort')
run('cd /var/www/startthedark.com/startthedark; git pull;')
run('cd /var/www/startthedark.com/startthedark; /usr/bin/python manage.py syncdb')
sudo('/etc/init.d/apache2 reload')
|
set(
fab_hosts = ['startthedark.com'],
fab_user = 'startthedark',
)
def unlink_nginx():
'Un-link nginx rules for startthedark.'
sudo('rm -f /etc/nginx/sites-enabled/startthedark.com')
sudo('/etc/init.d/nginx reload')
def link_nginx():
'Link nginx rules for startthedark'
sudo('ln -s /etc/nginx/sites-available/startthedark.com /etc/nginx/sites-enabled/startthedark.com')
sudo('/etc/init.d/nginx reload')
def rebuild_prod_css():
local('bash make_prod_css.sh')
local('git commit -a -m "Rebuilt Prod CSS For Commit"')
local('git push origin master')
def deploy():
'Deploy startthedark.'
run('cd /var/www/startthedark.com/startthedark; git pull;')
run('cd /var/www/startthedark.com/startthedark; /usr/bin/python manage.py syncdb')
sudo('/etc/init.d/apache2 reload')
|
Split out the css rebuilding into its own fab method.
|
Split out the css rebuilding into its own fab method.
|
Python
|
bsd-3-clause
|
mvayngrib/startthedark,mvayngrib/startthedark,ericflo/startthedark,ericflo/startthedark,mvayngrib/startthedark,ericflo/startthedark
|
set(
fab_hosts = ['startthedark.com'],
fab_user = 'startthedark',
)
def unlink_nginx():
'Un-link nginx rules for startthedark.'
sudo('rm -f /etc/nginx/sites-enabled/startthedark.com')
sudo('/etc/init.d/nginx reload')
def link_nginx():
'Link nginx rules for startthedark'
sudo('ln -s /etc/nginx/sites-available/startthedark.com /etc/nginx/sites-enabled/startthedark.com')
sudo('/etc/init.d/nginx reload')
def deploy():
'Deploy startthedark.'
local('bash make_prod_css.sh')
set(fab_fail = 'ignore')
local('git commit -a -m "Rebuilt Prod CSS For Commit"')
local('git push origin master')
set(fab_fail = 'abort')
run('cd /var/www/startthedark.com/startthedark; git pull;')
run('cd /var/www/startthedark.com/startthedark; /usr/bin/python manage.py syncdb')
sudo('/etc/init.d/apache2 reload')
Split out the css rebuilding into its own fab method.
|
set(
fab_hosts = ['startthedark.com'],
fab_user = 'startthedark',
)
def unlink_nginx():
'Un-link nginx rules for startthedark.'
sudo('rm -f /etc/nginx/sites-enabled/startthedark.com')
sudo('/etc/init.d/nginx reload')
def link_nginx():
'Link nginx rules for startthedark'
sudo('ln -s /etc/nginx/sites-available/startthedark.com /etc/nginx/sites-enabled/startthedark.com')
sudo('/etc/init.d/nginx reload')
def rebuild_prod_css():
local('bash make_prod_css.sh')
local('git commit -a -m "Rebuilt Prod CSS For Commit"')
local('git push origin master')
def deploy():
'Deploy startthedark.'
run('cd /var/www/startthedark.com/startthedark; git pull;')
run('cd /var/www/startthedark.com/startthedark; /usr/bin/python manage.py syncdb')
sudo('/etc/init.d/apache2 reload')
|
<commit_before>set(
fab_hosts = ['startthedark.com'],
fab_user = 'startthedark',
)
def unlink_nginx():
'Un-link nginx rules for startthedark.'
sudo('rm -f /etc/nginx/sites-enabled/startthedark.com')
sudo('/etc/init.d/nginx reload')
def link_nginx():
'Link nginx rules for startthedark'
sudo('ln -s /etc/nginx/sites-available/startthedark.com /etc/nginx/sites-enabled/startthedark.com')
sudo('/etc/init.d/nginx reload')
def deploy():
'Deploy startthedark.'
local('bash make_prod_css.sh')
set(fab_fail = 'ignore')
local('git commit -a -m "Rebuilt Prod CSS For Commit"')
local('git push origin master')
set(fab_fail = 'abort')
run('cd /var/www/startthedark.com/startthedark; git pull;')
run('cd /var/www/startthedark.com/startthedark; /usr/bin/python manage.py syncdb')
sudo('/etc/init.d/apache2 reload')
<commit_msg>Split out the css rebuilding into its own fab method.<commit_after>
|
set(
fab_hosts = ['startthedark.com'],
fab_user = 'startthedark',
)
def unlink_nginx():
'Un-link nginx rules for startthedark.'
sudo('rm -f /etc/nginx/sites-enabled/startthedark.com')
sudo('/etc/init.d/nginx reload')
def link_nginx():
'Link nginx rules for startthedark'
sudo('ln -s /etc/nginx/sites-available/startthedark.com /etc/nginx/sites-enabled/startthedark.com')
sudo('/etc/init.d/nginx reload')
def rebuild_prod_css():
local('bash make_prod_css.sh')
local('git commit -a -m "Rebuilt Prod CSS For Commit"')
local('git push origin master')
def deploy():
'Deploy startthedark.'
run('cd /var/www/startthedark.com/startthedark; git pull;')
run('cd /var/www/startthedark.com/startthedark; /usr/bin/python manage.py syncdb')
sudo('/etc/init.d/apache2 reload')
|
set(
fab_hosts = ['startthedark.com'],
fab_user = 'startthedark',
)
def unlink_nginx():
'Un-link nginx rules for startthedark.'
sudo('rm -f /etc/nginx/sites-enabled/startthedark.com')
sudo('/etc/init.d/nginx reload')
def link_nginx():
'Link nginx rules for startthedark'
sudo('ln -s /etc/nginx/sites-available/startthedark.com /etc/nginx/sites-enabled/startthedark.com')
sudo('/etc/init.d/nginx reload')
def deploy():
'Deploy startthedark.'
local('bash make_prod_css.sh')
set(fab_fail = 'ignore')
local('git commit -a -m "Rebuilt Prod CSS For Commit"')
local('git push origin master')
set(fab_fail = 'abort')
run('cd /var/www/startthedark.com/startthedark; git pull;')
run('cd /var/www/startthedark.com/startthedark; /usr/bin/python manage.py syncdb')
sudo('/etc/init.d/apache2 reload')
Split out the css rebuilding into its own fab method.set(
fab_hosts = ['startthedark.com'],
fab_user = 'startthedark',
)
def unlink_nginx():
'Un-link nginx rules for startthedark.'
sudo('rm -f /etc/nginx/sites-enabled/startthedark.com')
sudo('/etc/init.d/nginx reload')
def link_nginx():
'Link nginx rules for startthedark'
sudo('ln -s /etc/nginx/sites-available/startthedark.com /etc/nginx/sites-enabled/startthedark.com')
sudo('/etc/init.d/nginx reload')
def rebuild_prod_css():
local('bash make_prod_css.sh')
local('git commit -a -m "Rebuilt Prod CSS For Commit"')
local('git push origin master')
def deploy():
'Deploy startthedark.'
run('cd /var/www/startthedark.com/startthedark; git pull;')
run('cd /var/www/startthedark.com/startthedark; /usr/bin/python manage.py syncdb')
sudo('/etc/init.d/apache2 reload')
|
<commit_before>set(
fab_hosts = ['startthedark.com'],
fab_user = 'startthedark',
)
def unlink_nginx():
'Un-link nginx rules for startthedark.'
sudo('rm -f /etc/nginx/sites-enabled/startthedark.com')
sudo('/etc/init.d/nginx reload')
def link_nginx():
'Link nginx rules for startthedark'
sudo('ln -s /etc/nginx/sites-available/startthedark.com /etc/nginx/sites-enabled/startthedark.com')
sudo('/etc/init.d/nginx reload')
def deploy():
'Deploy startthedark.'
local('bash make_prod_css.sh')
set(fab_fail = 'ignore')
local('git commit -a -m "Rebuilt Prod CSS For Commit"')
local('git push origin master')
set(fab_fail = 'abort')
run('cd /var/www/startthedark.com/startthedark; git pull;')
run('cd /var/www/startthedark.com/startthedark; /usr/bin/python manage.py syncdb')
sudo('/etc/init.d/apache2 reload')
<commit_msg>Split out the css rebuilding into its own fab method.<commit_after>set(
fab_hosts = ['startthedark.com'],
fab_user = 'startthedark',
)
def unlink_nginx():
'Un-link nginx rules for startthedark.'
sudo('rm -f /etc/nginx/sites-enabled/startthedark.com')
sudo('/etc/init.d/nginx reload')
def link_nginx():
'Link nginx rules for startthedark'
sudo('ln -s /etc/nginx/sites-available/startthedark.com /etc/nginx/sites-enabled/startthedark.com')
sudo('/etc/init.d/nginx reload')
def rebuild_prod_css():
local('bash make_prod_css.sh')
local('git commit -a -m "Rebuilt Prod CSS For Commit"')
local('git push origin master')
def deploy():
'Deploy startthedark.'
run('cd /var/www/startthedark.com/startthedark; git pull;')
run('cd /var/www/startthedark.com/startthedark; /usr/bin/python manage.py syncdb')
sudo('/etc/init.d/apache2 reload')
|
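A minimal sketch, assuming plain subprocess in place of the old Fabric helpers, of the refactor above: the local CSS steps become a task of their own that deploy no longer triggers implicitly. The command strings are copied from the fabfile; everything else is illustrative.
# Sketch only: subprocess stands in for Fabric's local().
import subprocess

def rebuild_prod_css():
    # Local-only steps; with check=True the commit fails fast on a clean
    # tree, which is what the old fab_fail='ignore' toggling papered over.
    for cmd in ('bash make_prod_css.sh',
                'git commit -a -m "Rebuilt Prod CSS For Commit"',
                'git push origin master'):
        subprocess.run(cmd, shell=True, check=True)

if __name__ == '__main__':
    rebuild_prod_css()   # deploy() can now assume the CSS is already pushed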
b8b72be48328ba0bc6e946a4ecf15c00f5f8b3b6
|
director/director/config/dev.py
|
director/director/config/dev.py
|
import imp
import os
import sys
import traceback
from os.path import dirname
from configurations import values
from .common import Common, BASE_DIR, external_keys
SECRETS_DIR = os.path.join(dirname(BASE_DIR), "secrets")
class Dev(Common):
DEBUG = True
INSTALLED_APPS = Common.INSTALLED_APPS + [
'django_extensions'
]
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
@classmethod
def setup(cls):
super(Dev, cls).setup()
filename = "director_dev_secrets.py"
try:
dev_secrets = imp.load_source(
"dev_secrets", os.path.join(SECRETS_DIR, filename))
except ImportError as e:
print("Could not import %s: %s" % (filename, e))
return
for key in external_keys:
if not hasattr(cls, key) and hasattr(dev_secrets, key):
setattr(cls, key, getattr(dev_secrets, key))
|
import imp
import os
import sys
import traceback
from os.path import dirname
from configurations import values
from .common import Common, BASE_DIR, external_keys
SECRETS_DIR = os.path.join(dirname(BASE_DIR), "secrets")
class Dev(Common):
DEBUG = True
INSTALLED_APPS = Common.INSTALLED_APPS + [
'django_extensions'
]
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
@classmethod
def setup(cls):
super(Dev, cls).setup()
filename = "director_dev_secrets.py"
path = os.path.join(SECRETS_DIR, filename)
try:
dev_secrets = imp.load_source("dev_secrets", path)
except ImportError as e:
print("Could not import %s: %s" % (filename, e))
return
except FileNotFoundError as e:
print("File %s not found" % path)
return
for key in external_keys:
if not hasattr(cls, key) and hasattr(dev_secrets, key):
setattr(cls, key, getattr(dev_secrets, key))
|
Print message when secrets file is missing
|
Print message when secrets file is missing
|
Python
|
apache-2.0
|
stencila/hub,stencila/hub,stencila/hub,stencila/hub,stencila/hub
|
import imp
import os
import sys
import traceback
from os.path import dirname
from configurations import values
from .common import Common, BASE_DIR, external_keys
SECRETS_DIR = os.path.join(dirname(BASE_DIR), "secrets")
class Dev(Common):
DEBUG = True
INSTALLED_APPS = Common.INSTALLED_APPS + [
'django_extensions'
]
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
@classmethod
def setup(cls):
super(Dev, cls).setup()
filename = "director_dev_secrets.py"
try:
dev_secrets = imp.load_source(
"dev_secrets", os.path.join(SECRETS_DIR, filename))
except ImportError as e:
print("Could not import %s: %s" % (filename, e))
return
for key in external_keys:
if not hasattr(cls, key) and hasattr(dev_secrets, key):
setattr(cls, key, getattr(dev_secrets, key))
Print message when secrets file is missing
|
import imp
import os
import sys
import traceback
from os.path import dirname
from configurations import values
from .common import Common, BASE_DIR, external_keys
SECRETS_DIR = os.path.join(dirname(BASE_DIR), "secrets")
class Dev(Common):
DEBUG = True
INSTALLED_APPS = Common.INSTALLED_APPS + [
'django_extensions'
]
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
@classmethod
def setup(cls):
super(Dev, cls).setup()
filename = "director_dev_secrets.py"
path = os.path.join(SECRETS_DIR, filename)
try:
dev_secrets = imp.load_source("dev_secrets", path)
except ImportError as e:
print("Could not import %s: %s" % (filename, e))
return
except FileNotFoundError as e:
print("File %s not found" % path)
return
for key in external_keys:
if not hasattr(cls, key) and hasattr(dev_secrets, key):
setattr(cls, key, getattr(dev_secrets, key))
|
<commit_before>import imp
import os
import sys
import traceback
from os.path import dirname
from configurations import values
from .common import Common, BASE_DIR, external_keys
SECRETS_DIR = os.path.join(dirname(BASE_DIR), "secrets")
class Dev(Common):
DEBUG = True
INSTALLED_APPS = Common.INSTALLED_APPS + [
'django_extensions'
]
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
@classmethod
def setup(cls):
super(Dev, cls).setup()
filename = "director_dev_secrets.py"
try:
dev_secrets = imp.load_source(
"dev_secrets", os.path.join(SECRETS_DIR, filename))
except ImportError as e:
print("Could not import %s: %s" % (filename, e))
return
for key in external_keys:
if not hasattr(cls, key) and hasattr(dev_secrets, key):
setattr(cls, key, getattr(dev_secrets, key))
<commit_msg>Print message when secrets file is missing<commit_after>
|
import imp
import os
import sys
import traceback
from os.path import dirname
from configurations import values
from .common import Common, BASE_DIR, external_keys
SECRETS_DIR = os.path.join(dirname(BASE_DIR), "secrets")
class Dev(Common):
DEBUG = True
INSTALLED_APPS = Common.INSTALLED_APPS + [
'django_extensions'
]
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
@classmethod
def setup(cls):
super(Dev, cls).setup()
filename = "director_dev_secrets.py"
path = os.path.join(SECRETS_DIR, filename)
try:
dev_secrets = imp.load_source("dev_secrets", path)
except ImportError as e:
print("Could not import %s: %s" % (filename, e))
return
except FileNotFoundError as e:
print("File %s not found" % path)
return
for key in external_keys:
if not hasattr(cls, key) and hasattr(dev_secrets, key):
setattr(cls, key, getattr(dev_secrets, key))
|
import imp
import os
import sys
import traceback
from os.path import dirname
from configurations import values
from .common import Common, BASE_DIR, external_keys
SECRETS_DIR = os.path.join(dirname(BASE_DIR), "secrets")
class Dev(Common):
DEBUG = True
INSTALLED_APPS = Common.INSTALLED_APPS + [
'django_extensions'
]
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
@classmethod
def setup(cls):
super(Dev, cls).setup()
filename = "director_dev_secrets.py"
try:
dev_secrets = imp.load_source(
"dev_secrets", os.path.join(SECRETS_DIR, filename))
except ImportError as e:
print("Could not import %s: %s" % (filename, e))
return
for key in external_keys:
if not hasattr(cls, key) and hasattr(dev_secrets, key):
setattr(cls, key, getattr(dev_secrets, key))
Print message when secrets file is missingimport imp
import os
import sys
import traceback
from os.path import dirname
from configurations import values
from .common import Common, BASE_DIR, external_keys
SECRETS_DIR = os.path.join(dirname(BASE_DIR), "secrets")
class Dev(Common):
DEBUG = True
INSTALLED_APPS = Common.INSTALLED_APPS + [
'django_extensions'
]
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
@classmethod
def setup(cls):
super(Dev, cls).setup()
filename = "director_dev_secrets.py"
path = os.path.join(SECRETS_DIR, filename)
try:
dev_secrets = imp.load_source("dev_secrets", path)
except ImportError as e:
print("Could not import %s: %s" % (filename, e))
return
except FileNotFoundError as e:
print("File %s not found" % path)
return
for key in external_keys:
if not hasattr(cls, key) and hasattr(dev_secrets, key):
setattr(cls, key, getattr(dev_secrets, key))
|
<commit_before>import imp
import os
import sys
import traceback
from os.path import dirname
from configurations import values
from .common import Common, BASE_DIR, external_keys
SECRETS_DIR = os.path.join(dirname(BASE_DIR), "secrets")
class Dev(Common):
DEBUG = True
INSTALLED_APPS = Common.INSTALLED_APPS + [
'django_extensions'
]
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
@classmethod
def setup(cls):
super(Dev, cls).setup()
filename = "director_dev_secrets.py"
try:
dev_secrets = imp.load_source(
"dev_secrets", os.path.join(SECRETS_DIR, filename))
except ImportError as e:
print("Could not import %s: %s" % (filename, e))
return
for key in external_keys:
if not hasattr(cls, key) and hasattr(dev_secrets, key):
setattr(cls, key, getattr(dev_secrets, key))
<commit_msg>Print message when secrets file is missing<commit_after>import imp
import os
import sys
import traceback
from os.path import dirname
from configurations import values
from .common import Common, BASE_DIR, external_keys
SECRETS_DIR = os.path.join(dirname(BASE_DIR), "secrets")
class Dev(Common):
DEBUG = True
INSTALLED_APPS = Common.INSTALLED_APPS + [
'django_extensions'
]
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
@classmethod
def setup(cls):
super(Dev, cls).setup()
filename = "director_dev_secrets.py"
path = os.path.join(SECRETS_DIR, filename)
try:
dev_secrets = imp.load_source("dev_secrets", path)
except ImportError as e:
print("Could not import %s: %s" % (filename, e))
return
except FileNotFoundError as e:
print("File %s not found" % path)
return
for key in external_keys:
if not hasattr(cls, key) and hasattr(dev_secrets, key):
setattr(cls, key, getattr(dev_secrets, key))
|
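A minimal sketch of the same optional-secrets pattern, assuming importlib in place of the deprecated imp module; the module name and path below are illustrative.
import os
import importlib.util

def load_optional_module(name, path):
    # Mirror the commit's behaviour: report a missing file instead of crashing.
    if not os.path.exists(path):
        print("File %s not found" % path)
        return None
    spec = importlib.util.spec_from_file_location(name, path)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    return module

secrets = load_optional_module('dev_secrets', '/tmp/director_dev_secrets.py')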
9e90fe6523e7d61d6b94f9bb37a2dbf711cd3b83
|
example/__init__.py
|
example/__init__.py
|
"""An example application using Confit for configuration."""
from __future__ import print_function
from __future__ import unicode_literals
import confit
import argparse
config = confit.LazyConfig('ConfitExample', __name__)
def main():
parser = argparse.ArgumentParser(description='example Confit program')
parser.add_argument('--library', '-l', dest='library', metavar='LIBPATH',
help='library database file')
parser.add_argument('--directory', '-d', dest='directory',
metavar='DIRECTORY',
help='destination music directory')
parser.add_argument('--verbose', '-v', dest='verbose', action='store_true',
help='print debugging messages')
args = parser.parse_args()
config.set_args(args)
print('configuration directory is', config.config_dir())
# Use a boolean flag and the transient overlay.
if config['verbose']:
print('verbose mode')
config['log']['level'] = 2
else:
config['log']['level'] = 0
print('logging level is', config['log']['level'].validate(int))
# Some validated/converted values.
print('directory is', config['directory'].as_filename())
print('library is', config['library'].as_filename())
|
"""An example application using Confit for configuration."""
from __future__ import print_function
from __future__ import unicode_literals
import confit
import argparse
config = confit.LazyConfig('ConfitExample', __name__)
def main():
parser = argparse.ArgumentParser(description='example Confit program')
parser.add_argument('--library', '-l', dest='library', metavar='LIBPATH',
help='library database file')
parser.add_argument('--directory', '-d', dest='directory',
metavar='DIRECTORY',
help='destination music directory')
parser.add_argument('--verbose', '-v', dest='verbose', action='store_true',
help='print debugging messages')
args = parser.parse_args()
config.set_args(args)
print('configuration directory is', config.config_dir())
# Use a boolean flag and the transient overlay.
if config['verbose']:
print('verbose mode')
config['log']['level'] = 2
else:
config['log']['level'] = 0
print('logging level is', config['log']['level'].get(int))
# Some validated/converted values.
print('directory is', config['directory'].as_filename())
print('library is', config['library'].as_filename())
|
Fix example to use get
|
Fix example to use get
|
Python
|
mit
|
sampsyo/confit,sampsyo/confuse
|
"""An example application using Confit for configuration."""
from __future__ import print_function
from __future__ import unicode_literals
import confit
import argparse
config = confit.LazyConfig('ConfitExample', __name__)
def main():
parser = argparse.ArgumentParser(description='example Confit program')
parser.add_argument('--library', '-l', dest='library', metavar='LIBPATH',
help='library database file')
parser.add_argument('--directory', '-d', dest='directory',
metavar='DIRECTORY',
help='destination music directory')
parser.add_argument('--verbose', '-v', dest='verbose', action='store_true',
help='print debugging messages')
args = parser.parse_args()
config.set_args(args)
print('configuration directory is', config.config_dir())
# Use a boolean flag and the transient overlay.
if config['verbose']:
print('verbose mode')
config['log']['level'] = 2
else:
config['log']['level'] = 0
print('logging level is', config['log']['level'].validate(int))
# Some validated/converted values.
print('directory is', config['directory'].as_filename())
print('library is', config['library'].as_filename())
Fix example to use get
|
"""An example application using Confit for configuration."""
from __future__ import print_function
from __future__ import unicode_literals
import confit
import argparse
config = confit.LazyConfig('ConfitExample', __name__)
def main():
parser = argparse.ArgumentParser(description='example Confit program')
parser.add_argument('--library', '-l', dest='library', metavar='LIBPATH',
help='library database file')
parser.add_argument('--directory', '-d', dest='directory',
metavar='DIRECTORY',
help='destination music directory')
parser.add_argument('--verbose', '-v', dest='verbose', action='store_true',
help='print debugging messages')
args = parser.parse_args()
config.set_args(args)
print('configuration directory is', config.config_dir())
# Use a boolean flag and the transient overlay.
if config['verbose']:
print('verbose mode')
config['log']['level'] = 2
else:
config['log']['level'] = 0
print('logging level is', config['log']['level'].get(int))
# Some validated/converted values.
print('directory is', config['directory'].as_filename())
print('library is', config['library'].as_filename())
|
<commit_before>"""An example application using Confit for configuration."""
from __future__ import print_function
from __future__ import unicode_literals
import confit
import argparse
config = confit.LazyConfig('ConfitExample', __name__)
def main():
parser = argparse.ArgumentParser(description='example Confit program')
parser.add_argument('--library', '-l', dest='library', metavar='LIBPATH',
help='library database file')
parser.add_argument('--directory', '-d', dest='directory',
metavar='DIRECTORY',
help='destination music directory')
parser.add_argument('--verbose', '-v', dest='verbose', action='store_true',
help='print debugging messages')
args = parser.parse_args()
config.set_args(args)
print('configuration directory is', config.config_dir())
# Use a boolean flag and the transient overlay.
if config['verbose']:
print('verbose mode')
config['log']['level'] = 2
else:
config['log']['level'] = 0
print('logging level is', config['log']['level'].validate(int))
# Some validated/converted values.
print('directory is', config['directory'].as_filename())
print('library is', config['library'].as_filename())
<commit_msg>Fix example to use get<commit_after>
|
"""An example application using Confit for configuration."""
from __future__ import print_function
from __future__ import unicode_literals
import confit
import argparse
config = confit.LazyConfig('ConfitExample', __name__)
def main():
parser = argparse.ArgumentParser(description='example Confit program')
parser.add_argument('--library', '-l', dest='library', metavar='LIBPATH',
help='library database file')
parser.add_argument('--directory', '-d', dest='directory',
metavar='DIRECTORY',
help='destination music directory')
parser.add_argument('--verbose', '-v', dest='verbose', action='store_true',
help='print debugging messages')
args = parser.parse_args()
config.set_args(args)
print('configuration directory is', config.config_dir())
# Use a boolean flag and the transient overlay.
if config['verbose']:
print('verbose mode')
config['log']['level'] = 2
else:
config['log']['level'] = 0
print('logging level is', config['log']['level'].get(int))
# Some validated/converted values.
print('directory is', config['directory'].as_filename())
print('library is', config['library'].as_filename())
|
"""An example application using Confit for configuration."""
from __future__ import print_function
from __future__ import unicode_literals
import confit
import argparse
config = confit.LazyConfig('ConfitExample', __name__)
def main():
parser = argparse.ArgumentParser(description='example Confit program')
parser.add_argument('--library', '-l', dest='library', metavar='LIBPATH',
help='library database file')
parser.add_argument('--directory', '-d', dest='directory',
metavar='DIRECTORY',
help='destination music directory')
parser.add_argument('--verbose', '-v', dest='verbose', action='store_true',
help='print debugging messages')
args = parser.parse_args()
config.set_args(args)
print('configuration directory is', config.config_dir())
# Use a boolean flag and the transient overlay.
if config['verbose']:
print('verbose mode')
config['log']['level'] = 2
else:
config['log']['level'] = 0
print('logging level is', config['log']['level'].validate(int))
# Some validated/converted values.
print('directory is', config['directory'].as_filename())
print('library is', config['library'].as_filename())
Fix example to use get"""An example application using Confit for configuration."""
from __future__ import print_function
from __future__ import unicode_literals
import confit
import argparse
config = confit.LazyConfig('ConfitExample', __name__)
def main():
parser = argparse.ArgumentParser(description='example Confit program')
parser.add_argument('--library', '-l', dest='library', metavar='LIBPATH',
help='library database file')
parser.add_argument('--directory', '-d', dest='directory',
metavar='DIRECTORY',
help='destination music directory')
parser.add_argument('--verbose', '-v', dest='verbose', action='store_true',
help='print debugging messages')
args = parser.parse_args()
config.set_args(args)
print('configuration directory is', config.config_dir())
# Use a boolean flag and the transient overlay.
if config['verbose']:
print('verbose mode')
config['log']['level'] = 2
else:
config['log']['level'] = 0
print('logging level is', config['log']['level'].get(int))
# Some validated/converted values.
print('directory is', config['directory'].as_filename())
print('library is', config['library'].as_filename())
|
<commit_before>"""An example application using Confit for configuration."""
from __future__ import print_function
from __future__ import unicode_literals
import confit
import argparse
config = confit.LazyConfig('ConfitExample', __name__)
def main():
parser = argparse.ArgumentParser(description='example Confit program')
parser.add_argument('--library', '-l', dest='library', metavar='LIBPATH',
help='library database file')
parser.add_argument('--directory', '-d', dest='directory',
metavar='DIRECTORY',
help='destination music directory')
parser.add_argument('--verbose', '-v', dest='verbose', action='store_true',
help='print debugging messages')
args = parser.parse_args()
config.set_args(args)
print('configuration directory is', config.config_dir())
# Use a boolean flag and the transient overlay.
if config['verbose']:
print('verbose mode')
config['log']['level'] = 2
else:
config['log']['level'] = 0
print('logging level is', config['log']['level'].validate(int))
# Some validated/converted values.
print('directory is', config['directory'].as_filename())
print('library is', config['library'].as_filename())
<commit_msg>Fix example to use get<commit_after>"""An example application using Confit for configuration."""
from __future__ import print_function
from __future__ import unicode_literals
import confit
import argparse
config = confit.LazyConfig('ConfitExample', __name__)
def main():
parser = argparse.ArgumentParser(description='example Confit program')
parser.add_argument('--library', '-l', dest='library', metavar='LIBPATH',
help='library database file')
parser.add_argument('--directory', '-d', dest='directory',
metavar='DIRECTORY',
help='destination music directory')
parser.add_argument('--verbose', '-v', dest='verbose', action='store_true',
help='print debugging messages')
args = parser.parse_args()
config.set_args(args)
print('configuration directory is', config.config_dir())
# Use a boolean flag and the transient overlay.
if config['verbose']:
print('verbose mode')
config['log']['level'] = 2
else:
config['log']['level'] = 0
print('logging level is', config['log']['level'].get(int))
# Some validated/converted values.
print('directory is', config['directory'].as_filename())
print('library is', config['library'].as_filename())
|
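A short usage sketch of the corrected call, using only names already shown in the example above: view.get(type) validates and converts in one step, where .validate() was the older spelling.
import confit

config = confit.LazyConfig('ConfitExample', __name__)
config['log']['level'] = 0                  # transient overlay, as in main()
assert config['log']['level'].get(int) == 0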
b575099c0d1f23916038172d46852a264a5f5a95
|
bluebottle/utils/staticfiles_finders.py
|
bluebottle/utils/staticfiles_finders.py
|
from django.utils._os import safe_join
import os
from django.conf import settings
from django.contrib.staticfiles.finders import FileSystemFinder
from bluebottle.clients.models import Client
class TenantStaticFilesFinder(FileSystemFinder):
def find(self, path, all=False):
"""
Looks for files in the client static directories.
static/assets/greatbarier/images/logo.jpg
will translate to
MULTI_TENANT_DIR/greatbarier/static/images/logo.jpg
"""
tenants = Client.objects.all()
tenant_dir = getattr(settings, 'MULTI_TENANT_DIR', None)
if not tenant_dir:
return
for tenant in tenants:
if "{0}/".format(tenant.client_name) in path:
tenant_path = path.replace('{0}/'.format(tenant.client_name),
'{0}/static/'.format(tenant.client_name))
local_path = safe_join(tenant_dir, tenant_path)
if os.path.exists(local_path):
return local_path
return
|
from django.utils._os import safe_join
import os
from django.conf import settings
from django.contrib.staticfiles.finders import FileSystemFinder
from bluebottle.clients.models import Client
class TenantStaticFilesFinder(FileSystemFinder):
def find(self, path, all=False):
"""
Looks for files in the client static directories.
static/assets/greatbarier/images/logo.jpg
will translate to
MULTI_TENANT_DIR/greatbarier/static/images/logo.jpg
"""
tenants = Client.objects.all()
tenant_dir = getattr(settings, 'MULTI_TENANT_DIR', None)
if not tenant_dir:
return []
for tenant in tenants:
if "{0}/".format(tenant.client_name) in path:
tenant_path = path.replace('{0}/'.format(tenant.client_name),
'{0}/static/'.format(tenant.client_name))
local_path = safe_join(tenant_dir, tenant_path)
if os.path.exists(local_path):
if all:
return [local_path]
return local_path
return []
|
Fix static files finder errors
|
Fix static files finder errors
|
Python
|
bsd-3-clause
|
jfterpstra/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle
|
from django.utils._os import safe_join
import os
from django.conf import settings
from django.contrib.staticfiles.finders import FileSystemFinder
from bluebottle.clients.models import Client
class TenantStaticFilesFinder(FileSystemFinder):
def find(self, path, all=False):
"""
Looks for files in the client static directories.
static/assets/greatbarier/images/logo.jpg
will translate to
MULTI_TENANT_DIR/greatbarier/static/images/logo.jpg
"""
tenants = Client.objects.all()
tenant_dir = getattr(settings, 'MULTI_TENANT_DIR', None)
if not tenant_dir:
return
for tenant in tenants:
if "{0}/".format(tenant.client_name) in path:
tenant_path = path.replace('{0}/'.format(tenant.client_name),
'{0}/static/'.format(tenant.client_name))
local_path = safe_join(tenant_dir, tenant_path)
if os.path.exists(local_path):
return local_path
return
Fix static files finder errors
|
from django.utils._os import safe_join
import os
from django.conf import settings
from django.contrib.staticfiles.finders import FileSystemFinder
from bluebottle.clients.models import Client
class TenantStaticFilesFinder(FileSystemFinder):
def find(self, path, all=False):
"""
Looks for files in the client static directories.
static/assets/greatbarier/images/logo.jpg
will translate to
MULTI_TENANT_DIR/greatbarier/static/images/logo.jpg
"""
tenants = Client.objects.all()
tenant_dir = getattr(settings, 'MULTI_TENANT_DIR', None)
if not tenant_dir:
return []
for tenant in tenants:
if "{0}/".format(tenant.client_name) in path:
tenant_path = path.replace('{0}/'.format(tenant.client_name),
'{0}/static/'.format(tenant.client_name))
local_path = safe_join(tenant_dir, tenant_path)
if os.path.exists(local_path):
if all:
return [local_path]
return local_path
return []
|
<commit_before>from django.utils._os import safe_join
import os
from django.conf import settings
from django.contrib.staticfiles.finders import FileSystemFinder
from bluebottle.clients.models import Client
class TenantStaticFilesFinder(FileSystemFinder):
def find(self, path, all=False):
"""
Looks for files in the client static directories.
static/assets/greatbarier/images/logo.jpg
will translate to
MULTI_TENANT_DIR/greatbarier/static/images/logo.jpg
"""
tenants = Client.objects.all()
tenant_dir = getattr(settings, 'MULTI_TENANT_DIR', None)
if not tenant_dir:
return
for tenant in tenants:
if "{0}/".format(tenant.client_name) in path:
tenant_path = path.replace('{0}/'.format(tenant.client_name),
'{0}/static/'.format(tenant.client_name))
local_path = safe_join(tenant_dir, tenant_path)
if os.path.exists(local_path):
return local_path
return
<commit_msg>Fix static files finder errors<commit_after>
|
from django.utils._os import safe_join
import os
from django.conf import settings
from django.contrib.staticfiles.finders import FileSystemFinder
from bluebottle.clients.models import Client
class TenantStaticFilesFinder(FileSystemFinder):
def find(self, path, all=False):
"""
Looks for files in the client static directories.
static/assets/greatbarier/images/logo.jpg
will translate to
MULTI_TENANT_DIR/greatbarier/static/images/logo.jpg
"""
tenants = Client.objects.all()
tenant_dir = getattr(settings, 'MULTI_TENANT_DIR', None)
if not tenant_dir:
return []
for tenant in tenants:
if "{0}/".format(tenant.client_name) in path:
tenant_path = path.replace('{0}/'.format(tenant.client_name),
'{0}/static/'.format(tenant.client_name))
local_path = safe_join(tenant_dir, tenant_path)
if os.path.exists(local_path):
if all:
return [local_path]
return local_path
return []
|
from django.utils._os import safe_join
import os
from django.conf import settings
from django.contrib.staticfiles.finders import FileSystemFinder
from bluebottle.clients.models import Client
class TenantStaticFilesFinder(FileSystemFinder):
def find(self, path, all=False):
"""
Looks for files in the client static directories.
static/assets/greatbarier/images/logo.jpg
will translate to
MULTI_TENANT_DIR/greatbarier/static/images/logo.jpg
"""
tenants = Client.objects.all()
tenant_dir = getattr(settings, 'MULTI_TENANT_DIR', None)
if not tenant_dir:
return
for tenant in tenants:
if "{0}/".format(tenant.client_name) in path:
tenant_path = path.replace('{0}/'.format(tenant.client_name),
'{0}/static/'.format(tenant.client_name))
local_path = safe_join(tenant_dir, tenant_path)
if os.path.exists(local_path):
return local_path
return
Fix static files finder errorsfrom django.utils._os import safe_join
import os
from django.conf import settings
from django.contrib.staticfiles.finders import FileSystemFinder
from bluebottle.clients.models import Client
class TenantStaticFilesFinder(FileSystemFinder):
def find(self, path, all=False):
"""
Looks for files in the client static directories.
static/assets/greatbarier/images/logo.jpg
will translate to
MULTI_TENANT_DIR/greatbarier/static/images/logo.jpg
"""
tenants = Client.objects.all()
tenant_dir = getattr(settings, 'MULTI_TENANT_DIR', None)
if not tenant_dir:
return []
for tenant in tenants:
if "{0}/".format(tenant.client_name) in path:
tenant_path = path.replace('{0}/'.format(tenant.client_name),
'{0}/static/'.format(tenant.client_name))
local_path = safe_join(tenant_dir, tenant_path)
if os.path.exists(local_path):
if all:
return [local_path]
return local_path
return []
|
<commit_before>from django.utils._os import safe_join
import os
from django.conf import settings
from django.contrib.staticfiles.finders import FileSystemFinder
from bluebottle.clients.models import Client
class TenantStaticFilesFinder(FileSystemFinder):
def find(self, path, all=False):
"""
Looks for files in the client static directories.
static/assets/greatbarier/images/logo.jpg
will translate to
MULTI_TENANT_DIR/greatbarier/static/images/logo.jpg
"""
tenants = Client.objects.all()
tenant_dir = getattr(settings, 'MULTI_TENANT_DIR', None)
if not tenant_dir:
return
for tenant in tenants:
if "{0}/".format(tenant.client_name) in path:
tenant_path = path.replace('{0}/'.format(tenant.client_name),
'{0}/static/'.format(tenant.client_name))
local_path = safe_join(tenant_dir, tenant_path)
if os.path.exists(local_path):
return local_path
return
<commit_msg>Fix static files finder errors<commit_after>from django.utils._os import safe_join
import os
from django.conf import settings
from django.contrib.staticfiles.finders import FileSystemFinder
from bluebottle.clients.models import Client
class TenantStaticFilesFinder(FileSystemFinder):
def find(self, path, all=False):
"""
Looks for files in the client static directories.
static/assets/greatbarier/images/logo.jpg
will translate to
MULTI_TENANT_DIR/greatbarier/static/images/logo.jpg
"""
tenants = Client.objects.all()
tenant_dir = getattr(settings, 'MULTI_TENANT_DIR', None)
if not tenant_dir:
return []
for tenant in tenants:
if "{0}/".format(tenant.client_name) in path:
tenant_path = path.replace('{0}/'.format(tenant.client_name),
'{0}/static/'.format(tenant.client_name))
local_path = safe_join(tenant_dir, tenant_path)
if os.path.exists(local_path):
if all:
return [local_path]
return local_path
return []
|
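A hedged sketch of the contract the fix restores: Django's staticfiles machinery expects find() to return a list when all=True and an empty list, not None, on a miss. It assumes a fully configured Django project with tenant records, so treat it as illustrative.
from bluebottle.utils.staticfiles_finders import TenantStaticFilesFinder

finder = TenantStaticFilesFinder()
assert finder.find('missing/logo.jpg') == []             # was None before
assert finder.find('missing/logo.jpg', all=True) == []   # always a list now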
d52d8ce18745d5ec0e722340cf09735938c8a0c0
|
src/BaseUtils.py
|
src/BaseUtils.py
|
'''
Base NLP utilities
'''
def get_words(sentence):
''' Return all the words found in a sentence.
Ignore whitespace and all punctuation
>>> get_words('a most interesting piece')
>>> ['a', 'most', 'interesting', 'piece']
>>> get_words('a, most$ **interesting piece')
>>> ['a', 'most', 'interesting', 'piece']
'''
clean_sentence = ''.join([char for char in sentence if ord(char) in
xrange(97, 123) or ord(char) in xrange(75, 91)
or ord(char) == 32])
segments = clean_sentence.split(' ')
words = [word for word in segments if not word == '']
return words
|
'''
Base NLP utilities
'''
def get_words(sentence):
''' Return all the words found in a sentence.
Ignore whitespace and all punctuation
>>> get_words('a most interesting piece')
>>> ['a', 'most', 'interesting', 'piece']
>>> get_words('a, most$ **interesting piece')
>>> ['a', 'most', 'interesting', 'piece']
'''
clean_sentence = ''.join([char for char in sentence if char.isalpha()
or char.isspace()])
segments = clean_sentence.split(' ')
words = [word for word in segments if not word == '']
return words
|
Replace ASCII checking chars and space with library methods
|
Replace ASCII checking chars and space with library methods
|
Python
|
bsd-2-clause
|
ambidextrousTx/RNLTK
|
'''
Base NLP utilities
'''
def get_words(sentence):
''' Return all the words found in a sentence.
Ignore whitespace and all punctuation
>>> get_words('a most interesting piece')
>>> ['a', 'most', 'interesting', 'piece']
>>> get_words('a, most$ **interesting piece')
>>> ['a', 'most', 'interesting', 'piece']
'''
clean_sentence = ''.join([char for char in sentence if ord(char) in
xrange(97, 123) or ord(char) in xrange(75, 91)
or ord(char) == 32])
segments = clean_sentence.split(' ')
words = [word for word in segments if not word == '']
return words
Replace ASCII checking chars and space with library methods
|
'''
Base NLP utilities
'''
def get_words(sentence):
''' Return all the words found in a sentence.
Ignore whitespace and all punctuation
>>> get_words('a most interesting piece')
>>> ['a', 'most', 'interesting', 'piece']
>>> get_words('a, most$ **interesting piece')
>>> ['a', 'most', 'interesting', 'piece']
'''
clean_sentence = ''.join([char for char in sentence if char.isalpha()
or char.isspace()])
segments = clean_sentence.split(' ')
words = [word for word in segments if not word == '']
return words
|
<commit_before>'''
Base NLP utilities
'''
def get_words(sentence):
''' Return all the words found in a sentence.
Ignore whitespace and all punctuation
>>> get_words('a most interesting piece')
>>> ['a', 'most', 'interesting', 'piece']
>>> get_words('a, most$ **interesting piece')
>>> ['a', 'most', 'interesting', 'piece']
'''
clean_sentence = ''.join([char for char in sentence if ord(char) in
xrange(97, 123) or ord(char) in xrange(75, 91)
or ord(char) == 32])
segments = clean_sentence.split(' ')
words = [word for word in segments if not word == '']
return words
<commit_msg>Replace ASCII checking chars and space with library methods<commit_after>
|
'''
Base NLP utilities
'''
def get_words(sentence):
''' Return all the words found in a sentence.
Ignore whitespace and all punctuation
>>> get_words('a most interesting piece')
>>> ['a', 'most', 'interesting', 'piece']
>>> get_words('a, most$ **interesting piece')
>>> ['a', 'most', 'interesting', 'piece']
'''
clean_sentence = ''.join([char for char in sentence if char.isalpha()
or char.isspace()])
segments = clean_sentence.split(' ')
words = [word for word in segments if not word == '']
return words
|
'''
Base NLP utilities
'''
def get_words(sentence):
''' Return all the words found in a sentence.
Ignore whitespace and all punctuation
>>> get_words('a most interesting piece')
>>> ['a', 'most', 'interesting', 'piece']
>>> get_words('a, most$ **interesting piece')
>>> ['a', 'most', 'interesting', 'piece']
'''
clean_sentence = ''.join([char for char in sentence if ord(char) in
xrange(97, 123) or ord(char) in xrange(75, 91)
or ord(char) == 32])
segments = clean_sentence.split(' ')
words = [word for word in segments if not word == '']
return words
Replace ASCII checking chars and space with library methods'''
Base NLP utilities
'''
def get_words(sentence):
''' Return all the words found in a sentence.
Ignore whitespace and all punctuation
>>> get_words('a most interesting piece')
>>> ['a', 'most', 'interesting', 'piece']
>>> get_words('a, most$ **interesting piece')
>>> ['a', 'most', 'interesting', 'piece']
'''
clean_sentence = ''.join([char for char in sentence if char.isalpha()
or char.isspace()])
segments = clean_sentence.split(' ')
words = [word for word in segments if not word == '']
return words
|
<commit_before>'''
Base NLP utilities
'''
def get_words(sentence):
''' Return all the words found in a sentence.
Ignore whitespace and all punctuation
>>> get_words('a most interesting piece')
>>> ['a', 'most', 'interesting', 'piece']
>>> get_words('a, most$ **interesting piece')
>>> ['a', 'most', 'interesting', 'piece']
'''
clean_sentence = ''.join([char for char in sentence if ord(char) in
xrange(97, 123) or ord(char) in xrange(75, 91)
or ord(char) == 32])
segments = clean_sentence.split(' ')
words = [word for word in segments if not word == '']
return words
<commit_msg>Replace ASCII checking chars and space with library methods<commit_after>'''
Base NLP utilities
'''
def get_words(sentence):
''' Return all the words found in a sentence.
Ignore whitespace and all punctuation
>>> get_words('a most interesting piece')
>>> ['a', 'most', 'interesting', 'piece']
>>> get_words('a, most$ **interesting piece')
>>> ['a', 'most', 'interesting', 'piece']
'''
clean_sentence = ''.join([char for char in sentence if char.isalpha()
or char.isspace()])
segments = clean_sentence.split(' ')
words = [word for word in segments if not word == '']
return words
|
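A quick check of the new predicate; it also repairs an off-by-ten in the old version, whose xrange(75, 91) covered K-Z rather than A-Z (65-90), and it now accepts non-ASCII letters as well.
def get_words(sentence):
    clean = ''.join(c for c in sentence if c.isalpha() or c.isspace())
    return [w for w in clean.split(' ') if w]

assert get_words('a, most$ **interesting piece') == ['a', 'most', 'interesting', 'piece']
assert get_words('Ada most') == ['Ada', 'most']   # 'A' (ord 65) now survives
print(get_words('naïve café'))                    # accented letters pass too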
716d967971d9ea23ab54d327231ba873b681a7c7
|
isserviceup/services/models/service.py
|
isserviceup/services/models/service.py
|
from enum import Enum
class Status(Enum):
ok = 1 # green
maintenance = 2 # blue
minor = 3 # yellow
major = 4 # orange
critical = 5 # red
unavailable = 6 # gray
class Service(object):
@property
def id(self):
return self.__class__.__name__
@property
def status_url(self):
raise NotImplemented()
@property
def name(self):
raise NotImplemented()
def get_status(self):
raise NotImplemented()
|
from enum import Enum
class Status(Enum):
ok = 1 # green
maintenance = 2 # blue
minor = 3 # yellow
major = 4 # orange
critical = 5 # red
unavailable = 6 # gray
class Service(object):
@property
def id(self):
return self.__class__.__name__
@property
def status_url(self):
raise NotImplemented()
@property
def icon_url(self):
raise NotImplemented()
@property
def name(self):
raise NotImplemented()
def get_status(self):
raise NotImplemented()
|
Add icon_url as abstract property
|
Add icon_url as abstract property
|
Python
|
apache-2.0
|
marcopaz/is-service-up,marcopaz/is-service-up,marcopaz/is-service-up
|
from enum import Enum
class Status(Enum):
ok = 1 # green
maintenance = 2 # blue
minor = 3 # yellow
major = 4 # orange
critical = 5 # red
unavailable = 6 # gray
class Service(object):
@property
def id(self):
return self.__class__.__name__
@property
def status_url(self):
raise NotImplemented()
@property
def name(self):
raise NotImplemented()
def get_status(self):
raise NotImplemented()
Add icon_url as abstract property
|
from enum import Enum
class Status(Enum):
ok = 1 # green
maintenance = 2 # blue
minor = 3 # yellow
major = 4 # orange
critical = 5 # red
unavailable = 6 # gray
class Service(object):
@property
def id(self):
return self.__class__.__name__
@property
def status_url(self):
raise NotImplemented()
@property
def icon_url(self):
raise NotImplemented()
@property
def name(self):
raise NotImplemented()
def get_status(self):
raise NotImplemented()
|
<commit_before>from enum import Enum
class Status(Enum):
ok = 1 # green
maintenance = 2 # blue
minor = 3 # yellow
major = 4 # orange
critical = 5 # red
unavailable = 6 # gray
class Service(object):
@property
def id(self):
return self.__class__.__name__
@property
def status_url(self):
raise NotImplemented()
@property
def name(self):
raise NotImplemented()
def get_status(self):
raise NotImplemented()
<commit_msg>Add icon_url as abstract property<commit_after>
|
from enum import Enum
class Status(Enum):
ok = 1 # green
maintenance = 2 # blue
minor = 3 # yellow
major = 4 # orange
critical = 5 # red
unavailable = 6 # gray
class Service(object):
@property
def id(self):
return self.__class__.__name__
@property
def status_url(self):
raise NotImplemented()
@property
def icon_url(self):
raise NotImplemented()
@property
def name(self):
raise NotImplemented()
def get_status(self):
raise NotImplemented()
|
from enum import Enum
class Status(Enum):
ok = 1 # green
maintenance = 2 # blue
minor = 3 # yellow
major = 4 # orange
critical = 5 # red
unavailable = 6 # gray
class Service(object):
@property
def id(self):
return self.__class__.__name__
@property
def status_url(self):
raise NotImplemented()
@property
def name(self):
raise NotImplemented()
def get_status(self):
raise NotImplemented()
Add icon_url as abstract propertyfrom enum import Enum
class Status(Enum):
ok = 1 # green
maintenance = 2 # blue
minor = 3 # yellow
major = 4 # orange
critical = 5 # red
unavailable = 6 # gray
class Service(object):
@property
def id(self):
return self.__class__.__name__
@property
def status_url(self):
raise NotImplemented()
@property
def icon_url(self):
raise NotImplemented()
@property
def name(self):
raise NotImplemented()
def get_status(self):
raise NotImplemented()
|
<commit_before>from enum import Enum
class Status(Enum):
ok = 1 # green
maintenance = 2 # blue
minor = 3 # yellow
major = 4 # orange
critical = 5 # red
unavailable = 6 # gray
class Service(object):
@property
def id(self):
return self.__class__.__name__
@property
def status_url(self):
raise NotImplemented()
@property
def name(self):
raise NotImplemented()
def get_status(self):
raise NotImplemented()
<commit_msg>Add icon_url as abstract property<commit_after>from enum import Enum
class Status(Enum):
ok = 1 # green
maintenance = 2 # blue
minor = 3 # yellow
major = 4 # orange
critical = 5 # red
unavailable = 6 # gray
class Service(object):
@property
def id(self):
return self.__class__.__name__
@property
def status_url(self):
raise NotImplemented()
@property
def icon_url(self):
raise NotImplemented()
@property
def name(self):
raise NotImplemented()
def get_status(self):
raise NotImplemented()
|
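A hedged sketch of a concrete subclass satisfying the enlarged interface; the URL and icon path are illustrative, not from the project. (Idiomatic Python would raise NotImplementedError in the base class; NotImplemented is not an exception type.)
from isserviceup.services.models.service import Service, Status

class ExampleService(Service):
    name = 'Example'
    status_url = 'https://status.example.com/'   # illustrative
    icon_url = '/static/images/example.png'      # illustrative

    def get_status(self):
        return Status.ok

assert ExampleService().icon_url == '/static/images/example.png'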
5233e6d7f7d4f494f62576206ede87d13e8f760d
|
calexicon/calendars/tests/test_other.py
|
calexicon/calendars/tests/test_other.py
|
from datetime import date as vanilla_date
from calendar_testing import CalendarTest
from calexicon.calendars.other import JulianDayNumber
class TestJulianDayNumber(CalendarTest):
def setUp(self):
self.calendar = JulianDayNumber()
def test_make_date(self):
vd = vanilla_date(2010, 8, 1)
d = self.calendar.from_date(vd)
self.assertIsNotNone(d)
def test_first_date(self):
vd = vanilla_date(1, 1, 1)
d = self.calendar.from_date(vd)
self.assertEqual(str(d), 'Day 1721423 (Julian Day Number)')
def compare_date_and_number(self, year, month, day, number):
vd = vanilla_date(year, month, day)
d = self.calendar.from_date(vd)
self.assertEqual(d.native_representation(), {'day_number': number})
def test_other_date(self):
self.compare_date_and_number(2013, 1, 1, 2456293)
|
from datetime import date as vanilla_date
from calendar_testing import CalendarTest
from calexicon.calendars.other import JulianDayNumber
class TestJulianDayNumber(CalendarTest):
def setUp(self):
self.calendar = JulianDayNumber()
def test_make_date(self):
vd = vanilla_date(2010, 8, 1)
d = self.calendar.from_date(vd)
self.assertIsNotNone(d)
def test_first_date(self):
vd = vanilla_date(1, 1, 1)
d = self.calendar.from_date(vd)
self.assertEqual(str(d), 'Day 1721423 (Julian Day Number)')
def compare_date_and_number(self, year, month, day, number):
vd = vanilla_date(year, month, day)
d = self.calendar.from_date(vd)
self.assertEqual(d.native_representation(), {'day_number': number})
def test_every_400_years(self):
days_in_400_years = 400 * 365 + 97
for i in range(25):
self.compare_date_and_number(1 + 400 * i, 1, 1, 1721423 + days_in_400_years * i)
def test_another_date(self):
self.compare_date_and_number(2013, 1, 1, 2456293)
|
Add a test to check the right number of days in 400 year cycles.
|
Add a test to check the right number of days in 400 year cycles.
|
Python
|
apache-2.0
|
jwg4/calexicon,jwg4/qual
|
from datetime import date as vanilla_date
from calendar_testing import CalendarTest
from calexicon.calendars.other import JulianDayNumber
class TestJulianDayNumber(CalendarTest):
def setUp(self):
self.calendar = JulianDayNumber()
def test_make_date(self):
vd = vanilla_date(2010, 8, 1)
d = self.calendar.from_date(vd)
self.assertIsNotNone(d)
def test_first_date(self):
vd = vanilla_date(1, 1, 1)
d = self.calendar.from_date(vd)
self.assertEqual(str(d), 'Day 1721423 (Julian Day Number)')
def compare_date_and_number(self, year, month, day, number):
vd = vanilla_date(year, month, day)
d = self.calendar.from_date(vd)
self.assertEqual(d.native_representation(), {'day_number': number})
def test_other_date(self):
self.compare_date_and_number(2013, 1, 1, 2456293)
Add a test to check the right number of days in 400 year cycles.
|
from datetime import date as vanilla_date
from calendar_testing import CalendarTest
from calexicon.calendars.other import JulianDayNumber
class TestJulianDayNumber(CalendarTest):
def setUp(self):
self.calendar = JulianDayNumber()
def test_make_date(self):
vd = vanilla_date(2010, 8, 1)
d = self.calendar.from_date(vd)
self.assertIsNotNone(d)
def test_first_date(self):
vd = vanilla_date(1, 1, 1)
d = self.calendar.from_date(vd)
self.assertEqual(str(d), 'Day 1721423 (Julian Day Number)')
def compare_date_and_number(self, year, month, day, number):
vd = vanilla_date(year, month, day)
d = self.calendar.from_date(vd)
self.assertEqual(d.native_representation(), {'day_number': number})
def test_every_400_years(self):
days_in_400_years = 400 * 365 + 97
for i in range(25):
self.compare_date_and_number(1 + 400 * i, 1, 1, 1721423 + days_in_400_years * i)
def test_another_date(self):
self.compare_date_and_number(2013, 1, 1, 2456293)
|
<commit_before>from datetime import date as vanilla_date
from calendar_testing import CalendarTest
from calexicon.calendars.other import JulianDayNumber
class TestJulianDayNumber(CalendarTest):
def setUp(self):
self.calendar = JulianDayNumber()
def test_make_date(self):
vd = vanilla_date(2010, 8, 1)
d = self.calendar.from_date(vd)
self.assertIsNotNone(d)
def test_first_date(self):
vd = vanilla_date(1, 1, 1)
d = self.calendar.from_date(vd)
self.assertEqual(str(d), 'Day 1721423 (Julian Day Number)')
def compare_date_and_number(self, year, month, day, number):
vd = vanilla_date(year, month, day)
d = self.calendar.from_date(vd)
self.assertEqual(d.native_representation(), {'day_number': number})
def test_other_date(self):
self.compare_date_and_number(2013, 1, 1, 2456293)
<commit_msg>Add a test to check the right number of days in 400 year cycles.<commit_after>
|
from datetime import date as vanilla_date
from calendar_testing import CalendarTest
from calexicon.calendars.other import JulianDayNumber
class TestJulianDayNumber(CalendarTest):
def setUp(self):
self.calendar = JulianDayNumber()
def test_make_date(self):
vd = vanilla_date(2010, 8, 1)
d = self.calendar.from_date(vd)
self.assertIsNotNone(d)
def test_first_date(self):
vd = vanilla_date(1, 1, 1)
d = self.calendar.from_date(vd)
self.assertEqual(str(d), 'Day 1721423 (Julian Day Number)')
def compare_date_and_number(self, year, month, day, number):
vd = vanilla_date(year, month, day)
d = self.calendar.from_date(vd)
self.assertEqual(d.native_representation(), {'day_number': number})
def test_every_400_years(self):
days_in_400_years = 400 * 365 + 97
for i in range(25):
self.compare_date_and_number(1 + 400 * i, 1, 1, 1721423 + days_in_400_years * i)
def test_another_date(self):
self.compare_date_and_number(2013, 1, 1, 2456293)
|
from datetime import date as vanilla_date
from calendar_testing import CalendarTest
from calexicon.calendars.other import JulianDayNumber
class TestJulianDayNumber(CalendarTest):
def setUp(self):
self.calendar = JulianDayNumber()
def test_make_date(self):
vd = vanilla_date(2010, 8, 1)
d = self.calendar.from_date(vd)
self.assertIsNotNone(d)
def test_first_date(self):
vd = vanilla_date(1, 1, 1)
d = self.calendar.from_date(vd)
self.assertEqual(str(d), 'Day 1721423 (Julian Day Number)')
def compare_date_and_number(self, year, month, day, number):
vd = vanilla_date(year, month, day)
d = self.calendar.from_date(vd)
self.assertEqual(d.native_representation(), {'day_number': number})
def test_other_date(self):
self.compare_date_and_number(2013, 1, 1, 2456293)
Add a test to check the right number of days in 400 year cycles.from datetime import date as vanilla_date
from calendar_testing import CalendarTest
from calexicon.calendars.other import JulianDayNumber
class TestJulianDayNumber(CalendarTest):
def setUp(self):
self.calendar = JulianDayNumber()
def test_make_date(self):
vd = vanilla_date(2010, 8, 1)
d = self.calendar.from_date(vd)
self.assertIsNotNone(d)
def test_first_date(self):
vd = vanilla_date(1, 1, 1)
d = self.calendar.from_date(vd)
self.assertEqual(str(d), 'Day 1721423 (Julian Day Number)')
def compare_date_and_number(self, year, month, day, number):
vd = vanilla_date(year, month, day)
d = self.calendar.from_date(vd)
self.assertEqual(d.native_representation(), {'day_number': number})
def test_every_400_years(self):
days_in_400_years = 400 * 365 + 97
for i in range(25):
self.compare_date_and_number(1 + 400 * i, 1, 1, 1721423 + days_in_400_years * i)
def test_another_date(self):
self.compare_date_and_number(2013, 1, 1, 2456293)
|
<commit_before>from datetime import date as vanilla_date
from calendar_testing import CalendarTest
from calexicon.calendars.other import JulianDayNumber
class TestJulianDayNumber(CalendarTest):
def setUp(self):
self.calendar = JulianDayNumber()
def test_make_date(self):
vd = vanilla_date(2010, 8, 1)
d = self.calendar.from_date(vd)
self.assertIsNotNone(d)
def test_first_date(self):
vd = vanilla_date(1, 1, 1)
d = self.calendar.from_date(vd)
self.assertEqual(str(d), 'Day 1721423 (Julian Day Number)')
def compare_date_and_number(self, year, month, day, number):
vd = vanilla_date(year, month, day)
d = self.calendar.from_date(vd)
self.assertEqual(d.native_representation(), {'day_number': number})
def test_other_date(self):
self.compare_date_and_number(2013, 1, 1, 2456293)
<commit_msg>Add a test to check the right number of days in 400 year cycles.<commit_after>from datetime import date as vanilla_date
from calendar_testing import CalendarTest
from calexicon.calendars.other import JulianDayNumber
class TestJulianDayNumber(CalendarTest):
def setUp(self):
self.calendar = JulianDayNumber()
def test_make_date(self):
vd = vanilla_date(2010, 8, 1)
d = self.calendar.from_date(vd)
self.assertIsNotNone(d)
def test_first_date(self):
vd = vanilla_date(1, 1, 1)
d = self.calendar.from_date(vd)
self.assertEqual(str(d), 'Day 1721423 (Julian Day Number)')
def compare_date_and_number(self, year, month, day, number):
vd = vanilla_date(year, month, day)
d = self.calendar.from_date(vd)
self.assertEqual(d.native_representation(), {'day_number': number})
def test_every_400_years(self):
days_in_400_years = 400 * 365 + 97
for i in range(25):
self.compare_date_and_number(1 + 400 * i, 1, 1, 1721423 + days_in_400_years * i)
def test_another_date(self):
self.compare_date_and_number(2013, 1, 1, 2456293)
|
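A worked check of the constant behind test_every_400_years: a Gregorian 400-year cycle contains 97 leap years, hence 146097 days, which is also an exact number of weeks.
days_in_400_years = 400 * 365 + 97
assert days_in_400_years == 146097
assert days_in_400_years % 7 == 0     # whole weeks, so weekdays repeat
# Last case the loop exercises (i == 24, i.e. 1 Jan 9601):
assert 1721423 + 24 * days_in_400_years == 5227751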
03fb3e68f9ec9432a25c60bda06fcd49d604befc
|
src/__init__.py
|
src/__init__.py
|
from ..override_audit import reload
reload("src", ["core", "events", "contexts", "browse", "settings_proxy"])
reload("src.commands")
from . import core
from .core import *
from .events import *
from .contexts import *
from .settings_proxy import *
from .commands import *
__all__ = [
# core
"core",
# browse
"browse",
# settings_proxy
"OverrideAuditOpenFileCommand",
"OverrideAuditEditSettingsCommand",
# events/contexts
"OverrideAuditEventListener",
"OverrideAuditContextListener",
# commands/*
"OverrideAuditPackageReportCommand",
"OverrideAuditOverrideReportCommand",
"OverrideAuditDiffReportCommand",
"OverrideAuditRefreshReportCommand",
"OverrideAuditToggleOverrideCommand",
"OverrideAuditCreateOverrideCommand",
"OverrideAuditDiffOverrideCommand",
"OverrideAuditRevertOverrideCommand",
"OverrideAuditDiffExternallyCommand",
"OverrideAuditEditOverrideCommand",
"OverrideAuditDeleteOverrideCommand",
"OverrideAuditFreshenOverrideCommand",
"OverrideAuditDiffPackageCommand",
"OverrideAuditFreshenPackageCommand",
"OverrideAuditDiffSingleCommand",
"OverrideAuditModifyMarkCommand"
]
|
from ..override_audit import reload
reload("src", ["core", "events", "contexts", "browse", "settings_proxy"])
reload("src.commands")
from . import core
from .core import *
from .events import *
from .contexts import *
from .settings_proxy import *
from .commands import *
__all__ = [
# core
"core",
# browse
"browse",
# settings_proxy
"OverrideAuditOpenFileCommand",
"OverrideAuditEditSettingsCommand",
# events/contexts
"OverrideAuditEventListener",
"CreateOverrideEventListener",
"OverrideAuditContextListener",
# commands/*
"OverrideAuditPackageReportCommand",
"OverrideAuditOverrideReportCommand",
"OverrideAuditDiffReportCommand",
"OverrideAuditRefreshReportCommand",
"OverrideAuditToggleOverrideCommand",
"OverrideAuditCreateOverrideCommand",
"OverrideAuditDiffOverrideCommand",
"OverrideAuditRevertOverrideCommand",
"OverrideAuditDiffExternallyCommand",
"OverrideAuditEditOverrideCommand",
"OverrideAuditDeleteOverrideCommand",
"OverrideAuditFreshenOverrideCommand",
"OverrideAuditDiffPackageCommand",
"OverrideAuditFreshenPackageCommand",
"OverrideAuditDiffSingleCommand",
"OverrideAuditModifyMarkCommand"
]
|
Add missing event listener for new overrides
|
Add missing event listener for new overrides
While rolling the test code for creating overrides into the base code,
we remembered to make sure that we put the event handler used to handle
override creation in place but forgot to export them to the base
plugin so that it would actually do something.
|
Python
|
mit
|
OdatNurd/OverrideAudit
|
from ..override_audit import reload
reload("src", ["core", "events", "contexts", "browse", "settings_proxy"])
reload("src.commands")
from . import core
from .core import *
from .events import *
from .contexts import *
from .settings_proxy import *
from .commands import *
__all__ = [
# core
"core",
# browse
"browse",
# settings_proxy
"OverrideAuditOpenFileCommand",
"OverrideAuditEditSettingsCommand",
# events/contexts
"OverrideAuditEventListener",
"OverrideAuditContextListener",
# commands/*
"OverrideAuditPackageReportCommand",
"OverrideAuditOverrideReportCommand",
"OverrideAuditDiffReportCommand",
"OverrideAuditRefreshReportCommand",
"OverrideAuditToggleOverrideCommand",
"OverrideAuditCreateOverrideCommand",
"OverrideAuditDiffOverrideCommand",
"OverrideAuditRevertOverrideCommand",
"OverrideAuditDiffExternallyCommand",
"OverrideAuditEditOverrideCommand",
"OverrideAuditDeleteOverrideCommand",
"OverrideAuditFreshenOverrideCommand",
"OverrideAuditDiffPackageCommand",
"OverrideAuditFreshenPackageCommand",
"OverrideAuditDiffSingleCommand",
"OverrideAuditModifyMarkCommand"
]
Add missing event listener for new overrides
While rolling the test code for creating overrides into the base code,
we remembered to make sure that we put the event handler used to handle
override creation in place but forgot to export them to the base
plugin so that it would actually do something.
|
from ..override_audit import reload
reload("src", ["core", "events", "contexts", "browse", "settings_proxy"])
reload("src.commands")
from . import core
from .core import *
from .events import *
from .contexts import *
from .settings_proxy import *
from .commands import *
__all__ = [
# core
"core",
# browse
"browse",
# settings_proxy
"OverrideAuditOpenFileCommand",
"OverrideAuditEditSettingsCommand",
# events/contexts
"OverrideAuditEventListener",
"CreateOverrideEventListener",
"OverrideAuditContextListener",
# commands/*
"OverrideAuditPackageReportCommand",
"OverrideAuditOverrideReportCommand",
"OverrideAuditDiffReportCommand",
"OverrideAuditRefreshReportCommand",
"OverrideAuditToggleOverrideCommand",
"OverrideAuditCreateOverrideCommand",
"OverrideAuditDiffOverrideCommand",
"OverrideAuditRevertOverrideCommand",
"OverrideAuditDiffExternallyCommand",
"OverrideAuditEditOverrideCommand",
"OverrideAuditDeleteOverrideCommand",
"OverrideAuditFreshenOverrideCommand",
"OverrideAuditDiffPackageCommand",
"OverrideAuditFreshenPackageCommand",
"OverrideAuditDiffSingleCommand",
"OverrideAuditModifyMarkCommand"
]
|
<commit_before>from ..override_audit import reload
reload("src", ["core", "events", "contexts", "browse", "settings_proxy"])
reload("src.commands")
from . import core
from .core import *
from .events import *
from .contexts import *
from .settings_proxy import *
from .commands import *
__all__ = [
# core
"core",
# browse
"browse",
# settings_proxy
"OverrideAuditOpenFileCommand",
"OverrideAuditEditSettingsCommand",
# events/contexts
"OverrideAuditEventListener",
"OverrideAuditContextListener",
# commands/*
"OverrideAuditPackageReportCommand",
"OverrideAuditOverrideReportCommand",
"OverrideAuditDiffReportCommand",
"OverrideAuditRefreshReportCommand",
"OverrideAuditToggleOverrideCommand",
"OverrideAuditCreateOverrideCommand",
"OverrideAuditDiffOverrideCommand",
"OverrideAuditRevertOverrideCommand",
"OverrideAuditDiffExternallyCommand",
"OverrideAuditEditOverrideCommand",
"OverrideAuditDeleteOverrideCommand",
"OverrideAuditFreshenOverrideCommand",
"OverrideAuditDiffPackageCommand",
"OverrideAuditFreshenPackageCommand",
"OverrideAuditDiffSingleCommand",
"OverrideAuditModifyMarkCommand"
]
<commit_msg>Add missing event listener for new overrides
While rolling the test code for creating overrides into the base code,
we remembered to make sure that we put the event handler used to handle
override creation in place but forgot to export them to the base
plugin so that it would actually do something.<commit_after>
|
from ..override_audit import reload
reload("src", ["core", "events", "contexts", "browse", "settings_proxy"])
reload("src.commands")
from . import core
from .core import *
from .events import *
from .contexts import *
from .settings_proxy import *
from .commands import *
__all__ = [
# core
"core",
# browse
"browse",
# settings_proxy
"OverrideAuditOpenFileCommand",
"OverrideAuditEditSettingsCommand",
# events/contexts
"OverrideAuditEventListener",
"CreateOverrideEventListener",
"OverrideAuditContextListener",
# commands/*
"OverrideAuditPackageReportCommand",
"OverrideAuditOverrideReportCommand",
"OverrideAuditDiffReportCommand",
"OverrideAuditRefreshReportCommand",
"OverrideAuditToggleOverrideCommand",
"OverrideAuditCreateOverrideCommand",
"OverrideAuditDiffOverrideCommand",
"OverrideAuditRevertOverrideCommand",
"OverrideAuditDiffExternallyCommand",
"OverrideAuditEditOverrideCommand",
"OverrideAuditDeleteOverrideCommand",
"OverrideAuditFreshenOverrideCommand",
"OverrideAuditDiffPackageCommand",
"OverrideAuditFreshenPackageCommand",
"OverrideAuditDiffSingleCommand",
"OverrideAuditModifyMarkCommand"
]
|
from ..override_audit import reload
reload("src", ["core", "events", "contexts", "browse", "settings_proxy"])
reload("src.commands")
from . import core
from .core import *
from .events import *
from .contexts import *
from .settings_proxy import *
from .commands import *
__all__ = [
# core
"core",
# browse
"browse",
# settings_proxy
"OverrideAuditOpenFileCommand",
"OverrideAuditEditSettingsCommand",
# events/contexts
"OverrideAuditEventListener",
"OverrideAuditContextListener",
# commands/*
"OverrideAuditPackageReportCommand",
"OverrideAuditOverrideReportCommand",
"OverrideAuditDiffReportCommand",
"OverrideAuditRefreshReportCommand",
"OverrideAuditToggleOverrideCommand",
"OverrideAuditCreateOverrideCommand",
"OverrideAuditDiffOverrideCommand",
"OverrideAuditRevertOverrideCommand",
"OverrideAuditDiffExternallyCommand",
"OverrideAuditEditOverrideCommand",
"OverrideAuditDeleteOverrideCommand",
"OverrideAuditFreshenOverrideCommand",
"OverrideAuditDiffPackageCommand",
"OverrideAuditFreshenPackageCommand",
"OverrideAuditDiffSingleCommand",
"OverrideAuditModifyMarkCommand"
]
Add missing event listener for new overrides
While rolling the test code for creating overrides into the base code,
we remembered to make sure that we put the event handler used to handle
override creation in place but forgot to export them to the base
plugin so that it would actually do something.from ..override_audit import reload
reload("src", ["core", "events", "contexts", "browse", "settings_proxy"])
reload("src.commands")
from . import core
from .core import *
from .events import *
from .contexts import *
from .settings_proxy import *
from .commands import *
__all__ = [
# core
"core",
# browse
"browse",
# settings_proxy
"OverrideAuditOpenFileCommand",
"OverrideAuditEditSettingsCommand",
# events/contexts
"OverrideAuditEventListener",
"CreateOverrideEventListener",
"OverrideAuditContextListener",
# commands/*
"OverrideAuditPackageReportCommand",
"OverrideAuditOverrideReportCommand",
"OverrideAuditDiffReportCommand",
"OverrideAuditRefreshReportCommand",
"OverrideAuditToggleOverrideCommand",
"OverrideAuditCreateOverrideCommand",
"OverrideAuditDiffOverrideCommand",
"OverrideAuditRevertOverrideCommand",
"OverrideAuditDiffExternallyCommand",
"OverrideAuditEditOverrideCommand",
"OverrideAuditDeleteOverrideCommand",
"OverrideAuditFreshenOverrideCommand",
"OverrideAuditDiffPackageCommand",
"OverrideAuditFreshenPackageCommand",
"OverrideAuditDiffSingleCommand",
"OverrideAuditModifyMarkCommand"
]
|
<commit_before>from ..override_audit import reload
reload("src", ["core", "events", "contexts", "browse", "settings_proxy"])
reload("src.commands")
from . import core
from .core import *
from .events import *
from .contexts import *
from .settings_proxy import *
from .commands import *
__all__ = [
# core
"core",
# browse
"browse",
# settings_proxy
"OverrideAuditOpenFileCommand",
"OverrideAuditEditSettingsCommand",
# events/contexts
"OverrideAuditEventListener",
"OverrideAuditContextListener",
# commands/*
"OverrideAuditPackageReportCommand",
"OverrideAuditOverrideReportCommand",
"OverrideAuditDiffReportCommand",
"OverrideAuditRefreshReportCommand",
"OverrideAuditToggleOverrideCommand",
"OverrideAuditCreateOverrideCommand",
"OverrideAuditDiffOverrideCommand",
"OverrideAuditRevertOverrideCommand",
"OverrideAuditDiffExternallyCommand",
"OverrideAuditEditOverrideCommand",
"OverrideAuditDeleteOverrideCommand",
"OverrideAuditFreshenOverrideCommand",
"OverrideAuditDiffPackageCommand",
"OverrideAuditFreshenPackageCommand",
"OverrideAuditDiffSingleCommand",
"OverrideAuditModifyMarkCommand"
]
<commit_msg>Add missing event listener for new overrides
While rolling the test code for creating overrides into the base code,
we remembered to make sure that we put the event handler used to handle
override creation in place but forgot to export them to the base
plugin so that it would actually do something.<commit_after>from ..override_audit import reload
reload("src", ["core", "events", "contexts", "browse", "settings_proxy"])
reload("src.commands")
from . import core
from .core import *
from .events import *
from .contexts import *
from .settings_proxy import *
from .commands import *
__all__ = [
# core
"core",
# browse
"browse",
# settings_proxy
"OverrideAuditOpenFileCommand",
"OverrideAuditEditSettingsCommand",
# events/contexts
"OverrideAuditEventListener",
"CreateOverrideEventListener",
"OverrideAuditContextListener",
# commands/*
"OverrideAuditPackageReportCommand",
"OverrideAuditOverrideReportCommand",
"OverrideAuditDiffReportCommand",
"OverrideAuditRefreshReportCommand",
"OverrideAuditToggleOverrideCommand",
"OverrideAuditCreateOverrideCommand",
"OverrideAuditDiffOverrideCommand",
"OverrideAuditRevertOverrideCommand",
"OverrideAuditDiffExternallyCommand",
"OverrideAuditEditOverrideCommand",
"OverrideAuditDeleteOverrideCommand",
"OverrideAuditFreshenOverrideCommand",
"OverrideAuditDiffPackageCommand",
"OverrideAuditFreshenPackageCommand",
"OverrideAuditDiffSingleCommand",
"OverrideAuditModifyMarkCommand"
]
|
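The fix above works because Sublime Text's plugin host only instantiates command and event-listener classes it can see in the package's top-level plugin module; a listener defined in a submodule stays invisible until it is re-exported, which is what adding "CreateOverrideEventListener" to __all__ accomplishes. A minimal sketch of the pattern, assuming the standard sublime_plugin API (the listener body is illustrative, not taken from this commit):

import sublime_plugin

class CreateOverrideEventListener(sublime_plugin.EventListener):
    def on_post_save(self, view):
        # Placeholder: react when a newly created override file is saved.
        print("override saved:", view.file_name())

# In the package's top-level module, both steps are needed for Sublime
# to discover the class:
#     from .events import *
#     __all__ = [..., "CreateOverrideEventListener"]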
f46770697d668e31518ada41d31fdb59a84f3cf6
|
kokki/cookbooks/aws/recipes/default.py
|
kokki/cookbooks/aws/recipes/default.py
|
from kokki import *
Package("python-boto")
# Mount volumes and format if necessary
for vol in env.config.aws.volumes:
env.cookbooks.aws.EBSVolume(vol['volume_id'],
availability_zone = env.config.aws.availability_zone,
device = vol['device'],
action = "attach")
if vol.get('fstype'):
if vol['fstype'] == "xfs":
Package("xfsprogs")
Execute("mkfs.%(fstype)s -f %(device)s" % vol,
not_if = """if [ "`file -s %(device)s`" = "%(device)s: data" ]; then exit 1; fi""" % vol)
if vol.get('mount_point'):
Mount(vol['mount_point'],
device = vol['device'],
fstype = vol.get('fstype'),
options = vol.get('fsoptions', ["noatime"]),
action = ["mount", "enable"])
|
import os
from kokki import *
# Package("python-boto")
Execute("pip install git+http://github.com/boto/boto.git#egg=boto",
not_if = 'python -c "import boto"')
Execute("mv /usr/lib/pymodules/python2.6/boto /tmp/boto.orig",
only_if = os.path.exists("/usr/lib/pymodules/python2.6/boto"))
# Mount volumes and format if necessary
for vol in env.config.aws.volumes:
env.cookbooks.aws.EBSVolume(vol['volume_id'],
availability_zone = env.config.aws.availability_zone,
device = vol['device'],
action = "attach")
if vol.get('fstype'):
if vol['fstype'] == "xfs":
Package("xfsprogs")
Execute("mkfs.%(fstype)s -f %(device)s" % vol,
not_if = """if [ "`file -s %(device)s`" = "%(device)s: data" ]; then exit 1; fi""" % vol)
if vol.get('mount_point'):
Mount(vol['mount_point'],
device = vol['device'],
fstype = vol.get('fstype'),
options = vol.get('fsoptions', ["noatime"]),
action = ["mount", "enable"])
|
Install github version of boto in aws cookbook (for now)
|
Install github version of boto in aws cookbook (for now)
|
Python
|
bsd-3-clause
|
samuel/kokki
|
from kokki import *
Package("python-boto")
# Mount volumes and format if necessary
for vol in env.config.aws.volumes:
env.cookbooks.aws.EBSVolume(vol['volume_id'],
availability_zone = env.config.aws.availability_zone,
device = vol['device'],
action = "attach")
if vol.get('fstype'):
if vol['fstype'] == "xfs":
Package("xfsprogs")
Execute("mkfs.%(fstype)s -f %(device)s" % vol,
not_if = """if [ "`file -s %(device)s`" = "%(device)s: data" ]; then exit 1; fi""" % vol)
if vol.get('mount_point'):
Mount(vol['mount_point'],
device = vol['device'],
fstype = vol.get('fstype'),
options = vol.get('fsoptions', ["noatime"]),
action = ["mount", "enable"])
Install github version of boto in aws cookbook (for now)
|
import os
from kokki import *
# Package("python-boto")
Execute("pip install git+http://github.com/boto/boto.git#egg=boto",
not_if = 'python -c "import boto"')
Execute("mv /usr/lib/pymodules/python2.6/boto /tmp/boto.orig",
only_if = os.path.exists("/usr/lib/pymodules/python2.6/boto"))
# Mount volumes and format if necessary
for vol in env.config.aws.volumes:
env.cookbooks.aws.EBSVolume(vol['volume_id'],
availability_zone = env.config.aws.availability_zone,
device = vol['device'],
action = "attach")
if vol.get('fstype'):
if vol['fstype'] == "xfs":
Package("xfsprogs")
Execute("mkfs.%(fstype)s -f %(device)s" % vol,
not_if = """if [ "`file -s %(device)s`" = "%(device)s: data" ]; then exit 1; fi""" % vol)
if vol.get('mount_point'):
Mount(vol['mount_point'],
device = vol['device'],
fstype = vol.get('fstype'),
options = vol.get('fsoptions', ["noatime"]),
action = ["mount", "enable"])
|
<commit_before>
from kokki import *
Package("python-boto")
# Mount volumes and format if necessary
for vol in env.config.aws.volumes:
env.cookbooks.aws.EBSVolume(vol['volume_id'],
availability_zone = env.config.aws.availability_zone,
device = vol['device'],
action = "attach")
if vol.get('fstype'):
if vol['fstype'] == "xfs":
Package("xfsprogs")
Execute("mkfs.%(fstype)s -f %(device)s" % vol,
not_if = """if [ "`file -s %(device)s`" = "%(device)s: data" ]; then exit 1; fi""" % vol)
if vol.get('mount_point'):
Mount(vol['mount_point'],
device = vol['device'],
fstype = vol.get('fstype'),
options = vol.get('fsoptions', ["noatime"]),
action = ["mount", "enable"])
<commit_msg>Install github version of boto in aws cookbook (for now)<commit_after>
|
import os
from kokki import *
# Package("python-boto")
Execute("pip install git+http://github.com/boto/boto.git#egg=boto",
not_if = 'python -c "import boto"')
Execute("mv /usr/lib/pymodules/python2.6/boto /tmp/boto.orig",
only_if = os.path.exists("/usr/lib/pymodules/python2.6/boto"))
# Mount volumes and format if necessary
for vol in env.config.aws.volumes:
env.cookbooks.aws.EBSVolume(vol['volume_id'],
availability_zone = env.config.aws.availability_zone,
device = vol['device'],
action = "attach")
if vol.get('fstype'):
if vol['fstype'] == "xfs":
Package("xfsprogs")
Execute("mkfs.%(fstype)s -f %(device)s" % vol,
not_if = """if [ "`file -s %(device)s`" = "%(device)s: data" ]; then exit 1; fi""" % vol)
if vol.get('mount_point'):
Mount(vol['mount_point'],
device = vol['device'],
fstype = vol.get('fstype'),
options = vol.get('fsoptions', ["noatime"]),
action = ["mount", "enable"])
|
from kokki import *
Package("python-boto")
# Mount volumes and format if necessary
for vol in env.config.aws.volumes:
env.cookbooks.aws.EBSVolume(vol['volume_id'],
availability_zone = env.config.aws.availability_zone,
device = vol['device'],
action = "attach")
if vol.get('fstype'):
if vol['fstype'] == "xfs":
Package("xfsprogs")
Execute("mkfs.%(fstype)s -f %(device)s" % vol,
not_if = """if [ "`file -s %(device)s`" = "%(device)s: data" ]; then exit 1; fi""" % vol)
if vol.get('mount_point'):
Mount(vol['mount_point'],
device = vol['device'],
fstype = vol.get('fstype'),
options = vol.get('fsoptions', ["noatime"]),
action = ["mount", "enable"])
Install github version of boto in aws cookbook (for now)
import os
from kokki import *
# Package("python-boto")
Execute("pip install git+http://github.com/boto/boto.git#egg=boto",
not_if = 'python -c "import boto"')
Execute("mv /usr/lib/pymodules/python2.6/boto /tmp/boto.orig",
only_if = os.path.exists("/usr/lib/pymodules/python2.6/boto"))
# Mount volumes and format if necessary
for vol in env.config.aws.volumes:
env.cookbooks.aws.EBSVolume(vol['volume_id'],
availability_zone = env.config.aws.availability_zone,
device = vol['device'],
action = "attach")
if vol.get('fstype'):
if vol['fstype'] == "xfs":
Package("xfsprogs")
Execute("mkfs.%(fstype)s -f %(device)s" % vol,
not_if = """if [ "`file -s %(device)s`" = "%(device)s: data" ]; then exit 1; fi""" % vol)
if vol.get('mount_point'):
Mount(vol['mount_point'],
device = vol['device'],
fstype = vol.get('fstype'),
options = vol.get('fsoptions', ["noatime"]),
action = ["mount", "enable"])
|
<commit_before>
from kokki import *
Package("python-boto")
# Mount volumes and format if necessary
for vol in env.config.aws.volumes:
env.cookbooks.aws.EBSVolume(vol['volume_id'],
availability_zone = env.config.aws.availability_zone,
device = vol['device'],
action = "attach")
if vol.get('fstype'):
if vol['fstype'] == "xfs":
Package("xfsprogs")
Execute("mkfs.%(fstype)s -f %(device)s" % vol,
not_if = """if [ "`file -s %(device)s`" = "%(device)s: data" ]; then exit 1; fi""" % vol)
if vol.get('mount_point'):
Mount(vol['mount_point'],
device = vol['device'],
fstype = vol.get('fstype'),
options = vol.get('fsoptions', ["noatime"]),
action = ["mount", "enable"])
<commit_msg>Install github version of boto in aws cookbook (for now)<commit_after>
import os
from kokki import *
# Package("python-boto")
Execute("pip install git+http://github.com/boto/boto.git#egg=boto",
not_if = 'python -c "import boto"')
Execute("mv /usr/lib/pymodules/python2.6/boto /tmp/boto.orig",
only_if = os.path.exists("/usr/lib/pymodules/python2.6/boto"))
# Mount volumes and format if necessary
for vol in env.config.aws.volumes:
env.cookbooks.aws.EBSVolume(vol['volume_id'],
availability_zone = env.config.aws.availability_zone,
device = vol['device'],
action = "attach")
if vol.get('fstype'):
if vol['fstype'] == "xfs":
Package("xfsprogs")
Execute("mkfs.%(fstype)s -f %(device)s" % vol,
not_if = """if [ "`file -s %(device)s`" = "%(device)s: data" ]; then exit 1; fi""" % vol)
if vol.get('mount_point'):
Mount(vol['mount_point'],
device = vol['device'],
fstype = vol.get('fstype'),
options = vol.get('fsoptions', ["noatime"]),
action = ["mount", "enable"])
|
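The not_if and only_if guards above are what keep the recipe idempotent: a resource's action is skipped when its guard indicates the work is already done. Note that the new only_if receives an already-evaluated boolean from os.path.exists() rather than a command string, presumably evaluated once when the recipe is loaded. Roughly how a shell-command guard behaves, as a hedged standalone sketch (the function name is illustrative, not kokki's API):

import subprocess

def should_run(not_if=None):
    # Skip the action when the not_if command exits with status 0.
    if not_if is None:
        return True
    return subprocess.call(not_if, shell=True) != 0

if should_run(not_if='python -c "import boto"'):
    subprocess.check_call("pip install boto", shell=True)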
183f9455425fa63b6ca43c5d4fe650bcf2179da5
|
ironic/drivers/modules/storage/noop.py
|
ironic/drivers/modules/storage/noop.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ironic.drivers import base
class NoopStorage(base.StorageInterface):
"""No-op Storage Interface."""
def validate(self, task):
pass
def get_properties(self):
return {}
def attach_volumes(self, task):
pass
def detach_volumes(self, task):
pass
def should_write_image(self, task):
return True
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ironic.drivers import base
class NoopStorage(base.StorageInterface):
"""No-op Storage Interface."""
def validate(self, task):
pass
def get_properties(self):
return {}
def attach_volumes(self, task):
pass
def detach_volumes(self, task):
pass
def should_write_image(self, task):
return False
|
Update should_write_image to return False
|
Update should_write_image to return False
Update should_write_image to return False
|
Python
|
apache-2.0
|
SauloAislan/ironic,SauloAislan/ironic
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ironic.drivers import base
class NoopStorage(base.StorageInterface):
"""No-op Storage Interface."""
def validate(self, task):
pass
def get_properties(self):
return {}
def attach_volumes(self, task):
pass
def detach_volumes(self, task):
pass
def should_write_image(self, task):
return True
Update should_write_image to return False
Update should_write_image to return False
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ironic.drivers import base
class NoopStorage(base.StorageInterface):
"""No-op Storage Interface."""
def validate(self, task):
pass
def get_properties(self):
return {}
def attach_volumes(self, task):
pass
def detach_volumes(self, task):
pass
def should_write_image(self, task):
return False
|
<commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ironic.drivers import base
class NoopStorage(base.StorageInterface):
"""No-op Storage Interface."""
def validate(self, task):
pass
def get_properties(self):
return {}
def attach_volumes(self, task):
pass
def detach_volumes(self, task):
pass
def should_write_image(self, task):
return True
<commit_msg>Update should_write_image to return False
Update should_write_image to return False<commit_after>
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ironic.drivers import base
class NoopStorage(base.StorageInterface):
"""No-op Storage Interface."""
def validate(self, task):
pass
def get_properties(self):
return {}
def attach_volumes(self, task):
pass
def detach_volumes(self, task):
pass
def should_write_image(self, task):
return False
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ironic.drivers import base
class NoopStorage(base.StorageInterface):
"""No-op Storage Interface."""
def validate(self, task):
pass
def get_properties(self):
return {}
def attach_volumes(self, task):
pass
def detach_volumes(self, task):
pass
def should_write_image(self, task):
return True
Update should_write_image to return False
Update should_write_image to return False# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ironic.drivers import base
class NoopStorage(base.StorageInterface):
"""No-op Storage Interface."""
def validate(self, task):
pass
def get_properties(self):
return {}
def attach_volumes(self, task):
pass
def detach_volumes(self, task):
pass
def should_write_image(self, task):
return False
|
<commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ironic.drivers import base
class NoopStorage(base.StorageInterface):
"""No-op Storage Interface."""
def validate(self, task):
pass
def get_properties(self):
return {}
def attach_volumes(self, task):
pass
def detach_volumes(self, task):
pass
def should_write_image(self, task):
return True
<commit_msg>Update should_write_image to return False
Update should_write_image to return False<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ironic.drivers import base
class NoopStorage(base.StorageInterface):
"""No-op Storage Interface."""
def validate(self, task):
pass
def get_properties(self):
return {}
def attach_volumes(self, task):
pass
def detach_volumes(self, task):
pass
def should_write_image(self, task):
return False
|
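should_write_image() tells Ironic's deploy process whether it still needs to write an OS image to the node's local disk; returning False signals that storage is handled elsewhere (for example, boot-from-volume), so the change above turns the no-op interface into one that skips local image writes. A hedged sketch of a dynamic variant (the volume_targets attribute is an assumption for illustration, not taken from this commit):

from ironic.drivers import base

class ExternalStorage(base.StorageInterface):
    """Hedged sketch: decide dynamically instead of returning a constant."""
    def validate(self, task):
        pass
    def get_properties(self):
        return {}
    def attach_volumes(self, task):
        pass
    def detach_volumes(self, task):
        pass
    def should_write_image(self, task):
        # Write a local image only when no remote volumes are attached;
        # the attribute name is illustrative.
        return not task.volume_targets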
ae689c9de698daeaf8ab5275c384183cb665c903
|
neutron_classifier/common/constants.py
|
neutron_classifier/common/constants.py
|
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
CLASSIFIER_TYPES = ['ip_classifier', 'ipv4_classifier', 'ipv6_classifier',
'transport_classifier', 'ethernet_classifier',
'encapsulation_classifier', 'neutron_port_classifier']
# TODO(sc68cal) add more protocols
PROTOCOLS = ['tcp', 'udp', 'icmp', 'icmpv6']
ENCAPSULATION_TYPES = ['vxlan', 'gre']
NEUTRON_SERVICES = ['neutron-fwaas', 'networking-sfc', 'security-group']
|
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# TODO(sc68cal) add more protocols
PROTOCOLS = ['tcp', 'udp', 'icmp', 'icmpv6']
ENCAPSULATION_TYPES = ['vxlan', 'gre']
NEUTRON_SERVICES = ['neutron-fwaas', 'networking-sfc', 'security-group']
|
Remove CLASSIFIER_TYPES constant - it was never used
|
Remove CLASSIFIER_TYPES constant - it was never used
Change-Id: Ia6ba4453f6bc9b9de0da1e83d2dc75147fb91882
|
Python
|
apache-2.0
|
openstack/neutron-classifier,openstack/neutron-classifier
|
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
CLASSIFIER_TYPES = ['ip_classifier', 'ipv4_classifier', 'ipv6_classifier',
'transport_classifier', 'ethernet_classifier',
'encapsulation_classifier', 'neutron_port_classifier']
# TODO(sc68cal) add more protocols
PROTOCOLS = ['tcp', 'udp', 'icmp', 'icmpv6']
ENCAPSULATION_TYPES = ['vxlan', 'gre']
NEUTRON_SERVICES = ['neutron-fwaas', 'networking-sfc', 'security-group']
Remove CLASSIFIER_TYPES constant - it was never used
Change-Id: Ia6ba4453f6bc9b9de0da1e83d2dc75147fb91882
|
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# TODO(sc68cal) add more protocols
PROTOCOLS = ['tcp', 'udp', 'icmp', 'icmpv6']
ENCAPSULATION_TYPES = ['vxlan', 'gre']
NEUTRON_SERVICES = ['neutron-fwaas', 'networking-sfc', 'security-group']
|
<commit_before># Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
CLASSIFIER_TYPES = ['ip_classifier', 'ipv4_classifier', 'ipv6_classifier',
'transport_classifier', 'ethernet_classifier',
'encapsulation_classifier', 'neutron_port_classifier']
# TODO(sc68cal) add more protocols
PROTOCOLS = ['tcp', 'udp', 'icmp', 'icmpv6']
ENCAPSULATION_TYPES = ['vxlan', 'gre']
NEUTRON_SERVICES = ['neutron-fwaas', 'networking-sfc', 'security-group']
<commit_msg>Remove CLASSIFIER_TYPES constant - it was never used
Change-Id: Ia6ba4453f6bc9b9de0da1e83d2dc75147fb91882<commit_after>
|
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# TODO(sc68cal) add more protocols
PROTOCOLS = ['tcp', 'udp', 'icmp', 'icmpv6']
ENCAPSULATION_TYPES = ['vxlan', 'gre']
NEUTRON_SERVICES = ['neutron-fwaas', 'networking-sfc', 'security-group']
|
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
CLASSIFIER_TYPES = ['ip_classifier', 'ipv4_classifier', 'ipv6_classifier',
'transport_classifier', 'ethernet_classifier',
'encapsulation_classifier', 'neutron_port_classifier']
# TODO(sc68cal) add more protocols
PROTOCOLS = ['tcp', 'udp', 'icmp', 'icmpv6']
ENCAPSULATION_TYPES = ['vxlan', 'gre']
NEUTRON_SERVICES = ['neutron-fwaas', 'networking-sfc', 'security-group']
Remove CLASSIFIER_TYPES constant - it was never used
Change-Id: Ia6ba4453f6bc9b9de0da1e83d2dc75147fb91882# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# TODO(sc68cal) add more protocols
PROTOCOLS = ['tcp', 'udp', 'icmp', 'icmpv6']
ENCAPSULATION_TYPES = ['vxlan', 'gre']
NEUTRON_SERVICES = ['neutron-fwaas', 'networking-sfc', 'security-group']
|
<commit_before># Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
CLASSIFIER_TYPES = ['ip_classifier', 'ipv4_classifier', 'ipv6_classifier',
'transport_classifier', 'ethernet_classifier',
'encapsulation_classifier', 'neutron_port_classifier']
# TODO(sc68cal) add more protocols
PROTOCOLS = ['tcp', 'udp', 'icmp', 'icmpv6']
ENCAPSULATION_TYPES = ['vxlan', 'gre']
NEUTRON_SERVICES = ['neutron-fwaas', 'networking-sfc', 'security-group']
<commit_msg>Remove CLASSIFIER_TYPES constant - it was never used
Change-Id: Ia6ba4453f6bc9b9de0da1e83d2dc75147fb91882<commit_after># Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# TODO(sc68cal) add more protocols
PROTOCOLS = ['tcp', 'udp', 'icmp', 'icmpv6']
ENCAPSULATION_TYPES = ['vxlan', 'gre']
NEUTRON_SERVICES = ['neutron-fwaas', 'networking-sfc', 'security-group']
|
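Before deleting a module-level constant like CLASSIFIER_TYPES, it is worth confirming that nothing else imports or references it. A hedged helper for that check (names and paths are illustrative):

import pathlib

def find_references(name, root="."):
    # Return every Python file under root that mentions the identifier.
    return [p for p in pathlib.Path(root).rglob("*.py")
            if name in p.read_text(errors="ignore")]

# If find_references("CLASSIFIER_TYPES") only returns constants.py,
# the constant is dead code and safe to remove.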
15db774538b4fa18c0653fb741ba14c0373867c8
|
main/admin/forms.py
|
main/admin/forms.py
|
from django import forms
from main.models import Profile, LanProfile
class AdminLanProfileForm(forms.ModelForm):
class Meta:
model = LanProfile
fields = '__all__'
class AdminProfileForm(forms.ModelForm):
class Meta:
model = Profile
fields = '__all__'
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
instance = kwargs.get('instance', None)
if instance:
self.fields['grade'].choices += ((instance.grade, instance.grade), ('none', 'Ukendt'))
|
from django import forms
from main.models import Profile, LanProfile, GRADES
class AdminLanProfileForm(forms.ModelForm):
class Meta:
model = LanProfile
fields = '__all__'
class AdminProfileForm(forms.ModelForm):
class Meta:
model = Profile
fields = '__all__'
grade = forms.ChoiceField(GRADES, required=True, label='Klasse')
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
instance = kwargs.get('instance', None)
if instance:
self.fields['grade'].choices += ((instance.grade, instance.grade), ('none', 'Ukendt'))
|
Make that work with admin too
|
Make that work with admin too
|
Python
|
mit
|
bomjacob/htxaarhuslan,bomjacob/htxaarhuslan,bomjacob/htxaarhuslan
|
from django import forms
from main.models import Profile, LanProfile
class AdminLanProfileForm(forms.ModelForm):
class Meta:
model = LanProfile
fields = '__all__'
class AdminProfileForm(forms.ModelForm):
class Meta:
model = Profile
fields = '__all__'
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
instance = kwargs.get('instance', None)
if instance:
self.fields['grade'].choices += ((instance.grade, instance.grade), ('none', 'Ukendt'))
Make that work with admin too
|
from django import forms
from main.models import Profile, LanProfile, GRADES
class AdminLanProfileForm(forms.ModelForm):
class Meta:
model = LanProfile
fields = '__all__'
class AdminProfileForm(forms.ModelForm):
class Meta:
model = Profile
fields = '__all__'
grade = forms.ChoiceField(GRADES, required=True, label='Klasse')
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
instance = kwargs.get('instance', None)
if instance:
self.fields['grade'].choices += ((instance.grade, instance.grade), ('none', 'Ukendt'))
|
<commit_before>from django import forms
from main.models import Profile, LanProfile
class AdminLanProfileForm(forms.ModelForm):
class Meta:
model = LanProfile
fields = '__all__'
class AdminProfileForm(forms.ModelForm):
class Meta:
model = Profile
fields = '__all__'
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
instance = kwargs.get('instance', None)
if instance:
self.fields['grade'].choices += ((instance.grade, instance.grade), ('none', 'Ukendt'))
<commit_msg>Make that work with admin too<commit_after>
|
from django import forms
from main.models import Profile, LanProfile, GRADES
class AdminLanProfileForm(forms.ModelForm):
class Meta:
model = LanProfile
fields = '__all__'
class AdminProfileForm(forms.ModelForm):
class Meta:
model = Profile
fields = '__all__'
grade = forms.ChoiceField(GRADES, required=True, label='Klasse')
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
instance = kwargs.get('instance', None)
if instance:
self.fields['grade'].choices += ((instance.grade, instance.grade), ('none', 'Ukendt'))
|
from django import forms
from main.models import Profile, LanProfile
class AdminLanProfileForm(forms.ModelForm):
class Meta:
model = LanProfile
fields = '__all__'
class AdminProfileForm(forms.ModelForm):
class Meta:
model = Profile
fields = '__all__'
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
instance = kwargs.get('instance', None)
if instance:
self.fields['grade'].choices += ((instance.grade, instance.grade), ('none', 'Ukendt'))
Make that work with admin toofrom django import forms
from main.models import Profile, LanProfile, GRADES
class AdminLanProfileForm(forms.ModelForm):
class Meta:
model = LanProfile
fields = '__all__'
class AdminProfileForm(forms.ModelForm):
class Meta:
model = Profile
fields = '__all__'
grade = forms.ChoiceField(GRADES, required=True, label='Klasse')
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
instance = kwargs.get('instance', None)
if instance:
self.fields['grade'].choices += ((instance.grade, instance.grade), ('none', 'Ukendt'))
|
<commit_before>from django import forms
from main.models import Profile, LanProfile
class AdminLanProfileForm(forms.ModelForm):
class Meta:
model = LanProfile
fields = '__all__'
class AdminProfileForm(forms.ModelForm):
class Meta:
model = Profile
fields = '__all__'
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
instance = kwargs.get('instance', None)
if instance:
self.fields['grade'].choices += ((instance.grade, instance.grade), ('none', 'Ukendt'))
<commit_msg>Make that work with admin too<commit_after>from django import forms
from main.models import Profile, LanProfile, GRADES
class AdminLanProfileForm(forms.ModelForm):
class Meta:
model = LanProfile
fields = '__all__'
class AdminProfileForm(forms.ModelForm):
class Meta:
model = Profile
fields = '__all__'
grade = forms.ChoiceField(GRADES, required=True, label='Klasse')
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
instance = kwargs.get('instance', None)
if instance:
self.fields['grade'].choices += ((instance.grade, instance.grade), ('none', 'Ukendt'))
|
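Declaring grade explicitly on the form class is what makes the __init__ hook safe with the admin: the declared ChoiceField replaces whatever field Django would otherwise derive from the model, so self.fields['grade'].choices is guaranteed to exist and to be extendable. A hedged standalone sketch of the same pattern (the GRADES shape is an assumed stand-in for main.models.GRADES):

from django import forms

GRADES = (('1a', '1a'), ('2b', '2b'))  # illustrative stand-in

class GradeForm(forms.Form):
    grade = forms.ChoiceField(choices=GRADES, required=True, label='Klasse')

    def __init__(self, *args, current_grade=None, **kwargs):
        super().__init__(*args, **kwargs)
        if current_grade:
            # Keep an existing (possibly retired) grade valid for this instance.
            self.fields['grade'].choices += ((current_grade, current_grade),)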
c557058a7a7206167108535129bc0b160e4fe62b
|
nipype/testing/tests/test_utils.py
|
nipype/testing/tests/test_utils.py
|
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""Test testing utilities
"""
from nipype.testing.utils import TempFATFS
from nose.tools import assert_true
def test_tempfatfs():
with TempFATFS() as tmpdir:
yield assert_true, tmpdir is not None
|
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""Test testing utilities
"""
import os
import warnings
from nipype.testing.utils import TempFATFS
from nose.tools import assert_true
def test_tempfatfs():
try:
fatfs = TempFATFS()
except IOError:
warnings.warn("Cannot mount FAT filesystems with FUSE")
else:
with fatfs as tmpdir:
yield assert_true, os.path.exists(tmpdir)
|
Add warning for TempFATFS test
|
TEST: Add warning for TempFATFS test
|
Python
|
bsd-3-clause
|
mick-d/nipype,carolFrohlich/nipype,FCP-INDI/nipype,sgiavasis/nipype,mick-d/nipype,FCP-INDI/nipype,FCP-INDI/nipype,carolFrohlich/nipype,carolFrohlich/nipype,FCP-INDI/nipype,mick-d/nipype,sgiavasis/nipype,mick-d/nipype,carolFrohlich/nipype,sgiavasis/nipype,sgiavasis/nipype
|
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""Test testing utilities
"""
from nipype.testing.utils import TempFATFS
from nose.tools import assert_true
def test_tempfatfs():
with TempFATFS() as tmpdir:
yield assert_true, tmpdir is not None
TEST: Add warning for TempFATFS test
|
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""Test testing utilities
"""
import os
import warnings
from nipype.testing.utils import TempFATFS
from nose.tools import assert_true
def test_tempfatfs():
try:
fatfs = TempFATFS()
except IOError:
warnings.warn("Cannot mount FAT filesystems with FUSE")
else:
with fatfs as tmpdir:
yield assert_true, os.path.exists(tmpdir)
|
<commit_before># emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""Test testing utilities
"""
from nipype.testing.utils import TempFATFS
from nose.tools import assert_true
def test_tempfatfs():
with TempFATFS() as tmpdir:
yield assert_true, tmpdir is not None
<commit_msg>TEST: Add warning for TempFATFS test<commit_after>
|
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""Test testing utilities
"""
import os
import warnings
from nipype.testing.utils import TempFATFS
from nose.tools import assert_true
def test_tempfatfs():
try:
fatfs = TempFATFS()
except IOError:
warnings.warn("Cannot mount FAT filesystems with FUSE")
else:
with fatfs as tmpdir:
yield assert_true, os.path.exists(tmpdir)
|
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""Test testing utilities
"""
from nipype.testing.utils import TempFATFS
from nose.tools import assert_true
def test_tempfatfs():
with TempFATFS() as tmpdir:
yield assert_true, tmpdir is not None
TEST: Add warning for TempFATFS test# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""Test testing utilities
"""
import os
import warnings
from nipype.testing.utils import TempFATFS
from nose.tools import assert_true
def test_tempfatfs():
try:
fatfs = TempFATFS()
except IOError:
warnings.warn("Cannot mount FAT filesystems with FUSE")
else:
with fatfs as tmpdir:
yield assert_true, os.path.exists(tmpdir)
|
<commit_before># emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""Test testing utilities
"""
from nipype.testing.utils import TempFATFS
from nose.tools import assert_true
def test_tempfatfs():
with TempFATFS() as tmpdir:
yield assert_true, tmpdir is not None
<commit_msg>TEST: Add warning for TempFATFS test<commit_after># emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""Test testing utilities
"""
import os
import warnings
from nipype.testing.utils import TempFATFS
from nose.tools import assert_true
def test_tempfatfs():
try:
fatfs = TempFATFS()
except IOError:
warnings.warn("Cannot mount FAT filesystems with FUSE")
else:
with fatfs as tmpdir:
yield assert_true, os.path.exists(tmpdir)
|
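The try/except around TempFATFS() lets the test degrade gracefully on machines where FUSE cannot mount a FAT image: it emits a warning instead of erroring out, and the else branch only runs the real assertion when the mount succeeded. The same guard written for pytest, as a hedged sketch (nose's generator-style yield asserts do not carry over):

import os
import pytest
from nipype.testing.utils import TempFATFS

def test_tempfatfs():
    try:
        fatfs = TempFATFS()
    except IOError:
        pytest.skip("Cannot mount FAT filesystems with FUSE")
    with fatfs as tmpdir:
        assert os.path.exists(tmpdir)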
44547695f662c957f5242f7cfefd328b33d99830
|
sso/backends.py
|
sso/backends.py
|
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
from hashlib import sha1
class SimpleHashModelBackend(ModelBackend):
def authenticate(self, username=None, password=None):
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
return None
if '$' in user.password:
if user.check_password(password):
user.password = sha1(password).hexdigest()
user.save()
return user
else:
if user.password == sha1(password).hexdigest():
return user
return None
|
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
from hashlib import sha1
class SimpleHashModelBackend(ModelBackend):
supports_anonymous_user = False
supports_object_permissions = False
supports_inactive_user = False
def authenticate(self, username=None, password=None):
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
return None
if '$' in user.password:
if user.check_password(password):
user.password = sha1(password).hexdigest()
user.save()
return user
else:
if user.password == sha1(password).hexdigest():
return user
return None
|
Update the authentication backend with upcoming features
|
Update the authentication backend with upcoming features
|
Python
|
bsd-3-clause
|
nikdoof/test-auth
|
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
from hashlib import sha1
class SimpleHashModelBackend(ModelBackend):
def authenticate(self, username=None, password=None):
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
return None
if '$' in user.password:
if user.check_password(password):
user.password = sha1(password).hexdigest()
user.save()
return user
else:
if user.password == sha1(password).hexdigest():
return user
return None
Update the authentication backend with upcoming features
|
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
from hashlib import sha1
class SimpleHashModelBackend(ModelBackend):
supports_anonymous_user = False
supports_object_permissions = False
supports_inactive_user = False
def authenticate(self, username=None, password=None):
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
return None
if '$' in user.password:
if user.check_password(password):
user.password = sha1(password).hexdigest()
user.save()
return user
else:
if user.password == sha1(password).hexdigest():
return user
return None
|
<commit_before>from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
from hashlib import sha1
class SimpleHashModelBackend(ModelBackend):
def authenticate(self, username=None, password=None):
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
return None
if '$' in user.password:
if user.check_password(password):
user.password = sha1(password).hexdigest()
user.save()
return user
else:
if user.password == sha1(password).hexdigest():
return user
return None
<commit_msg>Update the authentication backend with upcoming features<commit_after>
|
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
from hashlib import sha1
class SimpleHashModelBackend(ModelBackend):
supports_anonymous_user = False
supports_object_permissions = False
supports_inactive_user = False
def authenticate(self, username=None, password=None):
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
return None
if '$' in user.password:
if user.check_password(password):
user.password = sha1(password).hexdigest()
user.save()
return user
else:
if user.password == sha1(password).hexdigest():
return user
return None
|
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
from hashlib import sha1
class SimpleHashModelBackend(ModelBackend):
def authenticate(self, username=None, password=None):
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
return None
if '$' in user.password:
if user.check_password(password):
user.password = sha1(password).hexdigest()
user.save()
return user
else:
if user.password == sha1(password).hexdigest():
return user
return None
Update the authentication backend with upcoming featuresfrom django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
from hashlib import sha1
class SimpleHashModelBackend(ModelBackend):
supports_anonymous_user = False
supports_object_permissions = False
supports_inactive_user = False
def authenticate(self, username=None, password=None):
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
return None
if '$' in user.password:
if user.check_password(password):
user.password = sha1(password).hexdigest()
user.save()
return user
else:
if user.password == sha1(password).hexdigest():
return user
return None
|
<commit_before>from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
from hashlib import sha1
class SimpleHashModelBackend(ModelBackend):
def authenticate(self, username=None, password=None):
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
return None
if '$' in user.password:
if user.check_password(password):
user.password = sha1(password).hexdigest()
user.save()
return user
else:
if user.password == sha1(password).hexdigest():
return user
return None
<commit_msg>Update the authentication backend with upcoming features<commit_after>from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
from hashlib import sha1
class SimpleHashModelBackend(ModelBackend):
supports_anonymous_user = False
supports_object_permissions = False
supports_inactive_user = False
def authenticate(self, username=None, password=None):
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
return None
if '$' in user.password:
if user.check_password(password):
user.password = sha1(password).hexdigest()
user.save()
return user
else:
if user.password == sha1(password).hexdigest():
return user
return None
|
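The three supports_* flags match the backend capability protocol Django used around versions 1.2 to 1.4; later releases infer capabilities and ignore these attributes, which is presumably what "upcoming features" refers to. Separately, comparing unsalted SHA-1 digests with == is open to timing analysis; a hedged hardening sketch, not part of this commit:

import hmac
from hashlib import sha1

def password_matches(stored_hexdigest, candidate):
    # Constant-time comparison avoids leaking prefix-match length via timing.
    return hmac.compare_digest(stored_hexdigest,
                               sha1(candidate.encode()).hexdigest())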
2515b3402c671c2949e5f3c712cb284777f2accf
|
examples/boilerplates/base_test_case.py
|
examples/boilerplates/base_test_case.py
|
'''
You can use this as a boilerplate for your test framework.
Define your customized library methods in a master class like this.
Then have all your test classes inherit it.
BaseTestCase will inherit SeleniumBase methods from BaseCase.
'''
from seleniumbase import BaseCase
class BaseTestCase(BaseCase):
def setUp(self):
super(BaseTestCase, self).setUp()
# Add custom setUp code for your tests AFTER the super().setUp()
def tearDown(self):
# Add custom tearDown code for your tests BEFORE the super().tearDown()
super(BaseTestCase, self).tearDown()
def login(self):
# <<< Placeholder. Add your code here. >>>
# Reduce duplicate code in tests by having reusable methods like this.
# If the UI changes, the fix can be applied in one place.
pass
def example_method(self):
# <<< Placeholder. Add your code here. >>>
pass
'''
# Now you can do something like this in your test files:
from base_test_case import BaseTestCase
class MyTests(BaseTestCase):
def test_example(self):
self.login()
self.example_method()
'''
|
'''
You can use this as a boilerplate for your test framework.
Define your customized library methods in a master class like this.
Then have all your test classes inherit it.
BaseTestCase will inherit SeleniumBase methods from BaseCase.
'''
from seleniumbase import BaseCase
class BaseTestCase(BaseCase):
def setUp(self):
super(BaseTestCase, self).setUp()
# <<< Add custom setUp code for tests AFTER the super().setUp() >>>
def tearDown(self):
self.save_teardown_screenshot()
# <<< Add custom tearDown code BEFORE the super().tearDown() >>>
super(BaseTestCase, self).tearDown()
def login(self):
# <<< Placeholder. Add your code here. >>>
# Reduce duplicate code in tests by having reusable methods like this.
# If the UI changes, the fix can be applied in one place.
pass
def example_method(self):
# <<< Placeholder. Add your code here. >>>
pass
'''
# Now you can do something like this in your test files:
from base_test_case import BaseTestCase
class MyTests(BaseTestCase):
def test_example(self):
self.login()
self.example_method()
'''
|
Update boilerplate to save a screenshot before the tearDown()
|
Update boilerplate to save a screenshot before the tearDown()
|
Python
|
mit
|
seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase
|
'''
You can use this as a boilerplate for your test framework.
Define your customized library methods in a master class like this.
Then have all your test classes inherit it.
BaseTestCase will inherit SeleniumBase methods from BaseCase.
'''
from seleniumbase import BaseCase
class BaseTestCase(BaseCase):
def setUp(self):
super(BaseTestCase, self).setUp()
# Add custom setUp code for your tests AFTER the super().setUp()
def tearDown(self):
# Add custom tearDown code for your tests BEFORE the super().tearDown()
super(BaseTestCase, self).tearDown()
def login(self):
# <<< Placeholder. Add your code here. >>>
# Reduce duplicate code in tests by having reusable methods like this.
# If the UI changes, the fix can be applied in one place.
pass
def example_method(self):
# <<< Placeholder. Add your code here. >>>
pass
'''
# Now you can do something like this in your test files:
from base_test_case import BaseTestCase
class MyTests(BaseTestCase):
def test_example(self):
self.login()
self.example_method()
'''
Update boilerplate to save a screenshot before the tearDown()
|
'''
You can use this as a boilerplate for your test framework.
Define your customized library methods in a master class like this.
Then have all your test classes inherit it.
BaseTestCase will inherit SeleniumBase methods from BaseCase.
'''
from seleniumbase import BaseCase
class BaseTestCase(BaseCase):
def setUp(self):
super(BaseTestCase, self).setUp()
# <<< Add custom setUp code for tests AFTER the super().setUp() >>>
def tearDown(self):
self.save_teardown_screenshot()
# <<< Add custom tearDown code BEFORE the super().tearDown() >>>
super(BaseTestCase, self).tearDown()
def login(self):
# <<< Placeholder. Add your code here. >>>
# Reduce duplicate code in tests by having reusable methods like this.
# If the UI changes, the fix can be applied in one place.
pass
def example_method(self):
# <<< Placeholder. Add your code here. >>>
pass
'''
# Now you can do something like this in your test files:
from base_test_case import BaseTestCase
class MyTests(BaseTestCase):
def test_example(self):
self.login()
self.example_method()
'''
|
<commit_before>'''
You can use this as a boilerplate for your test framework.
Define your customized library methods in a master class like this.
Then have all your test classes inherit it.
BaseTestCase will inherit SeleniumBase methods from BaseCase.
'''
from seleniumbase import BaseCase
class BaseTestCase(BaseCase):
def setUp(self):
super(BaseTestCase, self).setUp()
# Add custom setUp code for your tests AFTER the super().setUp()
def tearDown(self):
# Add custom tearDown code for your tests BEFORE the super().tearDown()
super(BaseTestCase, self).tearDown()
def login(self):
# <<< Placeholder. Add your code here. >>>
# Reduce duplicate code in tests by having reusable methods like this.
# If the UI changes, the fix can be applied in one place.
pass
def example_method(self):
# <<< Placeholder. Add your code here. >>>
pass
'''
# Now you can do something like this in your test files:
from base_test_case import BaseTestCase
class MyTests(BaseTestCase):
def test_example(self):
self.login()
self.example_method()
'''
<commit_msg>Update boilerplate to save a screenshot before the tearDown()<commit_after>
|
'''
You can use this as a boilerplate for your test framework.
Define your customized library methods in a master class like this.
Then have all your test classes inherit it.
BaseTestCase will inherit SeleniumBase methods from BaseCase.
'''
from seleniumbase import BaseCase
class BaseTestCase(BaseCase):
def setUp(self):
super(BaseTestCase, self).setUp()
# <<< Add custom setUp code for tests AFTER the super().setUp() >>>
def tearDown(self):
self.save_teardown_screenshot()
# <<< Add custom tearDown code BEFORE the super().tearDown() >>>
super(BaseTestCase, self).tearDown()
def login(self):
# <<< Placeholder. Add your code here. >>>
# Reduce duplicate code in tests by having reusable methods like this.
# If the UI changes, the fix can be applied in one place.
pass
def example_method(self):
# <<< Placeholder. Add your code here. >>>
pass
'''
# Now you can do something like this in your test files:
from base_test_case import BaseTestCase
class MyTests(BaseTestCase):
def test_example(self):
self.login()
self.example_method()
'''
|
'''
You can use this as a boilerplate for your test framework.
Define your customized library methods in a master class like this.
Then have all your test classes inherit it.
BaseTestCase will inherit SeleniumBase methods from BaseCase.
'''
from seleniumbase import BaseCase
class BaseTestCase(BaseCase):
def setUp(self):
super(BaseTestCase, self).setUp()
# Add custom setUp code for your tests AFTER the super().setUp()
def tearDown(self):
# Add custom tearDown code for your tests BEFORE the super().tearDown()
super(BaseTestCase, self).tearDown()
def login(self):
# <<< Placeholder. Add your code here. >>>
# Reduce duplicate code in tests by having reusable methods like this.
# If the UI changes, the fix can be applied in one place.
pass
def example_method(self):
# <<< Placeholder. Add your code here. >>>
pass
'''
# Now you can do something like this in your test files:
from base_test_case import BaseTestCase
class MyTests(BaseTestCase):
def test_example(self):
self.login()
self.example_method()
'''
Update boilerplate to save a screenshot before the tearDown()'''
You can use this as a boilerplate for your test framework.
Define your customized library methods in a master class like this.
Then have all your test classes inherit it.
BaseTestCase will inherit SeleniumBase methods from BaseCase.
'''
from seleniumbase import BaseCase
class BaseTestCase(BaseCase):
def setUp(self):
super(BaseTestCase, self).setUp()
# <<< Add custom setUp code for tests AFTER the super().setUp() >>>
def tearDown(self):
self.save_teardown_screenshot()
# <<< Add custom tearDown code BEFORE the super().tearDown() >>>
super(BaseTestCase, self).tearDown()
def login(self):
# <<< Placeholder. Add your code here. >>>
# Reduce duplicate code in tests by having reusable methods like this.
# If the UI changes, the fix can be applied in one place.
pass
def example_method(self):
# <<< Placeholder. Add your code here. >>>
pass
'''
# Now you can do something like this in your test files:
from base_test_case import BaseTestCase
class MyTests(BaseTestCase):
def test_example(self):
self.login()
self.example_method()
'''
|
<commit_before>'''
You can use this as a boilerplate for your test framework.
Define your customized library methods in a master class like this.
Then have all your test classes inherit it.
BaseTestCase will inherit SeleniumBase methods from BaseCase.
'''
from seleniumbase import BaseCase
class BaseTestCase(BaseCase):
def setUp(self):
super(BaseTestCase, self).setUp()
# Add custom setUp code for your tests AFTER the super().setUp()
def tearDown(self):
# Add custom tearDown code for your tests BEFORE the super().tearDown()
super(BaseTestCase, self).tearDown()
def login(self):
# <<< Placeholder. Add your code here. >>>
# Reduce duplicate code in tests by having reusable methods like this.
# If the UI changes, the fix can be applied in one place.
pass
def example_method(self):
# <<< Placeholder. Add your code here. >>>
pass
'''
# Now you can do something like this in your test files:
from base_test_case import BaseTestCase
class MyTests(BaseTestCase):
def test_example(self):
self.login()
self.example_method()
'''
<commit_msg>Update boilerplate to save a screenshot before the tearDown()<commit_after>'''
You can use this as a boilerplate for your test framework.
Define your customized library methods in a master class like this.
Then have all your test classes inherit it.
BaseTestCase will inherit SeleniumBase methods from BaseCase.
'''
from seleniumbase import BaseCase
class BaseTestCase(BaseCase):
def setUp(self):
super(BaseTestCase, self).setUp()
# <<< Add custom setUp code for tests AFTER the super().setUp() >>>
def tearDown(self):
self.save_teardown_screenshot()
# <<< Add custom tearDown code BEFORE the super().tearDown() >>>
super(BaseTestCase, self).tearDown()
def login(self):
# <<< Placeholder. Add your code here. >>>
# Reduce duplicate code in tests by having reusable methods like this.
# If the UI changes, the fix can be applied in one place.
pass
def example_method(self):
# <<< Placeholder. Add your code here. >>>
pass
'''
# Now you can do something like this in your test files:
from base_test_case import BaseTestCase
class MyTests(BaseTestCase):
def test_example(self):
self.login()
self.example_method()
'''
|
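The ordering in the boilerplate above is the whole point: the screenshot call runs before super().tearDown() because the framework's tearDown is what closes the browser session. A minimal stdlib-only sketch of that call ordering, assuming nothing beyond unittest (class and attribute names here are illustrative, not SeleniumBase API):

import unittest

class FrameworkBase(unittest.TestCase):
    # stand-in for BaseCase: owns the expensive resource
    def setUp(self):
        self.resource = 'browser'    # opened before any custom setUp code
    def tearDown(self):
        self.resource = None         # closed after all custom tearDown code

class ProjectBase(FrameworkBase):
    def setUp(self):
        super().setUp()              # framework first, custom code after
        self.logged_in = True
    def tearDown(self):
        # custom cleanup (e.g. a screenshot) while the resource still exists
        assert self.resource is not None
        super().tearDown()           # framework cleanup runs last

class MyTests(ProjectBase):
    def test_order(self):
        self.assertTrue(self.logged_in)

if __name__ == '__main__':
    unittest.main()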
1b338aa716311b4c91281993e35e9beda376735a
|
addons/osfstorage/settings/defaults.py
|
addons/osfstorage/settings/defaults.py
|
# encoding: utf-8
import importlib
import os
import logging
from website import settings
logger = logging.getLogger(__name__)
WATERBUTLER_CREDENTIALS = {
'storage': {}
}
WATERBUTLER_SETTINGS = {
'storage': {
'provider': 'filesystem',
'folder': os.path.join(settings.BASE_PATH, 'osfstoragecache'),
}
}
WATERBUTLER_RESOURCE = 'folder'
DISK_SAVING_MODE = settings.DISK_SAVING_MODE
try:
importlib.import_module('.{}'.format(settings.MIGRATION_ENV))
except:
logger.warn('No migration settings loaded for OSFStorage, falling back to local dev.')
|
# encoding: utf-8
import importlib
import os
import logging
from website import settings
logger = logging.getLogger(__name__)
WATERBUTLER_CREDENTIALS = {
'storage': {}
}
WATERBUTLER_SETTINGS = {
'storage': {
'provider': 'filesystem',
'folder': os.path.join(settings.BASE_PATH, 'osfstoragecache'),
}
}
WATERBUTLER_RESOURCE = 'folder'
DISK_SAVING_MODE = settings.DISK_SAVING_MODE
try:
mod = importlib.import_module('.{}'.format(settings.MIGRATION_ENV), package='addons.osfstorage.settings')
globals().update({k: getattr(mod, k) for k in dir(mod)})
except Exception as ex:
logger.warn('No migration settings loaded for OSFStorage, falling back to local dev. {}'.format(ex))
|
Make environmental override of osfstorage settings work
|
Make environmental override of osfstorage settings work
Signed-off-by: Chris Wisecarver <5fccdd17c1f7bcc7e393d2cb5e2fad37705ca69f@cos.io>
|
Python
|
apache-2.0
|
CenterForOpenScience/osf.io,binoculars/osf.io,monikagrabowska/osf.io,caneruguz/osf.io,TomBaxter/osf.io,chrisseto/osf.io,monikagrabowska/osf.io,sloria/osf.io,crcresearch/osf.io,erinspace/osf.io,caseyrollins/osf.io,Nesiehr/osf.io,TomBaxter/osf.io,leb2dg/osf.io,pattisdr/osf.io,baylee-d/osf.io,adlius/osf.io,felliott/osf.io,mfraezz/osf.io,chrisseto/osf.io,acshi/osf.io,CenterForOpenScience/osf.io,cslzchen/osf.io,CenterForOpenScience/osf.io,cslzchen/osf.io,acshi/osf.io,HalcyonChimera/osf.io,mattclark/osf.io,icereval/osf.io,brianjgeiger/osf.io,chennan47/osf.io,Nesiehr/osf.io,hmoco/osf.io,monikagrabowska/osf.io,felliott/osf.io,baylee-d/osf.io,leb2dg/osf.io,leb2dg/osf.io,felliott/osf.io,mfraezz/osf.io,caseyrollins/osf.io,caneruguz/osf.io,sloria/osf.io,monikagrabowska/osf.io,aaxelb/osf.io,acshi/osf.io,aaxelb/osf.io,cwisecarver/osf.io,adlius/osf.io,mattclark/osf.io,aaxelb/osf.io,saradbowman/osf.io,HalcyonChimera/osf.io,hmoco/osf.io,brianjgeiger/osf.io,pattisdr/osf.io,laurenrevere/osf.io,adlius/osf.io,Johnetordoff/osf.io,cslzchen/osf.io,Johnetordoff/osf.io,cwisecarver/osf.io,saradbowman/osf.io,TomBaxter/osf.io,hmoco/osf.io,cwisecarver/osf.io,monikagrabowska/osf.io,HalcyonChimera/osf.io,mattclark/osf.io,icereval/osf.io,chrisseto/osf.io,leb2dg/osf.io,Johnetordoff/osf.io,binoculars/osf.io,hmoco/osf.io,crcresearch/osf.io,Nesiehr/osf.io,brianjgeiger/osf.io,icereval/osf.io,mfraezz/osf.io,baylee-d/osf.io,pattisdr/osf.io,laurenrevere/osf.io,acshi/osf.io,chennan47/osf.io,binoculars/osf.io,cwisecarver/osf.io,brianjgeiger/osf.io,mfraezz/osf.io,chennan47/osf.io,chrisseto/osf.io,caseyrollins/osf.io,adlius/osf.io,felliott/osf.io,Johnetordoff/osf.io,Nesiehr/osf.io,erinspace/osf.io,HalcyonChimera/osf.io,CenterForOpenScience/osf.io,acshi/osf.io,cslzchen/osf.io,caneruguz/osf.io,aaxelb/osf.io,laurenrevere/osf.io,erinspace/osf.io,caneruguz/osf.io,sloria/osf.io,crcresearch/osf.io
|
# encoding: utf-8
import importlib
import os
import logging
from website import settings
logger = logging.getLogger(__name__)
WATERBUTLER_CREDENTIALS = {
'storage': {}
}
WATERBUTLER_SETTINGS = {
'storage': {
'provider': 'filesystem',
'folder': os.path.join(settings.BASE_PATH, 'osfstoragecache'),
}
}
WATERBUTLER_RESOURCE = 'folder'
DISK_SAVING_MODE = settings.DISK_SAVING_MODE
try:
importlib.import_module('.{}'.format(settings.MIGRATION_ENV))
except:
logger.warn('No migration settings loaded for OSFStorage, falling back to local dev.')
Make environmental override of osfstorage settings work
Signed-off-by: Chris Wisecarver <5fccdd17c1f7bcc7e393d2cb5e2fad37705ca69f@cos.io>
|
# encoding: utf-8
import importlib
import os
import logging
from website import settings
logger = logging.getLogger(__name__)
WATERBUTLER_CREDENTIALS = {
'storage': {}
}
WATERBUTLER_SETTINGS = {
'storage': {
'provider': 'filesystem',
'folder': os.path.join(settings.BASE_PATH, 'osfstoragecache'),
}
}
WATERBUTLER_RESOURCE = 'folder'
DISK_SAVING_MODE = settings.DISK_SAVING_MODE
try:
mod = importlib.import_module('.{}'.format(settings.MIGRATION_ENV), package='addons.osfstorage.settings')
globals().update({k: getattr(mod, k) for k in dir(mod)})
except Exception as ex:
logger.warn('No migration settings loaded for OSFStorage, falling back to local dev. {}'.format(ex))
|
<commit_before># encoding: utf-8
import importlib
import os
import logging
from website import settings
logger = logging.getLogger(__name__)
WATERBUTLER_CREDENTIALS = {
'storage': {}
}
WATERBUTLER_SETTINGS = {
'storage': {
'provider': 'filesystem',
'folder': os.path.join(settings.BASE_PATH, 'osfstoragecache'),
}
}
WATERBUTLER_RESOURCE = 'folder'
DISK_SAVING_MODE = settings.DISK_SAVING_MODE
try:
importlib.import_module('.{}'.format(settings.MIGRATION_ENV))
except:
logger.warn('No migration settings loaded for OSFStorage, falling back to local dev.')
<commit_msg>Make environmental override of osfstorage settings work
Signed-off-by: Chris Wisecarver <5fccdd17c1f7bcc7e393d2cb5e2fad37705ca69f@cos.io><commit_after>
|
# encoding: utf-8
import importlib
import os
import logging
from website import settings
logger = logging.getLogger(__name__)
WATERBUTLER_CREDENTIALS = {
'storage': {}
}
WATERBUTLER_SETTINGS = {
'storage': {
'provider': 'filesystem',
'folder': os.path.join(settings.BASE_PATH, 'osfstoragecache'),
}
}
WATERBUTLER_RESOURCE = 'folder'
DISK_SAVING_MODE = settings.DISK_SAVING_MODE
try:
mod = importlib.import_module('.{}'.format(settings.MIGRATION_ENV), package='addons.osfstorage.settings')
globals().update({k: getattr(mod, k) for k in dir(mod)})
except Exception as ex:
logger.warn('No migration settings loaded for OSFStorage, falling back to local dev. {}'.format(ex))
|
# encoding: utf-8
import importlib
import os
import logging
from website import settings
logger = logging.getLogger(__name__)
WATERBUTLER_CREDENTIALS = {
'storage': {}
}
WATERBUTLER_SETTINGS = {
'storage': {
'provider': 'filesystem',
'folder': os.path.join(settings.BASE_PATH, 'osfstoragecache'),
}
}
WATERBUTLER_RESOURCE = 'folder'
DISK_SAVING_MODE = settings.DISK_SAVING_MODE
try:
importlib.import_module('.{}'.format(settings.MIGRATION_ENV))
except:
logger.warn('No migration settings loaded for OSFStorage, falling back to local dev.')
Make environmental override of osfstorage settings work
Signed-off-by: Chris Wisecarver <5fccdd17c1f7bcc7e393d2cb5e2fad37705ca69f@cos.io># encoding: utf-8
import importlib
import os
import logging
from website import settings
logger = logging.getLogger(__name__)
WATERBUTLER_CREDENTIALS = {
'storage': {}
}
WATERBUTLER_SETTINGS = {
'storage': {
'provider': 'filesystem',
'folder': os.path.join(settings.BASE_PATH, 'osfstoragecache'),
}
}
WATERBUTLER_RESOURCE = 'folder'
DISK_SAVING_MODE = settings.DISK_SAVING_MODE
try:
mod = importlib.import_module('.{}'.format(settings.MIGRATION_ENV), package='addons.osfstorage.settings')
globals().update({k: getattr(mod, k) for k in dir(mod)})
except Exception as ex:
logger.warn('No migration settings loaded for OSFStorage, falling back to local dev. {}'.format(ex))
|
<commit_before># encoding: utf-8
import importlib
import os
import logging
from website import settings
logger = logging.getLogger(__name__)
WATERBUTLER_CREDENTIALS = {
'storage': {}
}
WATERBUTLER_SETTINGS = {
'storage': {
'provider': 'filesystem',
'folder': os.path.join(settings.BASE_PATH, 'osfstoragecache'),
}
}
WATERBUTLER_RESOURCE = 'folder'
DISK_SAVING_MODE = settings.DISK_SAVING_MODE
try:
importlib.import_module('.{}'.format(settings.MIGRATION_ENV))
except:
logger.warn('No migration settings loaded for OSFStorage, falling back to local dev.')
<commit_msg>Make environmental override of osfstorage settings work
Signed-off-by: Chris Wisecarver <5fccdd17c1f7bcc7e393d2cb5e2fad37705ca69f@cos.io><commit_after># encoding: utf-8
import importlib
import os
import logging
from website import settings
logger = logging.getLogger(__name__)
WATERBUTLER_CREDENTIALS = {
'storage': {}
}
WATERBUTLER_SETTINGS = {
'storage': {
'provider': 'filesystem',
'folder': os.path.join(settings.BASE_PATH, 'osfstoragecache'),
}
}
WATERBUTLER_RESOURCE = 'folder'
DISK_SAVING_MODE = settings.DISK_SAVING_MODE
try:
mod = importlib.import_module('.{}'.format(settings.MIGRATION_ENV), package='addons.osfstorage.settings')
globals().update({k: getattr(mod, k) for k in dir(mod)})
except Exception as ex:
logger.warn('No migration settings loaded for OSFStorage, falling back to local dev. {}'.format(ex))
|
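The fix above works because importlib.import_module alone only loads the target module; nothing is copied into the caller's namespace, so the override settings were loaded and then silently discarded. A minimal sketch of the load-then-copy pattern, demonstrated against a real stdlib submodule so it runs as-is (the sketch filters private names, whereas the commit copies everything dir() returns):

import importlib

def load_overrides(env, package):
    """Import <package>.<env> and return its public attributes as a dict."""
    mod = importlib.import_module('.{}'.format(env), package=package)
    return {k: getattr(mod, k) for k in dir(mod) if not k.startswith('_')}

# demo: pull os.path's names into this module's namespace
globals().update(load_overrides('path', package='os'))
print(join('a', 'b'))  # os.path.join, now available unqualified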
5a45a312eebe9e432b066b99d914b49a2adb920c
|
openfaas/yaml2json/function/handler.py
|
openfaas/yaml2json/function/handler.py
|
# Author: Milos Buncic
# Date: 2017/10/14
# Description: Convert YAML to JSON and vice versa (OpenFaaS function)
import os
import sys
import json
import yaml
def handle(data, **parms):
def yaml2json(ydata):
"""
Convert YAML to JSON (output: JSON)
"""
try:
d = yaml.load(ydata, Loader=yaml.BaseLoader)
except Exception as e:
d = {'error': '{}'.format(e)}
return json.dumps(d)
def json2yaml(jdata):
"""
Convert JSON to YAML (output: YAML)
"""
try:
d = json.loads(jdata)
except Exception as e:
d = {'error': '{}'.format(e)}
return yaml.dump(d, default_flow_style=False)
if parms.get('reverse') == 'true':
print(json2yaml(data))
else:
print(yaml2json(data))
|
# Author: Milos Buncic
# Date: 2017/10/14
# Description: Convert YAML to JSON and vice versa (OpenFaaS function)
import os
import sys
import json
import yaml
def yaml2json(data):
"""
Convert YAML to JSON (output: JSON)
"""
try:
d = yaml.load(data, Loader=yaml.BaseLoader)
except Exception as e:
d = {'error': '{}'.format(e)}
return json.dumps(d)
def json2yaml(data):
"""
Convert JSON to YAML (output: YAML)
"""
try:
d = json.loads(data)
except Exception as e:
d = {'error': '{}'.format(e)}
return yaml.dump(d, default_flow_style=False)
def handle(data, **parms):
if parms.get('reverse') == 'true':
print(json2yaml(data))
else:
print(yaml2json(data))
|
Make handle function behave similarly to main function
|
Make handle function behave similarly to main function
|
Python
|
mit
|
psyhomb/serverless
|
# Author: Milos Buncic
# Date: 2017/10/14
# Description: Convert YAML to JSON and vice versa (OpenFaaS function)
import os
import sys
import json
import yaml
def handle(data, **parms):
def yaml2json(ydata):
"""
Convert YAML to JSON (output: JSON)
"""
try:
d = yaml.load(ydata, Loader=yaml.BaseLoader)
except Exception as e:
d = {'error': '{}'.format(e)}
return json.dumps(d)
def json2yaml(jdata):
"""
Convert JSON to YAML (output: YAML)
"""
try:
d = json.loads(jdata)
except Exception as e:
d = {'error': '{}'.format(e)}
return yaml.dump(d, default_flow_style=False)
if parms.get('reverse') == 'true':
print(json2yaml(data))
else:
print(yaml2json(data))
Make handle function behave similarly to main function
|
# Author: Milos Buncic
# Date: 2017/10/14
# Description: Convert YAML to JSON and vice versa (OpenFaaS function)
import os
import sys
import json
import yaml
def yaml2json(data):
"""
Convert YAML to JSON (output: JSON)
"""
try:
d = yaml.load(data, Loader=yaml.BaseLoader)
except Exception as e:
d = {'error': '{}'.format(e)}
return json.dumps(d)
def json2yaml(data):
"""
Convert JSON to YAML (output: YAML)
"""
try:
d = json.loads(data)
except Exception as e:
d = {'error': '{}'.format(e)}
return yaml.dump(d, default_flow_style=False)
def handle(data, **parms):
if parms.get('reverse') == 'true':
print(json2yaml(data))
else:
print(yaml2json(data))
|
<commit_before># Author: Milos Buncic
# Date: 2017/10/14
# Description: Convert YAML to JSON and vice versa (OpenFaaS function)
import os
import sys
import json
import yaml
def handle(data, **parms):
def yaml2json(ydata):
"""
Convert YAML to JSON (output: JSON)
"""
try:
d = yaml.load(ydata, Loader=yaml.BaseLoader)
except Exception as e:
d = {'error': '{}'.format(e)}
return json.dumps(d)
def json2yaml(jdata):
"""
Convert JSON to YAML (output: YAML)
"""
try:
d = json.loads(jdata)
except Exception as e:
d = {'error': '{}'.format(e)}
return yaml.dump(d, default_flow_style=False)
if parms.get('reverse') == 'true':
print(json2yaml(data))
else:
print(yaml2json(data))
<commit_msg>Make handle function behave similarly to main function<commit_after>
|
# Author: Milos Buncic
# Date: 2017/10/14
# Description: Convert YAML to JSON and vice versa (OpenFaaS function)
import os
import sys
import json
import yaml
def yaml2json(data):
"""
Convert YAML to JSON (output: JSON)
"""
try:
d = yaml.load(data, Loader=yaml.BaseLoader)
except Exception as e:
d = {'error': '{}'.format(e)}
return json.dumps(d)
def json2yaml(data):
"""
Convert JSON to YAML (output: YAML)
"""
try:
d = json.loads(data)
except Exception as e:
d = {'error': '{}'.format(e)}
return yaml.dump(d, default_flow_style=False)
def handle(data, **parms):
if parms.get('reverse') == 'true':
print(json2yaml(data))
else:
print(yaml2json(data))
|
# Author: Milos Buncic
# Date: 2017/10/14
# Description: Convert YAML to JSON and vice versa (OpenFaaS function)
import os
import sys
import json
import yaml
def handle(data, **parms):
def yaml2json(ydata):
"""
Convert YAML to JSON (output: JSON)
"""
try:
d = yaml.load(ydata, Loader=yaml.BaseLoader)
except Exception as e:
d = {'error': '{}'.format(e)}
return json.dumps(d)
def json2yaml(jdata):
"""
Convert JSON to YAML (output: YAML)
"""
try:
d = json.loads(jdata)
except Exception as e:
d = {'error': '{}'.format(e)}
return yaml.dump(d, default_flow_style=False)
if parms.get('reverse') == 'true':
print(json2yaml(data))
else:
print(yaml2json(data))
Make handle function behave similarly to main function# Author: Milos Buncic
# Date: 2017/10/14
# Description: Convert YAML to JSON and vice versa (OpenFaaS function)
import os
import sys
import json
import yaml
def yaml2json(data):
"""
Convert YAML to JSON (output: JSON)
"""
try:
d = yaml.load(data, Loader=yaml.BaseLoader)
except Exception as e:
d = {'error': '{}'.format(e)}
return json.dumps(d)
def json2yaml(data):
"""
Convert JSON to YAML (output: YAML)
"""
try:
d = json.loads(data)
except Exception as e:
d = {'error': '{}'.format(e)}
return yaml.dump(d, default_flow_style=False)
def handle(data, **parms):
if parms.get('reverse') == 'true':
print(json2yaml(data))
else:
print(yaml2json(data))
|
<commit_before># Author: Milos Buncic
# Date: 2017/10/14
# Description: Convert YAML to JSON and vice versa (OpenFaaS function)
import os
import sys
import json
import yaml
def handle(data, **parms):
def yaml2json(ydata):
"""
Convert YAML to JSON (output: JSON)
"""
try:
d = yaml.load(ydata, Loader=yaml.BaseLoader)
except Exception as e:
d = {'error': '{}'.format(e)}
return json.dumps(d)
def json2yaml(jdata):
"""
Convert JSON to YAML (output: YAML)
"""
try:
d = json.loads(jdata)
except Exception as e:
d = {'error': '{}'.format(e)}
return yaml.dump(d, default_flow_style=False)
if parms.get('reverse') == 'true':
print(json2yaml(data))
else:
print(yaml2json(data))
<commit_msg>Make handle function behave similarly to main function<commit_after># Author: Milos Buncic
# Date: 2017/10/14
# Description: Convert YAML to JSON and vice versa (OpenFaaS function)
import os
import sys
import json
import yaml
def yaml2json(data):
"""
Convert YAML to JSON (output: JSON)
"""
try:
d = yaml.load(data, Loader=yaml.BaseLoader)
except Exception as e:
d = {'error': '{}'.format(e)}
return json.dumps(d)
def json2yaml(data):
"""
Convert JSON to YAML (output: YAML)
"""
try:
d = json.loads(data)
except Exception as e:
d = {'error': '{}'.format(e)}
return yaml.dump(d, default_flow_style=False)
def handle(data, **parms):
if parms.get('reverse') == 'true':
print(json2yaml(data))
else:
print(yaml2json(data))
|
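Hoisting yaml2json and json2yaml to module level also makes them importable and unit-testable without going through handle(). One behavioral detail of the converter worth knowing: yaml.BaseLoader parses every scalar as a string, so numbers and booleans survive only as text. A short sketch (assumes PyYAML is installed; it is not in the standard library):

import json
import yaml  # pip install pyyaml

def yaml2json(text):
    # BaseLoader never coerces types: '8080' stays a string and 'on' is not
    # turned into True; switch to yaml.SafeLoader if native types are wanted.
    return json.dumps(yaml.load(text, Loader=yaml.BaseLoader))

print(yaml2json('port: 8080\nenabled: on\n'))
# -> {"port": "8080", "enabled": "on"}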
bd9d08870ec3db09c41c825029c6a513ecc4d1c7
|
packs/asserts/actions/object_equals.py
|
packs/asserts/actions/object_equals.py
|
import pprint
import sys
from st2actions.runners.pythonrunner import Action
__all__ = [
'AssertObjectEquals'
]
class AssertObjectEquals(Action):
def run(self, object, expected):
ret = cmp(object, expected)
if ret == 0:
sys.stdout.write('EQUAL.')
else:
pprint.pprint('Input: \n%s' % object, stream=sys.stderr)
pprint.pprint('Expected: \n%s' % expected, stream=sys.stderr)
raise ValueError('Objects not equal. Input: %s, Expected: %s.' % (object, expected))
|
import pprint
import sys
from st2actions.runners.pythonrunner import Action
__all__ = [
'AssertObjectEquals'
]
def cmp(x, y):
return (x > y) - (x < y)
class AssertObjectEquals(Action):
def run(self, object, expected):
ret = cmp(object, expected)
if ret == 0:
sys.stdout.write('EQUAL.')
else:
pprint.pprint('Input: \n%s' % object, stream=sys.stderr)
pprint.pprint('Expected: \n%s' % expected, stream=sys.stderr)
raise ValueError('Objects not equal. Input: %s, Expected: %s.' % (object, expected))
|
Make action python 3 compatible
|
Make action python 3 compatible
|
Python
|
apache-2.0
|
StackStorm/st2tests,StackStorm/st2tests,StackStorm/st2tests
|
import pprint
import sys
from st2actions.runners.pythonrunner import Action
__all__ = [
'AssertObjectEquals'
]
class AssertObjectEquals(Action):
def run(self, object, expected):
ret = cmp(object, expected)
if ret == 0:
sys.stdout.write('EQUAL.')
else:
pprint.pprint('Input: \n%s' % object, stream=sys.stderr)
pprint.pprint('Expected: \n%s' % expected, stream=sys.stderr)
raise ValueError('Objects not equal. Input: %s, Expected: %s.' % (object, expected))
Make action python 3 compatible
|
import pprint
import sys
from st2actions.runners.pythonrunner import Action
__all__ = [
'AssertObjectEquals'
]
def cmp(x, y):
return (x > y) - (x < y)
class AssertObjectEquals(Action):
def run(self, object, expected):
ret = cmp(object, expected)
if ret == 0:
sys.stdout.write('EQUAL.')
else:
pprint.pprint('Input: \n%s' % object, stream=sys.stderr)
pprint.pprint('Expected: \n%s' % expected, stream=sys.stderr)
raise ValueError('Objects not equal. Input: %s, Expected: %s.' % (object, expected))
|
<commit_before>import pprint
import sys
from st2actions.runners.pythonrunner import Action
__all__ = [
'AssertObjectEquals'
]
class AssertObjectEquals(Action):
def run(self, object, expected):
ret = cmp(object, expected)
if ret == 0:
sys.stdout.write('EQUAL.')
else:
pprint.pprint('Input: \n%s' % object, stream=sys.stderr)
pprint.pprint('Expected: \n%s' % expected, stream=sys.stderr)
raise ValueError('Objects not equal. Input: %s, Expected: %s.' % (object, expected))
<commit_msg>Make action python 3 compatible<commit_after>
|
import pprint
import sys
from st2actions.runners.pythonrunner import Action
__all__ = [
'AssertObjectEquals'
]
def cmp(x, y):
return (x > y) - (x < y)
class AssertObjectEquals(Action):
def run(self, object, expected):
ret = cmp(object, expected)
if ret == 0:
sys.stdout.write('EQUAL.')
else:
pprint.pprint('Input: \n%s' % object, stream=sys.stderr)
pprint.pprint('Expected: \n%s' % expected, stream=sys.stderr)
raise ValueError('Objects not equal. Input: %s, Expected: %s.' % (object, expected))
|
import pprint
import sys
from st2actions.runners.pythonrunner import Action
__all__ = [
'AssertObjectEquals'
]
class AssertObjectEquals(Action):
def run(self, object, expected):
ret = cmp(object, expected)
if ret == 0:
sys.stdout.write('EQUAL.')
else:
pprint.pprint('Input: \n%s' % object, stream=sys.stderr)
pprint.pprint('Expected: \n%s' % expected, stream=sys.stderr)
raise ValueError('Objects not equal. Input: %s, Expected: %s.' % (object, expected))
Make action python 3 compatibleimport pprint
import sys
from st2actions.runners.pythonrunner import Action
__all__ = [
'AssertObjectEquals'
]
def cmp(x, y):
return (x > y) - (x < y)
class AssertObjectEquals(Action):
def run(self, object, expected):
ret = cmp(object, expected)
if ret == 0:
sys.stdout.write('EQUAL.')
else:
pprint.pprint('Input: \n%s' % object, stream=sys.stderr)
pprint.pprint('Expected: \n%s' % expected, stream=sys.stderr)
raise ValueError('Objects not equal. Input: %s, Expected: %s.' % (object, expected))
|
<commit_before>import pprint
import sys
from st2actions.runners.pythonrunner import Action
__all__ = [
'AssertObjectEquals'
]
class AssertObjectEquals(Action):
def run(self, object, expected):
ret = cmp(object, expected)
if ret == 0:
sys.stdout.write('EQUAL.')
else:
pprint.pprint('Input: \n%s' % object, stream=sys.stderr)
pprint.pprint('Expected: \n%s' % expected, stream=sys.stderr)
raise ValueError('Objects not equal. Input: %s, Expected: %s.' % (object, expected))
<commit_msg>Make action python 3 compatible<commit_after>import pprint
import sys
from st2actions.runners.pythonrunner import Action
__all__ = [
'AssertObjectEquals'
]
def cmp(x, y):
return (x > y) - (x < y)
class AssertObjectEquals(Action):
def run(self, object, expected):
ret = cmp(object, expected)
if ret == 0:
sys.stdout.write('EQUAL.')
else:
pprint.pprint('Input: \n%s' % object, stream=sys.stderr)
pprint.pprint('Expected: \n%s' % expected, stream=sys.stderr)
raise ValueError('Objects not equal. Input: %s, Expected: %s.' % (object, expected))
|
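The cmp shim above is the usual backport: Python 3 dropped the builtin, and (x > y) - (x < y) reproduces its -1/0/1 contract for orderable operands. One caveat: ordering comparisons between dicts raise TypeError in Python 3, so when an action only needs equality, a plain x == y is the safer drop-in. A runnable sketch of both points:

def cmp(x, y):
    """Python 2 style three-way compare: -1, 0 or 1."""
    return (x > y) - (x < y)

assert cmp(1, 2) == -1
assert cmp('b', 'b') == 0
assert cmp([3], [1]) == 1

try:
    cmp({'a': 1}, {'b': 2})      # dicts are unorderable in Python 3
except TypeError:
    assert {'a': 1} != {'b': 2}  # equality comparison still works fine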
22412b3f46451177286f8fc58509a69bb2d95731
|
numpy/testing/setup.py
|
numpy/testing/setup.py
|
#!/usr/bin/env python3
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('testing', parent_package, top_path)
config.add_subpackage('_private')
config.add_subpackage('tests')
config.add_data_files('*.pyi')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer="NumPy Developers",
maintainer_email="numpy-dev@numpy.org",
description="NumPy test module",
url="https://www.numpy.org",
license="NumPy License (BSD Style)",
configuration=configuration,
)
|
#!/usr/bin/env python3
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('testing', parent_package, top_path)
config.add_subpackage('_private')
config.add_subpackage('tests')
config.add_data_files('*.pyi')
config.add_data_files('_private/*.pyi')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer="NumPy Developers",
maintainer_email="numpy-dev@numpy.org",
description="NumPy test module",
url="https://www.numpy.org",
license="NumPy License (BSD Style)",
configuration=configuration,
)
|
Add support for *.pyi data-files to `np.testing._private`
|
BLD: Add support for *.pyi data-files to `np.testing._private`
|
Python
|
bsd-3-clause
|
rgommers/numpy,numpy/numpy,endolith/numpy,pdebuyl/numpy,simongibbons/numpy,simongibbons/numpy,mhvk/numpy,seberg/numpy,mattip/numpy,jakirkham/numpy,jakirkham/numpy,mattip/numpy,mhvk/numpy,charris/numpy,seberg/numpy,anntzer/numpy,rgommers/numpy,jakirkham/numpy,mhvk/numpy,charris/numpy,anntzer/numpy,pdebuyl/numpy,rgommers/numpy,anntzer/numpy,charris/numpy,jakirkham/numpy,mattip/numpy,endolith/numpy,endolith/numpy,simongibbons/numpy,endolith/numpy,numpy/numpy,mhvk/numpy,charris/numpy,mattip/numpy,jakirkham/numpy,numpy/numpy,simongibbons/numpy,pdebuyl/numpy,simongibbons/numpy,seberg/numpy,anntzer/numpy,mhvk/numpy,rgommers/numpy,numpy/numpy,pdebuyl/numpy,seberg/numpy
|
#!/usr/bin/env python3
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('testing', parent_package, top_path)
config.add_subpackage('_private')
config.add_subpackage('tests')
config.add_data_files('*.pyi')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer="NumPy Developers",
maintainer_email="numpy-dev@numpy.org",
description="NumPy test module",
url="https://www.numpy.org",
license="NumPy License (BSD Style)",
configuration=configuration,
)
BLD: Add support for *.pyi data-files to `np.testing._private`
|
#!/usr/bin/env python3
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('testing', parent_package, top_path)
config.add_subpackage('_private')
config.add_subpackage('tests')
config.add_data_files('*.pyi')
config.add_data_files('_private/*.pyi')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer="NumPy Developers",
maintainer_email="numpy-dev@numpy.org",
description="NumPy test module",
url="https://www.numpy.org",
license="NumPy License (BSD Style)",
configuration=configuration,
)
|
<commit_before>#!/usr/bin/env python3
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('testing', parent_package, top_path)
config.add_subpackage('_private')
config.add_subpackage('tests')
config.add_data_files('*.pyi')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer="NumPy Developers",
maintainer_email="numpy-dev@numpy.org",
description="NumPy test module",
url="https://www.numpy.org",
license="NumPy License (BSD Style)",
configuration=configuration,
)
<commit_msg>BLD: Add support for *.pyi data-files to `np.testing._private`<commit_after>
|
#!/usr/bin/env python3
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('testing', parent_package, top_path)
config.add_subpackage('_private')
config.add_subpackage('tests')
config.add_data_files('*.pyi')
config.add_data_files('_private/*.pyi')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer="NumPy Developers",
maintainer_email="numpy-dev@numpy.org",
description="NumPy test module",
url="https://www.numpy.org",
license="NumPy License (BSD Style)",
configuration=configuration,
)
|
#!/usr/bin/env python3
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('testing', parent_package, top_path)
config.add_subpackage('_private')
config.add_subpackage('tests')
config.add_data_files('*.pyi')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer="NumPy Developers",
maintainer_email="numpy-dev@numpy.org",
description="NumPy test module",
url="https://www.numpy.org",
license="NumPy License (BSD Style)",
configuration=configuration,
)
BLD: Add support for *.pyi data-files to `np.testing._private`#!/usr/bin/env python3
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('testing', parent_package, top_path)
config.add_subpackage('_private')
config.add_subpackage('tests')
config.add_data_files('*.pyi')
config.add_data_files('_private/*.pyi')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer="NumPy Developers",
maintainer_email="numpy-dev@numpy.org",
description="NumPy test module",
url="https://www.numpy.org",
license="NumPy License (BSD Style)",
configuration=configuration,
)
|
<commit_before>#!/usr/bin/env python3
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('testing', parent_package, top_path)
config.add_subpackage('_private')
config.add_subpackage('tests')
config.add_data_files('*.pyi')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer="NumPy Developers",
maintainer_email="numpy-dev@numpy.org",
description="NumPy test module",
url="https://www.numpy.org",
license="NumPy License (BSD Style)",
configuration=configuration,
)
<commit_msg>BLD: Add support for *.pyi data-files to `np.testing._private`<commit_after>#!/usr/bin/env python3
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('testing', parent_package, top_path)
config.add_subpackage('_private')
config.add_subpackage('tests')
config.add_data_files('*.pyi')
config.add_data_files('_private/*.pyi')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer="NumPy Developers",
maintainer_email="numpy-dev@numpy.org",
description="NumPy test module",
url="https://www.numpy.org",
license="NumPy License (BSD Style)",
configuration=configuration,
)
|
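The same fix pattern applies to any package shipping type stubs: a glob in add_data_files only matches the directory it names, so every subpackage with .pyi files needs its own entry. Outside numpy.distutils (which is deprecated in favor of plain build backends), the setuptools equivalent is package_data; a sketch with hypothetical package names:

from setuptools import setup, find_packages

setup(
    name='example-pkg',                      # hypothetical
    packages=find_packages(),
    package_data={
        'example': ['*.pyi', 'py.typed'],    # stubs for the top package...
        'example._private': ['*.pyi'],       # ...and for each subpackage
    },
)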
3c2663d4c8ca523d072b6e82bf872f412aba9321
|
mrgeo-python/src/main/python/pymrgeo/rastermapop.py
|
mrgeo-python/src/main/python/pymrgeo/rastermapop.py
|
import copy
import json
from py4j.java_gateway import JavaClass, java_import
from pymrgeo.instance import is_instance_of as iio
class RasterMapOp(object):
mapop = None
gateway = None
context = None
job = None
def __init__(self, gateway=None, context=None, mapop=None, job=None):
self.gateway = gateway
self.context = context
self.mapop = mapop
self.job = job
@staticmethod
def nan():
return float('nan')
def clone(self):
return copy.copy(self)
def is_instance_of(self, java_object, java_class):
return iio(self.gateway, java_object, java_class)
def metadata(self):
if self.mapop is None:
return None
jvm = self.gateway.jvm
java_import(jvm, "org.mrgeo.mapalgebra.raster.RasterMapOp")
java_import(jvm, "org.mrgeo.image.MrsPyramidMetadata")
meta = self.mapop.metadata().getOrElse(None)
if meta is None:
return None
java_import(jvm, "com.fasterxml.jackson.databind.ObjectMapper")
mapper = jvm.com.fasterxml.jackson.databind.ObjectMapper()
jsonstr = mapper.writeValueAsString(meta)
# print(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(meta))
return json.loads(jsonstr)
|
import copy
import json
from py4j.java_gateway import java_import
from pymrgeo.instance import is_instance_of as iio
class RasterMapOp(object):
mapop = None
gateway = None
context = None
job = None
def __init__(self, gateway=None, context=None, mapop=None, job=None):
self.gateway = gateway
self.context = context
self.mapop = mapop
self.job = job
@staticmethod
def nan():
return float('nan')
def clone(self):
return copy.copy(self)
def is_instance_of(self, java_object, java_class):
return iio(self.gateway, java_object, java_class)
def metadata(self):
if self.mapop is None:
return None
jvm = self.gateway.jvm
java_import(jvm, "org.mrgeo.mapalgebra.raster.RasterMapOp")
java_import(jvm, "org.mrgeo.image.MrsPyramidMetadata")
if self.mapop.metadata().isEmpty():
return None
meta = self.mapop.metadata().get()
java_import(jvm, "com.fasterxml.jackson.databind.ObjectMapper")
mapper = jvm.com.fasterxml.jackson.databind.ObjectMapper()
jsonstr = mapper.writeValueAsString(meta)
# print(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(meta))
return json.loads(jsonstr)
|
Implement empty metadata a little differently
|
Implement empty metadata a little differently
|
Python
|
apache-2.0
|
ngageoint/mrgeo,ngageoint/mrgeo,ngageoint/mrgeo
|
import copy
import json
from py4j.java_gateway import JavaClass, java_import
from pymrgeo.instance import is_instance_of as iio
class RasterMapOp(object):
mapop = None
gateway = None
context = None
job = None
def __init__(self, gateway=None, context=None, mapop=None, job=None):
self.gateway = gateway
self.context = context
self.mapop = mapop
self.job = job
@staticmethod
def nan():
return float('nan')
def clone(self):
return copy.copy(self)
def is_instance_of(self, java_object, java_class):
return iio(self.gateway, java_object, java_class)
def metadata(self):
if self.mapop is None:
return None
jvm = self.gateway.jvm
java_import(jvm, "org.mrgeo.mapalgebra.raster.RasterMapOp")
java_import(jvm, "org.mrgeo.image.MrsPyramidMetadata")
meta = self.mapop.metadata().getOrElse(None)
if meta is None:
return None
java_import(jvm, "com.fasterxml.jackson.databind.ObjectMapper")
mapper = jvm.com.fasterxml.jackson.databind.ObjectMapper()
jsonstr = mapper.writeValueAsString(meta)
# print(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(meta))
return json.loads(jsonstr)
Implement empty metadata a little differently
|
import copy
import json
from py4j.java_gateway import java_import
from pymrgeo.instance import is_instance_of as iio
class RasterMapOp(object):
mapop = None
gateway = None
context = None
job = None
def __init__(self, gateway=None, context=None, mapop=None, job=None):
self.gateway = gateway
self.context = context
self.mapop = mapop
self.job = job
@staticmethod
def nan():
return float('nan')
def clone(self):
return copy.copy(self)
def is_instance_of(self, java_object, java_class):
return iio(self.gateway, java_object, java_class)
def metadata(self):
if self.mapop is None:
return None
jvm = self.gateway.jvm
java_import(jvm, "org.mrgeo.mapalgebra.raster.RasterMapOp")
java_import(jvm, "org.mrgeo.image.MrsPyramidMetadata")
if self.mapop.metadata().isEmpty():
return None
meta = self.mapop.metadata().get()
java_import(jvm, "com.fasterxml.jackson.databind.ObjectMapper")
mapper = jvm.com.fasterxml.jackson.databind.ObjectMapper()
jsonstr = mapper.writeValueAsString(meta)
# print(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(meta))
return json.loads(jsonstr)
|
<commit_before>import copy
import json
from py4j.java_gateway import JavaClass, java_import
from pymrgeo.instance import is_instance_of as iio
class RasterMapOp(object):
mapop = None
gateway = None
context = None
job = None
def __init__(self, gateway=None, context=None, mapop=None, job=None):
self.gateway = gateway
self.context = context
self.mapop = mapop
self.job = job
@staticmethod
def nan():
return float('nan')
def clone(self):
return copy.copy(self)
def is_instance_of(self, java_object, java_class):
return iio(self.gateway, java_object, java_class)
def metadata(self):
if self.mapop is None:
return None
jvm = self.gateway.jvm
java_import(jvm, "org.mrgeo.mapalgebra.raster.RasterMapOp")
java_import(jvm, "org.mrgeo.image.MrsPyramidMetadata")
meta = self.mapop.metadata().getOrElse(None)
if meta is None:
return None
java_import(jvm, "com.fasterxml.jackson.databind.ObjectMapper")
mapper = jvm.com.fasterxml.jackson.databind.ObjectMapper()
jsonstr = mapper.writeValueAsString(meta)
# print(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(meta))
return json.loads(jsonstr)
<commit_msg>Implement empty metadata a little differently<commit_after>
|
import copy
import json
from py4j.java_gateway import java_import
from pymrgeo.instance import is_instance_of as iio
class RasterMapOp(object):
mapop = None
gateway = None
context = None
job = None
def __init__(self, gateway=None, context=None, mapop=None, job=None):
self.gateway = gateway
self.context = context
self.mapop = mapop
self.job = job
@staticmethod
def nan():
return float('nan')
def clone(self):
return copy.copy(self)
def is_instance_of(self, java_object, java_class):
return iio(self.gateway, java_object, java_class)
def metadata(self):
if self.mapop is None:
return None
jvm = self.gateway.jvm
java_import(jvm, "org.mrgeo.mapalgebra.raster.RasterMapOp")
java_import(jvm, "org.mrgeo.image.MrsPyramidMetadata")
if self.mapop.metadata().isEmpty():
return None
meta = self.mapop.metadata().get()
java_import(jvm, "com.fasterxml.jackson.databind.ObjectMapper")
mapper = jvm.com.fasterxml.jackson.databind.ObjectMapper()
jsonstr = mapper.writeValueAsString(meta)
# print(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(meta))
return json.loads(jsonstr)
|
import copy
import json
from py4j.java_gateway import JavaClass, java_import
from pymrgeo.instance import is_instance_of as iio
class RasterMapOp(object):
mapop = None
gateway = None
context = None
job = None
def __init__(self, gateway=None, context=None, mapop=None, job=None):
self.gateway = gateway
self.context = context
self.mapop = mapop
self.job = job
@staticmethod
def nan():
return float('nan')
def clone(self):
return copy.copy(self)
def is_instance_of(self, java_object, java_class):
return iio(self.gateway, java_object, java_class)
def metadata(self):
if self.mapop is None:
return None
jvm = self.gateway.jvm
java_import(jvm, "org.mrgeo.mapalgebra.raster.RasterMapOp")
java_import(jvm, "org.mrgeo.image.MrsPyramidMetadata")
meta = self.mapop.metadata().getOrElse(None)
if meta is None:
return None
java_import(jvm, "com.fasterxml.jackson.databind.ObjectMapper")
mapper = jvm.com.fasterxml.jackson.databind.ObjectMapper()
jsonstr = mapper.writeValueAsString(meta)
# print(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(meta))
return json.loads(jsonstr)
Implement empty metadata a little differentlyimport copy
import json
from py4j.java_gateway import java_import
from pymrgeo.instance import is_instance_of as iio
class RasterMapOp(object):
mapop = None
gateway = None
context = None
job = None
def __init__(self, gateway=None, context=None, mapop=None, job=None):
self.gateway = gateway
self.context = context
self.mapop = mapop
self.job = job
@staticmethod
def nan():
return float('nan')
def clone(self):
return copy.copy(self)
def is_instance_of(self, java_object, java_class):
return iio(self.gateway, java_object, java_class)
def metadata(self):
if self.mapop is None:
return None
jvm = self.gateway.jvm
java_import(jvm, "org.mrgeo.mapalgebra.raster.RasterMapOp")
java_import(jvm, "org.mrgeo.image.MrsPyramidMetadata")
if self.mapop.metadata().isEmpty():
return None
meta = self.mapop.metadata().get()
java_import(jvm, "com.fasterxml.jackson.databind.ObjectMapper")
mapper = jvm.com.fasterxml.jackson.databind.ObjectMapper()
jsonstr = mapper.writeValueAsString(meta)
# print(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(meta))
return json.loads(jsonstr)
|
<commit_before>import copy
import json
from py4j.java_gateway import JavaClass, java_import
from pymrgeo.instance import is_instance_of as iio
class RasterMapOp(object):
mapop = None
gateway = None
context = None
job = None
def __init__(self, gateway=None, context=None, mapop=None, job=None):
self.gateway = gateway
self.context = context
self.mapop = mapop
self.job = job
@staticmethod
def nan():
return float('nan')
def clone(self):
return copy.copy(self)
def is_instance_of(self, java_object, java_class):
return iio(self.gateway, java_object, java_class)
def metadata(self):
if self.mapop is None:
return None
jvm = self.gateway.jvm
java_import(jvm, "org.mrgeo.mapalgebra.raster.RasterMapOp")
java_import(jvm, "org.mrgeo.image.MrsPyramidMetadata")
meta = self.mapop.metadata().getOrElse(None)
if meta is None:
return None
java_import(jvm, "com.fasterxml.jackson.databind.ObjectMapper")
mapper = jvm.com.fasterxml.jackson.databind.ObjectMapper()
jsonstr = mapper.writeValueAsString(meta)
# print(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(meta))
return json.loads(jsonstr)
<commit_msg>Implement empty metadata a little differently<commit_after>import copy
import json
from py4j.java_gateway import java_import
from pymrgeo.instance import is_instance_of as iio
class RasterMapOp(object):
mapop = None
gateway = None
context = None
job = None
def __init__(self, gateway=None, context=None, mapop=None, job=None):
self.gateway = gateway
self.context = context
self.mapop = mapop
self.job = job
@staticmethod
def nan():
return float('nan')
def clone(self):
return copy.copy(self)
def is_instance_of(self, java_object, java_class):
return iio(self.gateway, java_object, java_class)
def metadata(self):
if self.mapop is None:
return None
jvm = self.gateway.jvm
java_import(jvm, "org.mrgeo.mapalgebra.raster.RasterMapOp")
java_import(jvm, "org.mrgeo.image.MrsPyramidMetadata")
if self.mapop.metadata().isEmpty():
return None
meta = self.mapop.metadata().get()
java_import(jvm, "com.fasterxml.jackson.databind.ObjectMapper")
mapper = jvm.com.fasterxml.jackson.databind.ObjectMapper()
jsonstr = mapper.writeValueAsString(meta)
# print(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(meta))
return json.loads(jsonstr)
|
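The rewrite sidesteps a py4j translation problem: Scala's Option.getOrElse takes a by-name default (compiled to a Function0 on the JVM side), which a bare Python None most likely cannot stand in for, while isEmpty() and get() cross the gateway with no arguments at all. A self-contained sketch of the check-then-get pattern (FakeOption is a test double, not a py4j class):

def option_to_python(opt):
    """Unwrap a Scala-style Option, mapping the empty case to None."""
    if opt is None or opt.isEmpty():
        return None
    return opt.get()

class FakeOption:
    def __init__(self, value=None, empty=False):
        self._value, self._empty = value, empty
    def isEmpty(self):
        return self._empty
    def get(self):
        return self._value

assert option_to_python(FakeOption(empty=True)) is None
assert option_to_python(FakeOption(value=42)) == 42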
b56446e0dc7c0c0fe45557461dd4fffa7f1da8d5
|
django_project/setup.py
|
django_project/setup.py
|
# coding=utf-8
"""Setup file for distutils / pypi."""
try:
from ez_setup import use_setuptools
use_setuptools()
except ImportError:
pass
from setuptools import setup, find_packages
setup(
name='django-wms-client',
version='0.1.1',
author='Tim Sutton',
author_email='tim@kartoza.com',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
scripts=[],
url='http://pypi.python.org/pypi/django-wms-client/',
license='../LICENSE.txt',
description=(
'An app to let you include browsable OGC WMS '
'maps on your django web site.'),
long_description=open('README.md').read(),
install_requires=[
"Django==1.7",
"django-leaflet==0.14.1",
"psycopg2==2.5.4",
"factory-boy==2.4.1",
],
test_suite='wms_client.run_tests.run',
)
|
# coding=utf-8
"""Setup file for distutils / pypi."""
try:
from ez_setup import use_setuptools
use_setuptools()
except ImportError:
pass
from setuptools import setup, find_packages
setup(
name='django-wms-client',
version='0.1.1',
author='Tim Sutton',
author_email='tim@kartoza.com',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
scripts=[],
url='http://pypi.python.org/pypi/django-wms-client/',
license='../LICENSE.txt',
description=(
'An app to let you include browsable OGC WMS '
'maps on your django web site.'),
long_description=open('README.md').read(),
install_requires=[
"Django",
"django-leaflet",
"psycopg2",
"factory-boy",
],
test_suite='wms_client.run_tests.run',
)
|
Change requirements to look for abstract (latest) versions.
|
Change requirements to look for abstract (latest) versions.
|
Python
|
bsd-2-clause
|
kartoza/django-wms-client,kartoza/django-wms-client,kartoza/django-wms-client,kartoza/django-wms-client
|
# coding=utf-8
"""Setup file for distutils / pypi."""
try:
from ez_setup import use_setuptools
use_setuptools()
except ImportError:
pass
from setuptools import setup, find_packages
setup(
name='django-wms-client',
version='0.1.1',
author='Tim Sutton',
author_email='tim@kartoza.com',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
scripts=[],
url='http://pypi.python.org/pypi/django-wms-client/',
license='../LICENSE.txt',
description=(
'An app to let you include browsable OGC WMS '
'maps on your django web site.'),
long_description=open('README.md').read(),
install_requires=[
"Django==1.7",
"django-leaflet==0.14.1",
"psycopg2==2.5.4",
"factory-boy==2.4.1",
],
test_suite='wms_client.run_tests.run',
)
Change requirements to look for abstract (latest) versions.
|
# coding=utf-8
"""Setup file for distutils / pypi."""
try:
from ez_setup import use_setuptools
use_setuptools()
except ImportError:
pass
from setuptools import setup, find_packages
setup(
name='django-wms-client',
version='0.1.1',
author='Tim Sutton',
author_email='tim@kartoza.com',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
scripts=[],
url='http://pypi.python.org/pypi/django-wms-client/',
license='../LICENSE.txt',
description=(
'An app to let you include browsable OGC WMS '
'maps on your django web site.'),
long_description=open('README.md').read(),
install_requires=[
"Django",
"django-leaflet",
"psycopg2",
"factory-boy",
],
test_suite='wms_client.run_tests.run',
)
|
<commit_before># coding=utf-8
"""Setup file for distutils / pypi."""
try:
from ez_setup import use_setuptools
use_setuptools()
except ImportError:
pass
from setuptools import setup, find_packages
setup(
name='django-wms-client',
version='0.1.1',
author='Tim Sutton',
author_email='tim@kartoza.com',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
scripts=[],
url='http://pypi.python.org/pypi/django-wms-client/',
license='../LICENSE.txt',
description=(
'An app to let you include browsable OGC WMS '
'maps on your django web site.'),
long_description=open('README.md').read(),
install_requires=[
"Django==1.7",
"django-leaflet==0.14.1",
"psycopg2==2.5.4",
"factory-boy==2.4.1",
],
test_suite='wms_client.run_tests.run',
)
<commit_msg>Change requirements to look for abstract (latest) versions.<commit_after>
|
# coding=utf-8
"""Setup file for distutils / pypi."""
try:
from ez_setup import use_setuptools
use_setuptools()
except ImportError:
pass
from setuptools import setup, find_packages
setup(
name='django-wms-client',
version='0.1.1',
author='Tim Sutton',
author_email='tim@kartoza.com',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
scripts=[],
url='http://pypi.python.org/pypi/django-wms-client/',
license='../LICENSE.txt',
description=(
'An app to let you include browsable OGC WMS '
'maps on your django web site.'),
long_description=open('README.md').read(),
install_requires=[
"Django",
"django-leaflet",
"psycopg2",
"factory-boy",
],
test_suite='wms_client.run_tests.run',
)
|
# coding=utf-8
"""Setup file for distutils / pypi."""
try:
from ez_setup import use_setuptools
use_setuptools()
except ImportError:
pass
from setuptools import setup, find_packages
setup(
name='django-wms-client',
version='0.1.1',
author='Tim Sutton',
author_email='tim@kartoza.com',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
scripts=[],
url='http://pypi.python.org/pypi/django-wms-client/',
license='../LICENSE.txt',
description=(
'An app to let you include browsable OGC WMS '
'maps on your django web site.'),
long_description=open('README.md').read(),
install_requires=[
"Django==1.7",
"django-leaflet==0.14.1",
"psycopg2==2.5.4",
"factory-boy==2.4.1",
],
test_suite='wms_client.run_tests.run',
)
Change requirements to look for abstract (latest) versions.# coding=utf-8
"""Setup file for distutils / pypi."""
try:
from ez_setup import use_setuptools
use_setuptools()
except ImportError:
pass
from setuptools import setup, find_packages
setup(
name='django-wms-client',
version='0.1.1',
author='Tim Sutton',
author_email='tim@kartoza.com',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
scripts=[],
url='http://pypi.python.org/pypi/django-wms-client/',
license='../LICENSE.txt',
description=(
'An app to let you include browsable OGC WMS '
'maps on your django web site.'),
long_description=open('README.md').read(),
install_requires=[
"Django",
"django-leaflet",
"psycopg2",
"factory-boy",
],
test_suite='wms_client.run_tests.run',
)
|
<commit_before># coding=utf-8
"""Setup file for distutils / pypi."""
try:
from ez_setup import use_setuptools
use_setuptools()
except ImportError:
pass
from setuptools import setup, find_packages
setup(
name='django-wms-client',
version='0.1.1',
author='Tim Sutton',
author_email='tim@kartoza.com',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
scripts=[],
url='http://pypi.python.org/pypi/django-wms-client/',
license='../LICENSE.txt',
description=(
'An app to let you include browsable OGC WMS '
'maps on your django web site.'),
long_description=open('README.md').read(),
install_requires=[
"Django==1.7",
"django-leaflet==0.14.1",
"psycopg2==2.5.4",
"factory-boy==2.4.1",
],
test_suite='wms_client.run_tests.run',
)
<commit_msg>Change requirements to look for abstract (latest) versions.<commit_after># coding=utf-8
"""Setup file for distutils / pypi."""
try:
from ez_setup import use_setuptools
use_setuptools()
except ImportError:
pass
from setuptools import setup, find_packages
setup(
name='django-wms-client',
version='0.1.1',
author='Tim Sutton',
author_email='tim@kartoza.com',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
scripts=[],
url='http://pypi.python.org/pypi/django-wms-client/',
license='../LICENSE.txt',
description=(
'An app to let you include browsable OGC WMS '
'maps on your django web site.'),
long_description=open('README.md').read(),
install_requires=[
"Django",
"django-leaflet",
"psycopg2",
"factory-boy",
],
test_suite='wms_client.run_tests.run',
)
|
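Dropping the pins trades reproducibility for compatibility: hard == pins in install_requires make a library impossible to co-install with anything that needs a newer Django, while fully abstract names can pull in releases that break the app. A common middle ground is lower bounds in setup.py plus exact pins in a deployment requirements file; a sketch:

# setup.py: declare what the library is known to work with
install_requires = [
    'Django>=1.7',      # lower bound only; add an upper bound only for
    'django-leaflet',   # versions known to be incompatible
    'psycopg2',
    'factory-boy',
]

# requirements.txt (deployment): freeze the exact, tested versions, e.g.
#   Django==1.7
#   django-leaflet==0.14.1
#   psycopg2==2.5.4
#   factory-boy==2.4.1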
a78f56e5c4dedc4148ff3503a05705a8d343b638
|
qmpy/web/views/analysis/calculation.py
|
qmpy/web/views/analysis/calculation.py
|
from django.shortcuts import render_to_response
from django.template import RequestContext
import os.path
from qmpy.models import Calculation
from ..tools import get_globals
from bokeh.embed import components
def calculation_view(request, calculation_id):
calculation = Calculation.objects.get(pk=calculation_id)
data = get_globals()
data['calculation'] = calculation
data['stdout'] = ''
data['stderr'] = ''
if os.path.exists(calculation.path+'/stdout.txt'):
data['stdout'] = open(calculation.path+'/stdout.txt').read()
if os.path.exists(calculation.path+'/stderr.txt'):
data['stderr'] = open(calculation.path+'/stderr.txt').read()
#if not calculation.dos is None:
# data['dos'] = calculation.dos.plot.get_flot_script()
if not calculation.dos is None:
script, div = components(calculation.dos.bokeh_plot)
data['dos'] = script
data['dosdiv'] = div
## Get exact INCAR settings from INCAR file
data['incar'] = ''.join(calculation.read_incar())
return render_to_response('analysis/calculation.html',
data, RequestContext(request))
|
from django.shortcuts import render_to_response
from django.template import RequestContext
import os
from qmpy.models import Calculation
from ..tools import get_globals
from bokeh.embed import components
def calculation_view(request, calculation_id):
calculation = Calculation.objects.get(pk=calculation_id)
data = get_globals()
data['calculation'] = calculation
data['stdout'] = ''
data['stderr'] = ''
if os.path.exists(os.path.join(calculation.path, 'stdout.txt')):
with open(os.path.join(calculation.path, 'stdout.txt')) as fr:
data['stdout'] = fr.read()
if os.path.exists(os.path.join(calculation.path, 'stderr.txt')):
with open(os.path.join(calculation.path, 'stderr.txt')) as fr:
data['stderr'] = fr.read()
try:
data['incar'] = ''.join(calculation.read_incar())
except FileNotFoundError:
data['incar'] = 'Could not read INCAR'
if not calculation.dos is None:
script, div = components(calculation.dos.bokeh_plot)
data['dos'] = script
data['dosdiv'] = div
return render_to_response('analysis/calculation.html',
data, RequestContext(request))
|
Handle missing INCAR files gracefully
|
Handle missing INCAR files gracefully
|
Python
|
mit
|
wolverton-research-group/qmpy,wolverton-research-group/qmpy,wolverton-research-group/qmpy,wolverton-research-group/qmpy,wolverton-research-group/qmpy
|
from django.shortcuts import render_to_response
from django.template import RequestContext
import os.path
from qmpy.models import Calculation
from ..tools import get_globals
from bokeh.embed import components
def calculation_view(request, calculation_id):
calculation = Calculation.objects.get(pk=calculation_id)
data = get_globals()
data['calculation'] = calculation
data['stdout'] = ''
data['stderr'] = ''
if os.path.exists(calculation.path+'/stdout.txt'):
data['stdout'] = open(calculation.path+'/stdout.txt').read()
if os.path.exists(calculation.path+'/stderr.txt'):
data['stderr'] = open(calculation.path+'/stderr.txt').read()
#if not calculation.dos is None:
# data['dos'] = calculation.dos.plot.get_flot_script()
if not calculation.dos is None:
script, div = components(calculation.dos.bokeh_plot)
data['dos'] = script
data['dosdiv'] = div
## Get exact INCAR settings from INCAR file
data['incar'] = ''.join(calculation.read_incar())
return render_to_response('analysis/calculation.html',
data, RequestContext(request))
Handle missing INCAR files gracefully
|
from django.shortcuts import render_to_response
from django.template import RequestContext
import os
from qmpy.models import Calculation
from ..tools import get_globals
from bokeh.embed import components
def calculation_view(request, calculation_id):
calculation = Calculation.objects.get(pk=calculation_id)
data = get_globals()
data['calculation'] = calculation
data['stdout'] = ''
data['stderr'] = ''
if os.path.exists(os.path.join(calculation.path, 'stdout.txt')):
with open(os.path.join(calculation.path, 'stdout.txt')) as fr:
data['stdout'] = fr.read()
if os.path.exists(os.path.join(calculation.path, 'stderr.txt')):
with open(os.path.join(calculation.path, 'stderr.txt')) as fr:
data['stderr'] = fr.read()
try:
data['incar'] = ''.join(calculation.read_incar())
except FileNotFoundError:
data['incar'] = 'Could not read INCAR'
if not calculation.dos is None:
script, div = components(calculation.dos.bokeh_plot)
data['dos'] = script
data['dosdiv'] = div
return render_to_response('analysis/calculation.html',
data, RequestContext(request))
|
<commit_before>from django.shortcuts import render_to_response
from django.template import RequestContext
import os.path
from qmpy.models import Calculation
from ..tools import get_globals
from bokeh.embed import components
def calculation_view(request, calculation_id):
calculation = Calculation.objects.get(pk=calculation_id)
data = get_globals()
data['calculation'] = calculation
data['stdout'] = ''
data['stderr'] = ''
if os.path.exists(calculation.path+'/stdout.txt'):
data['stdout'] = open(calculation.path+'/stdout.txt').read()
if os.path.exists(calculation.path+'/stderr.txt'):
data['stderr'] = open(calculation.path+'/stderr.txt').read()
#if not calculation.dos is None:
# data['dos'] = calculation.dos.plot.get_flot_script()
if not calculation.dos is None:
script, div = components(calculation.dos.bokeh_plot)
data['dos'] = script
data['dosdiv'] = div
## Get exact INCAR settings from INCAR file
data['incar'] = ''.join(calculation.read_incar())
return render_to_response('analysis/calculation.html',
data, RequestContext(request))
<commit_msg>Handle missing INCAR files gracefully<commit_after>
|
from django.shortcuts import render_to_response
from django.template import RequestContext
import os
from qmpy.models import Calculation
from ..tools import get_globals
from bokeh.embed import components
def calculation_view(request, calculation_id):
calculation = Calculation.objects.get(pk=calculation_id)
data = get_globals()
data['calculation'] = calculation
data['stdout'] = ''
data['stderr'] = ''
if os.path.exists(os.path.join(calculation.path, 'stdout.txt')):
with open(os.path.join(calculation.path, 'stdout.txt')) as fr:
data['stdout'] = fr.read()
if os.path.exists(os.path.join(calculation.path, 'stderr.txt')):
with open(os.path.join(calculation.path, 'stderr.txt')) as fr:
data['stderr'] = fr.read()
try:
data['incar'] = ''.join(calculation.read_incar())
except FileNotFoundError:
data['incar'] = 'Could not read INCAR'
if not calculation.dos is None:
script, div = components(calculation.dos.bokeh_plot)
data['dos'] = script
data['dosdiv'] = div
return render_to_response('analysis/calculation.html',
data, RequestContext(request))
|
from django.shortcuts import render_to_response
from django.template import RequestContext
import os.path
from qmpy.models import Calculation
from ..tools import get_globals
from bokeh.embed import components
def calculation_view(request, calculation_id):
calculation = Calculation.objects.get(pk=calculation_id)
data = get_globals()
data['calculation'] = calculation
data['stdout'] = ''
data['stderr'] = ''
if os.path.exists(calculation.path+'/stdout.txt'):
data['stdout'] = open(calculation.path+'/stdout.txt').read()
if os.path.exists(calculation.path+'/stderr.txt'):
data['stderr'] = open(calculation.path+'/stderr.txt').read()
#if not calculation.dos is None:
# data['dos'] = calculation.dos.plot.get_flot_script()
if not calculation.dos is None:
script, div = components(calculation.dos.bokeh_plot)
data['dos'] = script
data['dosdiv'] = div
## Get exact INCAR settings from INCAR file
data['incar'] = ''.join(calculation.read_incar())
return render_to_response('analysis/calculation.html',
data, RequestContext(request))
Handle missing INCAR files gracefully
from django.shortcuts import render_to_response
from django.template import RequestContext
import os
from qmpy.models import Calculation
from ..tools import get_globals
from bokeh.embed import components
def calculation_view(request, calculation_id):
calculation = Calculation.objects.get(pk=calculation_id)
data = get_globals()
data['calculation'] = calculation
data['stdout'] = ''
data['stderr'] = ''
if os.path.exists(os.path.join(calculation.path, 'stdout.txt')):
with open(os.path.join(calculation.path, 'stdout.txt')) as fr:
data['stdout'] = fr.read()
if os.path.exists(os.path.join(calculation.path, 'stderr.txt')):
with open(os.path.join(calculation.path, 'stderr.txt')) as fr:
data['stderr'] = fr.read()
try:
data['incar'] = ''.join(calculation.read_incar())
except FileNotFoundError:
data['incar'] = 'Could not read INCAR'
if not calculation.dos is None:
script, div = components(calculation.dos.bokeh_plot)
data['dos'] = script
data['dosdiv'] = div
return render_to_response('analysis/calculation.html',
data, RequestContext(request))
|
<commit_before>from django.shortcuts import render_to_response
from django.template import RequestContext
import os.path
from qmpy.models import Calculation
from ..tools import get_globals
from bokeh.embed import components
def calculation_view(request, calculation_id):
calculation = Calculation.objects.get(pk=calculation_id)
data = get_globals()
data['calculation'] = calculation
data['stdout'] = ''
data['stderr'] = ''
if os.path.exists(calculation.path+'/stdout.txt'):
data['stdout'] = open(calculation.path+'/stdout.txt').read()
if os.path.exists(calculation.path+'/stderr.txt'):
data['stderr'] = open(calculation.path+'/stderr.txt').read()
#if not calculation.dos is None:
# data['dos'] = calculation.dos.plot.get_flot_script()
if not calculation.dos is None:
script, div = components(calculation.dos.bokeh_plot)
data['dos'] = script
data['dosdiv'] = div
## Get exact INCAR settings from INCAR file
data['incar'] = ''.join(calculation.read_incar())
return render_to_response('analysis/calculation.html',
data, RequestContext(request))
<commit_msg>Handle missing INCAR files gracefully<commit_after>from django.shortcuts import render_to_response
from django.template import RequestContext
import os
from qmpy.models import Calculation
from ..tools import get_globals
from bokeh.embed import components
def calculation_view(request, calculation_id):
calculation = Calculation.objects.get(pk=calculation_id)
data = get_globals()
data['calculation'] = calculation
data['stdout'] = ''
data['stderr'] = ''
if os.path.exists(os.path.join(calculation.path, 'stdout.txt')):
with open(os.path.join(calculation.path, 'stdout.txt')) as fr:
data['stdout'] = fr.read()
if os.path.exists(os.path.join(calculation.path, 'stderr.txt')):
with open(os.path.join(calculation.path, 'stderr.txt')) as fr:
data['stderr'] = fr.read()
try:
data['incar'] = ''.join(calculation.read_incar())
except FileNotFoundError:
data['incar'] = 'Could not read INCAR'
if not calculation.dos is None:
script, div = components(calculation.dos.bokeh_plot)
data['dos'] = script
data['dosdiv'] = div
return render_to_response('analysis/calculation.html',
data, RequestContext(request))
|
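The pattern in this record, wrapping each file read in try/except FileNotFoundError and substituting a placeholder string, generalizes to any optional artifact a view wants to display. A minimal sketch of a reusable helper (the read_text_or name is illustrative, not part of qmpy):

def read_text_or(path, fallback=''):
    """Return the contents of path, or fallback if the file is missing."""
    try:
        with open(path) as fh:
            return fh.read()
    except FileNotFoundError:
        return fallback

# Hypothetical usage mirroring the view above:
# data['stdout'] = read_text_or(calc_path + '/stdout.txt')
# data['incar'] = read_text_or(calc_path + '/INCAR', 'Could not read INCAR')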
a64f8aaa2822ccd280f252e0937be5027d5ec012
|
censusreporter/config/prod/settings.py
|
censusreporter/config/prod/settings.py
|
from censusreporter.config.base.settings import *
import os
DEBUG = False
ROOT_URLCONF = 'censusreporter.config.prod.urls'
WSGI_APPLICATION = "censusreporter.config.prod.wsgi.application"
ALLOWED_HOSTS = [
'censusreporter.org',
'www.censusreporter.org',
'censusreporter.dokku.censusreporter.org',
]
CACHES = {
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': os.environ.get('REDIS_URL'),
}
}
|
from censusreporter.config.base.settings import *
import os
DEBUG = False
ROOT_URLCONF = 'censusreporter.config.prod.urls'
WSGI_APPLICATION = "censusreporter.config.prod.wsgi.application"
ALLOWED_HOSTS = [
'censusreporter.org',
'www.censusreporter.org',
'censusreporter.dokku.censusreporter.org',
]
CACHES = {
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': os.environ.get('REDIS_URL', ''),
}
}
|
Fix Dockerfile so it can cache requirements
|
Fix Dockerfile so it can cache requirements
|
Python
|
mit
|
censusreporter/censusreporter,censusreporter/censusreporter,censusreporter/censusreporter,censusreporter/censusreporter
|
from censusreporter.config.base.settings import *
import os
DEBUG = False
ROOT_URLCONF = 'censusreporter.config.prod.urls'
WSGI_APPLICATION = "censusreporter.config.prod.wsgi.application"
ALLOWED_HOSTS = [
'censusreporter.org',
'www.censusreporter.org',
'censusreporter.dokku.censusreporter.org',
]
CACHES = {
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': os.environ.get('REDIS_URL'),
}
}
Fix Dockerfile so it can cache requirements
|
from censusreporter.config.base.settings import *
import os
DEBUG = False
ROOT_URLCONF = 'censusreporter.config.prod.urls'
WSGI_APPLICATION = "censusreporter.config.prod.wsgi.application"
ALLOWED_HOSTS = [
'censusreporter.org',
'www.censusreporter.org',
'censusreporter.dokku.censusreporter.org',
]
CACHES = {
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': os.environ.get('REDIS_URL', ''),
}
}
|
<commit_before>from censusreporter.config.base.settings import *
import os
DEBUG = False
ROOT_URLCONF = 'censusreporter.config.prod.urls'
WSGI_APPLICATION = "censusreporter.config.prod.wsgi.application"
ALLOWED_HOSTS = [
'censusreporter.org',
'www.censusreporter.org',
'censusreporter.dokku.censusreporter.org',
]
CACHES = {
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': os.environ.get('REDIS_URL'),
}
}
<commit_msg>Fix Dockerfile so it can cache requirements<commit_after>
|
from censusreporter.config.base.settings import *
import os
DEBUG = False
ROOT_URLCONF = 'censusreporter.config.prod.urls'
WSGI_APPLICATION = "censusreporter.config.prod.wsgi.application"
ALLOWED_HOSTS = [
'censusreporter.org',
'www.censusreporter.org',
'censusreporter.dokku.censusreporter.org',
]
CACHES = {
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': os.environ.get('REDIS_URL', ''),
}
}
|
from censusreporter.config.base.settings import *
import os
DEBUG = False
ROOT_URLCONF = 'censusreporter.config.prod.urls'
WSGI_APPLICATION = "censusreporter.config.prod.wsgi.application"
ALLOWED_HOSTS = [
'censusreporter.org',
'www.censusreporter.org',
'censusreporter.dokku.censusreporter.org',
]
CACHES = {
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': os.environ.get('REDIS_URL'),
}
}
Fix Dockerfile so it can cache requirements
from censusreporter.config.base.settings import *
import os
DEBUG = False
ROOT_URLCONF = 'censusreporter.config.prod.urls'
WSGI_APPLICATION = "censusreporter.config.prod.wsgi.application"
ALLOWED_HOSTS = [
'censusreporter.org',
'www.censusreporter.org',
'censusreporter.dokku.censusreporter.org',
]
CACHES = {
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': os.environ.get('REDIS_URL', ''),
}
}
|
<commit_before>from censusreporter.config.base.settings import *
import os
DEBUG = False
ROOT_URLCONF = 'censusreporter.config.prod.urls'
WSGI_APPLICATION = "censusreporter.config.prod.wsgi.application"
ALLOWED_HOSTS = [
'censusreporter.org',
'www.censusreporter.org',
'censusreporter.dokku.censusreporter.org',
]
CACHES = {
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': os.environ.get('REDIS_URL'),
}
}
<commit_msg>Fix Dockerfile so it can cache requirements<commit_after>from censusreporter.config.base.settings import *
import os
DEBUG = False
ROOT_URLCONF = 'censusreporter.config.prod.urls'
WSGI_APPLICATION = "censusreporter.config.prod.wsgi.application"
ALLOWED_HOSTS = [
'censusreporter.org',
'www.censusreporter.org',
'censusreporter.dokku.censusreporter.org',
]
CACHES = {
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': os.environ.get('REDIS_URL', ''),
}
}
|
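The one-character change above matters because os.environ.get('REDIS_URL') returns None when the variable is unset, and a None cache LOCATION can break settings import in a build environment (such as a Docker layer that only installs requirements). Supplying a default keeps the module importable everywhere. A small standalone illustration of the difference:

import os

# Without a default the result may be None, so downstream code must guard:
url = os.environ.get('REDIS_URL')
assert url is None or isinstance(url, str)

# With a default the result is always a string, safe for config code
# that expects one:
url = os.environ.get('REDIS_URL', '')
print(repr(url))  # prints '' when the variable is unset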
eb453010915f6700edd1baa0febcc634deec81dc
|
src/viewsapp/views.py
|
src/viewsapp/views.py
|
from decorator_plus import (
require_form_methods, require_safe_methods)
from django.shortcuts import (
get_object_or_404, redirect, render)
from .forms import ExampleForm
from .models import ExampleModel
@require_safe_methods
def model_detail(request, *args, **kwargs):
request_slug = kwargs.get('slug')
example_obj = get_object_or_404(
ExampleModel, slug=request_slug)
return render(
request,
'viewsapp/detail.html',
{'object': example_obj})
@require_form_methods
def model_create(request, *args, **kwargs):
if request.method == 'POST':
form = ExampleForm(request.POST)
if form.is_valid():
new_obj = form.save()
return redirect(new_obj)
else:
form = ExampleForm()
return render(
request,
'viewsapp/form.html',
{'form': form})
|
from decorator_plus import require_http_methods
from django.shortcuts import (
get_object_or_404, redirect, render)
from .forms import ExampleForm
from .models import ExampleModel
@require_http_methods(['GET'])
def model_detail(request, *args, **kwargs):
request_slug = kwargs.get('slug')
example_obj = get_object_or_404(
ExampleModel, slug=request_slug)
return render(
request,
'viewsapp/detail.html',
{'object': example_obj})
@require_http_methods(['GET', 'POST'])
def model_create(request, *args, **kwargs):
if request.method == 'POST':
form = ExampleForm(request.POST)
if form.is_valid():
new_obj = form.save()
return redirect(new_obj)
else:
form = ExampleForm()
return render(
request,
'viewsapp/form.html',
{'form': form})
|
Switch to using require_http_methods decorator.
|
Switch to using require_http_methods decorator.
|
Python
|
bsd-2-clause
|
jambonrose/djangocon2015-views,jambonrose/djangocon2015-views
|
from decorator_plus import (
require_form_methods, require_safe_methods)
from django.shortcuts import (
get_object_or_404, redirect, render)
from .forms import ExampleForm
from .models import ExampleModel
@require_safe_methods
def model_detail(request, *args, **kwargs):
request_slug = kwargs.get('slug')
example_obj = get_object_or_404(
ExampleModel, slug=request_slug)
return render(
request,
'viewsapp/detail.html',
{'object': example_obj})
@require_form_methods
def model_create(request, *args, **kwargs):
if request.method == 'POST':
form = ExampleForm(request.POST)
if form.is_valid():
new_obj = form.save()
return redirect(new_obj)
else:
form = ExampleForm()
return render(
request,
'viewsapp/form.html',
{'form': form})
Switch to using require_http_methods decorator.
|
from decorator_plus import require_http_methods
from django.shortcuts import (
get_object_or_404, redirect, render)
from .forms import ExampleForm
from .models import ExampleModel
@require_http_methods(['GET'])
def model_detail(request, *args, **kwargs):
request_slug = kwargs.get('slug')
example_obj = get_object_or_404(
ExampleModel, slug=request_slug)
return render(
request,
'viewsapp/detail.html',
{'object': example_obj})
@require_http_methods(['GET', 'POST'])
def model_create(request, *args, **kwargs):
if request.method == 'POST':
form = ExampleForm(request.POST)
if form.is_valid():
new_obj = form.save()
return redirect(new_obj)
else:
form = ExampleForm()
return render(
request,
'viewsapp/form.html',
{'form': form})
|
<commit_before>from decorator_plus import (
require_form_methods, require_safe_methods)
from django.shortcuts import (
get_object_or_404, redirect, render)
from .forms import ExampleForm
from .models import ExampleModel
@require_safe_methods
def model_detail(request, *args, **kwargs):
request_slug = kwargs.get('slug')
example_obj = get_object_or_404(
ExampleModel, slug=request_slug)
return render(
request,
'viewsapp/detail.html',
{'object': example_obj})
@require_form_methods
def model_create(request, *args, **kwargs):
if request.method == 'POST':
form = ExampleForm(request.POST)
if form.is_valid():
new_obj = form.save()
return redirect(new_obj)
else:
form = ExampleForm()
return render(
request,
'viewsapp/form.html',
{'form': form})
<commit_msg>Switch to using require_http_methods decorator.<commit_after>
|
from decorator_plus import require_http_methods
from django.shortcuts import (
get_object_or_404, redirect, render)
from .forms import ExampleForm
from .models import ExampleModel
@require_http_methods(['GET'])
def model_detail(request, *args, **kwargs):
request_slug = kwargs.get('slug')
example_obj = get_object_or_404(
ExampleModel, slug=request_slug)
return render(
request,
'viewsapp/detail.html',
{'object': example_obj})
@require_http_methods(['GET', 'POST'])
def model_create(request, *args, **kwargs):
if request.method == 'POST':
form = ExampleForm(request.POST)
if form.is_valid():
new_obj = form.save()
return redirect(new_obj)
else:
form = ExampleForm()
return render(
request,
'viewsapp/form.html',
{'form': form})
|
from decorator_plus import (
require_form_methods, require_safe_methods)
from django.shortcuts import (
get_object_or_404, redirect, render)
from .forms import ExampleForm
from .models import ExampleModel
@require_safe_methods
def model_detail(request, *args, **kwargs):
request_slug = kwargs.get('slug')
example_obj = get_object_or_404(
ExampleModel, slug=request_slug)
return render(
request,
'viewsapp/detail.html',
{'object': example_obj})
@require_form_methods
def model_create(request, *args, **kwargs):
if request.method == 'POST':
form = ExampleForm(request.POST)
if form.is_valid():
new_obj = form.save()
return redirect(new_obj)
else:
form = ExampleForm()
return render(
request,
'viewsapp/form.html',
{'form': form})
Switch to using require_http_methods decorator.
from decorator_plus import require_http_methods
from django.shortcuts import (
get_object_or_404, redirect, render)
from .forms import ExampleForm
from .models import ExampleModel
@require_http_methods(['GET'])
def model_detail(request, *args, **kwargs):
request_slug = kwargs.get('slug')
example_obj = get_object_or_404(
ExampleModel, slug=request_slug)
return render(
request,
'viewsapp/detail.html',
{'object': example_obj})
@require_http_methods(['GET', 'POST'])
def model_create(request, *args, **kwargs):
if request.method == 'POST':
form = ExampleForm(request.POST)
if form.is_valid():
new_obj = form.save()
return redirect(new_obj)
else:
form = ExampleForm()
return render(
request,
'viewsapp/form.html',
{'form': form})
|
<commit_before>from decorator_plus import (
require_form_methods, require_safe_methods)
from django.shortcuts import (
get_object_or_404, redirect, render)
from .forms import ExampleForm
from .models import ExampleModel
@require_safe_methods
def model_detail(request, *args, **kwargs):
request_slug = kwargs.get('slug')
example_obj = get_object_or_404(
ExampleModel, slug=request_slug)
return render(
request,
'viewsapp/detail.html',
{'object': example_obj})
@require_form_methods
def model_create(request, *args, **kwargs):
if request.method == 'POST':
form = ExampleForm(request.POST)
if form.is_valid():
new_obj = form.save()
return redirect(new_obj)
else:
form = ExampleForm()
return render(
request,
'viewsapp/form.html',
{'form': form})
<commit_msg>Switch to using require_http_methods decorator.<commit_after>from decorator_plus import require_http_methods
from django.shortcuts import (
get_object_or_404, redirect, render)
from .forms import ExampleForm
from .models import ExampleModel
@require_http_methods(['GET'])
def model_detail(request, *args, **kwargs):
request_slug = kwargs.get('slug')
example_obj = get_object_or_404(
ExampleModel, slug=request_slug)
return render(
request,
'viewsapp/detail.html',
{'object': example_obj})
@require_http_methods(['GET', 'POST'])
def model_create(request, *args, **kwargs):
if request.method == 'POST':
form = ExampleForm(request.POST)
if form.is_valid():
new_obj = form.save()
return redirect(new_obj)
else:
form = ExampleForm()
return render(
request,
'viewsapp/form.html',
{'form': form})
|
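require_http_methods(['GET', 'POST']) rejects any other verb before the view body runs; Django itself ships an equivalent decorator in django.views.decorators.http that answers with a 405. A hedged sketch of how such a decorator can be built, to show what the record's change relies on (simplified, not decorator_plus's actual implementation):

from functools import wraps

def require_http_methods(allowed):
    # Normalize once so membership tests are cheap and case-insensitive.
    allowed = {method.upper() for method in allowed}

    def decorator(view):
        @wraps(view)
        def wrapped(request, *args, **kwargs):
            if request.method not in allowed:
                # A real Django decorator would return
                # HttpResponseNotAllowed(sorted(allowed)) here.
                raise ValueError('405: %s not allowed' % request.method)
            return view(request, *args, **kwargs)
        return wrapped
    return decorator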
94e44e18832311bc063830c0a6bfe23e04e40a8d
|
srsly/_msgpack_api.py
|
srsly/_msgpack_api.py
|
# coding: utf8
from __future__ import unicode_literals
import gc
from . import msgpack
from .util import force_path
def msgpack_dumps(data):
return msgpack.dumps(data, use_bin_type=True)
def msgpack_loads(data):
# msgpack-python docs suggest disabling gc before unpacking large messages
gc.disable()
msg = msgpack.loads(data, raw=False)
gc.enable()
return msg
def read_msgpack(location):
file_path = force_path(location)
with file_path.open("rb") as f:
gc.disable()
msg = msgpack.load(f, raw=False)
gc.enable()
return msg
def write_msgpack(location, data):
file_path = force_path(location, require_exists=False)
with file_path.open("wb") as f:
msgpack.dump(data, f, use_bin_type=True)
|
# coding: utf8
from __future__ import unicode_literals
import gc
from . import msgpack
from .util import force_path
def msgpack_dumps(data):
return msgpack.dumps(data, use_bin_type=True)
def msgpack_loads(data, use_list=True):
# msgpack-python docs suggest disabling gc before unpacking large messages
gc.disable()
msg = msgpack.loads(data, raw=False, use_list=use_list)
gc.enable()
return msg
def read_msgpack(location):
file_path = force_path(location)
with file_path.open("rb") as f:
gc.disable()
msg = msgpack.load(f, raw=False)
gc.enable()
return msg
def write_msgpack(location, data):
file_path = force_path(location, require_exists=False)
with file_path.open("wb") as f:
msgpack.dump(data, f, use_bin_type=True)
|
Allow passing use_list to msgpack_loads
|
Allow passing use_list to msgpack_loads
|
Python
|
mit
|
explosion/srsly,explosion/srsly,explosion/srsly,explosion/srsly
|
# coding: utf8
from __future__ import unicode_literals
import gc
from . import msgpack
from .util import force_path
def msgpack_dumps(data):
return msgpack.dumps(data, use_bin_type=True)
def msgpack_loads(data):
# msgpack-python docs suggest disabling gc before unpacking large messages
gc.disable()
msg = msgpack.loads(data, raw=False)
gc.enable()
return msg
def read_msgpack(location):
file_path = force_path(location)
with file_path.open("rb") as f:
gc.disable()
msg = msgpack.load(f, raw=False)
gc.enable()
return msg
def write_msgpack(location, data):
file_path = force_path(location, require_exists=False)
with file_path.open("wb") as f:
msgpack.dump(data, f, use_bin_type=True)
Allow passing use_list to msgpack_loads
|
# coding: utf8
from __future__ import unicode_literals
import gc
from . import msgpack
from .util import force_path
def msgpack_dumps(data):
return msgpack.dumps(data, use_bin_type=True)
def msgpack_loads(data, use_list=True):
# msgpack-python docs suggest disabling gc before unpacking large messages
gc.disable()
msg = msgpack.loads(data, raw=False, use_list=use_list)
gc.enable()
return msg
def read_msgpack(location):
file_path = force_path(location)
with file_path.open("rb") as f:
gc.disable()
msg = msgpack.load(f, raw=False)
gc.enable()
return msg
def write_msgpack(location, data):
file_path = force_path(location, require_exists=False)
with file_path.open("wb") as f:
msgpack.dump(data, f, use_bin_type=True)
|
<commit_before># coding: utf8
from __future__ import unicode_literals
import gc
from . import msgpack
from .util import force_path
def msgpack_dumps(data):
return msgpack.dumps(data, use_bin_type=True)
def msgpack_loads(data):
# msgpack-python docs suggest disabling gc before unpacking large messages
gc.disable()
msg = msgpack.loads(data, raw=False)
gc.enable()
return msg
def read_msgpack(location):
file_path = force_path(location)
with file_path.open("rb") as f:
gc.disable()
msg = msgpack.load(f, raw=False)
gc.enable()
return msg
def write_msgpack(location, data):
file_path = force_path(location, require_exists=False)
with file_path.open("wb") as f:
msgpack.dump(data, f, use_bin_type=True)
<commit_msg>Allow passing use_list to msgpack_loads<commit_after>
|
# coding: utf8
from __future__ import unicode_literals
import gc
from . import msgpack
from .util import force_path
def msgpack_dumps(data):
return msgpack.dumps(data, use_bin_type=True)
def msgpack_loads(data, use_list=True):
# msgpack-python docs suggest disabling gc before unpacking large messages
gc.disable()
msg = msgpack.loads(data, raw=False, use_list=use_list)
gc.enable()
return msg
def read_msgpack(location):
file_path = force_path(location)
with file_path.open("rb") as f:
gc.disable()
msg = msgpack.load(f, raw=False)
gc.enable()
return msg
def write_msgpack(location, data):
file_path = force_path(location, require_exists=False)
with file_path.open("wb") as f:
msgpack.dump(data, f, use_bin_type=True)
|
# coding: utf8
from __future__ import unicode_literals
import gc
from . import msgpack
from .util import force_path
def msgpack_dumps(data):
return msgpack.dumps(data, use_bin_type=True)
def msgpack_loads(data):
# msgpack-python docs suggest disabling gc before unpacking large messages
gc.disable()
msg = msgpack.loads(data, raw=False)
gc.enable()
return msg
def read_msgpack(location):
file_path = force_path(location)
with file_path.open("rb") as f:
gc.disable()
msg = msgpack.load(f, raw=False)
gc.enable()
return msg
def write_msgpack(location, data):
file_path = force_path(location, require_exists=False)
with file_path.open("wb") as f:
msgpack.dump(data, f, use_bin_type=True)
Allow passing use_list to msgpack_loads
# coding: utf8
from __future__ import unicode_literals
import gc
from . import msgpack
from .util import force_path
def msgpack_dumps(data):
return msgpack.dumps(data, use_bin_type=True)
def msgpack_loads(data, use_list=True):
# msgpack-python docs suggest disabling gc before unpacking large messages
gc.disable()
msg = msgpack.loads(data, raw=False, use_list=use_list)
gc.enable()
return msg
def read_msgpack(location):
file_path = force_path(location)
with file_path.open("rb") as f:
gc.disable()
msg = msgpack.load(f, raw=False)
gc.enable()
return msg
def write_msgpack(location, data):
file_path = force_path(location, require_exists=False)
with file_path.open("wb") as f:
msgpack.dump(data, f, use_bin_type=True)
|
<commit_before># coding: utf8
from __future__ import unicode_literals
import gc
from . import msgpack
from .util import force_path
def msgpack_dumps(data):
return msgpack.dumps(data, use_bin_type=True)
def msgpack_loads(data):
# msgpack-python docs suggest disabling gc before unpacking large messages
gc.disable()
msg = msgpack.loads(data, raw=False)
gc.enable()
return msg
def read_msgpack(location):
file_path = force_path(location)
with file_path.open("rb") as f:
gc.disable()
msg = msgpack.load(f, raw=False)
gc.enable()
return msg
def write_msgpack(location, data):
file_path = force_path(location, require_exists=False)
with file_path.open("wb") as f:
msgpack.dump(data, f, use_bin_type=True)
<commit_msg>Allow passing use_list to msgpack_loads<commit_after># coding: utf8
from __future__ import unicode_literals
import gc
from . import msgpack
from .util import force_path
def msgpack_dumps(data):
return msgpack.dumps(data, use_bin_type=True)
def msgpack_loads(data, use_list=True):
# msgpack-python docs suggest disabling gc before unpacking large messages
gc.disable()
msg = msgpack.loads(data, raw=False, use_list=use_list)
gc.enable()
return msg
def read_msgpack(location):
file_path = force_path(location)
with file_path.open("rb") as f:
gc.disable()
msg = msgpack.load(f, raw=False)
gc.enable()
return msg
def write_msgpack(location, data):
file_path = force_path(location, require_exists=False)
with file_path.open("wb") as f:
msgpack.dump(data, f, use_bin_type=True)
|
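use_list controls whether msgpack arrays decode as Python lists (the default) or tuples; tuples allocate less and are hashable, which is why a caller may want the knob exposed. A minimal round trip, assuming the msgpack package is installed:

import msgpack

packed = msgpack.dumps([1, 2, 3], use_bin_type=True)

as_list = msgpack.loads(packed, raw=False, use_list=True)
as_tuple = msgpack.loads(packed, raw=False, use_list=False)

print(as_list)   # [1, 2, 3]
print(as_tuple)  # (1, 2, 3)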
e78b3f53150a5f1c170b860f8719e982cf1c6f9e
|
integration/main.py
|
integration/main.py
|
import os
import sys
from spec import Spec, skip
from invoke import run
class Integration(Spec):
def setup(self):
from tessera.application import db
# Ensure we have a clean db target.
self.dbpath = db.engine.url.database
msg = "You seem to have a db in the default location ({0}) - please (re)move it before running tests to avoid collisions."
assert not os.path.exists(self.dbpath), msg.format(self.dbpath)
def teardown(self):
# Teardown only runs if setup completed, so the below will not nuke
# pre-existing dbs that cause setup's check to fail.
if os.path.exists(self.dbpath):
os.remove(self.dbpath)
def is_importable(self):
import tessera
assert tessera.app
assert tessera.db
def can_initdb(self):
from tessera.application import db
from tessera.model.database import Dashboard
# Make sure we can create and look at the DB
db.create_all()
assert len(Dashboard.query.all()) == 0
|
import os
import sys
from spec import Spec, skip, eq_
from invoke import run
class Integration(Spec):
def setup(self):
from tessera.application import db
# Ensure we have a clean db target.
self.dbpath = db.engine.url.database
msg = "You seem to have a db in the default location ({0}) - please (re)move it before running tests to avoid collisions."
assert not os.path.exists(self.dbpath), msg.format(self.dbpath)
def teardown(self):
from tessera.application import db
# Teardown only runs if setup completed, so the below will not nuke
# pre-existing dbs that cause setup's check to fail.
if os.path.exists(self.dbpath):
os.remove(self.dbpath)
# Ensure no cached session crap
db.session.close_all()
def is_importable(self):
import tessera
assert tessera.app
assert tessera.db
def can_initdb(self):
from tessera.application import db
from tessera.model.database import Dashboard
# Make sure we can create and look at the DB
db.create_all()
eq_(len(Dashboard.query.all()), 0)
def can_import_fixtures(self):
from tessera.application import db
from tessera.importer.json import JsonImporter
from tessera.model.database import Dashboard
db.create_all()
path = os.path.abspath(os.path.join(
os.path.dirname(__file__), '..', 'demo', 'demo-gallery.json'
))
JsonImporter.import_file(path)
eq_(len(Dashboard.query.all()), 1)
|
Fix state bleed, add fixture import test
|
Fix state bleed, add fixture import test
|
Python
|
apache-2.0
|
section-io/tessera,urbanairship/tessera,tessera-metrics/tessera,jmptrader/tessera,section-io/tessera,urbanairship/tessera,aalpern/tessera,section-io/tessera,jmptrader/tessera,filippog/tessera,aalpern/tessera,aalpern/tessera,Slach/tessera,Slach/tessera,urbanairship/tessera,urbanairship/tessera,Slach/tessera,jmptrader/tessera,jmptrader/tessera,tessera-metrics/tessera,aalpern/tessera,section-io/tessera,urbanairship/tessera,tessera-metrics/tessera,jmptrader/tessera,tessera-metrics/tessera,Slach/tessera,filippog/tessera,filippog/tessera,aalpern/tessera,tessera-metrics/tessera
|
import os
import sys
from spec import Spec, skip
from invoke import run
class Integration(Spec):
def setup(self):
from tessera.application import db
# Ensure we have a clean db target.
self.dbpath = db.engine.url.database
msg = "You seem to have a db in the default location ({0}) - please (re)move it before running tests to avoid collisions."
assert not os.path.exists(self.dbpath), msg.format(self.dbpath)
def teardown(self):
# Teardown only runs if setup completed, so the below will not nuke
# pre-existing dbs that cause setup's check to fail.
if os.path.exists(self.dbpath):
os.remove(self.dbpath)
def is_importable(self):
import tessera
assert tessera.app
assert tessera.db
def can_initdb(self):
from tessera.application import db
from tessera.model.database import Dashboard
# Make sure we can create and look at the DB
db.create_all()
assert len(Dashboard.query.all()) == 0
Fix state bleed, add fixture import test
|
import os
import sys
from spec import Spec, skip, eq_
from invoke import run
class Integration(Spec):
def setup(self):
from tessera.application import db
# Ensure we have a clean db target.
self.dbpath = db.engine.url.database
msg = "You seem to have a db in the default location ({0}) - please (re)move it before running tests to avoid collisions."
assert not os.path.exists(self.dbpath), msg.format(self.dbpath)
def teardown(self):
from tessera.application import db
# Teardown only runs if setup completed, so the below will not nuke
# pre-existing dbs that cause setup's check to fail.
if os.path.exists(self.dbpath):
os.remove(self.dbpath)
# Ensure no cached session crap
db.session.close_all()
def is_importable(self):
import tessera
assert tessera.app
assert tessera.db
def can_initdb(self):
from tessera.application import db
from tessera.model.database import Dashboard
# Make sure we can create and look at the DB
db.create_all()
eq_(len(Dashboard.query.all()), 0)
def can_import_fixtures(self):
from tessera.application import db
from tessera.importer.json import JsonImporter
from tessera.model.database import Dashboard
db.create_all()
path = os.path.abspath(os.path.join(
os.path.dirname(__file__), '..', 'demo', 'demo-gallery.json'
))
JsonImporter.import_file(path)
eq_(len(Dashboard.query.all()), 1)
|
<commit_before>import os
import sys
from spec import Spec, skip
from invoke import run
class Integration(Spec):
def setup(self):
from tessera.application import db
# Ensure we have a clean db target.
self.dbpath = db.engine.url.database
msg = "You seem to have a db in the default location ({0}) - please (re)move it before running tests to avoid collisions."
assert not os.path.exists(self.dbpath), msg.format(self.dbpath)
def teardown(self):
# Teardown only runs if setup completed, so the below will not nuke
# pre-existing dbs that cause setup's check to fail.
if os.path.exists(self.dbpath):
os.remove(self.dbpath)
def is_importable(self):
import tessera
assert tessera.app
assert tessera.db
def can_initdb(self):
from tessera.application import db
from tessera.model.database import Dashboard
# Make sure we can create and look at the DB
db.create_all()
assert len(Dashboard.query.all()) == 0
<commit_msg>Fix state bleed, add fixture import test<commit_after>
|
import os
import sys
from spec import Spec, skip, eq_
from invoke import run
class Integration(Spec):
def setup(self):
from tessera.application import db
# Ensure we have a clean db target.
self.dbpath = db.engine.url.database
msg = "You seem to have a db in the default location ({0}) - please (re)move it before running tests to avoid collisions."
assert not os.path.exists(self.dbpath), msg.format(self.dbpath)
def teardown(self):
from tessera.application import db
# Teardown only runs if setup completed, so the below will not nuke
# pre-existing dbs that cause setup's check to fail.
if os.path.exists(self.dbpath):
os.remove(self.dbpath)
# Ensure no cached session crap
db.session.close_all()
def is_importable(self):
import tessera
assert tessera.app
assert tessera.db
def can_initdb(self):
from tessera.application import db
from tessera.model.database import Dashboard
# Make sure we can create and look at the DB
db.create_all()
eq_(len(Dashboard.query.all()), 0)
def can_import_fixtures(self):
from tessera.application import db
from tessera.importer.json import JsonImporter
from tessera.model.database import Dashboard
db.create_all()
path = os.path.abspath(os.path.join(
os.path.dirname(__file__), '..', 'demo', 'demo-gallery.json'
))
JsonImporter.import_file(path)
eq_(len(Dashboard.query.all()), 1)
|
import os
import sys
from spec import Spec, skip
from invoke import run
class Integration(Spec):
def setup(self):
from tessera.application import db
# Ensure we have a clean db target.
self.dbpath = db.engine.url.database
msg = "You seem to have a db in the default location ({0}) - please (re)move it before running tests to avoid collisions."
assert not os.path.exists(self.dbpath), msg.format(self.dbpath)
def teardown(self):
# Teardown only runs if setup completed, so the below will not nuke
# pre-existing dbs that cause setup's check to fail.
if os.path.exists(self.dbpath):
os.remove(self.dbpath)
def is_importable(self):
import tessera
assert tessera.app
assert tessera.db
def can_initdb(self):
from tessera.application import db
from tessera.model.database import Dashboard
# Make sure we can create and look at the DB
db.create_all()
assert len(Dashboard.query.all()) == 0
Fix state bleed, add fixture import test
import os
import sys
from spec import Spec, skip, eq_
from invoke import run
class Integration(Spec):
def setup(self):
from tessera.application import db
# Ensure we have a clean db target.
self.dbpath = db.engine.url.database
msg = "You seem to have a db in the default location ({0}) - please (re)move it before running tests to avoid collisions."
assert not os.path.exists(self.dbpath), msg.format(self.dbpath)
def teardown(self):
from tessera.application import db
# Teardown only runs if setup completed, so the below will not nuke
# pre-existing dbs that cause setup's check to fail.
if os.path.exists(self.dbpath):
os.remove(self.dbpath)
# Ensure no cached session crap
db.session.close_all()
def is_importable(self):
import tessera
assert tessera.app
assert tessera.db
def can_initdb(self):
from tessera.application import db
from tessera.model.database import Dashboard
# Make sure we can create and look at the DB
db.create_all()
eq_(len(Dashboard.query.all()), 0)
def can_import_fixtures(self):
from tessera.application import db
from tessera.importer.json import JsonImporter
from tessera.model.database import Dashboard
db.create_all()
path = os.path.abspath(os.path.join(
os.path.dirname(__file__), '..', 'demo', 'demo-gallery.json'
))
JsonImporter.import_file(path)
eq_(len(Dashboard.query.all()), 1)
|
<commit_before>import os
import sys
from spec import Spec, skip
from invoke import run
class Integration(Spec):
def setup(self):
from tessera.application import db
# Ensure we have a clean db target.
self.dbpath = db.engine.url.database
msg = "You seem to have a db in the default location ({0}) - please (re)move it before running tests to avoid collisions."
assert not os.path.exists(self.dbpath), msg.format(self.dbpath)
def teardown(self):
# Teardown only runs if setup completed, so the below will not nuke
# pre-existing dbs that cause setup's check to fail.
if os.path.exists(self.dbpath):
os.remove(self.dbpath)
def is_importable(self):
import tessera
assert tessera.app
assert tessera.db
def can_initdb(self):
from tessera.application import db
from tessera.model.database import Dashboard
# Make sure we can create and look at the DB
db.create_all()
assert len(Dashboard.query.all()) == 0
<commit_msg>Fix state bleed, add fixture import test<commit_after>import os
import sys
from spec import Spec, skip, eq_
from invoke import run
class Integration(Spec):
def setup(self):
from tessera.application import db
# Ensure we have a clean db target.
self.dbpath = db.engine.url.database
msg = "You seem to have a db in the default location ({0}) - please (re)move it before running tests to avoid collisions."
assert not os.path.exists(self.dbpath), msg.format(self.dbpath)
def teardown(self):
from tessera.application import db
# Teardown only runs if setup completed, so the below will not nuke
# pre-existing dbs that cause setup's check to fail.
if os.path.exists(self.dbpath):
os.remove(self.dbpath)
# Ensure no cached session crap
db.session.close_all()
def is_importable(self):
import tessera
assert tessera.app
assert tessera.db
def can_initdb(self):
from tessera.application import db
from tessera.model.database import Dashboard
# Make sure we can create and look at the DB
db.create_all()
eq_(len(Dashboard.query.all()), 0)
def can_import_fixtures(self):
from tessera.application import db
from tessera.importer.json import JsonImporter
from tessera.model.database import Dashboard
db.create_all()
path = os.path.abspath(os.path.join(
os.path.dirname(__file__), '..', 'demo', 'demo-gallery.json'
))
JsonImporter.import_file(path)
eq_(len(Dashboard.query.all()), 1)
|
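The extra db.session.close_all() in teardown matters because SQLAlchemy's session keeps an identity map of objects alive between tests even after the SQLite file is deleted, so the next test could see stale state. A sketch of the same hygiene with plain SQLAlchemy (the names here are illustrative, not tessera's):

from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker

engine = create_engine('sqlite://')  # in-memory database for the sketch
Session = scoped_session(sessionmaker(bind=engine))

def teardown():
    # Discard the thread-local session and its cached objects so the next
    # test starts clean; equivalent in spirit to db.session.close_all().
    Session.remove()
    engine.dispose()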
0ad29aa9945448236500b221fc489c1627e6693b
|
api.py
|
api.py
|
from collections import namedtuple
import requests
QUERY_TEMPLATE = '?token={}&domain={}'
BASE_URL = 'https://pddimp.yandex.ru/api2/'
Connection = namedtuple('Connection', ['auth', 'domain'])
def list_emails(connection):
url = '{}admin/email/list'.format(BASE_URL) + QUERY_TEMPLATE.format(*connection)
ret = requests.get(url)
return ret.json()
|
import json
import random
import string
from collections import namedtuple
import requests
QUERY_TEMPLATE = '?token={}&domain={}'
BASE_URL = 'https://pddimp.yandex.ru/api2/'
Connection = namedtuple('Connection', ['auth', 'domain'])
class YandexException(Exception):
pass
rndstr = lambda: ''.join(random.sample(string.ascii_letters + string.hexdigits, 17))
def _check_error(ret_json):
if ret_json.get('success') == 'error':
raise YandexException(ret_json['error'])
def list_emails(connection):
url = '{}admin/email/list'.format(BASE_URL) + QUERY_TEMPLATE.format(*connection)
ret = requests.get(url).json()
_check_error(ret)
return ret
def create_email(connection, email, password=None):
if not password:
password = rndstr()
url = '{}admin/email/add'.format(BASE_URL) + QUERY_TEMPLATE.format(*connection)
url += '&login={}&password={}'.format(email, password)
ret = requests.post(url).json()
_check_error(ret)
return ret, password
def delete_email(connection, email=None, uid=None):
    if not email and not uid:
raise YandexException('Must specify email or uid')
url = '{}admin/email/del'.format(BASE_URL) + QUERY_TEMPLATE.format(*connection)
if email:
url += '&login={}'.format(email)
else:
url += '&uid={}'.format(uid)
ret = requests.post(url).json()
_check_error(ret)
return ret
|
Add create email and delete email methods
|
Add create email and delete email methods
|
Python
|
mit
|
chrisseto/dinosaurs.sexy,chrisseto/dinosaurs.sexy
|
from collections import namedtuple
import requests
QUERY_TEMPLATE = '?token={}&domain={}'
BASE_URL = 'https://pddimp.yandex.ru/api2/'
Connection = namedtuple('Connection', ['auth', 'domain'])
def list_emails(connection):
url = '{}admin/email/list'.format(BASE_URL) + QUERY_TEMPLATE.format(*connection)
ret = requests.get(url)
return ret.json()
Add create email and delete email methods
|
import json
import random
import string
from collections import namedtuple
import requests
QUERY_TEMPLATE = '?token={}&domain={}'
BASE_URL = 'https://pddimp.yandex.ru/api2/'
Connection = namedtuple('Connection', ['auth', 'domain'])
class YandexException(Exception):
pass
rndstr = lambda: ''.join(random.sample(string.ascii_letters + string.hexdigits, 17))
def _check_error(ret_json):
if ret_json.get('success') == 'error':
raise YandexException(ret_json['error'])
def list_emails(connection):
url = '{}admin/email/list'.format(BASE_URL) + QUERY_TEMPLATE.format(*connection)
ret = requests.get(url).json()
_check_error(ret)
return ret
def create_email(connection, email, password=None):
if not password:
password = rndstr()
url = '{}admin/email/add'.format(BASE_URL) + QUERY_TEMPLATE.format(*connection)
url += '&login={}&password={}'.format(email, password)
ret = requests.post(url).json()
_check_error(ret)
return ret, password
def delete_email(connection, email=None, uid=None):
    if not email and not uid:
raise YandexException('Must specify email or uid')
url = '{}admin/email/del'.format(BASE_URL) + QUERY_TEMPLATE.format(*connection)
if email:
url += '&login={}'.format(email)
else:
url += '&uid={}'.format(uid)
ret = requests.post(url).json()
_check_error(ret)
return ret
|
<commit_before>from collections import namedtuple
import requests
QUERY_TEMPLATE = '?token={}&domain={}'
BASE_URL = 'https://pddimp.yandex.ru/api2/'
Connection = namedtuple('Connection', ['auth', 'domain'])
def list_emails(connection):
url = '{}admin/email/list'.format(BASE_URL) + QUERY_TEMPLATE.format(*connection)
ret = requests.get(url)
return ret.json()
<commit_msg>Add create email and delete email methods<commit_after>
|
import json
import random
import string
from collections import namedtuple
import requests
QUERY_TEMPLATE = '?token={}&domain={}'
BASE_URL = 'https://pddimp.yandex.ru/api2/'
Connection = namedtuple('Connection', ['auth', 'domain'])
class YandexException(Exception):
pass
rndstr = lambda: ''.join(random.sample(string.ascii_letters + string.hexdigits, 17))
def _check_error(ret_json):
if ret_json.get('success') == 'error':
raise YandexException(ret_json['error'])
def list_emails(connection):
url = '{}admin/email/list'.format(BASE_URL) + QUERY_TEMPLATE.format(*connection)
ret = requests.get(url).json()
_check_error(ret)
return ret
def create_email(connection, email, password=None):
if not password:
password = rndstr()
url = '{}admin/email/add'.format(BASE_URL) + QUERY_TEMPLATE.format(*connection)
url += '&login={}&password={}'.format(email, password)
ret = requests.post(url).json()
_check_error(ret)
return ret, password
def delete_email(connection, email=None, uid=None):
    if not email and not uid:
raise YandexException('Must specify email or uid')
url = '{}admin/email/del'.format(BASE_URL) + QUERY_TEMPLATE.format(*connection)
if email:
url += '&login={}'.format(email)
else:
url += '&uid={}'.format(uid)
ret = requests.post(url).json()
_check_error(ret)
return ret
|
from collections import namedtuple
import requests
QUERY_TEMPLATE = '?token={}&domain={}'
BASE_URL = 'https://pddimp.yandex.ru/api2/'
Connection = namedtuple('Connection', ['auth', 'domain'])
def list_emails(connection):
url = '{}admin/email/list'.format(BASE_URL) + QUERY_TEMPLATE.format(*connection)
ret = requests.get(url)
return ret.json()
Add create email and delete email methods
import json
import random
import string
from collections import namedtuple
import requests
QUERY_TEMPLATE = '?token={}&domain={}'
BASE_URL = 'https://pddimp.yandex.ru/api2/'
Connection = namedtuple('Connection', ['auth', 'domain'])
class YandexException(Exception):
pass
rndstr = lambda: ''.join(random.sample(string.ascii_letters + string.hexdigits, 17))
def _check_error(ret_json):
if ret_json.get('success') == 'error':
raise YandexException(ret_json['error'])
def list_emails(connection):
url = '{}admin/email/list'.format(BASE_URL) + QUERY_TEMPLATE.format(*connection)
ret = requests.get(url).json()
_check_error(ret)
return ret
def create_email(connection, email, password=None):
if not password:
password = rndstr()
url = '{}admin/email/add'.format(BASE_URL) + QUERY_TEMPLATE.format(*connection)
url += '&login={}&password={}'.format(email, password)
ret = requests.post(url).json()
_check_error(ret)
return ret, password
def delete_email(connection, email=None, uid=None):
    if not email and not uid:
raise YandexException('Must specify email or uid')
url = '{}admin/email/del'.format(BASE_URL) + QUERY_TEMPLATE.format(*connection)
if email:
url += '&login={}'.format(email)
else:
url += '&uid={}'.format(uid)
ret = requests.post(url).json()
_check_error(ret)
return ret
|
<commit_before>from collections import namedtuple
import requests
QUERY_TEMPLATE = '?token={}&domain={}'
BASE_URL = 'https://pddimp.yandex.ru/api2/'
Connection = namedtuple('Connection', ['auth', 'domain'])
def list_emails(connection):
url = '{}admin/email/list'.format(BASE_URL) + QUERY_TEMPLATE.format(*connection)
ret = requests.get(url)
return ret.json()
<commit_msg>Add create email and delete email methods<commit_after>import json
import random
import string
from collections import namedtuple
import requests
QUERY_TEMPLATE = '?token={}&domain={}'
BASE_URL = 'https://pddimp.yandex.ru/api2/'
Connection = namedtuple('Connection', ['auth', 'domain'])
class YandexException(Exception):
pass
rndstr = lambda: ''.join(random.sample(string.ascii_letters + string.hexdigits, 17))
def _check_error(ret_json):
if ret_json.get('success') == 'error':
raise YandexException(ret_json['error'])
def list_emails(connection):
url = '{}admin/email/list'.format(BASE_URL) + QUERY_TEMPLATE.format(*connection)
ret = requests.get(url).json()
_check_error(ret)
return ret
def create_email(connection, email, password=None):
if not password:
password = rndstr()
url = '{}admin/email/add'.format(BASE_URL) + QUERY_TEMPLATE.format(*connection)
url += '&login={}&password={}'.format(email, password)
ret = requests.post(url).json()
_check_error(ret)
return ret, password
def delete_email(connection, email=None, uid=None):
    if not email and not uid:
raise YandexException('Must specify email or uid')
url = '{}admin/email/del'.format(BASE_URL) + QUERY_TEMPLATE.format(*connection)
if email:
url += '&login={}'.format(email)
else:
url += '&uid={}'.format(uid)
ret = requests.post(url).json()
_check_error(ret)
return ret
|
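Building query strings by concatenation, as this record does, works but performs no escaping; a login containing '&' or '+' would corrupt the URL. requests accepts a params dict and encodes it safely. A hedged rewrite of one call in that style (endpoint and field names copied from the record, everything else assumed):

import requests

BASE_URL = 'https://pddimp.yandex.ru/api2/'

def create_email(token, domain, login, password):
    # requests URL-encodes each value and appends the query string.
    response = requests.post(
        BASE_URL + 'admin/email/add',
        params={'token': token, 'domain': domain,
                'login': login, 'password': password},
    )
    return response.json()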
df325e7caee163f754c1b85cf71dab7810700933
|
src/waldur_keycloak_rancher/tasks.py
|
src/waldur_keycloak_rancher/tasks.py
|
import logging
from celery import shared_task
from django.conf import settings
from waldur_keycloak.models import ProjectGroup
from waldur_rancher.models import Cluster, ClusterRole
logger = logging.getLogger(__name__)
@shared_task(name='waldur_keycloak_rancher.sync_groups')
def sync_groups():
if not settings.WALDUR_KEYCLOAK['ENABLED']:
logger.debug('Skipping Keycloak synchronization because plugin is disabled.')
return
for project_group in ProjectGroup.objects.all():
project = project_group.project
for cluster in Cluster.objects.filter(project=project):
backend = cluster.get_backend()
try:
backend.get_or_create_cluster_group_role(
f'keycloakoidc_group://{project.name}',
cluster.backend_id,
ClusterRole.CLUSTER_MEMBER,
)
except Exception:
logger.warning(
'Unable to create cluster group for project %s and cluster %s',
project,
cluster,
)
|
import logging
from celery import shared_task
from django.conf import settings
from waldur_keycloak.models import ProjectGroup
from waldur_rancher.enums import ClusterRoles
from waldur_rancher.models import Cluster
logger = logging.getLogger(__name__)
@shared_task(name='waldur_keycloak_rancher.sync_groups')
def sync_groups():
if not settings.WALDUR_KEYCLOAK['ENABLED']:
logger.debug('Skipping Keycloak synchronization because plugin is disabled.')
return
for project_group in ProjectGroup.objects.all():
project = project_group.project
for cluster in Cluster.objects.filter(project=project):
backend = cluster.get_backend()
try:
backend.get_or_create_cluster_group_role(
f'keycloakoidc_group://{project.name}',
cluster.backend_id,
ClusterRoles.cluster_member,
)
except Exception:
logger.warning(
'Unable to create cluster group for project %s and cluster %s',
project,
cluster,
)
|
Fix cluster group membership synchronization.
|
Fix cluster group membership synchronization.
|
Python
|
mit
|
opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/waldur-mastermind
|
import logging
from celery import shared_task
from django.conf import settings
from waldur_keycloak.models import ProjectGroup
from waldur_rancher.models import Cluster, ClusterRole
logger = logging.getLogger(__name__)
@shared_task(name='waldur_keycloak_rancher.sync_groups')
def sync_groups():
if not settings.WALDUR_KEYCLOAK['ENABLED']:
logger.debug('Skipping Keycloak synchronization because plugin is disabled.')
return
for project_group in ProjectGroup.objects.all():
project = project_group.project
for cluster in Cluster.objects.filter(project=project):
backend = cluster.get_backend()
try:
backend.get_or_create_cluster_group_role(
f'keycloakoidc_group://{project.name}',
cluster.backend_id,
ClusterRole.CLUSTER_MEMBER,
)
except Exception:
logger.warning(
'Unable to create cluster group for project %s and cluster %s',
project,
cluster,
)
Fix cluster group membership synchronization.
|
import logging
from celery import shared_task
from django.conf import settings
from waldur_keycloak.models import ProjectGroup
from waldur_rancher.enums import ClusterRoles
from waldur_rancher.models import Cluster
logger = logging.getLogger(__name__)
@shared_task(name='waldur_keycloak_rancher.sync_groups')
def sync_groups():
if not settings.WALDUR_KEYCLOAK['ENABLED']:
logger.debug('Skipping Keycloak synchronization because plugin is disabled.')
return
for project_group in ProjectGroup.objects.all():
project = project_group.project
for cluster in Cluster.objects.filter(project=project):
backend = cluster.get_backend()
try:
backend.get_or_create_cluster_group_role(
f'keycloakoidc_group://{project.name}',
cluster.backend_id,
ClusterRoles.cluster_member,
)
except Exception:
logger.warning(
'Unable to create cluster group for project %s and cluster %s',
project,
cluster,
)
|
<commit_before>import logging
from celery import shared_task
from django.conf import settings
from waldur_keycloak.models import ProjectGroup
from waldur_rancher.models import Cluster, ClusterRole
logger = logging.getLogger(__name__)
@shared_task(name='waldur_keycloak_rancher.sync_groups')
def sync_groups():
if not settings.WALDUR_KEYCLOAK['ENABLED']:
logger.debug('Skipping Keycloak synchronization because plugin is disabled.')
return
for project_group in ProjectGroup.objects.all():
project = project_group.project
for cluster in Cluster.objects.filter(project=project):
backend = cluster.get_backend()
try:
backend.get_or_create_cluster_group_role(
f'keycloakoidc_group://{project.name}',
cluster.backend_id,
ClusterRole.CLUSTER_MEMBER,
)
except Exception:
logger.warning(
'Unable to create cluster group for project %s and cluster %s',
project,
cluster,
)
<commit_msg>Fix cluster group membership synchronization.<commit_after>
|
import logging
from celery import shared_task
from django.conf import settings
from waldur_keycloak.models import ProjectGroup
from waldur_rancher.enums import ClusterRoles
from waldur_rancher.models import Cluster
logger = logging.getLogger(__name__)
@shared_task(name='waldur_keycloak_rancher.sync_groups')
def sync_groups():
if not settings.WALDUR_KEYCLOAK['ENABLED']:
logger.debug('Skipping Keycloak synchronization because plugin is disabled.')
return
for project_group in ProjectGroup.objects.all():
project = project_group.project
for cluster in Cluster.objects.filter(project=project):
backend = cluster.get_backend()
try:
backend.get_or_create_cluster_group_role(
f'keycloakoidc_group://{project.name}',
cluster.backend_id,
ClusterRoles.cluster_member,
)
except Exception:
logger.warning(
'Unable to create cluster group for project %s and cluster %s',
project,
cluster,
)
|
import logging
from celery import shared_task
from django.conf import settings
from waldur_keycloak.models import ProjectGroup
from waldur_rancher.models import Cluster, ClusterRole
logger = logging.getLogger(__name__)
@shared_task(name='waldur_keycloak_rancher.sync_groups')
def sync_groups():
if not settings.WALDUR_KEYCLOAK['ENABLED']:
logger.debug('Skipping Keycloak synchronization because plugin is disabled.')
return
for project_group in ProjectGroup.objects.all():
project = project_group.project
for cluster in Cluster.objects.filter(project=project):
backend = cluster.get_backend()
try:
backend.get_or_create_cluster_group_role(
f'keycloakoidc_group://{project.name}',
cluster.backend_id,
ClusterRole.CLUSTER_MEMBER,
)
except Exception:
logger.warning(
'Unable to create cluster group for project %s and cluster %s',
project,
cluster,
)
Fix cluster group membership synchronization.
import logging
from celery import shared_task
from django.conf import settings
from waldur_keycloak.models import ProjectGroup
from waldur_rancher.enums import ClusterRoles
from waldur_rancher.models import Cluster
logger = logging.getLogger(__name__)
@shared_task(name='waldur_keycloak_rancher.sync_groups')
def sync_groups():
if not settings.WALDUR_KEYCLOAK['ENABLED']:
logger.debug('Skipping Keycloak synchronization because plugin is disabled.')
return
for project_group in ProjectGroup.objects.all():
project = project_group.project
for cluster in Cluster.objects.filter(project=project):
backend = cluster.get_backend()
try:
backend.get_or_create_cluster_group_role(
f'keycloakoidc_group://{project.name}',
cluster.backend_id,
ClusterRoles.cluster_member,
)
except Exception:
logger.warning(
'Unable to create cluster group for project %s and cluster %s',
project,
cluster,
)
|
<commit_before>import logging
from celery import shared_task
from django.conf import settings
from waldur_keycloak.models import ProjectGroup
from waldur_rancher.models import Cluster, ClusterRole
logger = logging.getLogger(__name__)
@shared_task(name='waldur_keycloak_rancher.sync_groups')
def sync_groups():
if not settings.WALDUR_KEYCLOAK['ENABLED']:
logger.debug('Skipping Keycloak synchronization because plugin is disabled.')
return
for project_group in ProjectGroup.objects.all():
project = project_group.project
for cluster in Cluster.objects.filter(project=project):
backend = cluster.get_backend()
try:
backend.get_or_create_cluster_group_role(
f'keycloakoidc_group://{project.name}',
cluster.backend_id,
ClusterRole.CLUSTER_MEMBER,
)
except Exception:
logger.warning(
'Unable to create cluster group for project %s and cluster %s',
project,
cluster,
)
<commit_msg>Fix cluster group membership synchronization.<commit_after>import logging
from celery import shared_task
from django.conf import settings
from waldur_keycloak.models import ProjectGroup
from waldur_rancher.enums import ClusterRoles
from waldur_rancher.models import Cluster
logger = logging.getLogger(__name__)
@shared_task(name='waldur_keycloak_rancher.sync_groups')
def sync_groups():
if not settings.WALDUR_KEYCLOAK['ENABLED']:
logger.debug('Skipping Keycloak synchronization because plugin is disabled.')
return
for project_group in ProjectGroup.objects.all():
project = project_group.project
for cluster in Cluster.objects.filter(project=project):
backend = cluster.get_backend()
try:
backend.get_or_create_cluster_group_role(
f'keycloakoidc_group://{project.name}',
cluster.backend_id,
ClusterRoles.cluster_member,
)
except Exception:
logger.warning(
'Unable to create cluster group for project %s and cluster %s',
project,
cluster,
)
|
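The fix replaces a constant that no longer exists on the model with a member of a dedicated enum; routing role names through an Enum means a typo fails at attribute lookup instead of at the remote Rancher API. A minimal illustration of the pattern (the member values below are invented for the sketch, not waldur's actual ones):

import enum

class ClusterRoles(enum.Enum):
    cluster_owner = 'cluster-owner'
    cluster_member = 'cluster-member'

def grant(principal, role):
    # Accepting the enum rather than a bare string catches mistakes early.
    if not isinstance(role, ClusterRoles):
        raise TypeError('role must be a ClusterRoles member')
    return {'principal': principal, 'role': role.value}

print(grant('keycloakoidc_group://proj', ClusterRoles.cluster_member))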
cbddfe308f4e0da728974777f10b245a966520b6
|
summarize/__init__.py
|
summarize/__init__.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from itertools import combinations
from operator import itemgetter
from distance import jaccard
from networkx import Graph, pagerank
from nltk import tokenize
from .utils import get_stopwords, get_words
def summarize(text, sentence_count=5, language='english'):
stopwords = get_stopwords(language)
sentence_list = tokenize.sent_tokenize(text, language)
wordsets = [get_words(sentence, stopwords) for sentence in sentence_list]
graph = Graph()
pairs = combinations(enumerate(filter(None, wordsets)), 2)
for (index_a, words_a), (index_b, words_b) in pairs:
similarity = 1 - jaccard(words_a, words_b)
if similarity > 0:
graph.add_edge(index_a, index_b, weight=similarity)
ranked_sentence_indexes = pagerank(graph).items()
sentences_by_rank = sorted(
ranked_sentence_indexes, key=itemgetter(1), reverse=True)
best_sentences = map(itemgetter(0), sentences_by_rank[:sentence_count])
best_sentences_in_order = sorted(best_sentences)
return ' '.join(sentence_list[index] for index in best_sentences_in_order)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from itertools import combinations
from operator import itemgetter
from distance import jaccard
from networkx import Graph, pagerank
from nltk import tokenize
from .utils import get_stopwords, get_words
def summarize(text, sentence_count=5, language='english'):
stopwords = get_stopwords(language)
sentence_list = tokenize.sent_tokenize(text, language)
wordsets = [get_words(sentence, stopwords) for sentence in sentence_list]
graph = Graph()
pairs = combinations(enumerate(wordsets), 2)
for (index_a, words_a), (index_b, words_b) in pairs:
if words_a and words_b:
similarity = 1 - jaccard(words_a, words_b)
if similarity > 0:
graph.add_edge(index_a, index_b, weight=similarity)
ranked_sentence_indexes = pagerank(graph).items()
sentences_by_rank = sorted(
ranked_sentence_indexes, key=itemgetter(1), reverse=True)
best_sentences = map(itemgetter(0), sentences_by_rank[:sentence_count])
best_sentences_in_order = sorted(best_sentences)
return ' '.join(sentence_list[index] for index in best_sentences_in_order)
|
Fix sentence index shifting with empty sentences
|
Fix sentence index shifting with empty sentences
|
Python
|
mit
|
despawnerer/summarize
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from itertools import combinations
from operator import itemgetter
from distance import jaccard
from networkx import Graph, pagerank
from nltk import tokenize
from .utils import get_stopwords, get_words
def summarize(text, sentence_count=5, language='english'):
stopwords = get_stopwords(language)
sentence_list = tokenize.sent_tokenize(text, language)
wordsets = [get_words(sentence, stopwords) for sentence in sentence_list]
graph = Graph()
pairs = combinations(enumerate(filter(None, wordsets)), 2)
for (index_a, words_a), (index_b, words_b) in pairs:
similarity = 1 - jaccard(words_a, words_b)
if similarity > 0:
graph.add_edge(index_a, index_b, weight=similarity)
ranked_sentence_indexes = pagerank(graph).items()
sentences_by_rank = sorted(
ranked_sentence_indexes, key=itemgetter(1), reverse=True)
best_sentences = map(itemgetter(0), sentences_by_rank[:sentence_count])
best_sentences_in_order = sorted(best_sentences)
return ' '.join(sentence_list[index] for index in best_sentences_in_order)
Fix sentence index shifting with empty sentences
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from itertools import combinations
from operator import itemgetter
from distance import jaccard
from networkx import Graph, pagerank
from nltk import tokenize
from .utils import get_stopwords, get_words
def summarize(text, sentence_count=5, language='english'):
stopwords = get_stopwords(language)
sentence_list = tokenize.sent_tokenize(text, language)
wordsets = [get_words(sentence, stopwords) for sentence in sentence_list]
graph = Graph()
pairs = combinations(enumerate(wordsets), 2)
for (index_a, words_a), (index_b, words_b) in pairs:
if words_a and words_b:
similarity = 1 - jaccard(words_a, words_b)
if similarity > 0:
graph.add_edge(index_a, index_b, weight=similarity)
ranked_sentence_indexes = pagerank(graph).items()
sentences_by_rank = sorted(
ranked_sentence_indexes, key=itemgetter(1), reverse=True)
best_sentences = map(itemgetter(0), sentences_by_rank[:sentence_count])
best_sentences_in_order = sorted(best_sentences)
return ' '.join(sentence_list[index] for index in best_sentences_in_order)
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from itertools import combinations
from operator import itemgetter
from distance import jaccard
from networkx import Graph, pagerank
from nltk import tokenize
from .utils import get_stopwords, get_words
def summarize(text, sentence_count=5, language='english'):
stopwords = get_stopwords(language)
sentence_list = tokenize.sent_tokenize(text, language)
wordsets = [get_words(sentence, stopwords) for sentence in sentence_list]
graph = Graph()
pairs = combinations(enumerate(filter(None, wordsets)), 2)
for (index_a, words_a), (index_b, words_b) in pairs:
similarity = 1 - jaccard(words_a, words_b)
if similarity > 0:
graph.add_edge(index_a, index_b, weight=similarity)
ranked_sentence_indexes = pagerank(graph).items()
sentences_by_rank = sorted(
ranked_sentence_indexes, key=itemgetter(1), reverse=True)
best_sentences = map(itemgetter(0), sentences_by_rank[:sentence_count])
best_sentences_in_order = sorted(best_sentences)
return ' '.join(sentence_list[index] for index in best_sentences_in_order)
<commit_msg>Fix sentence index shifting with empty sentences<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from itertools import combinations
from operator import itemgetter
from distance import jaccard
from networkx import Graph, pagerank
from nltk import tokenize
from .utils import get_stopwords, get_words
def summarize(text, sentence_count=5, language='english'):
stopwords = get_stopwords(language)
sentence_list = tokenize.sent_tokenize(text, language)
wordsets = [get_words(sentence, stopwords) for sentence in sentence_list]
graph = Graph()
pairs = combinations(enumerate(wordsets), 2)
for (index_a, words_a), (index_b, words_b) in pairs:
if words_a and words_b:
similarity = 1 - jaccard(words_a, words_b)
if similarity > 0:
graph.add_edge(index_a, index_b, weight=similarity)
ranked_sentence_indexes = pagerank(graph).items()
sentences_by_rank = sorted(
ranked_sentence_indexes, key=itemgetter(1), reverse=True)
best_sentences = map(itemgetter(0), sentences_by_rank[:sentence_count])
best_sentences_in_order = sorted(best_sentences)
return ' '.join(sentence_list[index] for index in best_sentences_in_order)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from itertools import combinations
from operator import itemgetter
from distance import jaccard
from networkx import Graph, pagerank
from nltk import tokenize
from .utils import get_stopwords, get_words
def summarize(text, sentence_count=5, language='english'):
stopwords = get_stopwords(language)
sentence_list = tokenize.sent_tokenize(text, language)
wordsets = [get_words(sentence, stopwords) for sentence in sentence_list]
graph = Graph()
pairs = combinations(enumerate(filter(None, wordsets)), 2)
for (index_a, words_a), (index_b, words_b) in pairs:
similarity = 1 - jaccard(words_a, words_b)
if similarity > 0:
graph.add_edge(index_a, index_b, weight=similarity)
ranked_sentence_indexes = pagerank(graph).items()
sentences_by_rank = sorted(
ranked_sentence_indexes, key=itemgetter(1), reverse=True)
best_sentences = map(itemgetter(0), sentences_by_rank[:sentence_count])
best_sentences_in_order = sorted(best_sentences)
return ' '.join(sentence_list[index] for index in best_sentences_in_order)
Fix sentence index shifting with empty sentences# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from itertools import combinations
from operator import itemgetter
from distance import jaccard
from networkx import Graph, pagerank
from nltk import tokenize
from .utils import get_stopwords, get_words
def summarize(text, sentence_count=5, language='english'):
stopwords = get_stopwords(language)
sentence_list = tokenize.sent_tokenize(text, language)
wordsets = [get_words(sentence, stopwords) for sentence in sentence_list]
graph = Graph()
pairs = combinations(enumerate(wordsets), 2)
for (index_a, words_a), (index_b, words_b) in pairs:
if words_a and words_b:
similarity = 1 - jaccard(words_a, words_b)
if similarity > 0:
graph.add_edge(index_a, index_b, weight=similarity)
ranked_sentence_indexes = pagerank(graph).items()
sentences_by_rank = sorted(
ranked_sentence_indexes, key=itemgetter(1), reverse=True)
best_sentences = map(itemgetter(0), sentences_by_rank[:sentence_count])
best_sentences_in_order = sorted(best_sentences)
return ' '.join(sentence_list[index] for index in best_sentences_in_order)
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from itertools import combinations
from operator import itemgetter
from distance import jaccard
from networkx import Graph, pagerank
from nltk import tokenize
from .utils import get_stopwords, get_words
def summarize(text, sentence_count=5, language='english'):
stopwords = get_stopwords(language)
sentence_list = tokenize.sent_tokenize(text, language)
wordsets = [get_words(sentence, stopwords) for sentence in sentence_list]
graph = Graph()
pairs = combinations(enumerate(filter(None, wordsets)), 2)
for (index_a, words_a), (index_b, words_b) in pairs:
similarity = 1 - jaccard(words_a, words_b)
if similarity > 0:
graph.add_edge(index_a, index_b, weight=similarity)
ranked_sentence_indexes = pagerank(graph).items()
sentences_by_rank = sorted(
ranked_sentence_indexes, key=itemgetter(1), reverse=True)
best_sentences = map(itemgetter(0), sentences_by_rank[:sentence_count])
best_sentences_in_order = sorted(best_sentences)
return ' '.join(sentence_list[index] for index in best_sentences_in_order)
<commit_msg>Fix sentence index shifting with empty sentences<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from itertools import combinations
from operator import itemgetter
from distance import jaccard
from networkx import Graph, pagerank
from nltk import tokenize
from .utils import get_stopwords, get_words
def summarize(text, sentence_count=5, language='english'):
stopwords = get_stopwords(language)
sentence_list = tokenize.sent_tokenize(text, language)
wordsets = [get_words(sentence, stopwords) for sentence in sentence_list]
graph = Graph()
pairs = combinations(enumerate(wordsets), 2)
for (index_a, words_a), (index_b, words_b) in pairs:
if words_a and words_b:
similarity = 1 - jaccard(words_a, words_b)
if similarity > 0:
graph.add_edge(index_a, index_b, weight=similarity)
ranked_sentence_indexes = pagerank(graph).items()
sentences_by_rank = sorted(
ranked_sentence_indexes, key=itemgetter(1), reverse=True)
best_sentences = map(itemgetter(0), sentences_by_rank[:sentence_count])
best_sentences_in_order = sorted(best_sentences)
return ' '.join(sentence_list[index] for index in best_sentences_in_order)
|
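A quick illustration of the index-shifting bug this record fixes: in the original, filter(None, wordsets) drops empty word sets before enumerate() runs, so the surviving sentences are renumbered and the pagerank indexes no longer line up with sentence_list. A minimal, self-contained sketch (the word sets are made up):
# Sketch only: why enumerate-after-filter misnumbers sentences.
wordsets = [{'a'}, set(), {'b'}]  # the middle sentence yielded no content words
before = list(enumerate(filter(None, wordsets)))
# -> [(0, {'a'}), (1, {'b'})]: index 1 now points at the empty sentence in sentence_list
after = [(i, w) for i, w in enumerate(wordsets) if w]
# -> [(0, {'a'}), (2, {'b'})]: indexes still match sentence_list positions
assert before != after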
dc68813d5f555a01f1bdd2511d9d2de820369573
|
conditional/blueprints/spring_evals.py
|
conditional/blueprints/spring_evals.py
|
from flask import Blueprint
from flask import render_template
from flask import request
spring_evals_bp = Blueprint('spring_evals_bp', __name__)
@spring_evals_bp.route('/spring_evals/')
def display_spring_evals():
# get user data
user_name = request.headers.get('x-webauth-user')
members = [
{
'name': "Liam Middlebrook",
'uid': 'loothelion',
'committee_meetings': 24,
'house_meetings_missed': [{'date': "aprial fools fayas ads", 'reason': "I was playing videogames"}],
'major_project': 'open_container',
'major_project_passed': True,
'social_events': "",
'comments': "please don't fail me",
'result': 'Pending'
},
{
'name': "Julien Eid",
'uid': 'jeid',
'committee_meetings': 69,
'house_meetings_missed': [],
'major_project': 'wii-u shit',
'major_project_passed': True,
'social_events': "Manipulation and Opportunism",
'comments': "imdabes",
'result': 'Passed'
}
]
# return names in 'first last (username)' format
return render_template('spring_evals.html',
username = user_name,
members = members)
|
from flask import Blueprint
from flask import render_template
from flask import request
spring_evals_bp = Blueprint('spring_evals_bp', __name__)
@spring_evals_bp.route('/spring_evals/')
def display_spring_evals():
# get user data
user_name = request.headers.get('x-webauth-user')
members = [
{
'name': "Liam Middlebrook",
'uid': 'loothelion',
'committee_meetings': 24,
'house_meetings_missed': [{'date': "aprial fools fayas ads", 'reason': "I was playing videogames"}],
'major_projects': [
{
'name': "open container",
'status': "Passed",
'description': "Riding With A Flask"
}],
'major_projects_len': 1,
'major_project_passed': True,
'social_events': "",
'comments': "please don't fail me",
'result': 'Pending'
},
{
'name': "Julien Eid",
'uid': 'jeid',
'committee_meetings': 69,
'house_meetings_missed': [],
'major_projects': [
{
'name': "wii-u shit",
'status': "Failed",
'description': "Rot 3 Encryption"
}],
'major_projects_len': 1,
'major_project_passed': False,
'social_events': "Manipulation and Opportunism",
'comments': "imdabes",
'result': 'Passed'
}
]
# return names in 'first last (username)' format
return render_template('spring_evals.html',
username = user_name,
members = members)
|
Restructure major projects for spring evals 👷
|
Restructure major projects for spring evals 👷
|
Python
|
mit
|
RamZallan/conditional,ComputerScienceHouse/conditional,RamZallan/conditional,ComputerScienceHouse/conditional,RamZallan/conditional,ComputerScienceHouse/conditional
|
from flask import Blueprint
from flask import render_template
from flask import request
spring_evals_bp = Blueprint('spring_evals_bp', __name__)
@spring_evals_bp.route('/spring_evals/')
def display_spring_evals():
# get user data
user_name = request.headers.get('x-webauth-user')
members = [
{
'name': "Liam Middlebrook",
'uid': 'loothelion',
'committee_meetings': 24,
'house_meetings_missed': [{'date': "aprial fools fayas ads", 'reason': "I was playing videogames"}],
'major_project': 'open_container',
'major_project_passed': True,
'social_events': "",
'comments': "please don't fail me",
'result': 'Pending'
},
{
'name': "Julien Eid",
'uid': 'jeid',
'committee_meetings': 69,
'house_meetings_missed': [],
'major_project': 'wii-u shit',
'major_project_passed': True,
'social_events': "Manipulation and Opportunism",
'comments': "imdabes",
'result': 'Passed'
}
]
# return names in 'first last (username)' format
return render_template('spring_evals.html',
username = user_name,
members = members)
Restructure major projects for spring evals 👷
|
from flask import Blueprint
from flask import render_template
from flask import request
spring_evals_bp = Blueprint('spring_evals_bp', __name__)
@spring_evals_bp.route('/spring_evals/')
def display_spring_evals():
# get user data
user_name = request.headers.get('x-webauth-user')
members = [
{
'name': "Liam Middlebrook",
'uid': 'loothelion',
'committee_meetings': 24,
'house_meetings_missed': [{'date': "aprial fools fayas ads", 'reason': "I was playing videogames"}],
'major_projects': [
{
'name': "open container",
'status': "Passed",
'description': "Riding With A Flask"
}],
'major_projects_len': 1,
'major_project_passed': True,
'social_events': "",
'comments': "please don't fail me",
'result': 'Pending'
},
{
'name': "Julien Eid",
'uid': 'jeid',
'committee_meetings': 69,
'house_meetings_missed': [],
'major_projects': [
{
'name': "wii-u shit",
'status': "Failed",
'description': "Rot 3 Encryption"
}],
'major_projects_len': 1,
'major_project_passed': False,
'social_events': "Manipulation and Opportunism",
'comments': "imdabes",
'result': 'Passed'
}
]
# return names in 'first last (username)' format
return render_template('spring_evals.html',
username = user_name,
members = members)
|
<commit_before>from flask import Blueprint
from flask import render_template
from flask import request
spring_evals_bp = Blueprint('spring_evals_bp', __name__)
@spring_evals_bp.route('/spring_evals/')
def display_spring_evals():
# get user data
user_name = request.headers.get('x-webauth-user')
members = [
{
'name': "Liam Middlebrook",
'uid': 'loothelion',
'committee_meetings': 24,
'house_meetings_missed': [{'date': "aprial fools fayas ads", 'reason': "I was playing videogames"}],
'major_project': 'open_container',
'major_project_passed': True,
'social_events': "",
'comments': "please don't fail me",
'result': 'Pending'
},
{
'name': "Julien Eid",
'uid': 'jeid',
'committee_meetings': 69,
'house_meetings_missed': [],
'major_project': 'wii-u shit',
'major_project_passed': True,
'social_events': "Manipulation and Opportunism",
'comments': "imdabes",
'result': 'Passed'
}
]
# return names in 'first last (username)' format
return render_template('spring_evals.html',
username = user_name,
members = members)
<commit_msg>Restructure major projects for spring evals 👷<commit_after>
|
from flask import Blueprint
from flask import render_template
from flask import request
spring_evals_bp = Blueprint('spring_evals_bp', __name__)
@spring_evals_bp.route('/spring_evals/')
def display_spring_evals():
# get user data
user_name = request.headers.get('x-webauth-user')
members = [
{
'name': "Liam Middlebrook",
'uid': 'loothelion',
'committee_meetings': 24,
'house_meetings_missed': [{'date': "aprial fools fayas ads", 'reason': "I was playing videogames"}],
'major_projects': [
{
'name': "open container",
'status': "Passed",
'description': "Riding With A Flask"
}],
'major_projects_len': 1,
'major_project_passed': True,
'social_events': "",
'comments': "please don't fail me",
'result': 'Pending'
},
{
'name': "Julien Eid",
'uid': 'jeid',
'committee_meetings': 69,
'house_meetings_missed': [],
'major_projects': [
{
'name': "wii-u shit",
'status': "Failed",
'description': "Rot 3 Encryption"
}],
'major_projects_len': 1,
'major_project_passed': False,
'social_events': "Manipulation and Opportunism",
'comments': "imdabes",
'result': 'Passed'
}
]
# return names in 'first last (username)' format
return render_template('spring_evals.html',
username = user_name,
members = members)
|
from flask import Blueprint
from flask import render_template
from flask import request
spring_evals_bp = Blueprint('spring_evals_bp', __name__)
@spring_evals_bp.route('/spring_evals/')
def display_spring_evals():
# get user data
user_name = request.headers.get('x-webauth-user')
members = [
{
'name': "Liam Middlebrook",
'uid': 'loothelion',
'committee_meetings': 24,
'house_meetings_missed': [{'date': "aprial fools fayas ads", 'reason': "I was playing videogames"}],
'major_project': 'open_container',
'major_project_passed': True,
'social_events': "",
'comments': "please don't fail me",
'result': 'Pending'
},
{
'name': "Julien Eid",
'uid': 'jeid',
'committee_meetings': 69,
'house_meetings_missed': [],
'major_project': 'wii-u shit',
'major_project_passed': True,
'social_events': "Manipulation and Opportunism",
'comments': "imdabes",
'result': 'Passed'
}
]
# return names in 'first last (username)' format
return render_template('spring_evals.html',
username = user_name,
members = members)
Restructure major projects for spring evals 👷from flask import Blueprint
from flask import render_template
from flask import request
spring_evals_bp = Blueprint('spring_evals_bp', __name__)
@spring_evals_bp.route('/spring_evals/')
def display_spring_evals():
# get user data
user_name = request.headers.get('x-webauth-user')
members = [
{
'name': "Liam Middlebrook",
'uid': 'loothelion',
'committee_meetings': 24,
'house_meetings_missed': [{'date': "aprial fools fayas ads", 'reason': "I was playing videogames"}],
'major_projects': [
{
'name': "open container",
'status': "Passed",
'description': "Riding With A Flask"
}],
'major_projects_len': 1,
'major_project_passed': True,
'social_events': "",
'comments': "please don't fail me",
'result': 'Pending'
},
{
'name': "Julien Eid",
'uid': 'jeid',
'committee_meetings': 69,
'house_meetings_missed': [],
'major_projects': [
{
'name': "wii-u shit",
'status': "Failed",
'description': "Rot 3 Encryption"
}],
'major_projects_len': 1,
'major_project_passed': False,
'social_events': "Manipulation and Opportunism",
'comments': "imdabes",
'result': 'Passed'
}
]
# return names in 'first last (username)' format
return render_template('spring_evals.html',
username = user_name,
members = members)
|
<commit_before>from flask import Blueprint
from flask import render_template
from flask import request
spring_evals_bp = Blueprint('spring_evals_bp', __name__)
@spring_evals_bp.route('/spring_evals/')
def display_spring_evals():
# get user data
user_name = request.headers.get('x-webauth-user')
members = [
{
'name': "Liam Middlebrook",
'uid': 'loothelion',
'committee_meetings': 24,
'house_meetings_missed': [{'date': "aprial fools fayas ads", 'reason': "I was playing videogames"}],
'major_project': 'open_container',
'major_project_passed': True,
'social_events': "",
'comments': "please don't fail me",
'result': 'Pending'
},
{
'name': "Julien Eid",
'uid': 'jeid',
'committee_meetings': 69,
'house_meetings_missed': [],
'major_project': 'wii-u shit',
'major_project_passed': True,
'social_events': "Manipulation and Opportunism",
'comments': "imdabes",
'result': 'Passed'
}
]
# return names in 'first last (username)' format
return render_template('spring_evals.html',
username = user_name,
members = members)
<commit_msg>Restructure major projects for spring evals 👷<commit_after>from flask import Blueprint
from flask import render_template
from flask import request
spring_evals_bp = Blueprint('spring_evals_bp', __name__)
@spring_evals_bp.route('/spring_evals/')
def display_spring_evals():
# get user data
user_name = request.headers.get('x-webauth-user')
members = [
{
'name': "Liam Middlebrook",
'uid': 'loothelion',
'committee_meetings': 24,
'house_meetings_missed': [{'date': "aprial fools fayas ads", 'reason': "I was playing videogames"}],
'major_projects': [
{
'name': "open container",
'status': "Passed",
'description': "Riding With A Flask"
}],
'major_projects_len': 1,
'major_project_passed': True,
'social_events': "",
'comments': "please don't fail me",
'result': 'Pending'
},
{
'name': "Julien Eid",
'uid': 'jeid',
'committee_meetings': 69,
'house_meetings_missed': [],
'major_projects': [
{
'name': "wii-u shit",
'status': "Failed",
'description': "Rot 3 Encryption"
}],
'major_projects_len': 1,
'major_project_passed': False,
'social_events': "Manipulation and Opportunism",
'comments': "imdabes",
'result': 'Passed'
}
]
# return names in 'first last (username)' format
return render_template('spring_evals.html',
username = user_name,
members = members)
|
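One design note on the reshaped fixture above: the major_projects_len key stores a value the template could derive itself, since len() in the view (or Jinja2's length filter) already gives the list size. A two-line sketch of the equivalent computation:
# Sketch: major_projects_len duplicates information already in the list.
member = {'major_projects': [{'name': "open container"}]}  # example data from the record
assert len(member['major_projects']) == 1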
3ffea173c206d4c8a58953eceb34d80fb66d609b
|
utils/tasks.py
|
utils/tasks.py
|
from celery.task import task
from utils import send_templated_email
@task
def send_templated_email_async(to, subject, body_template, body_dict,
from_email=None, ct="html", fail_silently=False):
return send_templated_email(to,subject, body_template, body_dict,
from_email=None, ct="html", fail_silently=False)
|
from celery.task import task
from utils import send_templated_email
@task
def send_templated_email_async(to, subject, body_template, body_dict,
from_email=None, ct="html", fail_silently=False, check_user_preference=True):
return send_templated_email(to,subject, body_template, body_dict,
from_email=None, ct="html", fail_silently=False)
|
Allow check_user_preferences for email to be passed from async templated email send
|
Allow check_user_preferences for email to be passed from async templated email send
|
Python
|
agpl-3.0
|
ReachingOut/unisubs,norayr/unisubs,wevoice/wesub,eloquence/unisubs,ujdhesa/unisubs,ujdhesa/unisubs,ReachingOut/unisubs,pculture/unisubs,eloquence/unisubs,wevoice/wesub,ReachingOut/unisubs,ofer43211/unisubs,ofer43211/unisubs,ofer43211/unisubs,wevoice/wesub,ujdhesa/unisubs,pculture/unisubs,wevoice/wesub,ujdhesa/unisubs,norayr/unisubs,ReachingOut/unisubs,pculture/unisubs,pculture/unisubs,norayr/unisubs,eloquence/unisubs,eloquence/unisubs,ofer43211/unisubs,norayr/unisubs
|
from celery.task import task
from utils import send_templated_email
@task
def send_templated_email_async(to, subject, body_template, body_dict,
from_email=None, ct="html", fail_silently=False):
return send_templated_email(to,subject, body_template, body_dict,
from_email=None, ct="html", fail_silently=False)
Allow check_user_preferences for email to be passed from async templated email send
|
from celery.task import task
from utils import send_templated_email
@task
def send_templated_email_async(to, subject, body_template, body_dict,
from_email=None, ct="html", fail_silently=False, check_user_preference=True):
return send_templated_email(to,subject, body_template, body_dict,
from_email=None, ct="html", fail_silently=False)
|
<commit_before>from celery.task import task
from utils import send_templated_email
@task
def send_templated_email_async(to, subject, body_template, body_dict,
from_email=None, ct="html", fail_silently=False):
return send_templated_email(to,subject, body_template, body_dict,
from_email=None, ct="html", fail_silently=False)
<commit_msg>Allow check_user_preferences for email to be passed from async templated email send<commit_after>
|
from celery.task import task
from utils import send_templated_email
@task
def send_templated_email_async(to, subject, body_template, body_dict,
from_email=None, ct="html", fail_silently=False, check_user_preference=True):
return send_templated_email(to,subject, body_template, body_dict,
from_email=None, ct="html", fail_silently=False)
|
from celery.task import task
from utils import send_templated_email
@task
def send_templated_email_async(to, subject, body_template, body_dict,
from_email=None, ct="html", fail_silently=False):
return send_templated_email(to,subject, body_template, body_dict,
from_email=None, ct="html", fail_silently=False)
Allow check_user_preferences for email to be passed from async templated email sendfrom celery.task import task
from utils import send_templated_email
@task
def send_templated_email_async(to, subject, body_template, body_dict,
from_email=None, ct="html", fail_silently=False, check_user_preference=True):
return send_templated_email(to,subject, body_template, body_dict,
from_email=None, ct="html", fail_silently=False)
|
<commit_before>from celery.task import task
from utils import send_templated_email
@task
def send_templated_email_async(to, subject, body_template, body_dict,
from_email=None, ct="html", fail_silently=False):
return send_templated_email(to,subject, body_template, body_dict,
from_email=None, ct="html", fail_silently=False)
<commit_msg>Allow check_user_preferences for email to be passed from async templated email send<commit_after>from celery.task import task
from utils import send_templated_email
@task
def send_templated_email_async(to, subject, body_template, body_dict,
from_email=None, ct="html", fail_silently=False, check_user_preference=True):
return send_templated_email(to,subject, body_template, body_dict,
from_email=None, ct="html", fail_silently=False)
|
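Worth flagging in the record above: the new check_user_preference argument is accepted but never forwarded, and the call re-hardcodes from_email=None, ct="html", fail_silently=False rather than passing the incoming values through. A sketch of a fully-forwarding variant, under the assumption that utils.send_templated_email actually takes a check_user_preference keyword:
# Sketch, not the committed code: forwards every argument, including the new flag.
from celery.task import task
from utils import send_templated_email

@task
def send_templated_email_async(to, subject, body_template, body_dict,
                               from_email=None, ct="html", fail_silently=False,
                               check_user_preference=True):
    return send_templated_email(to, subject, body_template, body_dict,
                                from_email=from_email, ct=ct,
                                fail_silently=fail_silently,
                                check_user_preference=check_user_preference)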
45963022a39f8c0f3d57199017adc78b39005d6a
|
openspending/lib/unicode_dict_reader.py
|
openspending/lib/unicode_dict_reader.py
|
# work around python2's csv.py's difficulty with utf8
# partly cribbed from http://stackoverflow.com/questions/5478659/python-module-like-csv-dictreader-with-full-utf8-support
class EmptyCSVError(Exception):
pass
def UnicodeDictReader(file_or_str, encoding='utf8', **kwargs):
import csv
def decode(s, encoding):
if s is None:
return None
return s.decode(encoding)
csv_reader = csv.DictReader(file_or_str, **kwargs)
if not csv_reader.fieldnames:
raise EmptyCSVError("No fieldnames in CSV reader: empty file?")
keymap = dict((k, k.decode(encoding)) for k in csv_reader.fieldnames)
for row in csv_reader:
yield dict((keymap[k], decode(v, encoding)) for k, v in row.iteritems())
|
# work around python2's csv.py's difficulty with utf8
# partly cribbed from http://stackoverflow.com/questions/5478659/python-module-like-csv-dictreader-with-full-utf8-support
import csv
class EmptyCSVError(Exception):
pass
class UnicodeDictReader(object):
def __init__(self, file_or_str, encoding='utf8', **kwargs):
self.encoding = encoding
self.reader = csv.DictReader(file_or_str, **kwargs)
if not self.reader.fieldnames:
raise EmptyCSVError("No fieldnames in CSV reader: empty file?")
self.keymap = dict((k, k.decode(encoding)) for k in self.reader.fieldnames)
def __iter__(self):
return (self._decode_row(row) for row in self.reader)
def _decode_row(self, row):
return dict(
(self.keymap[k], self._decode_str(v)) for k, v in row.iteritems()
)
def _decode_str(self, s):
if s is None:
return None
return s.decode(self.encoding)
|
Convert UnicodeDictReader to an iterator class, so that EmptyCSVError will get thrown on instantiation, rather than on iteration.
|
Convert UnicodeDictReader to an iterator class, so that EmptyCSVError will get thrown on instantiation, rather than on iteration.
|
Python
|
agpl-3.0
|
pudo/spendb,nathanhilbert/FPA_Core,USStateDept/FPA_Core,openspending/spendb,CivicVision/datahub,nathanhilbert/FPA_Core,CivicVision/datahub,spendb/spendb,johnjohndoe/spendb,johnjohndoe/spendb,spendb/spendb,USStateDept/FPA_Core,nathanhilbert/FPA_Core,pudo/spendb,openspending/spendb,johnjohndoe/spendb,openspending/spendb,pudo/spendb,spendb/spendb,CivicVision/datahub,USStateDept/FPA_Core
|
# work around python2's csv.py's difficulty with utf8
# partly cribbed from http://stackoverflow.com/questions/5478659/python-module-like-csv-dictreader-with-full-utf8-support
class EmptyCSVError(Exception):
pass
def UnicodeDictReader(file_or_str, encoding='utf8', **kwargs):
import csv
def decode(s, encoding):
if s is None:
return None
return s.decode(encoding)
csv_reader = csv.DictReader(file_or_str, **kwargs)
if not csv_reader.fieldnames:
raise EmptyCSVError("No fieldnames in CSV reader: empty file?")
keymap = dict((k, k.decode(encoding)) for k in csv_reader.fieldnames)
for row in csv_reader:
yield dict((keymap[k], decode(v, encoding)) for k, v in row.iteritems())
Convert UnicodeDictReader to an iterator class, so that EmptyCSVError will get thrown on instantiation, rather than on iteration.
|
# work around python2's csv.py's difficulty with utf8
# partly cribbed from http://stackoverflow.com/questions/5478659/python-module-like-csv-dictreader-with-full-utf8-support
import csv
class EmptyCSVError(Exception):
pass
class UnicodeDictReader(object):
def __init__(self, file_or_str, encoding='utf8', **kwargs):
self.encoding = encoding
self.reader = csv.DictReader(file_or_str, **kwargs)
if not self.reader.fieldnames:
raise EmptyCSVError("No fieldnames in CSV reader: empty file?")
self.keymap = dict((k, k.decode(encoding)) for k in self.reader.fieldnames)
def __iter__(self):
return (self._decode_row(row) for row in self.reader)
def _decode_row(self, row):
return dict(
(self.keymap[k], self._decode_str(v)) for k, v in row.iteritems()
)
def _decode_str(self, s):
if s is None:
return None
return s.decode(self.encoding)
|
<commit_before># work around python2's csv.py's difficulty with utf8
# partly cribbed from http://stackoverflow.com/questions/5478659/python-module-like-csv-dictreader-with-full-utf8-support
class EmptyCSVError(Exception):
pass
def UnicodeDictReader(file_or_str, encoding='utf8', **kwargs):
import csv
def decode(s, encoding):
if s is None:
return None
return s.decode(encoding)
csv_reader = csv.DictReader(file_or_str, **kwargs)
if not csv_reader.fieldnames:
raise EmptyCSVError("No fieldnames in CSV reader: empty file?")
keymap = dict((k, k.decode(encoding)) for k in csv_reader.fieldnames)
for row in csv_reader:
yield dict((keymap[k], decode(v, encoding)) for k, v in row.iteritems())
<commit_msg>Convert UnicodeDictReader to an iterator class, so that EmptyCSVError will get thrown on instantiation, rather than on iteration.<commit_after>
|
# work around python2's csv.py's difficulty with utf8
# partly cribbed from http://stackoverflow.com/questions/5478659/python-module-like-csv-dictreader-with-full-utf8-support
import csv
class EmptyCSVError(Exception):
pass
class UnicodeDictReader(object):
def __init__(self, file_or_str, encoding='utf8', **kwargs):
self.encoding = encoding
self.reader = csv.DictReader(file_or_str, **kwargs)
if not self.reader.fieldnames:
raise EmptyCSVError("No fieldnames in CSV reader: empty file?")
self.keymap = dict((k, k.decode(encoding)) for k in self.reader.fieldnames)
def __iter__(self):
return (self._decode_row(row) for row in self.reader)
def _decode_row(self, row):
return dict(
(self.keymap[k], self._decode_str(v)) for k, v in row.iteritems()
)
def _decode_str(self, s):
if s is None:
return None
return s.decode(self.encoding)
|
# work around python2's csv.py's difficulty with utf8
# partly cribbed from http://stackoverflow.com/questions/5478659/python-module-like-csv-dictreader-with-full-utf8-support
class EmptyCSVError(Exception):
pass
def UnicodeDictReader(file_or_str, encoding='utf8', **kwargs):
import csv
def decode(s, encoding):
if s is None:
return None
return s.decode(encoding)
csv_reader = csv.DictReader(file_or_str, **kwargs)
if not csv_reader.fieldnames:
raise EmptyCSVError("No fieldnames in CSV reader: empty file?")
keymap = dict((k, k.decode(encoding)) for k in csv_reader.fieldnames)
for row in csv_reader:
yield dict((keymap[k], decode(v, encoding)) for k, v in row.iteritems())
Convert UnicodeDictReader to an iterator class, so that EmptyCSVError will get thrown on instantiation, rather than on iteration.# work around python2's csv.py's difficulty with utf8
# partly cribbed from http://stackoverflow.com/questions/5478659/python-module-like-csv-dictreader-with-full-utf8-support
import csv
class EmptyCSVError(Exception):
pass
class UnicodeDictReader(object):
def __init__(self, file_or_str, encoding='utf8', **kwargs):
self.encoding = encoding
self.reader = csv.DictReader(file_or_str, **kwargs)
if not self.reader.fieldnames:
raise EmptyCSVError("No fieldnames in CSV reader: empty file?")
self.keymap = dict((k, k.decode(encoding)) for k in self.reader.fieldnames)
def __iter__(self):
return (self._decode_row(row) for row in self.reader)
def _decode_row(self, row):
return dict(
(self.keymap[k], self._decode_str(v)) for k, v in row.iteritems()
)
def _decode_str(self, s):
if s is None:
return None
return s.decode(self.encoding)
|
<commit_before># work around python2's csv.py's difficulty with utf8
# partly cribbed from http://stackoverflow.com/questions/5478659/python-module-like-csv-dictreader-with-full-utf8-support
class EmptyCSVError(Exception):
pass
def UnicodeDictReader(file_or_str, encoding='utf8', **kwargs):
import csv
def decode(s, encoding):
if s is None:
return None
return s.decode(encoding)
csv_reader = csv.DictReader(file_or_str, **kwargs)
if not csv_reader.fieldnames:
raise EmptyCSVError("No fieldnames in CSV reader: empty file?")
keymap = dict((k, k.decode(encoding)) for k in csv_reader.fieldnames)
for row in csv_reader:
yield dict((keymap[k], decode(v, encoding)) for k, v in row.iteritems())
<commit_msg>Convert UnicodeDictReader to an iterator class, so that EmptyCSVError will get thrown on instantiation, rather than on iteration.<commit_after># work around python2's csv.py's difficulty with utf8
# partly cribbed from http://stackoverflow.com/questions/5478659/python-module-like-csv-dictreader-with-full-utf8-support
import csv
class EmptyCSVError(Exception):
pass
class UnicodeDictReader(object):
def __init__(self, file_or_str, encoding='utf8', **kwargs):
self.encoding = encoding
self.reader = csv.DictReader(file_or_str, **kwargs)
if not self.reader.fieldnames:
raise EmptyCSVError("No fieldnames in CSV reader: empty file?")
self.keymap = dict((k, k.decode(encoding)) for k in self.reader.fieldnames)
def __iter__(self):
return (self._decode_row(row) for row in self.reader)
def _decode_row(self, row):
return dict(
(self.keymap[k], self._decode_str(v)) for k, v in row.iteritems()
)
def _decode_str(self, s):
if s is None:
return None
return s.decode(self.encoding)
|
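A short usage sketch for the refactor above: moving the fieldnames check into __init__ means an empty file now fails when the reader is constructed instead of on the first iteration step, which is the behaviour the commit message describes. Python 2 semantics are assumed (str rows decoded lazily, matching the module); the sample bytes are invented:
# Sketch: EmptyCSVError surfaces at construction time, not at iteration time.
import io

try:
    UnicodeDictReader(io.BytesIO(b""))  # empty input -> raises immediately
except EmptyCSVError:
    pass  # caught while constructing the reader

rows = UnicodeDictReader(io.BytesIO(b"name,city\nJos\xc3\xa9,Par\xc3\xads\n"))
for row in rows:  # rows decode lazily, one at a time
    values = (row[u'name'], row[u'city'])  # both values are unicode now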
db935a152efc8ab730491fc860db6d4c9cf65c5f
|
test/test_packages.py
|
test/test_packages.py
|
import pytest
@pytest.mark.parametrize("name", [
("apt-file"),
("apt-transport-https"),
("atom"),
("blktrace"),
("ca-certificates"),
("chromium-browser"),
("cron"),
("curl"),
("diod"),
("docker-ce"),
("fonts-font-awesome"),
("git"),
("gnupg"),
("gnupg2"),
("gnupg-agent"),
("handbrake"),
("handbrake-cli"),
("haveged"),
("htop"),
("i3"),
("iotop"),
("language-pack-en-base"),
("laptop-mode-tools"),
("nfs-common"),
("ntop"),
("ntp"),
("openssh-client"),
("openssh-server"),
("openssh-sftp-server"),
("openssl"),
("pinta"),
("python"),
("python-pip"),
("scrot"),
("software-properties-common"),
("suckless-tools"),
("sysstat"),
("tree"),
("vagrant"),
("vim"),
("virtualbox"),
("vlc"),
("wget"),
("whois"),
("x264"),
("xfce4-terminal"),
("xfonts-terminus"),
("xinit"),
])
def test_packages(Package, name):
assert Package(name).is_installed
|
import pytest
@pytest.mark.parametrize("name", [
("apt-file"),
("apt-transport-https"),
("atom"),
("blktrace"),
("ca-certificates"),
("chromium-browser"),
("cron"),
("curl"),
("diod"),
("docker-ce"),
("fonts-font-awesome"),
("git"),
("gnupg"),
("gnupg2"),
("gnupg-agent"),
("handbrake"),
("handbrake-cli"),
("haveged"),
("htop"),
("i3"),
("iotop"),
("language-pack-en-base"),
("laptop-mode-tools"),
("nfs-common"),
("ntop"),
("ntp"),
("openssh-client"),
("openssh-server"),
("openssh-sftp-server"),
("openssl"),
("pinta"),
("python"),
("python-pip"),
("scrot"),
("software-properties-common"),
("suckless-tools"),
("sysstat"),
("tree"),
("vagrant"),
("vim"),
("virtualbox"),
("vlc"),
("wget"),
("whois"),
("x264"),
("xfce4-terminal"),
("xfonts-terminus"),
("xinit"),
])
def test_packages(host, name):
pkg = host.package(name)
assert pkg.is_installed
|
Change test function as existing method deprecated
|
Change test function as existing method deprecated
|
Python
|
mit
|
wicksy/laptop-build,wicksy/laptop-build,wicksy/laptop-build,wicksy/laptop-build
|
import pytest
@pytest.mark.parametrize("name", [
("apt-file"),
("apt-transport-https"),
("atom"),
("blktrace"),
("ca-certificates"),
("chromium-browser"),
("cron"),
("curl"),
("diod"),
("docker-ce"),
("fonts-font-awesome"),
("git"),
("gnupg"),
("gnupg2"),
("gnupg-agent"),
("handbrake"),
("handbrake-cli"),
("haveged"),
("htop"),
("i3"),
("iotop"),
("language-pack-en-base"),
("laptop-mode-tools"),
("nfs-common"),
("ntop"),
("ntp"),
("openssh-client"),
("openssh-server"),
("openssh-sftp-server"),
("openssl"),
("pinta"),
("python"),
("python-pip"),
("scrot"),
("software-properties-common"),
("suckless-tools"),
("sysstat"),
("tree"),
("vagrant"),
("vim"),
("virtualbox"),
("vlc"),
("wget"),
("whois"),
("x264"),
("xfce4-terminal"),
("xfonts-terminus"),
("xinit"),
])
def test_packages(Package, name):
assert Package(name).is_installed
Change test function as existing method deprecated
|
import pytest
@pytest.mark.parametrize("name", [
("apt-file"),
("apt-transport-https"),
("atom"),
("blktrace"),
("ca-certificates"),
("chromium-browser"),
("cron"),
("curl"),
("diod"),
("docker-ce"),
("fonts-font-awesome"),
("git"),
("gnupg"),
("gnupg2"),
("gnupg-agent"),
("handbrake"),
("handbrake-cli"),
("haveged"),
("htop"),
("i3"),
("iotop"),
("language-pack-en-base"),
("laptop-mode-tools"),
("nfs-common"),
("ntop"),
("ntp"),
("openssh-client"),
("openssh-server"),
("openssh-sftp-server"),
("openssl"),
("pinta"),
("python"),
("python-pip"),
("scrot"),
("software-properties-common"),
("suckless-tools"),
("sysstat"),
("tree"),
("vagrant"),
("vim"),
("virtualbox"),
("vlc"),
("wget"),
("whois"),
("x264"),
("xfce4-terminal"),
("xfonts-terminus"),
("xinit"),
])
def test_packages(host, name):
pkg = host.package(name)
assert pkg.is_installed
|
<commit_before>import pytest
@pytest.mark.parametrize("name", [
("apt-file"),
("apt-transport-https"),
("atom"),
("blktrace"),
("ca-certificates"),
("chromium-browser"),
("cron"),
("curl"),
("diod"),
("docker-ce"),
("fonts-font-awesome"),
("git"),
("gnupg"),
("gnupg2"),
("gnupg-agent"),
("handbrake"),
("handbrake-cli"),
("haveged"),
("htop"),
("i3"),
("iotop"),
("language-pack-en-base"),
("laptop-mode-tools"),
("nfs-common"),
("ntop"),
("ntp"),
("openssh-client"),
("openssh-server"),
("openssh-sftp-server"),
("openssl"),
("pinta"),
("python"),
("python-pip"),
("scrot"),
("software-properties-common"),
("suckless-tools"),
("sysstat"),
("tree"),
("vagrant"),
("vim"),
("virtualbox"),
("vlc"),
("wget"),
("whois"),
("x264"),
("xfce4-terminal"),
("xfonts-terminus"),
("xinit"),
])
def test_packages(Package, name):
assert Package(name).is_installed
<commit_msg>Change test function as existing method deprecated<commit_after>
|
import pytest
@pytest.mark.parametrize("name", [
("apt-file"),
("apt-transport-https"),
("atom"),
("blktrace"),
("ca-certificates"),
("chromium-browser"),
("cron"),
("curl"),
("diod"),
("docker-ce"),
("fonts-font-awesome"),
("git"),
("gnupg"),
("gnupg2"),
("gnupg-agent"),
("handbrake"),
("handbrake-cli"),
("haveged"),
("htop"),
("i3"),
("iotop"),
("language-pack-en-base"),
("laptop-mode-tools"),
("nfs-common"),
("ntop"),
("ntp"),
("openssh-client"),
("openssh-server"),
("openssh-sftp-server"),
("openssl"),
("pinta"),
("python"),
("python-pip"),
("scrot"),
("software-properties-common"),
("suckless-tools"),
("sysstat"),
("tree"),
("vagrant"),
("vim"),
("virtualbox"),
("vlc"),
("wget"),
("whois"),
("x264"),
("xfce4-terminal"),
("xfonts-terminus"),
("xinit"),
])
def test_packages(host, name):
pkg = host.package(name)
assert pkg.is_installed
|
import pytest
@pytest.mark.parametrize("name", [
("apt-file"),
("apt-transport-https"),
("atom"),
("blktrace"),
("ca-certificates"),
("chromium-browser"),
("cron"),
("curl"),
("diod"),
("docker-ce"),
("fonts-font-awesome"),
("git"),
("gnupg"),
("gnupg2"),
("gnupg-agent"),
("handbrake"),
("handbrake-cli"),
("haveged"),
("htop"),
("i3"),
("iotop"),
("language-pack-en-base"),
("laptop-mode-tools"),
("nfs-common"),
("ntop"),
("ntp"),
("openssh-client"),
("openssh-server"),
("openssh-sftp-server"),
("openssl"),
("pinta"),
("python"),
("python-pip"),
("scrot"),
("software-properties-common"),
("suckless-tools"),
("sysstat"),
("tree"),
("vagrant"),
("vim"),
("virtualbox"),
("vlc"),
("wget"),
("whois"),
("x264"),
("xfce4-terminal"),
("xfonts-terminus"),
("xinit"),
])
def test_packages(Package, name):
assert Package(name).is_installed
Change test function as existing method deprecatedimport pytest
@pytest.mark.parametrize("name", [
("apt-file"),
("apt-transport-https"),
("atom"),
("blktrace"),
("ca-certificates"),
("chromium-browser"),
("cron"),
("curl"),
("diod"),
("docker-ce"),
("fonts-font-awesome"),
("git"),
("gnupg"),
("gnupg2"),
("gnupg-agent"),
("handbrake"),
("handbrake-cli"),
("haveged"),
("htop"),
("i3"),
("iotop"),
("language-pack-en-base"),
("laptop-mode-tools"),
("nfs-common"),
("ntop"),
("ntp"),
("openssh-client"),
("openssh-server"),
("openssh-sftp-server"),
("openssl"),
("pinta"),
("python"),
("python-pip"),
("scrot"),
("software-properties-common"),
("suckless-tools"),
("sysstat"),
("tree"),
("vagrant"),
("vim"),
("virtualbox"),
("vlc"),
("wget"),
("whois"),
("x264"),
("xfce4-terminal"),
("xfonts-terminus"),
("xinit"),
])
def test_packages(host, name):
pkg = host.package(name)
assert pkg.is_installed
|
<commit_before>import pytest
@pytest.mark.parametrize("name", [
("apt-file"),
("apt-transport-https"),
("atom"),
("blktrace"),
("ca-certificates"),
("chromium-browser"),
("cron"),
("curl"),
("diod"),
("docker-ce"),
("fonts-font-awesome"),
("git"),
("gnupg"),
("gnupg2"),
("gnupg-agent"),
("handbrake"),
("handbrake-cli"),
("haveged"),
("htop"),
("i3"),
("iotop"),
("language-pack-en-base"),
("laptop-mode-tools"),
("nfs-common"),
("ntop"),
("ntp"),
("openssh-client"),
("openssh-server"),
("openssh-sftp-server"),
("openssl"),
("pinta"),
("python"),
("python-pip"),
("scrot"),
("software-properties-common"),
("suckless-tools"),
("sysstat"),
("tree"),
("vagrant"),
("vim"),
("virtualbox"),
("vlc"),
("wget"),
("whois"),
("x264"),
("xfce4-terminal"),
("xfonts-terminus"),
("xinit"),
])
def test_packages(Package, name):
assert Package(name).is_installed
<commit_msg>Change test function as existing method deprecated<commit_after>import pytest
@pytest.mark.parametrize("name", [
("apt-file"),
("apt-transport-https"),
("atom"),
("blktrace"),
("ca-certificates"),
("chromium-browser"),
("cron"),
("curl"),
("diod"),
("docker-ce"),
("fonts-font-awesome"),
("git"),
("gnupg"),
("gnupg2"),
("gnupg-agent"),
("handbrake"),
("handbrake-cli"),
("haveged"),
("htop"),
("i3"),
("iotop"),
("language-pack-en-base"),
("laptop-mode-tools"),
("nfs-common"),
("ntop"),
("ntp"),
("openssh-client"),
("openssh-server"),
("openssh-sftp-server"),
("openssl"),
("pinta"),
("python"),
("python-pip"),
("scrot"),
("software-properties-common"),
("suckless-tools"),
("sysstat"),
("tree"),
("vagrant"),
("vim"),
("virtualbox"),
("vlc"),
("wget"),
("whois"),
("x264"),
("xfce4-terminal"),
("xfonts-terminus"),
("xinit"),
])
def test_packages(host, name):
pkg = host.package(name)
assert pkg.is_installed
|
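For context on the deprecation this record works around: Testinfra folded its per-module fixtures (Package, File, Service, ...) into the single host fixture, with the capitalized ones deprecated and later removed, so Package(name) becomes host.package(name). The same pattern for another resource type, as a minimal sketch against a current Testinfra install:
# Sketch: host is the single entry point in current Testinfra.
def test_ssh_service(host):
    svc = host.service("ssh")  # replaces the removed Service fixture
    assert svc.is_running
    assert svc.is_enabled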
7d621db3618db90679461550fb0c952417616402
|
bot.py
|
bot.py
|
import asyncio
import configparser
import discord
from discord.ext import commands
class Bot(commands.Bot):
def __init__(self, config_filepath, command_prefix):
super().__init__(command_prefix)
self._load_config_data(config_filepath)
def _load_config_data(self, filepath):
config = configparser.ConfigParser()
config.read(filepath)
self.email = config['LOGIN']['email']
self.password = config['LOGIN']['password']
self.owner_ID = config['OWNER']['id']
self.twitch_ID = config['TWITCH']['client_id']
self.carbon_key = config['CARBON']['key']
def run(self):
pass
|
import asyncio
import configparser
import discord
from discord.ext import commands
class Bot(commands.Bot):
def __init__(self, config_filepath, command_prefix):
super().__init__(command_prefix)
self._load_config_data(config_filepath)
def _load_config_data(self, filepath):
config = configparser.ConfigParser()
config.read(filepath)
self.email = config['LOGIN']['email']
self.password = config['LOGIN']['password']
self.owner_ID = config['OWNER']['id']
self.twitch_ID = config['TWITCH']['client_id']
self.carbon_key = config['CARBON']['key']
async def on_ready(self):
print("Logged in as {}".format(self.user.name))
print("User ID: {}".format(self.user.id))
print("Library: {} - {}".format(discord.__title__, discord.__version__))
def run(self):
try:
self.loop.run_until_complete(self.start(self.email, self.password))
except KeyboardInterrupt:
self.loop.run_until_complete(self.logout())
finally:
self.loop.close()
|
Add Bot.run and an on_ready message
|
Add Bot.run and an on_ready message
|
Python
|
mit
|
MagiChau/ZonBot
|
import asyncio
import configparser
import discord
from discord.ext import commands
class Bot(commands.Bot):
def __init__(self, config_filepath, command_prefix):
super().__init__(command_prefix)
self._load_config_data(config_filepath)
def _load_config_data(self, filepath):
config = configparser.ConfigParser()
config.read(filepath)
self.email = config['LOGIN']['email']
self.password = config['LOGIN']['password']
self.owner_ID = config['OWNER']['id']
self.twitch_ID = config['TWITCH']['client_id']
self.carbon_key = config['CARBON']['key']
def run(self):
passAdd Bot.run and an on_ready message
|
import asyncio
import configparser
import discord
from discord.ext import commands
class Bot(commands.Bot):
def __init__(self, config_filepath, command_prefix):
super().__init__(command_prefix)
self._load_config_data(config_filepath)
def _load_config_data(self, filepath):
config = configparser.ConfigParser()
config.read(filepath)
self.email = config['LOGIN']['email']
self.password = config['LOGIN']['password']
self.owner_ID = config['OWNER']['id']
self.twitch_ID = config['TWITCH']['client_id']
self.carbon_key = config['CARBON']['key']
async def on_ready(self):
print("Logged in as {}".format(self.user.name))
print("User ID: {}".format(self.user.id))
print("Library: {} - {}".format(discord.__title__, discord.__version__))
def run(self):
try:
self.loop.run_until_complete(self.start(self.email, self.password))
except KeyboardInterrupt:
self.loop.run_until_complete(self.logout())
finally:
self.loop.close()
|
<commit_before>import asyncio
import configparser
import discord
from discord.ext import commands
class Bot(commands.Bot):
def __init__(self, config_filepath, command_prefix):
super().__init__(command_prefix)
self._load_config_data(config_filepath)
def _load_config_data(self, filepath):
config = configparser.ConfigParser()
config.read(filepath)
self.email = config['LOGIN']['email']
self.password = config['LOGIN']['password']
self.owner_ID = config['OWNER']['id']
self.twitch_ID = config['TWITCH']['client_id']
self.carbon_key = config['CARBON']['key']
def run(self):
pass<commit_msg>Add Bot.run and an on_ready message<commit_after>
|
import asyncio
import configparser
import discord
from discord.ext import commands
class Bot(commands.Bot):
def __init__(self, config_filepath, command_prefix):
super().__init__(command_prefix)
self._load_config_data(config_filepath)
def _load_config_data(self, filepath):
config = configparser.ConfigParser()
config.read(filepath)
self.email = config['LOGIN']['email']
self.password = config['LOGIN']['password']
self.owner_ID = config['OWNER']['id']
self.twitch_ID = config['TWITCH']['client_id']
self.carbon_key = config['CARBON']['key']
async def on_ready(self):
print("Logged in as {}".format(self.user.name))
print("User ID: {}".format(self.user.id))
print("Library: {} - {}".format(discord.__title__, discord.__version__))
def run(self):
try:
self.loop.run_until_complete(self.start(self.email, self.password))
except KeyboardInterrupt:
self.loop.run_until_complete(self.logout())
finally:
self.loop.close()
|
import asyncio
import configparser
import discord
from discord.ext import commands
class Bot(commands.Bot):
def __init__(self, config_filepath, command_prefix):
super().__init__(command_prefix)
self._load_config_data(config_filepath)
def _load_config_data(self, filepath):
config = configparser.ConfigParser()
config.read(filepath)
self.email = config['LOGIN']['email']
self.password = config['LOGIN']['password']
self.owner_ID = config['OWNER']['id']
self.twitch_ID = config['TWITCH']['client_id']
self.carbon_key = config['CARBON']['key']
def run(self):
passAdd Bot.run and an on_ready messageimport asyncio
import configparser
import discord
from discord.ext import commands
class Bot(commands.Bot):
def __init__(self, config_filepath, command_prefix):
super().__init__(command_prefix)
self._load_config_data(config_filepath)
def _load_config_data(self, filepath):
config = configparser.ConfigParser()
config.read(filepath)
self.email = config['LOGIN']['email']
self.password = config['LOGIN']['password']
self.owner_ID = config['OWNER']['id']
self.twitch_ID = config['TWITCH']['client_id']
self.carbon_key = config['CARBON']['key']
async def on_ready(self):
print("Logged in as {}".format(self.user.name))
print("User ID: {}".format(self.user.id))
print("Library: {} - {}".format(discord.__title__, discord.__version__))
def run(self):
try:
self.loop.run_until_complete(self.start(self.email, self.password))
except KeyboardInterrupt:
self.loop.run_until_complete(self.logout())
finally:
self.loop.close()
|
<commit_before>import asyncio
import configparser
import discord
from discord.ext import commands
class Bot(commands.Bot):
def __init__(self, config_filepath, command_prefix):
super().__init__(command_prefix)
self._load_config_data(config_filepath)
def _load_config_data(self, filepath):
config = configparser.ConfigParser()
config.read(filepath)
self.email = config['LOGIN']['email']
self.password = config['LOGIN']['password']
self.owner_ID = config['OWNER']['id']
self.twitch_ID = config['TWITCH']['client_id']
self.carbon_key = config['CARBON']['key']
def run(self):
pass<commit_msg>Add Bot.run and an on_ready message<commit_after>import asyncio
import configparser
import discord
from discord.ext import commands
class Bot(commands.Bot):
def __init__(self, config_filepath, command_prefix):
super().__init__(command_prefix)
self._load_config_data(config_filepath)
def _load_config_data(self, filepath):
config = configparser.ConfigParser()
config.read(filepath)
self.email = config['LOGIN']['email']
self.password = config['LOGIN']['password']
self.owner_ID = config['OWNER']['id']
self.twitch_ID = config['TWITCH']['client_id']
self.carbon_key = config['CARBON']['key']
async def on_ready(self):
print("Logged in as {}".format(self.user.name))
print("User ID: {}".format(self.user.id))
print("Library: {} - {}".format(discord.__title__, discord.__version__))
def run(self):
try:
self.loop.run_until_complete(self.start(self.email, self.password))
except KeyboardInterrupt:
self.loop.run_until_complete(self.logout())
finally:
self.loop.close()
|
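A usage sketch for the run() method added above: the entry-point script constructs the bot and calls run(), which drives the event loop until interrupted and then logs out before closing the loop. The config path and command prefix below are placeholders:
# Sketch: hypothetical entry point for the Bot class in this record.
if __name__ == '__main__':
    bot = Bot('config.ini', command_prefix='!')  # both values are examples
    bot.run()  # blocks; Ctrl-C triggers logout() before the loop closes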
ee2ed250fdf42d0e4616f0e783ff1ace0a201514
|
scripts/spat_conlisk_univariate_fig2.py
|
scripts/spat_conlisk_univariate_fig2.py
|
"""
Purpose: to recreate the univariate pdfs of Conlisk et al. (2007)
in figure 2. This provides a test that the univariate pdfs are
working correctly
"""
import numpy as np
import mete
import csv
n0 = 617
c = 256
sing_pdfs = np.zeros((4, 7))
psi = [0.01, 0.25, 0.5, 0.75]
for i in range(0, len(psi)):
sing_pdfs[i, : ] = [mete.single_prob(n, n0, psi[i], c) for n in range(0, 7)]
writer = open('./data/conlisk_data_fig2a.csv', 'wb')
datawriter = csv.writer(writer)
for i in range(0, np.shape(sing_pdfs)[0]):
datawriter.writerow(sing_pdfs[i, ])
writer.close()
|
"""
Purpose: to recreate the univariate pdfs of Conlisk et al. (2007)
in figure 2. This provides a test that the univariate pdfs are
working correctly
"""
import numpy as np
import mete
import matplotlib.pyplot as plt
n0 = 617
c = 256
sing_pdfs = np.zeros((4, 8))
psi = [0.01, 0.25, 0.5, 0.75]
for i in range(0, len(psi)):
sing_pdfs[i, : ] = [mete.single_prob(n, n0, psi[i], c) for n in range(0, 8)]
n = range(0, 8)
plt.plot(n, sing_pdfs[0, :], color='black', linewidth=1)
plt.plot(n, sing_pdfs[1, :], color='black', linewidth=1, ls='--')
plt.plot(n, sing_pdfs[2, :], color='black', linewidth=2)
plt.plot(n, sing_pdfs[3, :], color='lightgrey', linewidth=2)
plt.axis([0, 7, 0, 0.9])
plt.xlabel('n')
plt.ylabel('P(n)')
plt.legend(['psi = 0.01', 'psi = 0.25', 'psi = 0.50','psi = 0.75'])
plt.savefig('../figs/conlisk_univaritate_fig2a.pdf')
|
Create a pdf of the univariate pdfs instead of writing to a .csv file.
|
Create a pdf of the univariate pdfs instead of writing to a .csv file.
|
Python
|
mit
|
weecology/mete-spatial,weecology/mete-spatial,weecology/mete-spatial,weecology/mete-spatial
|
"""
Purpose: to recreate the univariate pdfs of Conlisk et al. (2007)
in figure 2. This provides a test that the univariate pdfs are
working correctly
"""
import numpy as np
import mete
import csv
n0 = 617
c = 256
sing_pdfs = np.zeros((4, 7))
psi = [0.01, 0.25, 0.5, 0.75]
for i in range(0, len(psi)):
sing_pdfs[i, : ] = [mete.single_prob(n, n0, psi[i], c) for n in range(0, 7)]
writer = open('./data/conlisk_data_fig2a.csv', 'wb')
datawriter = csv.writer(writer)
for i in range(0, np.shape(sing_pdfs)[0]):
datawriter.writerow(sing_pdfs[i, ])
writer.close()
Create a pdf of the univariate pdfs instead of writing to a .csv file.
|
"""
Purpose: to recreate the univariate pdfs of Conlisk et al. (2007)
in figure 2. This provides a test that the univariate pdfs are
working correctly
"""
import numpy as np
import mete
import matplotlib.pyplot as plt
n0 = 617
c = 256
sing_pdfs = np.zeros((4, 8))
psi = [0.01, 0.25, 0.5, 0.75]
for i in range(0, len(psi)):
sing_pdfs[i, : ] = [mete.single_prob(n, n0, psi[i], c) for n in range(0, 8)]
n = range(0, 8)
plt.plot(n, sing_pdfs[0, :], color='black', linewidth=1)
plt.plot(n, sing_pdfs[1, :], color='black', linewidth=1, ls='--')
plt.plot(n, sing_pdfs[2, :], color='black', linewidth=2)
plt.plot(n, sing_pdfs[3, :], color='lightgrey', linewidth=2)
plt.axis([0, 7, 0, 0.9])
plt.xlabel('n')
plt.ylabel('P(n)')
plt.legend(['psi = 0.01', 'psi = 0.25', 'psi = 0.50','psi = 0.75'])
plt.savefig('../figs/conlisk_univaritate_fig2a.pdf')
|
<commit_before>"""
Purpose: to recreate the univariate pdfs of Conlisk et al. (2007)
in figure 2. This provides a test that the univariate pdfs are
working correctly
"""
import numpy as np
import mete
import csv
n0 = 617
c = 256
sing_pdfs = np.zeros((4, 7))
psi = [0.01, 0.25, 0.5, 0.75]
for i in range(0, len(psi)):
sing_pdfs[i, : ] = [mete.single_prob(n, n0, psi[i], c) for n in range(0, 7)]
writer = open('./data/conlisk_data_fig2a.csv', 'wb')
datawriter = csv.writer(writer)
for i in range(0, np.shape(sing_pdfs)[0]):
datawriter.writerow(sing_pdfs[i, ])
writer.close()
<commit_msg>Create a pdf of the univariate pdfs instead of writing to a .csv file.<commit_after>
|
"""
Purpose: to recreate the univariate pdfs of Conlisk et al. (2007)
in figure 2. This provides a test that the univariate pdfs are
working correctly
"""
import numpy as np
import mete
import matplotlib.pyplot as plt
n0 = 617
c = 256
sing_pdfs = np.zeros((4, 8))
psi = [0.01, 0.25, 0.5, 0.75]
for i in range(0, len(psi)):
sing_pdfs[i, : ] = [mete.single_prob(n, n0, psi[i], c) for n in range(0, 8)]
n = range(0, 8)
plt.plot(n, sing_pdfs[0, :], color='black', linewidth=1)
plt.plot(n, sing_pdfs[1, :], color='black', linewidth=1, ls='--')
plt.plot(n, sing_pdfs[2, :], color='black', linewidth=2)
plt.plot(n, sing_pdfs[3, :], color='lightgrey', linewidth=2)
plt.axis([0, 7, 0, 0.9])
plt.xlabel('n')
plt.ylabel('P(n)')
plt.legend(['psi = 0.01', 'psi = 0.25', 'psi = 0.50','psi = 0.75'])
plt.savefig('../figs/conlisk_univariate_fig2a.pdf')
|
"""
Purpose: to recreate the univariate pdfs of Conlisk et al. (2007)
in figure 2. This provides a test that the univariate pdfs are
working correctly
"""
import numpy as np
import mete
import csv
n0 = 617
c = 256
sing_pdfs = np.zeros((4, 7))
psi = [0.01, 0.25, 0.5, 0.75]
for i in range(0, len(psi)):
sing_pdfs[i, : ] = [mete.single_prob(n, n0, psi[i], c) for n in range(0, 7)]
writer = open('./data/conlisk_data_fig2a.csv', 'wb')
datawriter = csv.writer(writer)
for i in range(0, np.shape(sing_pdfs)[0]):
datawriter.writerow(sing_pdfs[i, ])
writer.close()
Create a pdf of the univariate pdfs instead of writing to a .csv file."""
Purpose: to recreate the univariate pdfs of Conlisk et al. (2007)
in figure 2. This provides a test that the univariate pdfs are
working correctly
"""
import numpy as np
import mete
import matplotlib.pyplot as plt
n0 = 617
c = 256
sing_pdfs = np.zeros((4, 8))
psi = [0.01, 0.25, 0.5, 0.75]
for i in range(0, len(psi)):
sing_pdfs[i, : ] = [mete.single_prob(n, n0, psi[i], c) for n in range(0, 8)]
n = range(0, 8)
plt.plot(n, sing_pdfs[0, :], color='black', linewidth=1)
plt.plot(n, sing_pdfs[1, :], color='black', linewidth=1, ls='--')
plt.plot(n, sing_pdfs[2, :], color='black', linewidth=2)
plt.plot(n, sing_pdfs[3, :], color='lightgrey', linewidth=2)
plt.axis([0, 7, 0, 0.9])
plt.xlabel('n')
plt.ylabel('P(n)')
plt.legend(['psi = 0.01', 'psi = 0.25', 'psi = 0.50','psi = 0.75'])
plt.savefig('../figs/conlisk_univariate_fig2a.pdf')
|
<commit_before>"""
Purpose: to recreate the univariate pdfs of Conlisk et al. (2007)
in figure 2. This provides a test that the univariate pdfs are
working correctly
"""
import numpy as np
import mete
import csv
n0 = 617
c = 256
sing_pdfs = np.zeros((4, 7))
psi = [0.01, 0.25, 0.5, 0.75]
for i in range(0, len(psi)):
sing_pdfs[i, : ] = [mete.single_prob(n, n0, psi[i], c) for n in range(0, 7)]
writer = open('./data/conlisk_data_fig2a.csv', 'wb')
datawriter = csv.writer(writer)
for i in range(0, np.shape(sing_pdfs)[0]):
datawriter.writerow(sing_pdfs[i, ])
writer.close()
<commit_msg>Create a pdf of the univariate pdfs instead of writing to a .csv file.<commit_after>"""
Purpose: to recreate the univariate pdfs of Conlisk et al. (2007)
in figure 2. This provides a test that the univariate pdfs are
working correctly
"""
import numpy as np
import mete
import matplotlib.pyplot as plt
n0 = 617
c = 256
sing_pdfs = np.zeros((4, 8))
psi = [0.01, 0.25, 0.5, 0.75]
for i in range(0, len(psi)):
sing_pdfs[i, : ] = [mete.single_prob(n, n0, psi[i], c) for n in range(0, 8)]
n = range(0, 8)
plt.plot(n, sing_pdfs[0, :], color='black', linewidth=1)
plt.plot(n, sing_pdfs[1, :], color='black', linewidth=1, ls='--')
plt.plot(n, sing_pdfs[2, :], color='black', linewidth=2)
plt.plot(n, sing_pdfs[3, :], color='lightgrey', linewidth=2)
plt.axis([0, 7, 0, 0.9])
plt.xlabel('n')
plt.ylabel('P(n)')
plt.legend(['psi = 0.01', 'psi = 0.25', 'psi = 0.50','psi = 0.75'])
plt.savefig('../figs/conlisk_univariate_fig2a.pdf')
|
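Editor's note: the change above replaces a csv.writer dump with a matplotlib figure saved to PDF. The sketch below shows that save-a-figure pattern in isolation; dummy_prob and the output filename are placeholders standing in for the project-specific mete.single_prob, not that library's real API.
import numpy as np
import matplotlib
matplotlib.use('Agg')  # draw off-screen so no display is needed
import matplotlib.pyplot as plt
def dummy_prob(n, psi):
    # placeholder for mete.single_prob: a normalized geometric decay
    weights = psi ** np.arange(8)
    return (psi ** n) / weights.sum()
ns = np.arange(8)
for psi in (0.25, 0.5, 0.75):
    plt.plot(ns, [dummy_prob(n, psi) for n in ns], label='psi = %.2f' % psi)
plt.xlabel('n')
plt.ylabel('P(n)')
plt.legend()
plt.savefig('example_univariate_pdfs.pdf')  # hypothetical output path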
54e715f26ed62e62e8794d8084110091c8db580b
|
oauth_provider/utils.py
|
oauth_provider/utils.py
|
import oauth.oauth as oauth
from django.conf import settings
from django.http import HttpResponse
from stores import DataStore
OAUTH_REALM_KEY_NAME = 'OAUTH_REALM_KEY_NAME'
def initialize_server_request(request):
"""Shortcut for initialization."""
oauth_request = oauth.OAuthRequest.from_request(request.method,
request.build_absolute_uri(),
headers=request.META,
parameters=dict(request.REQUEST.items()),
query_string=request.environ.get('QUERY_STRING', ''))
if oauth_request:
oauth_server = oauth.OAuthServer(DataStore(oauth_request))
oauth_server.add_signature_method(oauth.OAuthSignatureMethod_PLAINTEXT())
oauth_server.add_signature_method(oauth.OAuthSignatureMethod_HMAC_SHA1())
else:
oauth_server = None
return oauth_server, oauth_request
def send_oauth_error(err=None):
"""Shortcut for sending an error."""
# send a 401 error
response = HttpResponse(err.message.encode('utf-8'), mimetype="text/plain")
response.status_code = 401
# return the authenticate header
realm = getattr(settings, OAUTH_REALM_KEY_NAME, '')
header = oauth.build_authenticate_header(realm=realm)
for k, v in header.iteritems():
response[k] = v
return response
|
import oauth.oauth as oauth
from django.conf import settings
from django.http import HttpResponse
from stores import DataStore
OAUTH_REALM_KEY_NAME = 'OAUTH_REALM_KEY_NAME'
def initialize_server_request(request):
"""Shortcut for initialization."""
# Django converts the Authorization header into HTTP_AUTHORIZATION
# Warning: it doesn't happen in tests but it's useful, do not remove!
auth_header = {}
if 'Authorization' in request.META:
auth_header = {'Authorization': request.META['Authorization']}
elif 'HTTP_AUTHORIZATION' in request.META:
auth_header = {'Authorization': request.META['HTTP_AUTHORIZATION']}
oauth_request = oauth.OAuthRequest.from_request(request.method,
request.build_absolute_uri(),
headers=auth_header,
parameters=dict(request.REQUEST.items()),
query_string=request.environ.get('QUERY_STRING', ''))
if oauth_request:
oauth_server = oauth.OAuthServer(DataStore(oauth_request))
oauth_server.add_signature_method(oauth.OAuthSignatureMethod_PLAINTEXT())
oauth_server.add_signature_method(oauth.OAuthSignatureMethod_HMAC_SHA1())
else:
oauth_server = None
return oauth_server, oauth_request
def send_oauth_error(err=None):
"""Shortcut for sending an error."""
# send a 401 error
response = HttpResponse(err.message.encode('utf-8'), mimetype="text/plain")
response.status_code = 401
# return the authenticate header
realm = getattr(settings, OAUTH_REALM_KEY_NAME, '')
header = oauth.build_authenticate_header(realm=realm)
for k, v in header.iteritems():
response[k] = v
return response
|
Fix a bug introduced in the latest revision, testing auth header in initialize_server_request now, thanks Chris McMichael for the report and patch
|
Fix a bug introduced in the latest revision, testing auth header in initialize_server_request now, thanks Chris McMichael for the report and patch
|
Python
|
bsd-3-clause
|
lukegb/django-oauth-plus,amrox/django-oauth-plus
|
import oauth.oauth as oauth
from django.conf import settings
from django.http import HttpResponse
from stores import DataStore
OAUTH_REALM_KEY_NAME = 'OAUTH_REALM_KEY_NAME'
def initialize_server_request(request):
"""Shortcut for initialization."""
oauth_request = oauth.OAuthRequest.from_request(request.method,
request.build_absolute_uri(),
headers=request.META,
parameters=dict(request.REQUEST.items()),
query_string=request.environ.get('QUERY_STRING', ''))
if oauth_request:
oauth_server = oauth.OAuthServer(DataStore(oauth_request))
oauth_server.add_signature_method(oauth.OAuthSignatureMethod_PLAINTEXT())
oauth_server.add_signature_method(oauth.OAuthSignatureMethod_HMAC_SHA1())
else:
oauth_server = None
return oauth_server, oauth_request
def send_oauth_error(err=None):
"""Shortcut for sending an error."""
# send a 401 error
response = HttpResponse(err.message.encode('utf-8'), mimetype="text/plain")
response.status_code = 401
# return the authenticate header
realm = getattr(settings, OAUTH_REALM_KEY_NAME, '')
header = oauth.build_authenticate_header(realm=realm)
for k, v in header.iteritems():
response[k] = v
return response
Fix a bug introduced in the latest revision, testing auth header in initialize_server_request now, thanks Chris McMichael for the report and patch
|
import oauth.oauth as oauth
from django.conf import settings
from django.http import HttpResponse
from stores import DataStore
OAUTH_REALM_KEY_NAME = 'OAUTH_REALM_KEY_NAME'
def initialize_server_request(request):
"""Shortcut for initialization."""
# Django converts the Authorization header into HTTP_AUTHORIZATION
# Warning: it doesn't happen in tests but it's useful, do not remove!
auth_header = {}
if 'Authorization' in request.META:
auth_header = {'Authorization': request.META['Authorization']}
elif 'HTTP_AUTHORIZATION' in request.META:
auth_header = {'Authorization': request.META['HTTP_AUTHORIZATION']}
oauth_request = oauth.OAuthRequest.from_request(request.method,
request.build_absolute_uri(),
headers=auth_header,
parameters=dict(request.REQUEST.items()),
query_string=request.environ.get('QUERY_STRING', ''))
if oauth_request:
oauth_server = oauth.OAuthServer(DataStore(oauth_request))
oauth_server.add_signature_method(oauth.OAuthSignatureMethod_PLAINTEXT())
oauth_server.add_signature_method(oauth.OAuthSignatureMethod_HMAC_SHA1())
else:
oauth_server = None
return oauth_server, oauth_request
def send_oauth_error(err=None):
"""Shortcut for sending an error."""
# send a 401 error
response = HttpResponse(err.message.encode('utf-8'), mimetype="text/plain")
response.status_code = 401
# return the authenticate header
realm = getattr(settings, OAUTH_REALM_KEY_NAME, '')
header = oauth.build_authenticate_header(realm=realm)
for k, v in header.iteritems():
response[k] = v
return response
|
<commit_before>import oauth.oauth as oauth
from django.conf import settings
from django.http import HttpResponse
from stores import DataStore
OAUTH_REALM_KEY_NAME = 'OAUTH_REALM_KEY_NAME'
def initialize_server_request(request):
"""Shortcut for initialization."""
oauth_request = oauth.OAuthRequest.from_request(request.method,
request.build_absolute_uri(),
headers=request.META,
parameters=dict(request.REQUEST.items()),
query_string=request.environ.get('QUERY_STRING', ''))
if oauth_request:
oauth_server = oauth.OAuthServer(DataStore(oauth_request))
oauth_server.add_signature_method(oauth.OAuthSignatureMethod_PLAINTEXT())
oauth_server.add_signature_method(oauth.OAuthSignatureMethod_HMAC_SHA1())
else:
oauth_server = None
return oauth_server, oauth_request
def send_oauth_error(err=None):
"""Shortcut for sending an error."""
# send a 401 error
response = HttpResponse(err.message.encode('utf-8'), mimetype="text/plain")
response.status_code = 401
# return the authenticate header
realm = getattr(settings, OAUTH_REALM_KEY_NAME, '')
header = oauth.build_authenticate_header(realm=realm)
for k, v in header.iteritems():
response[k] = v
return response
<commit_msg>Fix a bug introduced in the latest revision, testing auth header in initialize_server_request now, thanks Chris McMichael for the report and patch<commit_after>
|
import oauth.oauth as oauth
from django.conf import settings
from django.http import HttpResponse
from stores import DataStore
OAUTH_REALM_KEY_NAME = 'OAUTH_REALM_KEY_NAME'
def initialize_server_request(request):
"""Shortcut for initialization."""
# Django converts the Authorization header into HTTP_AUTHORIZATION
# Warning: it doesn't happen in tests but it's useful, do not remove!
auth_header = {}
if 'Authorization' in request.META:
auth_header = {'Authorization': request.META['Authorization']}
elif 'HTTP_AUTHORIZATION' in request.META:
auth_header = {'Authorization': request.META['HTTP_AUTHORIZATION']}
oauth_request = oauth.OAuthRequest.from_request(request.method,
request.build_absolute_uri(),
headers=auth_header,
parameters=dict(request.REQUEST.items()),
query_string=request.environ.get('QUERY_STRING', ''))
if oauth_request:
oauth_server = oauth.OAuthServer(DataStore(oauth_request))
oauth_server.add_signature_method(oauth.OAuthSignatureMethod_PLAINTEXT())
oauth_server.add_signature_method(oauth.OAuthSignatureMethod_HMAC_SHA1())
else:
oauth_server = None
return oauth_server, oauth_request
def send_oauth_error(err=None):
"""Shortcut for sending an error."""
# send a 401 error
response = HttpResponse(err.message.encode('utf-8'), mimetype="text/plain")
response.status_code = 401
# return the authenticate header
realm = getattr(settings, OAUTH_REALM_KEY_NAME, '')
header = oauth.build_authenticate_header(realm=realm)
for k, v in header.iteritems():
response[k] = v
return response
|
import oauth.oauth as oauth
from django.conf import settings
from django.http import HttpResponse
from stores import DataStore
OAUTH_REALM_KEY_NAME = 'OAUTH_REALM_KEY_NAME'
def initialize_server_request(request):
"""Shortcut for initialization."""
oauth_request = oauth.OAuthRequest.from_request(request.method,
request.build_absolute_uri(),
headers=request.META,
parameters=dict(request.REQUEST.items()),
query_string=request.environ.get('QUERY_STRING', ''))
if oauth_request:
oauth_server = oauth.OAuthServer(DataStore(oauth_request))
oauth_server.add_signature_method(oauth.OAuthSignatureMethod_PLAINTEXT())
oauth_server.add_signature_method(oauth.OAuthSignatureMethod_HMAC_SHA1())
else:
oauth_server = None
return oauth_server, oauth_request
def send_oauth_error(err=None):
"""Shortcut for sending an error."""
# send a 401 error
response = HttpResponse(err.message.encode('utf-8'), mimetype="text/plain")
response.status_code = 401
# return the authenticate header
realm = getattr(settings, OAUTH_REALM_KEY_NAME, '')
header = oauth.build_authenticate_header(realm=realm)
for k, v in header.iteritems():
response[k] = v
return response
Fix a bug introduced in the latest revision, testing auth header in initialize_server_request now, thanks Chris McMichael for the report and patchimport oauth.oauth as oauth
from django.conf import settings
from django.http import HttpResponse
from stores import DataStore
OAUTH_REALM_KEY_NAME = 'OAUTH_REALM_KEY_NAME'
def initialize_server_request(request):
"""Shortcut for initialization."""
# Django converts the Authorization header into HTTP_AUTHORIZATION
# Warning: it doesn't happen in tests but it's useful, do not remove!
auth_header = {}
if 'Authorization' in request.META:
auth_header = {'Authorization': request.META['Authorization']}
elif 'HTTP_AUTHORIZATION' in request.META:
auth_header = {'Authorization': request.META['HTTP_AUTHORIZATION']}
oauth_request = oauth.OAuthRequest.from_request(request.method,
request.build_absolute_uri(),
headers=auth_header,
parameters=dict(request.REQUEST.items()),
query_string=request.environ.get('QUERY_STRING', ''))
if oauth_request:
oauth_server = oauth.OAuthServer(DataStore(oauth_request))
oauth_server.add_signature_method(oauth.OAuthSignatureMethod_PLAINTEXT())
oauth_server.add_signature_method(oauth.OAuthSignatureMethod_HMAC_SHA1())
else:
oauth_server = None
return oauth_server, oauth_request
def send_oauth_error(err=None):
"""Shortcut for sending an error."""
# send a 401 error
response = HttpResponse(err.message.encode('utf-8'), mimetype="text/plain")
response.status_code = 401
# return the authenticate header
realm = getattr(settings, OAUTH_REALM_KEY_NAME, '')
header = oauth.build_authenticate_header(realm=realm)
for k, v in header.iteritems():
response[k] = v
return response
|
<commit_before>import oauth.oauth as oauth
from django.conf import settings
from django.http import HttpResponse
from stores import DataStore
OAUTH_REALM_KEY_NAME = 'OAUTH_REALM_KEY_NAME'
def initialize_server_request(request):
"""Shortcut for initialization."""
oauth_request = oauth.OAuthRequest.from_request(request.method,
request.build_absolute_uri(),
headers=request.META,
parameters=dict(request.REQUEST.items()),
query_string=request.environ.get('QUERY_STRING', ''))
if oauth_request:
oauth_server = oauth.OAuthServer(DataStore(oauth_request))
oauth_server.add_signature_method(oauth.OAuthSignatureMethod_PLAINTEXT())
oauth_server.add_signature_method(oauth.OAuthSignatureMethod_HMAC_SHA1())
else:
oauth_server = None
return oauth_server, oauth_request
def send_oauth_error(err=None):
"""Shortcut for sending an error."""
# send a 401 error
response = HttpResponse(err.message.encode('utf-8'), mimetype="text/plain")
response.status_code = 401
# return the authenticate header
realm = getattr(settings, OAUTH_REALM_KEY_NAME, '')
header = oauth.build_authenticate_header(realm=realm)
for k, v in header.iteritems():
response[k] = v
return response
<commit_msg>Fix a bug introduced in the latest revision, testing auth header in initialize_server_request now, thanks Chris McMichael for the report and patch<commit_after>import oauth.oauth as oauth
from django.conf import settings
from django.http import HttpResponse
from stores import DataStore
OAUTH_REALM_KEY_NAME = 'OAUTH_REALM_KEY_NAME'
def initialize_server_request(request):
"""Shortcut for initialization."""
# Django converts the Authorization header into HTTP_AUTHORIZATION
# Warning: it doesn't happen in tests but it's useful, do not remove!
auth_header = {}
if 'Authorization' in request.META:
auth_header = {'Authorization': request.META['Authorization']}
elif 'HTTP_AUTHORIZATION' in request.META:
auth_header = {'Authorization': request.META['HTTP_AUTHORIZATION']}
oauth_request = oauth.OAuthRequest.from_request(request.method,
request.build_absolute_uri(),
headers=auth_header,
parameters=dict(request.REQUEST.items()),
query_string=request.environ.get('QUERY_STRING', ''))
if oauth_request:
oauth_server = oauth.OAuthServer(DataStore(oauth_request))
oauth_server.add_signature_method(oauth.OAuthSignatureMethod_PLAINTEXT())
oauth_server.add_signature_method(oauth.OAuthSignatureMethod_HMAC_SHA1())
else:
oauth_server = None
return oauth_server, oauth_request
def send_oauth_error(err=None):
"""Shortcut for sending an error."""
# send a 401 error
response = HttpResponse(err.message.encode('utf-8'), mimetype="text/plain")
response.status_code = 401
# return the authenticate header
realm = getattr(settings, OAUTH_REALM_KEY_NAME, '')
header = oauth.build_authenticate_header(realm=realm)
for k, v in header.iteritems():
response[k] = v
return response
|
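Editor's note: the fix above hinges on WSGI/Django renaming incoming headers: 'Authorization' arrives in request.META as 'HTTP_AUTHORIZATION', except under the test client, which may pass it unprefixed. A framework-free sketch of that lookup follows; extract_auth_header is a hypothetical helper, not part of this project.
def extract_auth_header(meta):
    """Pull an OAuth Authorization header out of a WSGI-style dict."""
    for key in ('Authorization', 'HTTP_AUTHORIZATION'):
        if key in meta:
            return {'Authorization': meta[key]}
    return {}
assert extract_auth_header({'HTTP_AUTHORIZATION': 'OAuth oauth_token="abc"'}) == \
    {'Authorization': 'OAuth oauth_token="abc"'}
assert extract_auth_header({'Content-Type': 'text/plain'}) == {}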
73fe535416dbf744752d745f0186d5406bd15d8c
|
test/client/local_recognizer_test.py
|
test/client/local_recognizer_test.py
|
import unittest
import os
from speech_recognition import WavFile
from mycroft.client.speech.listener import RecognizerLoop
__author__ = 'seanfitz'
DATA_DIR = os.path.join(os.path.abspath(os.path.dirname(__file__)), "data")
class LocalRecognizerTest(unittest.TestCase):
def setUp(self):
self.recognizer = RecognizerLoop.create_mycroft_recognizer(16000,
"en-us")
def testRecognizerWrapper(self):
source = WavFile(os.path.join(DATA_DIR, "hey_mycroft.wav"))
with source as audio:
hyp = self.recognizer.transcribe(audio.stream.read())
assert self.recognizer.key_phrase in hyp.hypstr.lower()
source = WavFile(os.path.join(DATA_DIR, "mycroft.wav"))
with source as audio:
hyp = self.recognizer.transcribe(audio.stream.read())
assert self.recognizer.key_phrase in hyp.hypstr.lower()
def testRecognitionInLongerUtterance(self):
source = WavFile(os.path.join(DATA_DIR, "weather_mycroft.wav"))
with source as audio:
hyp = self.recognizer.transcribe(audio.stream.read())
assert self.recognizer.key_phrase in hyp.hypstr.lower()
|
import unittest
import os
from speech_recognition import WavFile
from mycroft.client.speech.listener import RecognizerLoop
__author__ = 'seanfitz'
DATA_DIR = os.path.join(os.path.abspath(os.path.dirname(__file__)), "data")
class LocalRecognizerTest(unittest.TestCase):
def setUp(self):
rl = RecognizerLoop()
self.recognizer = RecognizerLoop.create_mycroft_recognizer(rl,
16000,
"en-us")
def testRecognizerWrapper(self):
source = WavFile(os.path.join(DATA_DIR, "hey_mycroft.wav"))
with source as audio:
hyp = self.recognizer.transcribe(audio.stream.read())
assert self.recognizer.key_phrase in hyp.hypstr.lower()
source = WavFile(os.path.join(DATA_DIR, "mycroft.wav"))
with source as audio:
hyp = self.recognizer.transcribe(audio.stream.read())
assert self.recognizer.key_phrase in hyp.hypstr.lower()
def testRecognitionInLongerUtterance(self):
source = WavFile(os.path.join(DATA_DIR, "weather_mycroft.wav"))
with source as audio:
hyp = self.recognizer.transcribe(audio.stream.read())
assert self.recognizer.key_phrase in hyp.hypstr.lower()
|
Fix init of local recognizer
|
Fix init of local recognizer
|
Python
|
apache-2.0
|
linuxipho/mycroft-core,aatchison/mycroft-core,MycroftAI/mycroft-core,aatchison/mycroft-core,forslund/mycroft-core,linuxipho/mycroft-core,Dark5ide/mycroft-core,Dark5ide/mycroft-core,MycroftAI/mycroft-core,forslund/mycroft-core
|
import unittest
import os
from speech_recognition import WavFile
from mycroft.client.speech.listener import RecognizerLoop
__author__ = 'seanfitz'
DATA_DIR = os.path.join(os.path.abspath(os.path.dirname(__file__)), "data")
class LocalRecognizerTest(unittest.TestCase):
def setUp(self):
self.recognizer = RecognizerLoop.create_mycroft_recognizer(16000,
"en-us")
def testRecognizerWrapper(self):
source = WavFile(os.path.join(DATA_DIR, "hey_mycroft.wav"))
with source as audio:
hyp = self.recognizer.transcribe(audio.stream.read())
assert self.recognizer.key_phrase in hyp.hypstr.lower()
source = WavFile(os.path.join(DATA_DIR, "mycroft.wav"))
with source as audio:
hyp = self.recognizer.transcribe(audio.stream.read())
assert self.recognizer.key_phrase in hyp.hypstr.lower()
def testRecognitionInLongerUtterance(self):
source = WavFile(os.path.join(DATA_DIR, "weather_mycroft.wav"))
with source as audio:
hyp = self.recognizer.transcribe(audio.stream.read())
assert self.recognizer.key_phrase in hyp.hypstr.lower()
Fix init of local recognizer
|
import unittest
import os
from speech_recognition import WavFile
from mycroft.client.speech.listener import RecognizerLoop
__author__ = 'seanfitz'
DATA_DIR = os.path.join(os.path.abspath(os.path.dirname(__file__)), "data")
class LocalRecognizerTest(unittest.TestCase):
def setUp(self):
rl = RecognizerLoop()
self.recognizer = RecognizerLoop.create_mycroft_recognizer(rl,
16000,
"en-us")
def testRecognizerWrapper(self):
source = WavFile(os.path.join(DATA_DIR, "hey_mycroft.wav"))
with source as audio:
hyp = self.recognizer.transcribe(audio.stream.read())
assert self.recognizer.key_phrase in hyp.hypstr.lower()
source = WavFile(os.path.join(DATA_DIR, "mycroft.wav"))
with source as audio:
hyp = self.recognizer.transcribe(audio.stream.read())
assert self.recognizer.key_phrase in hyp.hypstr.lower()
def testRecognitionInLongerUtterance(self):
source = WavFile(os.path.join(DATA_DIR, "weather_mycroft.wav"))
with source as audio:
hyp = self.recognizer.transcribe(audio.stream.read())
assert self.recognizer.key_phrase in hyp.hypstr.lower()
|
<commit_before>import unittest
import os
from speech_recognition import WavFile
from mycroft.client.speech.listener import RecognizerLoop
__author__ = 'seanfitz'
DATA_DIR = os.path.join(os.path.abspath(os.path.dirname(__file__)), "data")
class LocalRecognizerTest(unittest.TestCase):
def setUp(self):
self.recognizer = RecognizerLoop.create_mycroft_recognizer(16000,
"en-us")
def testRecognizerWrapper(self):
source = WavFile(os.path.join(DATA_DIR, "hey_mycroft.wav"))
with source as audio:
hyp = self.recognizer.transcribe(audio.stream.read())
assert self.recognizer.key_phrase in hyp.hypstr.lower()
source = WavFile(os.path.join(DATA_DIR, "mycroft.wav"))
with source as audio:
hyp = self.recognizer.transcribe(audio.stream.read())
assert self.recognizer.key_phrase in hyp.hypstr.lower()
def testRecognitionInLongerUtterance(self):
source = WavFile(os.path.join(DATA_DIR, "weather_mycroft.wav"))
with source as audio:
hyp = self.recognizer.transcribe(audio.stream.read())
assert self.recognizer.key_phrase in hyp.hypstr.lower()
<commit_msg>Fix init of local recognizer<commit_after>
|
import unittest
import os
from speech_recognition import WavFile
from mycroft.client.speech.listener import RecognizerLoop
__author__ = 'seanfitz'
DATA_DIR = os.path.join(os.path.abspath(os.path.dirname(__file__)), "data")
class LocalRecognizerTest(unittest.TestCase):
def setUp(self):
rl = RecognizerLoop()
self.recognizer = RecognizerLoop.create_mycroft_recognizer(rl,
16000,
"en-us")
def testRecognizerWrapper(self):
source = WavFile(os.path.join(DATA_DIR, "hey_mycroft.wav"))
with source as audio:
hyp = self.recognizer.transcribe(audio.stream.read())
assert self.recognizer.key_phrase in hyp.hypstr.lower()
source = WavFile(os.path.join(DATA_DIR, "mycroft.wav"))
with source as audio:
hyp = self.recognizer.transcribe(audio.stream.read())
assert self.recognizer.key_phrase in hyp.hypstr.lower()
def testRecognitionInLongerUtterance(self):
source = WavFile(os.path.join(DATA_DIR, "weather_mycroft.wav"))
with source as audio:
hyp = self.recognizer.transcribe(audio.stream.read())
assert self.recognizer.key_phrase in hyp.hypstr.lower()
|
import unittest
import os
from speech_recognition import WavFile
from mycroft.client.speech.listener import RecognizerLoop
__author__ = 'seanfitz'
DATA_DIR = os.path.join(os.path.abspath(os.path.dirname(__file__)), "data")
class LocalRecognizerTest(unittest.TestCase):
def setUp(self):
self.recognizer = RecognizerLoop.create_mycroft_recognizer(16000,
"en-us")
def testRecognizerWrapper(self):
source = WavFile(os.path.join(DATA_DIR, "hey_mycroft.wav"))
with source as audio:
hyp = self.recognizer.transcribe(audio.stream.read())
assert self.recognizer.key_phrase in hyp.hypstr.lower()
source = WavFile(os.path.join(DATA_DIR, "mycroft.wav"))
with source as audio:
hyp = self.recognizer.transcribe(audio.stream.read())
assert self.recognizer.key_phrase in hyp.hypstr.lower()
def testRecognitionInLongerUtterance(self):
source = WavFile(os.path.join(DATA_DIR, "weather_mycroft.wav"))
with source as audio:
hyp = self.recognizer.transcribe(audio.stream.read())
assert self.recognizer.key_phrase in hyp.hypstr.lower()
Fix init of local recognizerimport unittest
import os
from speech_recognition import WavFile
from mycroft.client.speech.listener import RecognizerLoop
__author__ = 'seanfitz'
DATA_DIR = os.path.join(os.path.abspath(os.path.dirname(__file__)), "data")
class LocalRecognizerTest(unittest.TestCase):
def setUp(self):
rl = RecognizerLoop()
self.recognizer = RecognizerLoop.create_mycroft_recognizer(rl,
16000,
"en-us")
def testRecognizerWrapper(self):
source = WavFile(os.path.join(DATA_DIR, "hey_mycroft.wav"))
with source as audio:
hyp = self.recognizer.transcribe(audio.stream.read())
assert self.recognizer.key_phrase in hyp.hypstr.lower()
source = WavFile(os.path.join(DATA_DIR, "mycroft.wav"))
with source as audio:
hyp = self.recognizer.transcribe(audio.stream.read())
assert self.recognizer.key_phrase in hyp.hypstr.lower()
def testRecognitionInLongerUtterance(self):
source = WavFile(os.path.join(DATA_DIR, "weather_mycroft.wav"))
with source as audio:
hyp = self.recognizer.transcribe(audio.stream.read())
assert self.recognizer.key_phrase in hyp.hypstr.lower()
|
<commit_before>import unittest
import os
from speech_recognition import WavFile
from mycroft.client.speech.listener import RecognizerLoop
__author__ = 'seanfitz'
DATA_DIR = os.path.join(os.path.abspath(os.path.dirname(__file__)), "data")
class LocalRecognizerTest(unittest.TestCase):
def setUp(self):
self.recognizer = RecognizerLoop.create_mycroft_recognizer(16000,
"en-us")
def testRecognizerWrapper(self):
source = WavFile(os.path.join(DATA_DIR, "hey_mycroft.wav"))
with source as audio:
hyp = self.recognizer.transcribe(audio.stream.read())
assert self.recognizer.key_phrase in hyp.hypstr.lower()
source = WavFile(os.path.join(DATA_DIR, "mycroft.wav"))
with source as audio:
hyp = self.recognizer.transcribe(audio.stream.read())
assert self.recognizer.key_phrase in hyp.hypstr.lower()
def testRecognitionInLongerUtterance(self):
source = WavFile(os.path.join(DATA_DIR, "weather_mycroft.wav"))
with source as audio:
hyp = self.recognizer.transcribe(audio.stream.read())
assert self.recognizer.key_phrase in hyp.hypstr.lower()
<commit_msg>Fix init of local recognizer<commit_after>import unittest
import os
from speech_recognition import WavFile
from mycroft.client.speech.listener import RecognizerLoop
__author__ = 'seanfitz'
DATA_DIR = os.path.join(os.path.abspath(os.path.dirname(__file__)), "data")
class LocalRecognizerTest(unittest.TestCase):
def setUp(self):
rl = RecognizerLoop()
self.recognizer = RecognizerLoop.create_mycroft_recognizer(rl,
16000,
"en-us")
def testRecognizerWrapper(self):
source = WavFile(os.path.join(DATA_DIR, "hey_mycroft.wav"))
with source as audio:
hyp = self.recognizer.transcribe(audio.stream.read())
assert self.recognizer.key_phrase in hyp.hypstr.lower()
source = WavFile(os.path.join(DATA_DIR, "mycroft.wav"))
with source as audio:
hyp = self.recognizer.transcribe(audio.stream.read())
assert self.recognizer.key_phrase in hyp.hypstr.lower()
def testRecognitionInLongerUtterance(self):
source = WavFile(os.path.join(DATA_DIR, "weather_mycroft.wav"))
with source as audio:
hyp = self.recognizer.transcribe(audio.stream.read())
assert self.recognizer.key_phrase in hyp.hypstr.lower()
|
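Editor's note: the fix above works because calling an instance method through its class (RecognizerLoop.create_mycroft_recognizer) leaves the self slot to be filled by the caller, so an instance must be passed explicitly as the first argument. A minimal illustration with a hypothetical class:
class Loop(object):
    def create(self, rate, lang):
        return (rate, lang)
loop = Loop()
# calling through the class: the instance fills the first parameter
assert Loop.create(loop, 16000, 'en-us') == (16000, 'en-us')
# equivalent ordinary bound call
assert loop.create(16000, 'en-us') == (16000, 'en-us')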
eb06e85c7dcb93febe22d20cd7e3e694939449ba
|
tests/test_xelatex.py
|
tests/test_xelatex.py
|
from latex.build import LatexMkBuilder
def test_xelatex():
min_latex = r"""
\documentclass{article}
\begin{document}
Hello, world!
\end{document}
"""
builder = LatexMkBuilder(variant='xelatex')
pdf = builder.build_pdf(min_latex)
assert pdf
|
from latex.build import LatexMkBuilder
def test_xelatex():
# the example below should not compile with pdflatex, but it should with xelatex
min_latex = r"""
\documentclass[12pt]{article}
\usepackage{fontspec}
\setmainfont{Times New Roman}
\title{Sample font document}
\author{Hubert Farnsworth}
\date{this month, 2014}
\begin{document}
\maketitle
This is an \textit{example} of a document compiled
with the \textbf{xelatex} compiler. LuaLaTeX should
work fine also.
\end{document}
"""
builder = LatexMkBuilder(variant='xelatex')
pdf = builder.build_pdf(min_latex)
assert pdf
|
Make XeLaTeX test harder (to actually test xelatex being used).
|
Make XeLaTeX test harder (to actually test xelatex being used).
|
Python
|
bsd-3-clause
|
mbr/latex
|
from latex.build import LatexMkBuilder
def test_xelatex():
min_latex = r"""
\documentclass{article}
\begin{document}
Hello, world!
\end{document}
"""
builder = LatexMkBuilder(variant='xelatex')
pdf = builder.build_pdf(min_latex)
assert pdf
Make XeLaTeX test harder (to actually test xelatex being used).
|
from latex.build import LatexMkBuilder
def test_xelatex():
# the example below should not compile with pdflatex, but it should with xelatex
min_latex = r"""
\documentclass[12pt]{article}
\usepackage{fontspec}
\setmainfont{Times New Roman}
\title{Sample font document}
\author{Hubert Farnsworth}
\date{this month, 2014}
\begin{document}
\maketitle
This is an \textit{example} of a document compiled
with the \textbf{xelatex} compiler. LuaLaTeX should
work fine also.
\end{document}
"""
builder = LatexMkBuilder(variant='xelatex')
pdf = builder.build_pdf(min_latex)
assert pdf
|
<commit_before>from latex.build import LatexMkBuilder
def test_xelatex():
min_latex = r"""
\documentclass{article}
\begin{document}
Hello, world!
\end{document}
"""
builder = LatexMkBuilder(variant='xelatex')
pdf = builder.build_pdf(min_latex)
assert pdf
<commit_msg>Make XeLaTeX test harder (to actually test xelatex being used).<commit_after>
|
from latex.build import LatexMkBuilder
def test_xelatex():
# the example below should not compile with pdflatex, but it should with xelatex
min_latex = r"""
\documentclass[12pt]{article}
\usepackage{fontspec}
\setmainfont{Times New Roman}
\title{Sample font document}
\author{Hubert Farnsworth}
\date{this month, 2014}
\begin{document}
\maketitle
This is an \textit{example} of a document compiled
with the \textbf{xelatex} compiler. LuaLaTeX should
work fine also.
\end{document}
"""
builder = LatexMkBuilder(variant='xelatex')
pdf = builder.build_pdf(min_latex)
assert pdf
|
from latex.build import LatexMkBuilder
def test_xelatex():
min_latex = r"""
\documentclass{article}
\begin{document}
Hello, world!
\end{document}
"""
builder = LatexMkBuilder(variant='xelatex')
pdf = builder.build_pdf(min_latex)
assert pdf
Make XeLaTeX test harder (to actually test xelatex being used).from latex.build import LatexMkBuilder
def test_xelatex():
# the example below should not compile with pdflatex, but it should with xelatex
min_latex = r"""
\documentclass[12pt]{article}
\usepackage{fontspec}
\setmainfont{Times New Roman}
\title{Sample font document}
\author{Hubert Farnsworth}
\date{this month, 2014}
\begin{document}
\maketitle
This is an \textit{example} of a document compiled
with the \textbf{xelatex} compiler. LuaLaTeX should
work fine also.
\end{document}
"""
builder = LatexMkBuilder(variant='xelatex')
pdf = builder.build_pdf(min_latex)
assert pdf
|
<commit_before>from latex.build import LatexMkBuilder
def test_xelatex():
min_latex = r"""
\documentclass{article}
\begin{document}
Hello, world!
\end{document}
"""
builder = LatexMkBuilder(variant='xelatex')
pdf = builder.build_pdf(min_latex)
assert pdf
<commit_msg>Make XeLaTeX test harder (to actually test xelatex being used).<commit_after>from latex.build import LatexMkBuilder
def test_xelatex():
# the example below should not compile with pdflatex, but it should with xelatex
min_latex = r"""
\documentclass[12pt]{article}
\usepackage{fontspec}
\setmainfont{Times New Roman}
\title{Sample font document}
\author{Hubert Farnsworth}
\date{this month, 2014}
\begin{document}
\maketitle
This is an \textit{example} of a document compiled
with the \textbf{xelatex} compiler. LuaLaTeX should
work fine also.
\end{document}
"""
builder = LatexMkBuilder(variant='xelatex')
pdf = builder.build_pdf(min_latex)
assert pdf
|
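Editor's note: the new document forces XeLaTeX because fontspec and \setmainfont are unsupported by pdflatex. A complementary negative check could look like the sketch below; it assumes LatexMkBuilder also accepts variant='pdflatex' and that a failed compile raises an exception, neither of which is shown in this record.
from latex.build import LatexMkBuilder
FONTSPEC_DOC = r"""
\documentclass{article}
\usepackage{fontspec}
\begin{document}
Hello
\end{document}
"""
def builds_ok(variant):
    try:
        return bool(LatexMkBuilder(variant=variant).build_pdf(FONTSPEC_DOC))
    except Exception:  # assumption: failed compiles surface as exceptions
        return False
def test_fontspec_requires_xelatex():
    assert builds_ok('xelatex')
    assert not builds_ok('pdflatex')  # hypothetical variant name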
d95d71f996483bdde4f0b27d9d9c023aef706c65
|
freebasics/tests/test_env_variables.py
|
freebasics/tests/test_env_variables.py
|
from django.test import TestCase, RequestFactory
from molo.core.tests.base import MoloTestCaseMixin
from freebasics.views import HomeView
from freebasics.templatetags import freebasics_tags
class EnvTestCase(TestCase, MoloTestCaseMixin):
def setUp(self):
self.mk_main()
def test_block_ordering(self):
with self.settings(BLOCK_POSITION_BANNER=4,
BLOCK_POSITION_LATEST=3,
BLOCK_POSITION_QUESTIONS=2,
BLOCK_POSITION_SECTIONS=1):
factory = RequestFactory()
request = factory.get('/')
request.site = self.site
home = HomeView()
home.request = request
context = home.get_context_data()
self.assertEquals(context['blocks'][0], (
'blocks/sections.html', 1))
self.assertEquals(context['blocks'][1], (
'blocks/questions.html', 2))
self.assertEquals(context['blocks'][2], ('blocks/latest.html', 3))
self.assertEquals(context['blocks'][3], ('blocks/banners.html', 4))
def test_css_vars(self):
with self.settings(CUSTOM_CSS_BLOCK_TEXT_TRANSFORM="lowercase",
CUSTOM_CSS_ACCENT_2="red"):
styles = freebasics_tags.custom_css(context='')
self.assertEquals(styles['accent_2'], 'red')
self.assertEquals(styles['text_transform'], 'lowercase')
|
from django.test import TestCase, RequestFactory
from molo.core.tests.base import MoloTestCaseMixin
from freebasics.views import HomeView
from freebasics.templatetags import freebasics_tags
class EnvTestCase(TestCase, MoloTestCaseMixin):
def setUp(self):
self.mk_main()
def test_block_ordering(self):
with self.settings(BLOCK_POSITION_BANNER=4,
BLOCK_POSITION_LATEST=3,
BLOCK_POSITION_QUESTIONS=2,
BLOCK_POSITION_SECTIONS=1):
factory = RequestFactory()
request = factory.get('/')
request.site = self.site
home = HomeView()
home.request = request
context = home.get_context_data()
self.assertEquals(context['blocks'][0], (
'blocks/sections.html', 1))
self.assertEquals(context['blocks'][1], (
'blocks/questions.html', 2))
self.assertEquals(context['blocks'][2], ('blocks/latest.html', 3))
self.assertEquals(context['blocks'][3], ('blocks/banners.html', 4))
def test_css_vars(self):
with self.settings(CUSTOM_CSS_BLOCK_TEXT_TRANSFORM="lowercase",
CUSTOM_CSS_ACCENT_2="red"):
styles = freebasics_tags.custom_css(context='')
self.assertEquals(styles['accent_2'], 'red')
self.assertEquals(styles['text_transform'], 'lowercase')
def test_custom_css(self):
response = self.client.get('/')
self.assertContains(response, '.fb-body .base-bcolor')
self.assertContains(response, '.fb-body .block-heading')
self.assertContains(response, '.section-nav__items')
|
Add test for custom css
|
Add test for custom css
|
Python
|
bsd-2-clause
|
praekelt/molo-freebasics,praekelt/molo-freebasics,praekelt/molo-freebasics,praekelt/molo-freebasics
|
from django.test import TestCase, RequestFactory
from molo.core.tests.base import MoloTestCaseMixin
from freebasics.views import HomeView
from freebasics.templatetags import freebasics_tags
class EnvTestCase(TestCase, MoloTestCaseMixin):
def setUp(self):
self.mk_main()
def test_block_ordering(self):
with self.settings(BLOCK_POSITION_BANNER=4,
BLOCK_POSITION_LATEST=3,
BLOCK_POSITION_QUESTIONS=2,
BLOCK_POSITION_SECTIONS=1):
factory = RequestFactory()
request = factory.get('/')
request.site = self.site
home = HomeView()
home.request = request
context = home.get_context_data()
self.assertEquals(context['blocks'][0], (
'blocks/sections.html', 1))
self.assertEquals(context['blocks'][1], (
'blocks/questions.html', 2))
self.assertEquals(context['blocks'][2], ('blocks/latest.html', 3))
self.assertEquals(context['blocks'][3], ('blocks/banners.html', 4))
def test_css_vars(self):
with self.settings(CUSTOM_CSS_BLOCK_TEXT_TRANSFORM="lowercase",
CUSTOM_CSS_ACCENT_2="red"):
styles = freebasics_tags.custom_css(context='')
self.assertEquals(styles['accent_2'], 'red')
self.assertEquals(styles['text_transform'], 'lowercase')
Add test for custom css
|
from django.test import TestCase, RequestFactory
from molo.core.tests.base import MoloTestCaseMixin
from freebasics.views import HomeView
from freebasics.templatetags import freebasics_tags
class EnvTestCase(TestCase, MoloTestCaseMixin):
def setUp(self):
self.mk_main()
def test_block_ordering(self):
with self.settings(BLOCK_POSITION_BANNER=4,
BLOCK_POSITION_LATEST=3,
BLOCK_POSITION_QUESTIONS=2,
BLOCK_POSITION_SECTIONS=1):
factory = RequestFactory()
request = factory.get('/')
request.site = self.site
home = HomeView()
home.request = request
context = home.get_context_data()
self.assertEquals(context['blocks'][0], (
'blocks/sections.html', 1))
self.assertEquals(context['blocks'][1], (
'blocks/questions.html', 2))
self.assertEquals(context['blocks'][2], ('blocks/latest.html', 3))
self.assertEquals(context['blocks'][3], ('blocks/banners.html', 4))
def test_css_vars(self):
with self.settings(CUSTOM_CSS_BLOCK_TEXT_TRANSFORM="lowercase",
CUSTOM_CSS_ACCENT_2="red"):
styles = freebasics_tags.custom_css(context='')
self.assertEquals(styles['accent_2'], 'red')
self.assertEquals(styles['text_transform'], 'lowercase')
def test_custom_css(self):
response = self.client.get('/')
self.assertContains(response, '.fb-body .base-bcolor')
self.assertContains(response, '.fb-body .block-heading')
self.assertContains(response, '.section-nav__items')
|
<commit_before>from django.test import TestCase, RequestFactory
from molo.core.tests.base import MoloTestCaseMixin
from freebasics.views import HomeView
from freebasics.templatetags import freebasics_tags
class EnvTestCase(TestCase, MoloTestCaseMixin):
def setUp(self):
self.mk_main()
def test_block_ordering(self):
with self.settings(BLOCK_POSITION_BANNER=4,
BLOCK_POSITION_LATEST=3,
BLOCK_POSITION_QUESTIONS=2,
BLOCK_POSITION_SECTIONS=1):
factory = RequestFactory()
request = factory.get('/')
request.site = self.site
home = HomeView()
home.request = request
context = home.get_context_data()
self.assertEquals(context['blocks'][0], (
'blocks/sections.html', 1))
self.assertEquals(context['blocks'][1], (
'blocks/questions.html', 2))
self.assertEquals(context['blocks'][2], ('blocks/latest.html', 3))
self.assertEquals(context['blocks'][3], ('blocks/banners.html', 4))
def test_css_vars(self):
with self.settings(CUSTOM_CSS_BLOCK_TEXT_TRANSFORM="lowercase",
CUSTOM_CSS_ACCENT_2="red"):
styles = freebasics_tags.custom_css(context='')
self.assertEquals(styles['accent_2'], 'red')
self.assertEquals(styles['text_transform'], 'lowercase')
<commit_msg>Add test for custom css<commit_after>
|
from django.test import TestCase, RequestFactory
from molo.core.tests.base import MoloTestCaseMixin
from freebasics.views import HomeView
from freebasics.templatetags import freebasics_tags
class EnvTestCase(TestCase, MoloTestCaseMixin):
def setUp(self):
self.mk_main()
def test_block_ordering(self):
with self.settings(BLOCK_POSITION_BANNER=4,
BLOCK_POSITION_LATEST=3,
BLOCK_POSITION_QUESTIONS=2,
BLOCK_POSITION_SECTIONS=1):
factory = RequestFactory()
request = factory.get('/')
request.site = self.site
home = HomeView()
home.request = request
context = home.get_context_data()
self.assertEquals(context['blocks'][0], (
'blocks/sections.html', 1))
self.assertEquals(context['blocks'][1], (
'blocks/questions.html', 2))
self.assertEquals(context['blocks'][2], ('blocks/latest.html', 3))
self.assertEquals(context['blocks'][3], ('blocks/banners.html', 4))
def test_css_vars(self):
with self.settings(CUSTOM_CSS_BLOCK_TEXT_TRANSFORM="lowercase",
CUSTOM_CSS_ACCENT_2="red"):
styles = freebasics_tags.custom_css(context='')
self.assertEquals(styles['accent_2'], 'red')
self.assertEquals(styles['text_transform'], 'lowercase')
def test_custom_css(self):
response = self.client.get('/')
self.assertContains(response, '.fb-body .base-bcolor')
self.assertContains(response, '.fb-body .block-heading')
self.assertContains(response, '.section-nav__items')
|
from django.test import TestCase, RequestFactory
from molo.core.tests.base import MoloTestCaseMixin
from freebasics.views import HomeView
from freebasics.templatetags import freebasics_tags
class EnvTestCase(TestCase, MoloTestCaseMixin):
def setUp(self):
self.mk_main()
def test_block_ordering(self):
with self.settings(BLOCK_POSITION_BANNER=4,
BLOCK_POSITION_LATEST=3,
BLOCK_POSITION_QUESTIONS=2,
BLOCK_POSITION_SECTIONS=1):
factory = RequestFactory()
request = factory.get('/')
request.site = self.site
home = HomeView()
home.request = request
context = home.get_context_data()
self.assertEquals(context['blocks'][0], (
'blocks/sections.html', 1))
self.assertEquals(context['blocks'][1], (
'blocks/questions.html', 2))
self.assertEquals(context['blocks'][2], ('blocks/latest.html', 3))
self.assertEquals(context['blocks'][3], ('blocks/banners.html', 4))
def test_css_vars(self):
with self.settings(CUSTOM_CSS_BLOCK_TEXT_TRANSFORM="lowercase",
CUSTOM_CSS_ACCENT_2="red"):
styles = freebasics_tags.custom_css(context='')
self.assertEquals(styles['accent_2'], 'red')
self.assertEquals(styles['text_transform'], 'lowercase')
Add test for custom cssfrom django.test import TestCase, RequestFactory
from molo.core.tests.base import MoloTestCaseMixin
from freebasics.views import HomeView
from freebasics.templatetags import freebasics_tags
class EnvTestCase(TestCase, MoloTestCaseMixin):
def setUp(self):
self.mk_main()
def test_block_ordering(self):
with self.settings(BLOCK_POSITION_BANNER=4,
BLOCK_POSITION_LATEST=3,
BLOCK_POSITION_QUESTIONS=2,
BLOCK_POSITION_SECTIONS=1):
factory = RequestFactory()
request = factory.get('/')
request.site = self.site
home = HomeView()
home.request = request
context = home.get_context_data()
self.assertEquals(context['blocks'][0], (
'blocks/sections.html', 1))
self.assertEquals(context['blocks'][1], (
'blocks/questions.html', 2))
self.assertEquals(context['blocks'][2], ('blocks/latest.html', 3))
self.assertEquals(context['blocks'][3], ('blocks/banners.html', 4))
def test_css_vars(self):
with self.settings(CUSTOM_CSS_BLOCK_TEXT_TRANSFORM="lowercase",
CUSTOM_CSS_ACCENT_2="red"):
styles = freebasics_tags.custom_css(context='')
self.assertEquals(styles['accent_2'], 'red')
self.assertEquals(styles['text_transform'], 'lowercase')
def test_custom_css(self):
response = self.client.get('/')
self.assertContains(response, '.fb-body .base-bcolor')
self.assertContains(response, '.fb-body .block-heading')
self.assertContains(response, '.section-nav__items')
|
<commit_before>from django.test import TestCase, RequestFactory
from molo.core.tests.base import MoloTestCaseMixin
from freebasics.views import HomeView
from freebasics.templatetags import freebasics_tags
class EnvTestCase(TestCase, MoloTestCaseMixin):
def setUp(self):
self.mk_main()
def test_block_ordering(self):
with self.settings(BLOCK_POSITION_BANNER=4,
BLOCK_POSITION_LATEST=3,
BLOCK_POSITION_QUESTIONS=2,
BLOCK_POSITION_SECTIONS=1):
factory = RequestFactory()
request = factory.get('/')
request.site = self.site
home = HomeView()
home.request = request
context = home.get_context_data()
self.assertEquals(context['blocks'][0], (
'blocks/sections.html', 1))
self.assertEquals(context['blocks'][1], (
'blocks/questions.html', 2))
self.assertEquals(context['blocks'][2], ('blocks/latest.html', 3))
self.assertEquals(context['blocks'][3], ('blocks/banners.html', 4))
def test_css_vars(self):
with self.settings(CUSTOM_CSS_BLOCK_TEXT_TRANSFORM="lowercase",
CUSTOM_CSS_ACCENT_2="red"):
styles = freebasics_tags.custom_css(context='')
self.assertEquals(styles['accent_2'], 'red')
self.assertEquals(styles['text_transform'], 'lowercase')
<commit_msg>Add test for custom css<commit_after>from django.test import TestCase, RequestFactory
from molo.core.tests.base import MoloTestCaseMixin
from freebasics.views import HomeView
from freebasics.templatetags import freebasics_tags
class EnvTestCase(TestCase, MoloTestCaseMixin):
def setUp(self):
self.mk_main()
def test_block_ordering(self):
with self.settings(BLOCK_POSITION_BANNER=4,
BLOCK_POSITION_LATEST=3,
BLOCK_POSITION_QUESTIONS=2,
BLOCK_POSITION_SECTIONS=1):
factory = RequestFactory()
request = factory.get('/')
request.site = self.site
home = HomeView()
home.request = request
context = home.get_context_data()
self.assertEquals(context['blocks'][0], (
'blocks/sections.html', 1))
self.assertEquals(context['blocks'][1], (
'blocks/questions.html', 2))
self.assertEquals(context['blocks'][2], ('blocks/latest.html', 3))
self.assertEquals(context['blocks'][3], ('blocks/banners.html', 4))
def test_css_vars(self):
with self.settings(CUSTOM_CSS_BLOCK_TEXT_TRANSFORM="lowercase",
CUSTOM_CSS_ACCENT_2="red"):
styles = freebasics_tags.custom_css(context='')
self.assertEquals(styles['accent_2'], 'red')
self.assertEquals(styles['text_transform'], 'lowercase')
def test_custom_css(self):
response = self.client.get('/')
self.assertContains(response, '.fb-body .base-bcolor')
self.assertContains(response, '.fb-body .block-heading')
self.assertContains(response, '.section-nav__items')
|
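Editor's note: the added test leans on Django's assertContains, which checks that a fragment occurs in the response body (and, by default, that the status code is 200). The essence of that check, stripped of the framework (the sample body and fragments below are made up for illustration):
def missing_fragments(body, needles):
    """Return the fragments that do not appear in the rendered page."""
    return [s for s in needles if s not in body]
body = '<style>.fb-body .base-bcolor {} .section-nav__items {}</style>'
assert missing_fragments(body, ['.fb-body .base-bcolor',
                                '.section-nav__items']) == []
assert missing_fragments(body, ['.block-heading']) == ['.block-heading']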
59c66d0e172b23ea7106a70866871d20bbcabe5b
|
timezones/__init__.py
|
timezones/__init__.py
|
import pytz
TIMEZONE_CHOICES = zip(pytz.all_timezones, pytz.all_timezones)
|
import pytz
TIMEZONE_CHOICES = zip(pytz.common_timezones, pytz.common_timezones)
|
Use common timezones and not all timezones pytz provides.
|
Use common timezones and not all timezones pytz provides.
git-svn-id: 13ea2b4cf383b32e0a7498d153ee882d068671f7@13 86ebb30f-654e-0410-bc0d-7bf82786d749
|
Python
|
bsd-2-clause
|
mfogel/django-timezone-field,brosner/django-timezones
|
import pytz
TIMEZONE_CHOICES = zip(pytz.all_timezones, pytz.all_timezones)
Use common timezones and not all timezones pytz provides.
git-svn-id: 13ea2b4cf383b32e0a7498d153ee882d068671f7@13 86ebb30f-654e-0410-bc0d-7bf82786d749
|
import pytz
TIMEZONE_CHOICES = zip(pytz.common_timezones, pytz.common_timezones)
|
<commit_before>
import pytz
TIMEZONE_CHOICES = zip(pytz.all_timezones, pytz.all_timezones)
<commit_msg>Use common timezones and not all timezones pytz provides.
git-svn-id: 13ea2b4cf383b32e0a7498d153ee882d068671f7@13 86ebb30f-654e-0410-bc0d-7bf82786d749<commit_after>
|
import pytz
TIMEZONE_CHOICES = zip(pytz.common_timezones, pytz.common_timezones)
|
import pytz
TIMEZONE_CHOICES = zip(pytz.all_timezones, pytz.all_timezones)
Use common timezones and not all timezones pytz provides.
git-svn-id: 13ea2b4cf383b32e0a7498d153ee882d068671f7@13 86ebb30f-654e-0410-bc0d-7bf82786d749
import pytz
TIMEZONE_CHOICES = zip(pytz.common_timezones, pytz.common_timezones)
|
<commit_before>
import pytz
TIMEZONE_CHOICES = zip(pytz.all_timezones, pytz.all_timezones)
<commit_msg>Use common timezones and not all timezones pytz provides.
git-svn-id: 13ea2b4cf383b32e0a7498d153ee882d068671f7@13 86ebb30f-654e-0410-bc0d-7bf82786d749<commit_after>
import pytz
TIMEZONE_CHOICES = zip(pytz.common_timezones, pytz.common_timezones)
|
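Editor's note: pytz.common_timezones is a curated subset of pytz.all_timezones, so the change above shrinks the choice list without introducing invalid zone names. A quick sanity check (requires pytz installed; the list() wrapper is only needed on Python 3, where zip is lazy):
import pytz
assert set(pytz.common_timezones) <= set(pytz.all_timezones)
assert len(pytz.common_timezones) < len(pytz.all_timezones)
TIMEZONE_CHOICES = list(zip(pytz.common_timezones, pytz.common_timezones))
assert ('UTC', 'UTC') in TIMEZONE_CHOICES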
8bd738972cebd27b068250bd52db8aacea6c7876
|
src/condor_tests/ornithology/plugin.py
|
src/condor_tests/ornithology/plugin.py
|
import pytest
from .scripts import SCRIPTS
# This module is loaded as a "plugin" by pytest by a setting in conftest.py
# Any fixtures defined here will be globally available in tests,
# as if they were defined in conftest.py itself.
@pytest.fixture(scope="session")
def path_to_sleep():
return SCRIPTS["sleep"]
|
import sys
import pytest
from .scripts import SCRIPTS
# This module is loaded as a "plugin" by pytest by a setting in conftest.py
# Any fixtures defined here will be globally available in tests,
# as if they were defined in conftest.py itself.
@pytest.fixture(scope="session")
def path_to_sleep():
return SCRIPTS["sleep"]
@pytest.fixture(scope="session")
def path_to_python():
return sys.executable
|
Add path_to_python fixture to make writing multiplatform job scripts easier.
|
Add path_to_python fixture to make writing multiplatform job scripts easier.
|
Python
|
apache-2.0
|
htcondor/htcondor,htcondor/htcondor,htcondor/htcondor,htcondor/htcondor,htcondor/htcondor,htcondor/htcondor,htcondor/htcondor,htcondor/htcondor
|
import pytest
from .scripts import SCRIPTS
# This module is loaded as a "plugin" by pytest by a setting in conftest.py
# Any fixtures defined here will be globally available in tests,
# as if they were defined in conftest.py itself.
@pytest.fixture(scope="session")
def path_to_sleep():
return SCRIPTS["sleep"]
Add path_to_python fixture to make writing multiplatform job scripts easier.
|
import sys
import pytest
from .scripts import SCRIPTS
# This module is loaded as a "plugin" by pytest by a setting in conftest.py
# Any fixtures defined here will be globally available in tests,
# as if they were defined in conftest.py itself.
@pytest.fixture(scope="session")
def path_to_sleep():
return SCRIPTS["sleep"]
@pytest.fixture(scope="session")
def path_to_python():
return sys.executable
|
<commit_before>import pytest
from .scripts import SCRIPTS
# This module is loaded as a "plugin" by pytest by a setting in conftest.py
# Any fixtures defined here will be globally available in tests,
# as if they were defined in conftest.py itself.
@pytest.fixture(scope="session")
def path_to_sleep():
return SCRIPTS["sleep"]
<commit_msg>Add path_to_python fixture to make writing multiplatform job scripts easier.<commit_after>
|
import sys
import pytest
from .scripts import SCRIPTS
# This module is loaded as a "plugin" by pytest by a setting in conftest.py
# Any fixtures defined here will be globally available in tests,
# as if they were defined in conftest.py itself.
@pytest.fixture(scope="session")
def path_to_sleep():
return SCRIPTS["sleep"]
@pytest.fixture(scope="session")
def path_to_python():
return sys.executable
|
import pytest
from .scripts import SCRIPTS
# This module is loaded as a "plugin" by pytest by a setting in conftest.py
# Any fixtures defined here will be globally available in tests,
# as if they were defined in conftest.py itself.
@pytest.fixture(scope="session")
def path_to_sleep():
return SCRIPTS["sleep"]
Add path_to_python fixture to make writing multiplatform job scripts easier.import sys
import pytest
from .scripts import SCRIPTS
# This module is loaded as a "plugin" by pytest by a setting in conftest.py
# Any fixtures defined here will be globally available in tests,
# as if they were defined in conftest.py itself.
@pytest.fixture(scope="session")
def path_to_sleep():
return SCRIPTS["sleep"]
@pytest.fixture(scope="session")
def path_to_python():
return sys.executable
|
<commit_before>import pytest
from .scripts import SCRIPTS
# This module is loaded as a "plugin" by pytest by a setting in conftest.py
# Any fixtures defined here will be globally available in tests,
# as if they were defined in conftest.py itself.
@pytest.fixture(scope="session")
def path_to_sleep():
return SCRIPTS["sleep"]
<commit_msg>Add path_to_python fixture to make writing multiplatform job scripts easier.<commit_after>import sys
import pytest
from .scripts import SCRIPTS
# This module is loaded as a "plugin" by pytest by a setting in conftest.py
# Any fixtures defined here will be globally available in tests,
# as if they were defined in conftest.py itself.
@pytest.fixture(scope="session")
def path_to_sleep():
return SCRIPTS["sleep"]
@pytest.fixture(scope="session")
def path_to_python():
return sys.executable
|
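Editor's note: sys.executable is the absolute path of the interpreter currently running, which is why exposing it as a fixture makes job scripts portable across platforms and virtualenvs. A standalone demonstration of the idea:
import subprocess
import sys
# spawn "the same Python" regardless of OS or environment
out = subprocess.check_output([sys.executable, '-c', 'print("ok")'])
assert out.strip() == b'ok'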
775170d69862aaff63231b669639a872596ed2cd
|
test_interpreter.py
|
test_interpreter.py
|
import unittest
import brainfuck
test_cases = [("++++++++[>++++[>++>+++>+++>+<<<<-]>+>+>->>+[<]<-]>>.>---.+++++++..+++.>>.<-.<.+++.------.--------.>>+.>++.", "Hello World!\n")]
class InterpreterTestCase(unittest.TestCase):
def setUp(self):
self.interpreter = brainfuck.BrainfuckInterpreter()
def runTest(self):
for case in test_cases:
self.assertEqual(case[1], self.interpreter.eval(case[0]))
|
import unittest
import brainfuck
hello_case = ("++++++++[>++++[>++>+++>+++>+<<<<-]>+>+>->>+[<]<-]>>.>---.+++++++..+++.>>.<-.<.+++.------.--------.>>+.>++.", "Hello World!\n")
class InterpreterTestCase(unittest.TestCase):
def setUp(self):
self.interpreter = brainfuck.BrainfuckInterpreter()
def test_hello_world(self):
self.assertEqual(hello_case[1], self.interpreter.eval(hello_case[0]))
def test_missing_parenthesis(self):
self.assertRaises(SyntaxError, self.interpreter.eval, '[++]+]')
|
Add unittest for missing parenthesis
|
Add unittest for missing parenthesis
|
Python
|
bsd-3-clause
|
handrake/brainfuck
|
import unittest
import brainfuck
test_cases = [("++++++++[>++++[>++>+++>+++>+<<<<-]>+>+>->>+[<]<-]>>.>---.+++++++..+++.>>.<-.<.+++.------.--------.>>+.>++.", "Hello World!\n")]
class InterpreterTestCase(unittest.TestCase):
def setUp(self):
self.interpreter = brainfuck.BrainfuckInterpreter()
def runTest(self):
for case in test_cases:
self.assertEqual(case[1], self.interpreter.eval(case[0]))
Add unittest for missing parenthesis
|
import unittest
import brainfuck
hello_case = ("++++++++[>++++[>++>+++>+++>+<<<<-]>+>+>->>+[<]<-]>>.>---.+++++++..+++.>>.<-.<.+++.------.--------.>>+.>++.", "Hello World!\n")
class InterpreterTestCase(unittest.TestCase):
def setUp(self):
self.interpreter = brainfuck.BrainfuckInterpreter()
def test_hello_world(self):
self.assertEqual(hello_case[1], self.interpreter.eval(hello_case[0]))
def test_missing_parenthesis(self):
self.assertRaises(SyntaxError, self.interpreter.eval, '[++]+]')
|
<commit_before>import unittest
import brainfuck
test_cases = [("++++++++[>++++[>++>+++>+++>+<<<<-]>+>+>->>+[<]<-]>>.>---.+++++++..+++.>>.<-.<.+++.------.--------.>>+.>++.", "Hello World!\n")]
class InterpreterTestCase(unittest.TestCase):
def setUp(self):
self.interpreter = brainfuck.BrainfuckInterpreter()
def runTest(self):
for case in test_cases:
self.assertEqual(case[1], self.interpreter.eval(case[0]))
<commit_msg>Add unittest for missing parenthesis<commit_after>
|
import unittest
import brainfuck
hello_case = ("++++++++[>++++[>++>+++>+++>+<<<<-]>+>+>->>+[<]<-]>>.>---.+++++++..+++.>>.<-.<.+++.------.--------.>>+.>++.", "Hello World!\n")
class InterpreterTestCase(unittest.TestCase):
def setUp(self):
self.interpreter = brainfuck.BrainfuckInterpreter()
def test_hello_world(self):
self.assertEqual(hello_case[1], self.interpreter.eval(hello_case[0]))
def test_missing_parenthesis(self):
self.assertRaises(SyntaxError, self.interpreter.eval, '[++]+]')
|
import unittest
import brainfuck
test_cases = [("++++++++[>++++[>++>+++>+++>+<<<<-]>+>+>->>+[<]<-]>>.>---.+++++++..+++.>>.<-.<.+++.------.--------.>>+.>++.", "Hello World!\n")]
class InterpreterTestCase(unittest.TestCase):
def setUp(self):
self.interpreter = brainfuck.BrainfuckInterpreter()
def runTest(self):
for case in test_cases:
self.assertEqual(case[1], self.interpreter.eval(case[0]))
Add unittest for missing parenthesisimport unittest
import brainfuck
hello_case = ("++++++++[>++++[>++>+++>+++>+<<<<-]>+>+>->>+[<]<-]>>.>---.+++++++..+++.>>.<-.<.+++.------.--------.>>+.>++.", "Hello World!\n")
class InterpreterTestCase(unittest.TestCase):
def setUp(self):
self.interpreter = brainfuck.BrainfuckInterpreter()
def test_hello_world(self):
self.assertEqual(hello_case[1], self.interpreter.eval(hello_case[0]))
def test_missing_parenthesis(self):
self.assertRaises(SyntaxError, self.interpreter.eval, '[++]+]')
|
<commit_before>import unittest
import brainfuck
test_cases = [("++++++++[>++++[>++>+++>+++>+<<<<-]>+>+>->>+[<]<-]>>.>---.+++++++..+++.>>.<-.<.+++.------.--------.>>+.>++.", "Hello World!\n")]
class InterpreterTestCase(unittest.TestCase):
def setUp(self):
self.interpreter = brainfuck.BrainfuckInterpreter()
def runTest(self):
for case in test_cases:
self.assertEqual(case[1], self.interpreter.eval(case[0]))
<commit_msg>Add unittest for missing parenthesis<commit_after>import unittest
import brainfuck
hello_case = ("++++++++[>++++[>++>+++>+++>+<<<<-]>+>+>->>+[<]<-]>>.>---.+++++++..+++.>>.<-.<.+++.------.--------.>>+.>++.", "Hello World!\n")
class InterpreterTestCase(unittest.TestCase):
def setUp(self):
self.interpreter = brainfuck.BrainfuckInterpreter()
def test_hello_world(self):
self.assertEqual(hello_case[1], self.interpreter.eval(hello_case[0]))
def test_missing_parenthesis(self):
self.assertRaises(SyntaxError, self.interpreter.eval, '[++]+]')
|
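A note on the test above: it relies on eval() rejecting unbalanced brackets with a SyntaxError. Below is a minimal sketch of such a pre-scan bracket check; the repository's actual interpreter is not shown in this record, so this standalone function is illustrative only.

def check_brackets(program):
    # Pre-scan the program and raise SyntaxError on unbalanced '[' / ']',
    # which is exactly what eval('[++]+]') is expected to trigger.
    depth = 0
    for ch in program:
        if ch == '[':
            depth += 1
        elif ch == ']':
            depth -= 1
            if depth < 0:
                raise SyntaxError("unmatched ']'")
    if depth != 0:
        raise SyntaxError("unmatched '['")

On '[++]+]' the counter drops below zero at the final ']', so the check fails before any cell is touched.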
131454ffaec010442c17f748365ab491668d947f
|
plumeria/plugins/bing_images.py
|
plumeria/plugins/bing_images.py
|
from aiohttp import BasicAuth
from plumeria import config, scoped_config
from plumeria.command import commands, CommandError
from plumeria.config.common import nsfw
from plumeria.message import Response
from plumeria.util import http
from plumeria.util.ratelimit import rate_limit
SEARCH_URL = "https://api.datamarket.azure.com/Bing/Search/v1/Image"
api_key = config.create("bing", "key",
fallback="unset",
comment="An API key from Bing")
@commands.register("image", "images", "i", category="Search")
@rate_limit()
async def image(message):
"""
    Search Bing for an image and return a URL to that image.
Example::
/image socially awkward penguin
"""
q = message.content.strip()
if not q:
raise CommandError("Search term required!")
r = await http.get(SEARCH_URL, params=[
('$format', 'json'),
('$top', '10'),
('Adult', "'Off'" if scoped_config.get(nsfw, message.channel) else "'Strict'"),
('Query', "'{}'".format(q)),
], auth=BasicAuth("", password=api_key()))
data = r.json()['d']
if len(data['results']):
return Response(data['results'][0]['MediaUrl'])
else:
raise CommandError("no results found")
|
import random
from aiohttp import BasicAuth
from plumeria import config, scoped_config
from plumeria.command import commands, CommandError
from plumeria.config.common import nsfw
from plumeria.message import Response
from plumeria.util import http
from plumeria.util.ratelimit import rate_limit
SEARCH_URL = "https://api.datamarket.azure.com/Bing/Search/v1/Image"
api_key = config.create("bing", "key",
fallback="unset",
comment="An API key from Bing")
@commands.register("image", "images", "i", category="Search")
@rate_limit()
async def image(message):
"""
    Search Bing for an image and return a URL to that image.
Example::
/image socially awkward penguin
"""
q = message.content.strip()
if not q:
raise CommandError("Search term required!")
r = await http.get(SEARCH_URL, params=[
('$format', 'json'),
('$top', '20'),
('Adult', "'Off'" if scoped_config.get(nsfw, message.channel) else "'Strict'"),
('Query', "'{}'".format(q)),
], auth=BasicAuth("", password=api_key()))
data = r.json()['d']
if len(data['results']):
return Response(random.choice(data['results'])['MediaUrl'])
else:
raise CommandError("no results found")
|
Make the Bing images search pick a random top 20 image.
|
Make the Bing images search pick a random top 20 image.
|
Python
|
mit
|
sk89q/Plumeria,sk89q/Plumeria,sk89q/Plumeria
|
from aiohttp import BasicAuth
from plumeria import config, scoped_config
from plumeria.command import commands, CommandError
from plumeria.config.common import nsfw
from plumeria.message import Response
from plumeria.util import http
from plumeria.util.ratelimit import rate_limit
SEARCH_URL = "https://api.datamarket.azure.com/Bing/Search/v1/Image"
api_key = config.create("bing", "key",
fallback="unset",
comment="An API key from Bing")
@commands.register("image", "images", "i", category="Search")
@rate_limit()
async def image(message):
"""
    Search Bing for an image and return a URL to that image.
Example::
/image socially awkward penguin
"""
q = message.content.strip()
if not q:
raise CommandError("Search term required!")
r = await http.get(SEARCH_URL, params=[
('$format', 'json'),
('$top', '10'),
('Adult', "'Off'" if scoped_config.get(nsfw, message.channel) else "'Strict'"),
('Query', "'{}'".format(q)),
], auth=BasicAuth("", password=api_key()))
data = r.json()['d']
if len(data['results']):
return Response(data['results'][0]['MediaUrl'])
else:
raise CommandError("no results found")
Make the Bing images search pick a random top 20 image.
|
import random
from aiohttp import BasicAuth
from plumeria import config, scoped_config
from plumeria.command import commands, CommandError
from plumeria.config.common import nsfw
from plumeria.message import Response
from plumeria.util import http
from plumeria.util.ratelimit import rate_limit
SEARCH_URL = "https://api.datamarket.azure.com/Bing/Search/v1/Image"
api_key = config.create("bing", "key",
fallback="unset",
comment="An API key from Bing")
@commands.register("image", "images", "i", category="Search")
@rate_limit()
async def image(message):
"""
    Search Bing for an image and return a URL to that image.
Example::
/image socially awkward penguin
"""
q = message.content.strip()
if not q:
raise CommandError("Search term required!")
r = await http.get(SEARCH_URL, params=[
('$format', 'json'),
('$top', '20'),
('Adult', "'Off'" if scoped_config.get(nsfw, message.channel) else "'Strict'"),
('Query', "'{}'".format(q)),
], auth=BasicAuth("", password=api_key()))
data = r.json()['d']
if len(data['results']):
return Response(random.choice(data['results'])['MediaUrl'])
else:
raise CommandError("no results found")
|
<commit_before>from aiohttp import BasicAuth
from plumeria import config, scoped_config
from plumeria.command import commands, CommandError
from plumeria.config.common import nsfw
from plumeria.message import Response
from plumeria.util import http
from plumeria.util.ratelimit import rate_limit
SEARCH_URL = "https://api.datamarket.azure.com/Bing/Search/v1/Image"
api_key = config.create("bing", "key",
fallback="unset",
comment="An API key from Bing")
@commands.register("image", "images", "i", category="Search")
@rate_limit()
async def image(message):
"""
    Search Bing for an image and return a URL to that image.
Example::
/image socially awkward penguin
"""
q = message.content.strip()
if not q:
raise CommandError("Search term required!")
r = await http.get(SEARCH_URL, params=[
('$format', 'json'),
('$top', '10'),
('Adult', "'Off'" if scoped_config.get(nsfw, message.channel) else "'Strict'"),
('Query', "'{}'".format(q)),
], auth=BasicAuth("", password=api_key()))
data = r.json()['d']
if len(data['results']):
return Response(data['results'][0]['MediaUrl'])
else:
raise CommandError("no results found")
<commit_msg>Make the Bing images search pick a random top 20 image.<commit_after>
|
import random
from aiohttp import BasicAuth
from plumeria import config, scoped_config
from plumeria.command import commands, CommandError
from plumeria.config.common import nsfw
from plumeria.message import Response
from plumeria.util import http
from plumeria.util.ratelimit import rate_limit
SEARCH_URL = "https://api.datamarket.azure.com/Bing/Search/v1/Image"
api_key = config.create("bing", "key",
fallback="unset",
comment="An API key from Bing")
@commands.register("image", "images", "i", category="Search")
@rate_limit()
async def image(message):
"""
    Search Bing for an image and return a URL to that image.
Example::
/image socially awkward penguin
"""
q = message.content.strip()
if not q:
raise CommandError("Search term required!")
r = await http.get(SEARCH_URL, params=[
('$format', 'json'),
('$top', '20'),
('Adult', "'Off'" if scoped_config.get(nsfw, message.channel) else "'Strict'"),
('Query', "'{}'".format(q)),
], auth=BasicAuth("", password=api_key()))
data = r.json()['d']
if len(data['results']):
return Response(random.choice(data['results'])['MediaUrl'])
else:
raise CommandError("no results found")
|
from aiohttp import BasicAuth
from plumeria import config, scoped_config
from plumeria.command import commands, CommandError
from plumeria.config.common import nsfw
from plumeria.message import Response
from plumeria.util import http
from plumeria.util.ratelimit import rate_limit
SEARCH_URL = "https://api.datamarket.azure.com/Bing/Search/v1/Image"
api_key = config.create("bing", "key",
fallback="unset",
comment="An API key from Bing")
@commands.register("image", "images", "i", category="Search")
@rate_limit()
async def image(message):
"""
    Search Bing for an image and return a URL to that image.
Example::
/image socially awkward penguin
"""
q = message.content.strip()
if not q:
raise CommandError("Search term required!")
r = await http.get(SEARCH_URL, params=[
('$format', 'json'),
('$top', '10'),
('Adult', "'Off'" if scoped_config.get(nsfw, message.channel) else "'Strict'"),
('Query', "'{}'".format(q)),
], auth=BasicAuth("", password=api_key()))
data = r.json()['d']
if len(data['results']):
return Response(data['results'][0]['MediaUrl'])
else:
raise CommandError("no results found")
Make the Bing images search pick a random top 20 image.
import random
from aiohttp import BasicAuth
from plumeria import config, scoped_config
from plumeria.command import commands, CommandError
from plumeria.config.common import nsfw
from plumeria.message import Response
from plumeria.util import http
from plumeria.util.ratelimit import rate_limit
SEARCH_URL = "https://api.datamarket.azure.com/Bing/Search/v1/Image"
api_key = config.create("bing", "key",
fallback="unset",
comment="An API key from Bing")
@commands.register("image", "images", "i", category="Search")
@rate_limit()
async def image(message):
"""
    Search Bing for an image and return a URL to that image.
Example::
/image socially awkward penguin
"""
q = message.content.strip()
if not q:
raise CommandError("Search term required!")
r = await http.get(SEARCH_URL, params=[
('$format', 'json'),
('$top', '20'),
('Adult', "'Off'" if scoped_config.get(nsfw, message.channel) else "'Strict'"),
('Query', "'{}'".format(q)),
], auth=BasicAuth("", password=api_key()))
data = r.json()['d']
if len(data['results']):
return Response(random.choice(data['results'])['MediaUrl'])
else:
raise CommandError("no results found")
|
<commit_before>from aiohttp import BasicAuth
from plumeria import config, scoped_config
from plumeria.command import commands, CommandError
from plumeria.config.common import nsfw
from plumeria.message import Response
from plumeria.util import http
from plumeria.util.ratelimit import rate_limit
SEARCH_URL = "https://api.datamarket.azure.com/Bing/Search/v1/Image"
api_key = config.create("bing", "key",
fallback="unset",
comment="An API key from Bing")
@commands.register("image", "images", "i", category="Search")
@rate_limit()
async def image(message):
"""
    Search Bing for an image and return a URL to that image.
Example::
/image socially awkward penguin
"""
q = message.content.strip()
if not q:
raise CommandError("Search term required!")
r = await http.get(SEARCH_URL, params=[
('$format', 'json'),
('$top', '10'),
('Adult', "'Off'" if scoped_config.get(nsfw, message.channel) else "'Strict'"),
('Query', "'{}'".format(q)),
], auth=BasicAuth("", password=api_key()))
data = r.json()['d']
if len(data['results']):
return Response(data['results'][0]['MediaUrl'])
else:
raise CommandError("no results found")
<commit_msg>Make the Bing images search pick a random top 20 image.<commit_after>import random
from aiohttp import BasicAuth
from plumeria import config, scoped_config
from plumeria.command import commands, CommandError
from plumeria.config.common import nsfw
from plumeria.message import Response
from plumeria.util import http
from plumeria.util.ratelimit import rate_limit
SEARCH_URL = "https://api.datamarket.azure.com/Bing/Search/v1/Image"
api_key = config.create("bing", "key",
fallback="unset",
comment="An API key from Bing")
@commands.register("image", "images", "i", category="Search")
@rate_limit()
async def image(message):
"""
    Search Bing for an image and return a URL to that image.
Example::
/image socially awkward penguin
"""
q = message.content.strip()
if not q:
raise CommandError("Search term required!")
r = await http.get(SEARCH_URL, params=[
('$format', 'json'),
('$top', '20'),
('Adult', "'Off'" if scoped_config.get(nsfw, message.channel) else "'Strict'"),
('Query', "'{}'".format(q)),
], auth=BasicAuth("", password=api_key()))
data = r.json()['d']
if len(data['results']):
return Response(random.choice(data['results'])['MediaUrl'])
else:
raise CommandError("no results found")
|
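The diff above widens the result window ($top 10 to 20) and swaps the fixed results[0] for random.choice. A small sketch of that selection pattern, using made-up result dicts shaped like the Bing payload in the record:

import random

# Hypothetical stand-in for data['results'] in the Bing response.
results = [{'MediaUrl': 'https://example.com/img/{}.jpg'.format(i)} for i in range(20)]

def pick_media_url(results):
    # random.choice raises IndexError on an empty sequence, so keep the
    # emptiness check the command performs before choosing.
    if not results:
        raise ValueError('no results found')
    return random.choice(results)['MediaUrl']

print(pick_media_url(results))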
f7caec9d0c0058b5d760992172b434b461f70d90
|
molecule/default/tests/test_default.py
|
molecule/default/tests/test_default.py
|
import os
import pytest
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
def test_docker_service_enabled(host):
service = host.service('docker')
assert service.is_enabled
def test_docker_service_running(host):
service = host.service('docker')
assert service.is_running
@pytest.mark.parametrize('socket_def', [
    # listening on localhost, for tcp sockets on port 1883 (MQTT)
    ('tcp://127.0.0.1:1883'),
    # all IPv4 tcp sockets on port 8883 (MQTTS)
('tcp://8883'),
])
def test_listening_sockets(host, socket_def):
socket = host.socket(socket_def)
assert socket.is_listening
# Tests to write:
# - using the localhost listener:
# - verify that a message can be published
# - verify that a published message can be subscribed to
|
import os
import pytest
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
def test_docker_service_enabled(host):
service = host.service('docker')
assert service.is_enabled
def test_docker_service_running(host):
service = host.service('docker')
assert service.is_running
@pytest.mark.parametrize('socket_def', [
    # all IPv4 tcp sockets on port 8883 (MQTTS)
('tcp://8883'),
])
def test_listening_sockets(host, socket_def):
socket = host.socket(socket_def)
assert socket.is_listening
# Tests to write:
# - using the localhost listener:
# - verify that a message can be published
# - verify that a published message can be subscribed to
|
Remove test enforcing listening on port 1883
|
Remove test enforcing listening on port 1883
|
Python
|
mit
|
triplepoint/ansible-mosquitto
|
import os
import pytest
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
def test_docker_service_enabled(host):
service = host.service('docker')
assert service.is_enabled
def test_docker_service_running(host):
service = host.service('docker')
assert service.is_running
@pytest.mark.parametrize('socket_def', [
    # listening on localhost, for tcp sockets on port 1883 (MQTT)
    ('tcp://127.0.0.1:1883'),
    # all IPv4 tcp sockets on port 8883 (MQTTS)
('tcp://8883'),
])
def test_listening_sockets(host, socket_def):
socket = host.socket(socket_def)
assert socket.is_listening
# Tests to write:
# - using the localhost listener:
# - verify that a message can be published
# - verify that a published message can be subscribed to
Remove test enforcing listening on port 1883
|
import os
import pytest
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
def test_docker_service_enabled(host):
service = host.service('docker')
assert service.is_enabled
def test_docker_service_running(host):
service = host.service('docker')
assert service.is_running
@pytest.mark.parametrize('socket_def', [
    # all IPv4 tcp sockets on port 8883 (MQTTS)
('tcp://8883'),
])
def test_listening_sockets(host, socket_def):
socket = host.socket(socket_def)
assert socket.is_listening
# Tests to write:
# - using the localhost listener:
# - verify that a message can be published
# - verify that a published message can be subscribed to
|
<commit_before>import os
import pytest
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
def test_docker_service_enabled(host):
service = host.service('docker')
assert service.is_enabled
def test_docker_service_running(host):
service = host.service('docker')
assert service.is_running
@pytest.mark.parametrize('socket_def', [
    # listening on localhost, for tcp sockets on port 1883 (MQTT)
    ('tcp://127.0.0.1:1883'),
    # all IPv4 tcp sockets on port 8883 (MQTTS)
('tcp://8883'),
])
def test_listening_sockets(host, socket_def):
socket = host.socket(socket_def)
assert socket.is_listening
# Tests to write:
# - using the localhost listener:
# - verify that a message can be published
# - verify that a published message can be subscribed to
<commit_msg>Remove test enforcing listening on port 1883<commit_after>
|
import os
import pytest
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
def test_docker_service_enabled(host):
service = host.service('docker')
assert service.is_enabled
def test_docker_service_running(host):
service = host.service('docker')
assert service.is_running
@pytest.mark.parametrize('socket_def', [
    # all IPv4 tcp sockets on port 8883 (MQTTS)
('tcp://8883'),
])
def test_listening_sockets(host, socket_def):
socket = host.socket(socket_def)
assert socket.is_listening
# Tests to write:
# - using the localhost listener:
# - verify that a message can be published
# - verify that a published message can be subscribed to
|
import os
import pytest
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
def test_docker_service_enabled(host):
service = host.service('docker')
assert service.is_enabled
def test_docker_service_running(host):
service = host.service('docker')
assert service.is_running
@pytest.mark.parametrize('socket_def', [
    # listening on localhost, for tcp sockets on port 1883 (MQTT)
    ('tcp://127.0.0.1:1883'),
    # all IPv4 tcp sockets on port 8883 (MQTTS)
('tcp://8883'),
])
def test_listening_sockets(host, socket_def):
socket = host.socket(socket_def)
assert socket.is_listening
# Tests to write:
# - using the localhost listener:
# - verify that a message can be published
# - verify that a published message can be subscribed to
Remove test enforcing listening on port 1883
import os
import pytest
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
def test_docker_service_enabled(host):
service = host.service('docker')
assert service.is_enabled
def test_docker_service_running(host):
service = host.service('docker')
assert service.is_running
@pytest.mark.parametrize('socket_def', [
    # all IPv4 tcp sockets on port 8883 (MQTTS)
('tcp://8883'),
])
def test_listening_sockets(host, socket_def):
socket = host.socket(socket_def)
assert socket.is_listening
# Tests to write:
# - using the localhost listener:
# - verify that a message can be published
# - verify that a published message can be subscribed to
|
<commit_before>import os
import pytest
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
def test_docker_service_enabled(host):
service = host.service('docker')
assert service.is_enabled
def test_docker_service_running(host):
service = host.service('docker')
assert service.is_running
@pytest.mark.parametrize('socket_def', [
    # listening on localhost, for tcp sockets on port 1883 (MQTT)
    ('tcp://127.0.0.1:1883'),
    # all IPv4 tcp sockets on port 8883 (MQTTS)
('tcp://8883'),
])
def test_listening_sockets(host, socket_def):
socket = host.socket(socket_def)
assert socket.is_listening
# Tests to write:
# - using the localhost listener:
# - verify that a message can be published
# - verify that a published message can be subscribed to
<commit_msg>Remove test enforcing listening on port 1883<commit_after>import os
import pytest
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
def test_docker_service_enabled(host):
service = host.service('docker')
assert service.is_enabled
def test_docker_service_running(host):
service = host.service('docker')
assert service.is_running
@pytest.mark.parametrize('socket_def', [
    # all IPv4 tcp sockets on port 8883 (MQTTS)
('tcp://8883'),
])
def test_listening_sockets(host, socket_def):
socket = host.socket(socket_def)
assert socket.is_listening
# Tests to write:
# - using the localhost listener:
# - verify that a message can be published
# - verify that a published message can be subscribed to
|
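The trailing comments in both versions list publish/subscribe tests still to be written. One possible shape for them, assuming the paho-mqtt client library, a made-up topic name, and the localhost listener on 1883 (all assumptions; the role's real test harness is not shown):

import paho.mqtt.publish as publish
import paho.mqtt.subscribe as subscribe

def test_localhost_roundtrip():
    # Publish one retained message so a subscriber connecting afterwards
    # still receives it, then read it back from the broker.
    publish.single('molecule/test', payload='ping', retain=True,
                   hostname='127.0.0.1', port=1883)
    msg = subscribe.simple('molecule/test', hostname='127.0.0.1', port=1883)
    assert msg.payload == b'ping'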
3dc06581d07a204a3044e3a78deb84950a6ebf74
|
mtp_transaction_uploader/api_client.py
|
mtp_transaction_uploader/api_client.py
|
from urllib.parse import urljoin
from oauthlib.oauth2 import LegacyApplicationClient
from requests_oauthlib import OAuth2Session
import slumber
from . import settings
REQUEST_TOKEN_URL = urljoin(settings.API_URL, '/oauth2/token/')
def get_authenticated_connection():
"""
Returns:
an authenticated slumber connection
"""
session = OAuth2Session(
client=LegacyApplicationClient(
client_id=settings.API_CLIENT_ID
)
)
session.fetch_token(
token_url=REQUEST_TOKEN_URL,
username=settings.API_USERNAME,
password=settings.API_PASSWORD,
client_id=settings.API_CLIENT_ID,
client_secret=settings.API_CLIENT_SECRET
)
return slumber.API(
base_url=settings.API_URL, session=session
)
|
from urllib.parse import urljoin
from oauthlib.oauth2 import LegacyApplicationClient
from requests.auth import HTTPBasicAuth
from requests_oauthlib import OAuth2Session
import slumber
from . import settings
REQUEST_TOKEN_URL = urljoin(settings.API_URL, '/oauth2/token/')
def get_authenticated_connection():
"""
Returns:
an authenticated slumber connection
"""
session = OAuth2Session(
client=LegacyApplicationClient(
client_id=settings.API_CLIENT_ID
)
)
session.fetch_token(
token_url=REQUEST_TOKEN_URL,
username=settings.API_USERNAME,
password=settings.API_PASSWORD,
auth=HTTPBasicAuth(settings.API_CLIENT_ID, settings.API_CLIENT_SECRET)
)
return slumber.API(
base_url=settings.API_URL, session=session
)
|
Use HTTPBasicAuth when connecting to the API
|
Use HTTPBasicAuth when connecting to the API
|
Python
|
mit
|
ministryofjustice/money-to-prisoners-transaction-uploader
|
from urllib.parse import urljoin
from oauthlib.oauth2 import LegacyApplicationClient
from requests_oauthlib import OAuth2Session
import slumber
from . import settings
REQUEST_TOKEN_URL = urljoin(settings.API_URL, '/oauth2/token/')
def get_authenticated_connection():
"""
Returns:
an authenticated slumber connection
"""
session = OAuth2Session(
client=LegacyApplicationClient(
client_id=settings.API_CLIENT_ID
)
)
session.fetch_token(
token_url=REQUEST_TOKEN_URL,
username=settings.API_USERNAME,
password=settings.API_PASSWORD,
client_id=settings.API_CLIENT_ID,
client_secret=settings.API_CLIENT_SECRET
)
return slumber.API(
base_url=settings.API_URL, session=session
)
Use HTTPBasicAuth when connecting to the API
|
from urllib.parse import urljoin
from oauthlib.oauth2 import LegacyApplicationClient
from requests.auth import HTTPBasicAuth
from requests_oauthlib import OAuth2Session
import slumber
from . import settings
REQUEST_TOKEN_URL = urljoin(settings.API_URL, '/oauth2/token/')
def get_authenticated_connection():
"""
Returns:
an authenticated slumber connection
"""
session = OAuth2Session(
client=LegacyApplicationClient(
client_id=settings.API_CLIENT_ID
)
)
session.fetch_token(
token_url=REQUEST_TOKEN_URL,
username=settings.API_USERNAME,
password=settings.API_PASSWORD,
auth=HTTPBasicAuth(settings.API_CLIENT_ID, settings.API_CLIENT_SECRET)
)
return slumber.API(
base_url=settings.API_URL, session=session
)
|
<commit_before>from urllib.parse import urljoin
from oauthlib.oauth2 import LegacyApplicationClient
from requests_oauthlib import OAuth2Session
import slumber
from . import settings
REQUEST_TOKEN_URL = urljoin(settings.API_URL, '/oauth2/token/')
def get_authenticated_connection():
"""
Returns:
an authenticated slumber connection
"""
session = OAuth2Session(
client=LegacyApplicationClient(
client_id=settings.API_CLIENT_ID
)
)
session.fetch_token(
token_url=REQUEST_TOKEN_URL,
username=settings.API_USERNAME,
password=settings.API_PASSWORD,
client_id=settings.API_CLIENT_ID,
client_secret=settings.API_CLIENT_SECRET
)
return slumber.API(
base_url=settings.API_URL, session=session
)
<commit_msg>Use HTTPBasicAuth when connecting to the API<commit_after>
|
from urllib.parse import urljoin
from oauthlib.oauth2 import LegacyApplicationClient
from requests.auth import HTTPBasicAuth
from requests_oauthlib import OAuth2Session
import slumber
from . import settings
REQUEST_TOKEN_URL = urljoin(settings.API_URL, '/oauth2/token/')
def get_authenticated_connection():
"""
Returns:
an authenticated slumber connection
"""
session = OAuth2Session(
client=LegacyApplicationClient(
client_id=settings.API_CLIENT_ID
)
)
session.fetch_token(
token_url=REQUEST_TOKEN_URL,
username=settings.API_USERNAME,
password=settings.API_PASSWORD,
auth=HTTPBasicAuth(settings.API_CLIENT_ID, settings.API_CLIENT_SECRET)
)
return slumber.API(
base_url=settings.API_URL, session=session
)
|
from urllib.parse import urljoin
from oauthlib.oauth2 import LegacyApplicationClient
from requests_oauthlib import OAuth2Session
import slumber
from . import settings
REQUEST_TOKEN_URL = urljoin(settings.API_URL, '/oauth2/token/')
def get_authenticated_connection():
"""
Returns:
an authenticated slumber connection
"""
session = OAuth2Session(
client=LegacyApplicationClient(
client_id=settings.API_CLIENT_ID
)
)
session.fetch_token(
token_url=REQUEST_TOKEN_URL,
username=settings.API_USERNAME,
password=settings.API_PASSWORD,
client_id=settings.API_CLIENT_ID,
client_secret=settings.API_CLIENT_SECRET
)
return slumber.API(
base_url=settings.API_URL, session=session
)
Use HTTPBasicAuth when connecting to the API
from urllib.parse import urljoin
from oauthlib.oauth2 import LegacyApplicationClient
from requests.auth import HTTPBasicAuth
from requests_oauthlib import OAuth2Session
import slumber
from . import settings
REQUEST_TOKEN_URL = urljoin(settings.API_URL, '/oauth2/token/')
def get_authenticated_connection():
"""
Returns:
an authenticated slumber connection
"""
session = OAuth2Session(
client=LegacyApplicationClient(
client_id=settings.API_CLIENT_ID
)
)
session.fetch_token(
token_url=REQUEST_TOKEN_URL,
username=settings.API_USERNAME,
password=settings.API_PASSWORD,
auth=HTTPBasicAuth(settings.API_CLIENT_ID, settings.API_CLIENT_SECRET)
)
return slumber.API(
base_url=settings.API_URL, session=session
)
|
<commit_before>from urllib.parse import urljoin
from oauthlib.oauth2 import LegacyApplicationClient
from requests_oauthlib import OAuth2Session
import slumber
from . import settings
REQUEST_TOKEN_URL = urljoin(settings.API_URL, '/oauth2/token/')
def get_authenticated_connection():
"""
Returns:
an authenticated slumber connection
"""
session = OAuth2Session(
client=LegacyApplicationClient(
client_id=settings.API_CLIENT_ID
)
)
session.fetch_token(
token_url=REQUEST_TOKEN_URL,
username=settings.API_USERNAME,
password=settings.API_PASSWORD,
client_id=settings.API_CLIENT_ID,
client_secret=settings.API_CLIENT_SECRET
)
return slumber.API(
base_url=settings.API_URL, session=session
)
<commit_msg>Use HTTPBasicAuth when connecting to the API<commit_after>from urllib.parse import urljoin
from oauthlib.oauth2 import LegacyApplicationClient
from requests.auth import HTTPBasicAuth
from requests_oauthlib import OAuth2Session
import slumber
from . import settings
REQUEST_TOKEN_URL = urljoin(settings.API_URL, '/oauth2/token/')
def get_authenticated_connection():
"""
Returns:
an authenticated slumber connection
"""
session = OAuth2Session(
client=LegacyApplicationClient(
client_id=settings.API_CLIENT_ID
)
)
session.fetch_token(
token_url=REQUEST_TOKEN_URL,
username=settings.API_USERNAME,
password=settings.API_PASSWORD,
auth=HTTPBasicAuth(settings.API_CLIENT_ID, settings.API_CLIENT_SECRET)
)
return slumber.API(
base_url=settings.API_URL, session=session
)
|
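The change above moves the client credentials out of the token request body and into an Authorization header. Here is what HTTPBasicAuth actually adds, demonstrated on a prepared request with placeholder credentials and URL:

import base64

import requests
from requests.auth import HTTPBasicAuth

# Placeholder client id/secret, for illustration only.
auth = HTTPBasicAuth('my-client-id', 'my-client-secret')
req = auth(requests.Request('POST', 'https://api.example.com/oauth2/token/').prepare())

expected = 'Basic ' + base64.b64encode(b'my-client-id:my-client-secret').decode('ascii')
assert req.headers['Authorization'] == expected

Many OAuth2 token endpoints prefer, or outright require, HTTP Basic client authentication over credentials in the form body, which is the usual motivation for a switch like this.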
e6933a46086f83913de307fb8803ccbfd3c55114
|
mysite/mysite/tests/test_middleware.py
|
mysite/mysite/tests/test_middleware.py
|
from django.contrib.auth.models import User
from django.test import TestCase
from DjangoLibrary.middleware import FactoryBoyMiddleware
from mock import Mock
import json
class TestFactoryBoyMiddleware(TestCase):
def setUp(self):
self.middleware = FactoryBoyMiddleware()
self.request = Mock()
self.request.session = {}
def test_process_request_creates_object(self):
self.request.configure_mock(
**{
'GET': {
'FACTORY_BOY_MODEL_PATH': 'mysite.tests.factories.UserFactory', # noqa
'FACTORY_BOY_ARGS': ''
}
}
)
response = self.middleware.process_request(self.request)
self.assertEqual(201, response.status_code)
self.assertEqual(
'johndoe',
json.loads(response.content).get('username')
)
self.assertEqual(1, len(User.objects.values()))
self.assertEqual('johndoe', User.objects.values()[0]['username'])
|
from django.contrib.auth.models import User
from django.test import TestCase
from DjangoLibrary.middleware import FactoryBoyMiddleware
from mock import Mock
import json
class TestFactoryBoyMiddleware(TestCase):
def setUp(self):
self.middleware = FactoryBoyMiddleware()
self.request = Mock()
self.request.session = {}
def test_process_request_creates_object(self):
self.request.configure_mock(
**{
'GET': {
'FACTORY_BOY_MODEL_PATH': 'mysite.tests.factories.UserFactory', # noqa
'FACTORY_BOY_ARGS': ''
}
}
)
response = self.middleware.process_request(self.request)
self.assertEqual(201, response.status_code)
self.assertEqual(
'johndoe',
json.loads(response.content.decode('utf-8')).get('username')
)
self.assertEqual(1, len(User.objects.values()))
self.assertEqual('johndoe', User.objects.values()[0]['username'])
|
Fix integration tests for Python 3.
|
Fix integration tests for Python 3.
|
Python
|
apache-2.0
|
kitconcept/robotframework-djangolibrary
|
from django.contrib.auth.models import User
from django.test import TestCase
from DjangoLibrary.middleware import FactoryBoyMiddleware
from mock import Mock
import json
class TestFactoryBoyMiddleware(TestCase):
def setUp(self):
self.middleware = FactoryBoyMiddleware()
self.request = Mock()
self.request.session = {}
def test_process_request_creates_object(self):
self.request.configure_mock(
**{
'GET': {
'FACTORY_BOY_MODEL_PATH': 'mysite.tests.factories.UserFactory', # noqa
'FACTORY_BOY_ARGS': ''
}
}
)
response = self.middleware.process_request(self.request)
self.assertEqual(201, response.status_code)
self.assertEqual(
'johndoe',
json.loads(response.content).get('username')
)
self.assertEqual(1, len(User.objects.values()))
self.assertEqual('johndoe', User.objects.values()[0]['username'])
Fix integration tests for Python 3.
|
from django.contrib.auth.models import User
from django.test import TestCase
from DjangoLibrary.middleware import FactoryBoyMiddleware
from mock import Mock
import json
class TestFactoryBoyMiddleware(TestCase):
def setUp(self):
self.middleware = FactoryBoyMiddleware()
self.request = Mock()
self.request.session = {}
def test_process_request_creates_object(self):
self.request.configure_mock(
**{
'GET': {
'FACTORY_BOY_MODEL_PATH': 'mysite.tests.factories.UserFactory', # noqa
'FACTORY_BOY_ARGS': ''
}
}
)
response = self.middleware.process_request(self.request)
self.assertEqual(201, response.status_code)
self.assertEqual(
'johndoe',
json.loads(response.content.decode('utf-8')).get('username')
)
self.assertEqual(1, len(User.objects.values()))
self.assertEqual('johndoe', User.objects.values()[0]['username'])
|
<commit_before>from django.contrib.auth.models import User
from django.test import TestCase
from DjangoLibrary.middleware import FactoryBoyMiddleware
from mock import Mock
import json
class TestFactoryBoyMiddleware(TestCase):
def setUp(self):
self.middleware = FactoryBoyMiddleware()
self.request = Mock()
self.request.session = {}
def test_process_request_creates_object(self):
self.request.configure_mock(
**{
'GET': {
'FACTORY_BOY_MODEL_PATH': 'mysite.tests.factories.UserFactory', # noqa
'FACTORY_BOY_ARGS': ''
}
}
)
response = self.middleware.process_request(self.request)
self.assertEqual(201, response.status_code)
self.assertEqual(
'johndoe',
json.loads(response.content).get('username')
)
self.assertEqual(1, len(User.objects.values()))
self.assertEqual('johndoe', User.objects.values()[0]['username'])
<commit_msg>Fix integration tests for Python 3.<commit_after>
|
from django.contrib.auth.models import User
from django.test import TestCase
from DjangoLibrary.middleware import FactoryBoyMiddleware
from mock import Mock
import json
class TestFactoryBoyMiddleware(TestCase):
def setUp(self):
self.middleware = FactoryBoyMiddleware()
self.request = Mock()
self.request.session = {}
def test_process_request_creates_object(self):
self.request.configure_mock(
**{
'GET': {
'FACTORY_BOY_MODEL_PATH': 'mysite.tests.factories.UserFactory', # noqa
'FACTORY_BOY_ARGS': ''
}
}
)
response = self.middleware.process_request(self.request)
self.assertEqual(201, response.status_code)
self.assertEqual(
'johndoe',
json.loads(response.content.decode('utf-8')).get('username')
)
self.assertEqual(1, len(User.objects.values()))
self.assertEqual('johndoe', User.objects.values()[0]['username'])
|
from django.contrib.auth.models import User
from django.test import TestCase
from DjangoLibrary.middleware import FactoryBoyMiddleware
from mock import Mock
import json
class TestFactoryBoyMiddleware(TestCase):
def setUp(self):
self.middleware = FactoryBoyMiddleware()
self.request = Mock()
self.request.session = {}
def test_process_request_creates_object(self):
self.request.configure_mock(
**{
'GET': {
'FACTORY_BOY_MODEL_PATH': 'mysite.tests.factories.UserFactory', # noqa
'FACTORY_BOY_ARGS': ''
}
}
)
response = self.middleware.process_request(self.request)
self.assertEqual(201, response.status_code)
self.assertEqual(
'johndoe',
json.loads(response.content).get('username')
)
self.assertEqual(1, len(User.objects.values()))
self.assertEqual('johndoe', User.objects.values()[0]['username'])
Fix integration tests for Python 3.
from django.contrib.auth.models import User
from django.test import TestCase
from DjangoLibrary.middleware import FactoryBoyMiddleware
from mock import Mock
import json
class TestFactoryBoyMiddleware(TestCase):
def setUp(self):
self.middleware = FactoryBoyMiddleware()
self.request = Mock()
self.request.session = {}
def test_process_request_creates_object(self):
self.request.configure_mock(
**{
'GET': {
'FACTORY_BOY_MODEL_PATH': 'mysite.tests.factories.UserFactory', # noqa
'FACTORY_BOY_ARGS': ''
}
}
)
response = self.middleware.process_request(self.request)
self.assertEqual(201, response.status_code)
self.assertEqual(
'johndoe',
json.loads(response.content.decode('utf-8')).get('username')
)
self.assertEqual(1, len(User.objects.values()))
self.assertEqual('johndoe', User.objects.values()[0]['username'])
|
<commit_before>from django.contrib.auth.models import User
from django.test import TestCase
from DjangoLibrary.middleware import FactoryBoyMiddleware
from mock import Mock
import json
class TestFactoryBoyMiddleware(TestCase):
def setUp(self):
self.middleware = FactoryBoyMiddleware()
self.request = Mock()
self.request.session = {}
def test_process_request_creates_object(self):
self.request.configure_mock(
**{
'GET': {
'FACTORY_BOY_MODEL_PATH': 'mysite.tests.factories.UserFactory', # noqa
'FACTORY_BOY_ARGS': ''
}
}
)
response = self.middleware.process_request(self.request)
self.assertEqual(201, response.status_code)
self.assertEqual(
'johndoe',
json.loads(response.content).get('username')
)
self.assertEqual(1, len(User.objects.values()))
self.assertEqual('johndoe', User.objects.values()[0]['username'])
<commit_msg>Fix integration tests for Python 3.<commit_after>from django.contrib.auth.models import User
from django.test import TestCase
from DjangoLibrary.middleware import FactoryBoyMiddleware
from mock import Mock
import json
class TestFactoryBoyMiddleware(TestCase):
def setUp(self):
self.middleware = FactoryBoyMiddleware()
self.request = Mock()
self.request.session = {}
def test_process_request_creates_object(self):
self.request.configure_mock(
**{
'GET': {
'FACTORY_BOY_MODEL_PATH': 'mysite.tests.factories.UserFactory', # noqa
'FACTORY_BOY_ARGS': ''
}
}
)
response = self.middleware.process_request(self.request)
self.assertEqual(201, response.status_code)
self.assertEqual(
'johndoe',
json.loads(response.content.decode('utf-8')).get('username')
)
self.assertEqual(1, len(User.objects.values()))
self.assertEqual('johndoe', User.objects.values()[0]['username'])
|
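The one-line fix above exists because the test client's response.content is bytes, and before Python 3.6 json.loads() accepted only str, so the bytes must be decoded first. A minimal reproduction with no Django involved:

import json

payload = b'{"username": "johndoe"}'  # the shape of response.content: bytes

# json.loads(payload) raises TypeError on Python 3.0-3.5; decoding first
# works on every version, which is what the fixed assertion does.
data = json.loads(payload.decode('utf-8'))
assert data['username'] == 'johndoe'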
47e5fdce2d248e6bf78addcb46e1a8f12fcc07d6
|
tests/app/main/test_form_validators.py
|
tests/app/main/test_form_validators.py
|
import mock
import pytest
from flask.ext.wtf import Form
from wtforms.fields.core import Field
from wtforms.validators import StopValidation
from app.main.forms import AdminEmailAddressValidator
@mock.patch('app.main.forms.data_api_client')
class TestAdminEmailAddressValidator(object):
def setup_method(self):
self.form_mock = mock.MagicMock(Form)
self.field_mock = mock.MagicMock(Field, data='the_email_address')
self.validator = AdminEmailAddressValidator(message='The message passed to validator')
def test_admin_email_address_validator_calls_api(self, data_api_client):
self.validator(self.form_mock, self.field_mock)
data_api_client.email_is_valid_for_admin_user.assert_called_once_with('the_email_address')
def test_admin_email_address_validator_raises_with_invalid_response(self, data_api_client):
data_api_client.email_is_valid_for_admin_user.return_value = False
with pytest.raises(StopValidation, match='The message passed to validator'):
self.validator(self.form_mock, self.field_mock)
def test_admin_email_address_validator_passes_with_valid_response(self, data_api_client):
data_api_client.email_is_valid_for_admin_user.return_value = True
assert self.validator(self.form_mock, self.field_mock) is None
|
import mock
import pytest
from flask_wtf import Form
from wtforms.fields.core import Field
from wtforms.validators import StopValidation
from app.main.forms import AdminEmailAddressValidator
@mock.patch('app.main.forms.data_api_client')
class TestAdminEmailAddressValidator(object):
def setup_method(self):
self.form_mock = mock.MagicMock(Form)
self.field_mock = mock.MagicMock(Field, data='the_email_address')
self.validator = AdminEmailAddressValidator(message='The message passed to validator')
def test_admin_email_address_validator_calls_api(self, data_api_client):
self.validator(self.form_mock, self.field_mock)
data_api_client.email_is_valid_for_admin_user.assert_called_once_with('the_email_address')
def test_admin_email_address_validator_raises_with_invalid_response(self, data_api_client):
data_api_client.email_is_valid_for_admin_user.return_value = False
with pytest.raises(StopValidation, match='The message passed to validator'):
self.validator(self.form_mock, self.field_mock)
def test_admin_email_address_validator_passes_with_valid_response(self, data_api_client):
data_api_client.email_is_valid_for_admin_user.return_value = True
assert self.validator(self.form_mock, self.field_mock) is None
|
Tidy up test imports a bit
|
Tidy up test imports a bit
|
Python
|
mit
|
alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend
|
import mock
import pytest
from flask.ext.wtf import Form
from wtforms.fields.core import Field
from wtforms.validators import StopValidation
from app.main.forms import AdminEmailAddressValidator
@mock.patch('app.main.forms.data_api_client')
class TestAdminEmailAddressValidator(object):
def setup_method(self):
self.form_mock = mock.MagicMock(Form)
self.field_mock = mock.MagicMock(Field, data='the_email_address')
self.validator = AdminEmailAddressValidator(message='The message passed to validator')
def test_admin_email_address_validator_calls_api(self, data_api_client):
self.validator(self.form_mock, self.field_mock)
data_api_client.email_is_valid_for_admin_user.assert_called_once_with('the_email_address')
def test_admin_email_address_validator_raises_with_invalid_response(self, data_api_client):
data_api_client.email_is_valid_for_admin_user.return_value = False
with pytest.raises(StopValidation, match='The message passed to validator'):
self.validator(self.form_mock, self.field_mock)
def test_admin_email_address_validator_passes_with_valid_response(self, data_api_client):
data_api_client.email_is_valid_for_admin_user.return_value = True
assert self.validator(self.form_mock, self.field_mock) is None
Tidy up test imports a bit
|
import mock
import pytest
from flask_wtf import Form
from wtforms.fields.core import Field
from wtforms.validators import StopValidation
from app.main.forms import AdminEmailAddressValidator
@mock.patch('app.main.forms.data_api_client')
class TestAdminEmailAddressValidator(object):
def setup_method(self):
self.form_mock = mock.MagicMock(Form)
self.field_mock = mock.MagicMock(Field, data='the_email_address')
self.validator = AdminEmailAddressValidator(message='The message passed to validator')
def test_admin_email_address_validator_calls_api(self, data_api_client):
self.validator(self.form_mock, self.field_mock)
data_api_client.email_is_valid_for_admin_user.assert_called_once_with('the_email_address')
def test_admin_email_address_validator_raises_with_invalid_response(self, data_api_client):
data_api_client.email_is_valid_for_admin_user.return_value = False
with pytest.raises(StopValidation, match='The message passed to validator'):
self.validator(self.form_mock, self.field_mock)
def test_admin_email_address_validator_passes_with_valid_response(self, data_api_client):
data_api_client.email_is_valid_for_admin_user.return_value = True
assert self.validator(self.form_mock, self.field_mock) is None
|
<commit_before>import mock
import pytest
from flask.ext.wtf import Form
from wtforms.fields.core import Field
from wtforms.validators import StopValidation
from app.main.forms import AdminEmailAddressValidator
@mock.patch('app.main.forms.data_api_client')
class TestAdminEmailAddressValidator(object):
def setup_method(self):
self.form_mock = mock.MagicMock(Form)
self.field_mock = mock.MagicMock(Field, data='the_email_address')
self.validator = AdminEmailAddressValidator(message='The message passed to validator')
def test_admin_email_address_validator_calls_api(self, data_api_client):
self.validator(self.form_mock, self.field_mock)
data_api_client.email_is_valid_for_admin_user.assert_called_once_with('the_email_address')
def test_admin_email_address_validator_raises_with_invalid_response(self, data_api_client):
data_api_client.email_is_valid_for_admin_user.return_value = False
with pytest.raises(StopValidation, match='The message passed to validator'):
self.validator(self.form_mock, self.field_mock)
def test_admin_email_address_validator_passes_with_valid_response(self, data_api_client):
data_api_client.email_is_valid_for_admin_user.return_value = True
assert self.validator(self.form_mock, self.field_mock) is None
<commit_msg>Tidy up test imports a bit<commit_after>
|
import mock
import pytest
from flask_wtf import Form
from wtforms.fields.core import Field
from wtforms.validators import StopValidation
from app.main.forms import AdminEmailAddressValidator
@mock.patch('app.main.forms.data_api_client')
class TestAdminEmailAddressValidator(object):
def setup_method(self):
self.form_mock = mock.MagicMock(Form)
self.field_mock = mock.MagicMock(Field, data='the_email_address')
self.validator = AdminEmailAddressValidator(message='The message passed to validator')
def test_admin_email_address_validator_calls_api(self, data_api_client):
self.validator(self.form_mock, self.field_mock)
data_api_client.email_is_valid_for_admin_user.assert_called_once_with('the_email_address')
def test_admin_email_address_validator_raises_with_invalid_response(self, data_api_client):
data_api_client.email_is_valid_for_admin_user.return_value = False
with pytest.raises(StopValidation, match='The message passed to validator'):
self.validator(self.form_mock, self.field_mock)
def test_admin_email_address_validator_passes_with_valid_response(self, data_api_client):
data_api_client.email_is_valid_for_admin_user.return_value = True
assert self.validator(self.form_mock, self.field_mock) is None
|
import mock
import pytest
from flask.ext.wtf import Form
from wtforms.fields.core import Field
from wtforms.validators import StopValidation
from app.main.forms import AdminEmailAddressValidator
@mock.patch('app.main.forms.data_api_client')
class TestAdminEmailAddressValidator(object):
def setup_method(self):
self.form_mock = mock.MagicMock(Form)
self.field_mock = mock.MagicMock(Field, data='the_email_address')
self.validator = AdminEmailAddressValidator(message='The message passed to validator')
def test_admin_email_address_validator_calls_api(self, data_api_client):
self.validator(self.form_mock, self.field_mock)
data_api_client.email_is_valid_for_admin_user.assert_called_once_with('the_email_address')
def test_admin_email_address_validator_raises_with_invalid_response(self, data_api_client):
data_api_client.email_is_valid_for_admin_user.return_value = False
with pytest.raises(StopValidation, match='The message passed to validator'):
self.validator(self.form_mock, self.field_mock)
def test_admin_email_address_validator_passes_with_valid_response(self, data_api_client):
data_api_client.email_is_valid_for_admin_user.return_value = True
assert self.validator(self.form_mock, self.field_mock) is None
Tidy up test imports a bit
import mock
import pytest
from flask_wtf import Form
from wtforms.fields.core import Field
from wtforms.validators import StopValidation
from app.main.forms import AdminEmailAddressValidator
@mock.patch('app.main.forms.data_api_client')
class TestAdminEmailAddressValidator(object):
def setup_method(self):
self.form_mock = mock.MagicMock(Form)
self.field_mock = mock.MagicMock(Field, data='the_email_address')
self.validator = AdminEmailAddressValidator(message='The message passed to validator')
def test_admin_email_address_validator_calls_api(self, data_api_client):
self.validator(self.form_mock, self.field_mock)
data_api_client.email_is_valid_for_admin_user.assert_called_once_with('the_email_address')
def test_admin_email_address_validator_raises_with_invalid_response(self, data_api_client):
data_api_client.email_is_valid_for_admin_user.return_value = False
with pytest.raises(StopValidation, match='The message passed to validator'):
self.validator(self.form_mock, self.field_mock)
def test_admin_email_address_validator_passes_with_valid_response(self, data_api_client):
data_api_client.email_is_valid_for_admin_user.return_value = True
assert self.validator(self.form_mock, self.field_mock) is None
|
<commit_before>import mock
import pytest
from flask.ext.wtf import Form
from wtforms.fields.core import Field
from wtforms.validators import StopValidation
from app.main.forms import AdminEmailAddressValidator
@mock.patch('app.main.forms.data_api_client')
class TestAdminEmailAddressValidator(object):
def setup_method(self):
self.form_mock = mock.MagicMock(Form)
self.field_mock = mock.MagicMock(Field, data='the_email_address')
self.validator = AdminEmailAddressValidator(message='The message passed to validator')
def test_admin_email_address_validator_calls_api(self, data_api_client):
self.validator(self.form_mock, self.field_mock)
data_api_client.email_is_valid_for_admin_user.assert_called_once_with('the_email_address')
def test_admin_email_address_validator_raises_with_invalid_response(self, data_api_client):
data_api_client.email_is_valid_for_admin_user.return_value = False
with pytest.raises(StopValidation, match='The message passed to validator'):
self.validator(self.form_mock, self.field_mock)
def test_admin_email_address_validator_passes_with_valid_response(self, data_api_client):
data_api_client.email_is_valid_for_admin_user.return_value = True
assert self.validator(self.form_mock, self.field_mock) is None
<commit_msg>Tidy up test imports a bit<commit_after>import mock
import pytest
from flask_wtf import Form
from wtforms.fields.core import Field
from wtforms.validators import StopValidation
from app.main.forms import AdminEmailAddressValidator
@mock.patch('app.main.forms.data_api_client')
class TestAdminEmailAddressValidator(object):
def setup_method(self):
self.form_mock = mock.MagicMock(Form)
self.field_mock = mock.MagicMock(Field, data='the_email_address')
self.validator = AdminEmailAddressValidator(message='The message passed to validator')
def test_admin_email_address_validator_calls_api(self, data_api_client):
self.validator(self.form_mock, self.field_mock)
data_api_client.email_is_valid_for_admin_user.assert_called_once_with('the_email_address')
def test_admin_email_address_validator_raises_with_invalid_response(self, data_api_client):
data_api_client.email_is_valid_for_admin_user.return_value = False
with pytest.raises(StopValidation, match='The message passed to validator'):
self.validator(self.form_mock, self.field_mock)
def test_admin_email_address_validator_passes_with_valid_response(self, data_api_client):
data_api_client.email_is_valid_for_admin_user.return_value = True
assert self.validator(self.form_mock, self.field_mock) is None
|
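For context on the object under test: a WTForms validator is any callable taking (form, field) that raises StopValidation to halt the field's validator chain. A sketch with the same shape as AdminEmailAddressValidator; the is_allowed callable is a stand-in for the data_api_client check, which this record does not show:

from wtforms.validators import StopValidation

class EmailAllowedValidator(object):
    """Hypothetical validator mirroring the call signature WTForms expects."""

    def __init__(self, message, is_allowed):
        self.message = message
        self.is_allowed = is_allowed  # stand-in for the API client's check

    def __call__(self, form, field):
        if not self.is_allowed(field.data):
            # StopValidation with a message records an error and ends the chain.
            raise StopValidation(self.message)

The import change itself (flask.ext.wtf to flask_wtf) follows Flask's deprecation of the flask.ext import hook; direct package imports are the supported form.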
e68c38428c055f7c001011c6cc325593d2a26a81
|
pyFxTrader/strategy/__init__.py
|
pyFxTrader/strategy/__init__.py
|
# -*- coding: utf-8 -*-
class Strategy(object):
TIMEFRAMES = [] # e.g. ['M30', 'H2']
def __init__(self, instrument):
self.instrument = instrument
if not self.TIMEFRAMES:
raise ValueError('Please define TIMEFRAMES variable.')
def start(self):
"""Called on strategy start."""
raise NotImplementedError()
def new_bar(self, instrument, cur_index):
"""Called on every bar of every instrument that client is subscribed on."""
raise NotImplementedError()
def execute(self, engine, instruments, cur_index):
"""Called on after all indicators have been updated for this bar's index"""
raise NotImplementedError()
def end(self, engine):
"""Called on strategy stop."""
raise NotImplementedError()
|
# -*- coding: utf-8 -*-
from collections import deque
from logbook import Logger
log = Logger('pyFxTrader')
class Strategy(object):
TIMEFRAMES = [] # e.g. ['M30', 'H2']
BUFFER_SIZE = 500
feeds = {}
def __init__(self, instrument):
self.instrument = instrument
if not self.TIMEFRAMES:
raise ValueError('Please define TIMEFRAMES variable.')
for tf in self.TIMEFRAMES:
self.feeds[tf] = deque(maxlen=self.BUFFER_SIZE)
log.info('Initialized %s feed for %s' % (tf, self.instrument))
def start(self):
"""Called on strategy start."""
raise NotImplementedError()
def new_bar(self, instrument, cur_index):
"""Called on every bar of every instrument that client is subscribed on."""
raise NotImplementedError()
def execute(self, engine, instruments, cur_index):
"""Called on after all indicators have been updated for this bar's index"""
raise NotImplementedError()
def end(self, engine):
"""Called on strategy stop."""
raise NotImplementedError()
|
Add default BUFFER_SIZE for feeds
|
Add default BUFFER_SIZE for feeds
|
Python
|
mit
|
jmelett/pyfx,jmelett/pyFxTrader,jmelett/pyfx
|
# -*- coding: utf-8 -*-
class Strategy(object):
TIMEFRAMES = [] # e.g. ['M30', 'H2']
def __init__(self, instrument):
self.instrument = instrument
if not self.TIMEFRAMES:
raise ValueError('Please define TIMEFRAMES variable.')
def start(self):
"""Called on strategy start."""
raise NotImplementedError()
def new_bar(self, instrument, cur_index):
"""Called on every bar of every instrument that client is subscribed on."""
raise NotImplementedError()
def execute(self, engine, instruments, cur_index):
"""Called on after all indicators have been updated for this bar's index"""
raise NotImplementedError()
def end(self, engine):
"""Called on strategy stop."""
raise NotImplementedError()
Add default BUFFER_SIZE for feeds
|
# -*- coding: utf-8 -*-
from collections import deque
from logbook import Logger
log = Logger('pyFxTrader')
class Strategy(object):
TIMEFRAMES = [] # e.g. ['M30', 'H2']
BUFFER_SIZE = 500
feeds = {}
def __init__(self, instrument):
self.instrument = instrument
if not self.TIMEFRAMES:
raise ValueError('Please define TIMEFRAMES variable.')
for tf in self.TIMEFRAMES:
self.feeds[tf] = deque(maxlen=self.BUFFER_SIZE)
log.info('Initialized %s feed for %s' % (tf, self.instrument))
def start(self):
"""Called on strategy start."""
raise NotImplementedError()
def new_bar(self, instrument, cur_index):
"""Called on every bar of every instrument that client is subscribed on."""
raise NotImplementedError()
def execute(self, engine, instruments, cur_index):
"""Called on after all indicators have been updated for this bar's index"""
raise NotImplementedError()
def end(self, engine):
"""Called on strategy stop."""
raise NotImplementedError()
|
<commit_before># -*- coding: utf-8 -*-
class Strategy(object):
TIMEFRAMES = [] # e.g. ['M30', 'H2']
def __init__(self, instrument):
self.instrument = instrument
if not self.TIMEFRAMES:
raise ValueError('Please define TIMEFRAMES variable.')
def start(self):
"""Called on strategy start."""
raise NotImplementedError()
def new_bar(self, instrument, cur_index):
"""Called on every bar of every instrument that client is subscribed on."""
raise NotImplementedError()
def execute(self, engine, instruments, cur_index):
"""Called on after all indicators have been updated for this bar's index"""
raise NotImplementedError()
def end(self, engine):
"""Called on strategy stop."""
raise NotImplementedError()
<commit_msg>Add default BUFFER_SIZE for feeds<commit_after>
|
# -*- coding: utf-8 -*-
from collections import deque
from logbook import Logger
log = Logger('pyFxTrader')
class Strategy(object):
TIMEFRAMES = [] # e.g. ['M30', 'H2']
BUFFER_SIZE = 500
feeds = {}
def __init__(self, instrument):
self.instrument = instrument
if not self.TIMEFRAMES:
raise ValueError('Please define TIMEFRAMES variable.')
for tf in self.TIMEFRAMES:
self.feeds[tf] = deque(maxlen=self.BUFFER_SIZE)
log.info('Initialized %s feed for %s' % (tf, self.instrument))
def start(self):
"""Called on strategy start."""
raise NotImplementedError()
def new_bar(self, instrument, cur_index):
"""Called on every bar of every instrument that client is subscribed on."""
raise NotImplementedError()
def execute(self, engine, instruments, cur_index):
"""Called on after all indicators have been updated for this bar's index"""
raise NotImplementedError()
def end(self, engine):
"""Called on strategy stop."""
raise NotImplementedError()
|
# -*- coding: utf-8 -*-
class Strategy(object):
TIMEFRAMES = [] # e.g. ['M30', 'H2']
def __init__(self, instrument):
self.instrument = instrument
if not self.TIMEFRAMES:
raise ValueError('Please define TIMEFRAMES variable.')
def start(self):
"""Called on strategy start."""
raise NotImplementedError()
def new_bar(self, instrument, cur_index):
"""Called on every bar of every instrument that client is subscribed on."""
raise NotImplementedError()
def execute(self, engine, instruments, cur_index):
"""Called on after all indicators have been updated for this bar's index"""
raise NotImplementedError()
def end(self, engine):
"""Called on strategy stop."""
raise NotImplementedError()
Add default BUFFER_SIZE for feeds
# -*- coding: utf-8 -*-
from collections import deque
from logbook import Logger
log = Logger('pyFxTrader')
class Strategy(object):
TIMEFRAMES = [] # e.g. ['M30', 'H2']
BUFFER_SIZE = 500
feeds = {}
def __init__(self, instrument):
self.instrument = instrument
if not self.TIMEFRAMES:
raise ValueError('Please define TIMEFRAMES variable.')
for tf in self.TIMEFRAMES:
self.feeds[tf] = deque(maxlen=self.BUFFER_SIZE)
log.info('Initialized %s feed for %s' % (tf, self.instrument))
def start(self):
"""Called on strategy start."""
raise NotImplementedError()
def new_bar(self, instrument, cur_index):
"""Called on every bar of every instrument that client is subscribed on."""
raise NotImplementedError()
def execute(self, engine, instruments, cur_index):
"""Called on after all indicators have been updated for this bar's index"""
raise NotImplementedError()
def end(self, engine):
"""Called on strategy stop."""
raise NotImplementedError()
|
<commit_before># -*- coding: utf-8 -*-


class Strategy(object):
    TIMEFRAMES = []  # e.g. ['M30', 'H2']

    def __init__(self, instrument):
        self.instrument = instrument
        if not self.TIMEFRAMES:
            raise ValueError('Please define TIMEFRAMES variable.')

    def start(self):
        """Called on strategy start."""
        raise NotImplementedError()

    def new_bar(self, instrument, cur_index):
        """Called on every bar of every instrument that client is subscribed on."""
        raise NotImplementedError()

    def execute(self, engine, instruments, cur_index):
        """Called after all indicators have been updated for this bar's index."""
        raise NotImplementedError()

    def end(self, engine):
        """Called on strategy stop."""
        raise NotImplementedError()
<commit_msg>Add default BUFFER_SIZE for feeds<commit_after># -*- coding: utf-8 -*-
from collections import deque

from logbook import Logger

log = Logger('pyFxTrader')


class Strategy(object):
    TIMEFRAMES = []  # e.g. ['M30', 'H2']
    BUFFER_SIZE = 500

    feeds = {}

    def __init__(self, instrument):
        self.instrument = instrument
        if not self.TIMEFRAMES:
            raise ValueError('Please define TIMEFRAMES variable.')
        for tf in self.TIMEFRAMES:
            self.feeds[tf] = deque(maxlen=self.BUFFER_SIZE)
            log.info('Initialized %s feed for %s' % (tf, self.instrument))

    def start(self):
        """Called on strategy start."""
        raise NotImplementedError()

    def new_bar(self, instrument, cur_index):
        """Called on every bar of every instrument that client is subscribed on."""
        raise NotImplementedError()

    def execute(self, engine, instruments, cur_index):
        """Called after all indicators have been updated for this bar's index."""
        raise NotImplementedError()

    def end(self, engine):
        """Called on strategy stop."""
        raise NotImplementedError()
|
73d9049dea55ddfa32e4cb09f969b6ff083fee2c
|
tests/redisdl_test.py
|
tests/redisdl_test.py
|
import redisdl
import unittest
import json
import os.path


class RedisdlTest(unittest.TestCase):
    def setUp(self):
        import redis
        self.r = redis.Redis()
        for key in self.r.keys('*'):
            self.r.delete(key)

    def test_roundtrip(self):
        path = os.path.join(os.path.dirname(__file__), 'fixtures', 'dump.json')
        with open(path) as f:
            dump = f.read()
        redisdl.loads(dump)
        redump = redisdl.dumps()
        expected = json.loads(dump)
        actual = json.loads(redump)
        self.assertEqual(expected, actual)
|
import redisdl
import unittest
import json
import os.path


class RedisdlTest(unittest.TestCase):
    def setUp(self):
        import redis
        self.r = redis.Redis()
        for key in self.r.keys('*'):
            self.r.delete(key)

    def test_roundtrip(self):
        path = os.path.join(os.path.dirname(__file__), 'fixtures', 'dump.json')
        with open(path) as f:
            dump = f.read()
        redisdl.loads(dump)
        redump = redisdl.dumps()
        expected = json.loads(dump)
        actual = json.loads(redump)
        self.assertEqual(expected, actual)

    def test_dump_string_value(self):
        self.r.set('key', 'value')
        dump = redisdl.dumps()
        actual = json.loads(dump)
        expected = {'key': {'type': 'string', 'value': 'value'}}
        self.assertEqual(expected, actual)

    def test_dump_unicode_value(self):
        self.r.set('key', u"\u041c\u043e\u0441\u043a\u0432\u0430")
        dump = redisdl.dumps()
        actual = json.loads(dump)
        expected = {'key': {'type': 'string', 'value': u"\u041c\u043e\u0441\u043a\u0432\u0430"}}
        self.assertEqual(expected, actual)
|
Add tests for dumping string and unicode values
|
Add tests for dumping string and unicode values
|
Python
|
bsd-2-clause
|
p/redis-dump-load,p/redis-dump-load,hyunchel/redis-dump-load,hyunchel/redis-dump-load
|
import redisdl
import unittest
import json
import os.path


class RedisdlTest(unittest.TestCase):
    def setUp(self):
        import redis
        self.r = redis.Redis()
        for key in self.r.keys('*'):
            self.r.delete(key)

    def test_roundtrip(self):
        path = os.path.join(os.path.dirname(__file__), 'fixtures', 'dump.json')
        with open(path) as f:
            dump = f.read()
        redisdl.loads(dump)
        redump = redisdl.dumps()
        expected = json.loads(dump)
        actual = json.loads(redump)
        self.assertEqual(expected, actual)
Add tests for dumping string and unicode values
|
import redisdl
import unittest
import json
import os.path


class RedisdlTest(unittest.TestCase):
    def setUp(self):
        import redis
        self.r = redis.Redis()
        for key in self.r.keys('*'):
            self.r.delete(key)

    def test_roundtrip(self):
        path = os.path.join(os.path.dirname(__file__), 'fixtures', 'dump.json')
        with open(path) as f:
            dump = f.read()
        redisdl.loads(dump)
        redump = redisdl.dumps()
        expected = json.loads(dump)
        actual = json.loads(redump)
        self.assertEqual(expected, actual)

    def test_dump_string_value(self):
        self.r.set('key', 'value')
        dump = redisdl.dumps()
        actual = json.loads(dump)
        expected = {'key': {'type': 'string', 'value': 'value'}}
        self.assertEqual(expected, actual)

    def test_dump_unicode_value(self):
        self.r.set('key', u"\u041c\u043e\u0441\u043a\u0432\u0430")
        dump = redisdl.dumps()
        actual = json.loads(dump)
        expected = {'key': {'type': 'string', 'value': u"\u041c\u043e\u0441\u043a\u0432\u0430"}}
        self.assertEqual(expected, actual)
|
<commit_before>import redisdl
import unittest
import json
import os.path


class RedisdlTest(unittest.TestCase):
    def setUp(self):
        import redis
        self.r = redis.Redis()
        for key in self.r.keys('*'):
            self.r.delete(key)

    def test_roundtrip(self):
        path = os.path.join(os.path.dirname(__file__), 'fixtures', 'dump.json')
        with open(path) as f:
            dump = f.read()
        redisdl.loads(dump)
        redump = redisdl.dumps()
        expected = json.loads(dump)
        actual = json.loads(redump)
        self.assertEqual(expected, actual)
<commit_msg>Add tests for dumping string and unicode values<commit_after>
|
import redisdl
import unittest
import json
import os.path


class RedisdlTest(unittest.TestCase):
    def setUp(self):
        import redis
        self.r = redis.Redis()
        for key in self.r.keys('*'):
            self.r.delete(key)

    def test_roundtrip(self):
        path = os.path.join(os.path.dirname(__file__), 'fixtures', 'dump.json')
        with open(path) as f:
            dump = f.read()
        redisdl.loads(dump)
        redump = redisdl.dumps()
        expected = json.loads(dump)
        actual = json.loads(redump)
        self.assertEqual(expected, actual)

    def test_dump_string_value(self):
        self.r.set('key', 'value')
        dump = redisdl.dumps()
        actual = json.loads(dump)
        expected = {'key': {'type': 'string', 'value': 'value'}}
        self.assertEqual(expected, actual)

    def test_dump_unicode_value(self):
        self.r.set('key', u"\u041c\u043e\u0441\u043a\u0432\u0430")
        dump = redisdl.dumps()
        actual = json.loads(dump)
        expected = {'key': {'type': 'string', 'value': u"\u041c\u043e\u0441\u043a\u0432\u0430"}}
        self.assertEqual(expected, actual)
|
import redisdl
import unittest
import json
import os.path


class RedisdlTest(unittest.TestCase):
    def setUp(self):
        import redis
        self.r = redis.Redis()
        for key in self.r.keys('*'):
            self.r.delete(key)

    def test_roundtrip(self):
        path = os.path.join(os.path.dirname(__file__), 'fixtures', 'dump.json')
        with open(path) as f:
            dump = f.read()
        redisdl.loads(dump)
        redump = redisdl.dumps()
        expected = json.loads(dump)
        actual = json.loads(redump)
        self.assertEqual(expected, actual)
Add tests for dumping string and unicode valuesimport redisdl
import unittest
import json
import os.path


class RedisdlTest(unittest.TestCase):
    def setUp(self):
        import redis
        self.r = redis.Redis()
        for key in self.r.keys('*'):
            self.r.delete(key)

    def test_roundtrip(self):
        path = os.path.join(os.path.dirname(__file__), 'fixtures', 'dump.json')
        with open(path) as f:
            dump = f.read()
        redisdl.loads(dump)
        redump = redisdl.dumps()
        expected = json.loads(dump)
        actual = json.loads(redump)
        self.assertEqual(expected, actual)

    def test_dump_string_value(self):
        self.r.set('key', 'value')
        dump = redisdl.dumps()
        actual = json.loads(dump)
        expected = {'key': {'type': 'string', 'value': 'value'}}
        self.assertEqual(expected, actual)

    def test_dump_unicode_value(self):
        self.r.set('key', u"\u041c\u043e\u0441\u043a\u0432\u0430")
        dump = redisdl.dumps()
        actual = json.loads(dump)
        expected = {'key': {'type': 'string', 'value': u"\u041c\u043e\u0441\u043a\u0432\u0430"}}
        self.assertEqual(expected, actual)
|
<commit_before>import redisdl
import unittest
import json
import os.path


class RedisdlTest(unittest.TestCase):
    def setUp(self):
        import redis
        self.r = redis.Redis()
        for key in self.r.keys('*'):
            self.r.delete(key)

    def test_roundtrip(self):
        path = os.path.join(os.path.dirname(__file__), 'fixtures', 'dump.json')
        with open(path) as f:
            dump = f.read()
        redisdl.loads(dump)
        redump = redisdl.dumps()
        expected = json.loads(dump)
        actual = json.loads(redump)
        self.assertEqual(expected, actual)
<commit_msg>Add tests for dumping string and unicode values<commit_after>import redisdl
import unittest
import json
import os.path


class RedisdlTest(unittest.TestCase):
    def setUp(self):
        import redis
        self.r = redis.Redis()
        for key in self.r.keys('*'):
            self.r.delete(key)

    def test_roundtrip(self):
        path = os.path.join(os.path.dirname(__file__), 'fixtures', 'dump.json')
        with open(path) as f:
            dump = f.read()
        redisdl.loads(dump)
        redump = redisdl.dumps()
        expected = json.loads(dump)
        actual = json.loads(redump)
        self.assertEqual(expected, actual)

    def test_dump_string_value(self):
        self.r.set('key', 'value')
        dump = redisdl.dumps()
        actual = json.loads(dump)
        expected = {'key': {'type': 'string', 'value': 'value'}}
        self.assertEqual(expected, actual)

    def test_dump_unicode_value(self):
        self.r.set('key', u"\u041c\u043e\u0441\u043a\u0432\u0430")
        dump = redisdl.dumps()
        actual = json.loads(dump)
        expected = {'key': {'type': 'string', 'value': u"\u041c\u043e\u0441\u043a\u0432\u0430"}}
        self.assertEqual(expected, actual)
|
23a0db627060afc3e1563d298c733edd8bb106a1
|
src/ConfigLoader.py
|
src/ConfigLoader.py
|
import json
import sys


def load_config_file(out=sys.stdout):
    default_filepath = "../resources/config/default-config.json"
    user_filepath = "../resources/config/user-config.json"
    try:
        default_json = read_json(default_filepath)
        user_json = read_json(user_filepath)
        for property in user_json:
            default_json[property] = user_json[property]
    except FileNotFoundError as e:
        out.write("Cannot find file: " + e.filename)
    else:
        out.write("Read styling config JSON correctly.")
        return default_json


def read_json(filepath):
    config_string = ''
    with open(filepath) as f:
        for line in f:
            line = line.lstrip()
            if not line.startswith("//"):
                config_string += line
    config_json = json.loads(config_string)
    return config_json


if __name__ == "__main__":
    load_config_file()
|
import json
import sys


def load_config_file(out=sys.stdout):
    if sys.argv[0].endswith('nosetests'):
        default_filepath = "./resources/config/default-config.json"
        user_filepath = "./resources/config/user-config.json"
    else:
        default_filepath = "../resources/config/default-config.json"
        user_filepath = "../resources/config/user-config.json"
    try:
        default_json = read_json(default_filepath)
        user_json = read_json(user_filepath)
        for property in user_json:
            default_json[property] = user_json[property]
    except FileNotFoundError as e:
        out.write("Cannot find file: " + e.filename)
    else:
        out.write("Read styling config JSON correctly.")
        return default_json


def read_json(filepath):
    config_string = ''
    with open(filepath) as f:
        for line in f:
            line = line.lstrip()
            if not line.startswith("//"):
                config_string += line
    config_json = json.loads(config_string)
    return config_json


if __name__ == "__main__":
    load_config_file()
|
Fix nosetests for config file loading
|
Fix nosetests for config file loading
|
Python
|
bsd-3-clause
|
sky-uk/bslint
|
import json
import sys


def load_config_file(out=sys.stdout):
    default_filepath = "../resources/config/default-config.json"
    user_filepath = "../resources/config/user-config.json"
    try:
        default_json = read_json(default_filepath)
        user_json = read_json(user_filepath)
        for property in user_json:
            default_json[property] = user_json[property]
    except FileNotFoundError as e:
        out.write("Cannot find file: " + e.filename)
    else:
        out.write("Read styling config JSON correctly.")
        return default_json


def read_json(filepath):
    config_string = ''
    with open(filepath) as f:
        for line in f:
            line = line.lstrip()
            if not line.startswith("//"):
                config_string += line
    config_json = json.loads(config_string)
    return config_json


if __name__ == "__main__":
    load_config_file()Fix nosetests for config file loading
|
import json
import sys


def load_config_file(out=sys.stdout):
    if sys.argv[0].endswith('nosetests'):
        default_filepath = "./resources/config/default-config.json"
        user_filepath = "./resources/config/user-config.json"
    else:
        default_filepath = "../resources/config/default-config.json"
        user_filepath = "../resources/config/user-config.json"
    try:
        default_json = read_json(default_filepath)
        user_json = read_json(user_filepath)
        for property in user_json:
            default_json[property] = user_json[property]
    except FileNotFoundError as e:
        out.write("Cannot find file: " + e.filename)
    else:
        out.write("Read styling config JSON correctly.")
        return default_json


def read_json(filepath):
    config_string = ''
    with open(filepath) as f:
        for line in f:
            line = line.lstrip()
            if not line.startswith("//"):
                config_string += line
    config_json = json.loads(config_string)
    return config_json


if __name__ == "__main__":
    load_config_file()
|
<commit_before>import json
import sys


def load_config_file(out=sys.stdout):
    default_filepath = "../resources/config/default-config.json"
    user_filepath = "../resources/config/user-config.json"
    try:
        default_json = read_json(default_filepath)
        user_json = read_json(user_filepath)
        for property in user_json:
            default_json[property] = user_json[property]
    except FileNotFoundError as e:
        out.write("Cannot find file: " + e.filename)
    else:
        out.write("Read styling config JSON correctly.")
        return default_json


def read_json(filepath):
    config_string = ''
    with open(filepath) as f:
        for line in f:
            line = line.lstrip()
            if not line.startswith("//"):
                config_string += line
    config_json = json.loads(config_string)
    return config_json


if __name__ == "__main__":
    load_config_file()<commit_msg>Fix nosetests for config file loading<commit_after>
|
import json
import sys


def load_config_file(out=sys.stdout):
    if sys.argv[0].endswith('nosetests'):
        default_filepath = "./resources/config/default-config.json"
        user_filepath = "./resources/config/user-config.json"
    else:
        default_filepath = "../resources/config/default-config.json"
        user_filepath = "../resources/config/user-config.json"
    try:
        default_json = read_json(default_filepath)
        user_json = read_json(user_filepath)
        for property in user_json:
            default_json[property] = user_json[property]
    except FileNotFoundError as e:
        out.write("Cannot find file: " + e.filename)
    else:
        out.write("Read styling config JSON correctly.")
        return default_json


def read_json(filepath):
    config_string = ''
    with open(filepath) as f:
        for line in f:
            line = line.lstrip()
            if not line.startswith("//"):
                config_string += line
    config_json = json.loads(config_string)
    return config_json


if __name__ == "__main__":
    load_config_file()
|
import json
import sys


def load_config_file(out=sys.stdout):
    default_filepath = "../resources/config/default-config.json"
    user_filepath = "../resources/config/user-config.json"
    try:
        default_json = read_json(default_filepath)
        user_json = read_json(user_filepath)
        for property in user_json:
            default_json[property] = user_json[property]
    except FileNotFoundError as e:
        out.write("Cannot find file: " + e.filename)
    else:
        out.write("Read styling config JSON correctly.")
        return default_json


def read_json(filepath):
    config_string = ''
    with open(filepath) as f:
        for line in f:
            line = line.lstrip()
            if not line.startswith("//"):
                config_string += line
    config_json = json.loads(config_string)
    return config_json


if __name__ == "__main__":
    load_config_file()Fix nosetests for config file loadingimport json
import sys


def load_config_file(out=sys.stdout):
    if sys.argv[0].endswith('nosetests'):
        default_filepath = "./resources/config/default-config.json"
        user_filepath = "./resources/config/user-config.json"
    else:
        default_filepath = "../resources/config/default-config.json"
        user_filepath = "../resources/config/user-config.json"
    try:
        default_json = read_json(default_filepath)
        user_json = read_json(user_filepath)
        for property in user_json:
            default_json[property] = user_json[property]
    except FileNotFoundError as e:
        out.write("Cannot find file: " + e.filename)
    else:
        out.write("Read styling config JSON correctly.")
        return default_json


def read_json(filepath):
    config_string = ''
    with open(filepath) as f:
        for line in f:
            line = line.lstrip()
            if not line.startswith("//"):
                config_string += line
    config_json = json.loads(config_string)
    return config_json


if __name__ == "__main__":
    load_config_file()
|
<commit_before>import json
import sys


def load_config_file(out=sys.stdout):
    default_filepath = "../resources/config/default-config.json"
    user_filepath = "../resources/config/user-config.json"
    try:
        default_json = read_json(default_filepath)
        user_json = read_json(user_filepath)
        for property in user_json:
            default_json[property] = user_json[property]
    except FileNotFoundError as e:
        out.write("Cannot find file: " + e.filename)
    else:
        out.write("Read styling config JSON correctly.")
        return default_json


def read_json(filepath):
    config_string = ''
    with open(filepath) as f:
        for line in f:
            line = line.lstrip()
            if not line.startswith("//"):
                config_string += line
    config_json = json.loads(config_string)
    return config_json


if __name__ == "__main__":
    load_config_file()<commit_msg>Fix nosetests for config file loading<commit_after>import json
import sys


def load_config_file(out=sys.stdout):
    if sys.argv[0].endswith('nosetests'):
        default_filepath = "./resources/config/default-config.json"
        user_filepath = "./resources/config/user-config.json"
    else:
        default_filepath = "../resources/config/default-config.json"
        user_filepath = "../resources/config/user-config.json"
    try:
        default_json = read_json(default_filepath)
        user_json = read_json(user_filepath)
        for property in user_json:
            default_json[property] = user_json[property]
    except FileNotFoundError as e:
        out.write("Cannot find file: " + e.filename)
    else:
        out.write("Read styling config JSON correctly.")
        return default_json


def read_json(filepath):
    config_string = ''
    with open(filepath) as f:
        for line in f:
            line = line.lstrip()
            if not line.startswith("//"):
                config_string += line
    config_json = json.loads(config_string)
    return config_json


if __name__ == "__main__":
    load_config_file()
|
4fb39abc5afef5b0ca87e5c3b40e3dc9c9c0b2ef
|
tests/functions_tests/test_accuracy.py
|
tests/functions_tests/test_accuracy.py
|
import unittest

import numpy
import six

import chainer
from chainer import cuda
from chainer import gradient_check
from chainer.testing import attr

if cuda.available:
    cuda.init()


class TestAccuracy(unittest.TestCase):
    def setUp(self):
        self.x = numpy.random.uniform(-1, 1, (10, 3)).astype(numpy.float32)
        self.t = numpy.random.randint(3, size=(10,)).astype(numpy.int32)

    def check_forward(self, x_data, t_data):
        x = chainer.Variable(x_data)
        t = chainer.Variable(t_data)
        y = chainer.functions.accuracy(x, t)
        count = 0
        for i in six.moves.range(self.t.size):
            pred = self.x[i].argmax()
            if pred == self.t[i]:
                count += 1
        expected = float(count) / self.t.size
        gradient_check.assert_allclose(expected, cuda.to_cpu(y.data))

    def test_forward_cpu(self):
        self.check_forward(self.x, self.t)

    @attr.gpu
    def test_forward_gpu(self):
        self.check_forward(cuda.to_gpu(self.x), cuda.to_gpu(self.t))
|
import unittest

import numpy
import six

import chainer
from chainer import cuda
from chainer import gradient_check
from chainer.testing import attr

if cuda.available:
    cuda.init()


class TestAccuracy(unittest.TestCase):
    def setUp(self):
        self.x = numpy.random.uniform(-1, 1, (10, 3)).astype(numpy.float32)
        self.t = numpy.random.randint(3, size=(10,)).astype(numpy.int32)

    def check_forward(self, x_data, t_data):
        x = chainer.Variable(x_data)
        t = chainer.Variable(t_data)
        y = chainer.functions.accuracy(x, t)
        self.assertEqual((), y.data.shape)
        count = 0
        for i in six.moves.range(self.t.size):
            pred = self.x[i].argmax()
            if pred == self.t[i]:
                count += 1
        expected = float(count) / self.t.size
        gradient_check.assert_allclose(expected, cuda.to_cpu(y.data))

    def test_forward_cpu(self):
        self.check_forward(self.x, self.t)

    @attr.gpu
    def test_forward_gpu(self):
        self.check_forward(cuda.to_gpu(self.x), cuda.to_gpu(self.t))
|
Add test for shape of result of accuracy function
|
Add test for shape of result of accuracy function
|
Python
|
mit
|
elviswf/chainer,1986ks/chainer,cupy/cupy,ktnyt/chainer,hvy/chainer,tigerneil/chainer,rezoo/chainer,sou81821/chainer,tereka114/chainer,hvy/chainer,wavelets/chainer,jfsantos/chainer,chainer/chainer,ikasumi/chainer,okuta/chainer,aonotas/chainer,ysekky/chainer,kashif/chainer,ktnyt/chainer,niboshi/chainer,pfnet/chainer,ytoyama/yans_chainer_hackathon,cupy/cupy,jnishi/chainer,muupan/chainer,AlpacaDB/chainer,cemoody/chainer,chainer/chainer,anaruse/chainer,wkentaro/chainer,ronekko/chainer,kiyukuta/chainer,masia02/chainer,truongdq/chainer,kikusu/chainer,umitanuki/chainer,hidenori-t/chainer,cupy/cupy,t-abe/chainer,keisuke-umezawa/chainer,jnishi/chainer,jnishi/chainer,keisuke-umezawa/chainer,truongdq/chainer,jnishi/chainer,AlpacaDB/chainer,niboshi/chainer,laysakura/chainer,niboshi/chainer,okuta/chainer,benob/chainer,hvy/chainer,t-abe/chainer,keisuke-umezawa/chainer,ktnyt/chainer,kikusu/chainer,kuwa32/chainer,chainer/chainer,delta2323/chainer,wkentaro/chainer,woodshop/complex-chainer,tscohen/chainer,muupan/chainer,ktnyt/chainer,woodshop/chainer,wkentaro/chainer,bayerj/chainer,okuta/chainer,minhpqn/chainer,Kaisuke5/chainer,cupy/cupy,sinhrks/chainer,benob/chainer,okuta/chainer,wkentaro/chainer,hvy/chainer,tkerola/chainer,chainer/chainer,keisuke-umezawa/chainer,niboshi/chainer,sinhrks/chainer,yanweifu/chainer
|
import unittest

import numpy
import six

import chainer
from chainer import cuda
from chainer import gradient_check
from chainer.testing import attr

if cuda.available:
    cuda.init()


class TestAccuracy(unittest.TestCase):
    def setUp(self):
        self.x = numpy.random.uniform(-1, 1, (10, 3)).astype(numpy.float32)
        self.t = numpy.random.randint(3, size=(10,)).astype(numpy.int32)

    def check_forward(self, x_data, t_data):
        x = chainer.Variable(x_data)
        t = chainer.Variable(t_data)
        y = chainer.functions.accuracy(x, t)
        count = 0
        for i in six.moves.range(self.t.size):
            pred = self.x[i].argmax()
            if pred == self.t[i]:
                count += 1
        expected = float(count) / self.t.size
        gradient_check.assert_allclose(expected, cuda.to_cpu(y.data))

    def test_forward_cpu(self):
        self.check_forward(self.x, self.t)

    @attr.gpu
    def test_forward_gpu(self):
        self.check_forward(cuda.to_gpu(self.x), cuda.to_gpu(self.t))
Add test for shape of result of accuracy function
|
import unittest

import numpy
import six

import chainer
from chainer import cuda
from chainer import gradient_check
from chainer.testing import attr

if cuda.available:
    cuda.init()


class TestAccuracy(unittest.TestCase):
    def setUp(self):
        self.x = numpy.random.uniform(-1, 1, (10, 3)).astype(numpy.float32)
        self.t = numpy.random.randint(3, size=(10,)).astype(numpy.int32)

    def check_forward(self, x_data, t_data):
        x = chainer.Variable(x_data)
        t = chainer.Variable(t_data)
        y = chainer.functions.accuracy(x, t)
        self.assertEqual((), y.data.shape)
        count = 0
        for i in six.moves.range(self.t.size):
            pred = self.x[i].argmax()
            if pred == self.t[i]:
                count += 1
        expected = float(count) / self.t.size
        gradient_check.assert_allclose(expected, cuda.to_cpu(y.data))

    def test_forward_cpu(self):
        self.check_forward(self.x, self.t)

    @attr.gpu
    def test_forward_gpu(self):
        self.check_forward(cuda.to_gpu(self.x), cuda.to_gpu(self.t))
|
<commit_before>import unittest

import numpy
import six

import chainer
from chainer import cuda
from chainer import gradient_check
from chainer.testing import attr

if cuda.available:
    cuda.init()


class TestAccuracy(unittest.TestCase):
    def setUp(self):
        self.x = numpy.random.uniform(-1, 1, (10, 3)).astype(numpy.float32)
        self.t = numpy.random.randint(3, size=(10,)).astype(numpy.int32)

    def check_forward(self, x_data, t_data):
        x = chainer.Variable(x_data)
        t = chainer.Variable(t_data)
        y = chainer.functions.accuracy(x, t)
        count = 0
        for i in six.moves.range(self.t.size):
            pred = self.x[i].argmax()
            if pred == self.t[i]:
                count += 1
        expected = float(count) / self.t.size
        gradient_check.assert_allclose(expected, cuda.to_cpu(y.data))

    def test_forward_cpu(self):
        self.check_forward(self.x, self.t)

    @attr.gpu
    def test_forward_gpu(self):
        self.check_forward(cuda.to_gpu(self.x), cuda.to_gpu(self.t))
<commit_msg>Add test for shape of result of accuracy function<commit_after>
|
import unittest

import numpy
import six

import chainer
from chainer import cuda
from chainer import gradient_check
from chainer.testing import attr

if cuda.available:
    cuda.init()


class TestAccuracy(unittest.TestCase):
    def setUp(self):
        self.x = numpy.random.uniform(-1, 1, (10, 3)).astype(numpy.float32)
        self.t = numpy.random.randint(3, size=(10,)).astype(numpy.int32)

    def check_forward(self, x_data, t_data):
        x = chainer.Variable(x_data)
        t = chainer.Variable(t_data)
        y = chainer.functions.accuracy(x, t)
        self.assertEqual((), y.data.shape)
        count = 0
        for i in six.moves.range(self.t.size):
            pred = self.x[i].argmax()
            if pred == self.t[i]:
                count += 1
        expected = float(count) / self.t.size
        gradient_check.assert_allclose(expected, cuda.to_cpu(y.data))

    def test_forward_cpu(self):
        self.check_forward(self.x, self.t)

    @attr.gpu
    def test_forward_gpu(self):
        self.check_forward(cuda.to_gpu(self.x), cuda.to_gpu(self.t))
|
import unittest

import numpy
import six

import chainer
from chainer import cuda
from chainer import gradient_check
from chainer.testing import attr

if cuda.available:
    cuda.init()


class TestAccuracy(unittest.TestCase):
    def setUp(self):
        self.x = numpy.random.uniform(-1, 1, (10, 3)).astype(numpy.float32)
        self.t = numpy.random.randint(3, size=(10,)).astype(numpy.int32)

    def check_forward(self, x_data, t_data):
        x = chainer.Variable(x_data)
        t = chainer.Variable(t_data)
        y = chainer.functions.accuracy(x, t)
        count = 0
        for i in six.moves.range(self.t.size):
            pred = self.x[i].argmax()
            if pred == self.t[i]:
                count += 1
        expected = float(count) / self.t.size
        gradient_check.assert_allclose(expected, cuda.to_cpu(y.data))

    def test_forward_cpu(self):
        self.check_forward(self.x, self.t)

    @attr.gpu
    def test_forward_gpu(self):
        self.check_forward(cuda.to_gpu(self.x), cuda.to_gpu(self.t))
Add test for shape of result of accuracy functionimport unittest

import numpy
import six

import chainer
from chainer import cuda
from chainer import gradient_check
from chainer.testing import attr

if cuda.available:
    cuda.init()


class TestAccuracy(unittest.TestCase):
    def setUp(self):
        self.x = numpy.random.uniform(-1, 1, (10, 3)).astype(numpy.float32)
        self.t = numpy.random.randint(3, size=(10,)).astype(numpy.int32)

    def check_forward(self, x_data, t_data):
        x = chainer.Variable(x_data)
        t = chainer.Variable(t_data)
        y = chainer.functions.accuracy(x, t)
        self.assertEqual((), y.data.shape)
        count = 0
        for i in six.moves.range(self.t.size):
            pred = self.x[i].argmax()
            if pred == self.t[i]:
                count += 1
        expected = float(count) / self.t.size
        gradient_check.assert_allclose(expected, cuda.to_cpu(y.data))

    def test_forward_cpu(self):
        self.check_forward(self.x, self.t)

    @attr.gpu
    def test_forward_gpu(self):
        self.check_forward(cuda.to_gpu(self.x), cuda.to_gpu(self.t))
<commit_before>import unittest

import numpy
import six

import chainer
from chainer import cuda
from chainer import gradient_check
from chainer.testing import attr

if cuda.available:
    cuda.init()


class TestAccuracy(unittest.TestCase):
    def setUp(self):
        self.x = numpy.random.uniform(-1, 1, (10, 3)).astype(numpy.float32)
        self.t = numpy.random.randint(3, size=(10,)).astype(numpy.int32)

    def check_forward(self, x_data, t_data):
        x = chainer.Variable(x_data)
        t = chainer.Variable(t_data)
        y = chainer.functions.accuracy(x, t)
        count = 0
        for i in six.moves.range(self.t.size):
            pred = self.x[i].argmax()
            if pred == self.t[i]:
                count += 1
        expected = float(count) / self.t.size
        gradient_check.assert_allclose(expected, cuda.to_cpu(y.data))

    def test_forward_cpu(self):
        self.check_forward(self.x, self.t)

    @attr.gpu
    def test_forward_gpu(self):
        self.check_forward(cuda.to_gpu(self.x), cuda.to_gpu(self.t))
<commit_msg>Add test for shape of result of accuracy function<commit_after>import unittest

import numpy
import six

import chainer
from chainer import cuda
from chainer import gradient_check
from chainer.testing import attr

if cuda.available:
    cuda.init()


class TestAccuracy(unittest.TestCase):
    def setUp(self):
        self.x = numpy.random.uniform(-1, 1, (10, 3)).astype(numpy.float32)
        self.t = numpy.random.randint(3, size=(10,)).astype(numpy.int32)

    def check_forward(self, x_data, t_data):
        x = chainer.Variable(x_data)
        t = chainer.Variable(t_data)
        y = chainer.functions.accuracy(x, t)
        self.assertEqual((), y.data.shape)
        count = 0
        for i in six.moves.range(self.t.size):
            pred = self.x[i].argmax()
            if pred == self.t[i]:
                count += 1
        expected = float(count) / self.t.size
        gradient_check.assert_allclose(expected, cuda.to_cpu(y.data))

    def test_forward_cpu(self):
        self.check_forward(self.x, self.t)

    @attr.gpu
    def test_forward_gpu(self):
        self.check_forward(cuda.to_gpu(self.x), cuda.to_gpu(self.t))
|
b458e34c0e6466afd3125fff6b1f36278572857b
|
pipes/iam/consts.py
|
pipes/iam/consts.py
|
"""Constant variables for Spinnaker IAM Pipe."""
API_URL = 'http://spinnaker.build.example.com:8084'
|
"""Constant variables for Spinnaker IAM Pipe."""
API_URL = 'http://gate-api.build.example.com:8084'
|
Update to use new gate api
|
Update to use new gate api
|
Python
|
apache-2.0
|
gogoair/foremast,gogoair/foremast
|
"""Constant variables for Spinnaker IAM Pipe."""
API_URL = 'http://spinnaker.build.example.com:8084'
Update to use new gate api
|
"""Constant variables for Spinnaker IAM Pipe."""
API_URL = 'http://gate-api.build.example.com:8084'
|
<commit_before>"""Constant variables for Spinnaker IAM Pipe."""
API_URL = 'http://spinnaker.build.example.com:8084'
<commit_msg>Update to use new gate api<commit_after>
|
"""Constant variables for Spinnaker IAM Pipe."""
API_URL = 'http://gate-api.build.example.com:8084'
|
"""Constant variables for Spinnaker IAM Pipe."""
API_URL = 'http://spinnaker.build.example.com:8084'
Update to use new gate api"""Constant variables for Spinnaker IAM Pipe."""
API_URL = 'http://gate-api.build.example.com:8084'
|
<commit_before>"""Constant variables for Spinnaker IAM Pipe."""
API_URL = 'http://spinnaker.build.example.com:8084'
<commit_msg>Update to use new gate api<commit_after>"""Constant variables for Spinnaker IAM Pipe."""
API_URL = 'http://gate-api.build.example.com:8084'
|