| commit (string, 40) | old_file (string, 4–150) | new_file (string, 4–150) | old_contents (string, 0–3.26k) | new_contents (string, 1–4.43k) | subject (string, 15–501) | message (string, 15–4.06k) | lang (4 values) | license (13 values) | repos (string, 5–91.5k) | diff (string, 0–4.35k) |
|---|---|---|---|---|---|---|---|---|---|---|
0483563fd08063e856915099075b203379e61e7c
|
bejmy/categories/admin.py
|
bejmy/categories/admin.py
|
from django.contrib import admin
from bejmy.categories.models import Category
@admin.register(Category)
class CategoryAdmin(admin.ModelAdmin):
list_display = (
'name',
'user',
'transaction_type',
)
list_filter = (
'user',
'transaction_type',
)
search_fields = (
'name',
)
raw_id_fields = ('parent',)
def get_queryset(self, request, *args, **kwargs):
queryset = super().get_queryset(request, *args, **kwargs)
if not request.user.is_superuser:
queryset = queryset.filter(user=request.user)
return queryset
|
from django.contrib import admin
from bejmy.categories.models import Category
from mptt.admin import MPTTModelAdmin
@admin.register(Category)
class CategoryAdmin(MPTTModelAdmin):
list_display = (
'name',
'user',
'transaction_type',
)
list_filter = (
'user',
'transaction_type',
)
search_fields = (
'name',
)
raw_id_fields = ('parent',)
def get_queryset(self, request, *args, **kwargs):
queryset = super().get_queryset(request, *args, **kwargs)
if not request.user.is_superuser:
queryset = queryset.filter(user=request.user)
return queryset
|
Access to all accounts only for superusers
|
Access to all accounts only for superusers
|
Python
|
mit
|
bejmy/backend,bejmy/backend
|
---
+++
@@ -2,9 +2,11 @@
from bejmy.categories.models import Category
+from mptt.admin import MPTTModelAdmin
+
@admin.register(Category)
-class CategoryAdmin(admin.ModelAdmin):
+class CategoryAdmin(MPTTModelAdmin):
list_display = (
'name',
'user',
|
c0c7222f4ab1c39dadd78c9bde40d882780ce741
|
benchexec/tools/legion.py
|
benchexec/tools/legion.py
|
"""
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2015 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import benchexec.result as result
import benchexec.util as util
import benchexec.tools.template
class Tool(benchexec.tools.template.BaseTool):
"""
Tool info for Legion (https://github.com/Alan32Liu/Principes).
"""
REQUIRED_PATHS = [
"legion-sv",
"Legion.py",
"__VERIFIER.c",
"__trace_jump.s",
"tracejump.py",
"lib",
]
def executable(self):
return util.find_executable("legion-sv")
def version(self, executable):
return self._version_from_tool(executable)
def name(self):
return "Legion"
|
"""
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2015 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import benchexec.result as result
import benchexec.util as util
import benchexec.tools.template
class Tool(benchexec.tools.template.BaseTool):
"""
Tool info for Legion (https://github.com/Alan32Liu/Principes).
"""
REQUIRED_PATHS = [
"legion-sv",
"Legion.py",
"__VERIFIER.c",
"__VERIFIER_assume.c",
"__VERIFIER_assume.instr.s",
"__trace_jump.s",
"__trace_buffered.c",
"tracejump.py",
"lib",
]
def executable(self):
return util.find_executable("legion-sv")
def version(self, executable):
return self._version_from_tool(executable)
def name(self):
return "Legion"
|
Add some files to Legion
|
Add some files to Legion
|
Python
|
apache-2.0
|
ultimate-pa/benchexec,dbeyer/benchexec,sosy-lab/benchexec,sosy-lab/benchexec,sosy-lab/benchexec,sosy-lab/benchexec,dbeyer/benchexec,ultimate-pa/benchexec,dbeyer/benchexec,sosy-lab/benchexec,dbeyer/benchexec,sosy-lab/benchexec,ultimate-pa/benchexec,ultimate-pa/benchexec,ultimate-pa/benchexec,ultimate-pa/benchexec
|
---
+++
@@ -32,7 +32,10 @@
"legion-sv",
"Legion.py",
"__VERIFIER.c",
+ "__VERIFIER_assume.c",
+ "__VERIFIER_assume.instr.s",
"__trace_jump.s",
+ "__trace_buffered.c",
"tracejump.py",
"lib",
]
|
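REQUIRED_PATHS declares the files the tool needs alongside its executable, so a missing entry surfaces as a confusing runtime failure. A small standalone checker (illustrative only, not part of BenchExec) makes the idea concrete:

```python
import os

def missing_required_paths(tool_dir, required_paths):
    """Return the required_paths entries that do not exist under tool_dir."""
    return [p for p in required_paths
            if not os.path.exists(os.path.join(tool_dir, p))]

# e.g. missing_required_paths("/opt/legion", ["Legion.py", "__VERIFIER_assume.c"])
```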
4636fc514b0ebf7b16e82cc3eb7de6b69431cd43
|
site_analytics.py
|
site_analytics.py
|
#!/usr/local/bin/python3.6
# read nginx access log
# parse and get the ip addresses and times
# match ip addresses to geoip
# possibly ignore bots
import re
def get_log_lines(path):
"""Return a list of regex matched log lines from the passed nginx access log path"""
lines = []
with open(path) as f:
r = re.compile("""(?P<remote>[^ ]*) (?P<host>[^ ]*) (?P<user>[^ ]*) \[(?P<time>[^\]]*)\] "(?P<method>\S+)(?: +(?P<path>[^\"]*) +\S*)?" (?P<code>[^ ]*) (?P<size>[^ ]*)(?: "(?P<referer>[^\"]*)" "(?P<agent>[^\"]*)")""")
for line in f:
m = r.match(line)
if m is not None:
md = m.groupdict()
lines.append(md)
return lines
def get_ip_address_city(ip_address):
pass
def is_bot(useragent):
pass
def summarize(data):
pass
if __name__ == "__main__":
access_file_path = "/var/log/nginx/imadm.ca.access.log"
access_log = get_log_lines(access_file_path)
|
#!/usr/local/bin/python3.6
# read nginx access log
# parse and get the ip addresses and times
# match ip addresses to geoip
# possibly ignore bots
import re
def get_log_lines(path):
"""Return a list of regex matched log lines from the passed nginx access log path"""
lines = []
with open(path) as f:
r = re.compile("""(?P<remote>[^ ]*) (?P<host>[^ ]*) (?P<user>[^ ]*) \[(?P<time>[^\]]*)\] "(?P<method>\S+)(?: +(?P<path>[^\"]*) +\S*)?" (?P<code>[^ ]*) (?P<size>[^ ]*)(?: "(?P<referer>[^\"]*)" "(?P<agent>[^\"]*)")""")
for line in f:
m = r.match(line)
if m is not None:
md = m.groupdict()
lines.append(md)
return lines
def get_ip_address_city(ip_address):
pass
def is_bot(useragent):
pass
def summarize(data):
pass
if __name__ == "__main__":
access_file_path = "/var/log/nginx/imadm.ca.access.log"
access_log = get_log_lines(access_file_path)
|
Fix tab spacing from 2 to 4 spaces
|
Fix tab spacing from 2 to 4 spaces
|
Python
|
mit
|
mouhtasi/basic_site_analytics
|
---
+++
@@ -9,29 +9,28 @@
def get_log_lines(path):
- """Return a list of regex matched log lines from the passed nginx access log path"""
- lines = []
- with open(path) as f:
- r = re.compile("""(?P<remote>[^ ]*) (?P<host>[^ ]*) (?P<user>[^ ]*) \[(?P<time>[^\]]*)\] "(?P<method>\S+)(?: +(?P<path>[^\"]*) +\S*)?" (?P<code>[^ ]*) (?P<size>[^ ]*)(?: "(?P<referer>[^\"]*)" "(?P<agent>[^\"]*)")""")
- for line in f:
- m = r.match(line)
- if m is not None:
- md = m.groupdict()
- lines.append(md)
-
+ """Return a list of regex matched log lines from the passed nginx access log path"""
+ lines = []
+ with open(path) as f:
+ r = re.compile("""(?P<remote>[^ ]*) (?P<host>[^ ]*) (?P<user>[^ ]*) \[(?P<time>[^\]]*)\] "(?P<method>\S+)(?: +(?P<path>[^\"]*) +\S*)?" (?P<code>[^ ]*) (?P<size>[^ ]*)(?: "(?P<referer>[^\"]*)" "(?P<agent>[^\"]*)")""")
+ for line in f:
+ m = r.match(line)
+ if m is not None:
+ md = m.groupdict()
+ lines.append(md)
+
return lines
def get_ip_address_city(ip_address):
- pass
+ pass
def is_bot(useragent):
- pass
+ pass
def summarize(data):
- pass
+ pass
if __name__ == "__main__":
- access_file_path = "/var/log/nginx/imadm.ca.access.log"
-
- access_log = get_log_lines(access_file_path)
+ access_file_path = "/var/log/nginx/imadm.ca.access.log"
+ access_log = get_log_lines(access_file_path)
|
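A mechanical 2-space-to-4-space conversion like the one in this commit can be scripted; a minimal sketch (illustrative, not the tool actually used here):

```python
def reindent(source, old=2, new=4):
    """Rewrite leading indentation from old-space levels to new-space levels."""
    out = []
    for line in source.splitlines(keepends=True):
        stripped = line.lstrip(" ")
        levels = (len(line) - len(stripped)) // old
        out.append(" " * (levels * new) + stripped)
    return "".join(out)

print(reindent("def f(x):\n  if x:\n    return 1\n"))
```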
2264e4195e873760b922e6d346eb56d8e1ec6e09
|
examples/marshmallow/main.py
|
examples/marshmallow/main.py
|
import uplink
# Local imports
import github
BASE_URL = "https://api.github.com/"
if __name__ == "__main__":
# Create a client that uses the marshmallow converter
gh = github.GitHub(
base_url=BASE_URL, converter=uplink.MarshmallowConverter()
)
# Get all public repositories
repos = gh.get_repos()
# Shorten to first 10 results to avoid hitting the rate limit.
repos = repos[:10]
# Print contributors for those repositories
for repo in repos:
contributors = gh.get_contributors(repo.owner, repo.name)
print("Contributors for %s: %s" % (repo, contributors))
|
# Standard library imports
from pprint import pformat
# Local imports
import github
BASE_URL = "https://api.github.com/"
if __name__ == "__main__":
# Create a GitHub API client
gh = github.GitHub(base_url=BASE_URL)
# Get all public repositories
repos = gh.get_repos()
# Shorten to first 10 results to avoid hitting the rate limit.
repos = repos[:10]
# Print contributors for those repositories
for repo in repos:
contributors = gh.get_contributors(repo.owner, repo.name)
print(
"Contributors for %s:\n%s" % (repo, pformat(contributors, indent=4))
)
|
Remove needless creation of MarshmallowConverter
|
Remove needless creation of MarshmallowConverter
|
Python
|
mit
|
prkumar/uplink
|
---
+++
@@ -1,15 +1,16 @@
-import uplink
+# Standard library imports
+from pprint import pformat
# Local imports
import github
+
BASE_URL = "https://api.github.com/"
+
if __name__ == "__main__":
- # Create a client that uses the marshmallow converter
- gh = github.GitHub(
- base_url=BASE_URL, converter=uplink.MarshmallowConverter()
- )
+ # Create a GitHub API client
+ gh = github.GitHub(base_url=BASE_URL)
# Get all public repositories
repos = gh.get_repos()
@@ -20,4 +21,6 @@
# Print contributors for those repositories
for repo in repos:
contributors = gh.get_contributors(repo.owner, repo.name)
- print("Contributors for %s: %s" % (repo, contributors))
+ print(
+ "Contributors for %s:\n%s" % (repo, pformat(contributors, indent=4))
+ )
|
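The readable output comes from pformat, which pretty-prints nested structures at the given indent; a standalone illustration with made-up contributor data:

```python
from pprint import pformat

contributors = [{"login": "octocat", "contributions": 42},
                {"login": "hubot", "contributions": 7}]
print("Contributors for uplink:\n%s" % pformat(contributors, indent=4))
```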
4a7ef27e895ec0f22890062931a2ed68f17a1398
|
BadTranslator.py
|
BadTranslator.py
|
from translate import Translator
translator= Translator(to_lang="ru")
translation = translator.translate("Hello, world!")
print translation
|
from translate import Translator
import random
langs = ["af", "ach", "ak", "am", "ar", "az", "be", "bem", "bg", "bh", "bn", "br", "bs", "ca", "chr", "ckb", "co", "crs", "cs", "cy", "da", "de", "ee", "el", "en", "eo", "es", "es-419", "et", "eu", "fa", "fi", "fo", "fr", "fy", "ga", "gaa", "gd", "gl", "gn", "gu", "ha", "haw", "hi", "hr", "ht", "hu", "hy", "ia", "id", "ig", "is", "it", "iw", "ja", "jw", "ka", "kg", "kk", "km", "kn", "ko", "kri", "ku", "ky", "la", "lg", "ln", "lo", "loz", "lt", "lua", "lv", "mfe", "mg", "mi", "mk", "ml", "mn", "mo", "mr", "ms", "mt", "ne", "nl", "nn", "no", "nso", "ny", "nyn", "oc", "om", "or", "pa", "pcm", "pl", "ps", "pt-BR", "pt-PT", "qu", "rm", "rn", "ro", "ru", "rw", "sd", "sh", "si", "sk", "sl", "sn", "so", "sq", "sr", "sr-ME", "st", "su", "sv", "sw", "ta", "te", "tg", "th", "ti", "tk", "tl", "tn", "to", "tr", "tt", "tum", "tw", "ug", "uk", "ur", "uz", "vi", "wo", "xh", "xx-bork", "xx-elmer", "xx-hacker", "xx-klingon", "xx-pirate", "yi", "yo", "zh-CN", "zh-TW","zu"]
lang = random.randint(0, len(langs) - 1)
translator = Translator(to_lang=langs[lang])
translator2 = Translator(to_lang="en")
translation = translator.translate("Hello, World!")
print translation
translation2 = translator2.translate(translation.encode('utf-8'))
print translation2
|
Add langs list, add random lang
|
Add langs list, add random lang
Add langs list that includes all supported google translate languages.
Add random language selector.
|
Python
|
mit
|
powderblock/PyBad-Translator
|
---
+++
@@ -1,4 +1,10 @@
from translate import Translator
-translator= Translator(to_lang="ru")
-translation = translator.translate("Hello, world!")
+import random
+langs = ["af", "ach", "ak", "am", "ar", "az", "be", "bem", "bg", "bh", "bn", "br", "bs", "ca", "chr", "ckb", "co", "crs", "cs", "cy", "da", "de", "ee", "el", "en", "eo", "es", "es-419", "et", "eu", "fa", "fi", "fo", "fr", "fy", "ga", "gaa", "gd", "gl", "gn", "gu", "ha", "haw", "hi", "hr", "ht", "hu", "hy", "ia", "id", "ig", "is", "it", "iw", "ja", "jw", "ka", "kg", "kk", "km", "kn", "ko", "kri", "ku", "ky", "la", "lg", "ln", "lo", "loz", "lt", "lua", "lv", "mfe", "mg", "mi", "mk", "ml", "mn", "mo", "mr", "ms", "mt", "ne", "nl", "nn", "no", "nso", "ny", "nyn", "oc", "om", "or", "pa", "pcm", "pl", "ps", "pt-BR", "pt-PT", "qu", "rm", "rn", "ro", "ru", "rw", "sd", "sh", "si", "sk", "sl", "sn", "so", "sq", "sr", "sr-ME", "st", "su", "sv", "sw", "ta", "te", "tg", "th", "ti", "tk", "tl", "tn", "to", "tr", "tt", "tum", "tw", "ug", "uk", "ur", "uz", "vi", "wo", "xh", "xx-bork", "xx-elmer", "xx-hacker", "xx-klingon", "xx-pirate", "yi", "yo", "zh-CN", "zh-TW","zu"]
+lang = random.randint(0, len(langs) - 1)
+translator = Translator(to_lang=langs[lang])
+translator2 = Translator(to_lang="en")
+translation = translator.translate("Hello, World!")
print translation
+translation2 = translator2.translate(translation.encode('utf-8'))
+print translation2
|
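random.randint is inclusive at both ends, so indexing a list with it invites an off-by-one; random.choice avoids the index arithmetic altogether (a sketch, with a shortened language list):

```python
import random

langs = ["af", "de", "en", "fr", "ru"]  # abbreviated; the full list is above
lang_code = random.choice(langs)        # picks uniformly, no bounds to manage
```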
ed36889bbac47015722d50e0253f72a609203c5e
|
cellardoor/serializers/msgpack_serializer.py
|
cellardoor/serializers/msgpack_serializer.py
|
import msgpack
from datetime import datetime
from . import Serializer
def default_handler(obj):
try:
iterable = iter(obj)
except TypeError:
pass
else:
return list(iterable)
if isinstance(obj, datetime):
return obj.isoformat()
raise ValueError, "Can't pack object of type %s" % type(obj).__name__
class MsgPackSerializer(Serializer):
mimetype = 'application/x-msgpack'
def serialize(self, obj):
return msgpack.packb(obj, default=default_handler)
def unserialize(self, stream):
return msgpack.unpack(stream)
|
import msgpack
from datetime import datetime
import collections
from . import Serializer
def default_handler(obj):
if isinstance(obj, collections.Iterable):
return list(obj)
if isinstance(obj, datetime):
return obj.isoformat()
raise ValueError, "Can't pack object of type %s" % type(obj).__name__
class MsgPackSerializer(Serializer):
mimetype = 'application/x-msgpack'
def serialize(self, obj):
return msgpack.packb(obj, default=default_handler)
def unserialize(self, stream):
return msgpack.unpack(stream)
|
Use more reliable method of detecting iterables in msgpack serializer
|
Use more reliable method of detecting iterables in msgpack serializer
|
Python
|
mit
|
cooper-software/cellardoor
|
---
+++
@@ -1,15 +1,12 @@
import msgpack
from datetime import datetime
+import collections
from . import Serializer
def default_handler(obj):
- try:
- iterable = iter(obj)
- except TypeError:
- pass
- else:
- return list(iterable)
+ if isinstance(obj, collections.Iterable):
+ return list(obj)
if isinstance(obj, datetime):
return obj.isoformat()
|
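The serializer above is Python 2 code (note the `raise ValueError, ...` syntax). For reference, the same handler on Python 3 would import Iterable from collections.abc (a sketch, not from the repo):

```python
import collections.abc
from datetime import datetime

def default_handler(obj):
    if isinstance(obj, collections.abc.Iterable):
        return list(obj)
    if isinstance(obj, datetime):
        return obj.isoformat()
    raise ValueError("Can't pack object of type %s" % type(obj).__name__)
```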
215fba180eee818b123e31a15e4b9d6a6a895c79
|
scripts/overhead.py
|
scripts/overhead.py
|
#!/usr/bin/env python
import sys
if len(sys.argv) < 4:
print "Usage : Enter time needed for each portion of the code"
print " % overhead <advance> <exchange> <regrid>"
sys.exit();
a = float(sys.argv[1])
e = float(sys.argv[2])
r = float(sys.argv[3])
o = r + e
print " "
# print "%40s %6.1f%%" % ("Overhead as percentage of ADVANCE",100.0*o/a)
# print "%40s %6.1f%%" % ("Overhead as percentage of total",100.0*o/(a + o))
print "%40s %6.1f%%" % ("ADVANCE as percentage of total",100.0*a/o)
print "%40s %6.1f%%" % ("EXCHANGE as percentage of total",100.0*e/o)
print "%40s %6.1f%%" % ("REGRID as percentage of total",100.0*r/o)
print " "
print "Grid advances take %5.2f times as long as exchange and regridding" % (a/o)
print " "
|
#!/usr/bin/env python
import sys
if len(sys.argv) < 4:
print "Usage : Enter time needed for each portion of the code"
print " % overhead <advance> <exchange> <regrid>"
sys.exit();
a = float(sys.argv[1])
e = float(sys.argv[2])
r = float(sys.argv[3])
o = r + e + a
print " "
# print "%40s %6.1f%%" % ("Overhead as percentage of ADVANCE",100.0*o/a)
# print "%40s %6.1f%%" % ("Overhead as percentage of total",100.0*o/(a + o))
print "%40s %6.1f%%" % ("ADVANCE as percentage of total",100.0*a/o)
print "%40s %6.1f%%" % ("EXCHANGE as percentage of total",100.0*e/o)
print "%40s %6.1f%%" % ("REGRID as percentage of total",100.0*r/o)
print " "
print "Grid advances take %5.2f times as long as exchange and regridding" % (a/o)
print " "
|
Make everything a percentage of the total
|
Make everything a percentage of the total
|
Python
|
bsd-2-clause
|
ForestClaw/forestclaw,ForestClaw/forestclaw,ForestClaw/forestclaw,ForestClaw/forestclaw,ForestClaw/forestclaw,ForestClaw/forestclaw
|
---
+++
@@ -10,7 +10,7 @@
e = float(sys.argv[2])
r = float(sys.argv[3])
-o = r + e
+o = r + e + a
print " "
# print "%40s %6.1f%%" % ("Overhead as percentage of ADVANCE",100.0*o/a)
|
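With `o = r + e + a` the three percentages now sum to 100. A quick worked example (made-up timings):

```python
a, e, r = 8.0, 1.5, 0.5
o = r + e + a            # 10.0
print(100.0 * a / o)     # 80.0  ADVANCE
print(100.0 * e / o)     # 15.0  EXCHANGE
print(100.0 * r / o)     # 5.0   REGRID
```

Note that the final "times as long" line still divides by `o`, so after this change it reports advance relative to the total (0.8 here) rather than relative to exchange plus regridding (8.0/2.0 = 4.0 with the old denominator).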
b4fdec74ac1af2b50ab5c79f6127d87033a9d297
|
wagtail/wagtailsearch/signal_handlers.py
|
wagtail/wagtailsearch/signal_handlers.py
|
from django.db.models.signals import post_save, post_delete
from django.db import models
from wagtail.wagtailsearch.index import Indexed
from wagtail.wagtailsearch.backends import get_search_backends
def post_save_signal_handler(instance, **kwargs):
for backend in get_search_backends():
backend.add(instance)
def post_delete_signal_handler(instance, **kwargs):
for backend in get_search_backends():
backend.delete(instance)
def register_signal_handlers():
# Get list of models that should be indexed
indexed_models = [model for model in models.get_models() if issubclass(model, Indexed)]
# Loop through list and register signal handlers for each one
for model in indexed_models:
post_save.connect(post_save_signal_handler, sender=model)
post_delete.connect(post_delete_signal_handler, sender=model)
|
from django.db.models.signals import post_save, post_delete
from django.db import models
from wagtail.wagtailsearch.index import Indexed
from wagtail.wagtailsearch.backends import get_search_backends
def post_save_signal_handler(instance, **kwargs):
if instance not in type(instance).get_indexed_objects():
return
for backend in get_search_backends():
backend.add(instance)
def post_delete_signal_handler(instance, **kwargs):
if instance not in type(instance).get_indexed_objects():
return
for backend in get_search_backends():
backend.delete(instance)
def register_signal_handlers():
# Get list of models that should be indexed
indexed_models = [model for model in models.get_models() if issubclass(model, Indexed)]
# Loop through list and register signal handlers for each one
for model in indexed_models:
post_save.connect(post_save_signal_handler, sender=model)
post_delete.connect(post_delete_signal_handler, sender=model)
|
Make search signal handlers use get_indexed_objects
|
Make search signal handlers use get_indexed_objects
|
Python
|
bsd-3-clause
|
rv816/wagtail,serzans/wagtail,serzans/wagtail,FlipperPA/wagtail,iansprice/wagtail,nrsimha/wagtail,Toshakins/wagtail,kurtrwall/wagtail,jorge-marques/wagtail,stevenewey/wagtail,darith27/wagtail,kaedroho/wagtail,iho/wagtail,WQuanfeng/wagtail,nealtodd/wagtail,quru/wagtail,mikedingjan/wagtail,timorieber/wagtail,timorieber/wagtail,takeshineshiro/wagtail,gasman/wagtail,nimasmi/wagtail,FlipperPA/wagtail,quru/wagtail,inonit/wagtail,taedori81/wagtail,taedori81/wagtail,davecranwell/wagtail,hamsterbacke23/wagtail,kurtrwall/wagtail,gogobook/wagtail,jorge-marques/wagtail,Klaudit/wagtail,FlipperPA/wagtail,kurtw/wagtail,thenewguy/wagtail,stevenewey/wagtail,JoshBarr/wagtail,nilnvoid/wagtail,hamsterbacke23/wagtail,davecranwell/wagtail,darith27/wagtail,hanpama/wagtail,taedori81/wagtail,mjec/wagtail,thenewguy/wagtail,tangentlabs/wagtail,jordij/wagtail,tangentlabs/wagtail,mayapurmedia/wagtail,gogobook/wagtail,mephizzle/wagtail,rsalmaso/wagtail,iho/wagtail,mephizzle/wagtail,tangentlabs/wagtail,thenewguy/wagtail,iansprice/wagtail,mikedingjan/wagtail,serzans/wagtail,quru/wagtail,chimeno/wagtail,KimGlazebrook/wagtail-experiment,chimeno/wagtail,benjaoming/wagtail,kaedroho/wagtail,hamsterbacke23/wagtail,hanpama/wagtail,Pennebaker/wagtail,jordij/wagtail,rjsproxy/wagtail,janusnic/wagtail,darith27/wagtail,Tivix/wagtail,chrxr/wagtail,dresiu/wagtail,takeflight/wagtail,kurtw/wagtail,mixxorz/wagtail,nealtodd/wagtail,rv816/wagtail,rjsproxy/wagtail,dresiu/wagtail,janusnic/wagtail,iansprice/wagtail,m-sanders/wagtail,takeshineshiro/wagtail,torchbox/wagtail,thenewguy/wagtail,jnns/wagtail,bjesus/wagtail,m-sanders/wagtail,wagtail/wagtail,inonit/wagtail,nrsimha/wagtail,mayapurmedia/wagtail,chimeno/wagtail,benjaoming/wagtail,stevenewey/wagtail,gasman/wagtail,KimGlazebrook/wagtail-experiment,dresiu/wagtail,WQuanfeng/wagtail,kurtrwall/wagtail,gasman/wagtail,wagtail/wagtail,nilnvoid/wagtail,davecranwell/wagtail,jorge-marques/wagtail,chrxr/wagtail,Klaudit/wagtail,mephizzle/wagtail,JoshBarr/wagtail,Klaudit/wagtail,mikedingjan/wagtail,wagtail/wagtail,stevenewey/wagtail,wagtail/wagtail,mjec/wagtail,nimasmi/wagtail,mixxorz/wagtail,iho/wagtail,nealtodd/wagtail,torchbox/wagtail,Pennebaker/wagtail,mikedingjan/wagtail,zerolab/wagtail,mjec/wagtail,rjsproxy/wagtail,kaedroho/wagtail,nutztherookie/wagtail,Toshakins/wagtail,nimasmi/wagtail,iansprice/wagtail,torchbox/wagtail,serzans/wagtail,Tivix/wagtail,kaedroho/wagtail,benjaoming/wagtail,nutztherookie/wagtail,mayapurmedia/wagtail,dresiu/wagtail,JoshBarr/wagtail,jorge-marques/wagtail,zerolab/wagtail,davecranwell/wagtail,takeflight/wagtail,gasman/wagtail,iho/wagtail,Tivix/wagtail,nimasmi/wagtail,mayapurmedia/wagtail,KimGlazebrook/wagtail-experiment,darith27/wagtail,nilnvoid/wagtail,chimeno/wagtail,takeshineshiro/wagtail,chimeno/wagtail,nrsimha/wagtail,kurtw/wagtail,gasman/wagtail,rv816/wagtail,quru/wagtail,rjsproxy/wagtail,jordij/wagtail,marctc/wagtail,taedori81/wagtail,rsalmaso/wagtail,KimGlazebrook/wagtail-experiment,mixxorz/wagtail,gogobook/wagtail,timorieber/wagtail,jorge-marques/wagtail,torchbox/wagtail,Toshakins/wagtail,nutztherookie/wagtail,tangentlabs/wagtail,kurtw/wagtail,jnns/wagtail,janusnic/wagtail,jordij/wagtail,FlipperPA/wagtail,marctc/wagtail,chrxr/wagtail,benjaoming/wagtail,mjec/wagtail,nrsimha/wagtail,rsalmaso/wagtail,takeflight/wagtail,rsalmaso/wagtail,kurtrwall/wagtail,rsalmaso/wagtail,rv816/wagtail,timorieber/wagtail,takeflight/wagtail,mephizzle/wagtail,bjesus/wagtail,kaedroho/wagtail,mixxorz/wagtail,dresiu/wagtail,nilnvoid/wagtail,zerolab/wagtail,thenewguy/wagtail,m-sanders/wagta
il,Pennebaker/wagtail,zerolab/wagtail,marctc/wagtail,wagtail/wagtail,hamsterbacke23/wagtail,gogobook/wagtail,JoshBarr/wagtail,WQuanfeng/wagtail,marctc/wagtail,nealtodd/wagtail,mixxorz/wagtail,bjesus/wagtail,taedori81/wagtail,hanpama/wagtail,inonit/wagtail,Pennebaker/wagtail,Toshakins/wagtail,janusnic/wagtail,WQuanfeng/wagtail,zerolab/wagtail,nutztherookie/wagtail,chrxr/wagtail,m-sanders/wagtail,takeshineshiro/wagtail,bjesus/wagtail,Tivix/wagtail,hanpama/wagtail,inonit/wagtail,jnns/wagtail,jnns/wagtail,Klaudit/wagtail
|
---
+++
@@ -6,11 +6,18 @@
def post_save_signal_handler(instance, **kwargs):
+ if instance not in type(instance).get_indexed_objects():
+ return
+
+
for backend in get_search_backends():
backend.add(instance)
def post_delete_signal_handler(instance, **kwargs):
+ if instance not in type(instance).get_indexed_objects():
+ return
+
for backend in get_search_backends():
backend.delete(instance)
|
87707340ac82f852937dae546380b5d5327f5bc7
|
txlege84/core/views.py
|
txlege84/core/views.py
|
from django.views.generic import ListView
from bills.mixins import AllSubjectsMixin
from core.mixins import ConveneTimeMixin
from legislators.mixins import AllLegislatorsMixin, ChambersMixin
from explainers.models import Explainer
from topics.models import Topic
class LandingView(AllSubjectsMixin, AllLegislatorsMixin,
ChambersMixin, ConveneTimeMixin, ListView):
model = Topic
template_name = 'landing.html'
def get_context_data(self, **kwargs):
context = super(LandingView, self).get_context_data(**kwargs)
context['explainer_list'] = Explainer.objects.all().published()
return context
|
from django.views.generic import ListView
from core.mixins import ConveneTimeMixin
from explainers.models import Explainer
from topics.models import Topic
class LandingView(ConveneTimeMixin, ListView):
model = Topic
template_name = 'landing.html'
def get_context_data(self, **kwargs):
context = super(LandingView, self).get_context_data(**kwargs)
context['explainer_list'] = Explainer.objects.all().published()
return context
|
Remove unneeded mixins from LandingView
|
Remove unneeded mixins from LandingView
|
Python
|
mit
|
texastribune/txlege84,texastribune/txlege84,texastribune/txlege84,texastribune/txlege84
|
---
+++
@@ -1,15 +1,12 @@
from django.views.generic import ListView
-from bills.mixins import AllSubjectsMixin
from core.mixins import ConveneTimeMixin
-from legislators.mixins import AllLegislatorsMixin, ChambersMixin
from explainers.models import Explainer
from topics.models import Topic
-class LandingView(AllSubjectsMixin, AllLegislatorsMixin,
- ChambersMixin, ConveneTimeMixin, ListView):
+class LandingView(ConveneTimeMixin, ListView):
model = Topic
template_name = 'landing.html'
|
47d507bdc4dc0ecd54e9956a40741f3b75664ab2
|
events/models.py
|
events/models.py
|
from django.db import models
# Create your models here.
class Calendar(models.Model):
name = models.CharField(max_length=30, unique=True)
remote_id = models.CharField(max_length=60)
css_class = models.CharField(max_length=10)
def __str__(self):
return self.name
|
from django.db import models
# Create your models here.
class Calendar(models.Model):
name = models.CharField(max_length=30, unique=True)
remote_id = models.CharField(max_length=60)
css_class = models.CharField(max_length=10)
def __str__(self):
return self.name
class Meta:
ordering = ['name',]
|
Set default ordering of Calendars
|
Set default ordering of Calendars
|
Python
|
mit
|
Kromey/fbxnano,Kromey/akwriters,Kromey/akwriters,Kromey/fbxnano,Kromey/akwriters,Kromey/fbxnano,Kromey/akwriters,Kromey/fbxnano
|
---
+++
@@ -9,3 +9,6 @@
def __str__(self):
return self.name
+ class Meta:
+ ordering = ['name',]
+
|
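Declaring `ordering` in `Meta` makes name-sorting the default for every queryset over the model, while an explicit `order_by()` still overrides it; illustrative shell usage, assuming the app is configured:

```python
Calendar.objects.all()              # ORDER BY name, via Meta.ordering
Calendar.objects.order_by('-name')  # an explicit ordering wins over the default
```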
311c6caae6275cebd820aab9607adefc6b125c92
|
utils/celery_worker.py
|
utils/celery_worker.py
|
import os
import sys
# Append .. to sys path
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import multiscanner
from celery import Celery
app = Celery('celery_worker', broker='pyamqp://guest@localhost//')
@app.task
def multiscanner_celery(filelist, config=multiscanner.CONFIG):
'''
TODO: Add other args + config options...
This function essentially takes in a file list and runs
multiscanner on them. Results are stored in the
storage configured in storage.ini.
Usage:
from celery_worker import multiscanner_celery
multiscanner_celery.delay([list, of, files, to, scan])
'''
storage_conf = multiscanner.common.get_storage_config_path(config)
storage_handler = multiscanner.storage.StorageHandler(configfile=storage_conf)
resultlist = multiscanner.multiscan(filelist, configfile=config)
results = multiscanner.parse_reports(resultlist, python=True)
storage_handler.store(results, wait=False)
storage_handler.close()
return results
|
import os
import sys
# Append .. to sys path
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import multiscanner
from celery import Celery
RABBIT_USER = 'guest'
RABBIT_HOST = 'localhost'
app = Celery('celery_worker', broker='pyamqp://%s@%s//' % (RABBIT_USER, RABBIT_HOST))
@app.task
def multiscanner_celery(filelist, config=multiscanner.CONFIG):
'''
TODO: Figure out how to do batching.
TODO: Add other args + config options...
This function essentially takes in a file list and runs
multiscanner on them. Results are stored in the
storage configured in storage.ini.
Usage:
from celery_worker import multiscanner_celery
multiscanner_celery.delay([list, of, files, to, scan])
'''
storage_conf = multiscanner.common.get_storage_config_path(config)
storage_handler = multiscanner.storage.StorageHandler(configfile=storage_conf)
resultlist = multiscanner.multiscan(filelist, configfile=config)
results = multiscanner.parse_reports(resultlist, python=True)
storage_handler.store(results, wait=False)
storage_handler.close()
return results
|
Move rabbit vars to globals
|
Move rabbit vars to globals
|
Python
|
mpl-2.0
|
mitre/multiscanner,jmlong1027/multiscanner,awest1339/multiscanner,jmlong1027/multiscanner,awest1339/multiscanner,jmlong1027/multiscanner,awest1339/multiscanner,mitre/multiscanner,awest1339/multiscanner,mitre/multiscanner,MITRECND/multiscanner,jmlong1027/multiscanner,MITRECND/multiscanner
|
---
+++
@@ -6,11 +6,15 @@
from celery import Celery
-app = Celery('celery_worker', broker='pyamqp://guest@localhost//')
+RABBIT_USER = 'guest'
+RABBIT_HOST = 'localhost'
+
+app = Celery('celery_worker', broker='pyamqp://%s@%s//' % (RABBIT_USER, RABBIT_HOST))
@app.task
def multiscanner_celery(filelist, config=multiscanner.CONFIG):
'''
+ TODO: Figure out how to do batching.
TODO: Add other args + config options...
This function essentially takes in a file list and runs
multiscanner on them. Results are stored in the
|
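With the broker pieces hoisted to module globals, a natural next step is reading them from the environment; a hedged sketch (the environment variable names are assumptions, not part of the repo):

```python
import os

RABBIT_USER = os.environ.get('RABBIT_USER', 'guest')
RABBIT_HOST = os.environ.get('RABBIT_HOST', 'localhost')
broker_url = 'pyamqp://%s@%s//' % (RABBIT_USER, RABBIT_HOST)
```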
4c69a59f99fd5f425c31e3fdcbf6e3f78d82d9e4
|
vex_via_wrapper.py
|
vex_via_wrapper.py
|
import requests
MATCH_LIST_URL = "http://data.vexvia.dwabtech.net/mobile/events/csv"
DIVISION_URL = "http://data.vexvia.dwabtech.net/mobile/{}/divisions/csv"
MATCH_URL = "http://data.vexvia.dwabtech.net/mobile/{}/{}/matches/csv"
def get_events(iq=False):
data = requests.get(MATCH_LIST_URL).text.split("\r\n")[1:-1]
if iq:
return [match.split(",") for match in data if u"re-viqc" in match]
else:
return [match.split(",") for match in data if u"re-vrc" in match]
def get_divisions(event_id):
data = requests.get(DIVISION_URL.format(event_id)).text.split("\r\n")[1:-1]
return [division.split(",") for division in data]
def get_matches(event_id, division):
data = requests.get(MATCH_URL.format(
event_id, division)).text.split("\r\n")[1:-1]
return [division.split(",") for division in data]
|
import requests
MATCH_LIST_URL = "http://data.vexvia.dwabtech.net/mobile/events/csv"
DIVISION_URL = "http://data.vexvia.dwabtech.net/mobile/{}/divisions/csv"
MATCH_URL = "http://data.vexvia.dwabtech.net/mobile/{}/{}/matches/csv"
def get_events(is_iq: bool=False) -> list:
"""Get a list of iq events or edr events.
Args:
is_iq: True for vex iq tournaments, False for vex edr (default).
Returns:
A 2D array of the events.
"""
data = requests.get(MATCH_LIST_URL).text.split("\r\n")[1:-1]
if is_iq:
return [match.split(",") for match in data if u"re-viqc" in match]
else:
return [match.split(",") for match in data if u"re-vrc" in match]
def get_divisions(event_id: str) -> list:
"""Get a list of the divisions of an event.
Args:
event_id: The id of the event.
Returns:
A 2D array of the divisions.
"""
data = requests.get(DIVISION_URL.format(event_id)).text.split("\r\n")[1:-1]
return [division.split(",") for division in data]
def get_matches(event_id: str, division: str) -> list:
"""Get a list of the matches in a divisions of an event.
Args:
event_id: The id of the event.
division: The division id.
Returns:
A 2D array of the matches.
"""
data = requests.get(MATCH_URL.format(
event_id, division)).text.split("\r\n")[1:-1]
return [division.split(",") for division in data]
|
Add comments to vex via wrapper
|
Add comments to vex via wrapper
|
Python
|
mit
|
DLProgram/Project_Snake_Sort,DLProgram/Project_Snake_Sort
|
---
+++
@@ -4,20 +4,48 @@
MATCH_URL = "http://data.vexvia.dwabtech.net/mobile/{}/{}/matches/csv"
-def get_events(iq=False):
+def get_events(is_iq: bool=False) -> list:
+ """Get a list of iq events or edr events.
+
+ Args:
+        is_iq: True for vex iq tournaments, False for vex edr (default).
+
+ Returns:
+ A 2D array of the events.
+
+ """
data = requests.get(MATCH_LIST_URL).text.split("\r\n")[1:-1]
- if iq:
+ if is_iq:
return [match.split(",") for match in data if u"re-viqc" in match]
else:
return [match.split(",") for match in data if u"re-vrc" in match]
-def get_divisions(event_id):
+def get_divisions(event_id: str) -> list:
+ """Get a list of the divisions of an event.
+
+ Args:
+ event_id: The id of the event.
+
+ Returns:
+ A 2D array of the divisions.
+
+ """
data = requests.get(DIVISION_URL.format(event_id)).text.split("\r\n")[1:-1]
return [division.split(",") for division in data]
-def get_matches(event_id, division):
+def get_matches(event_id: str, division: str) -> list:
+ """Get a list of the matches in a divisions of an event.
+
+ Args:
+ event_id: The id of the event.
+ division: The division id.
+
+ Returns:
+ A 2D array of the matches.
+
+ """
data = requests.get(MATCH_URL.format(
event_id, division)).text.split("\r\n")[1:-1]
return [division.split(",") for division in data]
|
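Hedged usage of the annotated wrapper above; this hits the live vexvia endpoint, so it needs network access, and treating the first CSV column as the id is an assumption here:

```python
events = get_events(is_iq=False)
if events:
    event_id = events[0][0]                      # assumed: id in first column
    divisions = get_divisions(event_id)
    if divisions:
        matches = get_matches(event_id, divisions[0][0])
```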
f9d17e97115d914c9ed231630d01a6d724378f15
|
zou/app/blueprints/source/csv/persons.py
|
zou/app/blueprints/source/csv/persons.py
|
from zou.app.blueprints.source.csv.base import BaseCsvImportResource
from zou.app.models.person import Person
from zou.app.utils import auth, permissions
from sqlalchemy.exc import IntegrityError
class PersonsCsvImportResource(BaseCsvImportResource):
def check_permissions(self):
return permissions.check_admin_permissions()
def import_row(self, row):
first_name = row["First Name"]
last_name = row["Last Name"]
email = row["Email"]
phone = row["Phone"]
try:
password = auth.encrypt_password("default")
person = Person.get_by(email=email)
if person is None:
person = Person.create(
email=email,
password=password,
first_name=first_name,
last_name=last_name,
phone=phone
)
else:
person.update({
"first_name": first_name,
"last_name": last_name,
"phone": phone
})
except IntegrityError:
person = Person.get_by(email=email)
return person.serialize_safe()
|
from zou.app.blueprints.source.csv.base import BaseCsvImportResource
from zou.app.models.person import Person
from zou.app.utils import auth, permissions
from sqlalchemy.exc import IntegrityError
class PersonsCsvImportResource(BaseCsvImportResource):
def check_permissions(self):
return permissions.check_admin_permissions()
def import_row(self, row):
first_name = row["First Name"]
last_name = row["Last Name"]
email = row["Email"]
phone = row["Phone"]
role = row.get("Role", None)
if role == "Studio Manager":
role = "admin"
elif role == "Supervisor":
role = "manager"
elif role == "Client":
role = "client"
if role is not None and \
len(role) > 0 and \
role not in ["admin", "manager"]:
role = "user"
try:
password = auth.encrypt_password("default")
person = Person.get_by(email=email)
if person is None:
person = Person.create(
email=email,
password=password,
first_name=first_name,
last_name=last_name,
phone=phone,
role=role
)
else:
data = {
"first_name": first_name,
"last_name": last_name,
"phone": phone
}
if role is not None and len(role) > 0:
data["role"] = role
person.update(data)
except IntegrityError:
person = Person.get_by(email=email)
return person.serialize_safe()
|
Allow to import roles when importing people
|
Allow to import roles when importing people
|
Python
|
agpl-3.0
|
cgwire/zou
|
---
+++
@@ -16,6 +16,19 @@
last_name = row["Last Name"]
email = row["Email"]
phone = row["Phone"]
+ role = row.get("Role", None)
+
+ if role == "Studio Manager":
+ role = "admin"
+ elif role == "Supervisor":
+ role = "manager"
+ elif role == "Client":
+ role = "client"
+
+ if role is not None and \
+ len(role) > 0 and \
+ role not in ["admin", "manager"]:
+ role = "user"
try:
password = auth.encrypt_password("default")
@@ -27,14 +40,18 @@
password=password,
first_name=first_name,
last_name=last_name,
- phone=phone
+ phone=phone,
+ role=role
)
else:
- person.update({
+ data = {
"first_name": first_name,
"last_name": last_name,
"phone": phone
- })
+ }
+ if role is not None and len(role) > 0:
+ data["role"] = role
+ person.update(data)
except IntegrityError:
person = Person.get_by(email=email)
|
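The cascaded if/elif is effectively a lookup table followed by a fall-through; an equivalent table-driven sketch (not the project's code) that mirrors the same behavior, where anything outside admin/manager, including "client", ends up as "user":

```python
ROLE_MAP = {"Studio Manager": "admin", "Supervisor": "manager", "Client": "client"}

def normalize_role(raw):
    role = ROLE_MAP.get(raw, raw)
    if role and role not in ("admin", "manager"):
        role = "user"   # mirrors the code above: even "client" falls through
    return role
```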
f47c5e3d5ef32f1d02b78c1de9737c26754404b2
|
src/main/webapp/AMI-Scripts/ubuntu-init.py
|
src/main/webapp/AMI-Scripts/ubuntu-init.py
|
#!/usr/bin/python
import os
import httplib
import string
# To install run:
# sudo wget http://$JENKINS_URL/plugin/ec2/AMI-Scripts/ubuntu-init.py -O /usr/bin/userdata
# sudo chmod +x /etc/init.d/userdata
# add the following line to /etc/rc.local "python /usr/bin/userdata"
# If java is installed it will be zero
# If java is not installed it will be non-zero
hasJava = os.system("java -version")
if hasJava != 0:
os.system("sudo apt-get update")
os.system("sudo apt-get install openjdk-7-jre -y")
conn = httplib.HTTPConnection("169.254.169.254")
conn.request("GET", "/latest/user-data")
response = conn.getresponse()
userdata = response.read()
args = string.split(userdata, "&")
jenkinsUrl = ""
slaveName = ""
for arg in args:
if arg.split("=")[0] == "JENKINS_URL":
jenkinsUrl = arg.split("=")[1]
if arg.split("=")[0] == "SLAVE_NAME":
slaveName = arg.split("=")[1]
os.system("wget " + jenkinsUrl + "jnlpJars/slave.jar -O slave.jar")
os.system("java -jar slave.jar -jnlpUrl " + jenkinsUrl + "computer/" + slaveName + "/slave-agent.jnlp")
|
#!/usr/bin/python
import os
import httplib
import string
# To install run:
# sudo wget http://$JENKINS_URL/plugin/ec2/AMI-Scripts/ubuntu-init.py -O /usr/bin/userdata
# sudo chmod +x /etc/init.d/userdata
# add the following line to /etc/rc.local "python /usr/bin/userdata"
# If java is installed it will be zero
# If java is not installed it will be non-zero
hasJava = os.system("java -version")
if hasJava != 0:
os.system("sudo apt-get update")
os.system("sudo apt-get install openjdk-7-jre -y")
conn = httplib.HTTPConnection("169.254.169.254")
conn.request("GET", "/latest/user-data")
response = conn.getresponse()
userdata = response.read()
args = string.split(userdata, "&")
jenkinsUrl = ""
slaveName = ""
for arg in args:
if arg.split("=")[0] == "JENKINS_URL":
jenkinsUrl = arg.split("=")[1]
if arg.split("=")[0] == "SLAVE_NAME":
slaveName = arg.split("=")[1]
os.system("wget " + jenkinsUrl + "jnlpJars/slave.jar -O slave.jar")
os.system("java -jar slave.jar -jnlpUrl " + jenkinsUrl + "computer/" + slaveName + "/slave-agent.jnlp")
|
Fix trailing spaces/tabs in Python code
|
Fix trailing spaces/tabs in Python code
|
Python
|
mit
|
mkozell/ec2-plugin,jenkinsci/ec2-plugin,jenkinsci/ec2-plugin,jenkinsci/ec2-plugin,jenkinsci/ec2-plugin,mkozell/ec2-plugin,mkozell/ec2-plugin,mkozell/ec2-plugin
|
---
+++
@@ -30,7 +30,6 @@
jenkinsUrl = arg.split("=")[1]
if arg.split("=")[0] == "SLAVE_NAME":
slaveName = arg.split("=")[1]
-
+
os.system("wget " + jenkinsUrl + "jnlpJars/slave.jar -O slave.jar")
os.system("java -jar slave.jar -jnlpUrl " + jenkinsUrl + "computer/" + slaveName + "/slave-agent.jnlp")
-
|
6a71271ed00ba164cf2755f728f0dbf2ed310f6b
|
zsi/setup.py
|
zsi/setup.py
|
#! /usr/bin/env python
# $Header$
import sys
from distutils.core import setup
_url = "http://www.zolera.com/resources/opensrc/zsi"
import ConfigParser
cf = ConfigParser.ConfigParser()
cf.read('setup.cfg')
_version = "%d.%d" % \
( cf.getint('version', 'major'), cf.getint('version', 'minor') )
try:
open('ZSI/version.py', 'r').close()
except:
print 'ZSI/version.py not found; run "make"'
sys.exit(1)
setup(
name="ZSI",
version=_version,
license="Python",
packages=[ "ZSI", ],
description="Zolera SOAP Infrastructure",
author="Rich Salz",
author_email="rsalz@zolera.com",
maintainer="Rich Salz",
maintainer_email="rsalz@zolera.com",
url=_url,
long_description='For additional information, please see ' + _url
)
|
#! /usr/bin/env python
# $Header$
import sys
from distutils.core import setup
_url = "http://pywebsvcs.sf.net/"
import ConfigParser
cf = ConfigParser.ConfigParser()
cf.read('setup.cfg')
_version = "%d.%d" % \
( cf.getint('version', 'major'), cf.getint('version', 'minor') )
try:
open('ZSI/version.py', 'r').close()
except:
print 'ZSI/version.py not found; run "make"'
sys.exit(1)
setup(
name="ZSI",
version=_version,
license="Python",
packages=[ "ZSI", ],
description="Zolera SOAP Infrastructure",
author="Rich Salz",
author_email="rsalz@zolera.com",
maintainer="Rich Salz",
maintainer_email="rsalz@zolera.com",
url=_url,
long_description='For additional information, please see ' + _url
)
|
Change URL from Zolera (sob, snif, sigh :) to SF
|
Change URL from Zolera (sob, snif, sigh :) to SF
git-svn-id: c4afb4e777bcbfe9afa898413b708b5abcd43877@123 7150bf37-e60d-0410-b93f-83e91ef0e581
|
Python
|
mit
|
acigna/pywez,acigna/pywez,acigna/pywez
|
---
+++
@@ -3,7 +3,7 @@
import sys
from distutils.core import setup
-_url = "http://www.zolera.com/resources/opensrc/zsi"
+_url = "http://pywebsvcs.sf.net/"
import ConfigParser
cf = ConfigParser.ConfigParser()
|
960436b17211a225a729805a528653f2aff675d7
|
src/sentry/utils/social_auth.py
|
src/sentry/utils/social_auth.py
|
"""
sentry.utils.social_auth
~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2013 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from django.conf import settings
from social_auth.backends.pipeline.user import create_user
from social_auth.exceptions import SocialAuthBaseException
class AuthNotAllowed(SocialAuthBaseException):
pass
def create_user_if_enabled(*args, **kwargs):
"""
A pipeline step for django-social-auth
Create user. Depends on get_username pipeline.
"""
if not settings.SOCIAL_AUTH_CREATE_USERS and not kwargs.get('user'):
raise AuthNotAllowed('You must create an account before associating an identity.')
return create_user(*args, **kwargs)
|
"""
sentry.utils.social_auth
~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2013 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from django.conf import settings
from social_auth.backends.pipeline.user import create_user
from social_auth.exceptions import SocialAuthBaseException
class AuthNotAllowed(SocialAuthBaseException):
pass
def create_user_if_enabled(*args, **kwargs):
"""
A pipeline step for django-social-auth
Create user. Depends on get_username pipeline.
"""
if not settings.SOCIAL_AUTH_CREATE_USERS and not kwargs.get('user'):
raise AuthNotAllowed('You must create an account before associating an identity.')
backend = kwargs.pop('backend')
details = kwargs.pop('details')
response = kwargs.pop('response')
uid = kwargs.pop('uid')
username = kwargs.pop('username', None)
user = kwargs.pop('user', None)
return create_user(backend=backend, details=details, response=response, uid=uid, username=username, user=user, *args, **kwargs)
|
Call to create_user fails because the django-social-auth module now requires 5 parameters for create_user.
|
Call to create_user fails because the django-social-auth module now requires 5 parameters for create_user.
The exception is quietly suppressed in social_auth/backends/__init__.py:143 in the pipeline
stage for this module, since TypeErrors in general are caught by try/except in the
authenticate() stage.
It can be repro'd by putting a breakpoint on authenticate() in django.contrib.auth and
the authenticate() function for the Google backend.
Will there be a similar issue once this create_user() function gets even further ported to python-social-auth?
|
Python
|
bsd-3-clause
|
BuildingLink/sentry,gg7/sentry,kevinlondon/sentry,zenefits/sentry,ewdurbin/sentry,daevaorn/sentry,looker/sentry,zenefits/sentry,BuildingLink/sentry,1tush/sentry,wujuguang/sentry,ewdurbin/sentry,rdio/sentry,pauloschilling/sentry,songyi199111/sentry,argonemyth/sentry,wong2/sentry,jokey2k/sentry,zenefits/sentry,beeftornado/sentry,jean/sentry,alexm92/sentry,wong2/sentry,beeftornado/sentry,TedaLIEz/sentry,looker/sentry,BuildingLink/sentry,ifduyue/sentry,daevaorn/sentry,fotinakis/sentry,JamesMura/sentry,zenefits/sentry,daevaorn/sentry,drcapulet/sentry,fuziontech/sentry,llonchj/sentry,jokey2k/sentry,felixbuenemann/sentry,boneyao/sentry,wujuguang/sentry,vperron/sentry,boneyao/sentry,jean/sentry,vperron/sentry,ewdurbin/sentry,camilonova/sentry,fuziontech/sentry,drcapulet/sentry,camilonova/sentry,felixbuenemann/sentry,JackDanger/sentry,kevinastone/sentry,JackDanger/sentry,ifduyue/sentry,rdio/sentry,fotinakis/sentry,JamesMura/sentry,Natim/sentry,rdio/sentry,kevinastone/sentry,Natim/sentry,argonemyth/sentry,mvaled/sentry,rdio/sentry,alexm92/sentry,gencer/sentry,TedaLIEz/sentry,ifduyue/sentry,korealerts1/sentry,JackDanger/sentry,mitsuhiko/sentry,daevaorn/sentry,looker/sentry,alexm92/sentry,songyi199111/sentry,BayanGroup/sentry,imankulov/sentry,argonemyth/sentry,pauloschilling/sentry,mvaled/sentry,JTCunning/sentry,imankulov/sentry,jean/sentry,felixbuenemann/sentry,JTCunning/sentry,jean/sentry,kevinlondon/sentry,fotinakis/sentry,gencer/sentry,BayanGroup/sentry,Natim/sentry,boneyao/sentry,mitsuhiko/sentry,songyi199111/sentry,TedaLIEz/sentry,ngonzalvez/sentry,BayanGroup/sentry,ngonzalvez/sentry,mvaled/sentry,looker/sentry,hongliang5623/sentry,gencer/sentry,ifduyue/sentry,gg7/sentry,drcapulet/sentry,vperron/sentry,kevinlondon/sentry,nicholasserra/sentry,gg7/sentry,camilonova/sentry,fotinakis/sentry,nicholasserra/sentry,llonchj/sentry,fuziontech/sentry,looker/sentry,gencer/sentry,jean/sentry,1tush/sentry,JamesMura/sentry,JamesMura/sentry,BuildingLink/sentry,1tush/sentry,llonchj/sentry,mvaled/sentry,nicholasserra/sentry,JamesMura/sentry,mvaled/sentry,ngonzalvez/sentry,beeftornado/sentry,hongliang5623/sentry,zenefits/sentry,wong2/sentry,JTCunning/sentry,Kryz/sentry,jokey2k/sentry,wujuguang/sentry,gencer/sentry,korealerts1/sentry,Kryz/sentry,Kryz/sentry,mvaled/sentry,BuildingLink/sentry,korealerts1/sentry,hongliang5623/sentry,kevinastone/sentry,imankulov/sentry,pauloschilling/sentry,ifduyue/sentry
|
---
+++
@@ -25,4 +25,11 @@
if not settings.SOCIAL_AUTH_CREATE_USERS and not kwargs.get('user'):
raise AuthNotAllowed('You must create an account before associating an identity.')
- return create_user(*args, **kwargs)
+ backend = kwargs.pop('backend')
+ details = kwargs.pop('details')
+ response = kwargs.pop('response')
+ uid = kwargs.pop('uid')
+ username = kwargs.pop('username', None)
+ user = kwargs.pop('user', None)
+
+ return create_user(backend=backend, details=details, response=response, uid=uid, username=username, user=user, *args, **kwargs)
|
b6941b35f5bb20dbc2c7e05bbf6100bf0879be3f
|
foyer/tests/test_plugin.py
|
foyer/tests/test_plugin.py
|
import pytest
def test_basic_import():
import foyer
assert 'forcefields' in dir(foyer)
import foyer.forcefields.forcefields
|
import pytest
import foyer
def test_basic_import():
assert 'forcefields' in dir(foyer)
@pytest.mark.parametrize('ff_name', ['OPLSAA', 'TRAPPE_UA'])
def test_forcefields_exist(ff_name):
assert ff_name in dir(foyer.forcefields)
def test_load_forcefield():
OPLSAA = foyer.forcefields.get_forcefield(name='oplsaa')
TRAPPE_UA = foyer.forcefields.get_forcefield(name='trappe-ua')
with pytest.raises(ValueError):
foyer.forcefields.get_forcefield('bogus_name')
|
Update test to check more internals
|
Update test to check more internals
|
Python
|
mit
|
mosdef-hub/foyer,iModels/foyer,iModels/foyer,mosdef-hub/foyer
|
---
+++
@@ -1,7 +1,18 @@
import pytest
+import foyer
def test_basic_import():
- import foyer
assert 'forcefields' in dir(foyer)
- import foyer.forcefields.forcefields
+
+
+@pytest.mark.parametrize('ff_name', ['OPLSAA', 'TRAPPE_UA'])
+def test_forcefields_exist(ff_name):
+    assert ff_name in dir(foyer.forcefields)
+
+
+def test_load_forcefield():
+ OPLSAA = foyer.forcefields.get_forcefield(name='oplsaa')
+ TRAPPE_UA = foyer.forcefields.get_forcefield(name='trappe-ua')
+ with pytest.raises(ValueError):
+ foyer.forcefields.get_forcefield('bogus_name')
|
ec613fe1df31dd65d8a52351a29482b54ce007b3
|
skvideo/__init__.py
|
skvideo/__init__.py
|
from skvideo.stuff import *
from skvideo.version import __version__
# If you want to use Numpy's testing framework, use the following.
# Tests go under directory tests/, benchmarks under directory benchmarks/
from numpy.testing import Tester
test = Tester().test
bench = Tester().bench
|
from skvideo.version import __version__
# If you want to use Numpy's testing framework, use the following.
# Tests go under directory tests/, benchmarks under directory benchmarks/
from numpy.testing import Tester
test = Tester().test
bench = Tester().bench
|
Remove some unused parts of skeleton
|
Remove some unused parts of skeleton
|
Python
|
bsd-3-clause
|
aizvorski/scikit-video
|
---
+++
@@ -1,4 +1,3 @@
-from skvideo.stuff import *
from skvideo.version import __version__
# If you want to use Numpy's testing framework, use the following.
|
682010eafe28eed1eeb32ba9d34e213b4f2d7d4b
|
sourcer/__init__.py
|
sourcer/__init__.py
|
from .compiler import ParseResult
from .expressions import (
Alt,
And,
Any,
Backtrack,
Bind,
End,
Expect,
Fail,
ForwardRef,
Left,
List,
Literal,
Not,
Opt,
Or,
Require,
Return,
Right,
Some,
Start,
Struct,
Term,
Transform,
Where,
)
from .interpreter import (
ParseError,
parse,
parse_prefix,
tokenize,
tokenize_and_parse,
)
from .precedence import (
InfixLeft,
InfixRight,
LeftAssoc,
Operation,
OperatorPrecedence,
Postfix,
Prefix,
ReduceLeft,
ReduceRight,
RightAssoc,
)
from .tokens import (
AnyChar,
AnyString,
Content,
Pattern,
Regex,
Skip,
Token,
TokenSyntax,
Verbose,
)
|
from .compiler import ParseResult
from .expressions import (
Alt,
And,
Any,
AnyOf,
Backtrack,
Bind,
End,
Expect,
Fail,
ForwardRef,
Left,
List,
Literal,
Not,
Opt,
Or,
Require,
Return,
Right,
Some,
Start,
Struct,
Term,
Transform,
Where,
)
from .interpreter import (
ParseError,
parse,
parse_prefix,
tokenize,
tokenize_and_parse,
)
from .precedence import (
InfixLeft,
InfixRight,
LeftAssoc,
Operation,
OperatorPrecedence,
Postfix,
Prefix,
ReduceLeft,
ReduceRight,
RightAssoc,
)
from .tokens import (
AnyChar,
AnyString,
Content,
Pattern,
Regex,
Skip,
Token,
TokenSyntax,
Verbose,
)
|
Add "AnyOf" to public API.
|
Add "AnyOf" to public API.
|
Python
|
mit
|
jvs/sourcer
|
---
+++
@@ -4,6 +4,7 @@
Alt,
And,
Any,
+ AnyOf,
Backtrack,
Bind,
End,
|
9e07a21df955b599d27eb8b98b53395fa7170257
|
spoj/00005/palin.py
|
spoj/00005/palin.py
|
#!/usr/bin/env python3
def next_palindrome(k):
palin = list(k)
n = len(k)
mid = n // 2
# case 1: forward right
just_copy = False
for i in range(mid, n):
mirrored = n - 1 - i
if k[i] < k[mirrored]:
just_copy = True
if just_copy:
palin[i] = palin[mirrored]
# case 2: backward left
if not just_copy:
i = (n - 1) // 2
while i >= 0 and k[i] == '9':
i -= 1
if i >= 0:
palin[i] = str(int(k[i]) + 1)
for j in range(i + 1, mid + 1):
palin[j] = '0'
for j in range(mid + 1, n):
mirrored = n - 1 - j
palin[j] = palin[mirrored]
else:
# case 3: "99...9" -> "100..01"
palin = ['0'] * (n + 1)
palin[0] = palin[-1] = '1'
return ''.join(palin)
if __name__ == '__main__':
t = int(input())
for _ in range(t):
k = input()
print(next_palindrome(k))
|
#!/usr/bin/env python3
def next_palindrome(k):
palin = list(k)
n = len(k)
mid = n // 2
# case 1: forward right
just_copy = False
for i in range(mid, n):
mirrored = n - 1 - i
if k[i] < k[mirrored]:
just_copy = True
if just_copy:
palin[i] = palin[mirrored]
# case 2: backward left
if not just_copy:
i = (n - 1) // 2
while i >= 0 and k[i] == '9':
i -= 1
if i >= 0:
palin[i] = str(int(k[i]) + 1)
for j in range(i + 1, mid):
palin[j] = '0'
for j in range(mid, n):
mirrored = n - 1 - j
palin[j] = palin[mirrored]
else:
# case 3: "99...9" -> "100..01"
palin = ['0'] * (n + 1)
palin[0] = palin[-1] = '1'
return ''.join(palin)
if __name__ == '__main__':
t = int(input())
for _ in range(t):
k = input()
print(next_palindrome(k))
|
Fix off-by-1 bug for `mid`
|
Fix off-by-1 bug for `mid`
- in SPOJ palin
Signed-off-by: Karel Ha <70f8965fdfb04f1fc0e708a55d9e822c449f57d3@gmail.com>
|
Python
|
mit
|
mathemage/CompetitiveProgramming,mathemage/CompetitiveProgramming,mathemage/CompetitiveProgramming,mathemage/CompetitiveProgramming,mathemage/CompetitiveProgramming,mathemage/CompetitiveProgramming
|
---
+++
@@ -20,9 +20,9 @@
i -= 1
if i >= 0:
palin[i] = str(int(k[i]) + 1)
- for j in range(i + 1, mid + 1):
+ for j in range(i + 1, mid):
palin[j] = '0'
- for j in range(mid + 1, n):
+ for j in range(mid, n):
mirrored = n - 1 - j
palin[j] = palin[mirrored]
else:
|
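The bound change matters for even-length inputs, where `mid` sits between the two halves; a few spot checks (assuming the fixed `next_palindrome` above is in scope):

```python
assert next_palindrome("1234") == "1331"    # old bounds gave "1301", not a palindrome
assert next_palindrome("12345") == "12421"  # both bounds agree on this odd-length input
assert next_palindrome("99") == "101"       # the all-nines carry case
```

One case the new bounds still appear to miss: an odd-length input whose middle digit is 9. Tracing "191" by hand gives "292" rather than "202", because the zero-fill loop `range(i + 1, mid)` never reaches the middle position.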
74d668cb8291822a167d1ddd0fecf7e580375377
|
serv/rcompserv/serv.py
|
serv/rcompserv/serv.py
|
from aiohttp import web
from . import __version__
class Server:
def __init__(self, host='127.0.0.1', port=8080):
self._host = host
self._port = port
self.app = web.Application()
self.app.router.add_get('/', self.index)
self.known_commands = ['version']
self.app.router.add_get('/version', self.version)
async def index(self, request):
return web.json_response({'commands': self.known_commands})
async def version(self, request):
return web.json_response({'version': __version__})
def run(self):
web.run_app(self.app, host=self._host, port=self._port)
|
import uuid
from datetime import datetime
from aiohttp import web
import redis
from . import __version__
class Server:
def __init__(self, host='127.0.0.1', port=8080):
self._host = host
self._port = port
self.app = web.Application()
self.app.on_startup.append(self.start_redis)
self.app.router.add_get('/', self.index)
self.known_commands = ['version', 'trivial']
self.app.router.add_get('/version', self.version)
self.app.router.add_get('/trivial', self.trivial)
async def start_redis(self, app):
app['redis'] = redis.StrictRedis()
async def index(self, request):
return web.json_response({'commands': self.known_commands})
async def version(self, request):
return web.json_response({'version': __version__})
async def trivial(self, request):
job_id = str(uuid.uuid4())
start_time = str(datetime.utcnow())
request.app['redis'].hset(job_id, 'cmd', 'trivial')
request.app['redis'].hset(job_id, 'stime', start_time)
request.app['redis'].hset(job_id, 'done', 1)
request.app['redis'].hset(job_id, 'output', '')
return web.json_response({
'cmd': str(request.app['redis'].hget(job_id, 'cmd'), encoding='utf-8'),
'id': job_id,
'stime': str(request.app['redis'].hget(job_id, 'stime'), encoding='utf-8'),
'done': bool(int(request.app['redis'].hget(job_id, 'done'))),
'output': str(request.app['redis'].hget(job_id, 'output'), encoding='utf-8')
})
def run(self):
web.run_app(self.app, host=self._host, port=self._port)
|
Add route for `trivial` (vacuous) command
|
Add route for `trivial` (vacuous) command
|
Python
|
bsd-3-clause
|
slivingston/rcomp,slivingston/rcomp,slivingston/rcomp
|
---
+++
@@ -1,4 +1,8 @@
+import uuid
+from datetime import datetime
+
from aiohttp import web
+import redis
from . import __version__
@@ -8,9 +12,14 @@
self._host = host
self._port = port
self.app = web.Application()
+ self.app.on_startup.append(self.start_redis)
self.app.router.add_get('/', self.index)
- self.known_commands = ['version']
+ self.known_commands = ['version', 'trivial']
self.app.router.add_get('/version', self.version)
+ self.app.router.add_get('/trivial', self.trivial)
+
+ async def start_redis(self, app):
+ app['redis'] = redis.StrictRedis()
async def index(self, request):
return web.json_response({'commands': self.known_commands})
@@ -18,5 +27,20 @@
async def version(self, request):
return web.json_response({'version': __version__})
+ async def trivial(self, request):
+ job_id = str(uuid.uuid4())
+ start_time = str(datetime.utcnow())
+ request.app['redis'].hset(job_id, 'cmd', 'trivial')
+ request.app['redis'].hset(job_id, 'stime', start_time)
+ request.app['redis'].hset(job_id, 'done', 1)
+ request.app['redis'].hset(job_id, 'output', '')
+ return web.json_response({
+ 'cmd': str(request.app['redis'].hget(job_id, 'cmd'), encoding='utf-8'),
+ 'id': job_id,
+ 'stime': str(request.app['redis'].hget(job_id, 'stime'), encoding='utf-8'),
+            'done': bool(int(request.app['redis'].hget(job_id, 'done'))),
+ 'output': str(request.app['redis'].hget(job_id, 'output'), encoding='utf-8')
+ })
+
def run(self):
web.run_app(self.app, host=self._host, port=self._port)
|
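A hedged way to exercise the new route once the server and a local Redis are running (host and port from the defaults above; needs the `requests` package):

```python
import requests

job = requests.get('http://127.0.0.1:8080/trivial').json()
print(job['id'], job['done'], job['stime'])  # fields set by the handler above
```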
c8100c89298091179c9ad7f84452328e28efaa03
|
crawler/CrawlerExample.py
|
crawler/CrawlerExample.py
|
__author__ = 'pascal'
from crawler.Crawler import Crawler
from utils.path import RessourceUtil
# _____ _
# | ____|_ ____ _ _ __ ___ _ __ | | ___
# | _| \ \/ / _` | '_ ` _ \| '_ \| |/ _ \
# | |___ > < (_| | | | | | | |_) | | __/
# |_____/_/\_\__,_|_| |_| |_| .__/|_|\___|
# |_|
def crawler_example():
# Create a seed
seed = [
RessourceUtil.get_ressource_path('d01.html'),
RessourceUtil.get_ressource_path('d06.html'),
RessourceUtil.get_ressource_path('d08.html')
]
# Instantiate the crawler.
crawler = Crawler()
# Start the crawler with the seed.
crawler.start_crawling(seed)
# Access the data.
crawler.data
# Print the data...
for page in crawler.data:
# # ... with print_page(page_object)
Crawler.print_page(page)
# Print the link structure
link_structure_txt = crawler.get_link_structure_text()
print(link_structure_txt)
|
__author__ = 'pascal'
from crawler.Crawler import Crawler
from indexer.indexer import Indexer
from utils.path import RessourceUtil
# _____ _
# | ____|_ ____ _ _ __ ___ _ __ | | ___
# | _| \ \/ / _` | '_ ` _ \| '_ \| |/ _ \
# | |___ > < (_| | | | | | | |_) | | __/
# |_____/_/\_\__,_|_| |_| |_| .__/|_|\___|
# |_|
def crawler_example():
# Create a seed
seed = [
RessourceUtil.get_ressource_path('d01.html'),
RessourceUtil.get_ressource_path('d06.html'),
RessourceUtil.get_ressource_path('d08.html')
]
    # Instantiate the crawler.
crawler = Crawler()
# Start the crawler with the seed.
crawler.start_crawling(seed)
# Access the data.
crawler.data
# Print the data...
for page in crawler.data:
# # ... with print_page(page_object)
Crawler.print_page(page)
# Print the link structure
link_structure_txt = crawler.get_link_structure_text()
print(link_structure_txt)
# Create an Indexer
indexer = Indexer()
# Index the pages
indexer.index_pages(*crawler.data)
# Print your index
print(indexer.index)
|
Add an example for building and printing the index.
|
Add an example for building and printing the index.
You can find the example inside the CrawlerExample.py.
|
Python
|
cc0-1.0
|
pascalweiss/SearchEngine,yveskaufmann/SearchEngine,yveskaufmann/SearchEngine
|
---
+++
@@ -1,6 +1,7 @@
__author__ = 'pascal'
from crawler.Crawler import Crawler
+from indexer.indexer import Indexer
from utils.path import RessourceUtil
# _____ _
@@ -36,3 +37,17 @@
# Print the link structure
link_structure_txt = crawler.get_link_structure_text()
print(link_structure_txt)
+
+ # Create an Indexer
+ indexer = Indexer()
+
+ # Index the pages
+ indexer.index_pages(*crawler.data)
+
+ # Print your index
+ print(indexer.index)
+
+
+
+
+
|
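The call indexer.index_pages(*crawler.data) above relies on argument unpacking: each crawled page becomes one positional argument. A self-contained sketch of that mechanic, with a hypothetical variadic function standing in for the real Indexer.

def index_pages(*pages):                 # variadic, like the Indexer method above
    return {i: page for i, page in enumerate(pages)}

data = ['d01.html', 'd06.html']          # stand-in for crawler.data
print(index_pages(*data))                # {0: 'd01.html', 1: 'd06.html'}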
708b519e066b8d443ed4768293db4517021d68fc
|
thinglang/__init__.py
|
thinglang/__init__.py
|
import os
from thinglang import utils
from thinglang.execution.execution import ExecutionEngine
from thinglang.lexer.lexer import lexer
from thinglang.parser.analyzer import Analyzer
from thinglang.parser.parser import parse
from thinglang.parser.simplifier import Simplifier
BASE_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'include')
def collect_includes():
files = [os.path.join(BASE_DIR, path) for path in os.listdir(BASE_DIR)]
return '\n' + '\n'.join(open(f).read() for f in files)
def run(source):
if not source:
raise ValueError('Source cannot be empty')
source = (source + collect_includes()).strip().replace(' ' * 4, '\t')
utils.print_header('Source', source)
lexical_groups = list(lexer(source))
ast = parse(lexical_groups)
Simplifier(ast).run()
utils.print_header('C++ Transpilation', ast.transpile_children())
utils.print_header('Parsed AST', ast.tree())
Analyzer(ast).run()
with ExecutionEngine(ast) as engine:
engine.execute()
return engine.results()
|
import os
from thinglang import utils
from thinglang.execution.execution import ExecutionEngine
from thinglang.lexer.lexer import lexer
from thinglang.parser.analyzer import Analyzer
from thinglang.parser.parser import parse
from thinglang.parser.simplifier import Simplifier
BASE_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'include')
def collect_includes():
files = [os.path.join(BASE_DIR, path) for path in os.listdir(BASE_DIR)]
return '\n' + '\n'.join(open(f).read() for f in files)
def compiler(source):
if not source:
raise ValueError('Source cannot be empty')
source = (source + collect_includes()).strip().replace(' ' * 4, '\t')
utils.print_header('Source', source)
lexical_groups = list(lexer(source))
ast = parse(lexical_groups)
Simplifier(ast).run()
utils.print_header('C++ Transpilation', ast.transpile_children())
utils.print_header('Parsed AST', ast.tree())
Analyzer(ast).run()
return ast
def run(source):
ast = compiler(source)
with ExecutionEngine(ast) as engine:
engine.execute()
return engine.results()
|
Split compiler and execution steps
|
Split compiler and execution steps
|
Python
|
mit
|
ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang
|
---
+++
@@ -15,7 +15,7 @@
return '\n' + '\n'.join(open(f).read() for f in files)
-def run(source):
+def compiler(source):
if not source:
raise ValueError('Source cannot be empty')
@@ -33,6 +33,12 @@
Analyzer(ast).run()
+ return ast
+
+
+def run(source):
+ ast = compiler(source)
+
with ExecutionEngine(ast) as engine:
engine.execute()
return engine.results()
|
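The payoff of the split above is that the pure compile step can be tested or reused without executing anything. A self-contained sketch of the pattern, with trivial stand-ins for the lexer, parser, and execution engine.

def compile_source(source: str) -> list:
    if not source:
        raise ValueError('Source cannot be empty')
    return source.split()                # stand-in for lex + parse + analyze

def run(source: str) -> str:
    tokens = compile_source(source)      # reuse the pure step, as run() does above
    return ' '.join(tokens)              # stand-in for the execution engine

print(run('hello world'))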
f7a9f65e68b2fe78a0180acb1b2bef552e9633f3
|
media/collector.py
|
media/collector.py
|
import feedparser
import pickle
import requests
hub = "http://feeds.feedburner.com/ampparit-politiikka" ## collect from ampparit all politics related sites
feed = feedparser.parse( hub )
stored = []
try:
stored = pickle.load( open( '.history' , 'r' ) )
except:
pass
stored = []
out = open( 'urls.txt' , 'a')
for item in feed['items']:
link = item['links'][0]['href']
_id = link.split('?id=')[1]
if _id not in stored:
r = requests.head( link )
url = r.headers.get('location')
out.write( _id + ',' + url + '\n' )
stored.append( _id )
pickle.dump( stored, open( '.history' , 'w' ) )
|
import feedparser
import pickle
import requests
import sys
hub = "http://feeds.feedburner.com/ampparit-politiikka" ## collect from ampparit all politics related sites
feed = feedparser.parse( hub )
stored = []
path = sys.argv[0]
try:
stored = pickle.load( open( path + '/.history' , 'r' ) )
except:
pass
out = open( path + '/urls.txt' , 'a')
for item in feed['items']:
link = item['links'][0]['href']
_id = link.split('?id=')[1]
if _id not in stored:
r = requests.head( link )
url = r.headers.get('location')
out.write( _id + ',' + url + '\n' )
stored.append( _id )
pickle.dump( stored, open( path + '/.history' , 'w' ) )
|
Make the path variable work on the server side
|
Make the path variable work on the server side
|
Python
|
mit
|
HIIT/digivaalit-2015,HIIT/digivaalit-2015,HIIT/digivaalit-2015
|
---
+++
@@ -1,6 +1,7 @@
import feedparser
import pickle
import requests
+import sys
hub = "http://feeds.feedburner.com/ampparit-politiikka" ## collect from ampparit all politics related sites
@@ -8,14 +9,14 @@
stored = []
+path = sys.argv[0]
+
try:
- stored = pickle.load( open( '.history' , 'r' ) )
+ stored = pickle.load( open( path + '/.history' , 'r' ) )
except:
pass
-stored = []
-
-out = open( 'urls.txt' , 'a')
+out = open( path + '/urls.txt' , 'a')
for item in feed['items']:
@@ -32,4 +33,4 @@
stored.append( _id )
-pickle.dump( stored, open( '.history' , 'w' ) )
+pickle.dump( stored, open( path + '/.history' , 'w' ) )
|
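One caveat about the change above: sys.argv[0] is the script's file name, not its directory, so path + '/.history' points inside a path named after the script itself. A self-contained sketch of the more conventional derivation.

import os
import sys

base_dir = os.path.dirname(os.path.abspath(sys.argv[0]))  # the script's directory
history_path = os.path.join(base_dir, '.history')
print(history_path)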
19a8a0e2f85b7ab01cbd3e2dd283e8e1e9b97373
|
example/example/tasksapp/run_tasks.py
|
example/example/tasksapp/run_tasks.py
|
import time
from dj_experiment.tasks.tasks import longtime_add
if __name__ == '__main__':
result = longtime_add.delay(1, 2)
# at this time, our task is not finished, so it will return False
print 'Task finished? ', result.ready()
print 'Task result: ', result.result
# sleep 10 seconds to ensure the task has been finished
time.sleep(10)
# now the task should be finished and ready method will return True
print 'Task finished? ', result.ready()
print 'Task result: ', result.result
|
import time
from dj_experiment.tasks.tasks import longtime_add, netcdf_save
if __name__ == '__main__':
result = longtime_add.delay(1, 2)
# at this time, our task is not finished, so it will return False
print 'Task finished? ', result.ready()
print 'Task result: ', result.result
# sleep 10 seconds to ensure the task has been finished
time.sleep(10)
# now the task should be finished and ready method will return True
print 'Task finished? ', result.ready()
print 'Task result: ', result.result
result1 = netcdf_save.delay(14, '')
print 'Task netcdf finished? ', result1.ready()
print 'Task result1: ', result1.result
time.sleep(10)
print 'Task netcdf finished? ', result1.ready()
print 'Task result1: ', result1.result
|
Add the use of task to the example app
|
Add the use of task to the example app
|
Python
|
mit
|
francbartoli/dj-experiment,francbartoli/dj-experiment
|
---
+++
@@ -1,6 +1,6 @@
import time
-from dj_experiment.tasks.tasks import longtime_add
+from dj_experiment.tasks.tasks import longtime_add, netcdf_save
if __name__ == '__main__':
result = longtime_add.delay(1, 2)
@@ -12,3 +12,10 @@
# now the task should be finished and ready method will return True
print 'Task finished? ', result.ready()
print 'Task result: ', result.result
+
+ result1 = netcdf_save.delay(14, '')
+ print 'Task netcdf finished? ', result1.ready()
+ print 'Task result1: ', result1.result
+ time.sleep(10)
+ print 'Task netcdf finished? ', result1.ready()
+ print 'Task result1: ', result1.result
|
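The fixed time.sleep(10) above only approximates completion. As a hedged alternative, assuming the same tasks and a running worker, celery's AsyncResult.get() blocks until the task finishes or a timeout expires.

from dj_experiment.tasks.tasks import longtime_add  # assumption: the example app above

result = longtime_add.delay(1, 2)
value = result.get(timeout=30)    # waits for the worker instead of sleeping blindly
print('Task result:', value)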
332f275b3ac4b93c523b474c94268bac834c180c
|
memorize/models.py
|
memorize/models.py
|
from datetime import datetime, timedelta
import datetime
from django.utils.timezone import utc
from django.db import models
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from .algorithm import interval
class Practice(models.Model):
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
item = generic.GenericForeignKey('content_type', 'object_id')
started_last_viewing = models.DateTimeField(null=True,blank=True, auto_now_add=True)
ended_last_viewing = models.DateTimeField(null=True,blank=True, auto_now_add=True)
user = models.ForeignKey(User)
next_practice = models.DateTimeField(auto_now_add=True)
times_practiced = models.PositiveIntegerField(default=0)
easy_factor = models.FloatField(default=2.5)
class Meta:
ordering = ['next_practice']
def set_next_practice(self, rating):
self.times_practiced += 1
minutes, ef = interval(self.times_practiced, rating, self.easy_factor)
self.next_practice = datetime.datetime.utcnow().replace(tzinfo=utc) + timedelta(minutes=minutes)
self.easy_factor = ef
def delay(self):
self.next_practice = datetime.utcnow().replace(utc) + timedelta(minutes=10)
|
from datetime import datetime, timedelta
from django.utils.timezone import utc
from django.db import models
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from .algorithm import interval
class Practice(models.Model):
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
item = generic.GenericForeignKey('content_type', 'object_id')
started_last_viewing = models.DateTimeField(null=True,blank=True, auto_now_add=True)
ended_last_viewing = models.DateTimeField(null=True,blank=True, auto_now_add=True)
user = models.ForeignKey(User)
next_practice = models.DateTimeField(auto_now_add=True)
times_practiced = models.PositiveIntegerField(default=0)
easy_factor = models.FloatField(default=2.5)
class Meta:
ordering = ['next_practice']
def set_next_practice(self, rating):
self.times_practiced += 1
minutes, ef = interval(self.times_practiced, rating, self.easy_factor)
self.next_practice = datetime.utcnow().replace(tzinfo=utc) + timedelta(minutes=minutes)
self.easy_factor = ef
def delay(self):
self.next_practice = datetime.utcnow().replace(tzinfo=utc) + timedelta(minutes=10)
|
Fix datetime usage in memorize model
|
Fix datetime usage in memorize model
|
Python
|
mit
|
DummyDivision/Tsune,DummyDivision/Tsune,DummyDivision/Tsune
|
---
+++
@@ -1,5 +1,5 @@
from datetime import datetime, timedelta
-import datetime
+
from django.utils.timezone import utc
from django.db import models
@@ -27,8 +27,8 @@
def set_next_practice(self, rating):
self.times_practiced += 1
minutes, ef = interval(self.times_practiced, rating, self.easy_factor)
- self.next_practice = datetime.datetime.utcnow().replace(tzinfo=utc) + timedelta(minutes=minutes)
+ self.next_practice = datetime.utcnow().replace(tzinfo=utc) + timedelta(minutes=minutes)
self.easy_factor = ef
def delay(self):
- self.next_practice = datetime.utcnow().replace(utc) + timedelta(minutes=10)
+ self.next_practice = datetime.utcnow().replace(tzinfo=utc) + timedelta(minutes=10)
|
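Why the delay() fix above matters: datetime.replace() takes year as its first positional argument, so replace(utc) passes a tzinfo where an integer is expected and raises a TypeError, while replace(tzinfo=utc) attaches the timezone. A self-contained check, with the stdlib timezone standing in for django.utils.timezone.utc.

from datetime import datetime, timedelta, timezone

utc = timezone.utc                        # stand-in for django.utils.timezone.utc
aware = datetime.utcnow().replace(tzinfo=utc) + timedelta(minutes=10)
print(aware.tzinfo)                       # UTC: the datetime is timezone-aware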
8c7daf1c0e140cb68c425b34eb60d9b001fd7063
|
fiduswriter/base/management/commands/jest.py
|
fiduswriter/base/management/commands/jest.py
|
from pathlib import Path
import shutil
from subprocess import call
from django.core.management.base import BaseCommand
from django.core.management import call_command
from django.conf import settings
BABEL_CONF = '''
module.exports = {
presets: [
[
'@babel/preset-env',
{
targets: {
node: 'current',
},
},
],
],
}
'''
class Command(BaseCommand):
help = 'Run jest unit tests.'
def handle(self, *ars, **options):
call_command('transpile')
p = Path(settings.PROJECT_PATH) / '.transpile'
shutil.os.chdir(p)
conf_file = p / 'babel.config.js'
if not conf_file.exists():
print(f'Creating "babel.config.js" at {p}.')
conf_file.write_text(BABEL_CONF)
command_array = [
p / 'node_modules' / '.bin' / 'jest',
'--no-cache',
]
return_value = call(command_array)
if return_value > 0:
exit(return_value)
|
from pathlib import Path
import shutil
from subprocess import call
from django.core.management.base import BaseCommand
from django.core.management import call_command
from django.conf import settings
BABEL_CONF = '''
module.exports = {
presets: [
[
'@babel/preset-env',
{
targets: {
node: 'current',
},
},
],
],
}
'''
class Command(BaseCommand):
help = 'Run jest unit tests.'
def handle(self, *ars, **options):
call_command('transpile')
p = Path(settings.PROJECT_PATH) / '.transpile'
shutil.os.chdir(p)
conf_file = p / 'babel.config.js'
if not conf_file.exists():
print(f'Creating "babel.config.js" at {p}.')
conf_file.write_text(BABEL_CONF)
command_array = [
p / 'node_modules' / '.bin' / 'jest',
'--no-cache',
'--passWithNoTests',
]
return_value = call(command_array)
if return_value > 0:
exit(return_value)
|
Make test suite pass when there are no tests
|
Make test suite pass when there are no tests
|
Python
|
agpl-3.0
|
fiduswriter/fiduswriter,fiduswriter/fiduswriter,fiduswriter/fiduswriter,fiduswriter/fiduswriter
|
---
+++
@@ -39,6 +39,7 @@
command_array = [
p / 'node_modules' / '.bin' / 'jest',
'--no-cache',
+ '--passWithNoTests',
]
return_value = call(command_array)
if return_value > 0:
|
d6500b3d9af37fb2cd0fa14c82f78b165f9d221b
|
test_framework/test_settings.py
|
test_framework/test_settings.py
|
from .settings import * # NOQA
# Django 1.8 still has INSTALLED_APPS as a tuple
INSTALLED_APPS = list(INSTALLED_APPS)
INSTALLED_APPS.append('djoyapp')
|
from .settings import * # NOQA
INSTALLED_APPS.append('djoyapp')
|
Remove handling of apps tuple; it is always a list now
|
Remove handling of apps tuple; it is always a list now
Since Django 1.11, app settings are lists by default
|
Python
|
mit
|
jamescooke/factory_djoy
|
---
+++
@@ -1,6 +1,4 @@
from .settings import * # NOQA
-# Django 1.8 still has INSTALLED_APPS as a tuple
-INSTALLED_APPS = list(INSTALLED_APPS)
INSTALLED_APPS.append('djoyapp')
|
46af8faf699d893a95ecec402030ef74e07e77ed
|
recharges/tasks.py
|
recharges/tasks.py
|
import requests
from django.conf import settings
from celery.task import Task
from celery.utils.log import get_task_logger
from .models import Account
logger = get_task_logger(__name__)
class Hotsocket_Login(Task):
"""
    Task to get the username and password verified, then produce a token
"""
name = "gopherairtime.recharges.tasks.hotsocket_login"
def run(self, **kwargs):
"""
Returns the token
"""
l = self.get_logger(**kwargs)
l.info("Logging into hotsocket")
auth = {'username': 'trial_acc_1212', 'password': 'tr14l_l1k3m00n',
'as_json': True}
r = requests.post("%s/login" % settings.HOTSOCKET_API_ENDPOINT,
data=auth)
result = r.json()
token = result["response"]["token"]
account = Account()
account.token = token
account.save()
return True
hotsocket_login = Hotsocket_Login()
|
import requests
from django.conf import settings
from celery.task import Task
from celery.utils.log import get_task_logger
from .models import Account
logger = get_task_logger(__name__)
class Hotsocket_Login(Task):
"""
    Task to get the username and password verified, then produce a token
"""
name = "gopherairtime.recharges.tasks.hotsocket_login"
def run(self, **kwargs):
"""
Returns the token
"""
l = self.get_logger(**kwargs)
l.info("Logging into hotsocket")
auth = {'username': settings.HOTSOCKET_API_USERNAME,
'password': settings.HOTSOCKET_API_PASSWORD,
'as_json': True}
r = requests.post("%s/login" % settings.HOTSOCKET_API_ENDPOINT,
data=auth)
result = r.json()
# Store the result for other tasks to use
token = result["response"]["token"]
account = Account()
account.token = token
account.save()
return True
hotsocket_login = Hotsocket_Login()
|
Update login task to get username from settings
|
Update login task to get username from settings
|
Python
|
bsd-3-clause
|
westerncapelabs/gopherairtime,westerncapelabs/gopherairtime
|
---
+++
@@ -22,11 +22,15 @@
"""
l = self.get_logger(**kwargs)
l.info("Logging into hotsocket")
- auth = {'username': 'trial_acc_1212', 'password': 'tr14l_l1k3m00n',
+
+ auth = {'username': settings.HOTSOCKET_API_USERNAME,
+ 'password': settings.HOTSOCKET_API_PASSWORD,
'as_json': True}
+
r = requests.post("%s/login" % settings.HOTSOCKET_API_ENDPOINT,
data=auth)
result = r.json()
+ # Store the result for other tasks to use
token = result["response"]["token"]
account = Account()
account.token = token
|
2932698f81a17204b824763e648cd56dbab5f5b2
|
hawkpost/settings/development.py
|
hawkpost/settings/development.py
|
from .common import *
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': "hawkpost_dev",
}
}
# Development Applications
INSTALLED_APPS += (
'debug_toolbar',
'django_extensions'
)
EMAIL_HOST = "127.0.0.1"
EMAIL_PORT = 1025
|
from .common import *
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': "hawkpost_dev",
}
}
# If the DB_HOST was specified it is overriding the default connection
if 'DB_HOST' in os.environ:
DATABASES['default']['HOST'] = os.environ.get("DB_HOST")
DATABASES['default']['PORT'] = os.environ.get("DB_PORT", 5432)
DATABASES['default']['USER'] = os.environ.get("DB_USER")
DATABASES['default']['NAME'] = os.environ.get("DB_NAME", "hawkpost_dev")
if 'DB_PASSWORD' in os.environ:
DATABASES['default']['PASSWORD'] = os.environ.get("DB_PASSWORD")
# Development Applications
INSTALLED_APPS += (
'debug_toolbar',
'django_extensions'
)
EMAIL_HOST = os.environ.get("EMAIL_HOST", "127.0.0.1")
EMAIL_PORT = os.environ.get("EMAIL_PORT", 1025)
|
Allow overriding database and mail_debug settings
|
Allow overriding database and mail_debug settings
Using environment variables to override default database connection
and mail_debug settings in development mode. This allows setting
the values needed by the Docker environment.
|
Python
|
mit
|
whitesmith/hawkpost,whitesmith/hawkpost,whitesmith/hawkpost
|
---
+++
@@ -13,6 +13,15 @@
}
}
+# If the DB_HOST was specified it is overriding the default connection
+if 'DB_HOST' in os.environ:
+ DATABASES['default']['HOST'] = os.environ.get("DB_HOST")
+ DATABASES['default']['PORT'] = os.environ.get("DB_PORT", 5432)
+ DATABASES['default']['USER'] = os.environ.get("DB_USER")
+ DATABASES['default']['NAME'] = os.environ.get("DB_NAME", "hawkpost_dev")
+
+ if 'DB_PASSWORD' in os.environ:
+ DATABASES['default']['PASSWORD'] = os.environ.get("DB_PASSWORD")
# Development Applications
INSTALLED_APPS += (
@@ -20,5 +29,5 @@
'django_extensions'
)
-EMAIL_HOST = "127.0.0.1"
-EMAIL_PORT = 1025
+EMAIL_HOST = os.environ.get("EMAIL_HOST", "127.0.0.1")
+EMAIL_PORT = os.environ.get("EMAIL_PORT", 1025)
|
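A self-contained reduction of the override pattern above. One subtlety worth noting: os.environ values are always strings, so numeric settings such as ports may need an explicit cast (the original leaves DB_PORT and EMAIL_PORT as strings when they come from the environment).

import os

DATABASES = {'default': {'ENGINE': 'django.db.backends.postgresql',
                         'NAME': 'hawkpost_dev'}}

if 'DB_HOST' in os.environ:                       # override only when Docker/CI sets it
    DATABASES['default']['HOST'] = os.environ['DB_HOST']
    DATABASES['default']['PORT'] = int(os.environ.get('DB_PORT', 5432))

EMAIL_PORT = int(os.environ.get('EMAIL_PORT', 1025))
print(DATABASES['default'], EMAIL_PORT)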
aeefef1f80ba92c7900c95c436b61b019d8ffb6a
|
src/waldur_mastermind/marketplace_openstack/migrations/0011_limit_components.py
|
src/waldur_mastermind/marketplace_openstack/migrations/0011_limit_components.py
|
from django.db import migrations
TENANT_TYPE = 'Packages.Template'
LIMIT = 'limit'
def process_components(apps, schema_editor):
OfferingComponent = apps.get_model('marketplace', 'OfferingComponent')
OfferingComponent.objects.filter(offering__type=TENANT_TYPE).update(
billing_type=LIMIT
)
class Migration(migrations.Migration):
dependencies = [
('marketplace_openstack', '0010_split_invoice_items'),
]
operations = [migrations.RunPython(process_components)]
|
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('marketplace_openstack', '0010_split_invoice_items'),
]
|
Remove invalid migration script: it has been superseded by 0052_limit_components
|
Remove invalid migration script: it has been superseded by 0052_limit_components
|
Python
|
mit
|
opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/nodeconductor-assembly-waldur
|
---
+++
@@ -1,19 +1,7 @@
from django.db import migrations
-
-TENANT_TYPE = 'Packages.Template'
-LIMIT = 'limit'
-
-
-def process_components(apps, schema_editor):
- OfferingComponent = apps.get_model('marketplace', 'OfferingComponent')
- OfferingComponent.objects.filter(offering__type=TENANT_TYPE).update(
- billing_type=LIMIT
- )
class Migration(migrations.Migration):
dependencies = [
('marketplace_openstack', '0010_split_invoice_items'),
]
-
- operations = [migrations.RunPython(process_components)]
|
353ad2e4d03d5ad5a8c5a1e949e8cd3251c7d85b
|
holviapi/tests/test_api_idempotent.py
|
holviapi/tests/test_api_idempotent.py
|
# -*- coding: utf-8 -*-
import os
import pytest
import holviapi
@pytest.fixture
def connection():
pool = os.environ.get('HOLVI_POOL', None)
key = os.environ.get('HOLVI_KEY', None)
if not pool or not key:
raise RuntimeError("HOLVI_POOL and HOLVI_KEY must be in ENV for these tests")
cnc = holviapi.Connection(pool,key)
return cnc
@pytest.fixture
def invoiceapi():
cnc = connection()
ia = holviapi.InvoiceAPI(cnc)
return ia
def test_list_invoices(invoiceapi):
l = invoiceapi.list_invoices()
i = next(l)
assert type(i) == holviapi.Invoice
|
# -*- coding: utf-8 -*-
import os
import pytest
import holviapi
@pytest.fixture
def connection():
pool = os.environ.get('HOLVI_POOL', None)
key = os.environ.get('HOLVI_KEY', None)
if not pool or not key:
raise RuntimeError("HOLVI_POOL and HOLVI_KEY must be in ENV for these tests")
cnc = holviapi.Connection(pool,key)
return cnc
@pytest.fixture
def invoiceapi():
cnc = connection()
ia = holviapi.InvoiceAPI(cnc)
return ia
def test_list_invoices(invoiceapi):
l = invoiceapi.list_invoices()
i = next(l)
assert type(i) == holviapi.Invoice
def test_get_invoice(invoiceapi):
l = invoiceapi.list_invoices()
i = next(l)
assert type(i) == holviapi.Invoice
i2 = invoiceapi.get_invoice(i.code)
assert i.code == i2.code
|
Test getting invoice by code
|
Test getting invoice by code
|
Python
|
mit
|
rambo/python-holviapi,rambo/python-holviapi
|
---
+++
@@ -22,3 +22,10 @@
l = invoiceapi.list_invoices()
i = next(l)
assert type(i) == holviapi.Invoice
+
+def test_get_invoice(invoiceapi):
+ l = invoiceapi.list_invoices()
+ i = next(l)
+ assert type(i) == holviapi.Invoice
+ i2 = invoiceapi.get_invoice(i.code)
+ assert i.code == i2.code
|
745c03d3cc5ae31fb852ba7bfc9d0ad6a9ac4716
|
unittests.py
|
unittests.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import const
import uniformdh
import obfsproxy.network.buffer as obfs_buf
class UniformDHTest( unittest.TestCase ):
def setUp( self ):
weAreServer = True
self.udh = uniformdh.new("A" * const.SHARED_SECRET_LENGTH, weAreServer)
def test1_createHandshake( self ):
handshake = self.udh.createHandshake()
self.failUnless((const.PUBLIC_KEY_LENGTH +
const.MARKER_LENGTH +
const.HMAC_LENGTH) <= len(handshake) <=
(const.MARKER_LENGTH +
const.HMAC_LENGTH +
const.MAX_PADDING_LENGTH))
def test2_receivePublicKey( self ):
buf = obfs_buf.Buffer(self.udh.createHandshake())
self.failUnless(self.udh.receivePublicKey(buf, lambda x: x) == True)
publicKey = self.udh.getRemotePublicKey()
self.failUnless(len(publicKey) == const.PUBLIC_KEY_LENGTH)
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import const
import uniformdh
import obfsproxy.network.buffer as obfs_buf
class UniformDHTest( unittest.TestCase ):
def setUp( self ):
weAreServer = True
self.udh = uniformdh.new("A" * const.SHARED_SECRET_LENGTH, weAreServer)
def test1_createHandshake( self ):
handshake = self.udh.createHandshake()
self.failUnless((const.PUBLIC_KEY_LENGTH +
const.MARKER_LENGTH +
const.HMAC_LENGTH) <= len(handshake) <=
(const.MARKER_LENGTH +
const.HMAC_LENGTH +
const.MAX_PADDING_LENGTH))
def test2_receivePublicKey( self ):
buf = obfs_buf.Buffer(self.udh.createHandshake())
self.failUnless(self.udh.receivePublicKey(buf, lambda x: x) == True)
publicKey = self.udh.getRemotePublicKey()
self.failUnless(len(publicKey) == const.PUBLIC_KEY_LENGTH)
def test3_invalidHMAC( self ):
# Make the HMAC invalid.
handshake = self.udh.createHandshake()
if handshake[-1] != 'a':
handshake = handshake[:-1] + 'a'
else:
handshake = handshake[:-1] + 'b'
buf = obfs_buf.Buffer(handshake)
self.failIf(self.udh.receivePublicKey(buf, lambda x: x) == True)
if __name__ == '__main__':
unittest.main()
|
Add UniformDH unit test to test for invalid HMACs.
|
Add UniformDH unit test to test for invalid HMACs.
|
Python
|
bsd-3-clause
|
isislovecruft/scramblesuit,isislovecruft/scramblesuit
|
---
+++
@@ -31,5 +31,18 @@
publicKey = self.udh.getRemotePublicKey()
self.failUnless(len(publicKey) == const.PUBLIC_KEY_LENGTH)
+ def test3_invalidHMAC( self ):
+ # Make the HMAC invalid.
+ handshake = self.udh.createHandshake()
+ if handshake[-1] != 'a':
+ handshake = handshake[:-1] + 'a'
+ else:
+ handshake = handshake[:-1] + 'b'
+
+ buf = obfs_buf.Buffer(handshake)
+
+ self.failIf(self.udh.receivePublicKey(buf, lambda x: x) == True)
+
+
if __name__ == '__main__':
unittest.main()
|
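A self-contained illustration of what test3_invalidHMAC exercises above: corrupt the final byte of a MAC and confirm verification rejects it. hmac.compare_digest is used here for constant-time comparison; no scramblesuit internals are assumed.

import hashlib
import hmac

key, msg = b'secret-key', b'handshake-payload'
mac = hmac.new(key, msg, hashlib.sha256).digest()
bad = mac[:-1] + bytes([mac[-1] ^ 0x01])      # flip the last byte, as the test does
print(hmac.compare_digest(mac, bad))          # False: the tampered MAC is rejected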
da47bca1bbdffff536d240cafe780533ee79809e
|
mesh.py
|
mesh.py
|
#!/usr/bin/env python3
import os
import shutil
import sys
import readline
import traceback
readline.parse_and_bind('tab: complete')
readline.parse_and_bind('set editing-mode vi')
builtin_cmds = {'cd', 'pwd', 'exit',}
def prompt():
return '%s$ ' % os.getcwd()
def read_command():
line = input(prompt())
return line
def parse_command(cmd_text):
return (cmd_text, cmd_text.strip().split())
def record_command(command):
return True
def run_builtin(cmd, cmd_text):
if shutil.which(cmd[0]):
os.system(cmd_text)
if cmd[0] == 'cd':
os.chdir(cmd[1])
elif cmd[0] == 'pwd':
print(os.getcwd())
elif cmd[0] == 'exit':
sys.exit()
if __name__ == "__main__":
while True:
try:
cmd_text = read_command()
cmd_text, cmd = parse_command(cmd_text)
record_command(cmd)
if cmd[0] in builtin_cmds:
run_builtin(cmd, cmd_text)
else:
#pid = subprocess.Popen(cmd_text, stdin=None, stdout=None, shell=True)
os.system(cmd_text)
except SystemExit:
break
except:
traceback.print_exc()
|
#!/usr/bin/env python3
import os
import shutil
import sys
import readline
import traceback
readline.parse_and_bind('tab: complete')
readline.parse_and_bind('set editing-mode vi')
builtin_cmds = {'cd', 'pwd', 'exit',}
def prompt():
return '%s$ ' % os.getcwd()
def read_command():
line = input(prompt())
return line
def parse_command(cmd_text):
return (cmd_text, cmd_text.strip().split())
def record_command(command):
return True
def run_builtin(cmd, cmd_text):
if shutil.which(cmd[0]):
os.system(cmd_text)
if cmd[0] == 'cd':
os.chdir(cmd[1])
elif cmd[0] == 'pwd':
print(os.getcwd())
elif cmd[0] == 'exit':
sys.exit()
if __name__ == "__main__":
while True:
try:
cmd_text = read_command()
cmd_text, cmd = parse_command(cmd_text)
record_command(cmd)
if cmd[0] in builtin_cmds:
run_builtin(cmd, cmd_text)
else:
#pid = subprocess.Popen(cmd_text, stdin=None, stdout=None, shell=True)
os.system(cmd_text)
except KeyboardInterrupt:
print('')
pass
except SystemExit:
break
except EOFError:
print('')
break
except:
traceback.print_exc()
|
Handle ctrl-c and ctrl-d properly
|
Handle ctrl-c and ctrl-d properly
|
Python
|
mit
|
mmichie/mesh
|
---
+++
@@ -45,7 +45,13 @@
else:
#pid = subprocess.Popen(cmd_text, stdin=None, stdout=None, shell=True)
os.system(cmd_text)
+ except KeyboardInterrupt:
+ print('')
+ pass
except SystemExit:
+ break
+ except EOFError:
+ print('')
break
except:
traceback.print_exc()
|
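The two new handlers above map directly onto how input() reports terminal keys: Ctrl-C raises KeyboardInterrupt (print a fresh line and keep looping) while Ctrl-D raises EOFError (end of input, exit). A self-contained loop with the same shape; run it in a terminal to try both keys.

def read_loop():
    while True:
        try:
            line = input('$ ')
        except KeyboardInterrupt:
            print('')             # Ctrl-C: new line, stay in the loop
            continue
        except EOFError:
            print('')             # Ctrl-D: end of input, leave the loop
            break
        print('got:', line)

if __name__ == '__main__':
    read_loop()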
6c932dc133ca2e6608297a93489e5c57ad73d5c2
|
models/fallahi_eval/evidence_sources.py
|
models/fallahi_eval/evidence_sources.py
|
from util import pklload
from collections import defaultdict
import indra.tools.assemble_corpus as ac
if __name__ == '__main__':
# Load cached Statements just before going into the model
stmts = pklload('pysb_stmts')
# Start a dictionary for source counts
sources_count = defaultdict(int)
# Count statements according to sources of evidence
for stmt in stmts:
sources = tuple(sorted(list(set([ev.source_api for ev in stmt.evidence]))))
sources_count[sources] += 1
# Statements from databases only
db_only = 0
# Statements from reading only
reading_only = 0
# Statements from databases and reading
mixture = 0
# Database sources
dbs = set(['bel', 'biopax', 'phosphosite', 'signor'])
# Reader sources
readers = set(['reach', 'trips', 'sparser', 'r3'])
for k, v in sources_count.items():
d = set(k).intersection(dbs)
r = set(k).intersection(readers)
if d and r:
mixture += v
if d and not r:
db_only += v
if r and not d:
reading_only += v
for k, v in sorted(sources_count.items(), key=lambda x: (len(x[1]), x[1])):
sources_str = ','.join(k)
line_str = sources_str + ',' + str(v)
|
from util import pklload
from collections import defaultdict
import indra.tools.assemble_corpus as ac
if __name__ == '__main__':
# Load cached Statements just before going into the model
stmts = pklload('pysb_stmts')
# Start a dictionary for source counts
sources_count = defaultdict(int)
# Count statements according to sources of evidence
for stmt in stmts:
sources = tuple(sorted(list(set([ev.source_api for ev in stmt.evidence]))))
sources_count[sources] += 1
# Statements from databases only
db_only = 0
# Statements from reading only
reading_only = 0
# Statements from databases and reading
mixture = 0
# Database sources
dbs = set(['bel', 'biopax', 'phosphosite', 'signor'])
# Reader sources
readers = set(['reach', 'trips', 'sparser', 'r3'])
for k, v in sources_count.items():
d = set(k).intersection(dbs)
r = set(k).intersection(readers)
if d and r:
mixture += v
if d and not r:
db_only += v
if r and not d:
reading_only += v
for k, v in sorted(sources_count.items(), key=lambda x: (len(x[0]), ','.join(sorted(x[0])))):
sources_str = ','.join(k)
line_str = sources_str + '\t' + str(v)
print(line_str)
|
Fix some things in evidence sources
|
Fix some things in evidence sources
|
Python
|
bsd-2-clause
|
johnbachman/belpy,sorgerlab/indra,johnbachman/indra,johnbachman/belpy,sorgerlab/belpy,pvtodorov/indra,bgyori/indra,pvtodorov/indra,pvtodorov/indra,sorgerlab/indra,sorgerlab/belpy,johnbachman/belpy,sorgerlab/belpy,pvtodorov/indra,johnbachman/indra,sorgerlab/indra,bgyori/indra,johnbachman/indra,bgyori/indra
|
---
+++
@@ -34,6 +34,7 @@
if r and not d:
reading_only += v
- for k, v in sorted(sources_count.items(), key=lambda x: (len(x[1]), x[1])):
+ for k, v in sorted(sources_count.items(), key=lambda x: (len(x[0]), ','.join(sorted(x[0])))):
sources_str = ','.join(k)
- line_str = sources_str + ',' + str(v)
+ line_str = sources_str + '\t' + str(v)
+ print(line_str)
|
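The sort-key change above is more than cosmetic: in the original, x[1] is the integer count, so len(x[1]) raises a TypeError. The fix keys on the source tuple's length and its joined, sorted names. A self-contained check with made-up counts.

counts = {('reach',): 5, ('bel', 'reach'): 2, ('biopax',): 7}
for k, v in sorted(counts.items(), key=lambda x: (len(x[0]), ','.join(sorted(x[0])))):
    print(','.join(k) + '\t' + str(v))    # single sources first, then combinations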
ee485b086e66f6c423e6c9b728d43a6ace071d55
|
Lib/test/test_frozen.py
|
Lib/test/test_frozen.py
|
# Test the frozen module defined in frozen.c.
from __future__ import with_statement
from test.test_support import captured_stdout, run_unittest
import unittest
import sys, os
class FrozenTests(unittest.TestCase):
def test_frozen(self):
with captured_stdout() as stdout:
try:
import __hello__
except ImportError as x:
self.fail("import __hello__ failed:" + str(x))
try:
import __phello__
except ImportError as x:
self.fail("import __phello__ failed:" + str(x))
try:
import __phello__.spam
except ImportError as x:
self.fail("import __phello__.spam failed:" + str(x))
if sys.platform != "mac": # On the Mac this import does succeed.
try:
import __phello__.foo
except ImportError:
pass
else:
self.fail("import __phello__.foo should have failed")
self.assertEquals(stdout.getvalue(),
'Hello world...\nHello world...\nHello world...\n')
def test_main():
run_unittest(FrozenTests)
|
# Test the frozen module defined in frozen.c.
from __future__ import with_statement
from test.test_support import captured_stdout, run_unittest
import unittest
import sys, os
class FrozenTests(unittest.TestCase):
def test_frozen(self):
with captured_stdout() as stdout:
try:
import __hello__
except ImportError as x:
self.fail("import __hello__ failed:" + str(x))
try:
import __phello__
except ImportError as x:
self.fail("import __phello__ failed:" + str(x))
try:
import __phello__.spam
except ImportError as x:
self.fail("import __phello__.spam failed:" + str(x))
if sys.platform != "mac": # On the Mac this import does succeed.
try:
import __phello__.foo
except ImportError:
pass
else:
self.fail("import __phello__.foo should have failed")
self.assertEquals(stdout.getvalue(),
'Hello world...\nHello world...\nHello world...\n')
def test_main():
run_unittest(FrozenTests)
if __name__ == "__main__":
test_main()
|
Make it possible to run this test stand-alone.
|
Make it possible to run this test stand-alone.
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
---
+++
@@ -38,3 +38,6 @@
def test_main():
run_unittest(FrozenTests)
+
+if __name__ == "__main__":
+ test_main()
|
61f710b64f32da26bd36c7c95a3f46e4d21c991a
|
modules/output_statistics.py
|
modules/output_statistics.py
|
from pysqlite2 import dbapi2 as sqlite
import sys, os.path
class Statistics:
def __init__(self):
self.total = 0
self.passed = 0
self.failed = 0
def register(self, manager, parser):
manager.register(instance=self, event='input', keyword='stats', callback=self.input, order=65535)
manager.register(instance=self, event='exit', keyword='stats', callback=self.exit)
def init(self, con):
create = """
CREATE TABLE IF NOT EXISTS statistics
(
timestamp TIMESTAMP,
feed varchar(255),
success integer,
failure integer
);"""
cur = con.cursor()
cur.execute(create)
con.commit()
def input(self, feed):
self.total = len(feed.entries)
def exit(self, feed):
self.passed = len(feed.entries)
self.failed = self.total - self.passed
dbname = os.path.join(sys.path[0], feed.manager.configname+".db")
con = sqlite.connect(dbname)
self.init(con)
cur = con.cursor()
cur.execute("insert into statistics (timestamp, feed, success, failure) values (date('now'), '%s', %d, %d);" % (feed.name, self.passed, self.failed))
con.commit()
con.close()
|
import sys, os.path
has_sqlite = True
try:
from pysqlite2 import dbapi2 as sqlite
except:
has_sqlite = False
class Statistics:
def __init__(self):
self.total = 0
self.passed = 0
self.failed = 0
def register(self, manager, parser):
manager.register(instance=self, event='input', keyword='stats', callback=self.input, order=65535)
manager.register(instance=self, event='exit', keyword='stats', callback=self.exit)
def init(self, con):
create = """
CREATE TABLE IF NOT EXISTS statistics
(
timestamp TIMESTAMP,
feed varchar(255),
success integer,
failure integer
);"""
cur = con.cursor()
cur.execute(create)
con.commit()
def input(self, feed):
if not has_sqlite:
raise Exception('module statistics requires python-sqlite2 (Sqlite v3) library.')
self.total = len(feed.entries)
def exit(self, feed):
self.passed = len(feed.entries)
self.failed = self.total - self.passed
dbname = os.path.join(sys.path[0], feed.manager.configname+".db")
con = sqlite.connect(dbname)
self.init(con)
cur = con.cursor()
cur.execute("insert into statistics (timestamp, feed, success, failure) values (date('now'), '%s', %d, %d);" % (feed.name, self.passed, self.failed))
con.commit()
con.close()
|
Check that python-sqlite2 is installed
|
Check that python-sqlite2 is installed
git-svn-id: ad91b9aa7ba7638d69f912c9f5d012e3326e9f74@147 3942dd89-8c5d-46d7-aeed-044bccf3e60c
|
Python
|
mit
|
asm0dey/Flexget,gazpachoking/Flexget,vfrc2/Flexget,X-dark/Flexget,ianstalk/Flexget,patsissons/Flexget,tvcsantos/Flexget,ibrahimkarahan/Flexget,Flexget/Flexget,ibrahimkarahan/Flexget,lildadou/Flexget,LynxyssCZ/Flexget,antivirtel/Flexget,drwyrm/Flexget,jawilson/Flexget,offbyone/Flexget,ZefQ/Flexget,thalamus/Flexget,malkavi/Flexget,oxc/Flexget,vfrc2/Flexget,Danfocus/Flexget,Pretagonist/Flexget,grrr2/Flexget,ratoaq2/Flexget,JorisDeRieck/Flexget,drwyrm/Flexget,qvazzler/Flexget,dsemi/Flexget,tarzasai/Flexget,voriux/Flexget,gazpachoking/Flexget,antivirtel/Flexget,ratoaq2/Flexget,grrr2/Flexget,cvium/Flexget,tsnoam/Flexget,jacobmetrick/Flexget,thalamus/Flexget,jawilson/Flexget,vfrc2/Flexget,LynxyssCZ/Flexget,spencerjanssen/Flexget,patsissons/Flexget,crawln45/Flexget,tsnoam/Flexget,tsnoam/Flexget,camon/Flexget,Danfocus/Flexget,antivirtel/Flexget,Danfocus/Flexget,Danfocus/Flexget,thalamus/Flexget,sean797/Flexget,malkavi/Flexget,v17al/Flexget,LynxyssCZ/Flexget,patsissons/Flexget,qk4l/Flexget,ratoaq2/Flexget,sean797/Flexget,OmgOhnoes/Flexget,tarzasai/Flexget,offbyone/Flexget,qk4l/Flexget,dsemi/Flexget,qvazzler/Flexget,lildadou/Flexget,v17al/Flexget,JorisDeRieck/Flexget,xfouloux/Flexget,jacobmetrick/Flexget,LynxyssCZ/Flexget,jawilson/Flexget,OmgOhnoes/Flexget,asm0dey/Flexget,spencerjanssen/Flexget,sean797/Flexget,ZefQ/Flexget,Flexget/Flexget,jacobmetrick/Flexget,xfouloux/Flexget,offbyone/Flexget,Flexget/Flexget,X-dark/Flexget,Flexget/Flexget,camon/Flexget,tobinjt/Flexget,poulpito/Flexget,Pretagonist/Flexget,lildadou/Flexget,ianstalk/Flexget,ibrahimkarahan/Flexget,malkavi/Flexget,xfouloux/Flexget,ZefQ/Flexget,jawilson/Flexget,tobinjt/Flexget,tobinjt/Flexget,Pretagonist/Flexget,tobinjt/Flexget,JorisDeRieck/Flexget,JorisDeRieck/Flexget,OmgOhnoes/Flexget,crawln45/Flexget,cvium/Flexget,grrr2/Flexget,asm0dey/Flexget,tvcsantos/Flexget,v17al/Flexget,oxc/Flexget,ianstalk/Flexget,malkavi/Flexget,qvazzler/Flexget,dsemi/Flexget,poulpito/Flexget,qk4l/Flexget,crawln45/Flexget,drwyrm/Flexget,crawln45/Flexget,voriux/Flexget,X-dark/Flexget,poulpito/Flexget,tarzasai/Flexget,cvium/Flexget,spencerjanssen/Flexget,oxc/Flexget
|
---
+++
@@ -1,5 +1,10 @@
-from pysqlite2 import dbapi2 as sqlite
import sys, os.path
+
+has_sqlite = True
+try:
+ from pysqlite2 import dbapi2 as sqlite
+except:
+ has_sqlite = False
class Statistics:
def __init__(self):
@@ -25,6 +30,8 @@
con.commit()
def input(self, feed):
+ if not has_sqlite:
+ raise Exception('module statistics requires python-sqlite2 (Sqlite v3) library.')
self.total = len(feed.entries)
def exit(self, feed):
|
62c5e911689555c62931692e0d6ff87ed7340559
|
src/models/split.py
|
src/models/split.py
|
# Third-party modules
import numpy as np
import pandas as pd
from sklearn.model_selection import KFold
# Hand-made modules
from .base import BloscpackMixin
KWARGS_READ_CSV = {
"sep": "\t",
"header": 0,
"parse_dates": [0],
"index_col": 0
}
class ValidationSplitHandler(BloscpackMixin):
def __init__(self):
super().__init__()
def separate_and_serialize_validation_index(self,
train_filepath_prefix,
location,
n_splits):
train_dataframe_filepath = '.'.join([train_filepath_prefix,
location + ".tsv"])
df = pd.read_csv(train_dataframe_filepath, **KWARGS_READ_CSV)
train_index = df.index
for n_iter, (_, test_index) in enumerate(KFold(n_splits=n_splits).split(train_index)):
serialized_filepath = '.'.join([train_filepath_prefix,
"index.crossval{i}".format(i=n_iter),
location + ".blp"])
self.to_blp(
np.asarray(train_index[test_index]), serialized_filepath
)
if __name__ == '__main__':
print("Validation index splitter !")
|
# Third-party modules
import numpy as np
import pandas as pd
from sklearn.model_selection import KFold
# Hand-made modules
from .base import PathHandlerBase, BloscpackMixin
KWARGS_READ_CSV = {
"sep": "\t",
"header": 0,
"parse_dates": [0],
"index_col": 0
}
KWARGS_TO_CSV = {
"sep": "\t"
}
OBJECTIVE_LABEL_NAMES = ["kwh", ]
Y_TRUE_FILEPATH_PREFIX = "train_y"
Y_TRUE_FILEPATH_SUFFIX = "tsv"
class ValidationSplitHandler(BloscpackMixin):
def __init__(self):
super().__init__()
def separate_and_serialize_validation_index(self,
train_filepath_prefix,
location,
n_splits):
df = pd.read_csv('.'.join([train_filepath_prefix,
"{l}.tsv".format(l=location)]),
**KWARGS_READ_CSV)
train_index = df.index
for n_iter, (_, test_index) in enumerate(KFold(n_splits=n_splits).split(train_index)):
serialized_filepath = '.'.join([train_filepath_prefix,
"index.crossval{i}".format(i=n_iter),
location + ".blp"])
self.to_blp(
np.asarray(train_index[test_index]), serialized_filepath
)
if __name__ == '__main__':
print("Validation index splitter !")
|
Adjust to current serialization conditions
|
Adjust to current serialization conditions
+ blp -> tsv
|
Python
|
mit
|
gciteam6/xgboost,gciteam6/xgboost
|
---
+++
@@ -3,7 +3,7 @@
import pandas as pd
from sklearn.model_selection import KFold
# Hand-made modules
-from .base import BloscpackMixin
+from .base import PathHandlerBase, BloscpackMixin
KWARGS_READ_CSV = {
"sep": "\t",
@@ -11,6 +11,12 @@
"parse_dates": [0],
"index_col": 0
}
+KWARGS_TO_CSV = {
+ "sep": "\t"
+}
+OBJECTIVE_LABEL_NAMES = ["kwh", ]
+Y_TRUE_FILEPATH_PREFIX = "train_y"
+Y_TRUE_FILEPATH_SUFFIX = "tsv"
class ValidationSplitHandler(BloscpackMixin):
@@ -21,9 +27,9 @@
train_filepath_prefix,
location,
n_splits):
- train_dataframe_filepath = '.'.join([train_filepath_prefix,
- location + ".tsv"])
- df = pd.read_csv(train_dataframe_filepath, **KWARGS_READ_CSV)
+ df = pd.read_csv('.'.join([train_filepath_prefix,
+ "{l}.tsv".format(l=location)]),
+ **KWARGS_READ_CSV)
train_index = df.index
for n_iter, (_, test_index) in enumerate(KFold(n_splits=n_splits).split(train_index)):
|
47b751c5578d2419eaf1a7bb90c53b46eea80c9f
|
objectcube/settings.py
|
objectcube/settings.py
|
import os
# Database configurations.
DB_HOST = os.environ.get('OBJECTCUBE_DB_HOST', 'localhost')
DB_USER = os.environ.get('OBJECTCUBE_DB_USER', os.environ.get('LOGNAME'))
DB_PORT = int(os.environ.get('OBJECTCUBE_DB_PORT', 5432))
DB_DBNAME = os.environ.get('OBJECTCUBE_DB_NAME', os.environ.get('LOGNAME'))
DB_PASSWORD = os.environ.get('OBJECTCUBE_DB_PASSWORD',
os.environ.get('LOGNAME'))
# Concept service configuration.
FACTORY_CONFIG = {
'TagService': 'objectcube.services.impl.postgresql.tag.'
'TagService',
'DimensionService': 'objectcube.services.impl.postgresql.dimension.'
'DimensionService',
'ObjectService': 'objectcube.services.impl.postgresql.object_service.'
'ObjectService',
'BlobService': 'objectcube.services.impl.filesystem.'
'blob_service.FileBlobService',
'ConceptService': 'objectcube.services.impl.postgresql.concept.'
'ConceptService',
'PluginService': 'objectcube.services.impl.postgresql.plugin.'
'PluginService',
'TaggingService': 'objectcube.services.impl.postgresql.tagging.'
'TaggingService',
}
PLUGINS = (
'objectcube.plugin.exif.ExifPlugin'
)
|
import os
# Database configurations.
DB_HOST = os.environ.get('OBJECTCUBE_DB_HOST', 'localhost')
DB_USER = os.environ.get('OBJECTCUBE_DB_USER', os.environ.get('LOGNAME'))
DB_PORT = int(os.environ.get('OBJECTCUBE_DB_PORT', 5432))
DB_DBNAME = os.environ.get('OBJECTCUBE_DB_NAME', os.environ.get('LOGNAME'))
DB_PASSWORD = os.environ.get('OBJECTCUBE_DB_PASSWORD',
os.environ.get('LOGNAME'))
# Concept service configuration.
FACTORY_CONFIG = {
'TagService': 'objectcube.services.impl.postgresql.tag.'
'TagService',
'DimensionService': 'objectcube.services.impl.postgresql.dimension.'
'DimensionService',
'ObjectService': 'objectcube.services.impl.postgresql.object.'
'ObjectService',
'BlobService': 'objectcube.services.impl.filesystem.'
'blob_service.FileBlobService',
'ConceptService': 'objectcube.services.impl.postgresql.concept.'
'ConceptService',
'PluginService': 'objectcube.services.impl.postgresql.plugin.'
'PluginService',
'TaggingService': 'objectcube.services.impl.postgresql.tagging.'
'TaggingService',
}
PLUGINS = (
'objectcube.plugin.exif.ExifPlugin'
)
|
Correct the Object service path after the rename of object_service.py to object.py
|
Correct the Object service path after the rename of object_service.py to object.py
|
Python
|
bsd-2-clause
|
rudatalab/python-objectcube,rudatalab/python-objectcube,rudatalab/python-objectcube
|
---
+++
@@ -16,7 +16,7 @@
'DimensionService': 'objectcube.services.impl.postgresql.dimension.'
'DimensionService',
- 'ObjectService': 'objectcube.services.impl.postgresql.object_service.'
+ 'ObjectService': 'objectcube.services.impl.postgresql.object.'
'ObjectService',
'BlobService': 'objectcube.services.impl.filesystem.'
|
f081906482bf080363dd494a6ab0ca6ed63b49f5
|
loremipsum/tests/plugs_testpackage/plugin.py
|
loremipsum/tests/plugs_testpackage/plugin.py
|
"""Test plugin."""
def load(*args, **kwargs):
pass
def dump(*args, **kwargs):
pass
def plugin():
import sys
return (__name__.split('.')[-1], sys.modules.get(__name__))
|
"""Test plugin.
def load(*args, **kwargs): pass
def dump(*args, **kwargs): pass
def plugin():
return (__name__.split('.')[-1], sys.modules.get(__name__))
"""
|
Put useless module functions into the docstring
|
Put useless module functions into the docstring
|
Python
|
bsd-3-clause
|
monkeython/loremipsum
|
---
+++
@@ -1,14 +1,13 @@
-"""Test plugin."""
+"""Test plugin.
-def load(*args, **kwargs):
- pass
+def load(*args, **kwargs): pass
-def dump(*args, **kwargs):
- pass
+def dump(*args, **kwargs): pass
def plugin():
- import sys
return (__name__.split('.')[-1], sys.modules.get(__name__))
+
+"""
|
28252bbc3c5f784e5f6267788a7f4196473d7292
|
tests/conftest.py
|
tests/conftest.py
|
# -*- coding: utf-8 -*-
import pytest
from cheetah_lint import five
@pytest.yield_fixture(autouse=True)
def no_warnings(recwarn):
yield
ret = len(tuple(
warning for warning in recwarn
# cheetah raises this warning when compiling a trivial file
if not (
isinstance(warning.message, UserWarning) and
five.text(warning.message) == (
'You supplied an empty string for the source!'
)
)
))
assert ret == 0
|
# -*- coding: utf-8 -*-
import pytest
from cheetah_lint import five
@pytest.fixture(autouse=True)
def no_warnings(recwarn):
yield
ret = len(tuple(
warning for warning in recwarn
# cheetah raises this warning when compiling a trivial file
if not (
isinstance(warning.message, UserWarning) and
five.text(warning.message) == (
'You supplied an empty string for the source!'
)
)
))
assert ret == 0
|
Replace deprecated yield_fixture with fixture
|
Replace deprecated yield_fixture with fixture
Committed via https://github.com/asottile/all-repos
|
Python
|
mit
|
asottile/cheetah_lint
|
---
+++
@@ -4,7 +4,7 @@
from cheetah_lint import five
-@pytest.yield_fixture(autouse=True)
+@pytest.fixture(autouse=True)
def no_warnings(recwarn):
yield
ret = len(tuple(
|
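Context for the one-line change above: since pytest 3.0 a plain @pytest.fixture may contain yield, so yield_fixture became redundant and was later deprecated. A minimal fixture with the same setup/teardown shape (assertion placement is illustrative).

import pytest

@pytest.fixture(autouse=True)
def no_warnings(recwarn):
    yield                         # the test body runs here
    assert not list(recwarn)      # teardown: fail if any warning was recorded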
2551eb35f2d5c5b95952b40c2583468a8deb5565
|
pylib/djangoproj/binalerts/tests.py
|
pylib/djangoproj/binalerts/tests.py
|
"""
Integration-style tests for binalerts. These tests think of things from the web
frontend point of view. They are designed to make sure the application behaves
as required to the user.
"""
# Various tips on testing forms:
# http://stackoverflow.com/questions/2257958/django-unit-testing-for-form-edit
from django.test import TestCase
from django.test import Client
class FrontPageTest(TestCase):
def setUp(self):
self.c = Client()
def test_frontpage_asks_for_postcode(self):
response = self.c.get('/')
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'Please enter your postcode')
self.assertContains(response, '<input type="text" name="postcode" id="id_postcode" />')
self.assertContains(response, '<input type="submit" value="Go" />')
self.assertEqual(response.template.name, 'binalerts/frontpage.html')
# Example doctest in case we need it later
__test__ = {"doctest": """
Another way to test that 1 + 1 is equal to 2.
>>> 1 + 1 == 2
True
"""}
|
"""
Integration-style tests for binalerts. These tests think of things from the web
frontend point of view. They are designed to make sure the application behaves
as required to the user.
"""
# Various tips on testing forms:
# http://stackoverflow.com/questions/2257958/django-unit-testing-for-form-edit
from django.test import TestCase
from django.test import Client
class FrontPageTest(TestCase):
def setUp(self):
self.c = Client()
def test_asks_for_postcode(self):
response = self.c.get('/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.template.name, 'binalerts/frontpage.html')
self.assertContains(response, 'Please enter your postcode')
self.assertContains(response, '<input type="text" name="postcode" id="id_postcode" />')
self.assertContains(response, '<input type="submit" value="Go" />')
def test_error_if_not_postcode(self):
response = self.c.post('/', { 'postcode': 'notapostcode' })
self.assertEqual(response.template.name, 'binalerts/frontpage.html')
self.assertContains(response, 'Sorry')
# Example doctest in case we need it later
__test__ = {"doctest": """
Another way to test that 1 + 1 is equal to 2.
>>> 1 + 1 == 2
True
"""}
|
Test for error if not a postcode.
|
Test for error if not a postcode.
|
Python
|
agpl-3.0
|
mysociety/binalerts,mysociety/binalerts,mysociety/binalerts
|
---
+++
@@ -14,14 +14,21 @@
def setUp(self):
self.c = Client()
- def test_frontpage_asks_for_postcode(self):
+ def test_asks_for_postcode(self):
response = self.c.get('/')
self.assertEqual(response.status_code, 200)
+
+ self.assertEqual(response.template.name, 'binalerts/frontpage.html')
self.assertContains(response, 'Please enter your postcode')
self.assertContains(response, '<input type="text" name="postcode" id="id_postcode" />')
self.assertContains(response, '<input type="submit" value="Go" />')
+
+ def test_error_if_not_postcode(self):
+ response = self.c.post('/', { 'postcode': 'notapostcode' })
+
self.assertEqual(response.template.name, 'binalerts/frontpage.html')
+ self.assertContains(response, 'Sorry')
# Example doctest in case we need it later
|
f87008f6a8c3d4039ab69b558bae17f6ea006fca
|
skcode/__init__.py
|
skcode/__init__.py
|
"""
SkCode (Python implementation of BBcode syntax) parser library.
"""
# Package information
__author__ = "Fabien Batteix (@skywodd)"
__copyright__ = "Copyright 2015, TamiaLab"
__credits__ = ["Fabien Batteix", "TamiaLab"]
__license__ = "GPLv3"
__version__ = "1.0.6"
__maintainer__ = "Fabien Batteix"
__email__ = "fabien.batteix@tamialab.fr"
__status__ = "Development" # "Production"
# User friendly imports
from .treebuilder import parse_skcode
from .render import (render_to_html,
render_to_skcode,
render_to_text)
|
"""
SkCode (Python implementation of BBcode syntax) parser library.
"""
# Package information
__author__ = "Fabien Batteix (@skywodd)"
__copyright__ = "Copyright 2015, TamiaLab"
__credits__ = ["Fabien Batteix", "TamiaLab"]
__license__ = "GPLv3"
__version__ = "1.0.7"
__maintainer__ = "Fabien Batteix"
__email__ = "fabien.batteix@tamialab.fr"
__status__ = "Development" # "Production"
# User friendly imports
from .treebuilder import parse_skcode
from .render import (render_to_html,
render_to_skcode,
render_to_text)
|
Upgrade version from 1.0.6 to 1.0.7
|
Upgrade version from 1.0.6 to 1.0.7
|
Python
|
agpl-3.0
|
TamiaLab/PySkCode
|
---
+++
@@ -7,7 +7,7 @@
__copyright__ = "Copyright 2015, TamiaLab"
__credits__ = ["Fabien Batteix", "TamiaLab"]
__license__ = "GPLv3"
-__version__ = "1.0.6"
+__version__ = "1.0.7"
__maintainer__ = "Fabien Batteix"
__email__ = "fabien.batteix@tamialab.fr"
__status__ = "Development" # "Production"
|
b740490e49b775809cb99b4cf30e3b7cf259d8f6
|
superdesk/io/__init__.py
|
superdesk/io/__init__.py
|
"""Superdesk IO"""
from abc import ABCMeta, abstractmethod
import superdesk
import logging
from superdesk.celery_app import celery
providers = {}
allowed_providers = []
logger = logging.getLogger(__name__)
from .commands.update_ingest import UpdateIngest
from .commands.add_provider import AddProvider # NOQA
def init_app(app):
from .ingest_provider_model import IngestProviderResource
from superdesk.services import BaseService
import superdesk
endpoint_name = 'ingest_providers'
service = BaseService(endpoint_name, backend=superdesk.get_backend())
IngestProviderResource(endpoint_name, app=app, service=service)
def register_provider(type, provider):
providers[type] = provider
allowed_providers.append(type)
superdesk.privilege(name='ingest_providers', label='Ingest Channels', description='User can maintain Ingest Channels.')
@celery.task()
def fetch_ingest():
UpdateIngest().run()
class Parser:
"""
Parent Class for all types of Parsers like News ML 1.2, News ML G2, NITF,...
"""
__metaclass__ = ABCMeta
@abstractmethod
def parse_message(self, xml_doc):
"""
Parses the ingest XML and extracts the relevant elements/attributes values from the XML.
Sub-classes must override.
"""
|
"""Superdesk IO"""
from abc import ABCMeta, abstractmethod
import superdesk
import logging
from superdesk.celery_app import celery
providers = {}
allowed_providers = []
logger = logging.getLogger(__name__)
from .commands.remove_expired_content import RemoveExpiredContent
from .commands.update_ingest import UpdateIngest
from .commands.add_provider import AddProvider # NOQA
def init_app(app):
from .ingest_provider_model import IngestProviderResource
from superdesk.services import BaseService
import superdesk
endpoint_name = 'ingest_providers'
service = BaseService(endpoint_name, backend=superdesk.get_backend())
IngestProviderResource(endpoint_name, app=app, service=service)
def register_provider(type, provider):
providers[type] = provider
allowed_providers.append(type)
superdesk.privilege(name='ingest_providers', label='Ingest Channels', description='User can maintain Ingest Channels.')
@celery.task()
def fetch_ingest():
RemoveExpiredContent().run()
UpdateIngest().run()
class Parser:
"""
Parent Class for all types of Parsers like News ML 1.2, News ML G2, NITF,...
"""
__metaclass__ = ABCMeta
@abstractmethod
def parse_message(self, xml_doc):
"""
Parses the ingest XML and extracts the relevant elements/attributes values from the XML.
Sub-classes must override.
"""
|
Revert "fix(ingest) - disable expired content removal"
|
Revert "fix(ingest) - disable expired content removal"
This reverts commit 281e051344c9fe8e835941117e2d2068ecdabd87.
|
Python
|
agpl-3.0
|
mdhaman/superdesk,akintolga/superdesk-aap,ioanpocol/superdesk-ntb,marwoodandrew/superdesk,marwoodandrew/superdesk-aap,marwoodandrew/superdesk-aap,plamut/superdesk,darconny/superdesk,darconny/superdesk,superdesk/superdesk,amagdas/superdesk,hlmnrmr/superdesk,mdhaman/superdesk-aap,sivakuna-aap/superdesk,liveblog/superdesk,plamut/superdesk,marwoodandrew/superdesk,akintolga/superdesk-aap,marwoodandrew/superdesk,akintolga/superdesk-aap,plamut/superdesk,petrjasek/superdesk-ntb,amagdas/superdesk,Aca-jov/superdesk,superdesk/superdesk-ntb,petrjasek/superdesk,akintolga/superdesk,pavlovicnemanja92/superdesk,petrjasek/superdesk-ntb,Aca-jov/superdesk,gbbr/superdesk,liveblog/superdesk,sivakuna-aap/superdesk,ioanpocol/superdesk-ntb,superdesk/superdesk-ntb,liveblog/superdesk,verifiedpixel/superdesk,pavlovicnemanja/superdesk,petrjasek/superdesk,vied12/superdesk,mdhaman/superdesk-aap,marwoodandrew/superdesk,thnkloud9/superdesk,gbbr/superdesk,pavlovicnemanja92/superdesk,mugurrus/superdesk,ancafarcas/superdesk,superdesk/superdesk-aap,sivakuna-aap/superdesk,verifiedpixel/superdesk,superdesk/superdesk-aap,pavlovicnemanja/superdesk,fritzSF/superdesk,thnkloud9/superdesk,hlmnrmr/superdesk,sjunaid/superdesk,ioanpocol/superdesk-ntb,Aca-jov/superdesk,ioanpocol/superdesk,hlmnrmr/superdesk,liveblog/superdesk,gbbr/superdesk,verifiedpixel/superdesk,thnkloud9/superdesk,superdesk/superdesk,akintolga/superdesk-aap,petrjasek/superdesk-ntb,pavlovicnemanja/superdesk,akintolga/superdesk,ancafarcas/superdesk,marwoodandrew/superdesk-aap,sjunaid/superdesk,superdesk/superdesk,marwoodandrew/superdesk,akintolga/superdesk,ancafarcas/superdesk,vied12/superdesk,fritzSF/superdesk,fritzSF/superdesk,vied12/superdesk,petrjasek/superdesk-server,pavlovicnemanja/superdesk,mdhaman/superdesk,vied12/superdesk,amagdas/superdesk,mdhaman/superdesk,akintolga/superdesk,mugurrus/superdesk,pavlovicnemanja92/superdesk,verifiedpixel/superdesk,superdesk/superdesk-ntb,superdesk/superdesk,pavlovicnemanja92/superdesk,pavlovicnemanja92/superdesk,mdhaman/superdesk-aap,akintolga/superdesk,liveblog/superdesk,sivakuna-aap/superdesk,superdesk/superdesk-ntb,amagdas/superdesk,superdesk/superdesk-aap,petrjasek/superdesk-server,plamut/superdesk,marwoodandrew/superdesk-aap,fritzSF/superdesk,plamut/superdesk,sjunaid/superdesk,sivakuna-aap/superdesk,ioanpocol/superdesk,superdesk/superdesk-aap,ioanpocol/superdesk,darconny/superdesk,petrjasek/superdesk,petrjasek/superdesk-ntb,vied12/superdesk,petrjasek/superdesk,verifiedpixel/superdesk,mdhaman/superdesk-aap,mugurrus/superdesk,amagdas/superdesk,fritzSF/superdesk
|
---
+++
@@ -10,6 +10,7 @@
allowed_providers = []
logger = logging.getLogger(__name__)
+from .commands.remove_expired_content import RemoveExpiredContent
from .commands.update_ingest import UpdateIngest
from .commands.add_provider import AddProvider # NOQA
@@ -33,6 +34,7 @@
@celery.task()
def fetch_ingest():
+ RemoveExpiredContent().run()
UpdateIngest().run()
|
23075e994d081a90a1b3ed48b7e30b82c4614854
|
tests/test_acf.py
|
tests/test_acf.py
|
import pytest
from steamfiles import acf
@pytest.yield_fixture
def acf_data():
with open('tests/test_data/appmanifest_202970.acf', 'rt') as f:
yield f.read()
@pytest.mark.usefixtures('acf_data')
def test_loads_dumps(acf_data):
assert acf.dumps(acf.loads(acf_data)) == acf_data
|
import io
import pytest
from steamfiles import acf
test_file_name = 'tests/test_data/appmanifest_202970.acf'
@pytest.yield_fixture
def acf_data():
with open(test_file_name, 'rt') as f:
yield f.read()
@pytest.mark.usefixtures('acf_data')
def test_loads_dumps(acf_data):
assert acf.dumps(acf.loads(acf_data)) == acf_data
@pytest.mark.usefixtures('acf_data')
def test_load_dump(acf_data):
with open(test_file_name, 'rt') as in_file:
out_file = io.StringIO()
obj = acf.load(in_file)
acf.dump(out_file, obj)
# Rewind to the beginning
out_file.seek(0)
assert out_file.read() == acf_data
|
Add more tests for ACF format
|
Add more tests for ACF format
|
Python
|
mit
|
leovp/steamfiles
|
---
+++
@@ -1,13 +1,28 @@
+import io
import pytest
from steamfiles import acf
+
+test_file_name = 'tests/test_data/appmanifest_202970.acf'
@pytest.yield_fixture
def acf_data():
- with open('tests/test_data/appmanifest_202970.acf', 'rt') as f:
+ with open(test_file_name, 'rt') as f:
yield f.read()
@pytest.mark.usefixtures('acf_data')
def test_loads_dumps(acf_data):
assert acf.dumps(acf.loads(acf_data)) == acf_data
+
+
+@pytest.mark.usefixtures('acf_data')
+def test_load_dump(acf_data):
+ with open(test_file_name, 'rt') as in_file:
+ out_file = io.StringIO()
+ obj = acf.load(in_file)
+ acf.dump(out_file, obj)
+
+ # Rewind to the beginning
+ out_file.seek(0)
+ assert out_file.read() == acf_data
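The two tests above pin down that the string API and the file API agree with each other. A hedged generalization of the same round-trip check, mirroring the acf.load/acf.dump call order shown in the test (the helper name is illustrative, not repo code):

import io

from steamfiles import acf

def assert_roundtrip(path):
    with open(path, 'rt') as f:
        text = f.read()
    # String API: parse then serialize must reproduce the input exactly.
    assert acf.dumps(acf.loads(text)) == text
    # File API: dump into an in-memory buffer and compare with the original.
    out_file = io.StringIO()
    with open(path, 'rt') as f:
        acf.dump(out_file, acf.load(f))
    out_file.seek(0)
    assert out_file.read() == text

# assert_roundtrip(test_file_name)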
|
3f57221af38a25dceb9d1024c225481ec2f49328
|
parchment/views.py
|
parchment/views.py
|
from django.conf import settings
from django.views.generic import FormView
from .crypto import Parchment
from .forms import ParchmentForm
class ParchmentView(FormView):
form_class = ParchmentForm
template_name = 'parchment/login.html'
def get_initial(self):
sso_key = getattr(settings, 'PARCHMENT_SSO_KEY')
p = Parchment(sso_key)
return {'parch5': p.encrypt('test string'),
'parchiv': p.iv}
|
from urllib import urlencode
from django.conf import settings
from django.views.generic import FormView
from .crypto import Parchment
from .forms import ParchmentForm
class ParchmentView(FormView):
form_class = ParchmentForm
template_name = 'parchment/login.html'
connect_variables = {}
def get(self, request, *args, **kwargs):
for k, v in request.GET.items():
self.connect_variables[k] = v
return super(ParchmentView, self).get(request, *args, **kwargs)
def get_initial(self):
sso_key = getattr(settings, 'PARCHMENT_SSO_KEY')
p = Parchment(sso_key)
return {'parch5': p.encrypt(urlencode(self.connect_variables)),
'parchiv': p.iv}
|
Encrypt all provided GET parameters
|
Encrypt all provided GET parameters
|
Python
|
bsd-3-clause
|
jbittel/django-parchment,jbittel/django-parchment
|
---
+++
@@ -1,3 +1,5 @@
+from urllib import urlencode
+
from django.conf import settings
from django.views.generic import FormView
@@ -8,9 +10,15 @@
class ParchmentView(FormView):
form_class = ParchmentForm
template_name = 'parchment/login.html'
+ connect_variables = {}
+
+ def get(self, request, *args, **kwargs):
+ for k, v in request.GET.items():
+ self.connect_variables[k] = v
+ return super(ParchmentView, self).get(request, *args, **kwargs)
def get_initial(self):
sso_key = getattr(settings, 'PARCHMENT_SSO_KEY')
p = Parchment(sso_key)
- return {'parch5': p.encrypt('test string'),
+ return {'parch5': p.encrypt(urlencode(self.connect_variables)),
'parchiv': p.iv}
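One caveat with the change above: `connect_variables = {}` is a class attribute, so every request served by the same process shares and mutates one dict, and keys from one request can leak into the next. A per-request sketch of the same idea (the class name here is hypothetical):

from django.views.generic import FormView

from .forms import ParchmentForm

class PerRequestParchmentView(FormView):
    form_class = ParchmentForm
    template_name = 'parchment/login.html'

    def get(self, request, *args, **kwargs):
        # Build a fresh dict for each request instead of mutating shared state.
        self.connect_variables = dict(request.GET.items())
        return super(PerRequestParchmentView, self).get(request, *args, **kwargs)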
|
93ebb6982851a710ff17c856059b1368bed24168
|
server.py
|
server.py
|
import flask
app = flask.Flask(__name__)
@app.route('/')
def index():
return flask.jsonify(hello='world')
if __name__ == '__main__':
app.run(debug=True)
|
import flask
app = flask.Flask(__name__)
def make_tour():
tour = {
'id': 1,
'name': 'Test Tour',
'route': [
{
'description': 'This is a description of this place.',
'photos': ['photo1.jpg', 'photo2.jpg'],
'coordinate': (3, 4),
}, {
'coordinate': (2, 3),
}, {
'coordinate': (4, 1)
}
]
}
return tour
@app.route('/')
def index():
return flask.jsonify(hello='world')
@app.route('/tours')
def tours():
tour_lst = [make_tour()]
return flask.jsonify(tours=tour_lst)
if __name__ == '__main__':
app.run(debug=True)
|
Add /tours endpoint with dummy data
|
Add /tours endpoint with dummy data
|
Python
|
mit
|
wtg/RPI_Tours_Server
|
---
+++
@@ -1,10 +1,33 @@
import flask
app = flask.Flask(__name__)
+
+def make_tour():
+ tour = {
+ 'id': 1,
+ 'name': 'Test Tour',
+ 'route': [
+ {
+ 'description': 'This is a description of this place.',
+ 'photos': ['photo1.jpg', 'photo2.jpg'],
+ 'coordinate': (3, 4),
+ }, {
+ 'coordinate': (2, 3),
+ }, {
+ 'coordinate': (4, 1)
+ }
+ ]
+ }
+ return tour
@app.route('/')
def index():
return flask.jsonify(hello='world')
+@app.route('/tours')
+def tours():
+ tour_lst = [make_tour()]
+ return flask.jsonify(tours=tour_lst)
+
if __name__ == '__main__':
app.run(debug=True)
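The endpoint is easy to smoke-test with Flask's built-in test client; a minimal sketch assuming the `app` defined above (note that JSON serialization turns the coordinate tuples into lists):

import json

def test_tours():
    client = app.test_client()
    response = client.get('/tours')
    payload = json.loads(response.data)
    assert response.status_code == 200
    assert payload['tours'][0]['name'] == 'Test Tour'
    assert payload['tours'][0]['route'][0]['coordinate'] == [3, 4]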
|
0f2ccc881e8d2b8b0f4064e3e1fae39b14875821
|
tortilla/utils.py
|
tortilla/utils.py
|
# -*- coding: utf-8 -*-
import six
from formats import FormatBank, discover_json, discover_yaml
formats = FormatBank()
discover_json(formats, content_type='application/json')
discover_yaml(formats, content_type='application/x-yaml')
def run_from_ipython():
try:
__IPYTHON__
return True
except NameError:
return False
class Bunch(dict):
def __init__(self, kwargs=None):
if kwargs is None:
kwargs = {}
for key, value in six.iteritems(kwargs):
kwargs[key] = bunchify(value)
super().__init__(kwargs)
self.__dict__ = self
def bunchify(obj):
if isinstance(obj, (list, tuple)):
return [bunchify(item) for item in obj]
if isinstance(obj, dict):
return Bunch(obj)
return obj
|
# -*- coding: utf-8 -*-
import six
from formats import FormatBank, discover_json, discover_yaml
formats = FormatBank()
discover_json(formats, content_type='application/json')
discover_yaml(formats, content_type='application/x-yaml')
def run_from_ipython():
return getattr(__builtins__, "__IPYTHON__", False)
class Bunch(dict):
def __init__(self, kwargs=None):
if kwargs is None:
kwargs = {}
for key, value in six.iteritems(kwargs):
kwargs[key] = bunchify(value)
super().__init__(kwargs)
self.__dict__ = self
def bunchify(obj):
if isinstance(obj, (list, tuple)):
return [bunchify(item) for item in obj]
if isinstance(obj, dict):
return Bunch(obj)
return obj
|
Refactor run_from_ipython() implementation to make it pass static code analysis test
|
Refactor run_from_ipython() implementation to make it pass static code analysis test
|
Python
|
mit
|
redodo/tortilla
|
---
+++
@@ -12,11 +12,7 @@
def run_from_ipython():
- try:
- __IPYTHON__
- return True
- except NameError:
- return False
+ return getattr(__builtins__, "__IPYTHON__", False)
class Bunch(dict):
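A hedged caveat on the one-liner: in CPython, `__builtins__` is the builtins module inside `__main__` but a plain dict inside imported modules, so `getattr` on it is not reliable everywhere. Importing the builtins module explicitly avoids the ambiguity:

try:
    import builtins  # Python 3
except ImportError:
    import __builtin__ as builtins  # Python 2

def run_from_ipython():
    # IPython injects __IPYTHON__ into the builtins namespace when active.
    return getattr(builtins, '__IPYTHON__', False)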
|
45d442cfe9c737332ca75e68e1488667937015ed
|
src/repository/models.py
|
src/repository/models.py
|
from django.db import models
import git, os
class Github (models.Model):
username = models.CharField(max_length=39)
repository = models.CharField(max_length=100)
def __str__(self):
return self.repository
def clone_repository(self):
DIR_NAME = self.repository
REMOTE_URL = "https://github.com/{0}/{1}.git".format(self.username, self.repository)
os.mkdir(DIR_NAME)
repo = git.Repo.init(DIR_NAME)
origin = repo.create_remote('origin', REMOTE_URL)
origin.fetch()
origin.pull(origin.refs[0].remote_head)
def save(self, *args, **kwargs):
self.clone_repository()
super(Github, self).save(*args, **kwargs)
class Meta:
verbose_name_plural = "projects"
|
from django.db import models
from django.conf import settings
import git, os
class Github (models.Model):
username = models.CharField(max_length=39)
repository = models.CharField(max_length=100)
def __str__(self):
return self.repository
def clone_repository(self):
DIR_NAME = os.path.join(settings.PLAYBOOK_DIR, self.repository)
REMOTE_URL = "https://github.com/{0}/{1}.git".format(self.username, self.repository)
os.mkdir(os.path.join(DIR_NAME))
repo = git.Repo.init(DIR_NAME)
origin = repo.create_remote('origin', REMOTE_URL)
origin.fetch()
origin.pull(origin.refs[0].remote_head)
def save(self, *args, **kwargs):
self.clone_repository()
super(Github, self).save(*args, **kwargs)
class Meta:
verbose_name_plural = "projects"
|
Clone repository to playbooks directory
|
Clone repository to playbooks directory
|
Python
|
bsd-3-clause
|
lozadaOmr/ansible-admin,lozadaOmr/ansible-admin,lozadaOmr/ansible-admin
|
---
+++
@@ -1,4 +1,5 @@
from django.db import models
+from django.conf import settings
import git, os
class Github (models.Model):
@@ -9,10 +10,10 @@
return self.repository
def clone_repository(self):
- DIR_NAME = self.repository
+ DIR_NAME = os.path.join(settings.PLAYBOOK_DIR, self.repository)
REMOTE_URL = "https://github.com/{0}/{1}.git".format(self.username, self.repository)
- os.mkdir(DIR_NAME)
+ os.mkdir(os.path.join(DIR_NAME))
repo = git.Repo.init(DIR_NAME)
origin = repo.create_remote('origin', REMOTE_URL)
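As an aside, GitPython can collapse the mkdir/init/create_remote/fetch/pull sequence above into a single call; a sketch under the same settings.PLAYBOOK_DIR assumption:

import os

import git
from django.conf import settings

def clone_repository(username, repository):
    remote_url = "https://github.com/{0}/{1}.git".format(username, repository)
    target = os.path.join(settings.PLAYBOOK_DIR, repository)
    # clone_from creates the target directory and checks out the default branch.
    return git.Repo.clone_from(remote_url, target)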
|
ca43660869bbd390979a928dc219e016c1a0607a
|
api/route_settings.py
|
api/route_settings.py
|
import json
import falcon
import models
import schemas
import api_util
settings_schema = schemas.SettingSchema(many=True)
setting_schema = schemas.SettingSchema()
class SettingsResource:
def on_get(self, req, resp):
settings = models.Setting.select()
settings_dict = {}
for setting in settings:
settings_dict[setting.key] = setting.value
resp.body = api_util.json_dump({'settings': settings_dict})
def on_put(self, req, resp):
data = api_util.json_load(req.stream.read().decode('utf-8'))
# ensure that this is a dict
if not isinstance(data, dict):
raise falcon.HTTPBadRequest(None, 'Provide a dictionary')
# modify each listed setting
for setting_key, setting_value in data.items():
try:
setting = models.Setting.get(key=setting_key)
except models.Setting.DoesNotExist:
resp.body = json.dumps({'message': f'Setting {setting_key} does not exist'})
raise falcon.HTTPNotFound()
setting.value = setting_value
setting.save()
resp.body = api_util.json_dump(data)
|
import json
import falcon
import models
import schemas
import api_util
class SettingsResource:
def on_get(self, req, resp):
settings = models.Setting.select()
settings_dict = {}
for setting in settings:
settings_dict[setting.key] = setting.value
resp.body = api_util.json_dump({'settings': settings_dict})
def on_put(self, req, resp):
data = api_util.json_load(req.stream.read().decode('utf-8'))
# ensure that this is a dict
if not isinstance(data, dict):
raise falcon.HTTPBadRequest(None, 'Provide a dictionary')
# modify each listed setting
for setting_key, setting_value in data.items():
try:
setting = models.Setting.get(key=setting_key)
except models.Setting.DoesNotExist:
resp.body = json.dumps({'message': f'Setting {setting_key} does not exist'})
raise falcon.HTTPNotFound()
setting.value = setting_value
setting.save()
resp.body = api_util.json_dump(data)
|
Remove reference to now-deleted SettingSchema
|
Remove reference to now-deleted SettingSchema
|
Python
|
mit
|
thepoly/Pipeline,thepoly/Pipeline,thepoly/Pipeline,thepoly/Pipeline,thepoly/Pipeline
|
---
+++
@@ -5,10 +5,6 @@
import models
import schemas
import api_util
-
-
-settings_schema = schemas.SettingSchema(many=True)
-setting_schema = schemas.SettingSchema()
class SettingsResource:
|
b86bcfd1f1762cbb956b3eb42b515107249cb66e
|
production_settings.py
|
production_settings.py
|
from default_settings import *
import dj_database_url
DATABASES = {
'default': dj_database_url.config(),
}
SECRET_KEY = os.environ['SECRET_KEY']
STATICFILES_STORAGE = 's3storage.S3HashedFilesStorage'
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY', '')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_KEY', '')
AWS_STORAGE_BUCKET_NAME = os.environ.get('AWS_BUCKET_NAME', '')
STATIC_URL = 'https://s3.amazonaws.com/%s/' % AWS_STORAGE_BUCKET_NAME
SENTRY_DSN = os.environ.get('SENTRY_DSN', '')
# Run the site over SSL
MIDDLEWARE_CLASSES = (
'sslify.middleware.SSLifyMiddleware',
) + MIDDLEWARE_CLASSES
SESSION_COOKIE_SECURE = True
SESSION_COOKIE_HTTPONLY = True
CSRF_COOKIE_SECURE = True
|
from default_settings import *
import dj_database_url
DATABASES = {
'default': dj_database_url.config(),
}
SECRET_KEY = os.environ['SECRET_KEY']
STATICFILES_STORAGE = 's3storage.S3HashedFilesStorage'
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY', '')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_KEY', '')
AWS_STORAGE_BUCKET_NAME = os.environ.get('AWS_BUCKET_NAME', '')
AWS_QUERYSTRING_AUTH = False
STATIC_URL = 'https://s3.amazonaws.com/%s/' % AWS_STORAGE_BUCKET_NAME
SENTRY_DSN = os.environ.get('SENTRY_DSN', '')
# Run the site over SSL
MIDDLEWARE_CLASSES = (
'sslify.middleware.SSLifyMiddleware',
) + MIDDLEWARE_CLASSES
SESSION_COOKIE_SECURE = True
SESSION_COOKIE_HTTPONLY = True
CSRF_COOKIE_SECURE = True
|
Disable querystring auth for s3
|
Disable querystring auth for s3
|
Python
|
mit
|
brutasse/djangopeople,brutasse/djangopeople,django/djangopeople,django/djangopeople,django/djangopeople,brutasse/djangopeople,polinom/djangopeople,polinom/djangopeople,brutasse/djangopeople,polinom/djangopeople,polinom/djangopeople
|
---
+++
@@ -13,6 +13,7 @@
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY', '')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_KEY', '')
AWS_STORAGE_BUCKET_NAME = os.environ.get('AWS_BUCKET_NAME', '')
+AWS_QUERYSTRING_AUTH = False
STATIC_URL = 'https://s3.amazonaws.com/%s/' % AWS_STORAGE_BUCKET_NAME
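For context, AWS_QUERYSTRING_AUTH controls whether django-storages emits signed, expiring URLs; public static files are better served as plain, cacheable URLs. A hedged boto3 illustration of the two URL styles (bucket and key names are placeholders):

import boto3

s3 = boto3.client('s3')
bucket, key = 'example-bucket', 'css/site.css'

# Querystring auth on: a signed URL that expires and varies per request.
signed_url = s3.generate_presigned_url(
    'get_object', Params={'Bucket': bucket, 'Key': key}, ExpiresIn=3600)

# Querystring auth off: a stable public URL that proxies and CDNs can cache.
plain_url = 'https://s3.amazonaws.com/%s/%s' % (bucket, key)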
|
51a2ac2d66d4245626f3d8830bb47f596b3d9879
|
app/minify.py
|
app/minify.py
|
# Python 3
import glob
import os
uglifyjs = os.path.abspath("../lib/uglifyjs")
input_dir = os.path.abspath("./Resources/Tracker/scripts")
output_dir = os.path.abspath("./Resources/Tracker/scripts.min")
for file in glob.glob(input_dir + "/*.js"):
name = os.path.basename(file)
print("Minifying {0}...".format(name))
os.system("{0} {1} -o {2}/{3}".format(uglifyjs, file, output_dir, name))
print("Done!")
|
#!/usr/bin/env python3
import glob
import os
import shutil
import sys
if os.name == "nt":
uglifyjs = os.path.abspath("../lib/uglifyjs.cmd")
else:
uglifyjs = "uglifyjs"
if shutil.which(uglifyjs) is None:
print("Cannot find executable: {0}".format(uglifyjs))
sys.exit(1)
input_dir = os.path.abspath("./Resources/Tracker/scripts")
output_dir = os.path.abspath("./Resources/Tracker/scripts.min")
for file in glob.glob(input_dir + "/*.js"):
name = os.path.basename(file)
print("Minifying {0}...".format(name))
os.system("{0} {1} -o {2}/{3}".format(uglifyjs, file, output_dir, name))
print("Done!")
|
Fix app minification script on non-Windows systems
|
Fix app minification script on non-Windows systems
|
Python
|
mit
|
chylex/Discord-History-Tracker,chylex/Discord-History-Tracker,chylex/Discord-History-Tracker,chylex/Discord-History-Tracker,chylex/Discord-History-Tracker,chylex/Discord-History-Tracker
|
---
+++
@@ -1,9 +1,19 @@
-# Python 3
+#!/usr/bin/env python3
import glob
import os
+import shutil
+import sys
-uglifyjs = os.path.abspath("../lib/uglifyjs")
+if os.name == "nt":
+ uglifyjs = os.path.abspath("../lib/uglifyjs.cmd")
+else:
+ uglifyjs = "uglifyjs"
+
+if shutil.which(uglifyjs) is None:
+ print("Cannot find executable: {0}".format(uglifyjs))
+ sys.exit(1)
+
input_dir = os.path.abspath("./Resources/Tracker/scripts")
output_dir = os.path.abspath("./Resources/Tracker/scripts.min")
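On Python 3.5+, subprocess.run with an argument list would tighten this further: it avoids the shell-quoting pitfalls of os.system and surfaces a non-zero exit status. A hedged sketch:

import subprocess

def minify(uglifyjs, src, dst):
    # No shell involved, so paths with spaces are safe; check=True raises
    # CalledProcessError if uglifyjs exits non-zero.
    subprocess.run([uglifyjs, src, "-o", dst], check=True)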
|
c3c8c566c8294715b07614c1d18a2c6de3a7c212
|
app/models.py
|
app/models.py
|
from app import db
class User(db.Model):
__tablename__ = 'users'
username = db.Column(db.String(64), nullable=False, unique=True, primary_key=True)
password = db.Column(db.String(192), nullable=False)
def __init__(self, username, password):
self.username = username
self.email = password
def __repr__(self):
return 'The users name is: %r' % self.name
class Patient(db.Model):
__tablename__ = 'patients'
# Used to determine which nurse triaged a patient.
# clientname = db.Column(db.String(64), db.ForeignKey('users.username'))
mobile = db.Column(db.Integer, unique=True, primary_key=True)
forename = db.Column(db.String(64), nullable=False)
surname = db.Column(db.String(64), nullable=False)
dob = db.Column(db.Date)
def __init__(self, mobile, forename, surname, dob):
self.mobile = mobile
self.forename = forename
self.surname = surname
self.dob = dob
def __repr__(self):
return 'The mobile number and name are: %r, %r %r' % (self.mobile, self.forename, self.surname)
|
from app import db
class User(db.Model):
__tablename__ = 'users'
username = db.Column(db.String(64), nullable=False, unique=True, primary_key=True)
password = db.Column(db.String(192), nullable=False)
def __init__(self, username, password):
self.username = username
self.password = password
def __repr__(self):
return 'The users name is: %r' % self.username
class Patient(db.Model):
__tablename__ = 'patients'
# Used to determine which nurse triaged a patient.
# clientname = db.Column(db.String(64), db.ForeignKey('users.username'))
mobile = db.Column(db.Integer, unique=True, primary_key=True)
forename = db.Column(db.String(64), nullable=False)
surname = db.Column(db.String(64), nullable=False)
dob = db.Column(db.Date)
def __init__(self, mobile, forename, surname, dob):
self.mobile = mobile
self.forename = forename
self.surname = surname
self.dob = dob
def __repr__(self):
return 'The mobile number and name are: %r, %r %r' % (self.mobile, self.forename, self.surname)
|
Fix variable names in model.
|
Fix variable names in model.
|
Python
|
mit
|
jawrainey/atc,jawrainey/atc
|
---
+++
@@ -9,10 +9,10 @@
def __init__(self, username, password):
self.username = username
- self.email = password
+ self.password = password
def __repr__(self):
- return 'The users name is: %r' % self.name
+ return 'The users name is: %r' % self.username
class Patient(db.Model):
|
105d46937babb7a43901d8238fb9cc0a7b00c8c9
|
lyman/tools/commandline.py
|
lyman/tools/commandline.py
|
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("-subjects", nargs="*", dest="subjects",
help=("list of subject ids, name of file in lyman "
"directory, or full path to text file with "
"subject ids"))
parser.add_argument("-plugin", default="multiproc",
choices=["linear", "multiproc",
"ipython", "torque", "sge"],
                    help="workflow execution plugin")
parser.add_argument("-nprocs", default=4, type=int,
help="number of MultiProc processes to use")
parser.add_argument("-queue", help="which queue for PBS/SGE execution")
parser.add_argument("-dontrun", action="store_true",
help="don't actually execute the workflows")
|
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("-subjects", nargs="*", dest="subjects",
help=("list of subject ids, name of file in lyman "
"directory, or full path to text file with "
"subject ids"))
parser.add_argument("-plugin", default="multiproc",
choices=["linear", "multiproc", "ipython",
"torque", "sge", "slurm"],
                    help="workflow execution plugin")
parser.add_argument("-nprocs", default=4, type=int,
help="number of MultiProc processes to use")
parser.add_argument("-queue", help="which queue for PBS/SGE execution")
parser.add_argument("-dontrun", action="store_true",
help="don't actually execute the workflows")
|
Add slurm to command line plugin choices
|
Add slurm to command line plugin choices
|
Python
|
bsd-3-clause
|
mwaskom/lyman,tuqc/lyman,kastman/lyman
|
---
+++
@@ -7,8 +7,8 @@
"directory, or full path to text file with "
"subject ids"))
parser.add_argument("-plugin", default="multiproc",
- choices=["linear", "multiproc",
- "ipython", "torque", "sge"],
+ choices=["linear", "multiproc", "ipython",
+ "torque", "sge", "slurm"],
                     help="workflow execution plugin")
parser.add_argument("-nprocs", default=4, type=int,
help="number of MultiProc processes to use")
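A standalone reminder of how argparse enforces the extended choices list: values outside it are rejected before any workflow code runs.

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("-plugin", default="multiproc",
                    choices=["linear", "multiproc", "ipython",
                             "torque", "sge", "slurm"])

print(parser.parse_args(["-plugin", "slurm"]).plugin)  # accepted
# parser.parse_args(["-plugin", "pbs"]) exits with "invalid choice: 'pbs'"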
|
f3c95f9875c59564faff909b0cf5a8869515b1f3
|
readthedocs/rtd_tests/tests/__init__.py
|
readthedocs/rtd_tests/tests/__init__.py
|
from test_api import *
from view_tests import *
from test_doc_building import *
from test_backend import *
|
from test_api import *
#from view_tests import *
from test_doc_building import *
from test_backend import *
|
Kill the view tests for now, to get them greeeeeen
|
Kill the view tests for now, to get them greeeeeen
|
Python
|
mit
|
agjohnson/readthedocs.org,kdkeyser/readthedocs.org,laplaceliu/readthedocs.org,LukasBoersma/readthedocs.org,fujita-shintaro/readthedocs.org,davidfischer/readthedocs.org,cgourlay/readthedocs.org,stevepiercy/readthedocs.org,soulshake/readthedocs.org,hach-que/readthedocs.org,SteveViss/readthedocs.org,raven47git/readthedocs.org,atsuyim/readthedocs.org,singingwolfboy/readthedocs.org,fujita-shintaro/readthedocs.org,VishvajitP/readthedocs.org,agjohnson/readthedocs.org,SteveViss/readthedocs.org,atsuyim/readthedocs.org,dirn/readthedocs.org,pombredanne/readthedocs.org,jerel/readthedocs.org,GovReady/readthedocs.org,takluyver/readthedocs.org,emawind84/readthedocs.org,davidfischer/readthedocs.org,ojii/readthedocs.org,sunnyzwh/readthedocs.org,nyergler/pythonslides,tddv/readthedocs.org,clarkperkins/readthedocs.org,d0ugal/readthedocs.org,rtfd/readthedocs.org,raven47git/readthedocs.org,titiushko/readthedocs.org,istresearch/readthedocs.org,hach-que/readthedocs.org,kenwang76/readthedocs.org,KamranMackey/readthedocs.org,atsuyim/readthedocs.org,attakei/readthedocs-oauth,SteveViss/readthedocs.org,takluyver/readthedocs.org,sils1297/readthedocs.org,wanghaven/readthedocs.org,d0ugal/readthedocs.org,sunnyzwh/readthedocs.org,GovReady/readthedocs.org,sils1297/readthedocs.org,CedarLogic/readthedocs.org,ojii/readthedocs.org,VishvajitP/readthedocs.org,michaelmcandrew/readthedocs.org,gjtorikian/readthedocs.org,kdkeyser/readthedocs.org,mhils/readthedocs.org,espdev/readthedocs.org,soulshake/readthedocs.org,singingwolfboy/readthedocs.org,wanghaven/readthedocs.org,singingwolfboy/readthedocs.org,mhils/readthedocs.org,laplaceliu/readthedocs.org,Tazer/readthedocs.org,michaelmcandrew/readthedocs.org,techtonik/readthedocs.org,sid-kap/readthedocs.org,emawind84/readthedocs.org,kdkeyser/readthedocs.org,nikolas/readthedocs.org,sid-kap/readthedocs.org,royalwang/readthedocs.org,michaelmcandrew/readthedocs.org,royalwang/readthedocs.org,asampat3090/readthedocs.org,GovReady/readthedocs.org,istresearch/readthedocs.org,attakei/readthedocs-oauth,nyergler/pythonslides,sunnyzwh/readthedocs.org,agjohnson/readthedocs.org,techtonik/readthedocs.org,wijerasa/readthedocs.org,Carreau/readthedocs.org,dirn/readthedocs.org,CedarLogic/readthedocs.org,alex/readthedocs.org,fujita-shintaro/readthedocs.org,attakei/readthedocs-oauth,alex/readthedocs.org,d0ugal/readthedocs.org,wanghaven/readthedocs.org,nikolas/readthedocs.org,agjohnson/readthedocs.org,laplaceliu/readthedocs.org,Carreau/readthedocs.org,istresearch/readthedocs.org,clarkperkins/readthedocs.org,kenshinthebattosai/readthedocs.org,johncosta/private-readthedocs.org,royalwang/readthedocs.org,royalwang/readthedocs.org,KamranMackey/readthedocs.org,jerel/readthedocs.org,gjtorikian/readthedocs.org,Carreau/readthedocs.org,LukasBoersma/readthedocs.org,laplaceliu/readthedocs.org,atsuyim/readthedocs.org,sid-kap/readthedocs.org,LukasBoersma/readthedocs.org,tddv/readthedocs.org,johncosta/private-readthedocs.org,clarkperkins/readthedocs.org,espdev/readthedocs.org,asampat3090/readthedocs.org,kenwang76/readthedocs.org,gjtorikian/readthedocs.org,attakei/readthedocs-oauth,LukasBoersma/readthedocs.org,jerel/readthedocs.org,CedarLogic/readthedocs.org,raven47git/readthedocs.org,rtfd/readthedocs.org,titiushko/readthedocs.org,rtfd/readthedocs.org,espdev/readthedocs.org,hach-que/readthedocs.org,emawind84/readthedocs.org,wijerasa/readthedocs.org,safwanrahman/readthedocs.org,wanghaven/readthedocs.org,kdkeyser/readthedocs.org,raven47git/readthedocs.org,clarkperkins/readthedocs.org,pombredanne/readthedocs.org,Carreau/readthedocs
.org,mhils/readthedocs.org,sunnyzwh/readthedocs.org,alex/readthedocs.org,soulshake/readthedocs.org,mrshoki/readthedocs.org,rtfd/readthedocs.org,KamranMackey/readthedocs.org,cgourlay/readthedocs.org,jerel/readthedocs.org,sils1297/readthedocs.org,ojii/readthedocs.org,kenshinthebattosai/readthedocs.org,sils1297/readthedocs.org,ojii/readthedocs.org,wijerasa/readthedocs.org,fujita-shintaro/readthedocs.org,istresearch/readthedocs.org,CedarLogic/readthedocs.org,singingwolfboy/readthedocs.org,kenwang76/readthedocs.org,titiushko/readthedocs.org,tddv/readthedocs.org,michaelmcandrew/readthedocs.org,kenwang76/readthedocs.org,techtonik/readthedocs.org,stevepiercy/readthedocs.org,Tazer/readthedocs.org,nikolas/readthedocs.org,safwanrahman/readthedocs.org,espdev/readthedocs.org,stevepiercy/readthedocs.org,johncosta/private-readthedocs.org,mrshoki/readthedocs.org,VishvajitP/readthedocs.org,KamranMackey/readthedocs.org,safwanrahman/readthedocs.org,dirn/readthedocs.org,sid-kap/readthedocs.org,mrshoki/readthedocs.org,davidfischer/readthedocs.org,Tazer/readthedocs.org,emawind84/readthedocs.org,cgourlay/readthedocs.org,titiushko/readthedocs.org,gjtorikian/readthedocs.org,takluyver/readthedocs.org,alex/readthedocs.org,nikolas/readthedocs.org,wijerasa/readthedocs.org,dirn/readthedocs.org,asampat3090/readthedocs.org,espdev/readthedocs.org,soulshake/readthedocs.org,kenshinthebattosai/readthedocs.org,takluyver/readthedocs.org,safwanrahman/readthedocs.org,d0ugal/readthedocs.org,pombredanne/readthedocs.org,hach-que/readthedocs.org,stevepiercy/readthedocs.org,mhils/readthedocs.org,SteveViss/readthedocs.org,kenshinthebattosai/readthedocs.org,GovReady/readthedocs.org,davidfischer/readthedocs.org,mrshoki/readthedocs.org,asampat3090/readthedocs.org,cgourlay/readthedocs.org,VishvajitP/readthedocs.org,Tazer/readthedocs.org,nyergler/pythonslides,techtonik/readthedocs.org,nyergler/pythonslides
|
---
+++
@@ -1,4 +1,4 @@
from test_api import *
-from view_tests import *
+#from view_tests import *
from test_doc_building import *
from test_backend import *
|
ac95449e6774538756d7813d73c8b113f9dcb6e6
|
axis/configuration.py
|
axis/configuration.py
|
"""Python library to enable Axis devices to integrate with Home Assistant."""
import requests
from requests.auth import HTTPDigestAuth
class Configuration(object):
"""Device configuration."""
def __init__(self, *,
loop, host, username, password,
port=80, web_proto='http', verify_ssl=False,
event_types=None, signal=None):
"""All config params available to the device."""
self.loop = loop
self.web_proto = web_proto
self.host = host
self.port = port
self.username = username
self.password = password
self.session = requests.Session()
self.session.auth = HTTPDigestAuth(
self.username, self.password)
if self.web_proto == 'https':
self.session.verify_ssl = verify_ssl
self.event_types = event_types
self.signal = signal
|
"""Python library to enable Axis devices to integrate with Home Assistant."""
import requests
from requests.auth import HTTPDigestAuth
class Configuration(object):
"""Device configuration."""
def __init__(self, *,
loop, host, username, password,
port=80, web_proto='http', verify_ssl=False,
event_types=None, signal=None):
"""All config params available to the device."""
self.loop = loop
self.web_proto = web_proto
self.host = host
self.port = port
self.username = username
self.password = password
self.session = requests.Session()
self.session.auth = HTTPDigestAuth(self.username, self.password)
self.session.verify = verify_ssl
self.event_types = event_types
self.signal = signal
|
Allow properly disabling SSL verification
|
Allow properly disabling SSL verification
|
Python
|
mit
|
Kane610/axis
|
---
+++
@@ -2,6 +2,7 @@
import requests
from requests.auth import HTTPDigestAuth
+
class Configuration(object):
"""Device configuration."""
@@ -19,10 +20,8 @@
self.password = password
self.session = requests.Session()
- self.session.auth = HTTPDigestAuth(
- self.username, self.password)
- if self.web_proto == 'https':
- self.session.verify_ssl = verify_ssl
+ self.session.auth = HTTPDigestAuth(self.username, self.password)
+ self.session.verify = verify_ssl
self.event_types = event_types
self.signal = signal
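Background on the fix: requests only honors `Session.verify`; the old `verify_ssl` attribute was silently ignored, so verification could never actually be turned off. With verification disabled, urllib3 warns on every request; a sketch of opting out of that warning deliberately:

import requests
import urllib3

session = requests.Session()
session.verify = False  # accept self-signed device certificates
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)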
|
0e30e73ffa928b11fd6ee6c0ea12709100623e5f
|
pltpreview/view.py
|
pltpreview/view.py
|
"""Convenience functions for matplotlib plotting and image viewing."""
import numpy as np
from matplotlib import pyplot as plt
def show(image, blocking=False, title='', **kwargs):
"""Show *image*. If *blocking* is False the call is nonblocking. *title*
is the image title. *kwargs* are passed to matplotlib's ``imshow``
function. This command always creates a new figure. Returns matplotlib's
``AxesImage``.
"""
plt.figure()
mpl_image = plt.imshow(image, **kwargs)
plt.colorbar(ticks=np.linspace(image.min(), image.max(), 8))
plt.title(title)
plt.show(blocking)
return mpl_image
def plot(*args, **kwargs):
"""Plot using matplotlib's ``plot`` function. Pass it *args* and *kwargs*.
    *kwargs* are inspected for *blocking* and if False or not specified,
    the call is nonblocking. *title* is also allowed to be in *kwargs*, which
sets the figure title. This command always creates a new figure. Returns
a list of ``Line2D`` instances.
"""
blocking = False if 'blocking' not in kwargs else kwargs.pop('blocking')
title = kwargs.pop('title', '')
plt.figure()
lines = plt.plot(*args, **kwargs)
plt.title(title)
plt.show(blocking)
return lines
|
"""Convenience functions for matplotlib plotting and image viewing."""
import numpy as np
from matplotlib import pyplot as plt
def show(image, blocking=False, title='', **kwargs):
"""Show *image*. If *blocking* is False the call is nonblocking. *title*
is the image title. *kwargs* are passed to matplotlib's ``imshow``
function. This command always creates a new figure. Returns matplotlib's
``AxesImage``.
"""
plt.figure()
mpl_image = plt.imshow(image, **kwargs)
plt.colorbar(ticks=np.linspace(image.min(), image.max(), 8))
plt.title(title)
plt.show(blocking)
return mpl_image
def plot(*args, **kwargs):
"""Plot using matplotlib's ``plot`` function. Pass it *args* and *kwargs*.
    *kwargs* are inspected for *blocking* and if False or not specified,
    the call is nonblocking. *title* is also allowed to be in *kwargs*, which
sets the figure title. This command always creates a new figure. Returns
a list of ``Line2D`` instances.
"""
blocking = kwargs.pop('blocking', False)
title = kwargs.pop('title', '')
plt.figure()
lines = plt.plot(*args, **kwargs)
plt.title(title)
plt.show(blocking)
return lines
|
Use pop for getting blocking parameter
|
Use pop for getting blocking parameter
|
Python
|
mit
|
tfarago/pltpreview
|
---
+++
@@ -25,7 +25,7 @@
sets the figure title. This command always creates a new figure. Returns
a list of ``Line2D`` instances.
"""
- blocking = False if 'blocking' not in kwargs else kwargs.pop('blocking')
+ blocking = kwargs.pop('blocking', False)
title = kwargs.pop('title', '')
plt.figure()
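The pop-with-default idiom in isolation: it reads and removes the key in one step, so the remaining kwargs can be forwarded untouched.

def consume(**kwargs):
    blocking = kwargs.pop('blocking', False)  # False when the key is absent
    title = kwargs.pop('title', '')
    return blocking, title, kwargs  # leftovers are safe to pass along

print(consume(title='demo', color='r'))  # (False, 'demo', {'color': 'r'})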
|
f8b83fc7976768c2b9d92ab35297aa17637eeb92
|
firefed/feature/addons.py
|
firefed/feature/addons.py
|
import json
from feature import Feature
from output import good, bad, info
from tabulate import tabulate
class Addons(Feature):
def run(self, args):
with open(self.profile_path('extensions.json')) as f:
addons = json.load(f)['addons']
info(('%d addons found. (%d active)\n' %
(len(addons), sum(addon['active'] for addon in addons))))
if args.summarize:
return
addons.sort(key=lambda x: not x['active'])
table = []
for addon in addons:
name = addon['defaultLocale']['name']
id_ = addon['id']
version = addon['version']
active = good('enabled') if addon['active'] else bad('disabled')
table.append([name, id_, version, active])
info(tabulate(table, headers=['Name', 'ID', 'Version', 'Status']))
|
import json
from feature import Feature
from output import good, bad, info
from tabulate import tabulate
def signed_state(num):
# See constants defined in [1]
states = {
-2: 'broken',
-1: 'unknown',
0: 'missing',
1: 'preliminary',
2: 'signed',
3: 'system',
4: 'privileged'
}
text = states[num]
return good(text) if num > 0 else bad(text)
class Addons(Feature):
def run(self, args):
with open(self.profile_path('extensions.json')) as f:
addons = json.load(f)['addons']
info('%d addons found. (%d active)\n' %
(len(addons), sum(addon['active'] for addon in addons)))
if args.summarize:
return
addons.sort(key=lambda x: not x['active'])
table = []
for addon in addons:
name = addon['defaultLocale']['name']
id_ = addon['id']
version = addon['version']
active = good('enabled') if addon['active'] else bad('disabled')
try:
signed = signed_state(addon['signedState'])
except KeyError:
signed = '(unspecified)'
visible = good('true') if addon['visible'] else bad('false')
table.append([name, id_, version, active, signed, visible])
info(tabulate(table, headers=['Name', 'ID', 'Version', 'Status', 'Signature', 'Visible']))
# [1]: https://dxr.mozilla.org/mozilla-central/rev/967c95cee709756596860ed2a3e6ac06ea3a053f/toolkit/mozapps/extensions/AddonManager.jsm#3495
|
Add signature and visibility status to addon feature
|
Add signature and visibility status to addon feature
|
Python
|
mit
|
numirias/firefed
|
---
+++
@@ -4,12 +4,27 @@
from tabulate import tabulate
+def signed_state(num):
+ # See constants defined in [1]
+ states = {
+ -2: 'broken',
+ -1: 'unknown',
+ 0: 'missing',
+ 1: 'preliminary',
+ 2: 'signed',
+ 3: 'system',
+ 4: 'privileged'
+ }
+ text = states[num]
+ return good(text) if num > 0 else bad(text)
+
+
class Addons(Feature):
def run(self, args):
with open(self.profile_path('extensions.json')) as f:
addons = json.load(f)['addons']
- info(('%d addons found. (%d active)\n' %
- (len(addons), sum(addon['active'] for addon in addons))))
+ info('%d addons found. (%d active)\n' %
+ (len(addons), sum(addon['active'] for addon in addons)))
if args.summarize:
return
addons.sort(key=lambda x: not x['active'])
@@ -19,5 +34,13 @@
id_ = addon['id']
version = addon['version']
active = good('enabled') if addon['active'] else bad('disabled')
- table.append([name, id_, version, active])
- info(tabulate(table, headers=['Name', 'ID', 'Version', 'Status']))
+ try:
+ signed = signed_state(addon['signedState'])
+ except KeyError:
+ signed = '(unspecified)'
+ visible = good('true') if addon['visible'] else bad('false')
+ table.append([name, id_, version, active, signed, visible])
+ info(tabulate(table, headers=['Name', 'ID', 'Version', 'Status', 'Signature', 'Visible']))
+
+
+# [1]: https://dxr.mozilla.org/mozilla-central/rev/967c95cee709756596860ed2a3e6ac06ea3a053f/toolkit/mozapps/extensions/AddonManager.jsm#3495
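An equivalent, slightly terser fallback uses dict.get twice instead of catching KeyError; a sketch without the color helpers:

states = {-2: 'broken', -1: 'unknown', 0: 'missing', 1: 'preliminary',
          2: 'signed', 3: 'system', 4: 'privileged'}

def signed_text(addon):
    # A missing 'signedState' key and an unknown code both fall through
    # to the default.
    return states.get(addon.get('signedState'), '(unspecified)')

print(signed_text({'signedState': 2}))  # signed
print(signed_text({}))                  # (unspecified)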
|
314b8cf14eb3bed9b116b78ce0199e73399a4dab
|
businesstime/test/holidays/aus_test.py
|
businesstime/test/holidays/aus_test.py
|
from datetime import datetime, date, timedelta
import unittest
from businesstime.holidays.aus import QueenslandPublicHolidays, BrisbanePublicHolidays
class QueenslandPublicHolidaysTest(unittest.TestCase):
def test_2016_08(self):
holidays_gen = QueenslandPublicHolidays()
self.assertEqual(
list(holidays_gen(date(2016, 8, 1), end=date(2016, 8, 31))),
[]
)
class BrisbanePublicHolidaysTest(unittest.TestCase):
def test_2016_08(self):
holidays_gen = BrisbanePublicHolidays()
self.assertEqual(
list(holidays_gen(date(2016, 8, 1), end=date(2016, 8, 31))),
[
date(2016, 8, 10)
]
)
def test_out_of_range(self):
holidays_gen = BrisbanePublicHolidays()
with self.assertRaises(NotImplementedError):
list(holidays_gen(date(2017, 1, 1), end=date(2017, 12, 31)))
|
from datetime import datetime, date, timedelta
import unittest
from businesstime.holidays.aus import QueenslandPublicHolidays, BrisbanePublicHolidays
class QueenslandPublicHolidaysTest(unittest.TestCase):
def test_2016_08(self):
holidays_gen = QueenslandPublicHolidays()
self.assertEqual(
list(holidays_gen(date(2016, 8, 1), end=date(2016, 8, 31))),
[]
)
class BrisbanePublicHolidaysTest(unittest.TestCase):
def test_2016_08(self):
holidays_gen = BrisbanePublicHolidays()
self.assertEqual(
list(holidays_gen(date(2016, 8, 1), end=date(2016, 8, 31))),
[
date(2016, 8, 10)
]
)
def test_out_of_range(self):
holidays_gen = BrisbanePublicHolidays()
def test():
return list(holidays_gen(date(2017, 1, 1), end=date(2017, 12, 31)))
self.assertRaises(NotImplementedError, test)
|
Fix tests in python 2.6
|
Fix tests in python 2.6
|
Python
|
bsd-2-clause
|
seatgeek/businesstime
|
---
+++
@@ -27,5 +27,6 @@
def test_out_of_range(self):
holidays_gen = BrisbanePublicHolidays()
- with self.assertRaises(NotImplementedError):
- list(holidays_gen(date(2017, 1, 1), end=date(2017, 12, 31)))
+ def test():
+ return list(holidays_gen(date(2017, 1, 1), end=date(2017, 12, 31)))
+ self.assertRaises(NotImplementedError, test)
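For context: assertRaises as a context manager only appeared in Python 2.7, so Python 2.6 needs the callable form. The two forms side by side:

import unittest

class AssertRaisesForms(unittest.TestCase):
    def test_callable_form(self):  # works on Python 2.6
        self.assertRaises(ValueError, int, 'not a number')

    def test_context_manager_form(self):  # requires Python 2.7+
        with self.assertRaises(ValueError):
            int('not a number')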
|
f3cdd316f9e0859f77389c68b073134a6076374b
|
ppp_datamodel_notation_parser/requesthandler.py
|
ppp_datamodel_notation_parser/requesthandler.py
|
"""Request handler of the module."""
from functools import partial
from ppp_datamodel import Sentence, TraceItem, Response
from ppp_datamodel.parsers import parse_triples, ParseError
def tree_to_response(measures, trace, tree):
trace = trace + [TraceItem('DatamodelNotationParser',
tree, measures)]
return Response('en', tree, measures, trace)
class RequestHandler:
def __init__(self, request):
self.request = request
def answer(self):
if not isinstance(self.request.tree, Sentence):
return []
try:
forest = parse_triples(self.request.tree.value)
except ParseError:
return []
measures = {'accuracy': 1, 'relevance': 0.5}
return map(partial(tree_to_response, measures, self.request.trace),
forest)
|
"""Request handler of the module."""
from functools import partial
from ppp_datamodel import Sentence, TraceItem, Response
from ppp_datamodel.parsers import parse_triples, ParseError
def tree_to_response(tree, measures, trace):
trace = trace + [TraceItem('DatamodelNotationParser',
tree, measures)]
return Response('en', tree, measures, trace)
class RequestHandler:
def __init__(self, request):
self.request = request
def answer(self):
if not isinstance(self.request.tree, Sentence):
return []
try:
tree = parse_triples(self.request.tree.value)
except ParseError:
return []
measures = {'accuracy': 1, 'relevance': 0.5}
return [tree_to_response(tree, measures, self.request.trace)]
|
Fix compatibility with new parser.
|
Fix compatibility with new parser.
|
Python
|
mit
|
ProjetPP/PPP-DatamodelNotationParser,ProjetPP/PPP-DatamodelNotationParser
|
---
+++
@@ -5,7 +5,7 @@
from ppp_datamodel import Sentence, TraceItem, Response
from ppp_datamodel.parsers import parse_triples, ParseError
-def tree_to_response(measures, trace, tree):
+def tree_to_response(tree, measures, trace):
trace = trace + [TraceItem('DatamodelNotationParser',
tree, measures)]
return Response('en', tree, measures, trace)
@@ -18,9 +18,8 @@
if not isinstance(self.request.tree, Sentence):
return []
try:
- forest = parse_triples(self.request.tree.value)
+ tree = parse_triples(self.request.tree.value)
except ParseError:
return []
measures = {'accuracy': 1, 'relevance': 0.5}
- return map(partial(tree_to_response, measures, self.request.trace),
- forest)
+ return [tree_to_response(tree, measures, self.request.trace)]
|
9ec8949d62188efe8c40e859c20fc55339f4e7e2
|
taca/utils/filesystem.py
|
taca/utils/filesystem.py
|
"""Filesystem utilities."""
import contextlib
import os
import shutil
RUN_RE = '^\d{6}_[a-zA-Z\d\-]+_\d{4}_[AB0][A-Z\d\-]+$'
@contextlib.contextmanager
def chdir(new_dir):
"""Context manager to temporarily change to a new directory."""
cur_dir = os.getcwd()
os.chdir(new_dir)
try:
yield
finally:
os.chdir(cur_dir)
def create_folder(target_folder):
""" Ensure that a folder exists and create it if it doesn't, including any
parent folders, as necessary.
:param target_folder: the target folder
:returns: True if the folder exists or was created, False if the folder
does not exists and could not be created
"""
try:
os.makedirs(target_folder)
except OSError as e:
pass
return os.path.exists(target_folder)
def touch(file):
open(file, 'w').close()
def do_symlink(src_file, dst_file):
link_f = os.symlink
if not os.path.isfile(dst_file):
link_f(os.path.realpath(src_file), dst_file)
def do_copy(src_path, dst_path):
# copies folder structure and files (recursively)
# if symlinks, will copy content, not the links
# dst_path will be created, it must NOT exist
shutil.copytree(src_path, dst_path)
|
"""Filesystem utilities."""
import contextlib
import os
import shutil
RUN_RE = '^\d{6}_[a-zA-Z\d\-]+_\d{2,}_[AB0][A-Z\d\-]+$'
@contextlib.contextmanager
def chdir(new_dir):
"""Context manager to temporarily change to a new directory."""
cur_dir = os.getcwd()
os.chdir(new_dir)
try:
yield
finally:
os.chdir(cur_dir)
def create_folder(target_folder):
""" Ensure that a folder exists and create it if it doesn't, including any
parent folders, as necessary.
:param target_folder: the target folder
:returns: True if the folder exists or was created, False if the folder
does not exists and could not be created
"""
try:
os.makedirs(target_folder)
except OSError as e:
pass
return os.path.exists(target_folder)
def touch(file):
open(file, 'w').close()
def do_symlink(src_file, dst_file):
link_f = os.symlink
if not os.path.isfile(dst_file):
link_f(os.path.realpath(src_file), dst_file)
def do_copy(src_path, dst_path):
# copies folder structure and files (recursively)
# if symlinks, will copy content, not the links
# dst_path will be created, it must NOT exist
shutil.copytree(src_path, dst_path)
|
Fix FC name pattern for NextSeq2000
|
Fix FC name pattern for NextSeq2000
|
Python
|
mit
|
SciLifeLab/TACA,SciLifeLab/TACA,SciLifeLab/TACA
|
---
+++
@@ -3,7 +3,7 @@
import os
import shutil
-RUN_RE = '^\d{6}_[a-zA-Z\d\-]+_\d{4}_[AB0][A-Z\d\-]+$'
+RUN_RE = '^\d{6}_[a-zA-Z\d\-]+_\d{2,}_[AB0][A-Z\d\-]+$'
@contextlib.contextmanager
def chdir(new_dir):
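What the relaxed counter field buys: older instruments zero-pad the run counter to four digits, while NextSeq 2000 run folders can carry fewer. Illustrative (made-up) run names checked against the new pattern:

import re

RUN_RE = '^\d{6}_[a-zA-Z\d\-]+_\d{2,}_[AB0][A-Z\d\-]+$'

print(bool(re.match(RUN_RE, '201023_A00621_0063_BHVMFKDSXY')))  # True, 4-digit counter
print(bool(re.match(RUN_RE, '201023_VH00203_32_AAAG2YWM5')))    # True, 2-digit counter
print(bool(re.match(RUN_RE, '201023_A00621_7_BHVMFKDSXY')))     # False, needs two or more digits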
|
bd31b43fc6f282f2f6cc4bf11a6ae5c51e0e3501
|
bot/config.py
|
bot/config.py
|
import os
import logging
import pytz
BOT_URL = os.getenv("BOT_URL", "")
ENVIRONMENT = os.getenv("ENVIRONMENT", "local")
WEBHOOK = os.getenv("WEBHOOK", "")
BOTTLE_PORT = os.getenv("BOTTLE_PORT", "8080")
BOTTLE_HOST = os.getenv("BOTTLE_HOST", "127.0.0.1")
LAST_UPDATE_ID_FILE = "last_update"
GROUPS_DB_NAME = "tags"
POLL_PERIOD = 1
MAX_TAGS = 5
LOGGING_LEVEL = logging.DEBUG
LOCAL_TIMEZONE = pytz.timezone('America/Mexico_City')
|
import os
import logging
import pytz
BOT_URL = os.getenv("BOT_URL", "")
ENVIRONMENT = os.getenv("ENVIRONMENT", "local")
WEBHOOK = os.getenv("WEBHOOK", "")
BOTTLE_PORT = os.getenv("BOTTLE_PORT", "8080")
BOTTLE_HOST = os.getenv("BOTTLE_HOST", "127.0.0.1")
LAST_UPDATE_ID_FILE = "last_update"
GROUPS_DB_NAME = "tags"
POLL_PERIOD = 1
MAX_TAGS = 5
LOGGING_LEVEL = logging.DEBUG
LOCAL_TIMEZONE = pytz.timezone('America/Mexico_City')
PENDING_MIGRATION = os.getenv("PENDING_MIGRATION", False)
|
Add Pending migration env var
|
Add Pending migration env var
|
Python
|
mit
|
cesar0094/telegram-tldrbot
|
---
+++
@@ -13,3 +13,4 @@
MAX_TAGS = 5
LOGGING_LEVEL = logging.DEBUG
LOCAL_TIMEZONE = pytz.timezone('America/Mexico_City')
+PENDING_MIGRATION = os.getenv("PENDING_MIGRATION", False)
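One caveat worth flagging: os.getenv returns the raw string whenever the variable is set, so PENDING_MIGRATION=false in the environment yields the truthy string 'false'. A sketch of explicit flag parsing (the helper name is illustrative):

import os

def env_flag(name, default=False):
    value = os.getenv(name)
    if value is None:
        return default
    return value.strip().lower() in ('1', 'true', 'yes', 'on')

PENDING_MIGRATION = env_flag("PENDING_MIGRATION")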
|
e64b0544b146cb810424e0e243835a34aa977f40
|
boxoffice/__init__.py
|
boxoffice/__init__.py
|
# -*- coding: utf-8 -*-
# imports in this file are order-sensitive
from pytz import timezone
from flask import Flask
from flask.ext.mail import Mail
from flask.ext.lastuser import Lastuser
from flask.ext.lastuser.sqlalchemy import UserManager
from baseframe import baseframe, assets, Version
from ._version import __version__
import coaster.app
import wtforms_json
app = Flask(__name__, instance_relative_config=True)
lastuser = Lastuser()
mail = Mail()
# --- Assets ------------------------------------------------------------------
version = Version(__version__)
assets['boxoffice.js'][version] = 'js/scripts.js'
assets['boxoffice.css'][version] = 'css/order.css'
from boxoffice.models import db, User # noqa
from . import extapi, views # noqa
# Configure the app
def init_for(env):
coaster.app.init_app(app, env)
db.init_app(app)
db.app = app
lastuser.init_app(app)
lastuser.init_usermanager(UserManager(db, User))
app.config['tz'] = timezone(app.config['TIMEZONE'])
baseframe.init_app(app, requires=['boxoffice'], ext_requires=['baseframe-bs3', 'fontawesome>=4.0.0', 'ractive'])
mail.init_app(app)
wtforms_json.init()
|
# -*- coding: utf-8 -*-
# imports in this file are order-sensitive
from pytz import timezone
from flask import Flask
from flask.ext.mail import Mail
from flask.ext.lastuser import Lastuser
from flask.ext.lastuser.sqlalchemy import UserManager
from baseframe import baseframe, assets, Version
from ._version import __version__
import coaster.app
import wtforms_json
app = Flask(__name__, instance_relative_config=True)
lastuser = Lastuser()
mail = Mail()
# --- Assets ------------------------------------------------------------------
version = Version(__version__)
assets['boxoffice.js'][version] = 'js/scripts.js'
assets['boxoffice.css'][version] = 'css/order.css'
from boxoffice.models import db, User # noqa
from . import extapi, views # noqa
# Configure the app
def init_for(env):
coaster.app.init_app(app, env)
db.init_app(app)
db.app = app
lastuser.init_app(app)
lastuser.init_usermanager(UserManager(db, User))
app.config['tz'] = timezone(app.config['TIMEZONE'])
baseframe.init_app(app, requires=['boxoffice'], ext_requires=['baseframe-bs3', 'fontawesome>=4.0.0', 'ractive', 'ractive-transitions-fly', 'validate'])
mail.init_app(app)
wtforms_json.init()
|
Add assets ractive-transitions-fly and validate
|
Add assets ractive-transitions-fly and validate
|
Python
|
agpl-3.0
|
hasgeek/boxoffice,hasgeek/boxoffice,hasgeek/boxoffice,hasgeek/boxoffice
|
---
+++
@@ -37,7 +37,7 @@
lastuser.init_usermanager(UserManager(db, User))
app.config['tz'] = timezone(app.config['TIMEZONE'])
- baseframe.init_app(app, requires=['boxoffice'], ext_requires=['baseframe-bs3', 'fontawesome>=4.0.0', 'ractive'])
+ baseframe.init_app(app, requires=['boxoffice'], ext_requires=['baseframe-bs3', 'fontawesome>=4.0.0', 'ractive', 'ractive-transitions-fly', 'validate'])
mail.init_app(app)
wtforms_json.init()
|
5736e8314d5af3346a15224b27448f1c795f665c
|
bin/coverage_check.py
|
bin/coverage_check.py
|
#!/usr/bin/env python
import os
import subprocess
from lib import functional
from util import find_all
def coverage_module(package, module):
command = (
'coverage run --branch'
' --source=%s.%s tests/%s/%s_test.py')
print subprocess.check_output(
command % (package, module, package, module),
stderr=subprocess.STDOUT,
shell=True)
print subprocess.check_output(
'coverage report --fail-under=100 -m',
stderr=subprocess.STDOUT,
shell=True)
subprocess.check_output(
'coverage erase',
shell=True)
def coverage_test_package(package):
def path_to_name(name):
return os.path.split(name)[1].split('.')[0]
for module in functional.removed(
map(path_to_name, find_all(
os.path.join('src', package), '.py')), '__init__'):
print package, module
coverage_module(package, module)
def coverage_test_all():
os.chdir(os.environ['PORTER'])
for package in ['lib', 'engine']:
coverage_test_package(package)
if __name__ == '__main__':
coverage_test_all()
|
#!/usr/bin/env python
import os
import subprocess
from lib import functional
from util import find_all
def coverage_module(package, module):
command = (
'coverage run --branch'
' --source=%s.%s tests/%s/%s_test.py')
print subprocess.check_output(
command % (package, module, package, module),
stderr=subprocess.STDOUT,
shell=True)
print subprocess.check_output(
'coverage report --fail-under=100 -m',
stderr=subprocess.STDOUT,
shell=True)
subprocess.check_output(
'coverage erase',
shell=True)
def coverage_test_package(package):
def path_to_name(name):
return os.path.split(name)[1].split('.')[0]
for module in functional.removed(
map(path_to_name, find_all(
os.path.join('src', package), '.py')), '__init__'):
print package, module
coverage_module(package, module)
def coverage_test_all():
os.chdir(os.environ['PORTER'])
for package in os.listdir('src/'):
coverage_test_package(package)
if __name__ == '__main__':
coverage_test_all()
|
Remove hard coded 'engine' and 'lib' in coverage testing
|
Remove hard coded 'engine' and 'lib' in coverage testing
|
Python
|
mit
|
Tactique/game_engine,Tactique/game_engine
|
---
+++
@@ -38,7 +38,7 @@
def coverage_test_all():
os.chdir(os.environ['PORTER'])
- for package in ['lib', 'engine']:
+ for package in os.listdir('src/'):
coverage_test_package(package)
if __name__ == '__main__':
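A hedged refinement: os.listdir('src/') also returns loose files (a setup.py, stray .pyc files), which would break coverage_test_package. Filtering to directories keeps the dynamic discovery:

import os

def list_packages(src='src/'):
    # Only keep entries that are actual package directories.
    return [name for name in os.listdir(src)
            if os.path.isdir(os.path.join(src, name))]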
|
4dd5dbf6c1f693c54b31a84756350cb9588921d1
|
pybinding/model.py
|
pybinding/model.py
|
from scipy.sparse import csr_matrix
from . import _cpp
from .system import System
from .lattice import Lattice
from .support.sparse import SparseMatrix
class Model(_cpp.Model):
def __init__(self, *params):
super().__init__()
self.add(*params)
def add(self, *params):
for param in params:
if param is None:
continue
if isinstance(param, (tuple, list)):
self.add(*param)
else:
super().add(param)
@property
def system(self) -> System:
return System(super().system)
@property
def hamiltonian(self) -> csr_matrix:
matrix = SparseMatrix(super().hamiltonian.matrix)
return matrix.tocsr()
@property
def lattice(self) -> Lattice:
return super().lattice
@property
def modifiers(self) -> list:
return (self.state_modifiers + self.position_modifiers +
self.onsite_modifiers + self.hopping_modifiers)
|
import numpy as np
from scipy.sparse import csr_matrix
from . import _cpp
from . import results
from .system import System
from .lattice import Lattice
from .support.sparse import SparseMatrix
class Model(_cpp.Model):
def __init__(self, *params):
super().__init__()
self.add(*params)
def add(self, *params):
for param in params:
if param is None:
continue
if isinstance(param, (tuple, list)):
self.add(*param)
else:
super().add(param)
@property
def system(self) -> System:
return System(super().system)
@property
def hamiltonian(self) -> csr_matrix:
matrix = SparseMatrix(super().hamiltonian.matrix)
return matrix.tocsr()
@property
def lattice(self) -> Lattice:
return super().lattice
@property
def modifiers(self) -> list:
return (self.state_modifiers + self.position_modifiers +
self.onsite_modifiers + self.hopping_modifiers)
@property
def onsite_map(self) -> results.StructureMap:
"""`StructureMap` of the onsite energy"""
onsite_energy = np.real(self.hamiltonian.tocsr().diagonal())
return results.StructureMap.from_system(onsite_energy, self.system)
|
Add onsite energy map to Model
|
Add onsite energy map to Model
|
Python
|
bsd-2-clause
|
dean0x7d/pybinding,MAndelkovic/pybinding,MAndelkovic/pybinding,dean0x7d/pybinding,dean0x7d/pybinding,MAndelkovic/pybinding
|
---
+++
@@ -1,6 +1,8 @@
+import numpy as np
from scipy.sparse import csr_matrix
from . import _cpp
+from . import results
from .system import System
from .lattice import Lattice
from .support.sparse import SparseMatrix
@@ -38,3 +40,9 @@
def modifiers(self) -> list:
return (self.state_modifiers + self.position_modifiers +
self.onsite_modifiers + self.hopping_modifiers)
+
+ @property
+ def onsite_map(self) -> results.StructureMap:
+ """`StructureMap` of the onsite energy"""
+ onsite_energy = np.real(self.hamiltonian.tocsr().diagonal())
+ return results.StructureMap.from_system(onsite_energy, self.system)
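The new property reads the onsite energies straight off the Hamiltonian diagonal; np.real discards the imaginary parts a complex-valued sparse matrix carries. The extraction in isolation, with a toy two-site matrix:

import numpy as np
from scipy.sparse import csr_matrix

h = csr_matrix(np.array([[1.0 + 0j, 0.2], [0.2, -1.0 + 0j]]))
onsite_energy = np.real(h.diagonal())
print(onsite_energy)  # [ 1. -1.]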
|
517c0f2b1f8e6616cc63ec0c3990dcff2922f0e6
|
pinax/invitations/admin.py
|
pinax/invitations/admin.py
|
from django.contrib import admin
from django.contrib.auth import get_user_model
from .models import InvitationStat, JoinInvitation
User = get_user_model()
class InvitationStatAdmin(admin.ModelAdmin):
raw_id_fields = ["user"]
readonly_fields = ["invites_sent", "invites_accepted"]
list_display = [
"user",
"invites_sent",
"invites_accepted",
"invites_allocated",
"invites_remaining",
"can_send"
]
list_filter = ["invites_sent", "invites_accepted"]
admin.site.register(
JoinInvitation,
list_display=["from_user", "to_user", "sent", "status", "to_user_email"],
list_filter=["sent", "status"],
search_fields=["from_user__{}".format(User.USERNAME_FIELD)]
)
admin.site.register(InvitationStat, InvitationStatAdmin)
|
from django.contrib import admin
from django.contrib.auth import get_user_model
from .models import InvitationStat, JoinInvitation
User = get_user_model()
class InvitationStatAdmin(admin.ModelAdmin):
raw_id_fields = ["user"]
readonly_fields = ["invites_sent", "invites_accepted"]
list_display = [
"user",
"invites_sent",
"invites_accepted",
"invites_allocated",
"invites_remaining",
"can_send"
]
list_filter = ["invites_sent", "invites_accepted"]
admin.site.register(
JoinInvitation,
list_display=["from_user", "to_user", "sent", "status", "to_user_email"],
list_filter=["sent", "status"],
search_fields=[f"from_user__{User.USERNAME_FIELD}"]
)
admin.site.register(InvitationStat, InvitationStatAdmin)
|
Use f-strings in place of `str.format()`
|
Use f-strings in place of `str.format()`
|
Python
|
unknown
|
pinax/pinax-invitations,eldarion/kaleo
|
---
+++
@@ -24,6 +24,6 @@
JoinInvitation,
list_display=["from_user", "to_user", "sent", "status", "to_user_email"],
list_filter=["sent", "status"],
- search_fields=["from_user__{}".format(User.USERNAME_FIELD)]
+ search_fields=[f"from_user__{User.USERNAME_FIELD}"]
)
admin.site.register(InvitationStat, InvitationStatAdmin)
|
1918ed65e441057724b82a3cb710898f8742214b
|
canaryd/subprocess.py
|
canaryd/subprocess.py
|
import os
import shlex
import sys
from canaryd_packages import six
from canaryd.log import logger
if os.name == 'posix' and sys.version_info[0] < 3:
from canaryd_packages.subprocess32 import * # noqa
else:
from subprocess import * # noqa
def get_command_output(command, *args, **kwargs):
logger.debug('Executing command: {0}'.format(command))
if isinstance(command, six.binary_type):
command = command.decode()
if (
not kwargs.get('shell', False)
and not isinstance(command, (list, tuple))
):
command = shlex.split(command)
output = check_output( # noqa
command,
close_fds=True,
stderr=STDOUT, # noqa
*args, **kwargs
)
if isinstance(output, six.binary_type):
output = output.decode()
return output
|
import os
import shlex
import sys
from canaryd_packages import six
from canaryd.log import logger
if os.name == 'posix' and sys.version_info[0] < 3:
from canaryd_packages.subprocess32 import * # noqa
else:
from subprocess import * # noqa
def get_command_output(command, *args, **kwargs):
logger.debug('Executing command: {0}'.format(command))
if (
not kwargs.get('shell', False)
and not isinstance(command, (list, tuple))
):
command = shlex.split(command)
output = check_output( # noqa
command,
close_fds=True,
stderr=STDOUT, # noqa
*args, **kwargs
)
if isinstance(output, six.binary_type):
output = output.decode()
return output
|
Fix indent and don't decode input command.
|
Fix indent and don't decode input command.
|
Python
|
mit
|
Oxygem/canaryd,Oxygem/canaryd
|
---
+++
@@ -16,14 +16,11 @@
def get_command_output(command, *args, **kwargs):
logger.debug('Executing command: {0}'.format(command))
- if isinstance(command, six.binary_type):
- command = command.decode()
-
if (
not kwargs.get('shell', False)
and not isinstance(command, (list, tuple))
):
- command = shlex.split(command)
+ command = shlex.split(command)
output = check_output( # noqa
command,
|
e6d216077a683aa07b811b5f131dd07809f741bc
|
readthedocs/settings/postgres.py
|
readthedocs/settings/postgres.py
|
from .base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'docs',
'USER': 'postgres', # Not used with sqlite3.
'PASSWORD': '',
'HOST': 'golem',
'PORT': '',
}
}
DEBUG = False
TEMPLATE_DEBUG = False
CELERY_ALWAYS_EAGER = False
MEDIA_URL = 'http://media.readthedocs.org/'
ADMIN_MEDIA_PREFIX = MEDIA_URL + 'admin/'
CACHE_BACKEND = 'memcached://localhost:11211/'
SESSION_ENGINE = "django.contrib.sessions.backends.cached_db"
HAYSTACK_SEARCH_ENGINE = 'solr'
HAYSTACK_SOLR_URL = 'http://odin:8983/solr'
try:
from local_settings import *
except:
pass
|
from .base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'docs',
'USER': 'postgres', # Not used with sqlite3.
'PASSWORD': '',
'HOST': '10.177.73.97',
'PORT': '',
}
}
DEBUG = False
TEMPLATE_DEBUG = False
CELERY_ALWAYS_EAGER = False
MEDIA_URL = 'http://media.readthedocs.org/'
ADMIN_MEDIA_PREFIX = MEDIA_URL + 'admin/'
CACHE_BACKEND = 'memcached://localhost:11211/'
SESSION_ENGINE = "django.contrib.sessions.backends.cached_db"
HAYSTACK_SEARCH_ENGINE = 'solr'
HAYSTACK_SOLR_URL = 'http://odin:8983/solr'
try:
from local_settings import *
except:
pass
|
Update database server in the configs.
|
Update database server in the configs.
|
Python
|
mit
|
michaelmcandrew/readthedocs.org,ojii/readthedocs.org,safwanrahman/readthedocs.org,Tazer/readthedocs.org,CedarLogic/readthedocs.org,kdkeyser/readthedocs.org,GovReady/readthedocs.org,Tazer/readthedocs.org,nikolas/readthedocs.org,dirn/readthedocs.org,espdev/readthedocs.org,johncosta/private-readthedocs.org,gjtorikian/readthedocs.org,alex/readthedocs.org,safwanrahman/readthedocs.org,nyergler/pythonslides,kenwang76/readthedocs.org,stevepiercy/readthedocs.org,singingwolfboy/readthedocs.org,ojii/readthedocs.org,davidfischer/readthedocs.org,mrshoki/readthedocs.org,kenshinthebattosai/readthedocs.org,michaelmcandrew/readthedocs.org,KamranMackey/readthedocs.org,gjtorikian/readthedocs.org,singingwolfboy/readthedocs.org,d0ugal/readthedocs.org,Carreau/readthedocs.org,pombredanne/readthedocs.org,soulshake/readthedocs.org,cgourlay/readthedocs.org,wijerasa/readthedocs.org,dirn/readthedocs.org,soulshake/readthedocs.org,stevepiercy/readthedocs.org,sunnyzwh/readthedocs.org,raven47git/readthedocs.org,laplaceliu/readthedocs.org,davidfischer/readthedocs.org,sid-kap/readthedocs.org,wijerasa/readthedocs.org,sid-kap/readthedocs.org,d0ugal/readthedocs.org,LukasBoersma/readthedocs.org,jerel/readthedocs.org,rtfd/readthedocs.org,istresearch/readthedocs.org,VishvajitP/readthedocs.org,tddv/readthedocs.org,sid-kap/readthedocs.org,davidfischer/readthedocs.org,mrshoki/readthedocs.org,kenshinthebattosai/readthedocs.org,michaelmcandrew/readthedocs.org,KamranMackey/readthedocs.org,gjtorikian/readthedocs.org,singingwolfboy/readthedocs.org,d0ugal/readthedocs.org,Carreau/readthedocs.org,pombredanne/readthedocs.org,soulshake/readthedocs.org,cgourlay/readthedocs.org,wijerasa/readthedocs.org,dirn/readthedocs.org,soulshake/readthedocs.org,stevepiercy/readthedocs.org,sunnyzwh/readthedocs.org,raven47git/readthedocs.org,laplaceliu/readthedocs.org,agjohnson/readthedocs.org,LukasBoersma/readthedocs.org,jerel/readthedocs.org,rtfd/readthedocs.org,istresearch/readthedocs.org,titiushko/readthedocs.org,kdkeyser/readthedocs.org,GovReady/readthedocs.org,titiushko/readthedocs.org,tddv/readthedocs.org,kenwang76/readthedocs.org,kenshinthebattosai/readthedocs.org,laplaceliu/readthedocs.org,fujita-shintaro/readthedocs.org,takluyver/readthedocs.org,sid-kap/readthedocs.org,nikolas/readthedocs.org,cgourlay/readthedocs.org,d0ugal/readthedocs.org,SteveViss/readthedocs.org,johncosta/private-readthedocs.org,asampat3090/readthedocs.org,gjtorikian/readthedocs.org,singingwolfboy/readthedocs.org,attakei/readthedocs-oauth,gjtorikian/readthedocs.org,kenshinthebattosai/readthedocs.org,SteveViss/readthedocs.org,emawind84/readthedocs.org,singingwolfboy/readthedocs.org,kenshinthebattosai/readthedocs.org,kenwang76/readthedocs.org,CedarLogic/readthedocs.org,stevepiercy/readthedocs.org,asampat3090/readthedocs.org,nyergler/pythonslides,fujita-shintaro/readthedocs.org,titiushko/readthedocs.org,safwanrahman/readthedocs.org,espdev/readthedocs.org,agjohnson/readthedocs.org,sils1297/readthedocs.org,mhils/readthedocs.org,raven47git/readthedocs.org,emawind84/readthedocs.org,Carreau/readthedocs.org,attakei/readthedocs-oauth,sils1297/readthedocs.org,kdkeyser/readthedocs.org,davidfischer/readthedocs.org,dirn/readthedocs.org,hach-que/readthedocs.org,mhils/readthedocs.org,VishvajitP/readthedocs.org,d0ugal/readthedocs.org,atsuyim/readthedocs.org,royalwang/readthedocs.org,takluyver/readthedocs.org,techtonik/readthedocs.org,clarkperkins/readthedocs.org,techtonik/readthedocs.org,Tazer/readthedocs.org,agjohnson/readthedocs.org,sunnyzwh/readthedocs.org,wanghaven/readthedocs.org,mrshoki/readthedocs.org,sunnyzwh/readthedocs.org,LukasBoersma/readthedocs.org,fujita-shintaro/readthedocs.org,wanghaven/readthedocs.org,istresearch/readthedocs.org,tddv/readthedocs.org,KamranMackey/readthedocs.org,soulshake/readthedocs.org,attakei/readthedocs-oauth,istresearch/readthedocs.org,Carreau/readthedocs.org,sils1297/readthedocs.org,rtfd/readthedocs.org,takluyver/readthedocs.org,ojii/readthedocs.org,emawind84/readthedocs.org,atsuyim/readthedocs.org
|
---
+++
@@ -6,7 +6,7 @@
'NAME': 'docs',
'USER': 'postgres', # Not used with sqlite3.
'PASSWORD': '',
- 'HOST': 'golem',
+ 'HOST': '10.177.73.97',
'PORT': '',
}
}
|
6749c5a4541836fcf25abbc571082b4c909b0bbb
|
corehq/apps/app_manager/migrations/0019_exchangeapplication_required_privileges.py
|
corehq/apps/app_manager/migrations/0019_exchangeapplication_required_privileges.py
|
# Generated by Django 2.2.24 on 2021-09-14 17:54
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('app_manager', '0018_migrate_case_search_labels'),
]
operations = [
migrations.AddField(
model_name='exchangeapplication',
name='required_privileges',
field=models.TextField(help_text='Space-separated list of privilege strings from corehq.privileges', null=True),
),
]
|
# Generated by Django 2.2.24 on 2021-09-14 17:54
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('app_manager', '0018_migrate_case_search_labels'),
]
operations = [
migrations.AddField(
model_name='exchangeapplication',
name='required_privileges',
field=models.TextField(null=True, help_text="Space-separated list of privilege strings from "
"corehq.privileges"),
),
]
|
Fix migration with help text
|
Fix migration with help text
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
---
+++
@@ -13,6 +13,7 @@
migrations.AddField(
model_name='exchangeapplication',
name='required_privileges',
- field=models.TextField(help_text='Space-separated list of privilege strings from corehq.privileges', null=True),
+ field=models.TextField(null=True, help_text="Space-separated list of privilege strings from "
+ "corehq.privileges"),
),
]
|
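Editor's note: the reformatted field above relies on Python's implicit concatenation of adjacent string literals, so the help text is still a single string. A quick sanity check:

text = ("Space-separated list of privilege strings from "
        "corehq.privileges")
assert text == "Space-separated list of privilege strings from corehq.privileges"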
3291b015a5f3d311c72980913756a08d87b1ac1a
|
scripts/blacklisted.py
|
scripts/blacklisted.py
|
import os
import platform
# If you are adding a new entry, please include a short comment
# explaining why the specific test is blacklisted.
_unix_black_list = set([name.lower() for name in [
'blackparrot',
'blackucode',
'blackunicore',
'earlgrey_nexysvideo', # ram size in ci machines
'lpddr',
'simpleparsertestcache', # race condition
]])
_windows_black_list = _unix_black_list.union(set([name.lower() for name in [
'ariane', # Uses shell script with make command
'earlgrey_verilator_01_05_21', # lowmem is unsupported
'unitpython', # Python is unsupported
]]))
_msys2_black_list = _unix_black_list.union(set([name.lower() for name in [
'earlgrey_verilator_01_05_21', # lowmem is unsupported
]]))
def is_blacklisted(name):
if platform.system() == 'Windows':
blacklist = _msys2_black_list if 'MSYSTEM' in os.environ else _windows_black_list
else:
blacklist = _unix_black_list
return name.lower() in blacklist
|
import os
import platform
# If you are adding a new entry, please include a short comment
# explaining why the specific test is blacklisted.
_unix_black_list = set([name.lower() for name in [
'blackparrot',
'blackucode',
'blackunicore',
'earlgrey_nexysvideo', # ram size in ci machines
'lpddr',
'rsd', # Out of memory on CI machines
'simpleparsertestcache', # race condition
]])
_windows_black_list = _unix_black_list.union(set([name.lower() for name in [
'ariane', # Uses shell script with make command
'earlgrey_verilator_01_05_21', # lowmem is unsupported
'unitpython', # Python is unsupported
'verilator', # Stack overflow with clang due to expression evaluation
]]))
_msys2_black_list = _unix_black_list.union(set([name.lower() for name in [
'earlgrey_verilator_01_05_21', # lowmem is unsupported
]]))
def is_blacklisted(name):
if platform.system() == 'Windows':
blacklist = _msys2_black_list if 'MSYSTEM' in os.environ else _windows_black_list
else:
blacklist = _unix_black_list
return name.lower() in blacklist
|
Exclude a few failing tests
|
Exclude a few failing tests
Rsd - failing on linux due to running out of memory
Verilator - failing on Windows clang due to stack overflow caused by
expression evaluation
|
Python
|
apache-2.0
|
chipsalliance/Surelog,alainmarcel/Surelog,alainmarcel/Surelog,chipsalliance/Surelog,alainmarcel/Surelog,chipsalliance/Surelog,alainmarcel/Surelog,chipsalliance/Surelog
|
---
+++
@@ -8,15 +8,17 @@
'blackparrot',
'blackucode',
'blackunicore',
- 'earlgrey_nexysvideo', # ram size in ci machines
+ 'earlgrey_nexysvideo', # ram size in ci machines
'lpddr',
- 'simpleparsertestcache', # race condition
+ 'rsd', # Out of memory on CI machines
+ 'simpleparsertestcache', # race condition
]])
_windows_black_list = _unix_black_list.union(set([name.lower() for name in [
'ariane', # Uses shell script with make command
'earlgrey_verilator_01_05_21', # lowmem is unsupported
'unitpython', # Python is unsupported
+ 'verilator', # Stack overflow with clang due to expression evaluation
]]))
_msys2_black_list = _unix_black_list.union(set([name.lower() for name in [
|
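Editor's note: a usage sketch of the updated blacklist (the import path is assumed from the file location); matching is case-insensitive, so any casing of a listed name is excluded:

from blacklisted import is_blacklisted  # assumed import; the module lives in scripts/

is_blacklisted('RSD')        # True on Linux/macOS after this change
is_blacklisted('verilator')  # True only on native Windows (MSYSTEM unset)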
77fc06c0ee8ca2c8669ca1cd7f45babb21d75ba5
|
opps/__init__.py
|
opps/__init__.py
|
import pkg_resources
pkg_resources.declare_namespace(__name__)
|
# See http://peak.telecommunity.com/DevCenter/setuptools#namespace-packages
try:
__import__('pkg_resources').declare_namespace(__name__)
except ImportError:
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
|
Remove opps package on opps-polls
|
Remove opps package on opps-polls
|
Python
|
mit
|
opps/opps-polls,opps/opps-polls
|
---
+++
@@ -1,3 +1,6 @@
-import pkg_resources
-
-pkg_resources.declare_namespace(__name__)
+# See http://peak.telecommunity.com/DevCenter/setuptools#namespace-packages
+try:
+ __import__('pkg_resources').declare_namespace(__name__)
+except ImportError:
+ from pkgutil import extend_path
+ __path__ = extend_path(__path__, __name__)
|
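Editor's note: the try/except stanza declares `opps` as a namespace package whether or not setuptools is installed, so several distributions can contribute subpackages. A sketch of the intended layout (paths illustrative):

# opps-polls/opps/__init__.py   <- this file; no other code may live here
# opps-polls/opps/polls/...     <- the actual package shipped by this repo
# opps-core/opps/__init__.py    <- the same stanza in any sibling distribution
# Both stay importable as opps.* because each __init__ extends __path__.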
fb427a72bf3d8fb3802689bf89a9d71dee47108c
|
semillas_backend/users/serializers.py
|
semillas_backend/users/serializers.py
|
#from phonenumber_field.serializerfields import PhoneNumberField
from rest_framework import serializers
from drf_extra_fields.geo_fields import PointField
from .models import User
class UserSerializer(serializers.ModelSerializer):
""" Usage:
from rest_framework.renderers import JSONRenderer
from semillas_backend.users.serializers import UserSerializer
JSONRenderer().render(UserSerializer(user_instance).data)
"""
location = PointField()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'email', 'phone', 'faircoin_address', 'telegram_id')
class UpdateUserSerializer(serializers.ModelSerializer):
name = serializers.CharField(required=False)
#phone = PhoneNumberField(required=False)
email = serializers.CharField(required=False)
picture = serializers.ImageField(required=False)
uuid = serializers.CharField(read_only=True)
class Meta:
model = User
fields = ('name', 'picture', 'phone', 'email', 'uuid', 'faircoin_address', 'telegram_id')
from wallet.serializers import WalletSerializer
class FullUserSerializer(UserSerializer):
wallet = WalletSerializer()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'wallet', 'email', 'phone')
|
#from phonenumber_field.serializerfields import PhoneNumberField
from rest_framework import serializers
from drf_extra_fields.geo_fields import PointField
from .models import User
class UserSerializer(serializers.ModelSerializer):
""" Usage:
from rest_framework.renderers import JSONRenderer
from semillas_backend.users.serializers import UserSerializer
JSONRenderer().render(UserSerializer(user_instance).data)
"""
location = PointField()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'email', 'phone', 'faircoin_address', 'telegram_id')
class UpdateUserSerializer(serializers.ModelSerializer):
name = serializers.CharField(required=False)
#phone = PhoneNumberField(required=False)
email = serializers.CharField(required=False)
picture = serializers.ImageField(required=False)
uuid = serializers.CharField(read_only=True)
location = PointField(required=False)
class Meta:
model = User
fields = ('name', 'picture', 'phone', 'email', 'uuid', 'faircoin_address', 'telegram_id', 'location')
from wallet.serializers import WalletSerializer
class FullUserSerializer(UserSerializer):
wallet = WalletSerializer()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'wallet', 'email', 'phone')
|
Add location to user profile post
|
Add location to user profile post
|
Python
|
mit
|
Semillas/semillas_backend,Semillas/semillas_backend,Semillas/semillas_backend,Semillas/semillas_platform,Semillas/semillas_backend,Semillas/semillas_platform,Semillas/semillas_platform,Semillas/semillas_platform
|
---
+++
@@ -25,10 +25,11 @@
email = serializers.CharField(required=False)
picture = serializers.ImageField(required=False)
uuid = serializers.CharField(read_only=True)
+ location = PointField(required=False)
class Meta:
model = User
- fields = ('name', 'picture', 'phone', 'email', 'uuid', 'faircoin_address', 'telegram_id')
+ fields = ('name', 'picture', 'phone', 'email', 'uuid', 'faircoin_address', 'telegram_id', 'location')
from wallet.serializers import WalletSerializer
class FullUserSerializer(UserSerializer):
|
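Editor's note: a minimal sketch of how the newly writable location behaves, assuming drf_extra_fields' PointField keeps its usual latitude/longitude dict form (all values illustrative):

payload = {
    'name': 'Ada',
    'location': {'latitude': 51.05, 'longitude': 3.72},  # PointField dict format
}
serializer = UpdateUserSerializer(user, data=payload, partial=True)
if serializer.is_valid():
    serializer.save()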
54f27f507820b5c9a7e832c46eb2a5ba3d918a2f
|
scripts/task/solver.py
|
scripts/task/solver.py
|
import numpy as np
from eigen3 import toEigen
import rbdyn as rbd
class WLSSolver(object):
def __init__(self):
self.tasks = []
def addTask(self, task, weight):
t = [task, weight]
self.tasks.append(t)
return t
def rmTask(self, taskDef):
self.tasks.remove(taskDef)
def solve(self, mb, mbc):
err = np.mat(np.empty((0, 1)))
jac = np.mat(np.empty((0, mb.nrDof())))
for t in self.tasks:
t[0].update(mb, mbc)
err = np.vstack((err, t[1]*t[0].error()))
jac = np.vstack((jac, t[1]*t[0].jacobian()))
#alpha1 = np.linalg.lstsq(jac, err)[0]
alpha2 = np.linalg.pinv(jac)*err
mbc.alpha = rbd.vectorToDof(mb, toEigen(alpha2))
|
import numpy as np
from eigen3 import toEigenX
import rbdyn as rbd
class WLSSolver(object):
def __init__(self):
self.tasks = []
def addTask(self, task, weight):
t = [task, weight]
self.tasks.append(t)
return t
def rmTask(self, taskDef):
self.tasks.remove(taskDef)
def solve(self, mb, mbc):
err = np.mat(np.empty((0, 1)))
jac = np.mat(np.empty((0, mb.nrDof())))
for t in self.tasks:
t[0].update(mb, mbc)
err = np.vstack((err, t[1]*t[0].error()))
jac = np.vstack((jac, t[1]*t[0].jacobian()))
#alpha1 = np.linalg.lstsq(jac, err)[0]
alpha2 = np.linalg.pinv(jac)*err
mbc.alpha = rbd.vectorToDof(mb, toEigenX(alpha2))
|
Fix a bad eigen vector cast.
|
Fix a bad eigen vector cast.
|
Python
|
bsd-2-clause
|
jrl-umi3218/RBDyn,gergondet/RBDyn,gergondet/RBDyn,gergondet/RBDyn,jrl-umi3218/RBDyn,jrl-umi3218/RBDyn,jrl-umi3218/RBDyn,gergondet/RBDyn,gergondet/RBDyn
|
---
+++
@@ -1,6 +1,6 @@
import numpy as np
-from eigen3 import toEigen
+from eigen3 import toEigenX
import rbdyn as rbd
class WLSSolver(object):
@@ -31,5 +31,5 @@
#alpha1 = np.linalg.lstsq(jac, err)[0]
alpha2 = np.linalg.pinv(jac)*err
- mbc.alpha = rbd.vectorToDof(mb, toEigen(alpha2))
+ mbc.alpha = rbd.vectorToDof(mb, toEigenX(alpha2))
|
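Editor's note: np.linalg.pinv yields the least-squares solution, matching the commented-out lstsq call in the solver; a toy check (values illustrative):

import numpy as np

jac = np.mat([[1., 0.], [0., 2.], [1., 1.]])
err = np.mat([[1.], [2.], [2.]])
# Pseudo-inverse and lstsq agree for a full-column-rank Jacobian.
assert np.allclose(np.linalg.pinv(jac) * err,
                   np.linalg.lstsq(jac, err)[0])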
e913ed7d5643c4acc85ed7ec82a70c235053360f
|
tests/test_token.py
|
tests/test_token.py
|
"""
NOTE: There are no tests that check for data validation at this point since
the interpreter doesn't have any data validation as a feature.
"""
import pytest
from calc import INTEGER, Token
def test_no_defaults():
# There's no valid defaults at the moment.
with pytest.raises(TypeError):
Token()
def test_known_type():
# There's no valid defaults at the moment.
token = Token(type=INTEGER, value=2)
assert token.value == 2
assert token.type == INTEGER
def test_str_non_string_value():
token = Token(type=INTEGER, value=2)
expected_result = "Token(type=INTEGER, value=2)"
assert str(token) == expected_result
|
import pytest
from calc import INTEGER, Token
def test_token_cannot_be_instantiated_with_no_defaults():
"""
Test that there are currently no valid defaults for a :class:`Token`. More
simply, ensure that a :class:`Token` cannot be instantiated without any
arguments.
"""
with pytest.raises(TypeError):
Token()
def test_token_can_be_instantiated_with_known_values():
"""
Test that a :class:`Token` with known good initialization values can be
instantiated.
"""
token = Token(type=INTEGER, value=2)
assert token.value == 2
assert token.type == INTEGER
def test_token_can_be_converted_to_str():
"""
Test that a :class:`Token` can be converted into a string, even when
instantiated with a non-string value such as an integer.
"""
token = Token(type=INTEGER, value=2)
expected_result = "Token(type=INTEGER, value=2)"
assert str(token) == expected_result
|
Improve documentation in token tests. Rename functions to be more clear
|
Improve documentation in token tests. Rename functions to be more clear
|
Python
|
isc
|
bike-barn/red-green-refactor
|
---
+++
@@ -1,27 +1,33 @@
-"""
-
-NOTE: There are no tests that check for data validation at this point since
-the interpreter doesn't have any data validation as a feature.
-"""
import pytest
from calc import INTEGER, Token
-def test_no_defaults():
- # There's no valid defaults at the moment.
+def test_token_cannot_be_instantiated_with_no_defaults():
+ """
+ Test that there are currently no valid defaults for a :class:`Token`. More
+ simply, ensure that a :class:`Token` cannot be instantiated without any
+ arguments.
+ """
with pytest.raises(TypeError):
Token()
-def test_known_type():
- # There's no valid defaults at the moment.
+def test_token_can_be_instantiated_with_known_values():
+ """
+ Test that a :class:`Token` with known good initialization values can be
+ instantiated.
+ """
token = Token(type=INTEGER, value=2)
assert token.value == 2
assert token.type == INTEGER
-def test_str_non_string_value():
+def test_token_can_be_converted_to_str():
+ """
+ Test that a :class:`Token` can be converted into a string, even when
+ instantiated with a non-string value such as an integer.
+ """
token = Token(type=INTEGER, value=2)
expected_result = "Token(type=INTEGER, value=2)"
assert str(token) == expected_result
|
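Editor's note: the calc module under test is not shown; a minimal Token that would satisfy these tests might look like the sketch below (an assumption, not the project's actual code):

INTEGER = 'INTEGER'

class Token:
    def __init__(self, type, value):  # no defaults, so Token() raises TypeError
        self.type = type
        self.value = value

    def __str__(self):
        return 'Token(type={}, value={})'.format(self.type, self.value)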
9b043b0bd31f35e140831f61a4484513922f8712
|
stop_words/__init__.py
|
stop_words/__init__.py
|
import os
__VERSION__ = (2014, 5, 26)
CURRENT_DIR = os.path.dirname(os.path.realpath(__file__))
STOP_WORDS_DIR = os.path.join(CURRENT_DIR, 'stop-words/')
def get_version():
"""
:rtype: basestring
"""
return ".".join(str(v) for v in __VERSION__)
def get_stop_words(language):
"""
:type language: basestring
:rtype: list
"""
with open('{0}{1}.txt'.format(STOP_WORDS_DIR, language)) as lang_file:
lines = lang_file.readlines()
return [str(line.strip()).decode('utf-8') for line in lines]
|
import os
__VERSION__ = (2014, 5, 26)
CURRENT_DIR = os.path.dirname(os.path.realpath(__file__))
STOP_WORDS_DIR = os.path.join(CURRENT_DIR, 'stop-words/')
LANGUAGE_MAPPING = {
'ar': 'arabic',
'da': 'danish',
'nl': 'dutch',
'en': 'english',
'fi': 'finnish',
'fr': 'french',
'de': 'german',
'hu': 'hungarian',
'it': 'italian',
'nb': 'norwegian',
'pt': 'portuguese',
'ro': 'romanian',
'ru': 'russian',
'es': 'spanish',
'sv': 'swedish',
'tr': 'turkish',
}
AVAILABLE_LANGUAGES = LANGUAGE_MAPPING.values()
def get_version():
"""
:rtype: basestring
"""
return ".".join(str(v) for v in __VERSION__)
class StopWordError(Exception):
pass
def get_stop_words(language):
"""
:type language: basestring
:rtype: list
"""
try:
language = LANGUAGE_MAPPING[language]
except KeyError:
pass
if language not in AVAILABLE_LANGUAGES:
        raise StopWordError('{0} language is unavailable'.format(language))
with open('{0}{1}.txt'.format(STOP_WORDS_DIR, language)) as lang_file:
lines = lang_file.readlines()
return [str(line.strip()).decode('utf-8') for line in lines]
|
Implement language code mapping and check availability of the language
|
Implement language code mapping and check availability of the language
|
Python
|
bsd-3-clause
|
Alir3z4/python-stop-words
|
---
+++
@@ -4,6 +4,26 @@
CURRENT_DIR = os.path.dirname(os.path.realpath(__file__))
STOP_WORDS_DIR = os.path.join(CURRENT_DIR, 'stop-words/')
+LANGUAGE_MAPPING = {
+ 'ar': 'arabic',
+ 'da': 'danish',
+ 'nl': 'dutch',
+ 'en': 'english',
+ 'fi': 'finnish',
+ 'fr': 'french',
+ 'de': 'german',
+ 'hu': 'hungarian',
+ 'it': 'italian',
+ 'nb': 'norwegian',
+ 'pt': 'portuguese',
+ 'ro': 'romanian',
+ 'ru': 'russian',
+ 'es': 'spanish',
+ 'sv': 'swedish',
+ 'tr': 'turkish',
+}
+
+AVAILABLE_LANGUAGES = LANGUAGE_MAPPING.values()
def get_version():
"""
@@ -12,12 +32,24 @@
return ".".join(str(v) for v in __VERSION__)
+class StopWordError(Exception):
+ pass
+
+
def get_stop_words(language):
"""
:type language: basestring
:rtype: list
"""
+ try:
+ language = LANGUAGE_MAPPING[language]
+ except KeyError:
+ pass
+
+ if language not in AVAILABLE_LANGUAGES:
+        raise StopWordError('{0} language is unavailable'.format(language))
+
with open('{0}{1}.txt'.format(STOP_WORDS_DIR, language)) as lang_file:
lines = lang_file.readlines()
return [str(line.strip()).decode('utf-8') for line in lines]
|
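Editor's note: usage after the change (language names assumed to match the bundled .txt files):

from stop_words import get_stop_words, StopWordError

get_stop_words('en')       # mapped to 'english' internally
get_stop_words('english')  # full names are still accepted
try:
    get_stop_words('klingon')
except StopWordError:
    pass                   # unavailable languages now fail loudly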
c30bd67d4fc1773ce8b0752d8e4a7cc00e2a7ae4
|
app/forms.py
|
app/forms.py
|
from flask.ext.wtf import Form
from wtforms import StringField, BooleanField
from wtforms.validators import DataRequired
class LoginForm(Form):
openid = StringField('openid', validators=[DataRequired()])
remember_me = BooleanField('remember_me', default=False)
|
from flask.ext.wtf import Form
from wtforms import StringField, BooleanField, TextAreaField
from wtforms.validators import DataRequired, Length
class LoginForm(Form):
openid = StringField('openid', validators=[DataRequired()])
remember_me = BooleanField('remember_me', default=False)
class EditForm(Form):
    nickname = StringField('nickname', validators=[DataRequired()])
about_me = TextAreaField('about_me', validators=[Length(min=0, max=140)])
|
Define the edit profile form
|
Define the edit profile form
|
Python
|
mit
|
ddayguerrero/blogme,ddayguerrero/blogme,ddayguerrero/blogme,ddayguerrero/blogme,ddayguerrero/blogme
|
---
+++
@@ -1,8 +1,13 @@
from flask.ext.wtf import Form
-from wtforms import StringField, BooleanField
-from wtforms.validators import DataRequired
+from wtforms import StringField, BooleanField, TextAreaField
+from wtforms.validators import DataRequired, Length
class LoginForm(Form):
openid = StringField('openid', validators=[DataRequired()])
remember_me = BooleanField('remember_me', default=False)
+
+
+class EditForm(Form):
+    nickname = StringField('nickname', validators=[DataRequired()])
+ about_me = TextAreaField('about_me', validators=[Length(min=0, max=140)])
|
1d0dd7856d1c1e80f24a94af4fc323530383b009
|
readthedocs/gold/models.py
|
readthedocs/gold/models.py
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
LEVEL_CHOICES = (
('v1-org-patron', '$5'),
('v1-org-supporter', '$10'),
)
class GoldUser(models.Model):
pub_date = models.DateTimeField(_('Publication date'), auto_now_add=True)
modified_date = models.DateTimeField(_('Modified date'), auto_now=True)
user = models.ForeignKey('auth.User', verbose_name=_('User'), unique=True, related_name='gold')
level = models.CharField(_('Level'), max_length=20, choices=LEVEL_CHOICES, default='supporter')
last_4_digits = models.CharField(max_length=4)
stripe_id = models.CharField(max_length=255)
subscribed = models.BooleanField(default=False)
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
LEVEL_CHOICES = (
('v1-org-5', '$5/mo'),
('v1-org-10', '$10/mo'),
('v1-org-15', '$15/mo'),
('v1-org-20', '$20/mo'),
('v1-org-50', '$50/mo'),
('v1-org-100', '$100/mo'),
)
class GoldUser(models.Model):
pub_date = models.DateTimeField(_('Publication date'), auto_now_add=True)
modified_date = models.DateTimeField(_('Modified date'), auto_now=True)
user = models.ForeignKey('auth.User', verbose_name=_('User'), unique=True, related_name='gold')
level = models.CharField(_('Level'), max_length=20, choices=LEVEL_CHOICES, default='supporter')
last_4_digits = models.CharField(max_length=4)
stripe_id = models.CharField(max_length=255)
subscribed = models.BooleanField(default=False)
|
Update plan names and levels
|
Update plan names and levels
|
Python
|
mit
|
hach-que/readthedocs.org,istresearch/readthedocs.org,VishvajitP/readthedocs.org,laplaceliu/readthedocs.org,stevepiercy/readthedocs.org,VishvajitP/readthedocs.org,davidfischer/readthedocs.org,jerel/readthedocs.org,clarkperkins/readthedocs.org,soulshake/readthedocs.org,sunnyzwh/readthedocs.org,michaelmcandrew/readthedocs.org,asampat3090/readthedocs.org,SteveViss/readthedocs.org,hach-que/readthedocs.org,atsuyim/readthedocs.org,michaelmcandrew/readthedocs.org,nikolas/readthedocs.org,clarkperkins/readthedocs.org,laplaceliu/readthedocs.org,dirn/readthedocs.org,tddv/readthedocs.org,sunnyzwh/readthedocs.org,istresearch/readthedocs.org,rtfd/readthedocs.org,techtonik/readthedocs.org,soulshake/readthedocs.org,kenwang76/readthedocs.org,laplaceliu/readthedocs.org,sid-kap/readthedocs.org,raven47git/readthedocs.org,d0ugal/readthedocs.org,laplaceliu/readthedocs.org,singingwolfboy/readthedocs.org,CedarLogic/readthedocs.org,attakei/readthedocs-oauth,CedarLogic/readthedocs.org,emawind84/readthedocs.org,nikolas/readthedocs.org,singingwolfboy/readthedocs.org,sid-kap/readthedocs.org,pombredanne/readthedocs.org,sunnyzwh/readthedocs.org,safwanrahman/readthedocs.org,sunnyzwh/readthedocs.org,royalwang/readthedocs.org,SteveViss/readthedocs.org,royalwang/readthedocs.org,kenshinthebattosai/readthedocs.org,SteveViss/readthedocs.org,espdev/readthedocs.org,fujita-shintaro/readthedocs.org,safwanrahman/readthedocs.org,davidfischer/readthedocs.org,wijerasa/readthedocs.org,titiushko/readthedocs.org,wanghaven/readthedocs.org,takluyver/readthedocs.org,wanghaven/readthedocs.org,kenshinthebattosai/readthedocs.org,titiushko/readthedocs.org,Tazer/readthedocs.org,agjohnson/readthedocs.org,raven47git/readthedocs.org,titiushko/readthedocs.org,dirn/readthedocs.org,istresearch/readthedocs.org,michaelmcandrew/readthedocs.org,LukasBoersma/readthedocs.org,CedarLogic/readthedocs.org,espdev/readthedocs.org,GovReady/readthedocs.org,takluyver/readthedocs.org,kdkeyser/readthedocs.org,clarkperkins/readthedocs.org,asampat3090/readthedocs.org,espdev/readthedocs.org,wijerasa/readthedocs.org,gjtorikian/readthedocs.org,CedarLogic/readthedocs.org,asampat3090/readthedocs.org,jerel/readthedocs.org,kdkeyser/readthedocs.org,nikolas/readthedocs.org,SteveViss/readthedocs.org,gjtorikian/readthedocs.org,attakei/readthedocs-oauth,dirn/readthedocs.org,agjohnson/readthedocs.org,atsuyim/readthedocs.org,jerel/readthedocs.org,mhils/readthedocs.org,wijerasa/readthedocs.org,pombredanne/readthedocs.org,dirn/readthedocs.org,royalwang/readthedocs.org,gjtorikian/readthedocs.org,wijerasa/readthedocs.org,agjohnson/readthedocs.org,davidfischer/readthedocs.org,VishvajitP/readthedocs.org,singingwolfboy/readthedocs.org,fujita-shintaro/readthedocs.org,emawind84/readthedocs.org,soulshake/readthedocs.org,safwanrahman/readthedocs.org,kdkeyser/readthedocs.org,asampat3090/readthedocs.org,attakei/readthedocs-oauth,nikolas/readthedocs.org,GovReady/readthedocs.org,sid-kap/readthedocs.org,stevepiercy/readthedocs.org,d0ugal/readthedocs.org,soulshake/readthedocs.org,fujita-shintaro/readthedocs.org,titiushko/readthedocs.org,mhils/readthedocs.org,sils1297/readthedocs.org,LukasBoersma/readthedocs.org,sils1297/readthedocs.org,emawind84/readthedocs.org,d0ugal/readthedocs.org,kenshinthebattosai/readthedocs.org,espdev/readthedocs.org,Tazer/readthedocs.org,michaelmcandrew/readthedocs.org,cgourlay/readthedocs.org,kenwang76/readthedocs.org,stevepiercy/readthedocs.org,techtonik/readthedocs.org,VishvajitP/readthedocs.org,techtonik/readthedocs.org,rtfd/readthedocs.org,GovReady/readthedocs.org,d0ugal/readthedocs.org,gjtorikian/readthedocs.org,atsuyim/readthedocs.org,royalwang/readthedocs.org,takluyver/readthedocs.org,emawind84/readthedocs.org,tddv/readthedocs.org,istresearch/readthedocs.org,hach-que/readthedocs.org,espdev/readthedocs.org,raven47git/readthedocs.org,rtfd/readthedocs.org,wanghaven/readthedocs.org,LukasBoersma/readthedocs.org,GovReady/readthedocs.org,Tazer/readthedocs.org,rtfd/readthedocs.org,mhils/readthedocs.org,atsuyim/readthedocs.org,cgourlay/readthedocs.org,attakei/readthedocs-oauth,sils1297/readthedocs.org,singingwolfboy/readthedocs.org,tddv/readthedocs.org,techtonik/readthedocs.org,wanghaven/readthedocs.org,takluyver/readthedocs.org,raven47git/readthedocs.org,pombredanne/readthedocs.org,cgourlay/readthedocs.org,LukasBoersma/readthedocs.org,kdkeyser/readthedocs.org,davidfischer/readthedocs.org,jerel/readthedocs.org,stevepiercy/readthedocs.org,hach-que/readthedocs.org,sid-kap/readthedocs.org,mhils/readthedocs.org,sils1297/readthedocs.org,cgourlay/readthedocs.org,kenwang76/readthedocs.org,kenshinthebattosai/readthedocs.org,safwanrahman/readthedocs.org,agjohnson/readthedocs.org,fujita-shintaro/readthedocs.org,Tazer/readthedocs.org,clarkperkins/readthedocs.org,kenwang76/readthedocs.org
|
---
+++
@@ -2,8 +2,12 @@
from django.utils.translation import ugettext_lazy as _
LEVEL_CHOICES = (
- ('v1-org-patron', '$5'),
- ('v1-org-supporter', '$10'),
+ ('v1-org-5', '$5/mo'),
+ ('v1-org-10', '$10/mo'),
+ ('v1-org-15', '$15/mo'),
+ ('v1-org-20', '$20/mo'),
+ ('v1-org-50', '$50/mo'),
+ ('v1-org-100', '$100/mo'),
)
|
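Editor's note: Django's choices machinery exposes the new labels directly; given a subscribed user (lookup illustrative):

gold = GoldUser.objects.get(user=some_user)  # some_user is a placeholder
gold.level                 # e.g. 'v1-org-10'
gold.get_level_display()   # '$10/mo'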
e8aca80abcf8c309c13360c386b9505a595e1998
|
app/oauth.py
|
app/oauth.py
|
# -*- coding: utf-8 -*-
import logging
import httplib2
import json
import time
import random
from apiclient import errors
from apiclient.discovery import build
from oauth2client.client import SignedJwtAssertionCredentials
class OAuth():
__services = dict()
@staticmethod
def getCredentials(email, scopes, client_secret, client_id):
key = file(client_secret, 'rb')
privateKey = key.read()
key.close()
credentials = SignedJwtAssertionCredentials(client_id, privateKey, scope=scopes, sub=email)
http = httplib2.Http()
http = credentials.authorize(http)
credentials.refresh(http)
return credentials, http
@staticmethod
def getService(email, api, version, scopes, client_secret, client_id, discoveryUrl=None):
"""
Return the service with constant credential
@param email: email to execute the action
@return: the drive service
"""
if not email.strip():
raise Exception("OAuth.getService : Email for service is missing")
key = email + "/" + api + "/" + version
if key not in OAuth.__services:
credentials, http = OAuth.getCredentials(email, scopes, client_secret, client_id)
if discoveryUrl:
OAuth.__services[key] = build(api, version, http=http, discoveryServiceUrl=discoveryUrl,
cache_discovery=False, cache=None)
else:
OAuth.__services[key] = build(api, version, http=http,
cache_discovery=False, cache=None)
logging.info("OAuth.getService : Service request by - " + email)
return OAuth.__services[key]
|
# -*- coding: utf-8 -*-
import logging
import httplib2
import json
import time
import random
from apiclient import errors
from apiclient.discovery import build
from oauth2client.client import SignedJwtAssertionCredentials
class OAuth():
__services = dict()
@staticmethod
def getCredentials(email, scopes, client_secret, client_id):
key = file(client_secret, 'rb')
privateKey = key.read()
key.close()
credentials = SignedJwtAssertionCredentials(client_id, privateKey, scope=scopes, sub=email)
http = httplib2.Http()
http = credentials.authorize(http)
credentials.refresh(http)
return credentials, http
@staticmethod
def getService(email, api, version, scopes, client_secret, client_id, discoveryUrl=None):
"""
Return the service with constant credential
@param email: email to execute the action
@return: the drive service
"""
if not email.strip():
raise Exception("OAuth.getService : Email for service is missing")
key = email + "/" + api + "/" + version
if key not in OAuth.__services:
credentials, http = OAuth.getCredentials(email, scopes, client_secret, client_id)
if discoveryUrl:
OAuth.__services[key] = build(api, version, http=http, discoveryServiceUrl=discoveryUrl)
else:
OAuth.__services[key] = build(api, version, http=http)
logging.info("OAuth.getService : Service request by - " + email)
return OAuth.__services[key]
|
Revert "Do not cache discovery"
|
Revert "Do not cache discovery"
This reverts commit fcd37e8228d66230008963008a24e9a8afc669e7.
|
Python
|
mit
|
lumapps/lumRest
|
---
+++
@@ -38,11 +38,9 @@
credentials, http = OAuth.getCredentials(email, scopes, client_secret, client_id)
if discoveryUrl:
- OAuth.__services[key] = build(api, version, http=http, discoveryServiceUrl=discoveryUrl,
- cache_discovery=False, cache=None)
+ OAuth.__services[key] = build(api, version, http=http, discoveryServiceUrl=discoveryUrl)
else:
- OAuth.__services[key] = build(api, version, http=http,
- cache_discovery=False, cache=None)
+ OAuth.__services[key] = build(api, version, http=http)
logging.info("OAuth.getService : Service request by - " + email)
return OAuth.__services[key]
|
eb2f19a95175d68c5ac5345d38c8ce8db3b3ba9c
|
packs/linux/actions/get_open_ports.py
|
packs/linux/actions/get_open_ports.py
|
import nmap
from st2actions.runners.pythonrunner import Action
"""
Note: This action requires nmap binary to be available and needs to run as root.
"""
class PortScanner(Action):
def run(self, host):
result = []
port_details = {}
ps = nmap.PortScanner()
scan_res = ps.scan(host, arguments='--min-parallelism 100 -sT -sU -sZ')
for target_host in ps.all_hosts():
if target_host in ps.all_hosts():
for comm in ps[target_host].all_protocols():
if comm in ['tcp','udp','ip','sctp']:
ports = ps[target_host][comm].keys()
ports.sort()
for port in ports:
port_details = {port:{'state':ps[host][comm][port]['state'], 'service':ps[host][comm][port]['name'], 'protocol':comm}}
result.append(port_details)
return result
if __name__ == "__main__":
ps = PortScanner()
ps.run()
|
import nmap
from st2actions.runners.pythonrunner import Action
"""
Note: This action requires nmap binary to be available and needs to run as root.
"""
class PortScanner(Action):
def run(self, host):
result = []
port_details = {}
ps = nmap.PortScanner()
scan_res = ps.scan(host, arguments='--min-parallelism 100 -sT -sU -sZ')
for target_host in ps.all_hosts():
if target_host in ps.all_hosts():
for comm in ps[target_host].all_protocols():
if comm in ['tcp','udp','ip','sctp']:
ports = ps[target_host][comm].keys()
ports.sort()
for port in ports:
port_details = {port:{'state':ps[host][comm][port]['state'], 'service':ps[host][comm][port]['name'], 'protocol':comm}}
result.append(port_details)
return result
|
Remove unused main entry point.
|
Remove unused main entry point.
|
Python
|
apache-2.0
|
pinterb/st2contrib,psychopenguin/st2contrib,tonybaloney/st2contrib,jtopjian/st2contrib,StackStorm/st2contrib,pearsontechnology/st2contrib,psychopenguin/st2contrib,lmEshoo/st2contrib,lmEshoo/st2contrib,meirwah/st2contrib,pidah/st2contrib,armab/st2contrib,tonybaloney/st2contrib,pearsontechnology/st2contrib,digideskio/st2contrib,armab/st2contrib,StackStorm/st2contrib,pidah/st2contrib,dennybaa/st2contrib,dennybaa/st2contrib,tonybaloney/st2contrib,digideskio/st2contrib,pearsontechnology/st2contrib,jtopjian/st2contrib,armab/st2contrib,meirwah/st2contrib,pidah/st2contrib,pearsontechnology/st2contrib,pinterb/st2contrib,StackStorm/st2contrib
|
---
+++
@@ -22,9 +22,5 @@
for port in ports:
port_details = {port:{'state':ps[host][comm][port]['state'], 'service':ps[host][comm][port]['name'], 'protocol':comm}}
result.append(port_details)
+
return result
-
-if __name__ == "__main__":
- ps = PortScanner()
- ps.run()
-
|
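Editor's note: a sketch of the underlying python-nmap calls the action wraps (requires the nmap binary; the UDP/SCTP scan types need root):

import nmap

ps = nmap.PortScanner()
ps.scan('127.0.0.1', arguments='--min-parallelism 100 -sT -sU -sZ')
for host in ps.all_hosts():
    for proto in ps[host].all_protocols():
        for port in sorted(ps[host][proto].keys()):
            print(port, ps[host][proto][port]['state'])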
0024b8b921d788a0539bc242bd1600c0da666bd6
|
panoptes/state_machine/states/core.py
|
panoptes/state_machine/states/core.py
|
import time
import transitions
from panoptes.utils.logger import has_logger
@has_logger
class PanState(transitions.State):
""" Base class for PANOPTES transitions """
def __init__(self, *args, **kwargs):
name = kwargs.get('name', self.__class__)
self.panoptes = kwargs.get('panoptes', None)
super().__init__(name=name, on_enter=['execute'])
self._sleep_delay = 3 # seconds
def main(self):
assert self.panoptes is not None
msg = "Must implement `main` method inside class {}. Exiting".format(self.name)
self.panoptes.logger.warning(msg)
return 'exit'
def sleep(self, seconds=None):
""" sleep for `seconds` or `_sleep_delay` seconds
This puts the state into a loop that is responsive to outside messages.
Args:
seconds(float): Seconds to sleep for, defaults to `_sleep_delay`.
"""
assert self.panoptes is not None
if seconds is None:
seconds = self._sleep_delay
if seconds > 10:
step_time = seconds / 4
while seconds:
seconds = seconds - step_time
# NOTE: DO SOMETHING RESPONSIVE HERE
time.sleep(step_time)
else:
time.sleep(seconds)
|
import time
import transitions
from panoptes.utils.logger import has_logger
@has_logger
class PanState(transitions.State):
""" Base class for PANOPTES transitions """
def __init__(self, *args, **kwargs):
name = kwargs.get('name', self.__class__)
self.panoptes = kwargs.get('panoptes', None)
super().__init__(name=name, on_enter=['execute'])
self._sleep_delay = 3 # seconds
def main(self):
assert self.panoptes is not None
msg = "Must implement `main` method inside class {}. Exiting".format(self.name)
self.panoptes.logger.warning(msg)
def sleep(self, seconds=None):
""" sleep for `seconds` or `_sleep_delay` seconds
This puts the state into a loop that is responsive to outside messages.
Args:
seconds(float): Seconds to sleep for, defaults to `_sleep_delay`.
"""
assert self.panoptes is not None
if seconds is None:
seconds = self._sleep_delay
if seconds > 10:
step_time = seconds / 4
while seconds:
seconds = seconds - step_time
# NOTE: DO SOMETHING RESPONSIVE HERE
time.sleep(step_time)
else:
time.sleep(seconds)
|
Remove return state from main `main`
|
Remove return state from main `main`
|
Python
|
mit
|
panoptes/POCS,panoptes/POCS,joshwalawender/POCS,AstroHuntsman/POCS,joshwalawender/POCS,joshwalawender/POCS,panoptes/POCS,panoptes/POCS,AstroHuntsman/POCS,AstroHuntsman/POCS,AstroHuntsman/POCS
|
---
+++
@@ -22,7 +22,6 @@
assert self.panoptes is not None
msg = "Must implement `main` method inside class {}. Exiting".format(self.name)
self.panoptes.logger.warning(msg)
- return 'exit'
def sleep(self, seconds=None):
""" sleep for `seconds` or `_sleep_delay` seconds
|
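Editor's note: concrete states are expected to override main(); a minimal sketch (state name illustrative):

class Observing(PanState):
    def main(self):
        self.panoptes.logger.info('observing...')
        self.sleep()  # defaults to _sleep_delay; sleeps over 10 s loop in steps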
b529ef8eb6985103e8b0a0cf81399a50a26c05f5
|
app/views.py
|
app/views.py
|
from app import mulungwishi_app as url
from flask import render_template
@url.route('/')
def index():
return render_template('index.html')
@url.route('/<query>')
def print_user_input(query):
if '=' in query:
query_container, query_value = query.split('=')
return 'Your query is {} which is equal to {}'.format(query_container, query_value)
return page_not_found(404)
@url.errorhandler(404)
def page_not_found(error):
return render_template('404.html'), 404
@url.errorhandler(403)
def page_forbidden(error):
return render_template('403.html', title='Page Forbidden'), 403
@url.errorhandler(500)
def page_server_error(error):
return render_template('500.html', title='Server Error'), 500
|
from app import mulungwishi_app as url
from flask import render_template
@url.route('/')
def index():
return render_template('index.html')
@url.route('/<query>')
def print_user_input(query):
if '=' in query:
query_container, query_value = query.split('=')
return 'Your query is {} which is equal to {}'.format(query_container, query_value)
return "You've entered an incorrect query. Please check and try again. Input : "+query
@url.errorhandler(404)
def page_not_found(error):
return render_template('404.html'), 404
@url.errorhandler(403)
def page_forbidden(error):
return render_template('403.html', title='Page Forbidden'), 403
@url.errorhandler(500)
def page_server_error(error):
return render_template('500.html', title='Server Error'), 500
|
Replace 404 redirection for incorrect query
|
Replace 404 redirection for incorrect query
|
Python
|
mit
|
admiral96/mulungwishi-webhook,engagespark/mulungwishi-webhook,engagespark/public-webhooks,engagespark/public-webhooks,admiral96/public-webhooks,admiral96/mulungwishi-webhook,engagespark/mulungwishi-webhook,admiral96/public-webhooks
|
---
+++
@@ -12,7 +12,7 @@
if '=' in query:
query_container, query_value = query.split('=')
return 'Your query is {} which is equal to {}'.format(query_container, query_value)
- return page_not_found(404)
+ return "You've entered an incorrect query. Please check and try again. Input : "+query
@url.errorhandler(404)
|
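Editor's note: behaviour after the change, exercised through Flask's test client (assuming mulungwishi_app is the Flask instance):

from app import mulungwishi_app

client = mulungwishi_app.test_client()
client.get('/colour=blue').data  # b'Your query is colour which is equal to blue'
client.get('/colour').data       # b"You've entered an incorrect query. ... Input : colour"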
e45b3d3a2428d3703260c25b4275359bf6786a37
|
launcher.py
|
launcher.py
|
from pract2d.game import gamemanager
if __name__ == '__main__':
game = gamemanager.GameManager()
game.run()
|
from pract2d.game import gamemanager
from pract2d.core import files
from platform import system
import os
if __name__ == '__main__':
try:
if system() == 'Windows' or not os.environ["PYSDL2_DLL_PATH"]:
os.environ["PYSDL2_DLL_PATH"] = files.get_path()
except KeyError:
pass
game = gamemanager.GameManager()
game.run()
|
Set the default sdl2 library locations.
|
Set the default sdl2 library locations.
|
Python
|
bsd-2-clause
|
mdsitton/pract2d
|
---
+++
@@ -1,5 +1,13 @@
from pract2d.game import gamemanager
+from pract2d.core import files
+from platform import system
+import os
if __name__ == '__main__':
+ try:
+ if system() == 'Windows' or not os.environ["PYSDL2_DLL_PATH"]:
+ os.environ["PYSDL2_DLL_PATH"] = files.get_path()
+ except KeyError:
+ pass
game = gamemanager.GameManager()
game.run()
|
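Editor's note: PySDL2 reads PYSDL2_DLL_PATH when the sdl2 module is first imported, which is why the launcher sets it before GameManager pulls SDL in. The manual equivalent (path illustrative):

import os
os.environ['PYSDL2_DLL_PATH'] = '/opt/sdl2/lib'
import sdl2  # must come after the variable is set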
5c90e74139f2735d0b4d62f524eb624780c48847
|
scripts/migration/projectorganizer/migrate_projectorganizer.py
|
scripts/migration/projectorganizer/migrate_projectorganizer.py
|
"""Fixes nodes without is_folder set.
This script must be run from the OSF root directory for the imports to work.
"""
from framework.mongo import database
def main():
database['node'].update({"is_folder": {'$exists': False}}, {'$set': {'is_folder': False}}, multi=True)
print('-----\nDone.')
if __name__ == '__main__':
main()
|
"""Fixes nodes without is_folder set.
This script must be run from the OSF root directory for the imports to work.
"""
from framework.mongo import database
def main():
database['node'].update({"is_folder": {'$exists': False}}, {'$set': {'is_folder': False}}, multi=True)
database['node'].update({"is_dashboard": {'$exists': False}}, {'$set': {'is_dashboard': False}}, multi=True)
database['node'].update({"expanded": {'$exists': False}}, {'$set': {'expanded': False}}, multi=True)
print('-----\nDone.')
if __name__ == '__main__':
main()
|
Update migration to ensure nodes have is_dashboard and expanded fields
|
Update migration to ensure nodes have is_dashboard and expanded fields
|
Python
|
apache-2.0
|
kushG/osf.io,cwisecarver/osf.io,cldershem/osf.io,caseyrygt/osf.io,ticklemepierce/osf.io,brandonPurvis/osf.io,danielneis/osf.io,rdhyee/osf.io,pattisdr/osf.io,Johnetordoff/osf.io,felliott/osf.io,samchrisinger/osf.io,Johnetordoff/osf.io,mfraezz/osf.io,DanielSBrown/osf.io,crcresearch/osf.io,mluke93/osf.io,cosenal/osf.io,sloria/osf.io,SSJohns/osf.io,cldershem/osf.io,GageGaskins/osf.io,aaxelb/osf.io,asanfilippo7/osf.io,monikagrabowska/osf.io,saradbowman/osf.io,amyshi188/osf.io,ZobairAlijan/osf.io,jinluyuan/osf.io,revanthkolli/osf.io,jmcarp/osf.io,samanehsan/osf.io,emetsger/osf.io,MerlinZhang/osf.io,brianjgeiger/osf.io,felliott/osf.io,abought/osf.io,DanielSBrown/osf.io,sbt9uc/osf.io,jeffreyliu3230/osf.io,GageGaskins/osf.io,himanshuo/osf.io,kwierman/osf.io,jeffreyliu3230/osf.io,crcresearch/osf.io,AndrewSallans/osf.io,chrisseto/osf.io,kushG/osf.io,SSJohns/osf.io,felliott/osf.io,HarryRybacki/osf.io,DanielSBrown/osf.io,brianjgeiger/osf.io,jmcarp/osf.io,asanfilippo7/osf.io,wearpants/osf.io,petermalcolm/osf.io,sloria/osf.io,rdhyee/osf.io,Ghalko/osf.io,cosenal/osf.io,billyhunt/osf.io,samanehsan/osf.io,TomBaxter/osf.io,ticklemepierce/osf.io,kch8qx/osf.io,ticklemepierce/osf.io,ticklemepierce/osf.io,ckc6cz/osf.io,wearpants/osf.io,TomHeatwole/osf.io,haoyuchen1992/osf.io,bdyetton/prettychart,njantrania/osf.io,fabianvf/osf.io,jolene-esposito/osf.io,zkraime/osf.io,haoyuchen1992/osf.io,dplorimer/osf,Nesiehr/osf.io,jnayak1/osf.io,doublebits/osf.io,caseyrygt/osf.io,lyndsysimon/osf.io,kch8qx/osf.io,arpitar/osf.io,mluo613/osf.io,adlius/osf.io,monikagrabowska/osf.io,ZobairAlijan/osf.io,caseyrygt/osf.io,aaxelb/osf.io,adlius/osf.io,doublebits/osf.io,himanshuo/osf.io,caneruguz/osf.io,kwierman/osf.io,laurenrevere/osf.io,AndrewSallans/osf.io,ZobairAlijan/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,billyhunt/osf.io,HalcyonChimera/osf.io,jolene-esposito/osf.io,HalcyonChimera/osf.io,amyshi188/osf.io,njantrania/osf.io,reinaH/osf.io,abought/osf.io,zkraime/osf.io,emetsger/osf.io,MerlinZhang/osf.io,jnayak1/osf.io,lamdnhan/osf.io,saradbowman/osf.io,lyndsysimon/osf.io,zachjanicki/osf.io,cwisecarver/osf.io,cldershem/osf.io,RomanZWang/osf.io,HarryRybacki/osf.io,reinaH/osf.io,zamattiac/osf.io,petermalcolm/osf.io,HarryRybacki/osf.io,jinluyuan/osf.io,haoyuchen1992/osf.io,pattisdr/osf.io,sloria/osf.io,petermalcolm/osf.io,kch8qx/osf.io,acshi/osf.io,caneruguz/osf.io,kch8qx/osf.io,jeffreyliu3230/osf.io,zamattiac/osf.io,GaryKriebel/osf.io,Nesiehr/osf.io,brandonPurvis/osf.io,doublebits/osf.io,brandonPurvis/osf.io,arpitar/osf.io,HalcyonChimera/osf.io,cwisecarver/osf.io,cldershem/osf.io,njantrania/osf.io,hmoco/osf.io,zachjanicki/osf.io,dplorimer/osf,zkraime/osf.io,cslzchen/osf.io,CenterForOpenScience/osf.io,cslzchen/osf.io,billyhunt/osf.io,leb2dg/osf.io,erinspace/osf.io,pattisdr/osf.io,monikagrabowska/osf.io,petermalcolm/osf.io,billyhunt/osf.io,njantrania/osf.io,sbt9uc/osf.io,RomanZWang/osf.io,brianjgeiger/osf.io,billyhunt/osf.io,doublebits/osf.io,cosenal/osf.io,monikagrabowska/osf.io,chrisseto/osf.io,fabianvf/osf.io,Nesiehr/osf.io,leb2dg/osf.io,leb2dg/osf.io,arpitar/osf.io,jmcarp/osf.io,alexschiller/osf.io,mluke93/osf.io,GageGaskins/osf.io,binoculars/osf.io,Ghalko/osf.io,caseyrygt/osf.io,reinaH/osf.io,KAsante95/osf.io,SSJohns/osf.io,doublebits/osf.io,mfraezz/osf.io,fabianvf/osf.io,alexschiller/osf.io,alexschiller/osf.io,binoculars/osf.io,barbour-em/osf.io,Johnetordoff/osf.io,kwierman/osf.io,cosenal/osf.io,rdhyee/osf.io,haoyuchen1992/osf.io,CenterForOpenScience/osf.io,CenterForOpenScience/osf.io,mluke93/osf.io,GageGaskins/osf.io,KAsante95/osf.io,mluo613/osf.io,revanthkolli/osf.io,baylee-d/osf.io,mattclark/osf.io,chrisseto/osf.io,mluo613/osf.io,sbt9uc/osf.io,monikagrabowska/osf.io,zachjanicki/osf.io,alexschiller/osf.io,rdhyee/osf.io,binoculars/osf.io,TomHeatwole/osf.io,chrisseto/osf.io,lyndsysimon/osf.io,HarryRybacki/osf.io,RomanZWang/osf.io,chennan47/osf.io,samchrisinger/osf.io,brandonPurvis/osf.io,himanshuo/osf.io,crcresearch/osf.io,TomBaxter/osf.io,cslzchen/osf.io,lamdnhan/osf.io,zamattiac/osf.io,GaryKriebel/osf.io,hmoco/osf.io,KAsante95/osf.io,erinspace/osf.io,ckc6cz/osf.io,dplorimer/osf,caseyrollins/osf.io,acshi/osf.io,bdyetton/prettychart,Nesiehr/osf.io,cwisecarver/osf.io,Ghalko/osf.io,abought/osf.io,jolene-esposito/osf.io,kushG/osf.io,laurenrevere/osf.io,himanshuo/osf.io,samanehsan/osf.io,zamattiac/osf.io,TomBaxter/osf.io,lamdnhan/osf.io,mluo613/osf.io,adlius/osf.io,SSJohns/osf.io,TomHeatwole/osf.io,baylee-d/osf.io,mattclark/osf.io,kch8qx/osf.io,acshi/osf.io,KAsante95/osf.io,emetsger/osf.io,wearpants/osf.io,brandonPurvis/osf.io,mluo613/osf.io,HalcyonChimera/osf.io,samchrisinger/osf.io,amyshi188/osf.io,caneruguz/osf.io,chennan47/osf.io,acshi/osf.io,hmoco/osf.io,lamdnhan/osf.io,MerlinZhang/osf.io,Johnetordoff/osf.io,aaxelb/osf.io,alexschiller/osf.io,aaxelb/osf.io,jmcarp/osf.io,zachjanicki/osf.io,GageGaskins/osf.io,hmoco/osf.io,GaryKriebel/osf.io,danielneis/osf.io,zkraime/osf.io,GaryKriebel/osf.io,samanehsan/osf.io,acshi/osf.io,chennan47/osf.io,sbt9uc/osf.io,icereval/osf.io,kwierman/osf.io,brianjgeiger/osf.io,asanfilippo7/osf.io,mluke93/osf.io,RomanZWang/osf.io,laurenrevere/osf.io,samchrisinger/osf.io,RomanZWang/osf.io,ZobairAlijan/osf.io,jolene-esposito/osf.io,barbour-em/osf.io,mfraezz/osf.io,fabianvf/osf.io,jinluyuan/osf.io,danielneis/osf.io,adlius/osf.io,icereval/osf.io,cslzchen/osf.io,jinluyuan/osf.io,revanthkolli/osf.io,MerlinZhang/osf.io,bdyetton/prettychart,kushG/osf.io,revanthkolli/osf.io,TomHeatwole/osf.io,ckc6cz/osf.io,icereval/osf.io,wearpants/osf.io,bdyetton/prettychart,reinaH/osf.io,caneruguz/osf.io,erinspace/osf.io,amyshi188/osf.io,jeffreyliu3230/osf.io,mattclark/osf.io,ckc6cz/osf.io,barbour-em/osf.io,caseyrollins/osf.io,baylee-d/osf.io,emetsger/osf.io,DanielSBrown/osf.io,jnayak1/osf.io,felliott/osf.io,dplorimer/osf,Ghalko/osf.io,arpitar/osf.io,lyndsysimon/osf.io,leb2dg/osf.io,KAsante95/osf.io,abought/osf.io,asanfilippo7/osf.io,caseyrollins/osf.io,danielneis/osf.io,jnayak1/osf.io
|
---
+++
@@ -9,6 +9,8 @@
def main():
database['node'].update({"is_folder": {'$exists': False}}, {'$set': {'is_folder': False}}, multi=True)
+ database['node'].update({"is_dashboard": {'$exists': False}}, {'$set': {'is_dashboard': False}}, multi=True)
+ database['node'].update({"expanded": {'$exists': False}}, {'$set': {'expanded': False}}, multi=True)
print('-----\nDone.')
|
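Editor's note: because {'$exists': False} matches only documents missing the field, re-running the script is idempotent. A quick post-migration check (same database handle assumed):

assert database['node'].find({'is_dashboard': {'$exists': False}}).count() == 0
assert database['node'].find({'expanded': {'$exists': False}}).count() == 0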
42465957542fe232739d58c2a46098d13fb9c995
|
tests/parser_db.py
|
tests/parser_db.py
|
from compiler import error, parse
class ParserDB():
"""A class for parsing with memoized parsers."""
parsers = {}
@classmethod
def _parse(cls, data, start='program'):
mock = error.LoggerMock()
try:
parser = cls.parsers[start]
except KeyError:
parser = cls.parsers[start] = parse.Parser(
logger=mock,
start=start
)
tree = parser.parse(data=data)
return tree
|
from compiler import error, parse
class ParserDB():
"""A class for parsing with memoized parsers."""
parsers = {}
@classmethod
def _parse(cls, data, start='program'):
mock = error.LoggerMock()
try:
parser = cls.parsers[start]
except KeyError:
parser = cls.parsers[start] = parse.Parser(
logger=mock,
start=start
)
# Clear previous logger state prior to parsing.
parser.logger.clear()
tree = parser.parse(data=data)
return tree
|
Clear previous logger state on each call.
|
ParserDB: Clear previous logger state on each call.
|
Python
|
mit
|
Renelvon/llama,dionyziz/llama,dionyziz/llama,Renelvon/llama
|
---
+++
@@ -18,6 +18,9 @@
start=start
)
+ # Clear previous logger state prior to parsing.
+ parser.logger.clear()
+
tree = parser.parse(data=data)
return tree
|
c01cef9340a3d55884fe38b60b209dbad5f97ea6
|
nova/db/sqlalchemy/migrate_repo/versions/080_add_hypervisor_hostname_to_compute_nodes.py
|
nova/db/sqlalchemy/migrate_repo/versions/080_add_hypervisor_hostname_to_compute_nodes.py
|
# Copyright 2012 OpenStack, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sqlalchemy import *
meta = MetaData()
compute_nodes = Table("compute_nodes", meta, Column("id", Integer(),
primary_key=True, nullable=False))
hypervisor_hostname = Column("hypervisor_hostname", String(255))
def upgrade(migrate_engine):
meta.bind = migrate_engine
compute_nodes.create_column(hypervisor_hostname)
def downgrade(migrate_engine):
meta.bind = migrate_engine
compute_nodes.drop_column(hypervisor_hostname)
|
# Copyright 2012 OpenStack, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sqlalchemy import Column, MetaData, String, Table
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
compute_nodes = Table("compute_nodes", meta, autoload=True)
hypervisor_hostname = Column("hypervisor_hostname", String(255))
compute_nodes.create_column(hypervisor_hostname)
def downgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
compute_nodes = Table("compute_nodes", meta, autoload=True)
compute_nodes.drop_column('hypervisor_hostname')
|
Use sqlalchemy reflection in migration 080
|
Use sqlalchemy reflection in migration 080
Change-Id: If2a0e59461d108d59c6e9907d3db053ba2b44f57
|
Python
|
apache-2.0
|
petrutlucian94/nova,adelina-t/nova,DirectXMan12/nova-hacking,sridevikoushik31/nova,gooddata/openstack-nova,ojii/readthedocs.org,alvarolopez/nova,whitepages/nova,thomasem/nova,affo/nova,berrange/nova,eayunstack/nova,mahak/nova,cernops/nova,felixma/nova,apporc/nova,cyx1231st/nova,openstack/nova,klmitch/nova,JianyuWang/nova,CiscoSystems/nova,devoid/nova,DirectXMan12/nova-hacking,badock/nova,saleemjaveds/https-github.com-openstack-nova,Tehsmash/nova,gspilio/nova,yosshy/nova,mandeepdhami/nova,saleemjaveds/https-github.com-openstack-nova,maoy/zknova,akash1808/nova,Francis-Liu/animated-broccoli,houshengbo/nova_vmware_compute_driver,leilihh/nova,usc-isi/extra-specs,shail2810/nova,spring-week-topos/nova-week,thomasem/nova,mikalstill/nova,blueboxgroup/nova,dstroppa/openstack-smartos-nova-grizzly,Brocade-OpenSource/OpenStack-DNRM-Nova,barnsnake351/nova,spring-week-topos/nova-week,imsplitbit/nova,Stavitsky/nova,petrutlucian94/nova,fnordahl/nova,sridevikoushik31/nova,Triv90/Nova,rahulunair/nova,hanlind/nova,paulmathews/nova,sebrandon1/nova,alexandrucoman/vbox-nova-driver,raildo/nova,qwefi/nova,rajalokan/nova,eharney/nova,gooddata/openstack-nova,Yuriy-Leonov/nova,akash1808/nova,ntt-sic/nova,NoBodyCam/TftpPxeBootBareMetal,TwinkleChawla/nova,rickerc/nova_audit,noironetworks/nova,dawnpower/nova,iuliat/nova,nikesh-mahalka/nova,rickerc/nova_audit,sileht/deb-openstack-nova,savi-dev/nova,JioCloud/nova_test_latest,klmitch/nova,mandeepdhami/nova,bgxavier/nova,cloudbase/nova,cloudbase/nova,houshengbo/nova_vmware_compute_driver,rahulunair/nova,cloudbase/nova-virtualbox,devendermishrajio/nova_test_latest,jeffrey4l/nova,NewpTone/stacklab-nova,Juniper/nova,fajoy/nova,takeshineshiro/nova,belmiromoreira/nova,bclau/nova,mmnelemane/nova,angdraug/nova,Juniper/nova,sridevikoushik31/nova,orbitfp7/nova,NewpTone/stacklab-nova,devendermishrajio/nova,ted-gould/nova,double12gzh/nova,Tehsmash/nova,JioCloud/nova_test_latest,BeyondTheClouds/nova,savi-dev/nova,watonyweng/nova,mgagne/nova,NeCTAR-RC/nova,CCI-MOC/nova,Triv90/Nova,badock/nova,iuliat/nova,TwinkleChawla/nova,cyx1231st/nova,psiwczak/openstack,yrobla/nova,phenoxim/nova,tudorvio/nova,dims/nova,shootstar/novatest,devoid/nova,bigswitch/nova,dstroppa/openstack-smartos-nova-grizzly,eonpatapon/nova,cloudbau/nova,redhat-openstack/nova,leilihh/novaha,mahak/nova,Triv90/Nova,sebrandon1/nova,hanlind/nova,fajoy/nova,maoy/zknova,TieWei/nova,sacharya/nova,tealover/nova,ewindisch/nova,isyippee/nova,barnsnake351/nova,blueboxgroup/nova,sileht/deb-openstack-nova,sileht/deb-openstack-nova,eneabio/nova,raildo/nova,alaski/nova,kimjaejoong/nova,sridevikoushik31/openstack,berrange/nova,dawnpower/nova,ewindisch/nova,vmturbo/nova,OpenAcademy-OpenStack/nova-scheduler,edulramirez/nova,j-carpentier/nova,SUSE-Cloud/nova,MountainWei/nova,zzicewind/nova,vladikr/nova_drafts,usc-isi/nova,orbitfp7/nova,sacharya/nova,ruslanloman/nova,Yusuke1987/openstack_template,shahar-stratoscale/nova,MountainWei/nova,gspilio/nova,scripnichenko/nova,maelnor/nova,mmnelemane/nova,CCI-MOC/nova,varunarya10/nova_test_latest,watonyweng/nova,yrobla/nova,devendermishrajio/nova_test_latest,OpenAcademy-OpenStack/nova-scheduler,psiwczak/openstack,rajalokan/nova,CloudServer/nova,openstack/nova,jianghuaw/nova,usc-isi/nova,rrader/nova-docker-plugin,cloudbase/nova,alexandrucoman/vbox-nova-driver,yosshy/nova,tealover/nova,gooddata/openstack-nova,apporc/nova,TieWei/nova,virtualopensystems/nova,hanlind/nova,jeffrey4l/nova,affo/nova,jianghuaw/nova,vmturbo/nova,devendermishrajio/nova,openstack/nova,Stavitsky/nova,jianghuaw/nova,qwefi/nova,klmitch/nova,alaski/nova,viggates/nova,Brocade-OpenSource/OpenStack-DNRM-Nova,maheshp/novatest,scripnichenko/nova,CloudServer/nova,bigswitch/nova,maelnor/nova,NeCTAR-RC/nova,redhat-openstack/nova,tianweizhang/nova,mikalstill/nova,rajalokan/nova,citrix-openstack-build/nova,mikalstill/nova,ntt-sic/nova,ruslanloman/nova,LoHChina/nova,jianghuaw/nova,shootstar/novatest,CEG-FYP-OpenStack/scheduler,kimjaejoong/nova,Francis-Liu/animated-broccoli,eayunstack/nova,bgxavier/nova,josephsuh/extra-specs,NoBodyCam/TftpPxeBootBareMetal,sridevikoushik31/openstack,savi-dev/nova,plumgrid/plumgrid-nova,felixma/nova,JianyuWang/nova,Yuriy-Leonov/nova,silenceli/nova,eharney/nova,imsplitbit/nova,aristanetworks/arista-ovs-nova,projectcalico/calico-nova,isyippee/nova,NewpTone/stacklab-nova,paulmathews/nova,petrutlucian94/nova_dev,Yusuke1987/openstack_template,plumgrid/plumgrid-nova,JioCloud/nova,Juniper/nova,petrutlucian94/nova_dev,luogangyi/bcec-nova,BeyondTheClouds/nova,Metaswitch/calico-nova,maheshp/novatest,projectcalico/calico-nova,alvarolopez/nova,vladikr/nova_drafts,fajoy/nova,dstroppa/openstack-smartos-nova-grizzly,whitepages/nova,eonpatapon/nova,leilihh/novaha,j-carpentier/nova,adelina-t/nova,vmturbo/nova,zaina/nova,double12gzh/nova,tianweizhang/nova,eneabio/nova,aristanetworks/arista-ovs-nova,tangfeixiong/nova,luogangyi/bcec-nova,paulmathews/nova,silenceli/nova,tudorvio/nova,dims/nova,phenoxim/nova,tanglei528/nova,maoy/zknova,JioCloud/nova,zzicewind/nova,ted-gould/nova,cernops/nova,cloudbau/nova,usc-isi/extra-specs,zaina/nova,mgagne/nova,Metaswitch/calico-nova,houshengbo/nova_vmware_compute_driver,tanglei528/nova,akash1808/nova_test_latest,CEG-FYP-OpenStack/scheduler,zhimin711/nova,viggates/nova,Juniper/nova,belmiromoreira/nova,yatinkumbhare/openstack-nova,akash1808/nova_test_latest,DirectXMan12/nova-hacking,eneabio/nova,josephsuh/extra-specs,SUSE-Cloud/nova,josephsuh/extra-specs,virtualopensystems/nova,takeshineshiro/nova,CiscoSystems/nova,tangfeixiong/nova
|
---
+++
@@ -12,22 +12,19 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from sqlalchemy import *
-
-
-meta = MetaData()
-
-compute_nodes = Table("compute_nodes", meta, Column("id", Integer(),
- primary_key=True, nullable=False))
-
-hypervisor_hostname = Column("hypervisor_hostname", String(255))
+from sqlalchemy import Column, MetaData, String, Table
def upgrade(migrate_engine):
+ meta = MetaData()
meta.bind = migrate_engine
+ compute_nodes = Table("compute_nodes", meta, autoload=True)
+ hypervisor_hostname = Column("hypervisor_hostname", String(255))
compute_nodes.create_column(hypervisor_hostname)
def downgrade(migrate_engine):
+ meta = MetaData()
meta.bind = migrate_engine
- compute_nodes.drop_column(hypervisor_hostname)
+ compute_nodes = Table("compute_nodes", meta, autoload=True)
+ compute_nodes.drop_column('hypervisor_hostname')
|
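For illustration of the autoload pattern in the diff above: Table(..., autoload=True) reflects the existing table from the bound engine instead of redeclaring its columns, which is why the rewritten migration no longer hard-codes the id column. A minimal sketch, assuming an in-memory SQLite engine and a pre-2.0 SQLAlchemy where MetaData.bind and engine.execute still exist:
from sqlalchemy import MetaData, Table, create_engine
engine = create_engine('sqlite:///:memory:')
engine.execute('CREATE TABLE compute_nodes (id INTEGER PRIMARY KEY)')
meta = MetaData()
meta.bind = engine
# Reflect the existing schema rather than redeclaring it by hand.
compute_nodes = Table('compute_nodes', meta, autoload=True)
print(compute_nodes.columns.keys())  # ['id']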
452955ca8b7ba2ef01fc97800e5f350fee3e3a6e
|
tvnamer/renamer.py
|
tvnamer/renamer.py
|
import re
import os
import pytvdbapi.api as tvdb
class Renamer:
def __init__(self, api_key):
self.tvdb = tvdb.TVDB(api_key)
@staticmethod
def flat_file_list(directory):
directory = os.path.normpath(directory)
for dirpath, dirnames, filenames in os.walk(directory):
for filename in filenames:
full_path = os.path.join(dirpath, filename)
# remove directory from the start of the full path
full_path = full_path[len(directory)+1:]
yield full_path
def rename_table(self, directory, input_regex, output_format):
input_pattern = re.compile(input_regex)
filenames = self.flat_file_list(directory)
for filename in filenames:
thing = input_pattern.search(filename)
if thing is not None:
params = thing.groupdict()
output_filename = output_format.format(**params)
yield filename, output_filename
|
import re
import os
import pytvdbapi.api as tvdb
class Renamer:
def __init__(self, api_key):
self.tvdb = tvdb.TVDB(api_key)
@staticmethod
def flat_file_list(directory):
directory = os.path.normpath(directory)
for dirpath, dirnames, filenames in os.walk(directory):
for filename in filenames:
full_path = os.path.join(dirpath, filename)
# remove directory from the start of the full path
full_path = full_path[len(directory)+1:]
yield full_path
@staticmethod
def normalise_params(params):
def normalise(key, value):
if key == "show":
return str(value)
elif key in ["episode", "season"]:
return int(value)
else:
raise ValueError("Unknown parameter: '{}'".format(key))
return {key: normalise(key, value) for key, value in params.items()}
def rename_table(self, directory, input_regex, output_format):
input_pattern = re.compile(input_regex)
filenames = self.flat_file_list(directory)
for filename in filenames:
thing = input_pattern.search(filename)
if thing is not None:
params = self.normalise_params(thing.groupdict())
output_filename = output_format.format(**params)
yield filename, output_filename
|
Add normalising params from the regex input
|
Add normalising params from the regex input
|
Python
|
mit
|
tomleese/tvnamer,thomasleese/tvnamer
|
---
+++
@@ -19,6 +19,18 @@
full_path = full_path[len(directory)+1:]
yield full_path
+ @staticmethod
+ def normalise_params(params):
+ def normalise(key, value):
+ if key == "show":
+ return str(value)
+ elif key in ["episode", "season"]:
+ return int(value)
+ else:
+ raise ValueError("Unknown parameter: '{}'".format(key))
+
+ return {key: normalise(key, value) for key, value in params.items()}
+
def rename_table(self, directory, input_regex, output_format):
input_pattern = re.compile(input_regex)
@@ -26,6 +38,6 @@
for filename in filenames:
thing = input_pattern.search(filename)
if thing is not None:
- params = thing.groupdict()
+ params = self.normalise_params(thing.groupdict())
output_filename = output_format.format(**params)
yield filename, output_filename
|
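For illustration of why normalise_params casts captures to int: format specs such as {episode:02d} only accept integers, while regex groups arrive as strings. A minimal sketch of the same flow, with a hypothetical filename and pattern:
import re
input_regex = r'(?P<show>.+)\.S(?P<season>\d+)E(?P<episode>\d+)'
output_format = '{show}/Season {season}/Episode {episode:02d}.mkv'
match = re.search(input_regex, 'Some.Show.S01E07.mkv')
# Mirror normalise(): show stays a string, season/episode become ints.
params = {'show': str(match.group('show')),
          'season': int(match.group('season')),
          'episode': int(match.group('episode'))}
print(output_format.format(**params))  # Some.Show/Season 1/Episode 07.mkv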
38496eddbb214ee856b588e5b1cda62d5e353ab7
|
system_maintenance/tests/functional/tests.py
|
system_maintenance/tests/functional/tests.py
|
from selenium import webdriver
import unittest
class FunctionalTest(unittest.TestCase):
def setUp(self):
self.browser = webdriver.Firefox()
self.browser.implicitly_wait(3)
def tearDown(self):
self.browser.quit()
def test_app_home_title(self):
self.browser.get('http://localhost:8000/system_maintenance')
self.assertIn('System Maintenance', self.browser.title)
if __name__ == '__main__':
unittest.main(warnings='ignore')
|
Add simple functional test to test the title of the app's home page
|
Add simple functional test to test the title of the app's home page
|
Python
|
bsd-3-clause
|
mfcovington/django-system-maintenance,mfcovington/django-system-maintenance,mfcovington/django-system-maintenance
|
---
+++
@@ -0,0 +1,20 @@
+from selenium import webdriver
+import unittest
+
+
+class FunctionalTest(unittest.TestCase):
+
+ def setUp(self):
+ self.browser = webdriver.Firefox()
+ self.browser.implicitly_wait(3)
+
+ def tearDown(self):
+ self.browser.quit()
+
+ def test_app_home_title(self):
+ self.browser.get('http://localhost:8000/system_maintenance')
+ self.assertIn('System Maintenance', self.browser.title)
+
+
+if __name__ == '__main__':
+ unittest.main(warnings='ignore')
|
|
c0b09cc5d1f51672e696364616552008c13b89c4
|
packages/Python/lldbsuite/test/commands/expression/import-std-module/sysroot/TestStdModuleSysroot.py
|
packages/Python/lldbsuite/test/commands/expression/import-std-module/sysroot/TestStdModuleSysroot.py
|
"""
Test that we respect the sysroot when building the std module.
"""
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
import os
class ImportStdModule(TestBase):
mydir = TestBase.compute_mydir(__file__)
@skipIf(compiler=no_match("clang"))
def test(self):
self.build()
sysroot = os.path.join(os.getcwd(), "root")
# Set the sysroot.
self.runCmd("platform select --sysroot '" + sysroot + "' host", CURRENT_EXECUTABLE_SET)
lldbutil.run_to_source_breakpoint(self,
"// Set break point at this line.", lldb.SBFileSpec("main.cpp"))
self.runCmd("settings set target.import-std-module true")
# Call our custom function in our sysroot std module.
# If this gives us the correct result, then we used the sysroot.
# We rely on the default argument of -123 to make sure we actually have the C++ module.
# (We don't have default arguments in the debug information).
self.expect("expr std::myabs()", substrs=['(int) $0 = 123'])
|
"""
Test that we respect the sysroot when building the std module.
"""
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
import os
class ImportStdModule(TestBase):
mydir = TestBase.compute_mydir(__file__)
# We only emulate a fake libc++ in this test and don't use the real libc++,
# but we still add the libc++ category so that this test is only run in
# test configurations where libc++ is actually supposed to be tested.
@add_test_categories(["libc++"])
@skipIf(compiler=no_match("clang"))
def test(self):
self.build()
sysroot = os.path.join(os.getcwd(), "root")
# Set the sysroot.
self.runCmd("platform select --sysroot '" + sysroot + "' host", CURRENT_EXECUTABLE_SET)
lldbutil.run_to_source_breakpoint(self,
"// Set break point at this line.", lldb.SBFileSpec("main.cpp"))
self.runCmd("settings set target.import-std-module true")
# Call our custom function in our sysroot std module.
# If this gives us the correct result, then we used the sysroot.
# We rely on the default argument of -123 to make sure we actually have the C++ module.
# (We don't have default arguments in the debug information).
self.expect("expr std::myabs()", substrs=['(int) $0 = 123'])
|
Add import-std-module/sysroot to the libc++ test category.
|
[lldb] Add import-std-module/sysroot to the libc++ test category.
We essentially test libc++ in a sysroot here so let's make sure
that we actually only run this test on platforms where libc++
testing is enabled.
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@374572 91177308-0d34-0410-b5e6-96231b3b80d8
|
Python
|
apache-2.0
|
llvm-mirror/lldb,llvm-mirror/lldb,llvm-mirror/lldb,llvm-mirror/lldb,llvm-mirror/lldb
|
---
+++
@@ -11,6 +11,10 @@
mydir = TestBase.compute_mydir(__file__)
+ # We only emulate a fake libc++ in this test and don't use the real libc++,
+ # but we still add the libc++ category so that this test is only run in
+ # test configurations where libc++ is actually supposed to be tested.
+ @add_test_categories(["libc++"])
@skipIf(compiler=no_match("clang"))
def test(self):
self.build()
|
39086b074dbac8d6d743ede09ce3556e4861e5a4
|
wdim/client/blob.py
|
wdim/client/blob.py
|
import json
import hashlib
from wdim.client.storable import Storable
class Blob(Storable):
HASH_METHOD = 'sha1'
@classmethod
def _create(cls, data):
sha = hashlib(cls.HASH_METHOD, json.dumps(data))
return cls(sha, data)
@classmethod
def _from_document(cls, document):
return cls(document['data'])
@property
def hash(self):
return self._id
def __init__(self, data):
self.data = data
def to_document(self):
return {
'_id': self.hash,
'data': self.data
}
|
import json
import hashlib
from wdim import exceptions
from wdim.client import fields
from wdim.client.storable import Storable
class Blob(Storable):
HASH_METHOD = 'sha256'
_id = fields.StringField(unique=True)
data = fields.DictField()
@classmethod
async def create(cls, data):
sha = hashlib.new(cls.HASH_METHOD, json.dumps(data).encode('utf-8')).hexdigest()
try:
# Classmethod supers need arguments for some reason
return await super(Blob, cls).create(_id=sha, data=data)
except exceptions.UniqueViolation:
return await cls.load(sha)
@property
def hash(self):
return self._id
|
Reimplement Blob, switch to sha256
|
Reimplement Blob, switch to sha256
|
Python
|
mit
|
chrisseto/Still
|
---
+++
@@ -1,31 +1,27 @@
import json
import hashlib
+from wdim import exceptions
+from wdim.client import fields
from wdim.client.storable import Storable
class Blob(Storable):
- HASH_METHOD = 'sha1'
+ HASH_METHOD = 'sha256'
+
+ _id = fields.StringField(unique=True)
+ data = fields.DictField()
@classmethod
- def _create(cls, data):
- sha = hashlib(cls.HASH_METHOD, json.dumps(data))
- return cls(sha, data)
-
- @classmethod
- def _from_document(cls, document):
- return cls(document['data'])
+ async def create(cls, data):
+ sha = hashlib.new(cls.HASH_METHOD, json.dumps(data).encode('utf-8')).hexdigest()
+ try:
+ # Classmethod supers need arguments for some reason
+ return await super(Blob, cls).create(_id=sha, data=data)
+ except exceptions.UniqueViolation:
+ return await cls.load(sha)
@property
def hash(self):
return self._id
-
- def __init__(self, data):
- self.data = data
-
- def to_document(self):
- return {
- '_id': self.hash,
- 'data': self.data
- }
|
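For illustration of the content-addressed id that Blob.create computes: identical payloads hash to the same id, so UniqueViolation doubles as a deduplication signal. A standard-library sketch (sort_keys=True is an added assumption the commit does not use; without it the digest depends on dict key order):
import hashlib
import json
data = {'a': 1, 'b': 2}
# Serialize, encode, and hash exactly as create() does, plus key sorting.
digest = hashlib.new('sha256', json.dumps(data, sort_keys=True).encode('utf-8')).hexdigest()
print(digest)  # stable for the same payload, regardless of key order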
ca7580c12ffefafce1705d60ab74fcb22af18eb4
|
examples/python_interop/python_interop.py
|
examples/python_interop/python_interop.py
|
#!/usr/bin/env python
# Copyright 2017 Stanford University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
import legion
@legion.task
def f(task, regions, context, runtime):
print("inside task f")
@legion.task
def main_task(task, regions, context, runtime):
print("%x" % legion.c.legion_runtime_get_executing_processor(runtime, context).id)
|
#!/usr/bin/env python
# Copyright 2017 Stanford University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
import legion
@legion.task
def f(ctx):
print("inside task f")
@legion.task
def main_task(ctx):
print("%x" % legion.c.legion_runtime_get_executing_processor(ctx.runtime, ctx.context).id)
f(ctx)
|
Update Python example with a task call.
|
examples: Update Python example with a task call.
|
Python
|
apache-2.0
|
StanfordLegion/legion,StanfordLegion/legion,StanfordLegion/legion,StanfordLegion/legion,StanfordLegion/legion,StanfordLegion/legion,StanfordLegion/legion,StanfordLegion/legion
|
---
+++
@@ -20,9 +20,10 @@
import legion
@legion.task
-def f(task, regions, context, runtime):
+def f(ctx):
print("inside task f")
@legion.task
-def main_task(task, regions, context, runtime):
- print("%x" % legion.c.legion_runtime_get_executing_processor(runtime, context).id)
+def main_task(ctx):
+ print("%x" % legion.c.legion_runtime_get_executing_processor(ctx.runtime, ctx.context).id)
+ f(ctx)
|
a839dcaa51cde0bf191cc87ff2cf54bbc483d61e
|
main.py
|
main.py
|
#!/usr/bin/env python
import sys
import json
from pprint import pprint
try:
import requests
except ImportError:
print(
'Script requires requests package. \n'
'You can install it by running "pip install requests"'
)
exit()
API_URL = 'http://jsonplaceholder.typicode.com/posts/'
def get_by_id(id):
response = requests.get(API_URL + str(id))
return json.loads(response.text)
def get_all():
response = requests.get(API_URL)
return json.loads(response.text)
def validate_id(post_id):
if not post_id.isdigit():
print('Post id should be digit')
return False
elif int(post_id) not in range(1, 100):
print('Post id should be bigger than 0 and smaller than 100')
return False
return True
print('Loading data')
# If user didn't provide an id, print all posts.
# Else - validate id and get post by id.
if len(sys.argv) == 1:
pprint(get_all())
else:
post_id = sys.argv[1]
if validate_id(post_id):
pprint(get_by_id(int(post_id)))
else:
print('Quitting')
|
#!/usr/bin/env python
import sys
import json
from pprint import pprint
try:
import requests
except ImportError:
print(
'Script requires requests package. \n'
'You can install it by running "pip install requests"'
)
exit()
API_URL = 'http://jsonplaceholder.typicode.com/posts/'
def get_by_id(id):
response = requests.get(API_URL + str(id))
return json.loads(response.text)
def get_all():
response = requests.get(API_URL)
return json.loads(response.text)
def validate_id(post_id):
if not post_id.isdigit():
print('Post id should be digit')
return False
elif int(post_id) not in range(1, 100):
print('Post id should be bigger than 0 and smaller than 100')
return False
return True
def print_post(post):
for (key, value) in post.items():
print key + ':', value
print('Loading data')
# If user didn't provide an id, print all posts.
# Else - validate id and get post by id.
if len(sys.argv) == 1:
posts = get_all()
for post in posts:
print_post(post)
print
else:
post_id = sys.argv[1]
if validate_id(post_id):
post = get_by_id(int(post_id))
print_post(post)
else:
print('Quitting')
|
Print all data from posts
|
Print all data from posts
|
Python
|
mit
|
sevazhidkov/rest-wrapper
|
---
+++
@@ -33,14 +33,23 @@
return False
return True
+
+def print_post(post):
+ for (key, value) in post.items():
+ print key + ':', value
+
print('Loading data')
# If user didn't provide an id, print all posts.
# Else - validate id and get post by id.
if len(sys.argv) == 1:
- pprint(get_all())
+ posts = get_all()
+ for post in posts:
+ print_post(post)
+ print
else:
post_id = sys.argv[1]
if validate_id(post_id):
- pprint(get_by_id(int(post_id)))
+ post = get_by_id(int(post_id))
+ print_post(post)
else:
print('Quitting')
|
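For illustration of print_post with a hypothetical payload; this variant runs on both Python 2 and 3, while the script itself is effectively Python 2:
post = {'userId': 1, 'id': 1, 'title': 'sunt aut facere', 'body': '...'}
# One 'key: value' line per field, as print_post produces.
for key, value in post.items():
    print('{}: {}'.format(key, value))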
032bd078b7650905148ceb3adf653d1f78f7e73f
|
srtm.py
|
srtm.py
|
import os
import json
import numpy as np
SAMPLES = 1201 # For SRTM3, use 3601 for SRTM1
def get_elevation(lat, lon):
file = get_file_name(lat, lon)
if file:
return read_elevation_from_file(file, lat, lon)
# Treat it as data void as in SRTM documentation
return -32768
def read_elevation_from_file(file, lat, lon):
with open(file) as hgt_data:
# HGT is 16bit signed integer - big endian
elevations = np.fromfile(hgt_data, np.dtype('>i2'), SAMPLES*SAMPLES).reshape((SAMPLES, SAMPLES))
lat_row = round((lat - int(lat))* 1200, 0)
lon_row = round((lon - int(lon))* 1200, 0)
return elevations[1200-lat_row, lon_row].astype(int)
def get_file_name(lat, lon):
file = "N%(lat)dE0%(lon)d.hgt" % {'lat':lat, 'lon':lon}
if os.path.isfile(file):
return file
else:
return None
|
import os
import json
import numpy as np
SAMPLES = 1201 # For SRTM3, use 3601 for SRTM1
HGTDIR = 'hgt' # All 'hgt' files will be kept here uncompressed
def get_elevation(lat, lon):
file = get_file_name(lat, lon)
if file:
return read_elevation_from_file(file, lat, lon)
# Treat it as data void as in SRTM documentation
return -32768
def read_elevation_from_file(file, lat, lon):
with open(file) as hgt_data:
# HGT is 16bit signed integer - big endian
elevations = np.fromfile(hgt_data, np.dtype('>i2'), SAMPLES*SAMPLES).reshape((SAMPLES, SAMPLES))
lat_row = round((lat - int(lat))* 1200, 0)
lon_row = round((lon - int(lon))* 1200, 0)
return elevations[1200-lat_row, lon_row].astype(int)
def get_file_name(lat, lon):
file = "N%(lat)dE0%(lon)d.hgt" % {'lat':lat, 'lon':lon}
file = os.path.join(HGTDIR, file)
if os.path.isfile(file):
return file
else:
return None
|
Add HGTDIR to store hgt files inside a directory
|
Add HGTDIR to store hgt files inside a directory
|
Python
|
mit
|
aatishnn/srtm-python
|
---
+++
@@ -3,7 +3,7 @@
import numpy as np
SAMPLES = 1201 # For SRTM3, use 3601 for SRTM1
-
+HGTDIR = 'hgt' # All 'hgt' files will be kept here uncompressed
def get_elevation(lat, lon):
file = get_file_name(lat, lon)
@@ -26,6 +26,7 @@
def get_file_name(lat, lon):
file = "N%(lat)dE0%(lon)d.hgt" % {'lat':lat, 'lon':lon}
+ file = os.path.join(HGTDIR, file)
if os.path.isfile(file):
return file
else:
|
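A usage sketch for the module above, assuming it imports as srtm and that the hypothetical tile hgt/N27E086.hgt is present on disk; when the tile is missing, get_file_name returns None and the SRTM data-void value -32768 comes back:
from srtm import get_elevation
# Coordinates near Mount Everest fall in tile N27E086.hgt under HGTDIR.
print(get_elevation(27.9881, 86.9250))  # -32768 when the tile is missing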
1bf15bca7a492bf874dccab08e24df053b7a859f
|
mesh.py
|
mesh.py
|
import os
import sys
import traceback
builtin_cmds = {'cd', 'pwd',}
def prompt():
print '%s $ ' % os.getcwd(),
def read_command():
return sys.stdin.readline()
def parse_command(cmd_text):
return (cmd_text, cmd_text.strip().split())
def record_command(command):
return True
def run_builtin(cmd):
if cmd[0] == 'cd':
os.chdir(cmd[1])
elif cmd[0] == 'pwd':
print os.getcwd()
if __name__ == "__main__":
while True:
try:
prompt()
cmd_text = read_command()
cmd_text, cmd = parse_command(cmd_text)
record_command(cmd)
if cmd[0] in builtin_cmds:
run_builtin(cmd)
else:
#pid = subprocess.Popen(cmd_text, stdin=None, stdout=None, shell=True)
os.system(cmd_text)
except:
traceback.print_exc()
|
#!/usr/bin/env python3
import os
import shutil
import sys
import traceback
builtin_cmds = {'cd', 'pwd', 'exit',}
def prompt():
print('%s $ ' % os.getcwd(), end='', flush=True)
def read_command():
return sys.stdin.readline()
def parse_command(cmd_text):
return (cmd_text, cmd_text.strip().split())
def record_command(command):
print(command)
return True
def run_builtin(cmd, cmd_text):
if shutil.which(cmd[0]):
os.system(cmd_text)
if cmd[0] == 'cd':
os.chdir(cmd[1])
elif cmd[0] == 'pwd':
print(os.getcwd())
elif cmd[0] == 'exit':
sys.exit()
if __name__ == "__main__":
while True:
try:
prompt()
cmd_text = read_command()
cmd_text, cmd = parse_command(cmd_text)
record_command(cmd)
if cmd[0] in builtin_cmds:
run_builtin(cmd, cmd_text)
else:
#pid = subprocess.Popen(cmd_text, stdin=None, stdout=None, shell=True)
os.system(cmd_text)
except SystemExit:
break
except:
traceback.print_exc()
|
Switch to Python 3, add exit builtin
|
Switch to Python 3, add exit builtin
|
Python
|
mit
|
mmichie/mesh
|
---
+++
@@ -1,11 +1,14 @@
+#!/usr/bin/env python3
+
import os
+import shutil
import sys
import traceback
-builtin_cmds = {'cd', 'pwd',}
+builtin_cmds = {'cd', 'pwd', 'exit',}
def prompt():
- print '%s $ ' % os.getcwd(),
+ print('%s $ ' % os.getcwd(), end='', flush=True)
def read_command():
return sys.stdin.readline()
@@ -14,13 +17,18 @@
return (cmd_text, cmd_text.strip().split())
def record_command(command):
+ print(command)
return True
-def run_builtin(cmd):
+def run_builtin(cmd, cmd_text):
+ if shutil.which(cmd[0]):
+ os.system(cmd_text)
if cmd[0] == 'cd':
os.chdir(cmd[1])
elif cmd[0] == 'pwd':
- print os.getcwd()
+ print(os.getcwd())
+ elif cmd[0] == 'exit':
+ sys.exit()
if __name__ == "__main__":
while True:
@@ -31,9 +39,11 @@
record_command(cmd)
if cmd[0] in builtin_cmds:
- run_builtin(cmd)
+ run_builtin(cmd, cmd_text)
else:
#pid = subprocess.Popen(cmd_text, stdin=None, stdout=None, shell=True)
os.system(cmd_text)
+ except SystemExit:
+ break
except:
traceback.print_exc()
|
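For illustration of the shutil.which check that run_builtin performs (and which only ships with Python 3, one reason for the version switch); the printed paths are system-dependent assumptions:
import shutil
print(shutil.which('pwd'))  # e.g. '/bin/pwd' on many Unix systems
print(shutil.which('cd'))   # None: 'cd' exists only as a shell builtin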
35d5ca76a0c7f63545d2e8bc6b877c78ba9eab1d
|
tests/adapter/_path.py
|
tests/adapter/_path.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
def fix():
p = os.path.join(os.path.dirname(__file__), '../../src/')
if p not in sys.path:
sys.path.insert(0, p)
if "__main__" == __name__:
fix()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from unittest import mock
except ImportError:
import mock
def fix():
p = os.path.join(os.path.dirname(__file__), '../../src/')
if p not in sys.path:
sys.path.insert(0, p)
if "__main__" == __name__:
fix()
|
Make 'mock' mocking library available for test cases stored under 'tests/adapter'
|
Make 'mock' mocking library available for test cases stored under 'tests/adapter'
|
Python
|
bsd-3-clause
|
michalbachowski/pygrapes,michalbachowski/pygrapes,michalbachowski/pygrapes
|
---
+++
@@ -2,11 +2,18 @@
# -*- coding: utf-8 -*-
import os
import sys
+
+try:
+ from unittest import mock
+except ImportError:
+ import mock
+
def fix():
p = os.path.join(os.path.dirname(__file__), '../../src/')
if p not in sys.path:
sys.path.insert(0, p)
+
if "__main__" == __name__:
fix()
|
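For illustration: the try/except above prefers the stdlib unittest.mock (Python 3.3+) and falls back to the standalone mock package on Python 2, after which the API is identical. A minimal use:
try:
    from unittest import mock
except ImportError:
    import mock
# A stub that records its calls and returns a canned value.
stub = mock.Mock(return_value=42)
assert stub('anything') == 42
stub.assert_called_once_with('anything')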
3e843b9d0474657eeefc896b06e50968defb2514
|
wsgi.py
|
wsgi.py
|
# Yith Library Server is a password storage server.
# Copyright (C) 2015 Lorenzo Gil Sanchez <lorenzo.gil.sanchez@gmail.com>
#
# This file is part of Yith Library Server.
#
# Yith Library Server is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Yith Library Server is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Yith Library Server. If not, see <http://www.gnu.org/licenses/>.
import os
import os.path
from paste.deploy import loadapp
from waitress import serve
basedir= os.path.dirname(os.path.realpath(__file__))
conf_file = os.path.join(basedir, 'production.ini')
application = loadapp('config:%s' % conf_file)
if __name__ == "__main__":
port = int(os.environ.get("PORT", 5000))
scheme = os.environ.get("SCHEME", "https")
serve(application, host='0.0.0.0', port=port, url_scheme=scheme)
|
# Yith Library Web Client is a client for Yith Library Server.
# Copyright (C) 2015 Lorenzo Gil Sanchez <lorenzo.gil.sanchez@gmail.com>
#
# This file is part of Yith Library Web Client.
#
# Yith Library Web Client is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Yith Library Web Client is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Yith Library Web Client. If not, see <http://www.gnu.org/licenses/>.
import os
import os.path
from paste.deploy import loadapp
from waitress import serve
basedir= os.path.dirname(os.path.realpath(__file__))
conf_file = os.path.join(basedir, 'production.ini')
application = loadapp('config:%s' % conf_file)
if __name__ == "__main__":
port = int(os.environ.get("PORT", 5000))
scheme = os.environ.get("SCHEME", "https")
serve(application, host='0.0.0.0', port=port, url_scheme=scheme)
|
Fix project name in license section
|
Fix project name in license section
|
Python
|
agpl-3.0
|
lorenzogil/yith-library-web-client,ablanco/yith-library-web-client,ablanco/yith-library-web-client,lorenzogil/yith-library-web-client,ablanco/yith-library-web-client,ablanco/yith-library-web-client,lorenzogil/yith-library-web-client,lorenzogil/yith-library-web-client
|
---
+++
@@ -1,20 +1,20 @@
-# Yith Library Server is a password storage server.
+# Yith Library Web Client is a client for Yith Library Server.
# Copyright (C) 2015 Lorenzo Gil Sanchez <lorenzo.gil.sanchez@gmail.com>
#
-# This file is part of Yith Library Server.
+# This file is part of Yith Library Web Client.
#
-# Yith Library Server is free software: you can redistribute it and/or modify
+# Yith Library Web Client is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
-# Yith Library Server is distributed in the hope that it will be useful,
+# Yith Library Web Client is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
-# along with Yith Library Server. If not, see <http://www.gnu.org/licenses/>.
+# along with Yith Library Web Client. If not, see <http://www.gnu.org/licenses/>.
import os
import os.path
|
cadab3fef1e95c0b186dc28860ac4797243fdf22
|
tests/test_visualization.py
|
tests/test_visualization.py
|
from tornado.httpclient import HTTPRequest
import json
import os.path
import json
import shutil
from xml.etree import ElementTree as ET
from util import generate_filename
from util import make_trace_folder
from tests.base import BaseRecorderTestCase
class VisualizationTestCase(BaseRecorderTestCase):
def test_no_data(self):
self.http_client.fetch(self.get_url('/visualization'), self.stop)
response = self.wait()
assert 'class="vehicle"' in response.body
|
from tornado.httpclient import HTTPRequest
import json
import os.path
import json
import shutil
from xml.etree import ElementTree as ET
from util import generate_filename
from util import make_trace_folder
from tests.base import BaseRecorderTestCase
class VisualizationTestCase(BaseRecorderTestCase):
def test_no_data(self):
self.http_client.fetch(self.get_url('/visualization'), self.stop)
response = self.wait()
assert "measurement" in response.body
|
Update test case for visualization page.
|
Update test case for visualization page.
|
Python
|
bsd-3-clause
|
openxc/web-logging-example,openxc/web-logging-example
|
---
+++
@@ -15,4 +15,4 @@
def test_no_data(self):
self.http_client.fetch(self.get_url('/visualization'), self.stop)
response = self.wait()
- assert 'class="vehicle"' in response.body
+ assert "measurement" in response.body
|
eaaf941646ff8b22a6d3ef3689f22ad1b9f7a8e2
|
tensorflow/contrib/py2tf/impl/config.py
|
tensorflow/contrib/py2tf/impl/config.py
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Global configuration."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
PYTHON_LITERALS = {
'None': None,
'False': False,
'True': True,
'float': float,
}
DEFAULT_UNCOMPILED_MODULES = set((
('tensorflow',),
))
NO_SIDE_EFFECT_CONSTRUCTORS = set(('tensorflow',))
# TODO(mdan): Also allow controlling the generated names (for testability).
# TODO(mdan): Verify that these names are not hidden by generated code.
# TODO(mdan): Make sure copybara renames the reference below.
COMPILED_IMPORT_STATEMENTS = (
'from __future__ import print_function',
'import tensorflow as tf',
'from tensorflow.contrib.py2tf import utils as '
'py2tf_utils')
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Global configuration."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.py2tf import utils
PYTHON_LITERALS = {
'None': None,
'False': False,
'True': True,
'float': float,
}
DEFAULT_UNCOMPILED_MODULES = set((
('tensorflow',),
(utils.__name__,),
))
NO_SIDE_EFFECT_CONSTRUCTORS = set(('tensorflow',))
# TODO(mdan): Also allow controlling the generated names (for testability).
# TODO(mdan): Verify that these names are not hidden by generated code.
# TODO(mdan): Make sure copybara renames the reference below.
COMPILED_IMPORT_STATEMENTS = (
'from __future__ import print_function',
'import tensorflow as tf',
'from tensorflow.contrib.py2tf import utils as '
'py2tf_utils')
|
Add the utils module to the uncompiled whitelist.
|
Add the utils module to the uncompiled whitelist.
PiperOrigin-RevId: 185733139
|
Python
|
apache-2.0
|
jbedorf/tensorflow,arborh/tensorflow,zasdfgbnm/tensorflow,jhseu/tensorflow,kobejean/tensorflow,Intel-tensorflow/tensorflow,ZhangXinNan/tensorflow,jbedorf/tensorflow,jart/tensorflow,nburn42/tensorflow,annarev/tensorflow,ghchinoy/tensorflow,annarev/tensorflow,kobejean/tensorflow,ageron/tensorflow,dendisuhubdy/tensorflow,cxxgtxy/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,AnishShah/tensorflow,cxxgtxy/tensorflow,nburn42/tensorflow,manipopopo/tensorflow,DavidNorman/tensorflow,dancingdan/tensorflow,renyi533/tensorflow,tensorflow/tensorflow,Xeralux/tensorflow,meteorcloudy/tensorflow,aam-at/tensorflow,girving/tensorflow,allenlavoie/tensorflow,freedomtan/tensorflow,jart/tensorflow,jalexvig/tensorflow,aam-at/tensorflow,asimshankar/tensorflow,gautam1858/tensorflow,ZhangXinNan/tensorflow,davidzchen/tensorflow,theflofly/tensorflow,asimshankar/tensorflow,jendap/tensorflow,ageron/tensorflow,petewarden/tensorflow,allenlavoie/tensorflow,dancingdan/tensorflow,drpngx/tensorflow,frreiss/tensorflow-fred,theflofly/tensorflow,asimshankar/tensorflow,kevin-coder/tensorflow-fork,jbedorf/tensorflow,aselle/tensorflow,jalexvig/tensorflow,xzturn/tensorflow,xodus7/tensorflow,meteorcloudy/tensorflow,Intel-tensorflow/tensorflow,girving/tensorflow,kobejean/tensorflow,chemelnucfin/tensorflow,caisq/tensorflow,zasdfgbnm/tensorflow,yongtang/tensorflow,dongjoon-hyun/tensorflow,lukeiwanski/tensorflow,annarev/tensorflow,hehongliang/tensorflow,zasdfgbnm/tensorflow,aam-at/tensorflow,ageron/tensorflow,snnn/tensorflow,freedomtan/tensorflow,alshedivat/tensorflow,Bismarrck/tensorflow,aam-at/tensorflow,ZhangXinNan/tensorflow,aam-at/tensorflow,alsrgv/tensorflow,alsrgv/tensorflow,Intel-Corporation/tensorflow,frreiss/tensorflow-fred,frreiss/tensorflow-fred,DavidNorman/tensorflow,AnishShah/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,caisq/tensorflow,brchiu/tensorflow,meteorcloudy/tensorflow,aldian/tensorflow,brchiu/tensorflow,dongjoon-hyun/tensorflow,asimshankar/tensorflow,davidzchen/tensorflow,lukeiwanski/tensorflow,ghchinoy/tensorflow,davidzchen/tensorflow,yanchen036/tensorflow,hfp/tensorflow-xsmm,chemelnucfin/tensorflow,tensorflow/tensorflow,alshedivat/tensorflow,drpngx/tensorflow,tensorflow/tensorflow-pywrap_saved_model,alsrgv/tensorflow,sarvex/tensorflow,aam-at/tensorflow,DavidNorman/tensorflow,zasdfgbnm/tensorflow,yongtang/tensorflow,ZhangXinNan/tensorflow,DavidNorman/tensorflow,arborh/tensorflow,arborh/tensorflow,tensorflow/tensorflow-pywrap_saved_model,adit-chandra/tensorflow,dongjoon-hyun/tensorflow,theflofly/tensorflow,ageron/tensorflow,gunan/tensorflow,aselle/tensorflow,nburn42/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,AnishShah/tensorflow,chemelnucfin/tensorflow,jalexvig/tensorflow,davidzchen/tensorflow,meteorcloudy/tensorflow,arborh/tensorflow,jendap/tensorflow,jalexvig/tensorflow,kevin-coder/tensorflow-fork,ageron/tensorflow,paolodedios/tensorflow,allenlavoie/tensorflow,brchiu/tensorflow,karllessard/tensorflow,seanli9jan/tensorflow,tensorflow/tensorflow,xzturn/tensorflow,allenlavoie/tensorflow,ZhangXinNan/tensorflow,ppwwyyxx/tensorflow,yongtang/tensorflow,alsrgv/tensorflow,apark263/tensorflow,caisq/tensorflow,karllessard/tensorflow,sarvex/tensorflow,ppwwyyxx/tensorflow,benoitsteiner/tensorflow-xsmm,adit-chandra/tensorflow,aselle/tensorflow,hfp/tensorflow-xsmm,xodus7/tensorflow,gunan/tensorflow,Intel-tensorflow/tensorflow,dancingdan/tensorflow,freedomtan/tensorflow,seanli9jan/tensorflow,karllessard/tensorflow,sarvex/tensorflow,tensorflow/tensorflow-experimental
_link_static_libraries_once,apark263/tensorflow,DavidNorman/tensorflow,alshedivat/tensorflow,brchiu/tensorflow,aldian/tensorflow,jhseu/tensorflow,yongtang/tensorflow,theflofly/tensorflow,benoitsteiner/tensorflow-xsmm,lukeiwanski/tensorflow,gojira/tensorflow,jendap/tensorflow,asimshankar/tensorflow,ageron/tensorflow,xzturn/tensorflow,adit-chandra/tensorflow,kobejean/tensorflow,jhseu/tensorflow,aselle/tensorflow,kevin-coder/tensorflow-fork,petewarden/tensorflow,jhseu/tensorflow,kevin-coder/tensorflow-fork,zasdfgbnm/tensorflow,ageron/tensorflow,eaplatanios/tensorflow,aldian/tensorflow,girving/tensorflow,alshedivat/tensorflow,jalexvig/tensorflow,xzturn/tensorflow,yanchen036/tensorflow,hfp/tensorflow-xsmm,yongtang/tensorflow,ageron/tensorflow,seanli9jan/tensorflow,annarev/tensorflow,dongjoon-hyun/tensorflow,manipopopo/tensorflow,jbedorf/tensorflow,jhseu/tensorflow,tensorflow/tensorflow-pywrap_saved_model,gunan/tensorflow,xzturn/tensorflow,annarev/tensorflow,kobejean/tensorflow,renyi533/tensorflow,apark263/tensorflow,jhseu/tensorflow,kobejean/tensorflow,jhseu/tensorflow,paolodedios/tensorflow,jalexvig/tensorflow,alshedivat/tensorflow,frreiss/tensorflow-fred,davidzchen/tensorflow,alshedivat/tensorflow,frreiss/tensorflow-fred,manipopopo/tensorflow,freedomtan/tensorflow,jart/tensorflow,gojira/tensorflow,girving/tensorflow,jart/tensorflow,cxxgtxy/tensorflow,theflofly/tensorflow,gojira/tensorflow,meteorcloudy/tensorflow,adit-chandra/tensorflow,Bismarrck/tensorflow,tensorflow/tensorflow-pywrap_saved_model,asimshankar/tensorflow,chemelnucfin/tensorflow,yanchen036/tensorflow,petewarden/tensorflow,ghchinoy/tensorflow,gautam1858/tensorflow,chemelnucfin/tensorflow,manipopopo/tensorflow,davidzchen/tensorflow,gautam1858/tensorflow,Bismarrck/tensorflow,frreiss/tensorflow-fred,ghchinoy/tensorflow,lukeiwanski/tensorflow,drpngx/tensorflow,drpngx/tensorflow,jhseu/tensorflow,dancingdan/tensorflow,renyi533/tensorflow,tensorflow/tensorflow,brchiu/tensorflow,paolodedios/tensorflow,jalexvig/tensorflow,AnishShah/tensorflow,zasdfgbnm/tensorflow,brchiu/tensorflow,tensorflow/tensorflow-pywrap_saved_model,drpngx/tensorflow,eaplatanios/tensorflow,eaplatanios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,arborh/tensorflow,alshedivat/tensorflow,dongjoon-hyun/tensorflow,ghchinoy/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,nburn42/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,gunan/tensorflow,dendisuhubdy/tensorflow,allenlavoie/tensorflow,tensorflow/tensorflow-pywrap_saved_model,DavidNorman/tensorflow,yongtang/tensorflow,lukeiwanski/tensorflow,petewarden/tensorflow,manipopopo/tensorflow,chemelnucfin/tensorflow,kevin-coder/tensorflow-fork,kevin-coder/tensorflow-fork,gojira/tensorflow,Xeralux/tensorflow,eaplatanios/tensorflow,ppwwyyxx/tensorflow,hfp/tensorflow-xsmm,girving/tensorflow,jendap/tensorflow,cxxgtxy/tensorflow,paolodedios/tensorflow,aselle/tensorflow,snnn/tensorflow,freedomtan/tensorflow,gunan/tensorflow,hehongliang/tensorflow,frreiss/tensorflow-fred,ppwwyyxx/tensorflow,AnishShah/tensorflow,xzturn/tensorflow,benoitsteiner/tensorflow-xsmm,ppwwyyxx/tensorflow,ghchinoy/tensorflow,aselle/tensorflow,theflofly/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Xeralux/tensorflow,hehongliang/tensorflow,snnn/tensorflow,Bismarrck/tensorflow,ppwwyyxx/tensorflow,ghchinoy/tensorflow,snnn/tensorflow,AnishShah/tensorflow,tensorflow/tensorflow,ppwwyyxx/tensorflow,tensorflow/tensorflow,cxxgtxy/tensorflow,jart/tensorflow,arborh/tensorflow,yanch
en036/tensorflow,annarev/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,seanli9jan/tensorflow,aldian/tensorflow,freedomtan/tensorflow,jart/tensorflow,apark263/tensorflow,aselle/tensorflow,Xeralux/tensorflow,ghchinoy/tensorflow,drpngx/tensorflow,xodus7/tensorflow,caisq/tensorflow,freedomtan/tensorflow,Intel-Corporation/tensorflow,benoitsteiner/tensorflow-xsmm,adit-chandra/tensorflow,gunan/tensorflow,aselle/tensorflow,gunan/tensorflow,chemelnucfin/tensorflow,dancingdan/tensorflow,brchiu/tensorflow,theflofly/tensorflow,alsrgv/tensorflow,ageron/tensorflow,seanli9jan/tensorflow,jhseu/tensorflow,snnn/tensorflow,dancingdan/tensorflow,hfp/tensorflow-xsmm,Intel-Corporation/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,gojira/tensorflow,aam-at/tensorflow,drpngx/tensorflow,zasdfgbnm/tensorflow,aam-at/tensorflow,davidzchen/tensorflow,apark263/tensorflow,jalexvig/tensorflow,dendisuhubdy/tensorflow,apark263/tensorflow,jart/tensorflow,jart/tensorflow,arborh/tensorflow,frreiss/tensorflow-fred,adit-chandra/tensorflow,ghchinoy/tensorflow,caisq/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,alshedivat/tensorflow,renyi533/tensorflow,dancingdan/tensorflow,hehongliang/tensorflow,yanchen036/tensorflow,hfp/tensorflow-xsmm,AnishShah/tensorflow,tensorflow/tensorflow,xodus7/tensorflow,Intel-Corporation/tensorflow,dendisuhubdy/tensorflow,nburn42/tensorflow,petewarden/tensorflow,jhseu/tensorflow,kevin-coder/tensorflow-fork,sarvex/tensorflow,xodus7/tensorflow,allenlavoie/tensorflow,paolodedios/tensorflow,drpngx/tensorflow,cxxgtxy/tensorflow,arborh/tensorflow,chemelnucfin/tensorflow,yanchen036/tensorflow,freedomtan/tensorflow,allenlavoie/tensorflow,davidzchen/tensorflow,karllessard/tensorflow,alsrgv/tensorflow,yongtang/tensorflow,Xeralux/tensorflow,kobejean/tensorflow,annarev/tensorflow,gautam1858/tensorflow,xodus7/tensorflow,jalexvig/tensorflow,alsrgv/tensorflow,dendisuhubdy/tensorflow,yongtang/tensorflow,Xeralux/tensorflow,Intel-Corporation/tensorflow,benoitsteiner/tensorflow-xsmm,davidzchen/tensorflow,sarvex/tensorflow,eaplatanios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,asimshankar/tensorflow,jbedorf/tensorflow,dancingdan/tensorflow,annarev/tensorflow,nburn42/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,AnishShah/tensorflow,meteorcloudy/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,jart/tensorflow,aldian/tensorflow,girving/tensorflow,ZhangXinNan/tensorflow,petewarden/tensorflow,ghchinoy/tensorflow,caisq/tensorflow,nburn42/tensorflow,apark263/tensorflow,kevin-coder/tensorflow-fork,gautam1858/tensorflow,ageron/tensorflow,adit-chandra/tensorflow,gojira/tensorflow,gautam1858/tensorflow,karllessard/tensorflow,arborh/tensorflow,snnn/tensorflow,aam-at/tensorflow,caisq/tensorflow,jalexvig/tensorflow,Intel-tensorflow/tensorflow,adit-chandra/tensorflow,arborh/tensorflow,DavidNorman/tensorflow,freedomtan/tensorflow,manipopopo/tensorflow,nburn42/tensorflow,Bismarrck/tensorflow,jbedorf/tensorflow,nburn42/tensorflow,alsrgv/tensorflow,sarvex/tensorflow,snnn/tensorflow,drpngx/tensorflow,snnn/tensorflow,dendisuhubdy/tensorflow,benoitsteiner/tensorflow-xsmm,eaplatanios/tensorflow,freedomtan/tensorflow,karllessard/tensorflow,gunan/tensorflow,jart/tensorflow,xzturn/tensorflow,kobejean/tensorflow,lukeiwanski/tensorflow,frreiss/tensorflow-fred,AnishShah/tensorflow,gojira/tensorflow,petewarden/tensorflow,paolodedios/tensorflow,jendap/tensorflow,benoitsteiner/tensorflow-xsmm,alsrgv/tensorflow,yongtang/tens
orflow,karllessard/tensorflow,ZhangXinNan/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Bismarrck/tensorflow,gojira/tensorflow,tensorflow/tensorflow,dongjoon-hyun/tensorflow,Bismarrck/tensorflow,chemelnucfin/tensorflow,arborh/tensorflow,hfp/tensorflow-xsmm,renyi533/tensorflow,nburn42/tensorflow,apark263/tensorflow,girving/tensorflow,seanli9jan/tensorflow,gunan/tensorflow,karllessard/tensorflow,ZhangXinNan/tensorflow,gautam1858/tensorflow,kevin-coder/tensorflow-fork,sarvex/tensorflow,dancingdan/tensorflow,lukeiwanski/tensorflow,kevin-coder/tensorflow-fork,brchiu/tensorflow,aldian/tensorflow,caisq/tensorflow,dendisuhubdy/tensorflow,dendisuhubdy/tensorflow,theflofly/tensorflow,manipopopo/tensorflow,apark263/tensorflow,jendap/tensorflow,lukeiwanski/tensorflow,theflofly/tensorflow,aselle/tensorflow,tensorflow/tensorflow-pywrap_saved_model,hehongliang/tensorflow,Bismarrck/tensorflow,jendap/tensorflow,davidzchen/tensorflow,gautam1858/tensorflow,zasdfgbnm/tensorflow,eaplatanios/tensorflow,Intel-Corporation/tensorflow,petewarden/tensorflow,asimshankar/tensorflow,hehongliang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,meteorcloudy/tensorflow,paolodedios/tensorflow,hfp/tensorflow-xsmm,hfp/tensorflow-xsmm,chemelnucfin/tensorflow,hehongliang/tensorflow,snnn/tensorflow,jendap/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,annarev/tensorflow,frreiss/tensorflow-fred,Intel-Corporation/tensorflow,Bismarrck/tensorflow,lukeiwanski/tensorflow,kobejean/tensorflow,ZhangXinNan/tensorflow,Intel-Corporation/tensorflow,xzturn/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,girving/tensorflow,adit-chandra/tensorflow,zasdfgbnm/tensorflow,ppwwyyxx/tensorflow,eaplatanios/tensorflow,petewarden/tensorflow,paolodedios/tensorflow,frreiss/tensorflow-fred,dendisuhubdy/tensorflow,meteorcloudy/tensorflow,girving/tensorflow,sarvex/tensorflow,kobejean/tensorflow,petewarden/tensorflow,renyi533/tensorflow,girving/tensorflow,Xeralux/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,ppwwyyxx/tensorflow,ageron/tensorflow,aselle/tensorflow,eaplatanios/tensorflow,snnn/tensorflow,jhseu/tensorflow,aam-at/tensorflow,manipopopo/tensorflow,seanli9jan/tensorflow,benoitsteiner/tensorflow-xsmm,theflofly/tensorflow,Xeralux/tensorflow,dongjoon-hyun/tensorflow,xzturn/tensorflow,tensorflow/tensorflow-pywrap_saved_model,gojira/tensorflow,arborh/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,Xeralux/tensorflow,Bismarrck/tensorflow,gunan/tensorflow,manipopopo/tensorflow,jendap/tensorflow,dancingdan/tensorflow,freedomtan/tensorflow,annarev/tensorflow,dendisuhubdy/tensorflow,dongjoon-hyun/tensorflow,renyi533/tensorflow,ppwwyyxx/tensorflow,cxxgtxy/tensorflow,nburn42/tensorflow,girving/tensorflow,jbedorf/tensorflow,Intel-tensorflow/tensorflow,apark263/tensorflow,ageron/tensorflow,allenlavoie/tensorflow,xzturn/tensorflow,alsrgv/tensorflow,tensorflow/tensorflow,theflofly/tensorflow,frreiss/tensorflow-fred,zasdfgbnm/tensorflow,Xeralux/tensorflow,paolodedios/tensorflow,apark263/tensorflow,caisq/tensorflow,eaplatanios/tensorflow,jbedorf/tensorflow,xodus7/tensorflow,cxxgtxy/tensorflow,DavidNorman/tensorflow,AnishShah/tensorflow,asimshankar/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,DavidNorman/tensorflow,eaplatanios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,alshedivat/tensorflow,xodus7/tensorflow,renyi533/tensorflow,theflofly/tensorflow,ppwwyyxx/tensorflow,
jalexvig/tensorflow,ppwwyyxx/tensorflow,renyi533/tensorflow,gunan/tensorflow,freedomtan/tensorflow,gojira/tensorflow,gojira/tensorflow,xzturn/tensorflow,xodus7/tensorflow,seanli9jan/tensorflow,benoitsteiner/tensorflow-xsmm,renyi533/tensorflow,jbedorf/tensorflow,benoitsteiner/tensorflow-xsmm,jbedorf/tensorflow,aldian/tensorflow,adit-chandra/tensorflow,meteorcloudy/tensorflow,tensorflow/tensorflow,manipopopo/tensorflow,hfp/tensorflow-xsmm,alsrgv/tensorflow,aam-at/tensorflow,Xeralux/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,chemelnucfin/tensorflow,snnn/tensorflow,dongjoon-hyun/tensorflow,chemelnucfin/tensorflow,jhseu/tensorflow,petewarden/tensorflow,drpngx/tensorflow,gunan/tensorflow,Bismarrck/tensorflow,dongjoon-hyun/tensorflow,seanli9jan/tensorflow,lukeiwanski/tensorflow,ghchinoy/tensorflow,xzturn/tensorflow,xodus7/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,seanli9jan/tensorflow,manipopopo/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,allenlavoie/tensorflow,jendap/tensorflow,Intel-tensorflow/tensorflow,dongjoon-hyun/tensorflow,kevin-coder/tensorflow-fork,xodus7/tensorflow,hfp/tensorflow-xsmm,gautam1858/tensorflow,benoitsteiner/tensorflow-xsmm,ghchinoy/tensorflow,karllessard/tensorflow,AnishShah/tensorflow,yongtang/tensorflow,alshedivat/tensorflow,brchiu/tensorflow,allenlavoie/tensorflow,yanchen036/tensorflow,meteorcloudy/tensorflow,asimshankar/tensorflow,brchiu/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,allenlavoie/tensorflow,kobejean/tensorflow,jendap/tensorflow,aldian/tensorflow,aselle/tensorflow,yanchen036/tensorflow,davidzchen/tensorflow,davidzchen/tensorflow,ZhangXinNan/tensorflow,aam-at/tensorflow,dancingdan/tensorflow,jbedorf/tensorflow,petewarden/tensorflow,annarev/tensorflow,renyi533/tensorflow,DavidNorman/tensorflow,adit-chandra/tensorflow,seanli9jan/tensorflow,asimshankar/tensorflow,adit-chandra/tensorflow,DavidNorman/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,ZhangXinNan/tensorflow,renyi533/tensorflow,alshedivat/tensorflow,brchiu/tensorflow,karllessard/tensorflow,jbedorf/tensorflow,DavidNorman/tensorflow,zasdfgbnm/tensorflow,alsrgv/tensorflow,caisq/tensorflow
|
---
+++
@@ -18,6 +18,9 @@
from __future__ import division
from __future__ import print_function
+from tensorflow.contrib.py2tf import utils
+
+
PYTHON_LITERALS = {
'None': None,
'False': False,
@@ -27,6 +30,7 @@
DEFAULT_UNCOMPILED_MODULES = set((
('tensorflow',),
+ (utils.__name__,),
))
NO_SIDE_EFFECT_CONSTRUCTORS = set(('tensorflow',))
|
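For illustration, independent of TensorFlow: (utils.__name__,) is a one-element tuple holding the module's fully qualified dotted name, which keeps the whitelist entry robust to renames. A sketch using a stand-in module object:
import types
utils = types.ModuleType('tensorflow.contrib.py2tf.utils')  # stand-in module
entry = (utils.__name__,)
print(entry)  # ('tensorflow.contrib.py2tf.utils',)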
cba3b00a92194cb34d27a63e71a17f2239079c7b
|
setup.py
|
setup.py
|
from setuptools import setup
import os
setup(
name = "cmsplugin-bootstrap-carousel",
packages = ['cmsplugin_bootstrap_carousel',],
package_data = {
'': [
'templates/cmsplugin_bootstrap_carousel/*.html',
]
},
version = "0.1.2",
description = "Bootstrap carousel plugin for django-cms 2.2",
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
author = "Antoine Nguyen",
author_email = "tonio@ngyn.org",
url = "http://bitbucket.org/tonioo/cmsplugin-bootstrap-carousel",
license = "BSD",
keywords = ["django", "django-cms", "bootstrap", "carousel"],
classifiers = [
"Programming Language :: Python",
"Environment :: Web Environment",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Framework :: Django"
],
include_package_data = True,
zip_safe = True,
install_requires = ['Django-CMS>=2.2'],
)
|
from setuptools import setup
import os
setup(
name = "cmsplugin-bootstrap-carousel",
packages = ['cmsplugin_bootstrap_carousel',],
package_data = {
'': [
'templates/cmsplugin_bootstrap_carousel/*.html',
]
},
version = "0.1.3",
description = "Bootstrap carousel plugin for django-cms 2.2",
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
author = "Antoine Nguyen",
author_email = "tonio@ngyn.org",
url = "http://bitbucket.org/tonioo/cmsplugin-bootstrap-carousel",
license = "BSD",
keywords = ["django", "django-cms", "bootstrap", "carousel"],
classifiers = [
"Programming Language :: Python",
"Environment :: Web Environment",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Framework :: Django"
],
include_package_data = True,
zip_safe = True,
install_requires = ['Django-CMS>=2.2'],
)
|
Upgrade the version to force a reinstall of the wheel
|
Upgrade the version to force a reinstall of the wheel
|
Python
|
bsd-3-clause
|
360youlun/cmsplugin-bootstrap-carousel,360youlun/cmsplugin-bootstrap-carousel
|
---
+++
@@ -11,7 +11,7 @@
]
},
- version = "0.1.2",
+ version = "0.1.3",
description = "Bootstrap carousel plugin for django-cms 2.2",
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
author = "Antoine Nguyen",
|
255d1f753ee8b48cbb2d7131e72ebc5dfbaf443c
|
setup.py
|
setup.py
|
from setuptools import setup
import editorconfig
setup(
name='EditorConfig',
version=editorconfig.__version__,
author='EditorConfig Team',
packages=['editorconfig'],
url='http://editorconfig.org/',
license='LICENSE.txt',
description='EditorConfig File Locator and Interpreter for Python',
long_description=open('README.rst').read(),
entry_points = {
'console_scripts': [
'editorconfig.py = editorconfig.main:main',
]
},
)
|
from setuptools import setup
import editorconfig
setup(
name='EditorConfig',
version=editorconfig.__version__,
author='EditorConfig Team',
packages=['editorconfig'],
url='http://editorconfig.org/',
license='LICENSE.txt',
description='EditorConfig File Locator and Interpreter for Python',
long_description=open('README.rst').read(),
entry_points = {
'console_scripts': [
'editorconfig = editorconfig.main:main',
]
},
)
|
Rename `editorconfig.py` command to `editorconfig`
|
Rename `editorconfig.py` command to `editorconfig`
|
Python
|
bsd-2-clause
|
pocke/editorconfig-vim,VictorBjelkholm/editorconfig-vim,benjifisher/editorconfig-vim,VictorBjelkholm/editorconfig-vim,johnfraney/editorconfig-vim,pocke/editorconfig-vim,benjifisher/editorconfig-vim,johnfraney/editorconfig-vim,benjifisher/editorconfig-vim,pocke/editorconfig-vim,johnfraney/editorconfig-vim,VictorBjelkholm/editorconfig-vim
|
---
+++
@@ -12,7 +12,7 @@
long_description=open('README.rst').read(),
entry_points = {
'console_scripts': [
- 'editorconfig.py = editorconfig.main:main',
+ 'editorconfig = editorconfig.main:main',
]
},
)
|
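For illustration of what the renamed entry point yields: pip generates an executable named editorconfig whose wrapper behaves roughly like this sketch (not its literal generated code, and it presumes the package is installed):
import sys
from editorconfig.main import main
if __name__ == '__main__':
    # Exit with whatever status main() returns, as console scripts do.
    sys.exit(main())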
28cdad6e8ab6bd400ef50331a2f93af93620cc7f
|
app/models.py
|
app/models.py
|
from django.db import models
class Event(models.Model):
when = models.DateTimeField(auto_now=True)
what = models.TextField()
|
from django.db import models
class Event(models.Model):
when = models.DateTimeField(auto_now=True)
what = models.TextField()
def time(self):
return '{:%H:%M}'.format(self.when)
|
Return human-sensible time in Event
|
Return human-sensible time in Event
|
Python
|
mit
|
schatten/logan
|
---
+++
@@ -3,3 +3,6 @@
class Event(models.Model):
when = models.DateTimeField(auto_now=True)
what = models.TextField()
+
+ def time(self):
+ return '{:%H:%M}'.format(self.when)
|
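For illustration of the format spec Event.time relies on: str.format hands the part after the colon to the value's __format__, and datetime treats it as an strftime pattern. A self-contained check:
from datetime import datetime
when = datetime(2015, 6, 1, 9, 5)
print('{:%H:%M}'.format(when))  # 09:05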