commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
d0000ab4c650379667ff018290d2d67d026c330c
|
reaper.py
|
reaper.py
|
#!/usr/bin/env python
import boto
import base64
import json
from datetime import datetime
def seconds_from_hours(hours):
return hours * 60 * 60
def should_kill(instance):
attributes = instance.get_attribute("userData")
if attributes['userData'] == None:
return False
user_data = base64.b64decode(attributes['userData'])
try:
data = json.loads(user_data)
except ValueError:
return False
if not data.get("popfly"):
return False
if instance.state != "running":
return False
start = datetime.strptime(instance.launch_time, "%Y-%m-%dT%H:%M:%S.%fZ")
end = datetime.utcnow()
life = start - end
return life.total_seconds() >= seconds_from_hours(data['duration'])
def kill(instance):
instance.terminate()
print "BOOM"
conn = boto.connect_ec2()
for reservation in conn.get_all_instances():
for instance in reservation.instances:
if should_kill(instance):
kill(instance)
print "Killing {}".format(instance.id)
else:
print "Sparing {}".format(instance.id)
|
#!/usr/bin/env python
import boto
import base64
import json
from datetime import datetime
def seconds_from_hours(hours):
return hours * 60 * 60
def should_kill(instance):
attributes = instance.get_attribute("userData")
if attributes['userData'] == None:
print "Instance has no userdata"
return False
user_data = base64.b64decode(attributes['userData'])
try:
data = json.loads(user_data)
except ValueError:
print "User Data isn't json"
return False
if not data.get("popfly"):
print "No key popfly"
return False
if instance.state != "running":
print "Instance isn't running"
return False
start = datetime.strptime(instance.launch_time, "%Y-%m-%dT%H:%M:%S.%fZ")
end = datetime.utcnow()
life = start - end
return life.total_seconds() >= seconds_from_hours(data['duration'])
def kill(instance):
instance.terminate()
print "BOOM"
conn = boto.connect_ec2()
for reservation in conn.get_all_instances():
for instance in reservation.instances:
if should_kill(instance):
kill(instance)
print "Killing {}".format(instance.id)
else:
print "Sparing {}".format(instance.id)
|
Add debugging to the repear script
|
Add debugging to the repear script
|
Python
|
mit
|
kyleconroy/popfly
|
#!/usr/bin/env python
import boto
import base64
import json
from datetime import datetime
def seconds_from_hours(hours):
return hours * 60 * 60
def should_kill(instance):
attributes = instance.get_attribute("userData")
if attributes['userData'] == None:
return False
user_data = base64.b64decode(attributes['userData'])
try:
data = json.loads(user_data)
except ValueError:
return False
if not data.get("popfly"):
return False
if instance.state != "running":
return False
start = datetime.strptime(instance.launch_time, "%Y-%m-%dT%H:%M:%S.%fZ")
end = datetime.utcnow()
life = start - end
return life.total_seconds() >= seconds_from_hours(data['duration'])
def kill(instance):
instance.terminate()
print "BOOM"
conn = boto.connect_ec2()
for reservation in conn.get_all_instances():
for instance in reservation.instances:
if should_kill(instance):
kill(instance)
print "Killing {}".format(instance.id)
else:
print "Sparing {}".format(instance.id)
Add debugging to the repear script
|
#!/usr/bin/env python
import boto
import base64
import json
from datetime import datetime
def seconds_from_hours(hours):
return hours * 60 * 60
def should_kill(instance):
attributes = instance.get_attribute("userData")
if attributes['userData'] == None:
print "Instance has no userdata"
return False
user_data = base64.b64decode(attributes['userData'])
try:
data = json.loads(user_data)
except ValueError:
print "User Data isn't json"
return False
if not data.get("popfly"):
print "No key popfly"
return False
if instance.state != "running":
print "Instance isn't running"
return False
start = datetime.strptime(instance.launch_time, "%Y-%m-%dT%H:%M:%S.%fZ")
end = datetime.utcnow()
life = start - end
return life.total_seconds() >= seconds_from_hours(data['duration'])
def kill(instance):
instance.terminate()
print "BOOM"
conn = boto.connect_ec2()
for reservation in conn.get_all_instances():
for instance in reservation.instances:
if should_kill(instance):
kill(instance)
print "Killing {}".format(instance.id)
else:
print "Sparing {}".format(instance.id)
|
<commit_before>#!/usr/bin/env python
import boto
import base64
import json
from datetime import datetime
def seconds_from_hours(hours):
return hours * 60 * 60
def should_kill(instance):
attributes = instance.get_attribute("userData")
if attributes['userData'] == None:
return False
user_data = base64.b64decode(attributes['userData'])
try:
data = json.loads(user_data)
except ValueError:
return False
if not data.get("popfly"):
return False
if instance.state != "running":
return False
start = datetime.strptime(instance.launch_time, "%Y-%m-%dT%H:%M:%S.%fZ")
end = datetime.utcnow()
life = start - end
return life.total_seconds() >= seconds_from_hours(data['duration'])
def kill(instance):
instance.terminate()
print "BOOM"
conn = boto.connect_ec2()
for reservation in conn.get_all_instances():
for instance in reservation.instances:
if should_kill(instance):
kill(instance)
print "Killing {}".format(instance.id)
else:
print "Sparing {}".format(instance.id)
<commit_msg>Add debugging to the repear script<commit_after>
|
#!/usr/bin/env python
import boto
import base64
import json
from datetime import datetime
def seconds_from_hours(hours):
return hours * 60 * 60
def should_kill(instance):
attributes = instance.get_attribute("userData")
if attributes['userData'] == None:
print "Instance has no userdata"
return False
user_data = base64.b64decode(attributes['userData'])
try:
data = json.loads(user_data)
except ValueError:
print "User Data isn't json"
return False
if not data.get("popfly"):
print "No key popfly"
return False
if instance.state != "running":
print "Instance isn't running"
return False
start = datetime.strptime(instance.launch_time, "%Y-%m-%dT%H:%M:%S.%fZ")
end = datetime.utcnow()
life = start - end
return life.total_seconds() >= seconds_from_hours(data['duration'])
def kill(instance):
instance.terminate()
print "BOOM"
conn = boto.connect_ec2()
for reservation in conn.get_all_instances():
for instance in reservation.instances:
if should_kill(instance):
kill(instance)
print "Killing {}".format(instance.id)
else:
print "Sparing {}".format(instance.id)
|
#!/usr/bin/env python
import boto
import base64
import json
from datetime import datetime
def seconds_from_hours(hours):
return hours * 60 * 60
def should_kill(instance):
attributes = instance.get_attribute("userData")
if attributes['userData'] == None:
return False
user_data = base64.b64decode(attributes['userData'])
try:
data = json.loads(user_data)
except ValueError:
return False
if not data.get("popfly"):
return False
if instance.state != "running":
return False
start = datetime.strptime(instance.launch_time, "%Y-%m-%dT%H:%M:%S.%fZ")
end = datetime.utcnow()
life = start - end
return life.total_seconds() >= seconds_from_hours(data['duration'])
def kill(instance):
instance.terminate()
print "BOOM"
conn = boto.connect_ec2()
for reservation in conn.get_all_instances():
for instance in reservation.instances:
if should_kill(instance):
kill(instance)
print "Killing {}".format(instance.id)
else:
print "Sparing {}".format(instance.id)
Add debugging to the repear script#!/usr/bin/env python
import boto
import base64
import json
from datetime import datetime
def seconds_from_hours(hours):
return hours * 60 * 60
def should_kill(instance):
attributes = instance.get_attribute("userData")
if attributes['userData'] == None:
print "Instance has no userdata"
return False
user_data = base64.b64decode(attributes['userData'])
try:
data = json.loads(user_data)
except ValueError:
print "User Data isn't json"
return False
if not data.get("popfly"):
print "No key popfly"
return False
if instance.state != "running":
print "Instance isn't running"
return False
start = datetime.strptime(instance.launch_time, "%Y-%m-%dT%H:%M:%S.%fZ")
end = datetime.utcnow()
life = start - end
return life.total_seconds() >= seconds_from_hours(data['duration'])
def kill(instance):
instance.terminate()
print "BOOM"
conn = boto.connect_ec2()
for reservation in conn.get_all_instances():
for instance in reservation.instances:
if should_kill(instance):
kill(instance)
print "Killing {}".format(instance.id)
else:
print "Sparing {}".format(instance.id)
|
<commit_before>#!/usr/bin/env python
import boto
import base64
import json
from datetime import datetime
def seconds_from_hours(hours):
return hours * 60 * 60
def should_kill(instance):
attributes = instance.get_attribute("userData")
if attributes['userData'] == None:
return False
user_data = base64.b64decode(attributes['userData'])
try:
data = json.loads(user_data)
except ValueError:
return False
if not data.get("popfly"):
return False
if instance.state != "running":
return False
start = datetime.strptime(instance.launch_time, "%Y-%m-%dT%H:%M:%S.%fZ")
end = datetime.utcnow()
life = start - end
return life.total_seconds() >= seconds_from_hours(data['duration'])
def kill(instance):
instance.terminate()
print "BOOM"
conn = boto.connect_ec2()
for reservation in conn.get_all_instances():
for instance in reservation.instances:
if should_kill(instance):
kill(instance)
print "Killing {}".format(instance.id)
else:
print "Sparing {}".format(instance.id)
<commit_msg>Add debugging to the repear script<commit_after>#!/usr/bin/env python
import boto
import base64
import json
from datetime import datetime
def seconds_from_hours(hours):
return hours * 60 * 60
def should_kill(instance):
attributes = instance.get_attribute("userData")
if attributes['userData'] == None:
print "Instance has no userdata"
return False
user_data = base64.b64decode(attributes['userData'])
try:
data = json.loads(user_data)
except ValueError:
print "User Data isn't json"
return False
if not data.get("popfly"):
print "No key popfly"
return False
if instance.state != "running":
print "Instance isn't running"
return False
start = datetime.strptime(instance.launch_time, "%Y-%m-%dT%H:%M:%S.%fZ")
end = datetime.utcnow()
life = start - end
return life.total_seconds() >= seconds_from_hours(data['duration'])
def kill(instance):
instance.terminate()
print "BOOM"
conn = boto.connect_ec2()
for reservation in conn.get_all_instances():
for instance in reservation.instances:
if should_kill(instance):
kill(instance)
print "Killing {}".format(instance.id)
else:
print "Sparing {}".format(instance.id)
|
40e2356f907332a4ab04ba3a6511079f5c2cd269
|
core/storage/config/django_models.py
|
core/storage/config/django_models.py
|
# coding: utf-8
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Models relating to configuration properties."""
__author__ = 'Sean Lip'
from core import django_utils
import core.storage.base_model.models as base_models
class ConfigPropertyModel(base_models.BaseModel):
"""A class that represents a named configuration property.
The id is the name of the property.
"""
# The property value.
value = django_utils.JsonField(default={})
|
# coding: utf-8
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Models relating to configuration properties."""
__author__ = 'Sean Lip'
from core import django_utils
import core.storage.base_model.models as base_models
class ConfigPropertyModel(base_models.BaseModel):
"""A class that represents a named configuration property.
The id is the name of the property.
"""
# The property value.
value = django_utils.JSONField(default={})
|
Fix a typo in the Django ConfigPropertyModel class.
|
Fix a typo in the Django ConfigPropertyModel class.
|
Python
|
apache-2.0
|
openhatch/oh-missions-oppia-beta,prasanna08/oppia,directorlive/oppia,Dev4X/oppia,infinyte/oppia,virajprabhu/oppia,BenHenning/oppia,aldeka/oppia,asandyz/oppia,kennho/oppia,directorlive/oppia,Atlas-Sailed-Co/oppia,fernandopinhati/oppia,brianrodri/oppia,miyucy/oppia,anthkris/oppia,terrameijar/oppia,michaelWagner/oppia,bjvoth/oppia,cleophasmashiri/oppia,directorlive/oppia,sarahfo/oppia,oppia/oppia,shaz13/oppia,Cgruppo/oppia,kevinlee12/oppia,edallison/oppia,whygee/oppia,toooooper/oppia,wangsai/oppia,rackstar17/oppia,miyucy/oppia,VictoriaRoux/oppia,mit0110/oppia,anthkris/oppia,MAKOSCAFEE/oppia,sunu/oh-missions-oppia-beta,asandyz/oppia,sanyaade-teachings/oppia,sanyaade-teachings/oppia,google-code-export/oppia,brylie/oppia,VictoriaRoux/oppia,nagyistoce/oppia,jestapinski/oppia,terrameijar/oppia,sdulal/oppia,aldeka/oppia,CMDann/oppia,himanshu-dixit/oppia,won0089/oppia,kingctan/oppia,DewarM/oppia,won0089/oppia,Dev4X/oppia,michaelWagner/oppia,oulan/oppia,shaz13/oppia,oulan/oppia,terrameijar/oppia,MaximLich/oppia,gale320/oppia,miyucy/oppia,felipecocco/oppia,anthkris/oppia,souravbadami/oppia,sdulal/oppia,fernandopinhati/oppia,kaffeel/oppia,raju249/oppia,kevinlee12/oppia,VictoriaRoux/oppia,Dev4X/oppia,souravbadami/oppia,sunu/oppia,hazmatzo/oppia,jestapinski/oppia,fernandopinhati/oppia,MaximLich/oppia,shaz13/oppia,openhatch/oh-missions-oppia-beta,kennho/oppia,zgchizi/oppia-uc,brylie/oppia,wangsai/oppia,gale320/oppia,mit0110/oppia,oppia/oppia,oulan/oppia,brianrodri/oppia,bjvoth/oppia,kaffeel/oppia,shaz13/oppia,CMDann/oppia,felipecocco/oppia,infinyte/oppia,nagyistoce/oppia,edallison/oppia,won0089/oppia,gale320/oppia,amitdeutsch/oppia,michaelWagner/oppia,sunu/oh-missions-oppia-beta,felipecocco/oppia,Atlas-Sailed-Co/oppia,kingctan/oppia,himanshu-dixit/oppia,raju249/oppia,BenHenning/oppia,sarahfo/oppia,himanshu-dixit/oppia,cleophasmashiri/oppia,amgowano/oppia,Cgruppo/oppia,oppia/oppia,dippatel1994/oppia,DewarM/oppia,AllanYangZhou/oppia,hazmatzo/oppia,danieljjh/oppia,sanyaade-teachings/o
ppia,BenHenning/oppia,anggorodewanto/oppia,oulan/oppia,raju249/oppia,nagyistoce/oppia,CMDann/oppia,felipecocco/oppia,brylie/oppia,sanyaade-teachings/oppia,Atlas-Sailed-Co/oppia,leandrotoledo/oppia,whygee/oppia,prasanna08/oppia,virajprabhu/oppia,toooooper/oppia,Atlas-Sailed-Co/oppia,kingctan/oppia,gale320/oppia,leandrotoledo/oppia,leandrotoledo/oppia,won0089/oppia,anthkris/oppia,sbhowmik89/oppia,zgchizi/oppia-uc,mindpin/mindpin_oppia,bjvoth/oppia,mindpin/mindpin_oppia,AllanYangZhou/oppia,jestapinski/oppia,himanshu-dixit/oppia,MaximLich/oppia,asandyz/oppia,mit0110/oppia,infinyte/oppia,gale320/oppia,mit0110/oppia,dippatel1994/oppia,CMDann/oppia,kaffeel/oppia,brianrodri/oppia,kingctan/oppia,directorlive/oppia,felipecocco/oppia,brianrodri/oppia,VictoriaRoux/oppia,zgchizi/oppia-uc,dippatel1994/oppia,MAKOSCAFEE/oppia,prasanna08/oppia,google-code-export/oppia,brylie/oppia,miyucy/oppia,oppia/oppia,danieljjh/oppia,kevinlee12/oppia,edallison/oppia,infinyte/oppia,toooooper/oppia,raju249/oppia,sunu/oppia,kennho/oppia,leandrotoledo/oppia,Cgruppo/oppia,cleophasmashiri/oppia,sdulal/oppia,sarahfo/oppia,cleophasmashiri/oppia,amitdeutsch/oppia,VictoriaRoux/oppia,BenHenning/oppia,CMDann/oppia,virajprabhu/oppia,kennho/oppia,MaximLich/oppia,asandyz/oppia,amitdeutsch/oppia,google-code-export/oppia,dippatel1994/oppia,MAKOSCAFEE/oppia,hazmatzo/oppia,kaffeel/oppia,directorlive/oppia,michaelWagner/oppia,sdulal/oppia,fernandopinhati/oppia,sarahfo/oppia,Cgruppo/oppia,DewarM/oppia,sunu/oppia,leandrotoledo/oppia,sarahfo/oppia,souravbadami/oppia,BenHenning/oppia,mit0110/oppia,MAKOSCAFEE/oppia,kaffeel/oppia,mindpin/mindpin_oppia,jestapinski/oppia,rackstar17/oppia,oulan/oppia,whygee/oppia,toooooper/oppia,aldeka/oppia,brylie/oppia,sbhowmik89/oppia,google-code-export/oppia,won0089/oppia,sdulal/oppia,anggorodewanto/oppia,infinyte/oppia,amgowano/oppia,amitdeutsch/oppia,DewarM/oppia,bjvoth/oppia,kennho/oppia,toooooper/oppia,zgchizi/oppia-uc,sunu/oh-missions-oppia-beta,virajprabhu/oppia,openhatch/oh-missi
ons-oppia-beta,michaelWagner/oppia,anggorodewanto/oppia,whygee/oppia,sbhowmik89/oppia,wangsai/oppia,sunu/oppia,aldeka/oppia,Atlas-Sailed-Co/oppia,souravbadami/oppia,nagyistoce/oppia,kevinlee12/oppia,dippatel1994/oppia,sbhowmik89/oppia,google-code-export/oppia,danieljjh/oppia,wangsai/oppia,terrameijar/oppia,cleophasmashiri/oppia,AllanYangZhou/oppia,rackstar17/oppia,Dev4X/oppia,danieljjh/oppia,nagyistoce/oppia,sunu/oppia,hazmatzo/oppia,amgowano/oppia,mindpin/mindpin_oppia,sunu/oh-missions-oppia-beta,DewarM/oppia,prasanna08/oppia,danieljjh/oppia,souravbadami/oppia,Cgruppo/oppia,anggorodewanto/oppia,kingctan/oppia,edallison/oppia,bjvoth/oppia,sanyaade-teachings/oppia,kevinlee12/oppia,prasanna08/oppia,amgowano/oppia,amitdeutsch/oppia,sbhowmik89/oppia,brianrodri/oppia,oppia/oppia,openhatch/oh-missions-oppia-beta,hazmatzo/oppia,wangsai/oppia,fernandopinhati/oppia,rackstar17/oppia,AllanYangZhou/oppia,whygee/oppia,Dev4X/oppia,asandyz/oppia,virajprabhu/oppia
|
# coding: utf-8
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Models relating to configuration properties."""
__author__ = 'Sean Lip'
from core import django_utils
import core.storage.base_model.models as base_models
class ConfigPropertyModel(base_models.BaseModel):
"""A class that represents a named configuration property.
The id is the name of the property.
"""
# The property value.
value = django_utils.JsonField(default={})
Fix a typo in the Django ConfigPropertyModel class.
|
# coding: utf-8
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Models relating to configuration properties."""
__author__ = 'Sean Lip'
from core import django_utils
import core.storage.base_model.models as base_models
class ConfigPropertyModel(base_models.BaseModel):
"""A class that represents a named configuration property.
The id is the name of the property.
"""
# The property value.
value = django_utils.JSONField(default={})
|
<commit_before># coding: utf-8
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Models relating to configuration properties."""
__author__ = 'Sean Lip'
from core import django_utils
import core.storage.base_model.models as base_models
class ConfigPropertyModel(base_models.BaseModel):
"""A class that represents a named configuration property.
The id is the name of the property.
"""
# The property value.
value = django_utils.JsonField(default={})
<commit_msg>Fix a typo in the Django ConfigPropertyModel class.<commit_after>
|
# coding: utf-8
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Models relating to configuration properties."""
__author__ = 'Sean Lip'
from core import django_utils
import core.storage.base_model.models as base_models
class ConfigPropertyModel(base_models.BaseModel):
"""A class that represents a named configuration property.
The id is the name of the property.
"""
# The property value.
value = django_utils.JSONField(default={})
|
# coding: utf-8
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Models relating to configuration properties."""
__author__ = 'Sean Lip'
from core import django_utils
import core.storage.base_model.models as base_models
class ConfigPropertyModel(base_models.BaseModel):
"""A class that represents a named configuration property.
The id is the name of the property.
"""
# The property value.
value = django_utils.JsonField(default={})
Fix a typo in the Django ConfigPropertyModel class.# coding: utf-8
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Models relating to configuration properties."""
__author__ = 'Sean Lip'
from core import django_utils
import core.storage.base_model.models as base_models
class ConfigPropertyModel(base_models.BaseModel):
"""A class that represents a named configuration property.
The id is the name of the property.
"""
# The property value.
value = django_utils.JSONField(default={})
|
<commit_before># coding: utf-8
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Models relating to configuration properties."""
__author__ = 'Sean Lip'
from core import django_utils
import core.storage.base_model.models as base_models
class ConfigPropertyModel(base_models.BaseModel):
"""A class that represents a named configuration property.
The id is the name of the property.
"""
# The property value.
value = django_utils.JsonField(default={})
<commit_msg>Fix a typo in the Django ConfigPropertyModel class.<commit_after># coding: utf-8
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Models relating to configuration properties."""
__author__ = 'Sean Lip'
from core import django_utils
import core.storage.base_model.models as base_models
class ConfigPropertyModel(base_models.BaseModel):
"""A class that represents a named configuration property.
The id is the name of the property.
"""
# The property value.
value = django_utils.JSONField(default={})
|
7673e33186bb5f48d9f1c35deb55fca91553d526
|
mopidy_soundcloud/actor.py
|
mopidy_soundcloud/actor.py
|
from __future__ import unicode_literals
import logging
import pykka
from mopidy import backend
from .library import SoundCloudLibraryProvider
from .soundcloud import SoundCloudClient
logger = logging.getLogger(__name__)
class SoundCloudBackend(pykka.ThreadingActor, backend.Backend):
def __init__(self, config, audio):
super(SoundCloudBackend, self).__init__()
self.config = config
self.remote = SoundCloudClient(config['soundcloud'])
self.library = SoundCloudLibraryProvider(backend=self)
self.playback = SoundCloudPlaybackProvider(audio=audio, backend=self)
self.uri_schemes = ['soundcloud', 'sc']
class SoundCloudPlaybackProvider(backend.PlaybackProvider):
def play(self, track):
track_id = self.backend.remote.parse_track_uri(track)
track = self.backend.remote.get_track(track_id, True)
return super(SoundCloudPlaybackProvider, self).play(track)
|
from __future__ import unicode_literals
import logging
import pykka
from mopidy import backend
from .library import SoundCloudLibraryProvider
from .soundcloud import SoundCloudClient
logger = logging.getLogger(__name__)
class SoundCloudBackend(pykka.ThreadingActor, backend.Backend):
def __init__(self, config, audio):
super(SoundCloudBackend, self).__init__()
self.config = config
self.remote = SoundCloudClient(config['soundcloud'])
self.library = SoundCloudLibraryProvider(backend=self)
self.playback = SoundCloudPlaybackProvider(audio=audio, backend=self)
self.uri_schemes = ['soundcloud', 'sc']
class SoundCloudPlaybackProvider(backend.PlaybackProvider):
def play(self, track):
track_id = self.backend.remote.parse_track_uri(track)
track = self.backend.remote.get_track(track_id, True)
if hasattr(track, 'uri'):
return super(SoundCloudPlaybackProvider, self).play(track)
else:
return None
|
Handle track that can't be played
|
Handle track that can't be played
|
Python
|
mit
|
mopidy/mopidy-soundcloud,yakumaa/mopidy-soundcloud
|
from __future__ import unicode_literals
import logging
import pykka
from mopidy import backend
from .library import SoundCloudLibraryProvider
from .soundcloud import SoundCloudClient
logger = logging.getLogger(__name__)
class SoundCloudBackend(pykka.ThreadingActor, backend.Backend):
def __init__(self, config, audio):
super(SoundCloudBackend, self).__init__()
self.config = config
self.remote = SoundCloudClient(config['soundcloud'])
self.library = SoundCloudLibraryProvider(backend=self)
self.playback = SoundCloudPlaybackProvider(audio=audio, backend=self)
self.uri_schemes = ['soundcloud', 'sc']
class SoundCloudPlaybackProvider(backend.PlaybackProvider):
def play(self, track):
track_id = self.backend.remote.parse_track_uri(track)
track = self.backend.remote.get_track(track_id, True)
return super(SoundCloudPlaybackProvider, self).play(track)
Handle track that can't be played
|
from __future__ import unicode_literals
import logging
import pykka
from mopidy import backend
from .library import SoundCloudLibraryProvider
from .soundcloud import SoundCloudClient
logger = logging.getLogger(__name__)
class SoundCloudBackend(pykka.ThreadingActor, backend.Backend):
def __init__(self, config, audio):
super(SoundCloudBackend, self).__init__()
self.config = config
self.remote = SoundCloudClient(config['soundcloud'])
self.library = SoundCloudLibraryProvider(backend=self)
self.playback = SoundCloudPlaybackProvider(audio=audio, backend=self)
self.uri_schemes = ['soundcloud', 'sc']
class SoundCloudPlaybackProvider(backend.PlaybackProvider):
def play(self, track):
track_id = self.backend.remote.parse_track_uri(track)
track = self.backend.remote.get_track(track_id, True)
if hasattr(track, 'uri'):
return super(SoundCloudPlaybackProvider, self).play(track)
else:
return None
|
<commit_before>from __future__ import unicode_literals
import logging
import pykka
from mopidy import backend
from .library import SoundCloudLibraryProvider
from .soundcloud import SoundCloudClient
logger = logging.getLogger(__name__)
class SoundCloudBackend(pykka.ThreadingActor, backend.Backend):
def __init__(self, config, audio):
super(SoundCloudBackend, self).__init__()
self.config = config
self.remote = SoundCloudClient(config['soundcloud'])
self.library = SoundCloudLibraryProvider(backend=self)
self.playback = SoundCloudPlaybackProvider(audio=audio, backend=self)
self.uri_schemes = ['soundcloud', 'sc']
class SoundCloudPlaybackProvider(backend.PlaybackProvider):
def play(self, track):
track_id = self.backend.remote.parse_track_uri(track)
track = self.backend.remote.get_track(track_id, True)
return super(SoundCloudPlaybackProvider, self).play(track)
<commit_msg>Handle track that can't be played<commit_after>
|
from __future__ import unicode_literals
import logging
import pykka
from mopidy import backend
from .library import SoundCloudLibraryProvider
from .soundcloud import SoundCloudClient
logger = logging.getLogger(__name__)
class SoundCloudBackend(pykka.ThreadingActor, backend.Backend):
def __init__(self, config, audio):
super(SoundCloudBackend, self).__init__()
self.config = config
self.remote = SoundCloudClient(config['soundcloud'])
self.library = SoundCloudLibraryProvider(backend=self)
self.playback = SoundCloudPlaybackProvider(audio=audio, backend=self)
self.uri_schemes = ['soundcloud', 'sc']
class SoundCloudPlaybackProvider(backend.PlaybackProvider):
def play(self, track):
track_id = self.backend.remote.parse_track_uri(track)
track = self.backend.remote.get_track(track_id, True)
if hasattr(track, 'uri'):
return super(SoundCloudPlaybackProvider, self).play(track)
else:
return None
|
from __future__ import unicode_literals
import logging
import pykka
from mopidy import backend
from .library import SoundCloudLibraryProvider
from .soundcloud import SoundCloudClient
logger = logging.getLogger(__name__)
class SoundCloudBackend(pykka.ThreadingActor, backend.Backend):
def __init__(self, config, audio):
super(SoundCloudBackend, self).__init__()
self.config = config
self.remote = SoundCloudClient(config['soundcloud'])
self.library = SoundCloudLibraryProvider(backend=self)
self.playback = SoundCloudPlaybackProvider(audio=audio, backend=self)
self.uri_schemes = ['soundcloud', 'sc']
class SoundCloudPlaybackProvider(backend.PlaybackProvider):
def play(self, track):
track_id = self.backend.remote.parse_track_uri(track)
track = self.backend.remote.get_track(track_id, True)
return super(SoundCloudPlaybackProvider, self).play(track)
Handle track that can't be playedfrom __future__ import unicode_literals
import logging
import pykka
from mopidy import backend
from .library import SoundCloudLibraryProvider
from .soundcloud import SoundCloudClient
logger = logging.getLogger(__name__)
class SoundCloudBackend(pykka.ThreadingActor, backend.Backend):
def __init__(self, config, audio):
super(SoundCloudBackend, self).__init__()
self.config = config
self.remote = SoundCloudClient(config['soundcloud'])
self.library = SoundCloudLibraryProvider(backend=self)
self.playback = SoundCloudPlaybackProvider(audio=audio, backend=self)
self.uri_schemes = ['soundcloud', 'sc']
class SoundCloudPlaybackProvider(backend.PlaybackProvider):
def play(self, track):
track_id = self.backend.remote.parse_track_uri(track)
track = self.backend.remote.get_track(track_id, True)
if hasattr(track, 'uri'):
return super(SoundCloudPlaybackProvider, self).play(track)
else:
return None
|
<commit_before>from __future__ import unicode_literals
import logging
import pykka
from mopidy import backend
from .library import SoundCloudLibraryProvider
from .soundcloud import SoundCloudClient
logger = logging.getLogger(__name__)
class SoundCloudBackend(pykka.ThreadingActor, backend.Backend):
def __init__(self, config, audio):
super(SoundCloudBackend, self).__init__()
self.config = config
self.remote = SoundCloudClient(config['soundcloud'])
self.library = SoundCloudLibraryProvider(backend=self)
self.playback = SoundCloudPlaybackProvider(audio=audio, backend=self)
self.uri_schemes = ['soundcloud', 'sc']
class SoundCloudPlaybackProvider(backend.PlaybackProvider):
def play(self, track):
track_id = self.backend.remote.parse_track_uri(track)
track = self.backend.remote.get_track(track_id, True)
return super(SoundCloudPlaybackProvider, self).play(track)
<commit_msg>Handle track that can't be played<commit_after>from __future__ import unicode_literals
import logging
import pykka
from mopidy import backend
from .library import SoundCloudLibraryProvider
from .soundcloud import SoundCloudClient
logger = logging.getLogger(__name__)
class SoundCloudBackend(pykka.ThreadingActor, backend.Backend):
def __init__(self, config, audio):
super(SoundCloudBackend, self).__init__()
self.config = config
self.remote = SoundCloudClient(config['soundcloud'])
self.library = SoundCloudLibraryProvider(backend=self)
self.playback = SoundCloudPlaybackProvider(audio=audio, backend=self)
self.uri_schemes = ['soundcloud', 'sc']
class SoundCloudPlaybackProvider(backend.PlaybackProvider):
def play(self, track):
track_id = self.backend.remote.parse_track_uri(track)
track = self.backend.remote.get_track(track_id, True)
if hasattr(track, 'uri'):
return super(SoundCloudPlaybackProvider, self).play(track)
else:
return None
|
e83dcb09125af0949c9001a8889338cfd23d6131
|
contrib/trigger_rtd_build.py
|
contrib/trigger_rtd_build.py
|
#!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import urllib2
key = sys.argv[1]
url = 'http://readthedocs.org/build/%s' % (key)
req = urllib2.Request(url, '')
f = urllib2.urlopen(req)
print f.read()
|
#!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import urllib2
key = sys.argv[1]
url = 'https://readthedocs.org/build/%s' % (key)
req = urllib2.Request(url, '')
f = urllib2.urlopen(req)
print f.read()
|
Update trigger rtd build script - use https instead of http.
|
Update trigger rtd build script - use https instead of http.
|
Python
|
apache-2.0
|
ZuluPro/libcloud,mgogoulos/libcloud,jimbobhickville/libcloud,wido/libcloud,Verizon/libcloud,watermelo/libcloud,niteoweb/libcloud,smaffulli/libcloud,mistio/libcloud,Kami/libcloud,NexusIS/libcloud,mgogoulos/libcloud,vongazman/libcloud,Scalr/libcloud,mtekel/libcloud,mathspace/libcloud,smaffulli/libcloud,cryptickp/libcloud,curoverse/libcloud,mtekel/libcloud,NexusIS/libcloud,erjohnso/libcloud,StackPointCloud/libcloud,cryptickp/libcloud,wuyuewen/libcloud,techhat/libcloud,DimensionDataCBUSydney/libcloud,techhat/libcloud,StackPointCloud/libcloud,carletes/libcloud,apache/libcloud,t-tran/libcloud,NexusIS/libcloud,atsaki/libcloud,wrigri/libcloud,carletes/libcloud,vongazman/libcloud,iPlantCollaborativeOpenSource/libcloud,Verizon/libcloud,supertom/libcloud,lochiiconnectivity/libcloud,schaubl/libcloud,aleGpereira/libcloud,carletes/libcloud,Scalr/libcloud,aleGpereira/libcloud,pquentin/libcloud,mbrukman/libcloud,Kami/libcloud,watermelo/libcloud,supertom/libcloud,vongazman/libcloud,jimbobhickville/libcloud,mistio/libcloud,ZuluPro/libcloud,samuelchong/libcloud,mathspace/libcloud,lochiiconnectivity/libcloud,jerryblakley/libcloud,pquentin/libcloud,briancurtin/libcloud,apache/libcloud,schaubl/libcloud,pquentin/libcloud,ByteInternet/libcloud,iPlantCollaborativeOpenSource/libcloud,t-tran/libcloud,wido/libcloud,wuyuewen/libcloud,SecurityCompass/libcloud,mbrukman/libcloud,andrewsomething/libcloud,illfelder/libcloud,pantheon-systems/libcloud,mtekel/libcloud,wrigri/libcloud,Kami/libcloud,wrigri/libcloud,MrBasset/libcloud,MrBasset/libcloud,Scalr/libcloud,aleGpereira/libcloud,atsaki/libcloud,sahildua2305/libcloud,samuelchong/libcloud,pantheon-systems/libcloud,mathspace/libcloud,pantheon-systems/libcloud,wido/libcloud,schaubl/libcloud,illfelder/libcloud,supertom/libcloud,SecurityCompass/libcloud,DimensionDataCBUSydney/libcloud,sahildua2305/libcloud,erjohnso/libcloud,techhat/libcloud,iPlantCollaborativeOpenSource/libcloud,lochiiconnectivity/libcloud,StackPointCloud/libcloud,ZuluPro/libcloud,misti
o/libcloud,atsaki/libcloud,niteoweb/libcloud,andrewsomething/libcloud,Verizon/libcloud,jimbobhickville/libcloud,erjohnso/libcloud,curoverse/libcloud,MrBasset/libcloud,watermelo/libcloud,briancurtin/libcloud,SecurityCompass/libcloud,smaffulli/libcloud,DimensionDataCBUSydney/libcloud,niteoweb/libcloud,wuyuewen/libcloud,sahildua2305/libcloud,jerryblakley/libcloud,illfelder/libcloud,mgogoulos/libcloud,apache/libcloud,t-tran/libcloud,ByteInternet/libcloud,mbrukman/libcloud,andrewsomething/libcloud,cryptickp/libcloud,curoverse/libcloud,briancurtin/libcloud,ByteInternet/libcloud,samuelchong/libcloud,jerryblakley/libcloud
|
#!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import urllib2
key = sys.argv[1]
url = 'http://readthedocs.org/build/%s' % (key)
req = urllib2.Request(url, '')
f = urllib2.urlopen(req)
print f.read()
Update trigger rtd build script - use https instead of http.
|
#!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import urllib2
key = sys.argv[1]
url = 'https://readthedocs.org/build/%s' % (key)
req = urllib2.Request(url, '')
f = urllib2.urlopen(req)
print f.read()
|
<commit_before>#!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import urllib2
key = sys.argv[1]
url = 'http://readthedocs.org/build/%s' % (key)
req = urllib2.Request(url, '')
f = urllib2.urlopen(req)
print f.read()
<commit_msg>Update trigger rtd build script - use https instead of http.<commit_after>
|
#!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import urllib2
key = sys.argv[1]
url = 'https://readthedocs.org/build/%s' % (key)
req = urllib2.Request(url, '')
f = urllib2.urlopen(req)
print f.read()
|
#!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import urllib2
key = sys.argv[1]
url = 'http://readthedocs.org/build/%s' % (key)
req = urllib2.Request(url, '')
f = urllib2.urlopen(req)
print f.read()
Update trigger rtd build script - use https instead of http.#!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import urllib2
key = sys.argv[1]
url = 'https://readthedocs.org/build/%s' % (key)
req = urllib2.Request(url, '')
f = urllib2.urlopen(req)
print f.read()
|
<commit_before>#!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import urllib2
key = sys.argv[1]
url = 'http://readthedocs.org/build/%s' % (key)
req = urllib2.Request(url, '')
f = urllib2.urlopen(req)
print f.read()
<commit_msg>Update trigger rtd build script - use https instead of http.<commit_after>#!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import urllib2
key = sys.argv[1]
url = 'https://readthedocs.org/build/%s' % (key)
req = urllib2.Request(url, '')
f = urllib2.urlopen(req)
print f.read()
|
377ec226eb1cb8f0e5ea4bca06d8a0db0905b87d
|
comrade/core/decorators.py
|
comrade/core/decorators.py
|
def singleton(cls):
instances = {}
def getinstance():
if cls not in instances:
instances[cls] = cls()
return instances[cls]
return getinstance
|
def singleton(cls):
instances = {}
def getinstance():
if cls not in instances:
instances[cls] = cls()
return instances[cls]
return getinstance
def authorized(test_func, unauthorized_url=None):
"""
Decorator for views that checks that the user passes the given test,
redirecting to the unauthorized page if it fails. The test should be a
callable that takes the user object and returns True if the user passes.
"""
if not unauthorized_url:
from django.conf import settings
unauthorized_url = settings.UNAUTHORIZED_URL
def decorator(view_func):
def _wrapped_view(request, *args, **kwargs):
if test_func(request.user):
return view_func(request, *args, **kwargs)
path = urlquote(request.get_full_path())
return HttpResponseRedirect(unauthorized_url, status=401)
return wraps(view_func,
assigned=available_attrs(view_func))(_wrapped_view)
return decorator
|
Add decorator for checking if a user is authorized to access a page.
|
Add decorator for checking if a user is authorized to access a page.
|
Python
|
mit
|
bueda/django-comrade
|
def singleton(cls):
instances = {}
def getinstance():
if cls not in instances:
instances[cls] = cls()
return instances[cls]
return getinstance
Add decorator for checking if a user is authorized to access a page.
|
def singleton(cls):
instances = {}
def getinstance():
if cls not in instances:
instances[cls] = cls()
return instances[cls]
return getinstance
def authorized(test_func, unauthorized_url=None):
"""
Decorator for views that checks that the user passes the given test,
redirecting to the unauthorized page if it fails. The test should be a
callable that takes the user object and returns True if the user passes.
"""
if not unauthorized_url:
from django.conf import settings
unauthorized_url = settings.UNAUTHORIZED_URL
def decorator(view_func):
def _wrapped_view(request, *args, **kwargs):
if test_func(request.user):
return view_func(request, *args, **kwargs)
path = urlquote(request.get_full_path())
return HttpResponseRedirect(unauthorized_url, status=401)
return wraps(view_func,
assigned=available_attrs(view_func))(_wrapped_view)
return decorator
|
<commit_before>def singleton(cls):
instances = {}
def getinstance():
if cls not in instances:
instances[cls] = cls()
return instances[cls]
return getinstance
<commit_msg>Add decorator for checking if a user is authorized to access a page.<commit_after>
|
def singleton(cls):
instances = {}
def getinstance():
if cls not in instances:
instances[cls] = cls()
return instances[cls]
return getinstance
def authorized(test_func, unauthorized_url=None):
"""
Decorator for views that checks that the user passes the given test,
redirecting to the unauthorized page if it fails. The test should be a
callable that takes the user object and returns True if the user passes.
"""
if not unauthorized_url:
from django.conf import settings
unauthorized_url = settings.UNAUTHORIZED_URL
def decorator(view_func):
def _wrapped_view(request, *args, **kwargs):
if test_func(request.user):
return view_func(request, *args, **kwargs)
path = urlquote(request.get_full_path())
return HttpResponseRedirect(unauthorized_url, status=401)
return wraps(view_func,
assigned=available_attrs(view_func))(_wrapped_view)
return decorator
|
def singleton(cls):
instances = {}
def getinstance():
if cls not in instances:
instances[cls] = cls()
return instances[cls]
return getinstance
Add decorator for checking if a user is authorized to access a page.def singleton(cls):
instances = {}
def getinstance():
if cls not in instances:
instances[cls] = cls()
return instances[cls]
return getinstance
def authorized(test_func, unauthorized_url=None):
"""
Decorator for views that checks that the user passes the given test,
redirecting to the unauthorized page if it fails. The test should be a
callable that takes the user object and returns True if the user passes.
"""
if not unauthorized_url:
from django.conf import settings
unauthorized_url = settings.UNAUTHORIZED_URL
def decorator(view_func):
def _wrapped_view(request, *args, **kwargs):
if test_func(request.user):
return view_func(request, *args, **kwargs)
path = urlquote(request.get_full_path())
return HttpResponseRedirect(unauthorized_url, status=401)
return wraps(view_func,
assigned=available_attrs(view_func))(_wrapped_view)
return decorator
|
<commit_before>def singleton(cls):
instances = {}
def getinstance():
if cls not in instances:
instances[cls] = cls()
return instances[cls]
return getinstance
<commit_msg>Add decorator for checking if a user is authorized to access a page.<commit_after>def singleton(cls):
instances = {}
def getinstance():
if cls not in instances:
instances[cls] = cls()
return instances[cls]
return getinstance
def authorized(test_func, unauthorized_url=None):
"""
Decorator for views that checks that the user passes the given test,
redirecting to the unauthorized page if it fails. The test should be a
callable that takes the user object and returns True if the user passes.
"""
if not unauthorized_url:
from django.conf import settings
unauthorized_url = settings.UNAUTHORIZED_URL
def decorator(view_func):
def _wrapped_view(request, *args, **kwargs):
if test_func(request.user):
return view_func(request, *args, **kwargs)
path = urlquote(request.get_full_path())
return HttpResponseRedirect(unauthorized_url, status=401)
return wraps(view_func,
assigned=available_attrs(view_func))(_wrapped_view)
return decorator
|
d146036998d3595730b3de5f03fd7ac6e63ae498
|
src/sentry/api/serializers/models/organization_member.py
|
src/sentry/api/serializers/models/organization_member.py
|
from __future__ import absolute_import
from sentry.api.serializers import Serializer, register
from sentry.models import OrganizationMember
@register(OrganizationMember)
class OrganizationMemberSerializer(Serializer):
def serialize(self, obj, attrs, user):
d = {
'id': str(obj.id),
'email': obj.get_email(),
'name': obj.user.get_display_name() if obj.user else obj.get_email(),
'role': obj.role,
'roleName': obj.get_role_display(),
'pending': obj.is_pending,
'flags': {
'sso:linked': bool(getattr(obj.flags, 'sso:linked')),
'sso:invalid': bool(getattr(obj.flags, 'sso:invalid')),
},
'dateCreated': obj.date_added,
}
return d
|
from __future__ import absolute_import
from sentry.api.serializers import Serializer, register
from sentry.models import OrganizationMember
@register(OrganizationMember)
class OrganizationMemberSerializer(Serializer):
def serialize(self, obj, attrs, user):
if obj.user:
user_data = {'id': obj.user.id}
else:
user_data = None
d = {
'id': str(obj.id),
'email': obj.get_email(),
'name': obj.user.get_display_name() if obj.user else obj.get_email(),
'user': user_data,
'role': obj.role,
'roleName': obj.get_role_display(),
'pending': obj.is_pending,
'flags': {
'sso:linked': bool(getattr(obj.flags, 'sso:linked')),
'sso:invalid': bool(getattr(obj.flags, 'sso:invalid')),
},
'dateCreated': obj.date_added,
}
return d
|
Add user ID to member
|
Add user ID to member
|
Python
|
bsd-3-clause
|
BuildingLink/sentry,JamesMura/sentry,looker/sentry,looker/sentry,BuildingLink/sentry,jean/sentry,JackDanger/sentry,mvaled/sentry,zenefits/sentry,JamesMura/sentry,jean/sentry,daevaorn/sentry,BuildingLink/sentry,BuildingLink/sentry,mitsuhiko/sentry,fotinakis/sentry,beeftornado/sentry,ifduyue/sentry,daevaorn/sentry,fotinakis/sentry,beeftornado/sentry,gencer/sentry,jean/sentry,zenefits/sentry,JamesMura/sentry,alexm92/sentry,zenefits/sentry,beeftornado/sentry,jean/sentry,ifduyue/sentry,alexm92/sentry,JackDanger/sentry,gencer/sentry,ifduyue/sentry,ifduyue/sentry,zenefits/sentry,daevaorn/sentry,mvaled/sentry,JamesMura/sentry,gencer/sentry,BuildingLink/sentry,looker/sentry,gencer/sentry,daevaorn/sentry,alexm92/sentry,jean/sentry,zenefits/sentry,mitsuhiko/sentry,looker/sentry,gencer/sentry,mvaled/sentry,mvaled/sentry,fotinakis/sentry,JackDanger/sentry,mvaled/sentry,mvaled/sentry,JamesMura/sentry,fotinakis/sentry,looker/sentry,ifduyue/sentry
|
from __future__ import absolute_import
from sentry.api.serializers import Serializer, register
from sentry.models import OrganizationMember
@register(OrganizationMember)
class OrganizationMemberSerializer(Serializer):
def serialize(self, obj, attrs, user):
d = {
'id': str(obj.id),
'email': obj.get_email(),
'name': obj.user.get_display_name() if obj.user else obj.get_email(),
'role': obj.role,
'roleName': obj.get_role_display(),
'pending': obj.is_pending,
'flags': {
'sso:linked': bool(getattr(obj.flags, 'sso:linked')),
'sso:invalid': bool(getattr(obj.flags, 'sso:invalid')),
},
'dateCreated': obj.date_added,
}
return d
Add user ID to member
|
from __future__ import absolute_import
from sentry.api.serializers import Serializer, register
from sentry.models import OrganizationMember
@register(OrganizationMember)
class OrganizationMemberSerializer(Serializer):
def serialize(self, obj, attrs, user):
if obj.user:
user_data = {'id': obj.user.id}
else:
user_data = None
d = {
'id': str(obj.id),
'email': obj.get_email(),
'name': obj.user.get_display_name() if obj.user else obj.get_email(),
'user': user_data,
'role': obj.role,
'roleName': obj.get_role_display(),
'pending': obj.is_pending,
'flags': {
'sso:linked': bool(getattr(obj.flags, 'sso:linked')),
'sso:invalid': bool(getattr(obj.flags, 'sso:invalid')),
},
'dateCreated': obj.date_added,
}
return d
|
<commit_before>from __future__ import absolute_import
from sentry.api.serializers import Serializer, register
from sentry.models import OrganizationMember
@register(OrganizationMember)
class OrganizationMemberSerializer(Serializer):
def serialize(self, obj, attrs, user):
d = {
'id': str(obj.id),
'email': obj.get_email(),
'name': obj.user.get_display_name() if obj.user else obj.get_email(),
'role': obj.role,
'roleName': obj.get_role_display(),
'pending': obj.is_pending,
'flags': {
'sso:linked': bool(getattr(obj.flags, 'sso:linked')),
'sso:invalid': bool(getattr(obj.flags, 'sso:invalid')),
},
'dateCreated': obj.date_added,
}
return d
<commit_msg>Add user ID to member<commit_after>
|
from __future__ import absolute_import
from sentry.api.serializers import Serializer, register
from sentry.models import OrganizationMember
@register(OrganizationMember)
class OrganizationMemberSerializer(Serializer):
def serialize(self, obj, attrs, user):
if obj.user:
user_data = {'id': obj.user.id}
else:
user_data = None
d = {
'id': str(obj.id),
'email': obj.get_email(),
'name': obj.user.get_display_name() if obj.user else obj.get_email(),
'user': user_data,
'role': obj.role,
'roleName': obj.get_role_display(),
'pending': obj.is_pending,
'flags': {
'sso:linked': bool(getattr(obj.flags, 'sso:linked')),
'sso:invalid': bool(getattr(obj.flags, 'sso:invalid')),
},
'dateCreated': obj.date_added,
}
return d
|
from __future__ import absolute_import
from sentry.api.serializers import Serializer, register
from sentry.models import OrganizationMember
@register(OrganizationMember)
class OrganizationMemberSerializer(Serializer):
def serialize(self, obj, attrs, user):
d = {
'id': str(obj.id),
'email': obj.get_email(),
'name': obj.user.get_display_name() if obj.user else obj.get_email(),
'role': obj.role,
'roleName': obj.get_role_display(),
'pending': obj.is_pending,
'flags': {
'sso:linked': bool(getattr(obj.flags, 'sso:linked')),
'sso:invalid': bool(getattr(obj.flags, 'sso:invalid')),
},
'dateCreated': obj.date_added,
}
return d
Add user ID to memberfrom __future__ import absolute_import
from sentry.api.serializers import Serializer, register
from sentry.models import OrganizationMember
@register(OrganizationMember)
class OrganizationMemberSerializer(Serializer):
def serialize(self, obj, attrs, user):
if obj.user:
user_data = {'id': obj.user.id}
else:
user_data = None
d = {
'id': str(obj.id),
'email': obj.get_email(),
'name': obj.user.get_display_name() if obj.user else obj.get_email(),
'user': user_data,
'role': obj.role,
'roleName': obj.get_role_display(),
'pending': obj.is_pending,
'flags': {
'sso:linked': bool(getattr(obj.flags, 'sso:linked')),
'sso:invalid': bool(getattr(obj.flags, 'sso:invalid')),
},
'dateCreated': obj.date_added,
}
return d
|
<commit_before>from __future__ import absolute_import
from sentry.api.serializers import Serializer, register
from sentry.models import OrganizationMember
@register(OrganizationMember)
class OrganizationMemberSerializer(Serializer):
def serialize(self, obj, attrs, user):
d = {
'id': str(obj.id),
'email': obj.get_email(),
'name': obj.user.get_display_name() if obj.user else obj.get_email(),
'role': obj.role,
'roleName': obj.get_role_display(),
'pending': obj.is_pending,
'flags': {
'sso:linked': bool(getattr(obj.flags, 'sso:linked')),
'sso:invalid': bool(getattr(obj.flags, 'sso:invalid')),
},
'dateCreated': obj.date_added,
}
return d
<commit_msg>Add user ID to member<commit_after>from __future__ import absolute_import
from sentry.api.serializers import Serializer, register
from sentry.models import OrganizationMember
@register(OrganizationMember)
class OrganizationMemberSerializer(Serializer):
def serialize(self, obj, attrs, user):
if obj.user:
user_data = {'id': obj.user.id}
else:
user_data = None
d = {
'id': str(obj.id),
'email': obj.get_email(),
'name': obj.user.get_display_name() if obj.user else obj.get_email(),
'user': user_data,
'role': obj.role,
'roleName': obj.get_role_display(),
'pending': obj.is_pending,
'flags': {
'sso:linked': bool(getattr(obj.flags, 'sso:linked')),
'sso:invalid': bool(getattr(obj.flags, 'sso:invalid')),
},
'dateCreated': obj.date_added,
}
return d
|
40fba178d449e0e570da9dc43d6f672cebb01359
|
astroML/plotting/settings.py
|
astroML/plotting/settings.py
|
def setup_text_plots(fontsize=8, usetex=True):
"""
This function adjusts matplotlib settings so that all figures in the
textbook have a uniform format and look.
"""
import matplotlib
from distutils.version import LooseVersion
matplotlib.rc('legend', fontsize=fontsize, handlelength=3)
matplotlib.rc('axes', titlesize=fontsize)
matplotlib.rc('axes', labelsize=fontsize)
matplotlib.rc('xtick', labelsize=fontsize)
matplotlib.rc('ytick', labelsize=fontsize)
matplotlib.rc('text', usetex=usetex)
matplotlib.rc('font', size=fontsize, family='serif',
style='normal', variant='normal',
stretch='normal', weight='normal')
matplotlib.rc('patch', force_edgecolor=True)
if LooseVersion(matplotlib.__version__) < LooseVersion("3.1"):
matplotlib.rc('_internal', classic_mode=True)
else:
# New in mpl 3.1
matplotlib.rc('scatter.edgecolors', 'b')
matplotlib.rc('grid', linestyle=':')
matplotlib.rc('errorbar', capsize=3)
matplotlib.rc('image', cmap='viridis')
matplotlib.rc('axes', xmargin=0)
matplotlib.rc('axes', ymargin=0)
matplotlib.rc('xtick', direction='in')
matplotlib.rc('ytick', direction='in')
matplotlib.rc('xtick', top=True)
matplotlib.rc('ytick', right=True)
|
def setup_text_plots(fontsize=8, usetex=True):
"""
This function adjusts matplotlib settings so that all figures in the
textbook have a uniform format and look.
"""
import matplotlib
from distutils.version import LooseVersion
matplotlib.rc('legend', fontsize=fontsize, handlelength=3)
matplotlib.rc('axes', titlesize=fontsize)
matplotlib.rc('axes', labelsize=fontsize)
matplotlib.rc('xtick', labelsize=fontsize)
matplotlib.rc('ytick', labelsize=fontsize)
matplotlib.rc('text', usetex=usetex)
matplotlib.rc('font', size=fontsize, family='serif',
style='normal', variant='normal',
stretch='normal', weight='normal')
matplotlib.rc('patch', force_edgecolor=True)
if LooseVersion(matplotlib.__version__) < LooseVersion("3.1"):
matplotlib.rc('_internal', classic_mode=True)
else:
# New in mpl 3.1
matplotlib.rc('scatter', edgecolors='b')
matplotlib.rc('grid', linestyle=':')
matplotlib.rc('errorbar', capsize=3)
matplotlib.rc('image', cmap='viridis')
matplotlib.rc('axes', xmargin=0)
matplotlib.rc('axes', ymargin=0)
matplotlib.rc('xtick', direction='in')
matplotlib.rc('ytick', direction='in')
matplotlib.rc('xtick', top=True)
matplotlib.rc('ytick', right=True)
|
Fix rc syntax for mpl 3.1
|
Fix rc syntax for mpl 3.1
|
Python
|
bsd-2-clause
|
astroML/astroML
|
def setup_text_plots(fontsize=8, usetex=True):
"""
This function adjusts matplotlib settings so that all figures in the
textbook have a uniform format and look.
"""
import matplotlib
from distutils.version import LooseVersion
matplotlib.rc('legend', fontsize=fontsize, handlelength=3)
matplotlib.rc('axes', titlesize=fontsize)
matplotlib.rc('axes', labelsize=fontsize)
matplotlib.rc('xtick', labelsize=fontsize)
matplotlib.rc('ytick', labelsize=fontsize)
matplotlib.rc('text', usetex=usetex)
matplotlib.rc('font', size=fontsize, family='serif',
style='normal', variant='normal',
stretch='normal', weight='normal')
matplotlib.rc('patch', force_edgecolor=True)
if LooseVersion(matplotlib.__version__) < LooseVersion("3.1"):
matplotlib.rc('_internal', classic_mode=True)
else:
# New in mpl 3.1
matplotlib.rc('scatter.edgecolors', 'b')
matplotlib.rc('grid', linestyle=':')
matplotlib.rc('errorbar', capsize=3)
matplotlib.rc('image', cmap='viridis')
matplotlib.rc('axes', xmargin=0)
matplotlib.rc('axes', ymargin=0)
matplotlib.rc('xtick', direction='in')
matplotlib.rc('ytick', direction='in')
matplotlib.rc('xtick', top=True)
matplotlib.rc('ytick', right=True)
Fix rc syntax for mpl 3.1
|
def setup_text_plots(fontsize=8, usetex=True):
"""
This function adjusts matplotlib settings so that all figures in the
textbook have a uniform format and look.
"""
import matplotlib
from distutils.version import LooseVersion
matplotlib.rc('legend', fontsize=fontsize, handlelength=3)
matplotlib.rc('axes', titlesize=fontsize)
matplotlib.rc('axes', labelsize=fontsize)
matplotlib.rc('xtick', labelsize=fontsize)
matplotlib.rc('ytick', labelsize=fontsize)
matplotlib.rc('text', usetex=usetex)
matplotlib.rc('font', size=fontsize, family='serif',
style='normal', variant='normal',
stretch='normal', weight='normal')
matplotlib.rc('patch', force_edgecolor=True)
if LooseVersion(matplotlib.__version__) < LooseVersion("3.1"):
matplotlib.rc('_internal', classic_mode=True)
else:
# New in mpl 3.1
matplotlib.rc('scatter', edgecolors='b')
matplotlib.rc('grid', linestyle=':')
matplotlib.rc('errorbar', capsize=3)
matplotlib.rc('image', cmap='viridis')
matplotlib.rc('axes', xmargin=0)
matplotlib.rc('axes', ymargin=0)
matplotlib.rc('xtick', direction='in')
matplotlib.rc('ytick', direction='in')
matplotlib.rc('xtick', top=True)
matplotlib.rc('ytick', right=True)
|
<commit_before>def setup_text_plots(fontsize=8, usetex=True):
"""
This function adjusts matplotlib settings so that all figures in the
textbook have a uniform format and look.
"""
import matplotlib
from distutils.version import LooseVersion
matplotlib.rc('legend', fontsize=fontsize, handlelength=3)
matplotlib.rc('axes', titlesize=fontsize)
matplotlib.rc('axes', labelsize=fontsize)
matplotlib.rc('xtick', labelsize=fontsize)
matplotlib.rc('ytick', labelsize=fontsize)
matplotlib.rc('text', usetex=usetex)
matplotlib.rc('font', size=fontsize, family='serif',
style='normal', variant='normal',
stretch='normal', weight='normal')
matplotlib.rc('patch', force_edgecolor=True)
if LooseVersion(matplotlib.__version__) < LooseVersion("3.1"):
matplotlib.rc('_internal', classic_mode=True)
else:
# New in mpl 3.1
matplotlib.rc('scatter.edgecolors', 'b')
matplotlib.rc('grid', linestyle=':')
matplotlib.rc('errorbar', capsize=3)
matplotlib.rc('image', cmap='viridis')
matplotlib.rc('axes', xmargin=0)
matplotlib.rc('axes', ymargin=0)
matplotlib.rc('xtick', direction='in')
matplotlib.rc('ytick', direction='in')
matplotlib.rc('xtick', top=True)
matplotlib.rc('ytick', right=True)
<commit_msg>Fix rc syntax for mpl 3.1<commit_after>
|
def setup_text_plots(fontsize=8, usetex=True):
"""
This function adjusts matplotlib settings so that all figures in the
textbook have a uniform format and look.
"""
import matplotlib
from distutils.version import LooseVersion
matplotlib.rc('legend', fontsize=fontsize, handlelength=3)
matplotlib.rc('axes', titlesize=fontsize)
matplotlib.rc('axes', labelsize=fontsize)
matplotlib.rc('xtick', labelsize=fontsize)
matplotlib.rc('ytick', labelsize=fontsize)
matplotlib.rc('text', usetex=usetex)
matplotlib.rc('font', size=fontsize, family='serif',
style='normal', variant='normal',
stretch='normal', weight='normal')
matplotlib.rc('patch', force_edgecolor=True)
if LooseVersion(matplotlib.__version__) < LooseVersion("3.1"):
matplotlib.rc('_internal', classic_mode=True)
else:
# New in mpl 3.1
matplotlib.rc('scatter', edgecolors='b')
matplotlib.rc('grid', linestyle=':')
matplotlib.rc('errorbar', capsize=3)
matplotlib.rc('image', cmap='viridis')
matplotlib.rc('axes', xmargin=0)
matplotlib.rc('axes', ymargin=0)
matplotlib.rc('xtick', direction='in')
matplotlib.rc('ytick', direction='in')
matplotlib.rc('xtick', top=True)
matplotlib.rc('ytick', right=True)
|
def setup_text_plots(fontsize=8, usetex=True):
"""
This function adjusts matplotlib settings so that all figures in the
textbook have a uniform format and look.
"""
import matplotlib
from distutils.version import LooseVersion
matplotlib.rc('legend', fontsize=fontsize, handlelength=3)
matplotlib.rc('axes', titlesize=fontsize)
matplotlib.rc('axes', labelsize=fontsize)
matplotlib.rc('xtick', labelsize=fontsize)
matplotlib.rc('ytick', labelsize=fontsize)
matplotlib.rc('text', usetex=usetex)
matplotlib.rc('font', size=fontsize, family='serif',
style='normal', variant='normal',
stretch='normal', weight='normal')
matplotlib.rc('patch', force_edgecolor=True)
if LooseVersion(matplotlib.__version__) < LooseVersion("3.1"):
matplotlib.rc('_internal', classic_mode=True)
else:
# New in mpl 3.1
matplotlib.rc('scatter.edgecolors', 'b')
matplotlib.rc('grid', linestyle=':')
matplotlib.rc('errorbar', capsize=3)
matplotlib.rc('image', cmap='viridis')
matplotlib.rc('axes', xmargin=0)
matplotlib.rc('axes', ymargin=0)
matplotlib.rc('xtick', direction='in')
matplotlib.rc('ytick', direction='in')
matplotlib.rc('xtick', top=True)
matplotlib.rc('ytick', right=True)
Fix rc syntax for mpl 3.1def setup_text_plots(fontsize=8, usetex=True):
"""
This function adjusts matplotlib settings so that all figures in the
textbook have a uniform format and look.
"""
import matplotlib
from distutils.version import LooseVersion
matplotlib.rc('legend', fontsize=fontsize, handlelength=3)
matplotlib.rc('axes', titlesize=fontsize)
matplotlib.rc('axes', labelsize=fontsize)
matplotlib.rc('xtick', labelsize=fontsize)
matplotlib.rc('ytick', labelsize=fontsize)
matplotlib.rc('text', usetex=usetex)
matplotlib.rc('font', size=fontsize, family='serif',
style='normal', variant='normal',
stretch='normal', weight='normal')
matplotlib.rc('patch', force_edgecolor=True)
if LooseVersion(matplotlib.__version__) < LooseVersion("3.1"):
matplotlib.rc('_internal', classic_mode=True)
else:
# New in mpl 3.1
matplotlib.rc('scatter', edgecolors='b')
matplotlib.rc('grid', linestyle=':')
matplotlib.rc('errorbar', capsize=3)
matplotlib.rc('image', cmap='viridis')
matplotlib.rc('axes', xmargin=0)
matplotlib.rc('axes', ymargin=0)
matplotlib.rc('xtick', direction='in')
matplotlib.rc('ytick', direction='in')
matplotlib.rc('xtick', top=True)
matplotlib.rc('ytick', right=True)
|
<commit_before>def setup_text_plots(fontsize=8, usetex=True):
"""
This function adjusts matplotlib settings so that all figures in the
textbook have a uniform format and look.
"""
import matplotlib
from distutils.version import LooseVersion
matplotlib.rc('legend', fontsize=fontsize, handlelength=3)
matplotlib.rc('axes', titlesize=fontsize)
matplotlib.rc('axes', labelsize=fontsize)
matplotlib.rc('xtick', labelsize=fontsize)
matplotlib.rc('ytick', labelsize=fontsize)
matplotlib.rc('text', usetex=usetex)
matplotlib.rc('font', size=fontsize, family='serif',
style='normal', variant='normal',
stretch='normal', weight='normal')
matplotlib.rc('patch', force_edgecolor=True)
if LooseVersion(matplotlib.__version__) < LooseVersion("3.1"):
matplotlib.rc('_internal', classic_mode=True)
else:
# New in mpl 3.1
matplotlib.rc('scatter.edgecolors', 'b')
matplotlib.rc('grid', linestyle=':')
matplotlib.rc('errorbar', capsize=3)
matplotlib.rc('image', cmap='viridis')
matplotlib.rc('axes', xmargin=0)
matplotlib.rc('axes', ymargin=0)
matplotlib.rc('xtick', direction='in')
matplotlib.rc('ytick', direction='in')
matplotlib.rc('xtick', top=True)
matplotlib.rc('ytick', right=True)
<commit_msg>Fix rc syntax for mpl 3.1<commit_after>def setup_text_plots(fontsize=8, usetex=True):
"""
This function adjusts matplotlib settings so that all figures in the
textbook have a uniform format and look.
"""
import matplotlib
from distutils.version import LooseVersion
matplotlib.rc('legend', fontsize=fontsize, handlelength=3)
matplotlib.rc('axes', titlesize=fontsize)
matplotlib.rc('axes', labelsize=fontsize)
matplotlib.rc('xtick', labelsize=fontsize)
matplotlib.rc('ytick', labelsize=fontsize)
matplotlib.rc('text', usetex=usetex)
matplotlib.rc('font', size=fontsize, family='serif',
style='normal', variant='normal',
stretch='normal', weight='normal')
matplotlib.rc('patch', force_edgecolor=True)
if LooseVersion(matplotlib.__version__) < LooseVersion("3.1"):
matplotlib.rc('_internal', classic_mode=True)
else:
# New in mpl 3.1
matplotlib.rc('scatter', edgecolors='b')
matplotlib.rc('grid', linestyle=':')
matplotlib.rc('errorbar', capsize=3)
matplotlib.rc('image', cmap='viridis')
matplotlib.rc('axes', xmargin=0)
matplotlib.rc('axes', ymargin=0)
matplotlib.rc('xtick', direction='in')
matplotlib.rc('ytick', direction='in')
matplotlib.rc('xtick', top=True)
matplotlib.rc('ytick', right=True)
|
15492594186b6a0dcea510e6896310f5e45368fc
|
instance/serializers.py
|
instance/serializers.py
|
"""
Instance serializers (API representation)
"""
#pylint: disable=no-init
from rest_framework import serializers
from .models import OpenStackServer, OpenEdXInstance
class OpenStackServerSerializer(serializers.ModelSerializer):
pk = serializers.HyperlinkedIdentityField(view_name='api:openstackserver-detail')
instance = serializers.HyperlinkedRelatedField(view_name='api:openedxinstance-detail', read_only=True)
class Meta:
model = OpenStackServer
fields = ('pk', 'status', 'instance', 'openstack_id', 'created', 'modified')
class OpenEdXInstanceSerializer(serializers.ModelSerializer):
pk = serializers.HyperlinkedIdentityField(view_name='api:openedxinstance-detail')
server_set = OpenStackServerSerializer(many=True, read_only=True)
class Meta:
model = OpenEdXInstance
fields = ('pk', 'server_set', 'sub_domain', 'base_domain', 'email', 'name', 'protocol',
'domain', 'url', 'branch_name', 'commit_id', 'github_organization_name',
'github_organization_name', 'github_base_url', 'repository_url', 'updates_feed',
'vars_str', 'created', 'modified')
|
"""
Instance serializers (API representation)
"""
#pylint: disable=no-init
from rest_framework import serializers
from .models import OpenStackServer, OpenEdXInstance
class OpenStackServerSerializer(serializers.ModelSerializer):
pk_url = serializers.HyperlinkedIdentityField(view_name='api:openstackserver-detail')
instance = serializers.HyperlinkedRelatedField(view_name='api:openedxinstance-detail', read_only=True)
class Meta:
model = OpenStackServer
fields = ('pk', 'pk_url', 'status', 'instance', 'openstack_id', 'created', 'modified')
class OpenEdXInstanceSerializer(serializers.ModelSerializer):
pk_url = serializers.HyperlinkedIdentityField(view_name='api:openedxinstance-detail')
server_set = OpenStackServerSerializer(many=True, read_only=True)
class Meta:
model = OpenEdXInstance
fields = ('pk', 'pk_url', 'server_set', 'sub_domain', 'base_domain', 'email', 'name', 'protocol',
'domain', 'url', 'branch_name', 'commit_id', 'github_organization_name',
'github_organization_name', 'github_base_url', 'repository_url', 'updates_feed',
'vars_str', 'created', 'modified')
|
Include both the numerical instance.pk & its API URL
|
API: Include both the numerical instance.pk & its API URL
|
Python
|
agpl-3.0
|
open-craft/opencraft,omarkhan/opencraft,open-craft/opencraft,omarkhan/opencraft,omarkhan/opencraft,open-craft/opencraft,omarkhan/opencraft,brousch/opencraft,brousch/opencraft,brousch/opencraft,open-craft/opencraft,open-craft/opencraft
|
"""
Instance serializers (API representation)
"""
#pylint: disable=no-init
from rest_framework import serializers
from .models import OpenStackServer, OpenEdXInstance
class OpenStackServerSerializer(serializers.ModelSerializer):
pk = serializers.HyperlinkedIdentityField(view_name='api:openstackserver-detail')
instance = serializers.HyperlinkedRelatedField(view_name='api:openedxinstance-detail', read_only=True)
class Meta:
model = OpenStackServer
fields = ('pk', 'status', 'instance', 'openstack_id', 'created', 'modified')
class OpenEdXInstanceSerializer(serializers.ModelSerializer):
pk = serializers.HyperlinkedIdentityField(view_name='api:openedxinstance-detail')
server_set = OpenStackServerSerializer(many=True, read_only=True)
class Meta:
model = OpenEdXInstance
fields = ('pk', 'server_set', 'sub_domain', 'base_domain', 'email', 'name', 'protocol',
'domain', 'url', 'branch_name', 'commit_id', 'github_organization_name',
'github_organization_name', 'github_base_url', 'repository_url', 'updates_feed',
'vars_str', 'created', 'modified')
API: Include both the numerical instance.pk & its API URL
|
"""
Instance serializers (API representation)
"""
#pylint: disable=no-init
from rest_framework import serializers
from .models import OpenStackServer, OpenEdXInstance
class OpenStackServerSerializer(serializers.ModelSerializer):
pk_url = serializers.HyperlinkedIdentityField(view_name='api:openstackserver-detail')
instance = serializers.HyperlinkedRelatedField(view_name='api:openedxinstance-detail', read_only=True)
class Meta:
model = OpenStackServer
fields = ('pk', 'pk_url', 'status', 'instance', 'openstack_id', 'created', 'modified')
class OpenEdXInstanceSerializer(serializers.ModelSerializer):
pk_url = serializers.HyperlinkedIdentityField(view_name='api:openedxinstance-detail')
server_set = OpenStackServerSerializer(many=True, read_only=True)
class Meta:
model = OpenEdXInstance
fields = ('pk', 'pk_url', 'server_set', 'sub_domain', 'base_domain', 'email', 'name', 'protocol',
'domain', 'url', 'branch_name', 'commit_id', 'github_organization_name',
'github_organization_name', 'github_base_url', 'repository_url', 'updates_feed',
'vars_str', 'created', 'modified')
|
<commit_before>"""
Instance serializers (API representation)
"""
#pylint: disable=no-init
from rest_framework import serializers
from .models import OpenStackServer, OpenEdXInstance
class OpenStackServerSerializer(serializers.ModelSerializer):
pk = serializers.HyperlinkedIdentityField(view_name='api:openstackserver-detail')
instance = serializers.HyperlinkedRelatedField(view_name='api:openedxinstance-detail', read_only=True)
class Meta:
model = OpenStackServer
fields = ('pk', 'status', 'instance', 'openstack_id', 'created', 'modified')
class OpenEdXInstanceSerializer(serializers.ModelSerializer):
pk = serializers.HyperlinkedIdentityField(view_name='api:openedxinstance-detail')
server_set = OpenStackServerSerializer(many=True, read_only=True)
class Meta:
model = OpenEdXInstance
fields = ('pk', 'server_set', 'sub_domain', 'base_domain', 'email', 'name', 'protocol',
'domain', 'url', 'branch_name', 'commit_id', 'github_organization_name',
'github_organization_name', 'github_base_url', 'repository_url', 'updates_feed',
'vars_str', 'created', 'modified')
<commit_msg>API: Include both the numerical instance.pk & its API URL<commit_after>
|
"""
Instance serializers (API representation)
"""
#pylint: disable=no-init
from rest_framework import serializers
from .models import OpenStackServer, OpenEdXInstance
class OpenStackServerSerializer(serializers.ModelSerializer):
pk_url = serializers.HyperlinkedIdentityField(view_name='api:openstackserver-detail')
instance = serializers.HyperlinkedRelatedField(view_name='api:openedxinstance-detail', read_only=True)
class Meta:
model = OpenStackServer
fields = ('pk', 'pk_url', 'status', 'instance', 'openstack_id', 'created', 'modified')
class OpenEdXInstanceSerializer(serializers.ModelSerializer):
pk_url = serializers.HyperlinkedIdentityField(view_name='api:openedxinstance-detail')
server_set = OpenStackServerSerializer(many=True, read_only=True)
class Meta:
model = OpenEdXInstance
fields = ('pk', 'pk_url', 'server_set', 'sub_domain', 'base_domain', 'email', 'name', 'protocol',
'domain', 'url', 'branch_name', 'commit_id', 'github_organization_name',
'github_organization_name', 'github_base_url', 'repository_url', 'updates_feed',
'vars_str', 'created', 'modified')
|
"""
Instance serializers (API representation)
"""
#pylint: disable=no-init
from rest_framework import serializers
from .models import OpenStackServer, OpenEdXInstance
class OpenStackServerSerializer(serializers.ModelSerializer):
pk = serializers.HyperlinkedIdentityField(view_name='api:openstackserver-detail')
instance = serializers.HyperlinkedRelatedField(view_name='api:openedxinstance-detail', read_only=True)
class Meta:
model = OpenStackServer
fields = ('pk', 'status', 'instance', 'openstack_id', 'created', 'modified')
class OpenEdXInstanceSerializer(serializers.ModelSerializer):
pk = serializers.HyperlinkedIdentityField(view_name='api:openedxinstance-detail')
server_set = OpenStackServerSerializer(many=True, read_only=True)
class Meta:
model = OpenEdXInstance
fields = ('pk', 'server_set', 'sub_domain', 'base_domain', 'email', 'name', 'protocol',
'domain', 'url', 'branch_name', 'commit_id', 'github_organization_name',
'github_organization_name', 'github_base_url', 'repository_url', 'updates_feed',
'vars_str', 'created', 'modified')
API: Include both the numerical instance.pk & its API URL"""
Instance serializers (API representation)
"""
#pylint: disable=no-init
from rest_framework import serializers
from .models import OpenStackServer, OpenEdXInstance
class OpenStackServerSerializer(serializers.ModelSerializer):
pk_url = serializers.HyperlinkedIdentityField(view_name='api:openstackserver-detail')
instance = serializers.HyperlinkedRelatedField(view_name='api:openedxinstance-detail', read_only=True)
class Meta:
model = OpenStackServer
fields = ('pk', 'pk_url', 'status', 'instance', 'openstack_id', 'created', 'modified')
class OpenEdXInstanceSerializer(serializers.ModelSerializer):
pk_url = serializers.HyperlinkedIdentityField(view_name='api:openedxinstance-detail')
server_set = OpenStackServerSerializer(many=True, read_only=True)
class Meta:
model = OpenEdXInstance
fields = ('pk', 'pk_url', 'server_set', 'sub_domain', 'base_domain', 'email', 'name', 'protocol',
'domain', 'url', 'branch_name', 'commit_id', 'github_organization_name',
'github_organization_name', 'github_base_url', 'repository_url', 'updates_feed',
'vars_str', 'created', 'modified')
|
<commit_before>"""
Instance serializers (API representation)
"""
#pylint: disable=no-init
from rest_framework import serializers
from .models import OpenStackServer, OpenEdXInstance
class OpenStackServerSerializer(serializers.ModelSerializer):
pk = serializers.HyperlinkedIdentityField(view_name='api:openstackserver-detail')
instance = serializers.HyperlinkedRelatedField(view_name='api:openedxinstance-detail', read_only=True)
class Meta:
model = OpenStackServer
fields = ('pk', 'status', 'instance', 'openstack_id', 'created', 'modified')
class OpenEdXInstanceSerializer(serializers.ModelSerializer):
pk = serializers.HyperlinkedIdentityField(view_name='api:openedxinstance-detail')
server_set = OpenStackServerSerializer(many=True, read_only=True)
class Meta:
model = OpenEdXInstance
fields = ('pk', 'server_set', 'sub_domain', 'base_domain', 'email', 'name', 'protocol',
'domain', 'url', 'branch_name', 'commit_id', 'github_organization_name',
'github_organization_name', 'github_base_url', 'repository_url', 'updates_feed',
'vars_str', 'created', 'modified')
<commit_msg>API: Include both the numerical instance.pk & its API URL<commit_after>"""
Instance serializers (API representation)
"""
#pylint: disable=no-init
from rest_framework import serializers
from .models import OpenStackServer, OpenEdXInstance
class OpenStackServerSerializer(serializers.ModelSerializer):
pk_url = serializers.HyperlinkedIdentityField(view_name='api:openstackserver-detail')
instance = serializers.HyperlinkedRelatedField(view_name='api:openedxinstance-detail', read_only=True)
class Meta:
model = OpenStackServer
fields = ('pk', 'pk_url', 'status', 'instance', 'openstack_id', 'created', 'modified')
class OpenEdXInstanceSerializer(serializers.ModelSerializer):
pk_url = serializers.HyperlinkedIdentityField(view_name='api:openedxinstance-detail')
server_set = OpenStackServerSerializer(many=True, read_only=True)
class Meta:
model = OpenEdXInstance
fields = ('pk', 'pk_url', 'server_set', 'sub_domain', 'base_domain', 'email', 'name', 'protocol',
'domain', 'url', 'branch_name', 'commit_id', 'github_organization_name',
'github_organization_name', 'github_base_url', 'repository_url', 'updates_feed',
'vars_str', 'created', 'modified')
|
0a005806afb436a2ad01275a969ae6afc3c5a72c
|
cloud_browser/__init__.py
|
cloud_browser/__init__.py
|
"""Cloud browser application.
Provides a simple filesystem-like browser interface for cloud blob datastores.
"""
VERSION = (0, 4, 0)
__version__ = ".".join(str(v) for v in VERSION)
__version_full__ = __version__
|
"""Cloud browser application.
Provides a simple filesystem-like browser interface for cloud blob datastores.
"""
VERSION = (0, 0, 0) # placeholder, real value is set by `fab sdist`
__version__ = ".".join(str(v) for v in VERSION)
__version_full__ = __version__
|
Make it explicit that VERSION is a placeholder
|
Make it explicit that VERSION is a placeholder
|
Python
|
mit
|
ryan-roemer/django-cloud-browser,ryan-roemer/django-cloud-browser,ryan-roemer/django-cloud-browser
|
"""Cloud browser application.
Provides a simple filesystem-like browser interface for cloud blob datastores.
"""
VERSION = (0, 4, 0)
__version__ = ".".join(str(v) for v in VERSION)
__version_full__ = __version__
Make it explicit that VERSION is a placeholder
|
"""Cloud browser application.
Provides a simple filesystem-like browser interface for cloud blob datastores.
"""
VERSION = (0, 0, 0) # placeholder, real value is set by `fab sdist`
__version__ = ".".join(str(v) for v in VERSION)
__version_full__ = __version__
|
<commit_before>"""Cloud browser application.
Provides a simple filesystem-like browser interface for cloud blob datastores.
"""
VERSION = (0, 4, 0)
__version__ = ".".join(str(v) for v in VERSION)
__version_full__ = __version__
<commit_msg>Make it explicit that VERSION is a placeholder<commit_after>
|
"""Cloud browser application.
Provides a simple filesystem-like browser interface for cloud blob datastores.
"""
VERSION = (0, 0, 0) # placeholder, real value is set by `fab sdist`
__version__ = ".".join(str(v) for v in VERSION)
__version_full__ = __version__
|
"""Cloud browser application.
Provides a simple filesystem-like browser interface for cloud blob datastores.
"""
VERSION = (0, 4, 0)
__version__ = ".".join(str(v) for v in VERSION)
__version_full__ = __version__
Make it explicit that VERSION is a placeholder"""Cloud browser application.
Provides a simple filesystem-like browser interface for cloud blob datastores.
"""
VERSION = (0, 0, 0) # placeholder, real value is set by `fab sdist`
__version__ = ".".join(str(v) for v in VERSION)
__version_full__ = __version__
|
<commit_before>"""Cloud browser application.
Provides a simple filesystem-like browser interface for cloud blob datastores.
"""
VERSION = (0, 4, 0)
__version__ = ".".join(str(v) for v in VERSION)
__version_full__ = __version__
<commit_msg>Make it explicit that VERSION is a placeholder<commit_after>"""Cloud browser application.
Provides a simple filesystem-like browser interface for cloud blob datastores.
"""
VERSION = (0, 0, 0) # placeholder, real value is set by `fab sdist`
__version__ = ".".join(str(v) for v in VERSION)
__version_full__ = __version__
|
7281a7525b20e48147049229a9faa0cb97340427
|
src/client/packaging/pypi/delphi_epidata/__init__.py
|
src/client/packaging/pypi/delphi_epidata/__init__.py
|
from .delphi_epidata import Epidata
name = 'delphi_epidata'
__version__ = '0.0.5'
|
from .delphi_epidata import Epidata
name = 'delphi_epidata'
__version__ = '0.0.6'
|
Increment client version number (again)
|
Increment client version number (again)
|
Python
|
mit
|
cmu-delphi/delphi-epidata,cmu-delphi/delphi-epidata,cmu-delphi/delphi-epidata,cmu-delphi/delphi-epidata,cmu-delphi/delphi-epidata,cmu-delphi/delphi-epidata
|
from .delphi_epidata import Epidata
name = 'delphi_epidata'
__version__ = '0.0.5'
Increment client version number (again)
|
from .delphi_epidata import Epidata
name = 'delphi_epidata'
__version__ = '0.0.6'
|
<commit_before>from .delphi_epidata import Epidata
name = 'delphi_epidata'
__version__ = '0.0.5'
<commit_msg>Increment client version number (again)<commit_after>
|
from .delphi_epidata import Epidata
name = 'delphi_epidata'
__version__ = '0.0.6'
|
from .delphi_epidata import Epidata
name = 'delphi_epidata'
__version__ = '0.0.5'
Increment client version number (again)from .delphi_epidata import Epidata
name = 'delphi_epidata'
__version__ = '0.0.6'
|
<commit_before>from .delphi_epidata import Epidata
name = 'delphi_epidata'
__version__ = '0.0.5'
<commit_msg>Increment client version number (again)<commit_after>from .delphi_epidata import Epidata
name = 'delphi_epidata'
__version__ = '0.0.6'
|
0997b216ea520aca2d8d62ac31a238c7280302ca
|
bananas/admin/api/serializers.py
|
bananas/admin/api/serializers.py
|
from django.contrib.auth import password_validation
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
class AuthenticationSerializer(serializers.Serializer):
username = serializers.CharField(label=_("Username"), write_only=True)
password = serializers.CharField(label=_("Password"), write_only=True)
class PasswordChangeSerializer(serializers.Serializer):
old_password = serializers.CharField(label=_("Old password"), write_only=True)
new_password1 = serializers.CharField(
label=_("New password"),
help_text=password_validation.password_validators_help_text_html(),
write_only=True,
)
new_password2 = serializers.CharField(
label=_("New password confirmation"), write_only=True
)
|
from django.contrib.auth.password_validation import password_validators_help_texts
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
class AuthenticationSerializer(serializers.Serializer):
username = serializers.CharField(label=_("Username"), write_only=True)
password = serializers.CharField(label=_("Password"), write_only=True)
class PasswordChangeSerializer(serializers.Serializer):
old_password = serializers.CharField(label=_("Old password"), write_only=True)
new_password1 = serializers.CharField(
label=_("New password"),
help_text=password_validators_help_texts(),
write_only=True,
)
new_password2 = serializers.CharField(
label=_("New password confirmation"), write_only=True
)
|
Use plain password help text instead of html
|
Use plain password help text instead of html
|
Python
|
mit
|
5monkeys/django-bananas,5monkeys/django-bananas,5monkeys/django-bananas
|
from django.contrib.auth import password_validation
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
class AuthenticationSerializer(serializers.Serializer):
username = serializers.CharField(label=_("Username"), write_only=True)
password = serializers.CharField(label=_("Password"), write_only=True)
class PasswordChangeSerializer(serializers.Serializer):
old_password = serializers.CharField(label=_("Old password"), write_only=True)
new_password1 = serializers.CharField(
label=_("New password"),
help_text=password_validation.password_validators_help_text_html(),
write_only=True,
)
new_password2 = serializers.CharField(
label=_("New password confirmation"), write_only=True
)
Use plain password help text instead of html
|
from django.contrib.auth.password_validation import password_validators_help_texts
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
class AuthenticationSerializer(serializers.Serializer):
username = serializers.CharField(label=_("Username"), write_only=True)
password = serializers.CharField(label=_("Password"), write_only=True)
class PasswordChangeSerializer(serializers.Serializer):
old_password = serializers.CharField(label=_("Old password"), write_only=True)
new_password1 = serializers.CharField(
label=_("New password"),
help_text=password_validators_help_texts(),
write_only=True,
)
new_password2 = serializers.CharField(
label=_("New password confirmation"), write_only=True
)
|
<commit_before>from django.contrib.auth import password_validation
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
class AuthenticationSerializer(serializers.Serializer):
username = serializers.CharField(label=_("Username"), write_only=True)
password = serializers.CharField(label=_("Password"), write_only=True)
class PasswordChangeSerializer(serializers.Serializer):
old_password = serializers.CharField(label=_("Old password"), write_only=True)
new_password1 = serializers.CharField(
label=_("New password"),
help_text=password_validation.password_validators_help_text_html(),
write_only=True,
)
new_password2 = serializers.CharField(
label=_("New password confirmation"), write_only=True
)
<commit_msg>Use plain password help text instead of html<commit_after>
|
from django.contrib.auth.password_validation import password_validators_help_texts
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
class AuthenticationSerializer(serializers.Serializer):
username = serializers.CharField(label=_("Username"), write_only=True)
password = serializers.CharField(label=_("Password"), write_only=True)
class PasswordChangeSerializer(serializers.Serializer):
old_password = serializers.CharField(label=_("Old password"), write_only=True)
new_password1 = serializers.CharField(
label=_("New password"),
help_text=password_validators_help_texts(),
write_only=True,
)
new_password2 = serializers.CharField(
label=_("New password confirmation"), write_only=True
)
|
from django.contrib.auth import password_validation
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
class AuthenticationSerializer(serializers.Serializer):
username = serializers.CharField(label=_("Username"), write_only=True)
password = serializers.CharField(label=_("Password"), write_only=True)
class PasswordChangeSerializer(serializers.Serializer):
old_password = serializers.CharField(label=_("Old password"), write_only=True)
new_password1 = serializers.CharField(
label=_("New password"),
help_text=password_validation.password_validators_help_text_html(),
write_only=True,
)
new_password2 = serializers.CharField(
label=_("New password confirmation"), write_only=True
)
Use plain password help text instead of htmlfrom django.contrib.auth.password_validation import password_validators_help_texts
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
class AuthenticationSerializer(serializers.Serializer):
username = serializers.CharField(label=_("Username"), write_only=True)
password = serializers.CharField(label=_("Password"), write_only=True)
class PasswordChangeSerializer(serializers.Serializer):
old_password = serializers.CharField(label=_("Old password"), write_only=True)
new_password1 = serializers.CharField(
label=_("New password"),
help_text=password_validators_help_texts(),
write_only=True,
)
new_password2 = serializers.CharField(
label=_("New password confirmation"), write_only=True
)
|
<commit_before>from django.contrib.auth import password_validation
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
class AuthenticationSerializer(serializers.Serializer):
username = serializers.CharField(label=_("Username"), write_only=True)
password = serializers.CharField(label=_("Password"), write_only=True)
class PasswordChangeSerializer(serializers.Serializer):
old_password = serializers.CharField(label=_("Old password"), write_only=True)
new_password1 = serializers.CharField(
label=_("New password"),
help_text=password_validation.password_validators_help_text_html(),
write_only=True,
)
new_password2 = serializers.CharField(
label=_("New password confirmation"), write_only=True
)
<commit_msg>Use plain password help text instead of html<commit_after>from django.contrib.auth.password_validation import password_validators_help_texts
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
class AuthenticationSerializer(serializers.Serializer):
username = serializers.CharField(label=_("Username"), write_only=True)
password = serializers.CharField(label=_("Password"), write_only=True)
class PasswordChangeSerializer(serializers.Serializer):
old_password = serializers.CharField(label=_("Old password"), write_only=True)
new_password1 = serializers.CharField(
label=_("New password"),
help_text=password_validators_help_texts(),
write_only=True,
)
new_password2 = serializers.CharField(
label=_("New password confirmation"), write_only=True
)
|
cc7de0147d773722db026d2571cc94c6ee01c9e0
|
new/energies/zeeman.py
|
new/energies/zeeman.py
|
class FixedZeeman(object):
def __init__(self, H, multiplier=1, name='fixedzeeman'):
if not isinstance(H, (list, tuple)) or len(H) != 3:
raise ValueError('H must be a 3-element tuple or list.')
else:
self.H = H
if not isinstance(multiplier, (float, int)):
raise ValueError('Multiplier must be a positive float or int.')
else:
self.multiplier = multiplier
if not isinstance(name, str):
raise ValueError('name must be a string.')
else:
self.name = name
def get_mif(self):
# Create mif string.
mif = '# FixedZeeman\n'
mif += 'Specify Oxs_FixedZeeman:{} '.format(self.name)
mif += '{\n'
mif += '\tfield {\n'
mif += '\t\tOxs_UniformVectorField {\n'
mif += '\t\t\tvector {'
mif += ' {} {} {} '.format(self.H[0], self.H[1], self.H[2])
mif += '}\n'
mif += '\t\t}\n'
mif += '\t}\n'
mif += '\tmultiplier {}\n'.format(self.multiplier)
mif += '}\n\n'
return mif
|
import numpy as np


class FixedZeeman(object):
    """Fixed (time-independent) Zeeman energy term.

    Renders an ``Oxs_FixedZeeman`` specification block for an OOMMF MIF
    file via :meth:`get_mif`.
    """

    def __init__(self, H, multiplier=1, name='fixedzeeman'):
        """Create a fixed Zeeman term.

        H          -- external field as a 3-element list, tuple, or numpy array
        multiplier -- scalar the field is multiplied by (int or float)
        name       -- identifier used in the MIF ``Specify`` line

        Raises ValueError when an argument has the wrong type or length.
        """
        if not isinstance(H, (list, tuple, np.ndarray)) or len(H) != 3:
            # Fixed: the message previously omitted numpy arrays even though
            # they are accepted, which made the error misleading.
            raise ValueError('H must be a 3-element tuple, list, or numpy array.')
        else:
            self.H = H
        if not isinstance(multiplier, (float, int)):
            raise ValueError('Multiplier must be a positive float or int.')
        else:
            self.multiplier = multiplier
        if not isinstance(name, str):
            raise ValueError('name must be a string.')
        else:
            self.name = name

    def get_mif(self):
        """Return the MIF ``Specify Oxs_FixedZeeman`` block as a string."""
        # Assembled from fragments and joined once; output is byte-identical
        # to the original incremental-concatenation version.
        fragments = [
            '# FixedZeeman\n',
            'Specify Oxs_FixedZeeman:{} '.format(self.name),
            '{\n',
            '\tfield {\n',
            '\t\tOxs_UniformVectorField {\n',
            '\t\t\tvector {',
            ' {} {} {} '.format(self.H[0], self.H[1], self.H[2]),
            '}\n',
            '\t\t}\n',
            '\t}\n',
            '\tmultiplier {}\n'.format(self.multiplier),
            '}\n\n',
        ]
        return ''.join(fragments)
|
Add numpy array as a possibility for setting external magnetic field.
|
Add numpy array as a possibility for setting external magnetic field.
|
Python
|
bsd-2-clause
|
fangohr/oommf-python,fangohr/oommf-python,fangohr/oommf-python
|
class FixedZeeman(object):
def __init__(self, H, multiplier=1, name='fixedzeeman'):
if not isinstance(H, (list, tuple)) or len(H) != 3:
raise ValueError('H must be a 3-element tuple or list.')
else:
self.H = H
if not isinstance(multiplier, (float, int)):
raise ValueError('Multiplier must be a positive float or int.')
else:
self.multiplier = multiplier
if not isinstance(name, str):
raise ValueError('name must be a string.')
else:
self.name = name
def get_mif(self):
# Create mif string.
mif = '# FixedZeeman\n'
mif += 'Specify Oxs_FixedZeeman:{} '.format(self.name)
mif += '{\n'
mif += '\tfield {\n'
mif += '\t\tOxs_UniformVectorField {\n'
mif += '\t\t\tvector {'
mif += ' {} {} {} '.format(self.H[0], self.H[1], self.H[2])
mif += '}\n'
mif += '\t\t}\n'
mif += '\t}\n'
mif += '\tmultiplier {}\n'.format(self.multiplier)
mif += '}\n\n'
return mif
Add numpy array as a possibility for setting external magnetic field.
|
import numpy as np
class FixedZeeman(object):
def __init__(self, H, multiplier=1, name='fixedzeeman'):
if not isinstance(H, (list, tuple, np.ndarray)) or len(H) != 3:
raise ValueError('H must be a 3-element tuple or list.')
else:
self.H = H
if not isinstance(multiplier, (float, int)):
raise ValueError('Multiplier must be a positive float or int.')
else:
self.multiplier = multiplier
if not isinstance(name, str):
raise ValueError('name must be a string.')
else:
self.name = name
def get_mif(self):
# Create mif string.
mif = '# FixedZeeman\n'
mif += 'Specify Oxs_FixedZeeman:{} '.format(self.name)
mif += '{\n'
mif += '\tfield {\n'
mif += '\t\tOxs_UniformVectorField {\n'
mif += '\t\t\tvector {'
mif += ' {} {} {} '.format(self.H[0], self.H[1], self.H[2])
mif += '}\n'
mif += '\t\t}\n'
mif += '\t}\n'
mif += '\tmultiplier {}\n'.format(self.multiplier)
mif += '}\n\n'
return mif
|
<commit_before>class FixedZeeman(object):
def __init__(self, H, multiplier=1, name='fixedzeeman'):
if not isinstance(H, (list, tuple)) or len(H) != 3:
raise ValueError('H must be a 3-element tuple or list.')
else:
self.H = H
if not isinstance(multiplier, (float, int)):
raise ValueError('Multiplier must be a positive float or int.')
else:
self.multiplier = multiplier
if not isinstance(name, str):
raise ValueError('name must be a string.')
else:
self.name = name
def get_mif(self):
# Create mif string.
mif = '# FixedZeeman\n'
mif += 'Specify Oxs_FixedZeeman:{} '.format(self.name)
mif += '{\n'
mif += '\tfield {\n'
mif += '\t\tOxs_UniformVectorField {\n'
mif += '\t\t\tvector {'
mif += ' {} {} {} '.format(self.H[0], self.H[1], self.H[2])
mif += '}\n'
mif += '\t\t}\n'
mif += '\t}\n'
mif += '\tmultiplier {}\n'.format(self.multiplier)
mif += '}\n\n'
return mif
<commit_msg>Add numpy array as a possibility for setting external magnetic field.<commit_after>
|
import numpy as np
class FixedZeeman(object):
def __init__(self, H, multiplier=1, name='fixedzeeman'):
if not isinstance(H, (list, tuple, np.ndarray)) or len(H) != 3:
raise ValueError('H must be a 3-element tuple or list.')
else:
self.H = H
if not isinstance(multiplier, (float, int)):
raise ValueError('Multiplier must be a positive float or int.')
else:
self.multiplier = multiplier
if not isinstance(name, str):
raise ValueError('name must be a string.')
else:
self.name = name
def get_mif(self):
# Create mif string.
mif = '# FixedZeeman\n'
mif += 'Specify Oxs_FixedZeeman:{} '.format(self.name)
mif += '{\n'
mif += '\tfield {\n'
mif += '\t\tOxs_UniformVectorField {\n'
mif += '\t\t\tvector {'
mif += ' {} {} {} '.format(self.H[0], self.H[1], self.H[2])
mif += '}\n'
mif += '\t\t}\n'
mif += '\t}\n'
mif += '\tmultiplier {}\n'.format(self.multiplier)
mif += '}\n\n'
return mif
|
class FixedZeeman(object):
def __init__(self, H, multiplier=1, name='fixedzeeman'):
if not isinstance(H, (list, tuple)) or len(H) != 3:
raise ValueError('H must be a 3-element tuple or list.')
else:
self.H = H
if not isinstance(multiplier, (float, int)):
raise ValueError('Multiplier must be a positive float or int.')
else:
self.multiplier = multiplier
if not isinstance(name, str):
raise ValueError('name must be a string.')
else:
self.name = name
def get_mif(self):
# Create mif string.
mif = '# FixedZeeman\n'
mif += 'Specify Oxs_FixedZeeman:{} '.format(self.name)
mif += '{\n'
mif += '\tfield {\n'
mif += '\t\tOxs_UniformVectorField {\n'
mif += '\t\t\tvector {'
mif += ' {} {} {} '.format(self.H[0], self.H[1], self.H[2])
mif += '}\n'
mif += '\t\t}\n'
mif += '\t}\n'
mif += '\tmultiplier {}\n'.format(self.multiplier)
mif += '}\n\n'
return mif
Add numpy array as a possibility for setting external magnetic field.import numpy as np
class FixedZeeman(object):
def __init__(self, H, multiplier=1, name='fixedzeeman'):
if not isinstance(H, (list, tuple, np.ndarray)) or len(H) != 3:
raise ValueError('H must be a 3-element tuple or list.')
else:
self.H = H
if not isinstance(multiplier, (float, int)):
raise ValueError('Multiplier must be a positive float or int.')
else:
self.multiplier = multiplier
if not isinstance(name, str):
raise ValueError('name must be a string.')
else:
self.name = name
def get_mif(self):
# Create mif string.
mif = '# FixedZeeman\n'
mif += 'Specify Oxs_FixedZeeman:{} '.format(self.name)
mif += '{\n'
mif += '\tfield {\n'
mif += '\t\tOxs_UniformVectorField {\n'
mif += '\t\t\tvector {'
mif += ' {} {} {} '.format(self.H[0], self.H[1], self.H[2])
mif += '}\n'
mif += '\t\t}\n'
mif += '\t}\n'
mif += '\tmultiplier {}\n'.format(self.multiplier)
mif += '}\n\n'
return mif
|
<commit_before>class FixedZeeman(object):
def __init__(self, H, multiplier=1, name='fixedzeeman'):
if not isinstance(H, (list, tuple)) or len(H) != 3:
raise ValueError('H must be a 3-element tuple or list.')
else:
self.H = H
if not isinstance(multiplier, (float, int)):
raise ValueError('Multiplier must be a positive float or int.')
else:
self.multiplier = multiplier
if not isinstance(name, str):
raise ValueError('name must be a string.')
else:
self.name = name
def get_mif(self):
# Create mif string.
mif = '# FixedZeeman\n'
mif += 'Specify Oxs_FixedZeeman:{} '.format(self.name)
mif += '{\n'
mif += '\tfield {\n'
mif += '\t\tOxs_UniformVectorField {\n'
mif += '\t\t\tvector {'
mif += ' {} {} {} '.format(self.H[0], self.H[1], self.H[2])
mif += '}\n'
mif += '\t\t}\n'
mif += '\t}\n'
mif += '\tmultiplier {}\n'.format(self.multiplier)
mif += '}\n\n'
return mif
<commit_msg>Add numpy array as a possibility for setting external magnetic field.<commit_after>import numpy as np
class FixedZeeman(object):
def __init__(self, H, multiplier=1, name='fixedzeeman'):
if not isinstance(H, (list, tuple, np.ndarray)) or len(H) != 3:
raise ValueError('H must be a 3-element tuple or list.')
else:
self.H = H
if not isinstance(multiplier, (float, int)):
raise ValueError('Multiplier must be a positive float or int.')
else:
self.multiplier = multiplier
if not isinstance(name, str):
raise ValueError('name must be a string.')
else:
self.name = name
def get_mif(self):
# Create mif string.
mif = '# FixedZeeman\n'
mif += 'Specify Oxs_FixedZeeman:{} '.format(self.name)
mif += '{\n'
mif += '\tfield {\n'
mif += '\t\tOxs_UniformVectorField {\n'
mif += '\t\t\tvector {'
mif += ' {} {} {} '.format(self.H[0], self.H[1], self.H[2])
mif += '}\n'
mif += '\t\t}\n'
mif += '\t}\n'
mif += '\tmultiplier {}\n'.format(self.multiplier)
mif += '}\n\n'
return mif
|
59536d499999786bae5e6cb0da33e71454e3b539
|
systematic_review/urls.py
|
systematic_review/urls.py
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from sysrev.views import *
# URL routes for the systematic-review app.
urlpatterns = patterns(
    '',
    url(r'^$', ReviewListView.as_view(), name='index'),
    url(r'^review/(?P<pk>\d+)(-([\w\-]+))?/$', ReviewDetailView.as_view(), name='review'),
    # Fixed: the update view previously shared the ``delete$`` path with the
    # delete view, which made the delete pattern unreachable and pointed
    # reverse('review_update') at the wrong URL.
    url(r'^review/(?P<pk>\d+)(-([\w\-]+))?/update$', ReviewUpdateView.as_view(), name='review_update'),
    url(r'^review/(?P<pk>\d+)(-([\w\-]+))?/delete$', ReviewDeleteView.as_view(), name='review_delete'),
    url(r'^create/', ReviewCreateWizard.as_view(), name='create'),
    url(r'^review/(?P<pk>\d+)(-([\w\-]+))?/(?P<pk2>\d+)/$', PaperDetailView.as_view(), name='paper'),
    url(r'^profile/', ProfileView.as_view(), name='profile'),
    url(r'^accounts/register/$', SRRegistrationView.as_view(), name='registration_register'),
    url(r'^accounts/', include('registration.backends.simple.urls')),
    url(r'^accounts/', include('django.contrib.auth.urls')),
    url(r'^admin/', include(admin.site.urls)),
)
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from sysrev.views import *
urlpatterns = patterns(
'',
url(r'^$', ReviewListView.as_view(), name='index'),
url(r'^review/(?P<pk>\d+)(-([\w\-]+))?/$', ReviewDetailView.as_view(), name='review'),
url(r'^review/(?P<pk>\d+)(-([\w\-]+))?/update$', ReviewUpdateView.as_view(), name='review_update'),
url(r'^review/(?P<pk>\d+)(-([\w\-]+))?/delete$', ReviewDeleteView.as_view(), name='review_delete'),
url(r'^create/', ReviewCreateWizard.as_view(), name='create'),
url(r'^review/(?P<pk>\d+)(-([\w\-]+))?/(?P<pk2>\d+)/$', PaperDetailView.as_view(), name='paper'),
url(r'^profile/', ProfileView.as_view(), name='profile'),
url(r'^accounts/register/$', SRRegistrationView.as_view(), name='registration_register'),
url(r'^accounts/', include('registration.backends.simple.urls')),
url(r'^accounts/', include('django.contrib.auth.urls')),
url(r'^admin/', include(admin.site.urls)),
)
|
Correct error in URL mappings
|
Correct error in URL mappings
|
Python
|
mit
|
iliawnek/SystematicReview,iliawnek/SystematicReview,iliawnek/SystematicReview,iliawnek/SystematicReview
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from sysrev.views import *
urlpatterns = patterns(
'',
url(r'^$', ReviewListView.as_view(), name='index'),
url(r'^review/(?P<pk>\d+)(-([\w\-]+))?/$', ReviewDetailView.as_view(), name='review'),
url(r'^review/(?P<pk>\d+)(-([\w\-]+))?/delete$', ReviewUpdateView.as_view(), name='review_update'),
url(r'^review/(?P<pk>\d+)(-([\w\-]+))?/delete$', ReviewDeleteView.as_view(), name='review_delete'),
url(r'^create/', ReviewCreateWizard.as_view(), name='create'),
url(r'^review/(?P<pk>\d+)(-([\w\-]+))?/(?P<pk2>\d+)/$', PaperDetailView.as_view(), name='paper'),
url(r'^profile/', ProfileView.as_view(), name='profile'),
url(r'^accounts/register/$', SRRegistrationView.as_view(), name='registration_register'),
url(r'^accounts/', include('registration.backends.simple.urls')),
url(r'^accounts/', include('django.contrib.auth.urls')),
url(r'^admin/', include(admin.site.urls)),
)
Correct error in URL mappings
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from sysrev.views import *
urlpatterns = patterns(
'',
url(r'^$', ReviewListView.as_view(), name='index'),
url(r'^review/(?P<pk>\d+)(-([\w\-]+))?/$', ReviewDetailView.as_view(), name='review'),
url(r'^review/(?P<pk>\d+)(-([\w\-]+))?/update$', ReviewUpdateView.as_view(), name='review_update'),
url(r'^review/(?P<pk>\d+)(-([\w\-]+))?/delete$', ReviewDeleteView.as_view(), name='review_delete'),
url(r'^create/', ReviewCreateWizard.as_view(), name='create'),
url(r'^review/(?P<pk>\d+)(-([\w\-]+))?/(?P<pk2>\d+)/$', PaperDetailView.as_view(), name='paper'),
url(r'^profile/', ProfileView.as_view(), name='profile'),
url(r'^accounts/register/$', SRRegistrationView.as_view(), name='registration_register'),
url(r'^accounts/', include('registration.backends.simple.urls')),
url(r'^accounts/', include('django.contrib.auth.urls')),
url(r'^admin/', include(admin.site.urls)),
)
|
<commit_before>from django.conf.urls import patterns, include, url
from django.contrib import admin
from sysrev.views import *
urlpatterns = patterns(
'',
url(r'^$', ReviewListView.as_view(), name='index'),
url(r'^review/(?P<pk>\d+)(-([\w\-]+))?/$', ReviewDetailView.as_view(), name='review'),
url(r'^review/(?P<pk>\d+)(-([\w\-]+))?/delete$', ReviewUpdateView.as_view(), name='review_update'),
url(r'^review/(?P<pk>\d+)(-([\w\-]+))?/delete$', ReviewDeleteView.as_view(), name='review_delete'),
url(r'^create/', ReviewCreateWizard.as_view(), name='create'),
url(r'^review/(?P<pk>\d+)(-([\w\-]+))?/(?P<pk2>\d+)/$', PaperDetailView.as_view(), name='paper'),
url(r'^profile/', ProfileView.as_view(), name='profile'),
url(r'^accounts/register/$', SRRegistrationView.as_view(), name='registration_register'),
url(r'^accounts/', include('registration.backends.simple.urls')),
url(r'^accounts/', include('django.contrib.auth.urls')),
url(r'^admin/', include(admin.site.urls)),
)
<commit_msg>Correct error in URL mappings<commit_after>
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from sysrev.views import *
urlpatterns = patterns(
'',
url(r'^$', ReviewListView.as_view(), name='index'),
url(r'^review/(?P<pk>\d+)(-([\w\-]+))?/$', ReviewDetailView.as_view(), name='review'),
url(r'^review/(?P<pk>\d+)(-([\w\-]+))?/update$', ReviewUpdateView.as_view(), name='review_update'),
url(r'^review/(?P<pk>\d+)(-([\w\-]+))?/delete$', ReviewDeleteView.as_view(), name='review_delete'),
url(r'^create/', ReviewCreateWizard.as_view(), name='create'),
url(r'^review/(?P<pk>\d+)(-([\w\-]+))?/(?P<pk2>\d+)/$', PaperDetailView.as_view(), name='paper'),
url(r'^profile/', ProfileView.as_view(), name='profile'),
url(r'^accounts/register/$', SRRegistrationView.as_view(), name='registration_register'),
url(r'^accounts/', include('registration.backends.simple.urls')),
url(r'^accounts/', include('django.contrib.auth.urls')),
url(r'^admin/', include(admin.site.urls)),
)
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from sysrev.views import *
urlpatterns = patterns(
'',
url(r'^$', ReviewListView.as_view(), name='index'),
url(r'^review/(?P<pk>\d+)(-([\w\-]+))?/$', ReviewDetailView.as_view(), name='review'),
url(r'^review/(?P<pk>\d+)(-([\w\-]+))?/delete$', ReviewUpdateView.as_view(), name='review_update'),
url(r'^review/(?P<pk>\d+)(-([\w\-]+))?/delete$', ReviewDeleteView.as_view(), name='review_delete'),
url(r'^create/', ReviewCreateWizard.as_view(), name='create'),
url(r'^review/(?P<pk>\d+)(-([\w\-]+))?/(?P<pk2>\d+)/$', PaperDetailView.as_view(), name='paper'),
url(r'^profile/', ProfileView.as_view(), name='profile'),
url(r'^accounts/register/$', SRRegistrationView.as_view(), name='registration_register'),
url(r'^accounts/', include('registration.backends.simple.urls')),
url(r'^accounts/', include('django.contrib.auth.urls')),
url(r'^admin/', include(admin.site.urls)),
)
Correct error in URL mappingsfrom django.conf.urls import patterns, include, url
from django.contrib import admin
from sysrev.views import *
urlpatterns = patterns(
'',
url(r'^$', ReviewListView.as_view(), name='index'),
url(r'^review/(?P<pk>\d+)(-([\w\-]+))?/$', ReviewDetailView.as_view(), name='review'),
url(r'^review/(?P<pk>\d+)(-([\w\-]+))?/update$', ReviewUpdateView.as_view(), name='review_update'),
url(r'^review/(?P<pk>\d+)(-([\w\-]+))?/delete$', ReviewDeleteView.as_view(), name='review_delete'),
url(r'^create/', ReviewCreateWizard.as_view(), name='create'),
url(r'^review/(?P<pk>\d+)(-([\w\-]+))?/(?P<pk2>\d+)/$', PaperDetailView.as_view(), name='paper'),
url(r'^profile/', ProfileView.as_view(), name='profile'),
url(r'^accounts/register/$', SRRegistrationView.as_view(), name='registration_register'),
url(r'^accounts/', include('registration.backends.simple.urls')),
url(r'^accounts/', include('django.contrib.auth.urls')),
url(r'^admin/', include(admin.site.urls)),
)
|
<commit_before>from django.conf.urls import patterns, include, url
from django.contrib import admin
from sysrev.views import *
urlpatterns = patterns(
'',
url(r'^$', ReviewListView.as_view(), name='index'),
url(r'^review/(?P<pk>\d+)(-([\w\-]+))?/$', ReviewDetailView.as_view(), name='review'),
url(r'^review/(?P<pk>\d+)(-([\w\-]+))?/delete$', ReviewUpdateView.as_view(), name='review_update'),
url(r'^review/(?P<pk>\d+)(-([\w\-]+))?/delete$', ReviewDeleteView.as_view(), name='review_delete'),
url(r'^create/', ReviewCreateWizard.as_view(), name='create'),
url(r'^review/(?P<pk>\d+)(-([\w\-]+))?/(?P<pk2>\d+)/$', PaperDetailView.as_view(), name='paper'),
url(r'^profile/', ProfileView.as_view(), name='profile'),
url(r'^accounts/register/$', SRRegistrationView.as_view(), name='registration_register'),
url(r'^accounts/', include('registration.backends.simple.urls')),
url(r'^accounts/', include('django.contrib.auth.urls')),
url(r'^admin/', include(admin.site.urls)),
)
<commit_msg>Correct error in URL mappings<commit_after>from django.conf.urls import patterns, include, url
from django.contrib import admin
from sysrev.views import *
urlpatterns = patterns(
'',
url(r'^$', ReviewListView.as_view(), name='index'),
url(r'^review/(?P<pk>\d+)(-([\w\-]+))?/$', ReviewDetailView.as_view(), name='review'),
url(r'^review/(?P<pk>\d+)(-([\w\-]+))?/update$', ReviewUpdateView.as_view(), name='review_update'),
url(r'^review/(?P<pk>\d+)(-([\w\-]+))?/delete$', ReviewDeleteView.as_view(), name='review_delete'),
url(r'^create/', ReviewCreateWizard.as_view(), name='create'),
url(r'^review/(?P<pk>\d+)(-([\w\-]+))?/(?P<pk2>\d+)/$', PaperDetailView.as_view(), name='paper'),
url(r'^profile/', ProfileView.as_view(), name='profile'),
url(r'^accounts/register/$', SRRegistrationView.as_view(), name='registration_register'),
url(r'^accounts/', include('registration.backends.simple.urls')),
url(r'^accounts/', include('django.contrib.auth.urls')),
url(r'^admin/', include(admin.site.urls)),
)
|
5796a54d10eb3baebda51e3420a818a251406a5c
|
python/test.py
|
python/test.py
|
import sys
from PyQt5 import QtWidgets
from QHexEdit import QHexEdit, QHexEditData
class HexEdit(QHexEdit):
    """Hex editor widget pre-loaded with the contents of *fileName*."""

    def __init__(self, fileName=None):
        super(HexEdit, self).__init__()
        # Fixed: the file handle was opened and never closed.  The context
        # manager guarantees it is released even if read() or setData raises.
        with open(fileName) as file:
            data = file.read()
        self.setData(data)
        self.setReadOnly(False)
if __name__ == '__main__':
    app = QtWidgets.QApplication(sys.argv)

    # Load this script's own bytes as the demo payload.
    hexeditdata = QHexEditData.fromFile('test.py')

    # Build the editor widget, hand it the data, and display it.
    viewer = QHexEdit()
    viewer.setData(hexeditdata)
    viewer.show()

    sys.exit(app.exec_())
|
import sys
from PyQt5 import QtWidgets, QtGui
from QHexEdit import QHexEdit, QHexEditData
if __name__ == '__main__':
app = QtWidgets.QApplication(sys.argv)
# QHexEditData* hexeditdata = QHexEditData::fromFile("test.py");
hexeditdata = QHexEditData.fromFile('test.py')
# QHexEdit* hexedit = new QHexEdit();
# hexedit->setData(hexeditdata);
hexedit = QHexEdit()
hexedit.setData(hexeditdata)
hexedit.show()
# hexedit->commentRange(0, 12, "I'm a comment!");
hexedit.commentRange(0, 12, "I'm a comment!")
# hexedit->highlightBackground(0, 10, QColor(Qt::Red));
hexedit.highlightBackground(0, 10, QtGui.QColor(255, 0, 0))
sys.exit(app.exec_())
|
Test more stuff in python
|
Test more stuff in python
|
Python
|
mit
|
parallaxinc/QHexEdit,parallaxinc/QHexEdit
|
import sys
from PyQt5 import QtWidgets
from QHexEdit import QHexEdit, QHexEditData
class HexEdit(QHexEdit):
def __init__(self, fileName=None):
super(HexEdit, self).__init__()
file = open(fileName)
data = file.read()
self.setData(data)
self.setReadOnly(False)
if __name__ == '__main__':
app = QtWidgets.QApplication(sys.argv)
# QHexEditData* hexeditdata = QHexEditData::fromFile("data.bin");
data = QHexEditData.fromFile('test.py')
# QHexEdit* hexedit = new QHexEdit();
# hexedit->setData(hexeditdata);
mainWin = QHexEdit()
mainWin.setData(data)
mainWin.show()
sys.exit(app.exec_())
Test more stuff in python
|
import sys
from PyQt5 import QtWidgets, QtGui
from QHexEdit import QHexEdit, QHexEditData
if __name__ == '__main__':
app = QtWidgets.QApplication(sys.argv)
# QHexEditData* hexeditdata = QHexEditData::fromFile("test.py");
hexeditdata = QHexEditData.fromFile('test.py')
# QHexEdit* hexedit = new QHexEdit();
# hexedit->setData(hexeditdata);
hexedit = QHexEdit()
hexedit.setData(hexeditdata)
hexedit.show()
# hexedit->commentRange(0, 12, "I'm a comment!");
hexedit.commentRange(0, 12, "I'm a comment!")
# hexedit->highlightBackground(0, 10, QColor(Qt::Red));
hexedit.highlightBackground(0, 10, QtGui.QColor(255, 0, 0))
sys.exit(app.exec_())
|
<commit_before>import sys
from PyQt5 import QtWidgets
from QHexEdit import QHexEdit, QHexEditData
class HexEdit(QHexEdit):
def __init__(self, fileName=None):
super(HexEdit, self).__init__()
file = open(fileName)
data = file.read()
self.setData(data)
self.setReadOnly(False)
if __name__ == '__main__':
app = QtWidgets.QApplication(sys.argv)
# QHexEditData* hexeditdata = QHexEditData::fromFile("data.bin");
data = QHexEditData.fromFile('test.py')
# QHexEdit* hexedit = new QHexEdit();
# hexedit->setData(hexeditdata);
mainWin = QHexEdit()
mainWin.setData(data)
mainWin.show()
sys.exit(app.exec_())
<commit_msg>Test more stuff in python<commit_after>
|
import sys
from PyQt5 import QtWidgets, QtGui
from QHexEdit import QHexEdit, QHexEditData
if __name__ == '__main__':
app = QtWidgets.QApplication(sys.argv)
# QHexEditData* hexeditdata = QHexEditData::fromFile("test.py");
hexeditdata = QHexEditData.fromFile('test.py')
# QHexEdit* hexedit = new QHexEdit();
# hexedit->setData(hexeditdata);
hexedit = QHexEdit()
hexedit.setData(hexeditdata)
hexedit.show()
# hexedit->commentRange(0, 12, "I'm a comment!");
hexedit.commentRange(0, 12, "I'm a comment!")
# hexedit->highlightBackground(0, 10, QColor(Qt::Red));
hexedit.highlightBackground(0, 10, QtGui.QColor(255, 0, 0))
sys.exit(app.exec_())
|
import sys
from PyQt5 import QtWidgets
from QHexEdit import QHexEdit, QHexEditData
class HexEdit(QHexEdit):
def __init__(self, fileName=None):
super(HexEdit, self).__init__()
file = open(fileName)
data = file.read()
self.setData(data)
self.setReadOnly(False)
if __name__ == '__main__':
app = QtWidgets.QApplication(sys.argv)
# QHexEditData* hexeditdata = QHexEditData::fromFile("data.bin");
data = QHexEditData.fromFile('test.py')
# QHexEdit* hexedit = new QHexEdit();
# hexedit->setData(hexeditdata);
mainWin = QHexEdit()
mainWin.setData(data)
mainWin.show()
sys.exit(app.exec_())
Test more stuff in pythonimport sys
from PyQt5 import QtWidgets, QtGui
from QHexEdit import QHexEdit, QHexEditData
if __name__ == '__main__':
app = QtWidgets.QApplication(sys.argv)
# QHexEditData* hexeditdata = QHexEditData::fromFile("test.py");
hexeditdata = QHexEditData.fromFile('test.py')
# QHexEdit* hexedit = new QHexEdit();
# hexedit->setData(hexeditdata);
hexedit = QHexEdit()
hexedit.setData(hexeditdata)
hexedit.show()
# hexedit->commentRange(0, 12, "I'm a comment!");
hexedit.commentRange(0, 12, "I'm a comment!")
# hexedit->highlightBackground(0, 10, QColor(Qt::Red));
hexedit.highlightBackground(0, 10, QtGui.QColor(255, 0, 0))
sys.exit(app.exec_())
|
<commit_before>import sys
from PyQt5 import QtWidgets
from QHexEdit import QHexEdit, QHexEditData
class HexEdit(QHexEdit):
def __init__(self, fileName=None):
super(HexEdit, self).__init__()
file = open(fileName)
data = file.read()
self.setData(data)
self.setReadOnly(False)
if __name__ == '__main__':
app = QtWidgets.QApplication(sys.argv)
# QHexEditData* hexeditdata = QHexEditData::fromFile("data.bin");
data = QHexEditData.fromFile('test.py')
# QHexEdit* hexedit = new QHexEdit();
# hexedit->setData(hexeditdata);
mainWin = QHexEdit()
mainWin.setData(data)
mainWin.show()
sys.exit(app.exec_())
<commit_msg>Test more stuff in python<commit_after>import sys
from PyQt5 import QtWidgets, QtGui
from QHexEdit import QHexEdit, QHexEditData
if __name__ == '__main__':
app = QtWidgets.QApplication(sys.argv)
# QHexEditData* hexeditdata = QHexEditData::fromFile("test.py");
hexeditdata = QHexEditData.fromFile('test.py')
# QHexEdit* hexedit = new QHexEdit();
# hexedit->setData(hexeditdata);
hexedit = QHexEdit()
hexedit.setData(hexeditdata)
hexedit.show()
# hexedit->commentRange(0, 12, "I'm a comment!");
hexedit.commentRange(0, 12, "I'm a comment!")
# hexedit->highlightBackground(0, 10, QColor(Qt::Red));
hexedit.highlightBackground(0, 10, QtGui.QColor(255, 0, 0))
sys.exit(app.exec_())
|
60bfd29c5f21c3daf43a1b150048f57c147dbaf2
|
inet/sources/twitter.py
|
inet/sources/twitter.py
|
# -*- coding: utf-8 -*-
import tweepy
from .constants import TWITTER_ACCESS, TWITTER_SECRET
from .constants import TWITTER_CONSUMER_ACCESS, TWITTER_CONSUMER_SECRET
# OAuth1 handshake: app-level (consumer) credentials first, then the
# user-level access token.
_auth = tweepy.OAuthHandler(TWITTER_CONSUMER_ACCESS, TWITTER_CONSUMER_SECRET)
_auth.set_access_token(TWITTER_ACCESS, TWITTER_SECRET)

# Shared API client.  ``wait_on_rate_limit`` makes tweepy sleep until the
# rate-limit window resets instead of raising, so long-running collection
# jobs survive HTTP 429 responses.
twitter_client = tweepy.API(_auth, wait_on_rate_limit=True)
|
# -*- coding: utf-8 -*-
import tweepy
from .constants import TWITTER_ACCESS, TWITTER_SECRET
from .constants import TWITTER_CONSUMER_ACCESS, TWITTER_CONSUMER_SECRET
_auth = tweepy.OAuthHandler(TWITTER_CONSUMER_ACCESS, TWITTER_CONSUMER_SECRET)
_auth.set_access_token(TWITTER_ACCESS, TWITTER_SECRET)
twitter_client = tweepy.API(_auth, wait_on_rate_limit=True)
|
Set wait on rate limit to True
|
Set wait on rate limit to True
|
Python
|
mit
|
nestauk/inet
|
# -*- coding: utf-8 -*-
import tweepy
from .constants import TWITTER_ACCESS, TWITTER_SECRET
from .constants import TWITTER_CONSUMER_ACCESS, TWITTER_CONSUMER_SECRET
_auth = tweepy.OAuthHandler(TWITTER_CONSUMER_ACCESS, TWITTER_CONSUMER_SECRET)
_auth.set_access_token(TWITTER_ACCESS, TWITTER_SECRET)
twitter_client = tweepy.API(_auth)
Set wait on rate limit to True
|
# -*- coding: utf-8 -*-
import tweepy
from .constants import TWITTER_ACCESS, TWITTER_SECRET
from .constants import TWITTER_CONSUMER_ACCESS, TWITTER_CONSUMER_SECRET
_auth = tweepy.OAuthHandler(TWITTER_CONSUMER_ACCESS, TWITTER_CONSUMER_SECRET)
_auth.set_access_token(TWITTER_ACCESS, TWITTER_SECRET)
twitter_client = tweepy.API(_auth, wait_on_rate_limit=True)
|
<commit_before># -*- coding: utf-8 -*-
import tweepy
from .constants import TWITTER_ACCESS, TWITTER_SECRET
from .constants import TWITTER_CONSUMER_ACCESS, TWITTER_CONSUMER_SECRET
_auth = tweepy.OAuthHandler(TWITTER_CONSUMER_ACCESS, TWITTER_CONSUMER_SECRET)
_auth.set_access_token(TWITTER_ACCESS, TWITTER_SECRET)
twitter_client = tweepy.API(_auth)
<commit_msg>Set wait on rate limit to True<commit_after>
|
# -*- coding: utf-8 -*-
import tweepy
from .constants import TWITTER_ACCESS, TWITTER_SECRET
from .constants import TWITTER_CONSUMER_ACCESS, TWITTER_CONSUMER_SECRET
_auth = tweepy.OAuthHandler(TWITTER_CONSUMER_ACCESS, TWITTER_CONSUMER_SECRET)
_auth.set_access_token(TWITTER_ACCESS, TWITTER_SECRET)
twitter_client = tweepy.API(_auth, wait_on_rate_limit=True)
|
# -*- coding: utf-8 -*-
import tweepy
from .constants import TWITTER_ACCESS, TWITTER_SECRET
from .constants import TWITTER_CONSUMER_ACCESS, TWITTER_CONSUMER_SECRET
_auth = tweepy.OAuthHandler(TWITTER_CONSUMER_ACCESS, TWITTER_CONSUMER_SECRET)
_auth.set_access_token(TWITTER_ACCESS, TWITTER_SECRET)
twitter_client = tweepy.API(_auth)
Set wait on rate limit to True# -*- coding: utf-8 -*-
import tweepy
from .constants import TWITTER_ACCESS, TWITTER_SECRET
from .constants import TWITTER_CONSUMER_ACCESS, TWITTER_CONSUMER_SECRET
_auth = tweepy.OAuthHandler(TWITTER_CONSUMER_ACCESS, TWITTER_CONSUMER_SECRET)
_auth.set_access_token(TWITTER_ACCESS, TWITTER_SECRET)
twitter_client = tweepy.API(_auth, wait_on_rate_limit=True)
|
<commit_before># -*- coding: utf-8 -*-
import tweepy
from .constants import TWITTER_ACCESS, TWITTER_SECRET
from .constants import TWITTER_CONSUMER_ACCESS, TWITTER_CONSUMER_SECRET
_auth = tweepy.OAuthHandler(TWITTER_CONSUMER_ACCESS, TWITTER_CONSUMER_SECRET)
_auth.set_access_token(TWITTER_ACCESS, TWITTER_SECRET)
twitter_client = tweepy.API(_auth)
<commit_msg>Set wait on rate limit to True<commit_after># -*- coding: utf-8 -*-
import tweepy
from .constants import TWITTER_ACCESS, TWITTER_SECRET
from .constants import TWITTER_CONSUMER_ACCESS, TWITTER_CONSUMER_SECRET
_auth = tweepy.OAuthHandler(TWITTER_CONSUMER_ACCESS, TWITTER_CONSUMER_SECRET)
_auth.set_access_token(TWITTER_ACCESS, TWITTER_SECRET)
twitter_client = tweepy.API(_auth, wait_on_rate_limit=True)
|
822881c51a36af3fe3e746e169743b0c6c29d878
|
conman/redirects/views.py
|
conman/redirects/views.py
|
from django.views.generic import RedirectView
class RouteRedirectView(RedirectView):
"""Redirect to the target Route."""
permanent = True # Set to django 1.9's default to avoid RemovedInDjango19Warning
def get_redirect_url(self, *args, **kwargs):
"""
Return the route's target url.
Save the route's redirect type for use by RedirectView.
"""
redirect = kwargs['route']
self.permanent = redirect.permanent
return redirect.target.url
|
from django.views.generic import RedirectView
class RouteRedirectView(RedirectView):
"""Redirect to the target Route."""
permanent = False # Set to django 1.9's default to avoid RemovedInDjango19Warning
def get_redirect_url(self, *args, **kwargs):
"""
Return the route's target url.
Save the route's redirect type for use by RedirectView.
"""
redirect = kwargs['route']
self.permanent = redirect.permanent
return redirect.target.url
|
Correct silly mistake made in 25f2692
|
Correct silly mistake made in 25f2692
|
Python
|
bsd-2-clause
|
meshy/django-conman,meshy/django-conman,Ian-Foote/django-conman
|
from django.views.generic import RedirectView
class RouteRedirectView(RedirectView):
"""Redirect to the target Route."""
permanent = True # Set to django 1.9's default to avoid RemovedInDjango19Warning
def get_redirect_url(self, *args, **kwargs):
"""
Return the route's target url.
Save the route's redirect type for use by RedirectView.
"""
redirect = kwargs['route']
self.permanent = redirect.permanent
return redirect.target.url
Correct silly mistake made in 25f2692
|
from django.views.generic import RedirectView
class RouteRedirectView(RedirectView):
"""Redirect to the target Route."""
permanent = False # Set to django 1.9's default to avoid RemovedInDjango19Warning
def get_redirect_url(self, *args, **kwargs):
"""
Return the route's target url.
Save the route's redirect type for use by RedirectView.
"""
redirect = kwargs['route']
self.permanent = redirect.permanent
return redirect.target.url
|
<commit_before>from django.views.generic import RedirectView
class RouteRedirectView(RedirectView):
"""Redirect to the target Route."""
permanent = True # Set to django 1.9's default to avoid RemovedInDjango19Warning
def get_redirect_url(self, *args, **kwargs):
"""
Return the route's target url.
Save the route's redirect type for use by RedirectView.
"""
redirect = kwargs['route']
self.permanent = redirect.permanent
return redirect.target.url
<commit_msg>Correct silly mistake made in 25f2692<commit_after>
|
from django.views.generic import RedirectView
class RouteRedirectView(RedirectView):
"""Redirect to the target Route."""
permanent = False # Set to django 1.9's default to avoid RemovedInDjango19Warning
def get_redirect_url(self, *args, **kwargs):
"""
Return the route's target url.
Save the route's redirect type for use by RedirectView.
"""
redirect = kwargs['route']
self.permanent = redirect.permanent
return redirect.target.url
|
from django.views.generic import RedirectView
class RouteRedirectView(RedirectView):
"""Redirect to the target Route."""
permanent = True # Set to django 1.9's default to avoid RemovedInDjango19Warning
def get_redirect_url(self, *args, **kwargs):
"""
Return the route's target url.
Save the route's redirect type for use by RedirectView.
"""
redirect = kwargs['route']
self.permanent = redirect.permanent
return redirect.target.url
Correct silly mistake made in 25f2692from django.views.generic import RedirectView
class RouteRedirectView(RedirectView):
"""Redirect to the target Route."""
permanent = False # Set to django 1.9's default to avoid RemovedInDjango19Warning
def get_redirect_url(self, *args, **kwargs):
"""
Return the route's target url.
Save the route's redirect type for use by RedirectView.
"""
redirect = kwargs['route']
self.permanent = redirect.permanent
return redirect.target.url
|
<commit_before>from django.views.generic import RedirectView
class RouteRedirectView(RedirectView):
"""Redirect to the target Route."""
permanent = True # Set to django 1.9's default to avoid RemovedInDjango19Warning
def get_redirect_url(self, *args, **kwargs):
"""
Return the route's target url.
Save the route's redirect type for use by RedirectView.
"""
redirect = kwargs['route']
self.permanent = redirect.permanent
return redirect.target.url
<commit_msg>Correct silly mistake made in 25f2692<commit_after>from django.views.generic import RedirectView
class RouteRedirectView(RedirectView):
"""Redirect to the target Route."""
permanent = False # Set to django 1.9's default to avoid RemovedInDjango19Warning
def get_redirect_url(self, *args, **kwargs):
"""
Return the route's target url.
Save the route's redirect type for use by RedirectView.
"""
redirect = kwargs['route']
self.permanent = redirect.permanent
return redirect.target.url
|
de75ec4f92c424b22f1d64dc43b3d8259b96fde0
|
coverart_redirect/loggers.py
|
coverart_redirect/loggers.py
|
import raven
import raven.transport.threaded_requests
from raven.handlers.logging import SentryHandler
from raven.conf import setup_logging
import logging
class MissingRavenClient(raven.Client):
"""Raven client class that is used as a placeholder.
This is done to make sure that calls to functions in the client don't fail
even if the client is not initialized. Sentry server might be missing, but
we don't want to check if it actually exists in every place exception is
captured.
"""
captureException = lambda self, *args, **kwargs: None
captureMessage = lambda self, *args, **kwargs: None
_sentry = MissingRavenClient() # type: raven.Client
def get_sentry():
return _sentry
def init_raven_client(dsn):
global _sentry
_sentry = raven.Client(
dsn=dsn,
transport=raven.transport.threaded_requests.ThreadedRequestsHTTPTransport,
ignore_exceptions={'KeyboardInterrupt'},
logging=True,
)
sentry_errors_logger = logging.getLogger("sentry.errors")
sentry_errors_logger.addHandler(logging.StreamHandler())
handler = SentryHandler(_sentry)
handler.setLevel(logging.ERROR)
setup_logging(handler)
|
import raven
import raven.transport.threaded_requests
from raven.handlers.logging import SentryHandler
from raven.conf import setup_logging
from werkzeug.exceptions import HTTPException
from exceptions import KeyboardInterrupt
import logging
class MissingRavenClient(raven.Client):
"""Raven client class that is used as a placeholder.
This is done to make sure that calls to functions in the client don't fail
even if the client is not initialized. Sentry server might be missing, but
we don't want to check if it actually exists in every place exception is
captured.
"""
captureException = lambda self, *args, **kwargs: None
captureMessage = lambda self, *args, **kwargs: None
_sentry = MissingRavenClient() # type: raven.Client
def get_sentry():
return _sentry
def init_raven_client(dsn):
global _sentry
_sentry = raven.Client(
dsn=dsn,
transport=raven.transport.threaded_requests.ThreadedRequestsHTTPTransport,
ignore_exceptions={KeyboardInterrupt, HTTPException},
logging=True,
)
sentry_errors_logger = logging.getLogger("sentry.errors")
sentry_errors_logger.addHandler(logging.StreamHandler())
handler = SentryHandler(_sentry)
handler.setLevel(logging.ERROR)
setup_logging(handler)
|
Exclude HTTP exceptions from logging by Raven
|
Exclude HTTP exceptions from logging by Raven
|
Python
|
mit
|
metabrainz/coverart_redirect,metabrainz/coverart_redirect,metabrainz/coverart_redirect
|
import raven
import raven.transport.threaded_requests
from raven.handlers.logging import SentryHandler
from raven.conf import setup_logging
import logging
class MissingRavenClient(raven.Client):
"""Raven client class that is used as a placeholder.
This is done to make sure that calls to functions in the client don't fail
even if the client is not initialized. Sentry server might be missing, but
we don't want to check if it actually exists in every place exception is
captured.
"""
captureException = lambda self, *args, **kwargs: None
captureMessage = lambda self, *args, **kwargs: None
_sentry = MissingRavenClient() # type: raven.Client
def get_sentry():
return _sentry
def init_raven_client(dsn):
global _sentry
_sentry = raven.Client(
dsn=dsn,
transport=raven.transport.threaded_requests.ThreadedRequestsHTTPTransport,
ignore_exceptions={'KeyboardInterrupt'},
logging=True,
)
sentry_errors_logger = logging.getLogger("sentry.errors")
sentry_errors_logger.addHandler(logging.StreamHandler())
handler = SentryHandler(_sentry)
handler.setLevel(logging.ERROR)
setup_logging(handler)
Exclude HTTP exceptions from logging by Raven
|
import raven
import raven.transport.threaded_requests
from raven.handlers.logging import SentryHandler
from raven.conf import setup_logging
from werkzeug.exceptions import HTTPException
from exceptions import KeyboardInterrupt
import logging
class MissingRavenClient(raven.Client):
"""Raven client class that is used as a placeholder.
This is done to make sure that calls to functions in the client don't fail
even if the client is not initialized. Sentry server might be missing, but
we don't want to check if it actually exists in every place exception is
captured.
"""
captureException = lambda self, *args, **kwargs: None
captureMessage = lambda self, *args, **kwargs: None
_sentry = MissingRavenClient() # type: raven.Client
def get_sentry():
return _sentry
def init_raven_client(dsn):
global _sentry
_sentry = raven.Client(
dsn=dsn,
transport=raven.transport.threaded_requests.ThreadedRequestsHTTPTransport,
ignore_exceptions={KeyboardInterrupt, HTTPException},
logging=True,
)
sentry_errors_logger = logging.getLogger("sentry.errors")
sentry_errors_logger.addHandler(logging.StreamHandler())
handler = SentryHandler(_sentry)
handler.setLevel(logging.ERROR)
setup_logging(handler)
|
<commit_before>import raven
import raven.transport.threaded_requests
from raven.handlers.logging import SentryHandler
from raven.conf import setup_logging
import logging
class MissingRavenClient(raven.Client):
"""Raven client class that is used as a placeholder.
This is done to make sure that calls to functions in the client don't fail
even if the client is not initialized. Sentry server might be missing, but
we don't want to check if it actually exists in every place exception is
captured.
"""
captureException = lambda self, *args, **kwargs: None
captureMessage = lambda self, *args, **kwargs: None
_sentry = MissingRavenClient() # type: raven.Client
def get_sentry():
return _sentry
def init_raven_client(dsn):
global _sentry
_sentry = raven.Client(
dsn=dsn,
transport=raven.transport.threaded_requests.ThreadedRequestsHTTPTransport,
ignore_exceptions={'KeyboardInterrupt'},
logging=True,
)
sentry_errors_logger = logging.getLogger("sentry.errors")
sentry_errors_logger.addHandler(logging.StreamHandler())
handler = SentryHandler(_sentry)
handler.setLevel(logging.ERROR)
setup_logging(handler)
<commit_msg>Exclude HTTP exceptions from logging by Raven<commit_after>
|
import raven
import raven.transport.threaded_requests
from raven.handlers.logging import SentryHandler
from raven.conf import setup_logging
from werkzeug.exceptions import HTTPException
from exceptions import KeyboardInterrupt
import logging
class MissingRavenClient(raven.Client):
"""Raven client class that is used as a placeholder.
This is done to make sure that calls to functions in the client don't fail
even if the client is not initialized. Sentry server might be missing, but
we don't want to check if it actually exists in every place exception is
captured.
"""
captureException = lambda self, *args, **kwargs: None
captureMessage = lambda self, *args, **kwargs: None
_sentry = MissingRavenClient() # type: raven.Client
def get_sentry():
return _sentry
def init_raven_client(dsn):
global _sentry
_sentry = raven.Client(
dsn=dsn,
transport=raven.transport.threaded_requests.ThreadedRequestsHTTPTransport,
ignore_exceptions={KeyboardInterrupt, HTTPException},
logging=True,
)
sentry_errors_logger = logging.getLogger("sentry.errors")
sentry_errors_logger.addHandler(logging.StreamHandler())
handler = SentryHandler(_sentry)
handler.setLevel(logging.ERROR)
setup_logging(handler)
|
import raven
import raven.transport.threaded_requests
from raven.handlers.logging import SentryHandler
from raven.conf import setup_logging
import logging
class MissingRavenClient(raven.Client):
"""Raven client class that is used as a placeholder.
This is done to make sure that calls to functions in the client don't fail
even if the client is not initialized. Sentry server might be missing, but
we don't want to check if it actually exists in every place exception is
captured.
"""
captureException = lambda self, *args, **kwargs: None
captureMessage = lambda self, *args, **kwargs: None
_sentry = MissingRavenClient() # type: raven.Client
def get_sentry():
return _sentry
def init_raven_client(dsn):
global _sentry
_sentry = raven.Client(
dsn=dsn,
transport=raven.transport.threaded_requests.ThreadedRequestsHTTPTransport,
ignore_exceptions={'KeyboardInterrupt'},
logging=True,
)
sentry_errors_logger = logging.getLogger("sentry.errors")
sentry_errors_logger.addHandler(logging.StreamHandler())
handler = SentryHandler(_sentry)
handler.setLevel(logging.ERROR)
setup_logging(handler)
Exclude HTTP exceptions from logging by Ravenimport raven
import raven.transport.threaded_requests
from raven.handlers.logging import SentryHandler
from raven.conf import setup_logging
from werkzeug.exceptions import HTTPException
from exceptions import KeyboardInterrupt
import logging
class MissingRavenClient(raven.Client):
"""Raven client class that is used as a placeholder.
This is done to make sure that calls to functions in the client don't fail
even if the client is not initialized. Sentry server might be missing, but
we don't want to check if it actually exists in every place exception is
captured.
"""
captureException = lambda self, *args, **kwargs: None
captureMessage = lambda self, *args, **kwargs: None
_sentry = MissingRavenClient() # type: raven.Client
def get_sentry():
return _sentry
def init_raven_client(dsn):
global _sentry
_sentry = raven.Client(
dsn=dsn,
transport=raven.transport.threaded_requests.ThreadedRequestsHTTPTransport,
ignore_exceptions={KeyboardInterrupt, HTTPException},
logging=True,
)
sentry_errors_logger = logging.getLogger("sentry.errors")
sentry_errors_logger.addHandler(logging.StreamHandler())
handler = SentryHandler(_sentry)
handler.setLevel(logging.ERROR)
setup_logging(handler)
|
<commit_before>import raven
import raven.transport.threaded_requests
from raven.handlers.logging import SentryHandler
from raven.conf import setup_logging
import logging
class MissingRavenClient(raven.Client):
"""Raven client class that is used as a placeholder.
This is done to make sure that calls to functions in the client don't fail
even if the client is not initialized. Sentry server might be missing, but
we don't want to check if it actually exists in every place exception is
captured.
"""
captureException = lambda self, *args, **kwargs: None
captureMessage = lambda self, *args, **kwargs: None
_sentry = MissingRavenClient() # type: raven.Client
def get_sentry():
return _sentry
def init_raven_client(dsn):
global _sentry
_sentry = raven.Client(
dsn=dsn,
transport=raven.transport.threaded_requests.ThreadedRequestsHTTPTransport,
ignore_exceptions={'KeyboardInterrupt'},
logging=True,
)
sentry_errors_logger = logging.getLogger("sentry.errors")
sentry_errors_logger.addHandler(logging.StreamHandler())
handler = SentryHandler(_sentry)
handler.setLevel(logging.ERROR)
setup_logging(handler)
<commit_msg>Exclude HTTP exceptions from logging by Raven<commit_after>import raven
import raven.transport.threaded_requests
from raven.handlers.logging import SentryHandler
from raven.conf import setup_logging
from werkzeug.exceptions import HTTPException
from exceptions import KeyboardInterrupt
import logging
class MissingRavenClient(raven.Client):
"""Raven client class that is used as a placeholder.
This is done to make sure that calls to functions in the client don't fail
even if the client is not initialized. Sentry server might be missing, but
we don't want to check if it actually exists in every place exception is
captured.
"""
captureException = lambda self, *args, **kwargs: None
captureMessage = lambda self, *args, **kwargs: None
_sentry = MissingRavenClient() # type: raven.Client
def get_sentry():
return _sentry
def init_raven_client(dsn):
global _sentry
_sentry = raven.Client(
dsn=dsn,
transport=raven.transport.threaded_requests.ThreadedRequestsHTTPTransport,
ignore_exceptions={KeyboardInterrupt, HTTPException},
logging=True,
)
sentry_errors_logger = logging.getLogger("sentry.errors")
sentry_errors_logger.addHandler(logging.StreamHandler())
handler = SentryHandler(_sentry)
handler.setLevel(logging.ERROR)
setup_logging(handler)
|
38d5e165363f55dfedea94397ca85634bf800941
|
libqtile/layout/sublayouts.py
|
libqtile/layout/sublayouts.py
|
from base import SubLayout, Rect
from Xlib import Xatom
class TopLevelSubLayout(SubLayout):
'''
This class effectively wraps a sublayout, and automatically adds a floating sublayout,
'''
def __init__(self, sublayout_data, clientStack, theme):
WrappedSubLayout, args = sublayout_data
SubLayout.__init__(self, clientStack, theme)
self.sublayouts.append(Floating(clientStack,
theme,
parent=self
)
)
self.sublayouts.append(WrappedSubLayout(clientStack,
theme,
parent=self,
**args
)
)
class VerticalStack(SubLayout):
def layout(self, rectangle, windows):
SubLayout.layout(self, rectangle, windows)
def configure(self, r, client):
position = self.windows.index(client)
cliheight = int(r.h / len(self.windows)) #inc border
self.place(client,
r.x,
r.y + cliheight*position,
r.w,
cliheight,
)
class Floating(SubLayout):
def filter(self, client):
return client.floating
def request_rectangle(self, r, windows):
return (Rect(0,0,0,0), r) #we want nothing
def configure(self, r, client):
client.unhide() #let it be where it wants
|
from base import SubLayout, Rect
from Xlib import Xatom
class TopLevelSubLayout(SubLayout):
'''
This class effectively wraps a sublayout, and automatically adds a floating sublayout,
'''
def __init__(self, sublayout_data, clientStack, theme):
WrappedSubLayout, args = sublayout_data
SubLayout.__init__(self, clientStack, theme)
self.sublayouts.append(Floating(clientStack,
theme,
parent=self
)
)
self.sublayouts.append(WrappedSubLayout(clientStack,
theme,
parent=self,
**args
)
)
class VerticalStack(SubLayout):
def layout(self, rectangle, windows):
SubLayout.layout(self, rectangle, windows)
def configure(self, r, client):
position = self.windows.index(client)
cliheight = int(r.h / len(self.windows)) #inc border
self.place(client,
r.x,
r.y + cliheight*position,
r.w,
cliheight,
)
class Floating(SubLayout):
def filter(self, client):
return client.floating
def request_rectangle(self, r, windows):
return (Rect(0,0,0,0), r) #we want nothing
def configure(self, r, client):
d = client.floatDimensions
self.place(client, **d)
|
Update floating sublayout to use floatDimensions
|
Update floating sublayout to use floatDimensions
|
Python
|
mit
|
himaaaatti/qtile,dequis/qtile,cortesi/qtile,kopchik/qtile,zordsdavini/qtile,frostidaho/qtile,nxnfufunezn/qtile,himaaaatti/qtile,EndPointCorp/qtile,zordsdavini/qtile,encukou/qtile,kynikos/qtile,flacjacket/qtile,bavardage/qtile,kseistrup/qtile,soulchainer/qtile,jdowner/qtile,apinsard/qtile,rxcomm/qtile,ramnes/qtile,EndPointCorp/qtile,de-vri-es/qtile,StephenBarnes/qtile,qtile/qtile,xplv/qtile,jdowner/qtile,cortesi/qtile,de-vri-es/qtile,frostidaho/qtile,kseistrup/qtile,w1ndy/qtile,andrewyoung1991/qtile,tych0/qtile,dequis/qtile,tych0/qtile,flacjacket/qtile,aniruddhkanojia/qtile,soulchainer/qtile,farebord/qtile,rxcomm/qtile,kiniou/qtile,aniruddhkanojia/qtile,encukou/qtile,kiniou/qtile,nxnfufunezn/qtile,xplv/qtile,kopchik/qtile,StephenBarnes/qtile,w1ndy/qtile,apinsard/qtile,ramnes/qtile,qtile/qtile,kynikos/qtile,farebord/qtile,andrewyoung1991/qtile
|
from base import SubLayout, Rect
from Xlib import Xatom
class TopLevelSubLayout(SubLayout):
'''
This class effectively wraps a sublayout, and automatically adds a floating sublayout,
'''
def __init__(self, sublayout_data, clientStack, theme):
WrappedSubLayout, args = sublayout_data
SubLayout.__init__(self, clientStack, theme)
self.sublayouts.append(Floating(clientStack,
theme,
parent=self
)
)
self.sublayouts.append(WrappedSubLayout(clientStack,
theme,
parent=self,
**args
)
)
class VerticalStack(SubLayout):
def layout(self, rectangle, windows):
SubLayout.layout(self, rectangle, windows)
def configure(self, r, client):
position = self.windows.index(client)
cliheight = int(r.h / len(self.windows)) #inc border
self.place(client,
r.x,
r.y + cliheight*position,
r.w,
cliheight,
)
class Floating(SubLayout):
def filter(self, client):
return client.floating
def request_rectangle(self, r, windows):
return (Rect(0,0,0,0), r) #we want nothing
def configure(self, r, client):
client.unhide() #let it be where it wants
Update floating sublayout to use floatDimensions
|
from base import SubLayout, Rect
from Xlib import Xatom
class TopLevelSubLayout(SubLayout):
'''
This class effectively wraps a sublayout, and automatically adds a floating sublayout,
'''
def __init__(self, sublayout_data, clientStack, theme):
WrappedSubLayout, args = sublayout_data
SubLayout.__init__(self, clientStack, theme)
self.sublayouts.append(Floating(clientStack,
theme,
parent=self
)
)
self.sublayouts.append(WrappedSubLayout(clientStack,
theme,
parent=self,
**args
)
)
class VerticalStack(SubLayout):
def layout(self, rectangle, windows):
SubLayout.layout(self, rectangle, windows)
def configure(self, r, client):
position = self.windows.index(client)
cliheight = int(r.h / len(self.windows)) #inc border
self.place(client,
r.x,
r.y + cliheight*position,
r.w,
cliheight,
)
class Floating(SubLayout):
def filter(self, client):
return client.floating
def request_rectangle(self, r, windows):
return (Rect(0,0,0,0), r) #we want nothing
def configure(self, r, client):
d = client.floatDimensions
self.place(client, **d)
|
<commit_before>from base import SubLayout, Rect
from Xlib import Xatom
class TopLevelSubLayout(SubLayout):
'''
This class effectively wraps a sublayout, and automatically adds a floating sublayout,
'''
def __init__(self, sublayout_data, clientStack, theme):
WrappedSubLayout, args = sublayout_data
SubLayout.__init__(self, clientStack, theme)
self.sublayouts.append(Floating(clientStack,
theme,
parent=self
)
)
self.sublayouts.append(WrappedSubLayout(clientStack,
theme,
parent=self,
**args
)
)
class VerticalStack(SubLayout):
def layout(self, rectangle, windows):
SubLayout.layout(self, rectangle, windows)
def configure(self, r, client):
position = self.windows.index(client)
cliheight = int(r.h / len(self.windows)) #inc border
self.place(client,
r.x,
r.y + cliheight*position,
r.w,
cliheight,
)
class Floating(SubLayout):
def filter(self, client):
return client.floating
def request_rectangle(self, r, windows):
return (Rect(0,0,0,0), r) #we want nothing
def configure(self, r, client):
client.unhide() #let it be where it wants
<commit_msg>Update floating sublayout to use floatDimensions<commit_after>
|
from base import SubLayout, Rect
from Xlib import Xatom
class TopLevelSubLayout(SubLayout):
'''
This class effectively wraps a sublayout, and automatically adds a floating sublayout,
'''
def __init__(self, sublayout_data, clientStack, theme):
WrappedSubLayout, args = sublayout_data
SubLayout.__init__(self, clientStack, theme)
self.sublayouts.append(Floating(clientStack,
theme,
parent=self
)
)
self.sublayouts.append(WrappedSubLayout(clientStack,
theme,
parent=self,
**args
)
)
class VerticalStack(SubLayout):
def layout(self, rectangle, windows):
SubLayout.layout(self, rectangle, windows)
def configure(self, r, client):
position = self.windows.index(client)
cliheight = int(r.h / len(self.windows)) #inc border
self.place(client,
r.x,
r.y + cliheight*position,
r.w,
cliheight,
)
class Floating(SubLayout):
def filter(self, client):
return client.floating
def request_rectangle(self, r, windows):
return (Rect(0,0,0,0), r) #we want nothing
def configure(self, r, client):
d = client.floatDimensions
self.place(client, **d)
|
from base import SubLayout, Rect
from Xlib import Xatom
class TopLevelSubLayout(SubLayout):
'''
This class effectively wraps a sublayout, and automatically adds a floating sublayout,
'''
def __init__(self, sublayout_data, clientStack, theme):
WrappedSubLayout, args = sublayout_data
SubLayout.__init__(self, clientStack, theme)
self.sublayouts.append(Floating(clientStack,
theme,
parent=self
)
)
self.sublayouts.append(WrappedSubLayout(clientStack,
theme,
parent=self,
**args
)
)
class VerticalStack(SubLayout):
def layout(self, rectangle, windows):
SubLayout.layout(self, rectangle, windows)
def configure(self, r, client):
position = self.windows.index(client)
cliheight = int(r.h / len(self.windows)) #inc border
self.place(client,
r.x,
r.y + cliheight*position,
r.w,
cliheight,
)
class Floating(SubLayout):
def filter(self, client):
return client.floating
def request_rectangle(self, r, windows):
return (Rect(0,0,0,0), r) #we want nothing
def configure(self, r, client):
client.unhide() #let it be where it wants
Update floating sublayout to use floatDimensionsfrom base import SubLayout, Rect
from Xlib import Xatom
class TopLevelSubLayout(SubLayout):
'''
This class effectively wraps a sublayout, and automatically adds a floating sublayout,
'''
def __init__(self, sublayout_data, clientStack, theme):
WrappedSubLayout, args = sublayout_data
SubLayout.__init__(self, clientStack, theme)
self.sublayouts.append(Floating(clientStack,
theme,
parent=self
)
)
self.sublayouts.append(WrappedSubLayout(clientStack,
theme,
parent=self,
**args
)
)
class VerticalStack(SubLayout):
def layout(self, rectangle, windows):
SubLayout.layout(self, rectangle, windows)
def configure(self, r, client):
position = self.windows.index(client)
cliheight = int(r.h / len(self.windows)) #inc border
self.place(client,
r.x,
r.y + cliheight*position,
r.w,
cliheight,
)
class Floating(SubLayout):
def filter(self, client):
return client.floating
def request_rectangle(self, r, windows):
return (Rect(0,0,0,0), r) #we want nothing
def configure(self, r, client):
d = client.floatDimensions
self.place(client, **d)
|
<commit_before>from base import SubLayout, Rect
from Xlib import Xatom
class TopLevelSubLayout(SubLayout):
'''
This class effectively wraps a sublayout, and automatically adds a floating sublayout,
'''
def __init__(self, sublayout_data, clientStack, theme):
WrappedSubLayout, args = sublayout_data
SubLayout.__init__(self, clientStack, theme)
self.sublayouts.append(Floating(clientStack,
theme,
parent=self
)
)
self.sublayouts.append(WrappedSubLayout(clientStack,
theme,
parent=self,
**args
)
)
class VerticalStack(SubLayout):
def layout(self, rectangle, windows):
SubLayout.layout(self, rectangle, windows)
def configure(self, r, client):
position = self.windows.index(client)
cliheight = int(r.h / len(self.windows)) #inc border
self.place(client,
r.x,
r.y + cliheight*position,
r.w,
cliheight,
)
class Floating(SubLayout):
def filter(self, client):
return client.floating
def request_rectangle(self, r, windows):
return (Rect(0,0,0,0), r) #we want nothing
def configure(self, r, client):
client.unhide() #let it be where it wants
<commit_msg>Update floating sublayout to use floatDimensions<commit_after>from base import SubLayout, Rect
from Xlib import Xatom
class TopLevelSubLayout(SubLayout):
'''
This class effectively wraps a sublayout, and automatically adds a floating sublayout,
'''
def __init__(self, sublayout_data, clientStack, theme):
WrappedSubLayout, args = sublayout_data
SubLayout.__init__(self, clientStack, theme)
self.sublayouts.append(Floating(clientStack,
theme,
parent=self
)
)
self.sublayouts.append(WrappedSubLayout(clientStack,
theme,
parent=self,
**args
)
)
class VerticalStack(SubLayout):
def layout(self, rectangle, windows):
SubLayout.layout(self, rectangle, windows)
def configure(self, r, client):
position = self.windows.index(client)
cliheight = int(r.h / len(self.windows)) #inc border
self.place(client,
r.x,
r.y + cliheight*position,
r.w,
cliheight,
)
class Floating(SubLayout):
def filter(self, client):
return client.floating
def request_rectangle(self, r, windows):
return (Rect(0,0,0,0), r) #we want nothing
def configure(self, r, client):
d = client.floatDimensions
self.place(client, **d)
|
f35ed8a6c0dc81b86c69348fff543d52f070ee28
|
test/units/TestModules.py
|
test/units/TestModules.py
|
# -*- coding: utf-8 -*-
import os
import ast
import unittest
from ansible import utils
class TestModules(unittest.TestCase):
def list_all_modules(self):
paths = utils.plugins.module_finder._get_paths()
paths = [x for x in paths if os.path.isdir(x)]
module_list = []
for path in paths:
for (dirpath, dirnames, filenames) in os.walk(path):
for filename in filenames:
(path, ext) = os.path.splitext(filename)
if ext != ".ps1":
module_list.append(os.path.join(dirpath, filename))
return module_list
def test_ast_parse(self):
module_list = self.list_all_modules()
ERRORS = []
# attempt to parse each module with ast
for m in module_list:
try:
ast.parse(''.join(open(m)))
except Exception, e:
ERRORS.append((m, e))
assert len(ERRORS) == 0, "get_docstring errors: %s" % ERRORS
|
# -*- coding: utf-8 -*-
import os
import ast
import unittest
from ansible import utils
class TestModules(unittest.TestCase):
def list_all_modules(self):
paths = utils.plugins.module_finder._get_paths()
paths = [x for x in paths if os.path.isdir(x)]
module_list = []
for path in paths:
for (dirpath, dirnames, filenames) in os.walk(path):
for filename in filenames:
(path, ext) = os.path.splitext(filename)
if ext == ".py":
module_list.append(os.path.join(dirpath, filename))
return module_list
def test_ast_parse(self):
module_list = self.list_all_modules()
ERRORS = []
# attempt to parse each module with ast
for m in module_list:
try:
ast.parse(''.join(open(m)))
except Exception, e:
ERRORS.append((m, e))
assert len(ERRORS) == 0, "get_docstring errors: %s" % ERRORS
|
Update module test code to avoid pycs (that are not used)
|
Update module test code to avoid pycs (that are not used)
|
Python
|
mit
|
thaim/ansible,thaim/ansible
|
# -*- coding: utf-8 -*-
import os
import ast
import unittest
from ansible import utils
class TestModules(unittest.TestCase):
def list_all_modules(self):
paths = utils.plugins.module_finder._get_paths()
paths = [x for x in paths if os.path.isdir(x)]
module_list = []
for path in paths:
for (dirpath, dirnames, filenames) in os.walk(path):
for filename in filenames:
(path, ext) = os.path.splitext(filename)
if ext != ".ps1":
module_list.append(os.path.join(dirpath, filename))
return module_list
def test_ast_parse(self):
module_list = self.list_all_modules()
ERRORS = []
# attempt to parse each module with ast
for m in module_list:
try:
ast.parse(''.join(open(m)))
except Exception, e:
ERRORS.append((m, e))
assert len(ERRORS) == 0, "get_docstring errors: %s" % ERRORS
Update module test code to avoid pycs (that are not used)
|
# -*- coding: utf-8 -*-
import os
import ast
import unittest
from ansible import utils
class TestModules(unittest.TestCase):
def list_all_modules(self):
paths = utils.plugins.module_finder._get_paths()
paths = [x for x in paths if os.path.isdir(x)]
module_list = []
for path in paths:
for (dirpath, dirnames, filenames) in os.walk(path):
for filename in filenames:
(path, ext) = os.path.splitext(filename)
if ext == ".py":
module_list.append(os.path.join(dirpath, filename))
return module_list
def test_ast_parse(self):
module_list = self.list_all_modules()
ERRORS = []
# attempt to parse each module with ast
for m in module_list:
try:
ast.parse(''.join(open(m)))
except Exception, e:
ERRORS.append((m, e))
assert len(ERRORS) == 0, "get_docstring errors: %s" % ERRORS
|
<commit_before># -*- coding: utf-8 -*-
import os
import ast
import unittest
from ansible import utils
class TestModules(unittest.TestCase):
def list_all_modules(self):
paths = utils.plugins.module_finder._get_paths()
paths = [x for x in paths if os.path.isdir(x)]
module_list = []
for path in paths:
for (dirpath, dirnames, filenames) in os.walk(path):
for filename in filenames:
(path, ext) = os.path.splitext(filename)
if ext != ".ps1":
module_list.append(os.path.join(dirpath, filename))
return module_list
def test_ast_parse(self):
module_list = self.list_all_modules()
ERRORS = []
# attempt to parse each module with ast
for m in module_list:
try:
ast.parse(''.join(open(m)))
except Exception, e:
ERRORS.append((m, e))
assert len(ERRORS) == 0, "get_docstring errors: %s" % ERRORS
<commit_msg>Update module test code to avoid pycs (that are not used)<commit_after>
|
# -*- coding: utf-8 -*-
import os
import ast
import unittest
from ansible import utils
class TestModules(unittest.TestCase):
def list_all_modules(self):
paths = utils.plugins.module_finder._get_paths()
paths = [x for x in paths if os.path.isdir(x)]
module_list = []
for path in paths:
for (dirpath, dirnames, filenames) in os.walk(path):
for filename in filenames:
(path, ext) = os.path.splitext(filename)
if ext == ".py":
module_list.append(os.path.join(dirpath, filename))
return module_list
def test_ast_parse(self):
module_list = self.list_all_modules()
ERRORS = []
# attempt to parse each module with ast
for m in module_list:
try:
ast.parse(''.join(open(m)))
except Exception, e:
ERRORS.append((m, e))
assert len(ERRORS) == 0, "get_docstring errors: %s" % ERRORS
|
# -*- coding: utf-8 -*-
import os
import ast
import unittest
from ansible import utils
class TestModules(unittest.TestCase):
def list_all_modules(self):
paths = utils.plugins.module_finder._get_paths()
paths = [x for x in paths if os.path.isdir(x)]
module_list = []
for path in paths:
for (dirpath, dirnames, filenames) in os.walk(path):
for filename in filenames:
(path, ext) = os.path.splitext(filename)
if ext != ".ps1":
module_list.append(os.path.join(dirpath, filename))
return module_list
def test_ast_parse(self):
module_list = self.list_all_modules()
ERRORS = []
# attempt to parse each module with ast
for m in module_list:
try:
ast.parse(''.join(open(m)))
except Exception, e:
ERRORS.append((m, e))
assert len(ERRORS) == 0, "get_docstring errors: %s" % ERRORS
Update module test code to avoid pycs (that are not used)# -*- coding: utf-8 -*-
import os
import ast
import unittest
from ansible import utils
class TestModules(unittest.TestCase):
def list_all_modules(self):
paths = utils.plugins.module_finder._get_paths()
paths = [x for x in paths if os.path.isdir(x)]
module_list = []
for path in paths:
for (dirpath, dirnames, filenames) in os.walk(path):
for filename in filenames:
(path, ext) = os.path.splitext(filename)
if ext == ".py":
module_list.append(os.path.join(dirpath, filename))
return module_list
def test_ast_parse(self):
module_list = self.list_all_modules()
ERRORS = []
# attempt to parse each module with ast
for m in module_list:
try:
ast.parse(''.join(open(m)))
except Exception, e:
ERRORS.append((m, e))
assert len(ERRORS) == 0, "get_docstring errors: %s" % ERRORS
|
<commit_before># -*- coding: utf-8 -*-
import os
import ast
import unittest
from ansible import utils
class TestModules(unittest.TestCase):
def list_all_modules(self):
paths = utils.plugins.module_finder._get_paths()
paths = [x for x in paths if os.path.isdir(x)]
module_list = []
for path in paths:
for (dirpath, dirnames, filenames) in os.walk(path):
for filename in filenames:
(path, ext) = os.path.splitext(filename)
if ext != ".ps1":
module_list.append(os.path.join(dirpath, filename))
return module_list
def test_ast_parse(self):
module_list = self.list_all_modules()
ERRORS = []
# attempt to parse each module with ast
for m in module_list:
try:
ast.parse(''.join(open(m)))
except Exception, e:
ERRORS.append((m, e))
assert len(ERRORS) == 0, "get_docstring errors: %s" % ERRORS
<commit_msg>Update module test code to avoid pycs (that are not used)<commit_after># -*- coding: utf-8 -*-
import os
import ast
import unittest
from ansible import utils
class TestModules(unittest.TestCase):
def list_all_modules(self):
paths = utils.plugins.module_finder._get_paths()
paths = [x for x in paths if os.path.isdir(x)]
module_list = []
for path in paths:
for (dirpath, dirnames, filenames) in os.walk(path):
for filename in filenames:
(path, ext) = os.path.splitext(filename)
if ext == ".py":
module_list.append(os.path.join(dirpath, filename))
return module_list
def test_ast_parse(self):
module_list = self.list_all_modules()
ERRORS = []
# attempt to parse each module with ast
for m in module_list:
try:
ast.parse(''.join(open(m)))
except Exception, e:
ERRORS.append((m, e))
assert len(ERRORS) == 0, "get_docstring errors: %s" % ERRORS
|
6152709cad4c602dd00184f525e5cdd397074bd5
|
tests/web/test_request.py
|
tests/web/test_request.py
|
import unittest
from performance.web import Request, RequestTypeError, RequestTimeError
class RequestTestCase(unittest.TestCase):
def setUp(self):
self.url = 'http://www.google.com'
def test_constants(self):
self.assertEqual('get', Request.GET)
self.assertEqual('post', Request.POST)
def test_init(self):
request = Request(url=self.url, type=Request.GET)
self.assertEqual(self.url, request.url)
self.assertEqual(Request.GET, request.type)
def test_do(self):
request = Request(url=self.url, type=Request.GET)
request.do()
self.assertTrue(hasattr(request, 'status_code'))
request.type = Request.POST
request.do()
self.assertTrue(hasattr(request, 'status_code'))
def test_invalid_type(self):
type = 'foo_bar'
request = Request(url=self.url, type=type)
with self.assertRaises(RequestTypeError) as error:
request.do()
self.assertEqual('Invalid request type "%s".' % type, error.exception.__str__())
def test_response_time(self):
request = Request(url=self.url, type=Request.GET)
request.do()
self.assertEqual(request.finished - request.started, request.get_response_time())
def test_time_error(self):
request = Request(url=self.url, type=Request.GET)
with self.assertRaises(RequestTimeError):
request.get_response_time()
|
import unittest
from performance.web import Request, RequestTypeError, RequestTimeError
class RequestTestCase(unittest.TestCase):
def setUp(self):
self.url = 'http://www.google.com'
def test_constants(self):
self.assertEqual('get', Request.GET)
self.assertEqual('post', Request.POST)
def test_init(self):
request = Request(url=self.url, type=Request.GET)
self.assertEqual(self.url, request.url)
self.assertEqual(Request.GET, request.type)
def test_do(self):
request = Request(url=self.url, type=Request.GET)
request.do()
self.assertTrue(hasattr(request, 'status_code'))
request.type = Request.POST
request.do()
self.assertTrue(hasattr(request, 'status_code'))
def test_invalid_type(self):
type = 'foo_bar'
request = Request(url=self.url, type=type)
with self.assertRaises(RequestTypeError) as error:
request.do()
self.assertEqual('Invalid request type "%s"' % type, error.exception.__str__())
def test_response_time(self):
request = Request(url=self.url, type=Request.GET)
request.do()
self.assertEqual(request.finished - request.started, request.get_response_time())
def test_time_error(self):
request = Request(url=self.url, type=Request.GET)
with self.assertRaises(RequestTimeError):
request.get_response_time()
|
Update test that it runs
|
Update test that it runs
|
Python
|
mit
|
BakeCode/performance-testing,BakeCode/performance-testing
|
import unittest
from performance.web import Request, RequestTypeError, RequestTimeError
class RequestTestCase(unittest.TestCase):
def setUp(self):
self.url = 'http://www.google.com'
def test_constants(self):
self.assertEqual('get', Request.GET)
self.assertEqual('post', Request.POST)
def test_init(self):
request = Request(url=self.url, type=Request.GET)
self.assertEqual(self.url, request.url)
self.assertEqual(Request.GET, request.type)
def test_do(self):
request = Request(url=self.url, type=Request.GET)
request.do()
self.assertTrue(hasattr(request, 'status_code'))
request.type = Request.POST
request.do()
self.assertTrue(hasattr(request, 'status_code'))
def test_invalid_type(self):
type = 'foo_bar'
request = Request(url=self.url, type=type)
with self.assertRaises(RequestTypeError) as error:
request.do()
self.assertEqual('Invalid request type "%s".' % type, error.exception.__str__())
def test_response_time(self):
request = Request(url=self.url, type=Request.GET)
request.do()
self.assertEqual(request.finished - request.started, request.get_response_time())
def test_time_error(self):
request = Request(url=self.url, type=Request.GET)
with self.assertRaises(RequestTimeError):
request.get_response_time()
Update test that it runs
|
import unittest
from performance.web import Request, RequestTypeError, RequestTimeError
class RequestTestCase(unittest.TestCase):
def setUp(self):
self.url = 'http://www.google.com'
def test_constants(self):
self.assertEqual('get', Request.GET)
self.assertEqual('post', Request.POST)
def test_init(self):
request = Request(url=self.url, type=Request.GET)
self.assertEqual(self.url, request.url)
self.assertEqual(Request.GET, request.type)
def test_do(self):
request = Request(url=self.url, type=Request.GET)
request.do()
self.assertTrue(hasattr(request, 'status_code'))
request.type = Request.POST
request.do()
self.assertTrue(hasattr(request, 'status_code'))
def test_invalid_type(self):
type = 'foo_bar'
request = Request(url=self.url, type=type)
with self.assertRaises(RequestTypeError) as error:
request.do()
self.assertEqual('Invalid request type "%s"' % type, error.exception.__str__())
def test_response_time(self):
request = Request(url=self.url, type=Request.GET)
request.do()
self.assertEqual(request.finished - request.started, request.get_response_time())
def test_time_error(self):
request = Request(url=self.url, type=Request.GET)
with self.assertRaises(RequestTimeError):
request.get_response_time()
|
<commit_before>import unittest
from performance.web import Request, RequestTypeError, RequestTimeError
class RequestTestCase(unittest.TestCase):
def setUp(self):
self.url = 'http://www.google.com'
def test_constants(self):
self.assertEqual('get', Request.GET)
self.assertEqual('post', Request.POST)
def test_init(self):
request = Request(url=self.url, type=Request.GET)
self.assertEqual(self.url, request.url)
self.assertEqual(Request.GET, request.type)
def test_do(self):
request = Request(url=self.url, type=Request.GET)
request.do()
self.assertTrue(hasattr(request, 'status_code'))
request.type = Request.POST
request.do()
self.assertTrue(hasattr(request, 'status_code'))
def test_invalid_type(self):
type = 'foo_bar'
request = Request(url=self.url, type=type)
with self.assertRaises(RequestTypeError) as error:
request.do()
self.assertEqual('Invalid request type "%s".' % type, error.exception.__str__())
def test_response_time(self):
request = Request(url=self.url, type=Request.GET)
request.do()
self.assertEqual(request.finished - request.started, request.get_response_time())
def test_time_error(self):
request = Request(url=self.url, type=Request.GET)
with self.assertRaises(RequestTimeError):
request.get_response_time()
<commit_msg>Update test that it runs<commit_after>
|
import unittest
from performance.web import Request, RequestTypeError, RequestTimeError
class RequestTestCase(unittest.TestCase):
def setUp(self):
self.url = 'http://www.google.com'
def test_constants(self):
self.assertEqual('get', Request.GET)
self.assertEqual('post', Request.POST)
def test_init(self):
request = Request(url=self.url, type=Request.GET)
self.assertEqual(self.url, request.url)
self.assertEqual(Request.GET, request.type)
def test_do(self):
request = Request(url=self.url, type=Request.GET)
request.do()
self.assertTrue(hasattr(request, 'status_code'))
request.type = Request.POST
request.do()
self.assertTrue(hasattr(request, 'status_code'))
def test_invalid_type(self):
type = 'foo_bar'
request = Request(url=self.url, type=type)
with self.assertRaises(RequestTypeError) as error:
request.do()
self.assertEqual('Invalid request type "%s"' % type, error.exception.__str__())
def test_response_time(self):
request = Request(url=self.url, type=Request.GET)
request.do()
self.assertEqual(request.finished - request.started, request.get_response_time())
def test_time_error(self):
request = Request(url=self.url, type=Request.GET)
with self.assertRaises(RequestTimeError):
request.get_response_time()
|
import unittest
from performance.web import Request, RequestTypeError, RequestTimeError
class RequestTestCase(unittest.TestCase):
def setUp(self):
self.url = 'http://www.google.com'
def test_constants(self):
self.assertEqual('get', Request.GET)
self.assertEqual('post', Request.POST)
def test_init(self):
request = Request(url=self.url, type=Request.GET)
self.assertEqual(self.url, request.url)
self.assertEqual(Request.GET, request.type)
def test_do(self):
request = Request(url=self.url, type=Request.GET)
request.do()
self.assertTrue(hasattr(request, 'status_code'))
request.type = Request.POST
request.do()
self.assertTrue(hasattr(request, 'status_code'))
def test_invalid_type(self):
type = 'foo_bar'
request = Request(url=self.url, type=type)
with self.assertRaises(RequestTypeError) as error:
request.do()
self.assertEqual('Invalid request type "%s".' % type, error.exception.__str__())
def test_response_time(self):
request = Request(url=self.url, type=Request.GET)
request.do()
self.assertEqual(request.finished - request.started, request.get_response_time())
def test_time_error(self):
request = Request(url=self.url, type=Request.GET)
with self.assertRaises(RequestTimeError):
request.get_response_time()
Update test that it runsimport unittest
from performance.web import Request, RequestTypeError, RequestTimeError
class RequestTestCase(unittest.TestCase):
def setUp(self):
self.url = 'http://www.google.com'
def test_constants(self):
self.assertEqual('get', Request.GET)
self.assertEqual('post', Request.POST)
def test_init(self):
request = Request(url=self.url, type=Request.GET)
self.assertEqual(self.url, request.url)
self.assertEqual(Request.GET, request.type)
def test_do(self):
request = Request(url=self.url, type=Request.GET)
request.do()
self.assertTrue(hasattr(request, 'status_code'))
request.type = Request.POST
request.do()
self.assertTrue(hasattr(request, 'status_code'))
def test_invalid_type(self):
type = 'foo_bar'
request = Request(url=self.url, type=type)
with self.assertRaises(RequestTypeError) as error:
request.do()
self.assertEqual('Invalid request type "%s"' % type, error.exception.__str__())
def test_response_time(self):
request = Request(url=self.url, type=Request.GET)
request.do()
self.assertEqual(request.finished - request.started, request.get_response_time())
def test_time_error(self):
request = Request(url=self.url, type=Request.GET)
with self.assertRaises(RequestTimeError):
request.get_response_time()
|
<commit_before>import unittest
from performance.web import Request, RequestTypeError, RequestTimeError
class RequestTestCase(unittest.TestCase):
def setUp(self):
self.url = 'http://www.google.com'
def test_constants(self):
self.assertEqual('get', Request.GET)
self.assertEqual('post', Request.POST)
def test_init(self):
request = Request(url=self.url, type=Request.GET)
self.assertEqual(self.url, request.url)
self.assertEqual(Request.GET, request.type)
def test_do(self):
request = Request(url=self.url, type=Request.GET)
request.do()
self.assertTrue(hasattr(request, 'status_code'))
request.type = Request.POST
request.do()
self.assertTrue(hasattr(request, 'status_code'))
def test_invalid_type(self):
type = 'foo_bar'
request = Request(url=self.url, type=type)
with self.assertRaises(RequestTypeError) as error:
request.do()
self.assertEqual('Invalid request type "%s".' % type, error.exception.__str__())
def test_response_time(self):
request = Request(url=self.url, type=Request.GET)
request.do()
self.assertEqual(request.finished - request.started, request.get_response_time())
def test_time_error(self):
request = Request(url=self.url, type=Request.GET)
with self.assertRaises(RequestTimeError):
request.get_response_time()
<commit_msg>Update test that it runs<commit_after>import unittest
from performance.web import Request, RequestTypeError, RequestTimeError
class RequestTestCase(unittest.TestCase):
def setUp(self):
self.url = 'http://www.google.com'
def test_constants(self):
self.assertEqual('get', Request.GET)
self.assertEqual('post', Request.POST)
def test_init(self):
request = Request(url=self.url, type=Request.GET)
self.assertEqual(self.url, request.url)
self.assertEqual(Request.GET, request.type)
def test_do(self):
request = Request(url=self.url, type=Request.GET)
request.do()
self.assertTrue(hasattr(request, 'status_code'))
request.type = Request.POST
request.do()
self.assertTrue(hasattr(request, 'status_code'))
def test_invalid_type(self):
type = 'foo_bar'
request = Request(url=self.url, type=type)
with self.assertRaises(RequestTypeError) as error:
request.do()
self.assertEqual('Invalid request type "%s"' % type, error.exception.__str__())
def test_response_time(self):
request = Request(url=self.url, type=Request.GET)
request.do()
self.assertEqual(request.finished - request.started, request.get_response_time())
def test_time_error(self):
request = Request(url=self.url, type=Request.GET)
with self.assertRaises(RequestTimeError):
request.get_response_time()
|
a0443783c880cf90b11886e3180e842e2c17a77a
|
tests/gtype.py
|
tests/gtype.py
|
import unittest
from common import gobject, gtk
class GTypeTest(unittest.TestCase):
def testBoolType(self):
store = gtk.ListStore(gobject.TYPE_BOOLEAN)
assert store.get_column_type(0) == gobject.TYPE_BOOLEAN
store = gtk.ListStore('gboolean')
assert store.get_column_type(0) == gobject.TYPE_BOOLEAN
store = gtk.ListStore(bool)
assert store.get_column_type(0) == gobject.TYPE_BOOLEAN
if __name__ == '__main__':
unittest.main()
|
import unittest
from common import gobject, gtk
class GTypeTest(unittest.TestCase):
def checkType(self, expected, *objects):
# Silly method to check pyg_type_from_object
store = gtk.ListStore(expected)
val = store.get_column_type(0)
assert val == expected, \
'got %r while %r was expected' % (val, expected)
for object in objects:
store = gtk.ListStore(object)
val = store.get_column_type(0)
assert val == expected, \
'got %r while %r was expected' % (val, expected)
def testBool(self):
self.checkType(gobject.TYPE_BOOLEAN, 'gboolean', bool)
def testInt(self):
self.checkType(gobject.TYPE_INT, 'gint', int)
def testInt64(self):
self.checkType(gobject.TYPE_INT64, 'gint64')
def testUint(self):
self.checkType(gobject.TYPE_UINT, 'guint')
def testUint64(self):
self.checkType(gobject.TYPE_UINT64, 'guint64')
def testLong(self):
self.checkType(gobject.TYPE_LONG, 'glong', long)
def testUlong(self):
self.checkType(gobject.TYPE_ULONG, 'gulong')
def testDouble(self):
self.checkType(gobject.TYPE_DOUBLE, 'gdouble', float)
def testFloat(self):
self.checkType(gobject.TYPE_FLOAT, 'gfloat')
def testPyObject(self):
self.checkType(gobject.TYPE_PYOBJECT, object)
def testObject(self):
self.checkType(gobject.TYPE_OBJECT)
# XXX: Flags, Enums
if __name__ == '__main__':
unittest.main()
|
Test various other types aswell
|
Test various other types aswell
|
Python
|
lgpl-2.1
|
thiblahute/pygobject,atizo/pygobject,atizo/pygobject,nzjrs/pygobject,choeger/pygobject-cmake,Distrotech/pygobject,pexip/pygobject,davidmalcolm/pygobject,jdahlin/pygobject,MathieuDuponchelle/pygobject,GNOME/pygobject,davidmalcolm/pygobject,thiblahute/pygobject,davibe/pygobject,davibe/pygobject,jdahlin/pygobject,pexip/pygobject,alexef/pygobject,sfeltman/pygobject,sfeltman/pygobject,alexef/pygobject,GNOME/pygobject,Distrotech/pygobject,thiblahute/pygobject,atizo/pygobject,jdahlin/pygobject,davibe/pygobject,alexef/pygobject,Distrotech/pygobject,Distrotech/pygobject,MathieuDuponchelle/pygobject,davidmalcolm/pygobject,MathieuDuponchelle/pygobject,sfeltman/pygobject,nzjrs/pygobject,pexip/pygobject,davibe/pygobject,GNOME/pygobject,choeger/pygobject-cmake,choeger/pygobject-cmake,nzjrs/pygobject
|
import unittest
from common import gobject, gtk
class GTypeTest(unittest.TestCase):
def testBoolType(self):
store = gtk.ListStore(gobject.TYPE_BOOLEAN)
assert store.get_column_type(0) == gobject.TYPE_BOOLEAN
store = gtk.ListStore('gboolean')
assert store.get_column_type(0) == gobject.TYPE_BOOLEAN
store = gtk.ListStore(bool)
assert store.get_column_type(0) == gobject.TYPE_BOOLEAN
if __name__ == '__main__':
unittest.main()
Test various other types aswell
|
import unittest
from common import gobject, gtk
class GTypeTest(unittest.TestCase):
def checkType(self, expected, *objects):
# Silly method to check pyg_type_from_object
store = gtk.ListStore(expected)
val = store.get_column_type(0)
assert val == expected, \
'got %r while %r was expected' % (val, expected)
for object in objects:
store = gtk.ListStore(object)
val = store.get_column_type(0)
assert val == expected, \
'got %r while %r was expected' % (val, expected)
def testBool(self):
self.checkType(gobject.TYPE_BOOLEAN, 'gboolean', bool)
def testInt(self):
self.checkType(gobject.TYPE_INT, 'gint', int)
def testInt64(self):
self.checkType(gobject.TYPE_INT64, 'gint64')
def testUint(self):
self.checkType(gobject.TYPE_UINT, 'guint')
def testUint64(self):
self.checkType(gobject.TYPE_UINT64, 'guint64')
def testLong(self):
self.checkType(gobject.TYPE_LONG, 'glong', long)
def testUlong(self):
self.checkType(gobject.TYPE_ULONG, 'gulong')
def testDouble(self):
self.checkType(gobject.TYPE_DOUBLE, 'gdouble', float)
def testFloat(self):
self.checkType(gobject.TYPE_FLOAT, 'gfloat')
def testPyObject(self):
self.checkType(gobject.TYPE_PYOBJECT, object)
def testObject(self):
self.checkType(gobject.TYPE_OBJECT)
# XXX: Flags, Enums
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
from common import gobject, gtk
class GTypeTest(unittest.TestCase):
def testBoolType(self):
store = gtk.ListStore(gobject.TYPE_BOOLEAN)
assert store.get_column_type(0) == gobject.TYPE_BOOLEAN
store = gtk.ListStore('gboolean')
assert store.get_column_type(0) == gobject.TYPE_BOOLEAN
store = gtk.ListStore(bool)
assert store.get_column_type(0) == gobject.TYPE_BOOLEAN
if __name__ == '__main__':
unittest.main()
<commit_msg>Test various other types aswell<commit_after>
|
import unittest
from common import gobject, gtk
class GTypeTest(unittest.TestCase):
def checkType(self, expected, *objects):
# Silly method to check pyg_type_from_object
store = gtk.ListStore(expected)
val = store.get_column_type(0)
assert val == expected, \
'got %r while %r was expected' % (val, expected)
for object in objects:
store = gtk.ListStore(object)
val = store.get_column_type(0)
assert val == expected, \
'got %r while %r was expected' % (val, expected)
def testBool(self):
self.checkType(gobject.TYPE_BOOLEAN, 'gboolean', bool)
def testInt(self):
self.checkType(gobject.TYPE_INT, 'gint', int)
def testInt64(self):
self.checkType(gobject.TYPE_INT64, 'gint64')
def testUint(self):
self.checkType(gobject.TYPE_UINT, 'guint')
def testUint64(self):
self.checkType(gobject.TYPE_UINT64, 'guint64')
def testLong(self):
self.checkType(gobject.TYPE_LONG, 'glong', long)
def testUlong(self):
self.checkType(gobject.TYPE_ULONG, 'gulong')
def testDouble(self):
self.checkType(gobject.TYPE_DOUBLE, 'gdouble', float)
def testFloat(self):
self.checkType(gobject.TYPE_FLOAT, 'gfloat')
def testPyObject(self):
self.checkType(gobject.TYPE_PYOBJECT, object)
def testObject(self):
self.checkType(gobject.TYPE_OBJECT)
# XXX: Flags, Enums
if __name__ == '__main__':
unittest.main()
|
import unittest
from common import gobject, gtk
class GTypeTest(unittest.TestCase):
def testBoolType(self):
store = gtk.ListStore(gobject.TYPE_BOOLEAN)
assert store.get_column_type(0) == gobject.TYPE_BOOLEAN
store = gtk.ListStore('gboolean')
assert store.get_column_type(0) == gobject.TYPE_BOOLEAN
store = gtk.ListStore(bool)
assert store.get_column_type(0) == gobject.TYPE_BOOLEAN
if __name__ == '__main__':
unittest.main()
Test various other types aswellimport unittest
from common import gobject, gtk
class GTypeTest(unittest.TestCase):
def checkType(self, expected, *objects):
# Silly method to check pyg_type_from_object
store = gtk.ListStore(expected)
val = store.get_column_type(0)
assert val == expected, \
'got %r while %r was expected' % (val, expected)
for object in objects:
store = gtk.ListStore(object)
val = store.get_column_type(0)
assert val == expected, \
'got %r while %r was expected' % (val, expected)
def testBool(self):
self.checkType(gobject.TYPE_BOOLEAN, 'gboolean', bool)
def testInt(self):
self.checkType(gobject.TYPE_INT, 'gint', int)
def testInt64(self):
self.checkType(gobject.TYPE_INT64, 'gint64')
def testUint(self):
self.checkType(gobject.TYPE_UINT, 'guint')
def testUint64(self):
self.checkType(gobject.TYPE_UINT64, 'guint64')
def testLong(self):
self.checkType(gobject.TYPE_LONG, 'glong', long)
def testUlong(self):
self.checkType(gobject.TYPE_ULONG, 'gulong')
def testDouble(self):
self.checkType(gobject.TYPE_DOUBLE, 'gdouble', float)
def testFloat(self):
self.checkType(gobject.TYPE_FLOAT, 'gfloat')
def testPyObject(self):
self.checkType(gobject.TYPE_PYOBJECT, object)
def testObject(self):
self.checkType(gobject.TYPE_OBJECT)
# XXX: Flags, Enums
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
from common import gobject, gtk
class GTypeTest(unittest.TestCase):
def testBoolType(self):
store = gtk.ListStore(gobject.TYPE_BOOLEAN)
assert store.get_column_type(0) == gobject.TYPE_BOOLEAN
store = gtk.ListStore('gboolean')
assert store.get_column_type(0) == gobject.TYPE_BOOLEAN
store = gtk.ListStore(bool)
assert store.get_column_type(0) == gobject.TYPE_BOOLEAN
if __name__ == '__main__':
unittest.main()
<commit_msg>Test various other types aswell<commit_after>import unittest
from common import gobject, gtk
class GTypeTest(unittest.TestCase):
def checkType(self, expected, *objects):
# Silly method to check pyg_type_from_object
store = gtk.ListStore(expected)
val = store.get_column_type(0)
assert val == expected, \
'got %r while %r was expected' % (val, expected)
for object in objects:
store = gtk.ListStore(object)
val = store.get_column_type(0)
assert val == expected, \
'got %r while %r was expected' % (val, expected)
def testBool(self):
self.checkType(gobject.TYPE_BOOLEAN, 'gboolean', bool)
def testInt(self):
self.checkType(gobject.TYPE_INT, 'gint', int)
def testInt64(self):
self.checkType(gobject.TYPE_INT64, 'gint64')
def testUint(self):
self.checkType(gobject.TYPE_UINT, 'guint')
def testUint64(self):
self.checkType(gobject.TYPE_UINT64, 'guint64')
def testLong(self):
self.checkType(gobject.TYPE_LONG, 'glong', long)
def testUlong(self):
self.checkType(gobject.TYPE_ULONG, 'gulong')
def testDouble(self):
self.checkType(gobject.TYPE_DOUBLE, 'gdouble', float)
def testFloat(self):
self.checkType(gobject.TYPE_FLOAT, 'gfloat')
def testPyObject(self):
self.checkType(gobject.TYPE_PYOBJECT, object)
def testObject(self):
self.checkType(gobject.TYPE_OBJECT)
# XXX: Flags, Enums
if __name__ == '__main__':
unittest.main()
|
9bedcfc03e2dcb4261a49f332e82e78379059997
|
client/setup.py
|
client/setup.py
|
from setuptools import setup
requirements = [
'pyserial',
]
with open('README') as f:
long_description = f.read()
setup(
name='removinator',
version='0.1.0.dev2',
description='A library for controlling the Smart Card Removinator',
long_description=long_description,
url='https://github.com/nkinder/smart-card-removinator',
author='Smart Card Removinator contributors',
author_email='nkinder@redhat.com',
license='APLv2',
packages=['removinator'],
classifiers=[
'Intended Audience :: Developers',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Libraries :: Python Modules',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
install_requires=requirements,
)
|
from setuptools import setup
requirements = [
'pyserial',
]
with open('README') as f:
long_description = f.read()
setup(
name='removinator',
version='0.1.0.dev3',
description='A library for controlling the Smart Card Removinator',
long_description=long_description,
url='https://github.com/nkinder/smart-card-removinator',
author='Smart Card Removinator contributors',
author_email='nkinder@redhat.com',
license='APLv2',
packages=['removinator'],
classifiers=[
'Intended Audience :: Developers',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Libraries :: Python Modules',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
install_requires=requirements,
)
|
Bump client library version to 0.1.0.dev3
|
Bump client library version to 0.1.0.dev3
|
Python
|
apache-2.0
|
nkinder/smart-card-removinator
|
from setuptools import setup
requirements = [
'pyserial',
]
with open('README') as f:
long_description = f.read()
setup(
name='removinator',
version='0.1.0.dev2',
description='A library for controlling the Smart Card Removinator',
long_description=long_description,
url='https://github.com/nkinder/smart-card-removinator',
author='Smart Card Removinator contributors',
author_email='nkinder@redhat.com',
license='APLv2',
packages=['removinator'],
classifiers=[
'Intended Audience :: Developers',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Libraries :: Python Modules',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
install_requires=requirements,
)
Bump client library version to 0.1.0.dev3
|
from setuptools import setup
requirements = [
'pyserial',
]
with open('README') as f:
long_description = f.read()
setup(
name='removinator',
version='0.1.0.dev3',
description='A library for controlling the Smart Card Removinator',
long_description=long_description,
url='https://github.com/nkinder/smart-card-removinator',
author='Smart Card Removinator contributors',
author_email='nkinder@redhat.com',
license='APLv2',
packages=['removinator'],
classifiers=[
'Intended Audience :: Developers',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Libraries :: Python Modules',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
install_requires=requirements,
)
|
<commit_before>from setuptools import setup
requirements = [
'pyserial',
]
with open('README') as f:
long_description = f.read()
setup(
name='removinator',
version='0.1.0.dev2',
description='A library for controlling the Smart Card Removinator',
long_description=long_description,
url='https://github.com/nkinder/smart-card-removinator',
author='Smart Card Removinator contributors',
author_email='nkinder@redhat.com',
license='APLv2',
packages=['removinator'],
classifiers=[
'Intended Audience :: Developers',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Libraries :: Python Modules',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
install_requires=requirements,
)
<commit_msg>Bump client library version to 0.1.0.dev3<commit_after>
|
from setuptools import setup
requirements = [
'pyserial',
]
with open('README') as f:
long_description = f.read()
setup(
name='removinator',
version='0.1.0.dev3',
description='A library for controlling the Smart Card Removinator',
long_description=long_description,
url='https://github.com/nkinder/smart-card-removinator',
author='Smart Card Removinator contributors',
author_email='nkinder@redhat.com',
license='APLv2',
packages=['removinator'],
classifiers=[
'Intended Audience :: Developers',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Libraries :: Python Modules',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
install_requires=requirements,
)
|
from setuptools import setup
requirements = [
'pyserial',
]
with open('README') as f:
long_description = f.read()
setup(
name='removinator',
version='0.1.0.dev2',
description='A library for controlling the Smart Card Removinator',
long_description=long_description,
url='https://github.com/nkinder/smart-card-removinator',
author='Smart Card Removinator contributors',
author_email='nkinder@redhat.com',
license='APLv2',
packages=['removinator'],
classifiers=[
'Intended Audience :: Developers',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Libraries :: Python Modules',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
install_requires=requirements,
)
Bump client library version to 0.1.0.dev3from setuptools import setup
requirements = [
'pyserial',
]
with open('README') as f:
long_description = f.read()
setup(
name='removinator',
version='0.1.0.dev3',
description='A library for controlling the Smart Card Removinator',
long_description=long_description,
url='https://github.com/nkinder/smart-card-removinator',
author='Smart Card Removinator contributors',
author_email='nkinder@redhat.com',
license='APLv2',
packages=['removinator'],
classifiers=[
'Intended Audience :: Developers',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Libraries :: Python Modules',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
install_requires=requirements,
)
|
<commit_before>from setuptools import setup
requirements = [
'pyserial',
]
with open('README') as f:
long_description = f.read()
setup(
name='removinator',
version='0.1.0.dev2',
description='A library for controlling the Smart Card Removinator',
long_description=long_description,
url='https://github.com/nkinder/smart-card-removinator',
author='Smart Card Removinator contributors',
author_email='nkinder@redhat.com',
license='APLv2',
packages=['removinator'],
classifiers=[
'Intended Audience :: Developers',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Libraries :: Python Modules',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
install_requires=requirements,
)
<commit_msg>Bump client library version to 0.1.0.dev3<commit_after>from setuptools import setup
requirements = [
'pyserial',
]
with open('README') as f:
long_description = f.read()
setup(
name='removinator',
version='0.1.0.dev3',
description='A library for controlling the Smart Card Removinator',
long_description=long_description,
url='https://github.com/nkinder/smart-card-removinator',
author='Smart Card Removinator contributors',
author_email='nkinder@redhat.com',
license='APLv2',
packages=['removinator'],
classifiers=[
'Intended Audience :: Developers',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Libraries :: Python Modules',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
install_requires=requirements,
)
|
fde3cb61225f66414f41eac141fb68651a3fe1c9
|
tests/stats.py
|
tests/stats.py
|
# Copyright (C) 2010 Peter Teichman
import unittest
from instatrace.stats import Statistic
class testStatistic(unittest.TestCase):
def testName(self):
name = "test_stat"
s = Statistic(name, [])
self.assertEqual(name, s.name)
|
# Copyright (C) 2010 Peter Teichman
import unittest
from instatrace.stats import Statistics
class testStatistics(unittest.TestCase):
def testStats(self):
name = "test_stat"
s = Statistics()
s.add_sample(name, 1)
s.add_sample(name, 2)
s.add_sample(name, 3)
test_stat = s.statistics.get(name)
self.assertEqual(name, test_stat._name)
stats = test_stat.stats()
self.assertEqual(3, stats.get("count"))
self.assertEqual(6, stats.get("total"))
self.assertAlmostEqual(2, stats.get("mean"))
self.assertAlmostEqual(0.8164966, stats.get("stddev"))
|
Implement testStatistics with count, total, mean, and stddev checks
|
Implement testStatistics with count, total, mean, and stddev checks
|
Python
|
mit
|
pteichman/instatrace
|
# Copyright (C) 2010 Peter Teichman
import unittest
from instatrace.stats import Statistic
class testStatistic(unittest.TestCase):
def testName(self):
name = "test_stat"
s = Statistic(name, [])
self.assertEqual(name, s.name)
Implement testStatistics with count, total, mean, and stddev checks
|
# Copyright (C) 2010 Peter Teichman
import unittest
from instatrace.stats import Statistics
class testStatistics(unittest.TestCase):
def testStats(self):
name = "test_stat"
s = Statistics()
s.add_sample(name, 1)
s.add_sample(name, 2)
s.add_sample(name, 3)
test_stat = s.statistics.get(name)
self.assertEqual(name, test_stat._name)
stats = test_stat.stats()
self.assertEqual(3, stats.get("count"))
self.assertEqual(6, stats.get("total"))
self.assertAlmostEqual(2, stats.get("mean"))
self.assertAlmostEqual(0.8164966, stats.get("stddev"))
|
<commit_before># Copyright (C) 2010 Peter Teichman
import unittest
from instatrace.stats import Statistic
class testStatistic(unittest.TestCase):
def testName(self):
name = "test_stat"
s = Statistic(name, [])
self.assertEqual(name, s.name)
<commit_msg>Implement testStatistics with count, total, mean, and stddev checks<commit_after>
|
# Copyright (C) 2010 Peter Teichman
import unittest
from instatrace.stats import Statistics
class testStatistics(unittest.TestCase):
def testStats(self):
name = "test_stat"
s = Statistics()
s.add_sample(name, 1)
s.add_sample(name, 2)
s.add_sample(name, 3)
test_stat = s.statistics.get(name)
self.assertEqual(name, test_stat._name)
stats = test_stat.stats()
self.assertEqual(3, stats.get("count"))
self.assertEqual(6, stats.get("total"))
self.assertAlmostEqual(2, stats.get("mean"))
self.assertAlmostEqual(0.8164966, stats.get("stddev"))
|
# Copyright (C) 2010 Peter Teichman
import unittest
from instatrace.stats import Statistic
class testStatistic(unittest.TestCase):
def testName(self):
name = "test_stat"
s = Statistic(name, [])
self.assertEqual(name, s.name)
Implement testStatistics with count, total, mean, and stddev checks# Copyright (C) 2010 Peter Teichman
import unittest
from instatrace.stats import Statistics
class testStatistics(unittest.TestCase):
def testStats(self):
name = "test_stat"
s = Statistics()
s.add_sample(name, 1)
s.add_sample(name, 2)
s.add_sample(name, 3)
test_stat = s.statistics.get(name)
self.assertEqual(name, test_stat._name)
stats = test_stat.stats()
self.assertEqual(3, stats.get("count"))
self.assertEqual(6, stats.get("total"))
self.assertAlmostEqual(2, stats.get("mean"))
self.assertAlmostEqual(0.8164966, stats.get("stddev"))
|
<commit_before># Copyright (C) 2010 Peter Teichman
import unittest
from instatrace.stats import Statistic
class testStatistic(unittest.TestCase):
def testName(self):
name = "test_stat"
s = Statistic(name, [])
self.assertEqual(name, s.name)
<commit_msg>Implement testStatistics with count, total, mean, and stddev checks<commit_after># Copyright (C) 2010 Peter Teichman
import unittest
from instatrace.stats import Statistics
class testStatistics(unittest.TestCase):
def testStats(self):
name = "test_stat"
s = Statistics()
s.add_sample(name, 1)
s.add_sample(name, 2)
s.add_sample(name, 3)
test_stat = s.statistics.get(name)
self.assertEqual(name, test_stat._name)
stats = test_stat.stats()
self.assertEqual(3, stats.get("count"))
self.assertEqual(6, stats.get("total"))
self.assertAlmostEqual(2, stats.get("mean"))
self.assertAlmostEqual(0.8164966, stats.get("stddev"))
|
37900e6c4bd8c16b2fef1be0c978f4f0567b09a3
|
nn/embedding/embeddings.py
|
nn/embedding/embeddings.py
|
import tensorflow as tf
from .. import var_init
def embeddings(*, id_space_size, embedding_size):
return tf.Variable(var_init.normal([id_space_size, embedding_size]))
|
import tensorflow as tf
from ..variable import variable
def embeddings(*, id_space_size, embedding_size):
return variable([id_space_size, embedding_size])
|
Use variable module instead of var_init
|
Use variable module instead of var_init
|
Python
|
unlicense
|
raviqqe/tensorflow-extenteten,raviqqe/tensorflow-extenteten
|
import tensorflow as tf
from .. import var_init
def embeddings(*, id_space_size, embedding_size):
return tf.Variable(var_init.normal([id_space_size, embedding_size]))
Use variable module instead of var_init
|
import tensorflow as tf
from ..variable import variable
def embeddings(*, id_space_size, embedding_size):
return variable([id_space_size, embedding_size])
|
<commit_before>import tensorflow as tf
from .. import var_init
def embeddings(*, id_space_size, embedding_size):
return tf.Variable(var_init.normal([id_space_size, embedding_size]))
<commit_msg>Use variable module instead of var_init<commit_after>
|
import tensorflow as tf
from ..variable import variable
def embeddings(*, id_space_size, embedding_size):
return variable([id_space_size, embedding_size])
|
import tensorflow as tf
from .. import var_init
def embeddings(*, id_space_size, embedding_size):
return tf.Variable(var_init.normal([id_space_size, embedding_size]))
Use variable module instead of var_initimport tensorflow as tf
from ..variable import variable
def embeddings(*, id_space_size, embedding_size):
return variable([id_space_size, embedding_size])
|
<commit_before>import tensorflow as tf
from .. import var_init
def embeddings(*, id_space_size, embedding_size):
return tf.Variable(var_init.normal([id_space_size, embedding_size]))
<commit_msg>Use variable module instead of var_init<commit_after>import tensorflow as tf
from ..variable import variable
def embeddings(*, id_space_size, embedding_size):
return variable([id_space_size, embedding_size])
|
0bfd715ca0bae0929f2a06bdb42bdb0c95aea6dd
|
grafeno/_parse_freeling.py
|
grafeno/_parse_freeling.py
|
#!/usr/bin/env python3
from subprocess import Popen, PIPE
import subprocess as subp
import json
import re
regex = re.compile('}\s*{')
def parse (sentence, lang):
'''Calls the freeling process to obtain the dependency parse of a text.'''
config = "grafeno/freeling_deps_"+lang+".cfg"
proc = Popen(["analyze", "--flush", "-f", config], stdin=PIPE, stdout=PIPE, stderr=PIPE)
data, err = proc.communicate(sentence.encode('UTF-8'))
return json.loads('['+regex.sub('},{',data.decode('UTF-8'))+']')
|
#!/usr/bin/env python3
from subprocess import Popen, PIPE
import subprocess as subp
import os
import json
import re
regex = re.compile('}\s*{')
def parse (sentence, lang):
'''Calls the freeling process to obtain the dependency parse of a text.'''
config = os.path.dirname(__file__)+"/freeling_deps_"+lang+".cfg"
proc = Popen(["analyze", "--flush", "-f", config], stdin=PIPE, stdout=PIPE, stderr=PIPE)
data, err = proc.communicate(sentence.encode('UTF-8'))
return json.loads('['+regex.sub('},{',data.decode('UTF-8'))+']')
|
Make freeling configuration available independent of execution path
|
Make freeling configuration available independent of execution path
|
Python
|
agpl-3.0
|
agarsev/grafeno,agarsev/grafeno,agarsev/grafeno
|
#!/usr/bin/env python3
from subprocess import Popen, PIPE
import subprocess as subp
import json
import re
regex = re.compile('}\s*{')
def parse (sentence, lang):
'''Calls the freeling process to obtain the dependency parse of a text.'''
config = "grafeno/freeling_deps_"+lang+".cfg"
proc = Popen(["analyze", "--flush", "-f", config], stdin=PIPE, stdout=PIPE, stderr=PIPE)
data, err = proc.communicate(sentence.encode('UTF-8'))
return json.loads('['+regex.sub('},{',data.decode('UTF-8'))+']')
Make freeling configuration available independent of execution path
|
#!/usr/bin/env python3
from subprocess import Popen, PIPE
import subprocess as subp
import os
import json
import re
regex = re.compile('}\s*{')
def parse (sentence, lang):
'''Calls the freeling process to obtain the dependency parse of a text.'''
config = os.path.dirname(__file__)+"/freeling_deps_"+lang+".cfg"
proc = Popen(["analyze", "--flush", "-f", config], stdin=PIPE, stdout=PIPE, stderr=PIPE)
data, err = proc.communicate(sentence.encode('UTF-8'))
return json.loads('['+regex.sub('},{',data.decode('UTF-8'))+']')
|
<commit_before>#!/usr/bin/env python3
from subprocess import Popen, PIPE
import subprocess as subp
import json
import re
regex = re.compile('}\s*{')
def parse (sentence, lang):
'''Calls the freeling process to obtain the dependency parse of a text.'''
config = "grafeno/freeling_deps_"+lang+".cfg"
proc = Popen(["analyze", "--flush", "-f", config], stdin=PIPE, stdout=PIPE, stderr=PIPE)
data, err = proc.communicate(sentence.encode('UTF-8'))
return json.loads('['+regex.sub('},{',data.decode('UTF-8'))+']')
<commit_msg>Make freeling configuration available independent of execution path<commit_after>
|
#!/usr/bin/env python3
from subprocess import Popen, PIPE
import subprocess as subp
import os
import json
import re
regex = re.compile('}\s*{')
def parse (sentence, lang):
'''Calls the freeling process to obtain the dependency parse of a text.'''
config = os.path.dirname(__file__)+"/freeling_deps_"+lang+".cfg"
proc = Popen(["analyze", "--flush", "-f", config], stdin=PIPE, stdout=PIPE, stderr=PIPE)
data, err = proc.communicate(sentence.encode('UTF-8'))
return json.loads('['+regex.sub('},{',data.decode('UTF-8'))+']')
|
#!/usr/bin/env python3
from subprocess import Popen, PIPE
import subprocess as subp
import json
import re
regex = re.compile('}\s*{')
def parse (sentence, lang):
'''Calls the freeling process to obtain the dependency parse of a text.'''
config = "grafeno/freeling_deps_"+lang+".cfg"
proc = Popen(["analyze", "--flush", "-f", config], stdin=PIPE, stdout=PIPE, stderr=PIPE)
data, err = proc.communicate(sentence.encode('UTF-8'))
return json.loads('['+regex.sub('},{',data.decode('UTF-8'))+']')
Make freeling configuration available independent of execution path#!/usr/bin/env python3
from subprocess import Popen, PIPE
import subprocess as subp
import os
import json
import re
regex = re.compile('}\s*{')
def parse (sentence, lang):
'''Calls the freeling process to obtain the dependency parse of a text.'''
config = os.path.dirname(__file__)+"/freeling_deps_"+lang+".cfg"
proc = Popen(["analyze", "--flush", "-f", config], stdin=PIPE, stdout=PIPE, stderr=PIPE)
data, err = proc.communicate(sentence.encode('UTF-8'))
return json.loads('['+regex.sub('},{',data.decode('UTF-8'))+']')
|
<commit_before>#!/usr/bin/env python3
from subprocess import Popen, PIPE
import subprocess as subp
import json
import re
regex = re.compile('}\s*{')
def parse (sentence, lang):
'''Calls the freeling process to obtain the dependency parse of a text.'''
config = "grafeno/freeling_deps_"+lang+".cfg"
proc = Popen(["analyze", "--flush", "-f", config], stdin=PIPE, stdout=PIPE, stderr=PIPE)
data, err = proc.communicate(sentence.encode('UTF-8'))
return json.loads('['+regex.sub('},{',data.decode('UTF-8'))+']')
<commit_msg>Make freeling configuration available independent of execution path<commit_after>#!/usr/bin/env python3
from subprocess import Popen, PIPE
import subprocess as subp
import os
import json
import re
regex = re.compile('}\s*{')
def parse (sentence, lang):
'''Calls the freeling process to obtain the dependency parse of a text.'''
config = os.path.dirname(__file__)+"/freeling_deps_"+lang+".cfg"
proc = Popen(["analyze", "--flush", "-f", config], stdin=PIPE, stdout=PIPE, stderr=PIPE)
data, err = proc.communicate(sentence.encode('UTF-8'))
return json.loads('['+regex.sub('},{',data.decode('UTF-8'))+']')
|
dc59e03337ef26ca7fbc3165c8723a58da796598
|
cloud_deploy.py
|
cloud_deploy.py
|
#!/usr/bin/env python
__metaclass__ = type
from argparse import ArgumentParser
import subprocess
import sys
from jujupy import (
CannotConnectEnv,
Environment,
until_timeout,
)
def deploy_stack(environment):
""""Deploy a test stack in the specified environment.
:param environment: The name of the desired environment.
"""
env = Environment.from_config(environment)
# Clean up any leftover junk
env.destroy_environment()
env.bootstrap()
try:
# wait for status info....
try:
try:
env.get_status()
except CannotConnectEnv:
print "Status got Unable to connect to env. Retrying..."
env.get_status()
env.wait_for_started()
except subprocess.CalledProcessError as e:
if getattr(e, 'stderr', None) is not None:
sys.stderr.write(e.stderr)
raise
finally:
env.destroy_environment()
def main():
parser = ArgumentParser('Test a cloud')
parser.add_argument('env', help='The juju environment to test')
args = parser.parse_args()
try:
deploy_stack(args.env)
except Exception as e:
print '%s: %s' % (type(e), e)
sys.exit(1)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
from __future__ import print_function
__metaclass__ = type
from argparse import ArgumentParser
import os
import subprocess
import sys
from jujupy import (
CannotConnectEnv,
Environment,
)
def deploy_stack(environment, debug):
""""Deploy a test stack in the specified environment.
:param environment: The name of the desired environment.
"""
env = Environment.from_config(environment)
env.client.debug = debug
# Clean up any leftover junk
env.destroy_environment()
env.bootstrap()
try:
# wait for status info....
try:
try:
env.get_status()
except CannotConnectEnv:
print("Status got Unable to connect to env. Retrying...")
env.get_status()
env.wait_for_started()
except subprocess.CalledProcessError as e:
if getattr(e, 'stderr', None) is not None:
sys.stderr.write(e.stderr)
raise
finally:
env.destroy_environment()
def main():
parser = ArgumentParser('Test a cloud')
parser.add_argument('env', help='The juju environment to test')
args = parser.parse_args()
debug = bool(os.environ.get('DEBUG') == 'true')
try:
deploy_stack(args.env, debug)
except Exception as e:
print('%s: %s' % (type(e), e))
sys.exit(1)
if __name__ == '__main__':
main()
|
Add support for -debug=true when testing clouds. --debug is still unsafe. We need to redirect sdtout to a log.
|
Add support for -debug=true when testing clouds. --debug is still unsafe.
We need to redirect sdtout to a log.
|
Python
|
agpl-3.0
|
mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju
|
#!/usr/bin/env python
__metaclass__ = type
from argparse import ArgumentParser
import subprocess
import sys
from jujupy import (
CannotConnectEnv,
Environment,
until_timeout,
)
def deploy_stack(environment):
""""Deploy a test stack in the specified environment.
:param environment: The name of the desired environment.
"""
env = Environment.from_config(environment)
# Clean up any leftover junk
env.destroy_environment()
env.bootstrap()
try:
# wait for status info....
try:
try:
env.get_status()
except CannotConnectEnv:
print "Status got Unable to connect to env. Retrying..."
env.get_status()
env.wait_for_started()
except subprocess.CalledProcessError as e:
if getattr(e, 'stderr', None) is not None:
sys.stderr.write(e.stderr)
raise
finally:
env.destroy_environment()
def main():
parser = ArgumentParser('Test a cloud')
parser.add_argument('env', help='The juju environment to test')
args = parser.parse_args()
try:
deploy_stack(args.env)
except Exception as e:
print '%s: %s' % (type(e), e)
sys.exit(1)
if __name__ == '__main__':
main()
Add support for -debug=true when testing clouds. --debug is still unsafe.
We need to redirect sdtout to a log.
|
#!/usr/bin/env python
from __future__ import print_function
__metaclass__ = type
from argparse import ArgumentParser
import os
import subprocess
import sys
from jujupy import (
CannotConnectEnv,
Environment,
)
def deploy_stack(environment, debug):
""""Deploy a test stack in the specified environment.
:param environment: The name of the desired environment.
"""
env = Environment.from_config(environment)
env.client.debug = debug
# Clean up any leftover junk
env.destroy_environment()
env.bootstrap()
try:
# wait for status info....
try:
try:
env.get_status()
except CannotConnectEnv:
print("Status got Unable to connect to env. Retrying...")
env.get_status()
env.wait_for_started()
except subprocess.CalledProcessError as e:
if getattr(e, 'stderr', None) is not None:
sys.stderr.write(e.stderr)
raise
finally:
env.destroy_environment()
def main():
parser = ArgumentParser('Test a cloud')
parser.add_argument('env', help='The juju environment to test')
args = parser.parse_args()
debug = bool(os.environ.get('DEBUG') == 'true')
try:
deploy_stack(args.env, debug)
except Exception as e:
print('%s: %s' % (type(e), e))
sys.exit(1)
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
__metaclass__ = type
from argparse import ArgumentParser
import subprocess
import sys
from jujupy import (
CannotConnectEnv,
Environment,
until_timeout,
)
def deploy_stack(environment):
""""Deploy a test stack in the specified environment.
:param environment: The name of the desired environment.
"""
env = Environment.from_config(environment)
# Clean up any leftover junk
env.destroy_environment()
env.bootstrap()
try:
# wait for status info....
try:
try:
env.get_status()
except CannotConnectEnv:
print "Status got Unable to connect to env. Retrying..."
env.get_status()
env.wait_for_started()
except subprocess.CalledProcessError as e:
if getattr(e, 'stderr', None) is not None:
sys.stderr.write(e.stderr)
raise
finally:
env.destroy_environment()
def main():
parser = ArgumentParser('Test a cloud')
parser.add_argument('env', help='The juju environment to test')
args = parser.parse_args()
try:
deploy_stack(args.env)
except Exception as e:
print '%s: %s' % (type(e), e)
sys.exit(1)
if __name__ == '__main__':
main()
<commit_msg>Add support for -debug=true when testing clouds. --debug is still unsafe.
We need to redirect sdtout to a log.<commit_after>
|
#!/usr/bin/env python
from __future__ import print_function
__metaclass__ = type
from argparse import ArgumentParser
import os
import subprocess
import sys
from jujupy import (
CannotConnectEnv,
Environment,
)
def deploy_stack(environment, debug):
""""Deploy a test stack in the specified environment.
:param environment: The name of the desired environment.
"""
env = Environment.from_config(environment)
env.client.debug = debug
# Clean up any leftover junk
env.destroy_environment()
env.bootstrap()
try:
# wait for status info....
try:
try:
env.get_status()
except CannotConnectEnv:
print("Status got Unable to connect to env. Retrying...")
env.get_status()
env.wait_for_started()
except subprocess.CalledProcessError as e:
if getattr(e, 'stderr', None) is not None:
sys.stderr.write(e.stderr)
raise
finally:
env.destroy_environment()
def main():
parser = ArgumentParser('Test a cloud')
parser.add_argument('env', help='The juju environment to test')
args = parser.parse_args()
debug = bool(os.environ.get('DEBUG') == 'true')
try:
deploy_stack(args.env, debug)
except Exception as e:
print('%s: %s' % (type(e), e))
sys.exit(1)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
__metaclass__ = type
from argparse import ArgumentParser
import subprocess
import sys
from jujupy import (
CannotConnectEnv,
Environment,
until_timeout,
)
def deploy_stack(environment):
""""Deploy a test stack in the specified environment.
:param environment: The name of the desired environment.
"""
env = Environment.from_config(environment)
# Clean up any leftover junk
env.destroy_environment()
env.bootstrap()
try:
# wait for status info....
try:
try:
env.get_status()
except CannotConnectEnv:
print "Status got Unable to connect to env. Retrying..."
env.get_status()
env.wait_for_started()
except subprocess.CalledProcessError as e:
if getattr(e, 'stderr', None) is not None:
sys.stderr.write(e.stderr)
raise
finally:
env.destroy_environment()
def main():
parser = ArgumentParser('Test a cloud')
parser.add_argument('env', help='The juju environment to test')
args = parser.parse_args()
try:
deploy_stack(args.env)
except Exception as e:
print '%s: %s' % (type(e), e)
sys.exit(1)
if __name__ == '__main__':
main()
Add support for -debug=true when testing clouds. --debug is still unsafe.
We need to redirect sdtout to a log.#!/usr/bin/env python
from __future__ import print_function
__metaclass__ = type
from argparse import ArgumentParser
import os
import subprocess
import sys
from jujupy import (
CannotConnectEnv,
Environment,
)
def deploy_stack(environment, debug):
""""Deploy a test stack in the specified environment.
:param environment: The name of the desired environment.
"""
env = Environment.from_config(environment)
env.client.debug = debug
# Clean up any leftover junk
env.destroy_environment()
env.bootstrap()
try:
# wait for status info....
try:
try:
env.get_status()
except CannotConnectEnv:
print("Status got Unable to connect to env. Retrying...")
env.get_status()
env.wait_for_started()
except subprocess.CalledProcessError as e:
if getattr(e, 'stderr', None) is not None:
sys.stderr.write(e.stderr)
raise
finally:
env.destroy_environment()
def main():
parser = ArgumentParser('Test a cloud')
parser.add_argument('env', help='The juju environment to test')
args = parser.parse_args()
debug = bool(os.environ.get('DEBUG') == 'true')
try:
deploy_stack(args.env, debug)
except Exception as e:
print('%s: %s' % (type(e), e))
sys.exit(1)
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
__metaclass__ = type
from argparse import ArgumentParser
import subprocess
import sys
from jujupy import (
CannotConnectEnv,
Environment,
until_timeout,
)
def deploy_stack(environment):
""""Deploy a test stack in the specified environment.
:param environment: The name of the desired environment.
"""
env = Environment.from_config(environment)
# Clean up any leftover junk
env.destroy_environment()
env.bootstrap()
try:
# wait for status info....
try:
try:
env.get_status()
except CannotConnectEnv:
print "Status got Unable to connect to env. Retrying..."
env.get_status()
env.wait_for_started()
except subprocess.CalledProcessError as e:
if getattr(e, 'stderr', None) is not None:
sys.stderr.write(e.stderr)
raise
finally:
env.destroy_environment()
def main():
parser = ArgumentParser('Test a cloud')
parser.add_argument('env', help='The juju environment to test')
args = parser.parse_args()
try:
deploy_stack(args.env)
except Exception as e:
print '%s: %s' % (type(e), e)
sys.exit(1)
if __name__ == '__main__':
main()
<commit_msg>Add support for -debug=true when testing clouds. --debug is still unsafe.
We need to redirect sdtout to a log.<commit_after>#!/usr/bin/env python
from __future__ import print_function
__metaclass__ = type
from argparse import ArgumentParser
import os
import subprocess
import sys
from jujupy import (
CannotConnectEnv,
Environment,
)
def deploy_stack(environment, debug):
""""Deploy a test stack in the specified environment.
:param environment: The name of the desired environment.
"""
env = Environment.from_config(environment)
env.client.debug = debug
# Clean up any leftover junk
env.destroy_environment()
env.bootstrap()
try:
# wait for status info....
try:
try:
env.get_status()
except CannotConnectEnv:
print("Status got Unable to connect to env. Retrying...")
env.get_status()
env.wait_for_started()
except subprocess.CalledProcessError as e:
if getattr(e, 'stderr', None) is not None:
sys.stderr.write(e.stderr)
raise
finally:
env.destroy_environment()
def main():
parser = ArgumentParser('Test a cloud')
parser.add_argument('env', help='The juju environment to test')
args = parser.parse_args()
debug = bool(os.environ.get('DEBUG') == 'true')
try:
deploy_stack(args.env, debug)
except Exception as e:
print('%s: %s' % (type(e), e))
sys.exit(1)
if __name__ == '__main__':
main()
|
7060a7c85cf82fae9e018f1c82a1d8181bd5214d
|
library/tests/factories.py
|
library/tests/factories.py
|
from django.conf import settings
import factory
from factory.fuzzy import FuzzyText
from ..models import Book, BookSpecimen
class BookFactory(factory.django.DjangoModelFactory):
title = factory.Sequence(lambda n: "Test book %03d" % n)
summary = "This is a test summary"
section = 1
lang = settings.LANGUAGE_CODE
class Meta:
model = Book
class BookSpecimenFactory(factory.django.DjangoModelFactory):
serial = FuzzyText(length=6)
book = factory.SubFactory(BookFactory)
class Meta:
model = BookSpecimen
|
from django.conf import settings
import factory
from factory.fuzzy import FuzzyText
from ..models import Book, BookSpecimen
class BookFactory(factory.django.DjangoModelFactory):
title = factory.Sequence(lambda n: "Test book {0}".format(n))
summary = "This is a test summary"
section = 1
lang = settings.LANGUAGE_CODE
class Meta:
model = Book
class BookSpecimenFactory(factory.django.DjangoModelFactory):
serial = FuzzyText(length=6)
book = factory.SubFactory(BookFactory)
class Meta:
model = BookSpecimen
|
Use format for string formatting
|
Use format for string formatting
|
Python
|
agpl-3.0
|
Lcaracol/ideasbox.lan,ideascube/ideascube,ideascube/ideascube,ideascube/ideascube,Lcaracol/ideasbox.lan,ideascube/ideascube,Lcaracol/ideasbox.lan
|
from django.conf import settings
import factory
from factory.fuzzy import FuzzyText
from ..models import Book, BookSpecimen
class BookFactory(factory.django.DjangoModelFactory):
title = factory.Sequence(lambda n: "Test book %03d" % n)
summary = "This is a test summary"
section = 1
lang = settings.LANGUAGE_CODE
class Meta:
model = Book
class BookSpecimenFactory(factory.django.DjangoModelFactory):
serial = FuzzyText(length=6)
book = factory.SubFactory(BookFactory)
class Meta:
model = BookSpecimen
Use format for string formatting
|
from django.conf import settings
import factory
from factory.fuzzy import FuzzyText
from ..models import Book, BookSpecimen
class BookFactory(factory.django.DjangoModelFactory):
title = factory.Sequence(lambda n: "Test book {0}".format(n))
summary = "This is a test summary"
section = 1
lang = settings.LANGUAGE_CODE
class Meta:
model = Book
class BookSpecimenFactory(factory.django.DjangoModelFactory):
serial = FuzzyText(length=6)
book = factory.SubFactory(BookFactory)
class Meta:
model = BookSpecimen
|
<commit_before>from django.conf import settings
import factory
from factory.fuzzy import FuzzyText
from ..models import Book, BookSpecimen
class BookFactory(factory.django.DjangoModelFactory):
title = factory.Sequence(lambda n: "Test book %03d" % n)
summary = "This is a test summary"
section = 1
lang = settings.LANGUAGE_CODE
class Meta:
model = Book
class BookSpecimenFactory(factory.django.DjangoModelFactory):
serial = FuzzyText(length=6)
book = factory.SubFactory(BookFactory)
class Meta:
model = BookSpecimen
<commit_msg>Use format for string formatting<commit_after>
|
from django.conf import settings
import factory
from factory.fuzzy import FuzzyText
from ..models import Book, BookSpecimen
class BookFactory(factory.django.DjangoModelFactory):
title = factory.Sequence(lambda n: "Test book {0}".format(n))
summary = "This is a test summary"
section = 1
lang = settings.LANGUAGE_CODE
class Meta:
model = Book
class BookSpecimenFactory(factory.django.DjangoModelFactory):
serial = FuzzyText(length=6)
book = factory.SubFactory(BookFactory)
class Meta:
model = BookSpecimen
|
from django.conf import settings
import factory
from factory.fuzzy import FuzzyText
from ..models import Book, BookSpecimen
class BookFactory(factory.django.DjangoModelFactory):
title = factory.Sequence(lambda n: "Test book %03d" % n)
summary = "This is a test summary"
section = 1
lang = settings.LANGUAGE_CODE
class Meta:
model = Book
class BookSpecimenFactory(factory.django.DjangoModelFactory):
serial = FuzzyText(length=6)
book = factory.SubFactory(BookFactory)
class Meta:
model = BookSpecimen
Use format for string formattingfrom django.conf import settings
import factory
from factory.fuzzy import FuzzyText
from ..models import Book, BookSpecimen
class BookFactory(factory.django.DjangoModelFactory):
title = factory.Sequence(lambda n: "Test book {0}".format(n))
summary = "This is a test summary"
section = 1
lang = settings.LANGUAGE_CODE
class Meta:
model = Book
class BookSpecimenFactory(factory.django.DjangoModelFactory):
serial = FuzzyText(length=6)
book = factory.SubFactory(BookFactory)
class Meta:
model = BookSpecimen
|
<commit_before>from django.conf import settings
import factory
from factory.fuzzy import FuzzyText
from ..models import Book, BookSpecimen
class BookFactory(factory.django.DjangoModelFactory):
title = factory.Sequence(lambda n: "Test book %03d" % n)
summary = "This is a test summary"
section = 1
lang = settings.LANGUAGE_CODE
class Meta:
model = Book
class BookSpecimenFactory(factory.django.DjangoModelFactory):
serial = FuzzyText(length=6)
book = factory.SubFactory(BookFactory)
class Meta:
model = BookSpecimen
<commit_msg>Use format for string formatting<commit_after>from django.conf import settings
import factory
from factory.fuzzy import FuzzyText
from ..models import Book, BookSpecimen
class BookFactory(factory.django.DjangoModelFactory):
title = factory.Sequence(lambda n: "Test book {0}".format(n))
summary = "This is a test summary"
section = 1
lang = settings.LANGUAGE_CODE
class Meta:
model = Book
class BookSpecimenFactory(factory.django.DjangoModelFactory):
serial = FuzzyText(length=6)
book = factory.SubFactory(BookFactory)
class Meta:
model = BookSpecimen
|
f2a47110c9416a1efcf2a4346303377afedc2315
|
builders/horizons_telnet.py
|
builders/horizons_telnet.py
|
#!/usr/bin/env python2.7
#import argparse
from telnetlib import Telnet
def main(in_path, out_path):
lines = read_lines(open(in_path))
tn = Telnet('horizons.jpl.nasa.gov', 6775)
out = open(out_path, 'w')
for line in lines:
print(repr(line))
tn.write(line.encode('ascii') + b'\r\n')
data = tn.read_until(b'DUMMY PATTERN', 5.0).decode('ascii')
print(repr(data))
out.write(data)
out.flush()
def read_lines(f):
for line in f:
line = line.strip()
if (not line) or line.startswith('#'):
continue
yield line
if __name__ == '__main__':
try:
main('horizons_input.txt', 'horizons_output.txt')
except EOFError:
print
print('EOF')
|
#!/usr/bin/env python2.7
#import argparse
from telnetlib import Telnet
def main(in_path, out_path):
lines = read_lines(open(in_path))
tn = Telnet('horizons.jpl.nasa.gov', 6775)
out = open(out_path, 'w')
for line in lines:
print(repr(line))
tn.write(line.encode('ascii') + b'\r\n')
index, match, data1 = tn.expect([b'.'], 5.0)
data2 = tn.read_until(b'DUMMY PATTERN', 1.0)
data = (data1 + data2).decode('ascii')
print(repr(data))
out.write(data)
out.flush()
def read_lines(f):
for line in f:
line = line.strip()
if (not line) or line.startswith('#'):
continue
yield line
if __name__ == '__main__':
try:
main('horizons_input.txt', 'horizons_output.txt')
except EOFError:
print
print('EOF')
|
Speed up the telnet process for HORIZONS
|
Speed up the telnet process for HORIZONS
Be very patient waiting for the other end to start its reply, but then
only wait 1s for the rest of the data.
|
Python
|
mit
|
exoanalytic/python-skyfield,ozialien/python-skyfield,exoanalytic/python-skyfield,skyfielders/python-skyfield,ozialien/python-skyfield,GuidoBR/python-skyfield,GuidoBR/python-skyfield,skyfielders/python-skyfield
|
#!/usr/bin/env python2.7
#import argparse
from telnetlib import Telnet
def main(in_path, out_path):
lines = read_lines(open(in_path))
tn = Telnet('horizons.jpl.nasa.gov', 6775)
out = open(out_path, 'w')
for line in lines:
print(repr(line))
tn.write(line.encode('ascii') + b'\r\n')
data = tn.read_until(b'DUMMY PATTERN', 5.0).decode('ascii')
print(repr(data))
out.write(data)
out.flush()
def read_lines(f):
for line in f:
line = line.strip()
if (not line) or line.startswith('#'):
continue
yield line
if __name__ == '__main__':
try:
main('horizons_input.txt', 'horizons_output.txt')
except EOFError:
print
print('EOF')
Speed up the telnet process for HORIZONS
Be very patient waiting for the other end to start its reply, but then
only wait 1s for the rest of the data.
|
#!/usr/bin/env python2.7
#import argparse
from telnetlib import Telnet
def main(in_path, out_path):
lines = read_lines(open(in_path))
tn = Telnet('horizons.jpl.nasa.gov', 6775)
out = open(out_path, 'w')
for line in lines:
print(repr(line))
tn.write(line.encode('ascii') + b'\r\n')
index, match, data1 = tn.expect([b'.'], 5.0)
data2 = tn.read_until(b'DUMMY PATTERN', 1.0)
data = (data1 + data2).decode('ascii')
print(repr(data))
out.write(data)
out.flush()
def read_lines(f):
for line in f:
line = line.strip()
if (not line) or line.startswith('#'):
continue
yield line
if __name__ == '__main__':
try:
main('horizons_input.txt', 'horizons_output.txt')
except EOFError:
print
print('EOF')
|
<commit_before>#!/usr/bin/env python2.7
#import argparse
from telnetlib import Telnet
def main(in_path, out_path):
lines = read_lines(open(in_path))
tn = Telnet('horizons.jpl.nasa.gov', 6775)
out = open(out_path, 'w')
for line in lines:
print(repr(line))
tn.write(line.encode('ascii') + b'\r\n')
data = tn.read_until(b'DUMMY PATTERN', 5.0).decode('ascii')
print(repr(data))
out.write(data)
out.flush()
def read_lines(f):
for line in f:
line = line.strip()
if (not line) or line.startswith('#'):
continue
yield line
if __name__ == '__main__':
try:
main('horizons_input.txt', 'horizons_output.txt')
except EOFError:
print
print('EOF')
<commit_msg>Speed up the telnet process for HORIZONS
Be very patient waiting for the other end to start its reply, but then
only wait 1s for the rest of the data.<commit_after>
|
#!/usr/bin/env python2.7
#import argparse
from telnetlib import Telnet
def main(in_path, out_path):
    """Replay the commands from *in_path* against the JPL HORIZONS telnet
    service and record everything it sends back into *out_path*.

    :param in_path: text file with one HORIZONS command per line (blank
        lines and '#' comments are skipped by read_lines)
    :param out_path: file that receives the raw session transcript
    """
    tn = Telnet('horizons.jpl.nasa.gov', 6775)
    # Context managers close both file handles even if the telnet session
    # raises; the original leaked the input and output files on error.
    with open(in_path) as in_file, open(out_path, 'w') as out:
        for line in read_lines(in_file):
            print(repr(line))
            tn.write(line.encode('ascii') + b'\r\n')
            # Wait patiently (5 s) for the first byte of the reply, then
            # only 1 s for the remainder -- much faster than a flat 5 s
            # read_until per command.
            index, match, data1 = tn.expect([b'.'], 5.0)
            data2 = tn.read_until(b'DUMMY PATTERN', 1.0)
            data = (data1 + data2).decode('ascii')
            print(repr(data))
            out.write(data)
            out.flush()
def read_lines(f):
    """Yield the meaningful lines of *f*: stripped of surrounding
    whitespace, skipping blank lines and '#' comments."""
    for raw in f:
        stripped = raw.strip()
        if stripped and not stripped.startswith('#'):
            yield stripped
# Script entry point: replay horizons_input.txt against HORIZONS and
# capture the session into horizons_output.txt.
if __name__ == '__main__':
    try:
        main('horizons_input.txt', 'horizons_output.txt')
    except EOFError:
        # Remote end closed the connection; report it instead of dumping a
        # traceback.  NOTE: the bare `print` is a Python 2 statement that
        # emits a blank line -- this script targets python2.7 (see shebang).
        print
        print('EOF')
|
#!/usr/bin/env python2.7
#import argparse
from telnetlib import Telnet
def main(in_path, out_path):
lines = read_lines(open(in_path))
tn = Telnet('horizons.jpl.nasa.gov', 6775)
out = open(out_path, 'w')
for line in lines:
print(repr(line))
tn.write(line.encode('ascii') + b'\r\n')
data = tn.read_until(b'DUMMY PATTERN', 5.0).decode('ascii')
print(repr(data))
out.write(data)
out.flush()
def read_lines(f):
for line in f:
line = line.strip()
if (not line) or line.startswith('#'):
continue
yield line
if __name__ == '__main__':
try:
main('horizons_input.txt', 'horizons_output.txt')
except EOFError:
print
print('EOF')
Speed up the telnet process for HORIZONS
Be very patient waiting for the other end to start its reply, but then
only wait 1s for the rest of the data.#!/usr/bin/env python2.7
#import argparse
from telnetlib import Telnet
def main(in_path, out_path):
lines = read_lines(open(in_path))
tn = Telnet('horizons.jpl.nasa.gov', 6775)
out = open(out_path, 'w')
for line in lines:
print(repr(line))
tn.write(line.encode('ascii') + b'\r\n')
index, match, data1 = tn.expect([b'.'], 5.0)
data2 = tn.read_until(b'DUMMY PATTERN', 1.0)
data = (data1 + data2).decode('ascii')
print(repr(data))
out.write(data)
out.flush()
def read_lines(f):
for line in f:
line = line.strip()
if (not line) or line.startswith('#'):
continue
yield line
if __name__ == '__main__':
try:
main('horizons_input.txt', 'horizons_output.txt')
except EOFError:
print
print('EOF')
|
<commit_before>#!/usr/bin/env python2.7
#import argparse
from telnetlib import Telnet
def main(in_path, out_path):
lines = read_lines(open(in_path))
tn = Telnet('horizons.jpl.nasa.gov', 6775)
out = open(out_path, 'w')
for line in lines:
print(repr(line))
tn.write(line.encode('ascii') + b'\r\n')
data = tn.read_until(b'DUMMY PATTERN', 5.0).decode('ascii')
print(repr(data))
out.write(data)
out.flush()
def read_lines(f):
for line in f:
line = line.strip()
if (not line) or line.startswith('#'):
continue
yield line
if __name__ == '__main__':
try:
main('horizons_input.txt', 'horizons_output.txt')
except EOFError:
print
print('EOF')
<commit_msg>Speed up the telnet process for HORIZONS
Be very patient waiting for the other end to start its reply, but then
only wait 1s for the rest of the data.<commit_after>#!/usr/bin/env python2.7
#import argparse
from telnetlib import Telnet
def main(in_path, out_path):
lines = read_lines(open(in_path))
tn = Telnet('horizons.jpl.nasa.gov', 6775)
out = open(out_path, 'w')
for line in lines:
print(repr(line))
tn.write(line.encode('ascii') + b'\r\n')
index, match, data1 = tn.expect([b'.'], 5.0)
data2 = tn.read_until(b'DUMMY PATTERN', 1.0)
data = (data1 + data2).decode('ascii')
print(repr(data))
out.write(data)
out.flush()
def read_lines(f):
for line in f:
line = line.strip()
if (not line) or line.startswith('#'):
continue
yield line
if __name__ == '__main__':
try:
main('horizons_input.txt', 'horizons_output.txt')
except EOFError:
print
print('EOF')
|
565841a8ccfe9675cfbee89564506bc1967314b7
|
trunk/scons-tools/gmcs.py
|
trunk/scons-tools/gmcs.py
|
import os.path
import SCons.Builder
import SCons.Node.FS
import SCons.Util
csccom = "$CSC $CSCFLAGS $_CSCLIBPATH -r:$_CSCLIBS -out:${TARGET.abspath} $SOURCES"
csclibcom = "$CSC -t:library $CSCLIBFLAGS $_CSCLIBPATH $_CSCLIBS -out:${TARGET.abspath} $SOURCES"
McsBuilder = SCons.Builder.Builder(action = '$CSCCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.exe')
McsLibBuilder = SCons.Builder.Builder(action = '$CSCLIBCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.dll')
def generate(env):
env['BUILDERS']['CLIProgram'] = McsBuilder
env['BUILDERS']['CLILibrary'] = McsLibBuilder
env['CSC'] = 'gmcs'
env['_CSCLIBS'] = "${_stripixes('-r:', CILLIBS, '', '-r', '', __env__)}"
env['_CSCLIBPATH'] = "${_stripixes('-lib:', CILLIBPATH, '', '-r', '', __env__)}"
env['CSCFLAGS'] = SCons.Util.CLVar('')
env['CSCCOM'] = SCons.Action.Action(csccom)
env['CSCLIBCOM'] = SCons.Action.Action(csclibcom)
def exists(env):
return internal_zip or env.Detect('gmcs')
|
import os.path
import SCons.Builder
import SCons.Node.FS
import SCons.Util
csccom = "$CSC $CSCFLAGS $_CSCLIBPATH -r:$_CSCLIBS -out:${TARGET.abspath} $SOURCES"
csclibcom = "$CSC -t:library $CSCLIBFLAGS $_CSCLIBPATH $_CSCLIBS -out:${TARGET.abspath} $SOURCES"
McsBuilder = SCons.Builder.Builder(action = '$CSCCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.exe')
McsLibBuilder = SCons.Builder.Builder(action = '$CSCLIBCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.dll')
def generate(env):
env['BUILDERS']['CLIProgram'] = McsBuilder
env['BUILDERS']['CLILibrary'] = McsLibBuilder
env['CSC'] = 'gmcs'
env['_CSCLIBS'] = "${_stripixes('-r:', CILLIBS, '', '-r', '', __env__)}"
env['_CSCLIBPATH'] = "${_stripixes('-lib:', CILLIBPATH, '', '-r', '', __env__)}"
env['CSCFLAGS'] = SCons.Util.CLVar('-platform:anycpu')
env['CSCLIBFLAGS'] = SCons.Util.CLVar('-platform:anycpu')
env['CSCCOM'] = SCons.Action.Action(csccom)
env['CSCLIBCOM'] = SCons.Action.Action(csclibcom)
def exists(env):
return internal_zip or env.Detect('gmcs')
|
Use -platform:anycpu while compiling .NET assemblies
|
Use -platform:anycpu while compiling .NET assemblies
git-svn-id: 8d82213adbbc6b1538a984bace977d31fcb31691@349 2f5d681c-ba19-11dd-a503-ed2d4bea8bb5
|
Python
|
lgpl-2.1
|
shutej/tapcfg,shutej/tapcfg,shutej/tapcfg,shutej/tapcfg,shutej/tapcfg,shutej/tapcfg,shutej/tapcfg
|
import os.path
import SCons.Builder
import SCons.Node.FS
import SCons.Util
csccom = "$CSC $CSCFLAGS $_CSCLIBPATH -r:$_CSCLIBS -out:${TARGET.abspath} $SOURCES"
csclibcom = "$CSC -t:library $CSCLIBFLAGS $_CSCLIBPATH $_CSCLIBS -out:${TARGET.abspath} $SOURCES"
McsBuilder = SCons.Builder.Builder(action = '$CSCCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.exe')
McsLibBuilder = SCons.Builder.Builder(action = '$CSCLIBCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.dll')
def generate(env):
env['BUILDERS']['CLIProgram'] = McsBuilder
env['BUILDERS']['CLILibrary'] = McsLibBuilder
env['CSC'] = 'gmcs'
env['_CSCLIBS'] = "${_stripixes('-r:', CILLIBS, '', '-r', '', __env__)}"
env['_CSCLIBPATH'] = "${_stripixes('-lib:', CILLIBPATH, '', '-r', '', __env__)}"
env['CSCFLAGS'] = SCons.Util.CLVar('')
env['CSCCOM'] = SCons.Action.Action(csccom)
env['CSCLIBCOM'] = SCons.Action.Action(csclibcom)
def exists(env):
return internal_zip or env.Detect('gmcs')
Use -platform:anycpu while compiling .NET assemblies
git-svn-id: 8d82213adbbc6b1538a984bace977d31fcb31691@349 2f5d681c-ba19-11dd-a503-ed2d4bea8bb5
|
import os.path
import SCons.Builder
import SCons.Node.FS
import SCons.Util
csccom = "$CSC $CSCFLAGS $_CSCLIBPATH -r:$_CSCLIBS -out:${TARGET.abspath} $SOURCES"
csclibcom = "$CSC -t:library $CSCLIBFLAGS $_CSCLIBPATH $_CSCLIBS -out:${TARGET.abspath} $SOURCES"
McsBuilder = SCons.Builder.Builder(action = '$CSCCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.exe')
McsLibBuilder = SCons.Builder.Builder(action = '$CSCLIBCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.dll')
def generate(env):
env['BUILDERS']['CLIProgram'] = McsBuilder
env['BUILDERS']['CLILibrary'] = McsLibBuilder
env['CSC'] = 'gmcs'
env['_CSCLIBS'] = "${_stripixes('-r:', CILLIBS, '', '-r', '', __env__)}"
env['_CSCLIBPATH'] = "${_stripixes('-lib:', CILLIBPATH, '', '-r', '', __env__)}"
env['CSCFLAGS'] = SCons.Util.CLVar('-platform:anycpu')
env['CSCLIBFLAGS'] = SCons.Util.CLVar('-platform:anycpu')
env['CSCCOM'] = SCons.Action.Action(csccom)
env['CSCLIBCOM'] = SCons.Action.Action(csclibcom)
def exists(env):
return internal_zip or env.Detect('gmcs')
|
<commit_before>import os.path
import SCons.Builder
import SCons.Node.FS
import SCons.Util
csccom = "$CSC $CSCFLAGS $_CSCLIBPATH -r:$_CSCLIBS -out:${TARGET.abspath} $SOURCES"
csclibcom = "$CSC -t:library $CSCLIBFLAGS $_CSCLIBPATH $_CSCLIBS -out:${TARGET.abspath} $SOURCES"
McsBuilder = SCons.Builder.Builder(action = '$CSCCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.exe')
McsLibBuilder = SCons.Builder.Builder(action = '$CSCLIBCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.dll')
def generate(env):
env['BUILDERS']['CLIProgram'] = McsBuilder
env['BUILDERS']['CLILibrary'] = McsLibBuilder
env['CSC'] = 'gmcs'
env['_CSCLIBS'] = "${_stripixes('-r:', CILLIBS, '', '-r', '', __env__)}"
env['_CSCLIBPATH'] = "${_stripixes('-lib:', CILLIBPATH, '', '-r', '', __env__)}"
env['CSCFLAGS'] = SCons.Util.CLVar('')
env['CSCCOM'] = SCons.Action.Action(csccom)
env['CSCLIBCOM'] = SCons.Action.Action(csclibcom)
def exists(env):
return internal_zip or env.Detect('gmcs')
<commit_msg>Use -platform:anycpu while compiling .NET assemblies
git-svn-id: 8d82213adbbc6b1538a984bace977d31fcb31691@349 2f5d681c-ba19-11dd-a503-ed2d4bea8bb5<commit_after>
|
import os.path
import SCons.Builder
import SCons.Node.FS
import SCons.Util
# Command lines for building a CLI executable and a CLI library; SCons
# substitutes the $-variables that generate() configures below.
csccom = "$CSC $CSCFLAGS $_CSCLIBPATH -r:$_CSCLIBS -out:${TARGET.abspath} $SOURCES"
csclibcom = "$CSC -t:library $CSCLIBFLAGS $_CSCLIBPATH $_CSCLIBS -out:${TARGET.abspath} $SOURCES"
# Builder for .exe programs compiled from C# sources.
McsBuilder = SCons.Builder.Builder(action = '$CSCCOM',
                   source_factory = SCons.Node.FS.default_fs.Entry,
                   suffix = '.exe')
# Builder for .dll libraries compiled from C# sources.
McsLibBuilder = SCons.Builder.Builder(action = '$CSCLIBCOM',
                   source_factory = SCons.Node.FS.default_fs.Entry,
                   suffix = '.dll')
def generate(env):
    """SCons tool entry point: register the C# builders and default
    compiler settings on the construction environment *env*."""
    env['BUILDERS']['CLIProgram'] = McsBuilder
    env['BUILDERS']['CLILibrary'] = McsLibBuilder
    # Compile with Mono's gmcs compiler.
    env['CSC'] = 'gmcs'
    # Expand CILLIBS / CILLIBPATH into -r: / -lib: compiler arguments.
    env['_CSCLIBS'] = "${_stripixes('-r:', CILLIBS, '', '-r', '', __env__)}"
    env['_CSCLIBPATH'] = "${_stripixes('-lib:', CILLIBPATH, '', '-r', '', __env__)}"
    # Target AnyCPU so the produced assemblies run on 32- and 64-bit CLRs.
    env['CSCFLAGS'] = SCons.Util.CLVar('-platform:anycpu')
    env['CSCLIBFLAGS'] = SCons.Util.CLVar('-platform:anycpu')
    env['CSCCOM'] = SCons.Action.Action(csccom)
    env['CSCLIBCOM'] = SCons.Action.Action(csclibcom)
def exists(env):
    """Tell SCons whether this tool is usable: truthy (the compiler path)
    when gmcs can be detected on *env*, else a false value.

    Bug fix: the original returned ``internal_zip or env.Detect('gmcs')``,
    but ``internal_zip`` is never defined anywhere in this module, so
    calling exists() always raised NameError before reaching Detect().
    """
    return env.Detect('gmcs')
|
import os.path
import SCons.Builder
import SCons.Node.FS
import SCons.Util
csccom = "$CSC $CSCFLAGS $_CSCLIBPATH -r:$_CSCLIBS -out:${TARGET.abspath} $SOURCES"
csclibcom = "$CSC -t:library $CSCLIBFLAGS $_CSCLIBPATH $_CSCLIBS -out:${TARGET.abspath} $SOURCES"
McsBuilder = SCons.Builder.Builder(action = '$CSCCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.exe')
McsLibBuilder = SCons.Builder.Builder(action = '$CSCLIBCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.dll')
def generate(env):
env['BUILDERS']['CLIProgram'] = McsBuilder
env['BUILDERS']['CLILibrary'] = McsLibBuilder
env['CSC'] = 'gmcs'
env['_CSCLIBS'] = "${_stripixes('-r:', CILLIBS, '', '-r', '', __env__)}"
env['_CSCLIBPATH'] = "${_stripixes('-lib:', CILLIBPATH, '', '-r', '', __env__)}"
env['CSCFLAGS'] = SCons.Util.CLVar('')
env['CSCCOM'] = SCons.Action.Action(csccom)
env['CSCLIBCOM'] = SCons.Action.Action(csclibcom)
def exists(env):
return internal_zip or env.Detect('gmcs')
Use -platform:anycpu while compiling .NET assemblies
git-svn-id: 8d82213adbbc6b1538a984bace977d31fcb31691@349 2f5d681c-ba19-11dd-a503-ed2d4bea8bb5import os.path
import SCons.Builder
import SCons.Node.FS
import SCons.Util
csccom = "$CSC $CSCFLAGS $_CSCLIBPATH -r:$_CSCLIBS -out:${TARGET.abspath} $SOURCES"
csclibcom = "$CSC -t:library $CSCLIBFLAGS $_CSCLIBPATH $_CSCLIBS -out:${TARGET.abspath} $SOURCES"
McsBuilder = SCons.Builder.Builder(action = '$CSCCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.exe')
McsLibBuilder = SCons.Builder.Builder(action = '$CSCLIBCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.dll')
def generate(env):
env['BUILDERS']['CLIProgram'] = McsBuilder
env['BUILDERS']['CLILibrary'] = McsLibBuilder
env['CSC'] = 'gmcs'
env['_CSCLIBS'] = "${_stripixes('-r:', CILLIBS, '', '-r', '', __env__)}"
env['_CSCLIBPATH'] = "${_stripixes('-lib:', CILLIBPATH, '', '-r', '', __env__)}"
env['CSCFLAGS'] = SCons.Util.CLVar('-platform:anycpu')
env['CSCLIBFLAGS'] = SCons.Util.CLVar('-platform:anycpu')
env['CSCCOM'] = SCons.Action.Action(csccom)
env['CSCLIBCOM'] = SCons.Action.Action(csclibcom)
def exists(env):
return internal_zip or env.Detect('gmcs')
|
<commit_before>import os.path
import SCons.Builder
import SCons.Node.FS
import SCons.Util
csccom = "$CSC $CSCFLAGS $_CSCLIBPATH -r:$_CSCLIBS -out:${TARGET.abspath} $SOURCES"
csclibcom = "$CSC -t:library $CSCLIBFLAGS $_CSCLIBPATH $_CSCLIBS -out:${TARGET.abspath} $SOURCES"
McsBuilder = SCons.Builder.Builder(action = '$CSCCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.exe')
McsLibBuilder = SCons.Builder.Builder(action = '$CSCLIBCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.dll')
def generate(env):
env['BUILDERS']['CLIProgram'] = McsBuilder
env['BUILDERS']['CLILibrary'] = McsLibBuilder
env['CSC'] = 'gmcs'
env['_CSCLIBS'] = "${_stripixes('-r:', CILLIBS, '', '-r', '', __env__)}"
env['_CSCLIBPATH'] = "${_stripixes('-lib:', CILLIBPATH, '', '-r', '', __env__)}"
env['CSCFLAGS'] = SCons.Util.CLVar('')
env['CSCCOM'] = SCons.Action.Action(csccom)
env['CSCLIBCOM'] = SCons.Action.Action(csclibcom)
def exists(env):
return internal_zip or env.Detect('gmcs')
<commit_msg>Use -platform:anycpu while compiling .NET assemblies
git-svn-id: 8d82213adbbc6b1538a984bace977d31fcb31691@349 2f5d681c-ba19-11dd-a503-ed2d4bea8bb5<commit_after>import os.path
import SCons.Builder
import SCons.Node.FS
import SCons.Util
csccom = "$CSC $CSCFLAGS $_CSCLIBPATH -r:$_CSCLIBS -out:${TARGET.abspath} $SOURCES"
csclibcom = "$CSC -t:library $CSCLIBFLAGS $_CSCLIBPATH $_CSCLIBS -out:${TARGET.abspath} $SOURCES"
McsBuilder = SCons.Builder.Builder(action = '$CSCCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.exe')
McsLibBuilder = SCons.Builder.Builder(action = '$CSCLIBCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.dll')
def generate(env):
env['BUILDERS']['CLIProgram'] = McsBuilder
env['BUILDERS']['CLILibrary'] = McsLibBuilder
env['CSC'] = 'gmcs'
env['_CSCLIBS'] = "${_stripixes('-r:', CILLIBS, '', '-r', '', __env__)}"
env['_CSCLIBPATH'] = "${_stripixes('-lib:', CILLIBPATH, '', '-r', '', __env__)}"
env['CSCFLAGS'] = SCons.Util.CLVar('-platform:anycpu')
env['CSCLIBFLAGS'] = SCons.Util.CLVar('-platform:anycpu')
env['CSCCOM'] = SCons.Action.Action(csccom)
env['CSCLIBCOM'] = SCons.Action.Action(csclibcom)
def exists(env):
return internal_zip or env.Detect('gmcs')
|
63ce9ac2a46f74704810d62e22c0b75ca071442a
|
minesweeper/minesweeper.py
|
minesweeper/minesweeper.py
|
import re
class InvalidBoard(ValueError):
pass
def board(b):
if not is_valid_board(b):
raise InvalidBoard("Board is malformed and thus invalid")
b = [[ch for ch in row] for row in b]
for i in range(1, len(b)-1):
for j in range(1, len(b[0])-1):
if b[i][j] == " ":
m = "".join(b[i-1][j-1:j+2] + b[i][j-1:j+2] + b[i+1][j-1:j+2])
count = m.count("*")
if count:
b[i][j] = str(count)
return list(map("".join, b))
def is_valid_board(b):
width = "{" + str(len(b[0]) - 2) + "}"
height = "{" + str(len(b) - 2) + "}"
r = re.compile("^(\+-{w}\+)(\|[ *]{w}\|){h}(\+-{w}\+)$".format(w=width,
h=height))
return bool(r.match("".join(b)))
|
import re
class InvalidBoard(ValueError):
pass
def board(b):
if not is_valid_board(b):
raise InvalidBoard("Board is malformed and thus invalid")
b = [[ch for ch in row] for row in b]
for i in range(1, len(b)-1):
for j in range(1, len(b[0])-1):
if b[i][j] == " ":
m = "".join(b[i-1][j-1:j+2] + b[i][j-1:j+2] + b[i+1][j-1:j+2])
count = m.count("*")
if count:
b[i][j] = str(count)
return list(map("".join, b))
def is_valid_board(b):
width = "{" + str(len(b[0]) - 2) + "}"
height = "{" + str(len(b) - 2) + "}"
r = re.compile("^(\+-{w}\+)(\|[ *]{w}\|){h}(\+-{w}\+)$".format(w=width,
h=height))
# bool is technically redundant here, but I'd rather that this function
# return an explicit True/False
return bool(r.match("".join(b)))
|
Add note regarding use of bool in validation
|
Add note regarding use of bool in validation
|
Python
|
agpl-3.0
|
CubicComet/exercism-python-solutions
|
import re
class InvalidBoard(ValueError):
pass
def board(b):
if not is_valid_board(b):
raise InvalidBoard("Board is malformed and thus invalid")
b = [[ch for ch in row] for row in b]
for i in range(1, len(b)-1):
for j in range(1, len(b[0])-1):
if b[i][j] == " ":
m = "".join(b[i-1][j-1:j+2] + b[i][j-1:j+2] + b[i+1][j-1:j+2])
count = m.count("*")
if count:
b[i][j] = str(count)
return list(map("".join, b))
def is_valid_board(b):
width = "{" + str(len(b[0]) - 2) + "}"
height = "{" + str(len(b) - 2) + "}"
r = re.compile("^(\+-{w}\+)(\|[ *]{w}\|){h}(\+-{w}\+)$".format(w=width,
h=height))
return bool(r.match("".join(b)))
Add note regarding use of bool in validation
|
import re
class InvalidBoard(ValueError):
pass
def board(b):
if not is_valid_board(b):
raise InvalidBoard("Board is malformed and thus invalid")
b = [[ch for ch in row] for row in b]
for i in range(1, len(b)-1):
for j in range(1, len(b[0])-1):
if b[i][j] == " ":
m = "".join(b[i-1][j-1:j+2] + b[i][j-1:j+2] + b[i+1][j-1:j+2])
count = m.count("*")
if count:
b[i][j] = str(count)
return list(map("".join, b))
def is_valid_board(b):
width = "{" + str(len(b[0]) - 2) + "}"
height = "{" + str(len(b) - 2) + "}"
r = re.compile("^(\+-{w}\+)(\|[ *]{w}\|){h}(\+-{w}\+)$".format(w=width,
h=height))
# bool is technically redundant here, but I'd rather that this function
# return an explicit True/False
return bool(r.match("".join(b)))
|
<commit_before>import re
class InvalidBoard(ValueError):
pass
def board(b):
if not is_valid_board(b):
raise InvalidBoard("Board is malformed and thus invalid")
b = [[ch for ch in row] for row in b]
for i in range(1, len(b)-1):
for j in range(1, len(b[0])-1):
if b[i][j] == " ":
m = "".join(b[i-1][j-1:j+2] + b[i][j-1:j+2] + b[i+1][j-1:j+2])
count = m.count("*")
if count:
b[i][j] = str(count)
return list(map("".join, b))
def is_valid_board(b):
width = "{" + str(len(b[0]) - 2) + "}"
height = "{" + str(len(b) - 2) + "}"
r = re.compile("^(\+-{w}\+)(\|[ *]{w}\|){h}(\+-{w}\+)$".format(w=width,
h=height))
return bool(r.match("".join(b)))
<commit_msg>Add note regarding use of bool in validation<commit_after>
|
import re
class InvalidBoard(ValueError):
    """Raised by board() when the input rows do not form a well-formed,
    fully bordered minesweeper board."""
    pass
def board(b):
    """Annotate every blank interior square of a minesweeper board with
    the count of adjacent '*' mines; squares with no adjacent mines stay
    blank.  Border rows/columns are left untouched.

    :param b: list of equal-length strings forming a bordered board
    :returns: new list of strings with mine counts filled in
    :raises InvalidBoard: if the input is not a well-formed board
    """
    if not is_valid_board(b):
        raise InvalidBoard("Board is malformed and thus invalid")
    grid = [list(row) for row in b]
    height = len(grid)
    width = len(grid[0])
    for row in range(1, height - 1):
        for col in range(1, width - 1):
            if grid[row][col] != " ":
                continue
            # 3x3 neighbourhood (includes the centre, which is a space
            # and therefore never counted as a mine).
            neighbourhood = (grid[row - 1][col - 1:col + 2]
                             + grid[row][col - 1:col + 2]
                             + grid[row + 1][col - 1:col + 2])
            mines = neighbourhood.count("*")
            if mines:
                grid[row][col] = str(mines)
    return ["".join(row) for row in grid]
def is_valid_board(b):
    """Return True when *b* is a correctly framed minesweeper board:
    '+' corners, '-' top and bottom edges, '|' sides, and an interior
    containing only spaces and '*' mines.

    :param b: list of strings (board rows)
    """
    width = "{" + str(len(b[0]) - 2) + "}"
    height = "{" + str(len(b) - 2) + "}"
    # Raw string: '\+' and '\|' in a plain literal are invalid escape
    # sequences that only worked by accident (SyntaxWarning on modern
    # CPython).
    r = re.compile(r"^(\+-{w}\+)(\|[ *]{w}\|){h}(\+-{w}\+)$".format(w=width,
                                                                    h=height))
    # bool is technically redundant here, but I'd rather that this function
    # return an explicit True/False
    return bool(r.match("".join(b)))
|
import re
class InvalidBoard(ValueError):
pass
def board(b):
if not is_valid_board(b):
raise InvalidBoard("Board is malformed and thus invalid")
b = [[ch for ch in row] for row in b]
for i in range(1, len(b)-1):
for j in range(1, len(b[0])-1):
if b[i][j] == " ":
m = "".join(b[i-1][j-1:j+2] + b[i][j-1:j+2] + b[i+1][j-1:j+2])
count = m.count("*")
if count:
b[i][j] = str(count)
return list(map("".join, b))
def is_valid_board(b):
width = "{" + str(len(b[0]) - 2) + "}"
height = "{" + str(len(b) - 2) + "}"
r = re.compile("^(\+-{w}\+)(\|[ *]{w}\|){h}(\+-{w}\+)$".format(w=width,
h=height))
return bool(r.match("".join(b)))
Add note regarding use of bool in validationimport re
class InvalidBoard(ValueError):
pass
def board(b):
if not is_valid_board(b):
raise InvalidBoard("Board is malformed and thus invalid")
b = [[ch for ch in row] for row in b]
for i in range(1, len(b)-1):
for j in range(1, len(b[0])-1):
if b[i][j] == " ":
m = "".join(b[i-1][j-1:j+2] + b[i][j-1:j+2] + b[i+1][j-1:j+2])
count = m.count("*")
if count:
b[i][j] = str(count)
return list(map("".join, b))
def is_valid_board(b):
width = "{" + str(len(b[0]) - 2) + "}"
height = "{" + str(len(b) - 2) + "}"
r = re.compile("^(\+-{w}\+)(\|[ *]{w}\|){h}(\+-{w}\+)$".format(w=width,
h=height))
# bool is technically redundant here, but I'd rather that this function
# return an explicit True/False
return bool(r.match("".join(b)))
|
<commit_before>import re
class InvalidBoard(ValueError):
pass
def board(b):
if not is_valid_board(b):
raise InvalidBoard("Board is malformed and thus invalid")
b = [[ch for ch in row] for row in b]
for i in range(1, len(b)-1):
for j in range(1, len(b[0])-1):
if b[i][j] == " ":
m = "".join(b[i-1][j-1:j+2] + b[i][j-1:j+2] + b[i+1][j-1:j+2])
count = m.count("*")
if count:
b[i][j] = str(count)
return list(map("".join, b))
def is_valid_board(b):
width = "{" + str(len(b[0]) - 2) + "}"
height = "{" + str(len(b) - 2) + "}"
r = re.compile("^(\+-{w}\+)(\|[ *]{w}\|){h}(\+-{w}\+)$".format(w=width,
h=height))
return bool(r.match("".join(b)))
<commit_msg>Add note regarding use of bool in validation<commit_after>import re
class InvalidBoard(ValueError):
pass
def board(b):
if not is_valid_board(b):
raise InvalidBoard("Board is malformed and thus invalid")
b = [[ch for ch in row] for row in b]
for i in range(1, len(b)-1):
for j in range(1, len(b[0])-1):
if b[i][j] == " ":
m = "".join(b[i-1][j-1:j+2] + b[i][j-1:j+2] + b[i+1][j-1:j+2])
count = m.count("*")
if count:
b[i][j] = str(count)
return list(map("".join, b))
def is_valid_board(b):
width = "{" + str(len(b[0]) - 2) + "}"
height = "{" + str(len(b) - 2) + "}"
r = re.compile("^(\+-{w}\+)(\|[ *]{w}\|){h}(\+-{w}\+)$".format(w=width,
h=height))
# bool is technically redundant here, but I'd rather that this function
# return an explicit True/False
return bool(r.match("".join(b)))
|
fbe6722fd74b5e260892f5664226bc66d5424d79
|
kindred/Token.py
|
kindred/Token.py
|
class Token:
"""
Individual word with lemma, part-of-speech and location in text.
:ivar word: Unprocessed word
:ivar lemma: Lemmatized word
:ivar partofspeech: Part-of-speech of word
:ivar startPos: Start position of token in sentence
:ivar endPos: End position of token in sentence
"""
def __init__(self,word,lemma,partofspeech,startPos,endPos):
"""
Constructor for Token class
:param word: Unprocessed word
:param lemma: Lemmatized word
:param partofspeech: Part-of-speech of word
:param startPos: Start position of token in sentence
:param endPos: End position of token in sentence
:type word: str
:type lemma: str
:type partofspeech: str
:type startPos: int
:type endPos: int
"""
self.word = word
self.lemma = lemma
self.partofspeech = partofspeech
self.startPos = startPos
self.endPos = endPos
def __str__(self):
return self.word
def __repr__(self):
return self.__str__()
|
class Token:
"""
Individual word with lemma, part-of-speech and location in text.
:ivar word: Unprocessed word
:ivar lemma: Lemmatized word
:ivar partofspeech: Part-of-speech of word
:ivar startPos: Start position of token in document text (note: not the sentence text)
:ivar endPos: End position of token in document text (note: not the sentence text)
"""
def __init__(self,word,lemma,partofspeech,startPos,endPos):
"""
Constructor for Token class
:param word: Unprocessed word
:param lemma: Lemmatized word
:param partofspeech: Part-of-speech of word
:param startPos: Start position of token in document text (note: not the sentence text)
:param endPos: End position of token in document text (note: not the sentence text)
:type word: str
:type lemma: str
:type partofspeech: str
:type startPos: int
:type endPos: int
"""
self.word = word
self.lemma = lemma
self.partofspeech = partofspeech
self.startPos = startPos
self.endPos = endPos
def __str__(self):
return self.word
def __repr__(self):
return self.__str__()
|
Fix mistaken document about token pos
|
Fix mistaken document about token pos
|
Python
|
mit
|
jakelever/kindred,jakelever/kindred
|
class Token:
"""
Individual word with lemma, part-of-speech and location in text.
:ivar word: Unprocessed word
:ivar lemma: Lemmatized word
:ivar partofspeech: Part-of-speech of word
:ivar startPos: Start position of token in sentence
:ivar endPos: End position of token in sentence
"""
def __init__(self,word,lemma,partofspeech,startPos,endPos):
"""
Constructor for Token class
:param word: Unprocessed word
:param lemma: Lemmatized word
:param partofspeech: Part-of-speech of word
:param startPos: Start position of token in sentence
:param endPos: End position of token in sentence
:type word: str
:type lemma: str
:type partofspeech: str
:type startPos: int
:type endPos: int
"""
self.word = word
self.lemma = lemma
self.partofspeech = partofspeech
self.startPos = startPos
self.endPos = endPos
def __str__(self):
return self.word
def __repr__(self):
return self.__str__()
Fix mistaken document about token pos
|
class Token:
"""
Individual word with lemma, part-of-speech and location in text.
:ivar word: Unprocessed word
:ivar lemma: Lemmatized word
:ivar partofspeech: Part-of-speech of word
:ivar startPos: Start position of token in document text (note: not the sentence text)
:ivar endPos: End position of token in document text (note: not the sentence text)
"""
def __init__(self,word,lemma,partofspeech,startPos,endPos):
"""
Constructor for Token class
:param word: Unprocessed word
:param lemma: Lemmatized word
:param partofspeech: Part-of-speech of word
:param startPos: Start position of token in document text (note: not the sentence text)
:param endPos: End position of token in document text (note: not the sentence text)
:type word: str
:type lemma: str
:type partofspeech: str
:type startPos: int
:type endPos: int
"""
self.word = word
self.lemma = lemma
self.partofspeech = partofspeech
self.startPos = startPos
self.endPos = endPos
def __str__(self):
return self.word
def __repr__(self):
return self.__str__()
|
<commit_before>
class Token:
"""
Individual word with lemma, part-of-speech and location in text.
:ivar word: Unprocessed word
:ivar lemma: Lemmatized word
:ivar partofspeech: Part-of-speech of word
:ivar startPos: Start position of token in sentence
:ivar endPos: End position of token in sentence
"""
def __init__(self,word,lemma,partofspeech,startPos,endPos):
"""
Constructor for Token class
:param word: Unprocessed word
:param lemma: Lemmatized word
:param partofspeech: Part-of-speech of word
:param startPos: Start position of token in sentence
:param endPos: End position of token in sentence
:type word: str
:type lemma: str
:type partofspeech: str
:type startPos: int
:type endPos: int
"""
self.word = word
self.lemma = lemma
self.partofspeech = partofspeech
self.startPos = startPos
self.endPos = endPos
def __str__(self):
return self.word
def __repr__(self):
return self.__str__()
<commit_msg>Fix mistaken document about token pos<commit_after>
|
class Token:
"""
Individual word with lemma, part-of-speech and location in text.
:ivar word: Unprocessed word
:ivar lemma: Lemmatized word
:ivar partofspeech: Part-of-speech of word
:ivar startPos: Start position of token in document text (note: not the sentence text)
:ivar endPos: End position of token in document text (note: not the sentence text)
"""
def __init__(self,word,lemma,partofspeech,startPos,endPos):
"""
Constructor for Token class
:param word: Unprocessed word
:param lemma: Lemmatized word
:param partofspeech: Part-of-speech of word
:param startPos: Start position of token in document text (note: not the sentence text)
:param endPos: End position of token in document text (note: not the sentence text)
:type word: str
:type lemma: str
:type partofspeech: str
:type startPos: int
:type endPos: int
"""
self.word = word
self.lemma = lemma
self.partofspeech = partofspeech
self.startPos = startPos
self.endPos = endPos
def __str__(self):
return self.word
def __repr__(self):
return self.__str__()
|
class Token:
"""
Individual word with lemma, part-of-speech and location in text.
:ivar word: Unprocessed word
:ivar lemma: Lemmatized word
:ivar partofspeech: Part-of-speech of word
:ivar startPos: Start position of token in sentence
:ivar endPos: End position of token in sentence
"""
def __init__(self,word,lemma,partofspeech,startPos,endPos):
"""
Constructor for Token class
:param word: Unprocessed word
:param lemma: Lemmatized word
:param partofspeech: Part-of-speech of word
:param startPos: Start position of token in sentence
:param endPos: End position of token in sentence
:type word: str
:type lemma: str
:type partofspeech: str
:type startPos: int
:type endPos: int
"""
self.word = word
self.lemma = lemma
self.partofspeech = partofspeech
self.startPos = startPos
self.endPos = endPos
def __str__(self):
return self.word
def __repr__(self):
return self.__str__()
Fix mistaken document about token pos
class Token:
"""
Individual word with lemma, part-of-speech and location in text.
:ivar word: Unprocessed word
:ivar lemma: Lemmatized word
:ivar partofspeech: Part-of-speech of word
:ivar startPos: Start position of token in document text (note: not the sentence text)
:ivar endPos: End position of token in document text (note: not the sentence text)
"""
def __init__(self,word,lemma,partofspeech,startPos,endPos):
"""
Constructor for Token class
:param word: Unprocessed word
:param lemma: Lemmatized word
:param partofspeech: Part-of-speech of word
:param startPos: Start position of token in document text (note: not the sentence text)
:param endPos: End position of token in document text (note: not the sentence text)
:type word: str
:type lemma: str
:type partofspeech: str
:type startPos: int
:type endPos: int
"""
self.word = word
self.lemma = lemma
self.partofspeech = partofspeech
self.startPos = startPos
self.endPos = endPos
def __str__(self):
return self.word
def __repr__(self):
return self.__str__()
|
<commit_before>
class Token:
"""
Individual word with lemma, part-of-speech and location in text.
:ivar word: Unprocessed word
:ivar lemma: Lemmatized word
:ivar partofspeech: Part-of-speech of word
:ivar startPos: Start position of token in sentence
:ivar endPos: End position of token in sentence
"""
def __init__(self,word,lemma,partofspeech,startPos,endPos):
"""
Constructor for Token class
:param word: Unprocessed word
:param lemma: Lemmatized word
:param partofspeech: Part-of-speech of word
:param startPos: Start position of token in sentence
:param endPos: End position of token in sentence
:type word: str
:type lemma: str
:type partofspeech: str
:type startPos: int
:type endPos: int
"""
self.word = word
self.lemma = lemma
self.partofspeech = partofspeech
self.startPos = startPos
self.endPos = endPos
def __str__(self):
return self.word
def __repr__(self):
return self.__str__()
<commit_msg>Fix mistaken document about token pos<commit_after>
class Token:
"""
Individual word with lemma, part-of-speech and location in text.
:ivar word: Unprocessed word
:ivar lemma: Lemmatized word
:ivar partofspeech: Part-of-speech of word
:ivar startPos: Start position of token in document text (note: not the sentence text)
:ivar endPos: End position of token in document text (note: not the sentence text)
"""
def __init__(self,word,lemma,partofspeech,startPos,endPos):
"""
Constructor for Token class
:param word: Unprocessed word
:param lemma: Lemmatized word
:param partofspeech: Part-of-speech of word
:param startPos: Start position of token in document text (note: not the sentence text)
:param endPos: End position of token in document text (note: not the sentence text)
:type word: str
:type lemma: str
:type partofspeech: str
:type startPos: int
:type endPos: int
"""
self.word = word
self.lemma = lemma
self.partofspeech = partofspeech
self.startPos = startPos
self.endPos = endPos
def __str__(self):
return self.word
def __repr__(self):
return self.__str__()
|
26d2e74c93036962aa266fc1484e77d47d36a446
|
rnacentral/portal/migrations/0010_add_precomputed_rna_type.py
|
rnacentral/portal/migrations/0010_add_precomputed_rna_type.py
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('portal', '0009_add_precomputed_rna_table'),
]
operations = [
migrations.AddField("RnaPrecomputed", "rna_type", models.CharField(max_length=250))
]
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('portal', '0009_add_precomputed_rna_table'),
]
operations = [
# rna_type is a / seperated field that represents the set of rna_types
# for a given sequence.
migrations.AddField("RnaPrecomputed", "rna_type", models.CharField(max_length=250))
]
|
Add a doc to the migration
|
Add a doc to the migration
This should probably be encoded better somehow.
|
Python
|
apache-2.0
|
RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('portal', '0009_add_precomputed_rna_table'),
]
operations = [
migrations.AddField("RnaPrecomputed", "rna_type", models.CharField(max_length=250))
]
Add a doc to the migration
This should probably be encoded better somehow.
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('portal', '0009_add_precomputed_rna_table'),
]
operations = [
# rna_type is a / seperated field that represents the set of rna_types
# for a given sequence.
migrations.AddField("RnaPrecomputed", "rna_type", models.CharField(max_length=250))
]
|
<commit_before>from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('portal', '0009_add_precomputed_rna_table'),
]
operations = [
migrations.AddField("RnaPrecomputed", "rna_type", models.CharField(max_length=250))
]
<commit_msg>Add a doc to the migration
This should probably be encoded better somehow.<commit_after>
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('portal', '0009_add_precomputed_rna_table'),
]
operations = [
# rna_type is a / seperated field that represents the set of rna_types
# for a given sequence.
migrations.AddField("RnaPrecomputed", "rna_type", models.CharField(max_length=250))
]
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('portal', '0009_add_precomputed_rna_table'),
]
operations = [
migrations.AddField("RnaPrecomputed", "rna_type", models.CharField(max_length=250))
]
Add a doc to the migration
This should probably be encoded better somehow.from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('portal', '0009_add_precomputed_rna_table'),
]
operations = [
# rna_type is a / seperated field that represents the set of rna_types
# for a given sequence.
migrations.AddField("RnaPrecomputed", "rna_type", models.CharField(max_length=250))
]
|
<commit_before>from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('portal', '0009_add_precomputed_rna_table'),
]
operations = [
migrations.AddField("RnaPrecomputed", "rna_type", models.CharField(max_length=250))
]
<commit_msg>Add a doc to the migration
This should probably be encoded better somehow.<commit_after>from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('portal', '0009_add_precomputed_rna_table'),
]
operations = [
# rna_type is a / seperated field that represents the set of rna_types
# for a given sequence.
migrations.AddField("RnaPrecomputed", "rna_type", models.CharField(max_length=250))
]
|
dabc17d149eebd8c6fa61780291fd229bd7bea99
|
oysterapp/oyster/management/commands/update_recurring_tasks.py
|
oysterapp/oyster/management/commands/update_recurring_tasks.py
|
import datetime
from django.core.management.base import BaseCommand
from oysterapp.oyster.models import TaskRule
class Command(BaseCommand):
help = "Creates tasks based off task rules"
def handle(self, *args, **options):
now = datetime.datetime.now()
last_run = TaskRule.objects.order_by('-updated').first().updated
recurring_tasks = TaskRule.objects.filter(next_scheduled_run__gt=last_run,
next_scheduled_run__lte=now)
for task_rule in recurring_tasks:
task_rule.create_new_task()
run = task_rule.calculate_next_run()
print "updating rule: %s next run: %s" % (task_rule.title, run)
|
import datetime
import pytz
from django.core.management.base import BaseCommand
from oysterapp.oyster.models import TaskRule
class Command(BaseCommand):
help = "Creates tasks based off task rules"
def handle(self, *args, **options):
now = datetime.datetime.now().replace(tzinfo=pytz.UTC)
last_run = TaskRule.objects.order_by('-updated').first().updated
print "###################################################"
print "Now: %s" % now.strftime("%h %D %H:%M %z")
print "Last Run: %s" % last_run.strftime("%h %D %H:%M %z")
all_recurring_tasks = TaskRule.objects.all()
recurring_tasks = TaskRule.objects.filter(next_scheduled_run__gt=last_run,
next_scheduled_run__lte=now)
print "Total rules: %s out of %s" % (recurring_tasks.count(), all_recurring_tasks.count())
for task_rule in recurring_tasks:
print "updating rule: %s" % task_rule.title
task_rule.create_new_task()
run = task_rule.calculate_next_run()
print "next run: %s" % run.strftime("%h %D %H:%M %z")
print "###################################################"
|
Update datetime with timezone add more print statements for debugging
|
Update datetime with timezone
add more print statements for debugging
|
Python
|
unlicense
|
averymanderson/oysterap,averymanderson/OysterWebApp,averymanderson/oysterap,averymanderson/OysterWebApp,averymanderson/OysterWebApp,averymanderson/oysterap
|
import datetime
from django.core.management.base import BaseCommand
from oysterapp.oyster.models import TaskRule
class Command(BaseCommand):
help = "Creates tasks based off task rules"
def handle(self, *args, **options):
now = datetime.datetime.now()
last_run = TaskRule.objects.order_by('-updated').first().updated
recurring_tasks = TaskRule.objects.filter(next_scheduled_run__gt=last_run,
next_scheduled_run__lte=now)
for task_rule in recurring_tasks:
task_rule.create_new_task()
run = task_rule.calculate_next_run()
print "updating rule: %s next run: %s" % (task_rule.title, run)
Update datetime with timezone
add more print statements for debugging
|
import datetime
import pytz
from django.core.management.base import BaseCommand
from oysterapp.oyster.models import TaskRule
class Command(BaseCommand):
help = "Creates tasks based off task rules"
def handle(self, *args, **options):
now = datetime.datetime.now().replace(tzinfo=pytz.UTC)
last_run = TaskRule.objects.order_by('-updated').first().updated
print "###################################################"
print "Now: %s" % now.strftime("%h %D %H:%M %z")
print "Last Run: %s" % last_run.strftime("%h %D %H:%M %z")
all_recurring_tasks = TaskRule.objects.all()
recurring_tasks = TaskRule.objects.filter(next_scheduled_run__gt=last_run,
next_scheduled_run__lte=now)
print "Total rules: %s out of %s" % (recurring_tasks.count(), all_recurring_tasks.count())
for task_rule in recurring_tasks:
print "updating rule: %s" % task_rule.title
task_rule.create_new_task()
run = task_rule.calculate_next_run()
print "next run: %s" % run.strftime("%h %D %H:%M %z")
print "###################################################"
|
<commit_before>import datetime
from django.core.management.base import BaseCommand
from oysterapp.oyster.models import TaskRule
class Command(BaseCommand):
help = "Creates tasks based off task rules"
def handle(self, *args, **options):
now = datetime.datetime.now()
last_run = TaskRule.objects.order_by('-updated').first().updated
recurring_tasks = TaskRule.objects.filter(next_scheduled_run__gt=last_run,
next_scheduled_run__lte=now)
for task_rule in recurring_tasks:
task_rule.create_new_task()
run = task_rule.calculate_next_run()
print "updating rule: %s next run: %s" % (task_rule.title, run)
<commit_msg>Update datetime with timezone
add more print statements for debugging<commit_after>
|
import datetime
import pytz
from django.core.management.base import BaseCommand
from oysterapp.oyster.models import TaskRule
class Command(BaseCommand):
help = "Creates tasks based off task rules"
def handle(self, *args, **options):
now = datetime.datetime.now().replace(tzinfo=pytz.UTC)
last_run = TaskRule.objects.order_by('-updated').first().updated
print "###################################################"
print "Now: %s" % now.strftime("%h %D %H:%M %z")
print "Last Run: %s" % last_run.strftime("%h %D %H:%M %z")
all_recurring_tasks = TaskRule.objects.all()
recurring_tasks = TaskRule.objects.filter(next_scheduled_run__gt=last_run,
next_scheduled_run__lte=now)
print "Total rules: %s out of %s" % (recurring_tasks.count(), all_recurring_tasks.count())
for task_rule in recurring_tasks:
print "updating rule: %s" % task_rule.title
task_rule.create_new_task()
run = task_rule.calculate_next_run()
print "next run: %s" % run.strftime("%h %D %H:%M %z")
print "###################################################"
|
import datetime
from django.core.management.base import BaseCommand
from oysterapp.oyster.models import TaskRule
class Command(BaseCommand):
help = "Creates tasks based off task rules"
def handle(self, *args, **options):
now = datetime.datetime.now()
last_run = TaskRule.objects.order_by('-updated').first().updated
recurring_tasks = TaskRule.objects.filter(next_scheduled_run__gt=last_run,
next_scheduled_run__lte=now)
for task_rule in recurring_tasks:
task_rule.create_new_task()
run = task_rule.calculate_next_run()
print "updating rule: %s next run: %s" % (task_rule.title, run)
Update datetime with timezone
add more print statements for debuggingimport datetime
import pytz
from django.core.management.base import BaseCommand
from oysterapp.oyster.models import TaskRule
class Command(BaseCommand):
help = "Creates tasks based off task rules"
def handle(self, *args, **options):
now = datetime.datetime.now().replace(tzinfo=pytz.UTC)
last_run = TaskRule.objects.order_by('-updated').first().updated
print "###################################################"
print "Now: %s" % now.strftime("%h %D %H:%M %z")
print "Last Run: %s" % last_run.strftime("%h %D %H:%M %z")
all_recurring_tasks = TaskRule.objects.all()
recurring_tasks = TaskRule.objects.filter(next_scheduled_run__gt=last_run,
next_scheduled_run__lte=now)
print "Total rules: %s out of %s" % (recurring_tasks.count(), all_recurring_tasks.count())
for task_rule in recurring_tasks:
print "updating rule: %s" % task_rule.title
task_rule.create_new_task()
run = task_rule.calculate_next_run()
print "next run: %s" % run.strftime("%h %D %H:%M %z")
print "###################################################"
|
<commit_before>import datetime
from django.core.management.base import BaseCommand
from oysterapp.oyster.models import TaskRule
class Command(BaseCommand):
help = "Creates tasks based off task rules"
def handle(self, *args, **options):
now = datetime.datetime.now()
last_run = TaskRule.objects.order_by('-updated').first().updated
recurring_tasks = TaskRule.objects.filter(next_scheduled_run__gt=last_run,
next_scheduled_run__lte=now)
for task_rule in recurring_tasks:
task_rule.create_new_task()
run = task_rule.calculate_next_run()
print "updating rule: %s next run: %s" % (task_rule.title, run)
<commit_msg>Update datetime with timezone
add more print statements for debugging<commit_after>import datetime
import pytz
from django.core.management.base import BaseCommand
from oysterapp.oyster.models import TaskRule
class Command(BaseCommand):
help = "Creates tasks based off task rules"
def handle(self, *args, **options):
now = datetime.datetime.now().replace(tzinfo=pytz.UTC)
last_run = TaskRule.objects.order_by('-updated').first().updated
print "###################################################"
print "Now: %s" % now.strftime("%h %D %H:%M %z")
print "Last Run: %s" % last_run.strftime("%h %D %H:%M %z")
all_recurring_tasks = TaskRule.objects.all()
recurring_tasks = TaskRule.objects.filter(next_scheduled_run__gt=last_run,
next_scheduled_run__lte=now)
print "Total rules: %s out of %s" % (recurring_tasks.count(), all_recurring_tasks.count())
for task_rule in recurring_tasks:
print "updating rule: %s" % task_rule.title
task_rule.create_new_task()
run = task_rule.calculate_next_run()
print "next run: %s" % run.strftime("%h %D %H:%M %z")
print "###################################################"
|
da9f9028e3a757e81affb34ad2ff9dc61a1ddd8a
|
merlin/engine/battle.py
|
merlin/engine/battle.py
|
class Prepare(object):
"""
Prepare the champions for the battle!
Usage:
hero = Prepare(name="Aragorn", base_attack=100, base_hp=100)
or like this:
aragorn = {"name": "Aragorn", "base_attack": 100, "base_hp": 100}
hero = Prepare(**aragorn)
"""
def __init__(self, name, base_attack, base_hp):
self.name = name
self.base_attack = base_attack
self.base_hp = base_hp
@property
def status(self):
return self.__dict__
def attack(self, foe):
if not isinstance(foe, Prepare):
raise TypeError('foe should be a Prepare object')
if foe.base_hp <= 0:
raise Exception('foe is already dead! Stop hit him!')
foe.base_hp = foe.base_hp - self.base_attack
if foe.base_hp <= 0:
print 'foe is dead.'
return foe.base_hp
|
class Prepare(object):
"""
Prepare the champions for the battle!
"""
def __init__(self, name, base_attack, base_hp):
self.name = name
self.base_attack = base_attack
self.base_hp = base_hp
@property
def status(self):
return self.__dict__
def attack(self, foe):
if not isinstance(foe, Prepare):
raise TypeError('foe should be a Prepare object')
if foe.base_hp <= 0:
raise Exception('foe is already dead! Stop hit him!')
foe.base_hp = foe.base_hp - self.base_attack
if foe.base_hp <= 0:
print 'foe is dead.'
return foe.base_hp
def collect(self, foe):
if not isinstance(foe, Prepare):
raise TypeError('foe should be a Prepare object')
pass
|
Create a collect method for get droppable items
|
Create a collect method for get droppable items
|
Python
|
mit
|
lerrua/merlin-engine
|
class Prepare(object):
"""
Prepare the champions for the battle!
Usage:
hero = Prepare(name="Aragorn", base_attack=100, base_hp=100)
or like this:
aragorn = {"name": "Aragorn", "base_attack": 100, "base_hp": 100}
hero = Prepare(**aragorn)
"""
def __init__(self, name, base_attack, base_hp):
self.name = name
self.base_attack = base_attack
self.base_hp = base_hp
@property
def status(self):
return self.__dict__
def attack(self, foe):
if not isinstance(foe, Prepare):
raise TypeError('foe should be a Prepare object')
if foe.base_hp <= 0:
raise Exception('foe is already dead! Stop hit him!')
foe.base_hp = foe.base_hp - self.base_attack
if foe.base_hp <= 0:
print 'foe is dead.'
return foe.base_hp
Create a collect method for get droppable items
|
class Prepare(object):
"""
Prepare the champions for the battle!
"""
def __init__(self, name, base_attack, base_hp):
self.name = name
self.base_attack = base_attack
self.base_hp = base_hp
@property
def status(self):
return self.__dict__
def attack(self, foe):
if not isinstance(foe, Prepare):
raise TypeError('foe should be a Prepare object')
if foe.base_hp <= 0:
raise Exception('foe is already dead! Stop hit him!')
foe.base_hp = foe.base_hp - self.base_attack
if foe.base_hp <= 0:
print 'foe is dead.'
return foe.base_hp
def collect(self, foe):
if not isinstance(foe, Prepare):
raise TypeError('foe should be a Prepare object')
pass
|
<commit_before>
class Prepare(object):
"""
Prepare the champions for the battle!
Usage:
hero = Prepare(name="Aragorn", base_attack=100, base_hp=100)
or like this:
aragorn = {"name": "Aragorn", "base_attack": 100, "base_hp": 100}
hero = Prepare(**aragorn)
"""
def __init__(self, name, base_attack, base_hp):
self.name = name
self.base_attack = base_attack
self.base_hp = base_hp
@property
def status(self):
return self.__dict__
def attack(self, foe):
if not isinstance(foe, Prepare):
raise TypeError('foe should be a Prepare object')
if foe.base_hp <= 0:
raise Exception('foe is already dead! Stop hit him!')
foe.base_hp = foe.base_hp - self.base_attack
if foe.base_hp <= 0:
print 'foe is dead.'
return foe.base_hp
<commit_msg>Create a collect method for get droppable items<commit_after>
|
class Prepare(object):
"""
Prepare the champions for the battle!
"""
def __init__(self, name, base_attack, base_hp):
self.name = name
self.base_attack = base_attack
self.base_hp = base_hp
@property
def status(self):
return self.__dict__
def attack(self, foe):
if not isinstance(foe, Prepare):
raise TypeError('foe should be a Prepare object')
if foe.base_hp <= 0:
raise Exception('foe is already dead! Stop hit him!')
foe.base_hp = foe.base_hp - self.base_attack
if foe.base_hp <= 0:
print 'foe is dead.'
return foe.base_hp
def collect(self, foe):
if not isinstance(foe, Prepare):
raise TypeError('foe should be a Prepare object')
pass
|
class Prepare(object):
"""
Prepare the champions for the battle!
Usage:
hero = Prepare(name="Aragorn", base_attack=100, base_hp=100)
or like this:
aragorn = {"name": "Aragorn", "base_attack": 100, "base_hp": 100}
hero = Prepare(**aragorn)
"""
def __init__(self, name, base_attack, base_hp):
self.name = name
self.base_attack = base_attack
self.base_hp = base_hp
@property
def status(self):
return self.__dict__
def attack(self, foe):
if not isinstance(foe, Prepare):
raise TypeError('foe should be a Prepare object')
if foe.base_hp <= 0:
raise Exception('foe is already dead! Stop hit him!')
foe.base_hp = foe.base_hp - self.base_attack
if foe.base_hp <= 0:
print 'foe is dead.'
return foe.base_hp
Create a collect method for get droppable itemsclass Prepare(object):
"""
Prepare the champions for the battle!
"""
def __init__(self, name, base_attack, base_hp):
self.name = name
self.base_attack = base_attack
self.base_hp = base_hp
@property
def status(self):
return self.__dict__
def attack(self, foe):
if not isinstance(foe, Prepare):
raise TypeError('foe should be a Prepare object')
if foe.base_hp <= 0:
raise Exception('foe is already dead! Stop hit him!')
foe.base_hp = foe.base_hp - self.base_attack
if foe.base_hp <= 0:
print 'foe is dead.'
return foe.base_hp
def collect(self, foe):
if not isinstance(foe, Prepare):
raise TypeError('foe should be a Prepare object')
pass
|
<commit_before>
class Prepare(object):
"""
Prepare the champions for the battle!
Usage:
hero = Prepare(name="Aragorn", base_attack=100, base_hp=100)
or like this:
aragorn = {"name": "Aragorn", "base_attack": 100, "base_hp": 100}
hero = Prepare(**aragorn)
"""
def __init__(self, name, base_attack, base_hp):
self.name = name
self.base_attack = base_attack
self.base_hp = base_hp
@property
def status(self):
return self.__dict__
def attack(self, foe):
if not isinstance(foe, Prepare):
raise TypeError('foe should be a Prepare object')
if foe.base_hp <= 0:
raise Exception('foe is already dead! Stop hit him!')
foe.base_hp = foe.base_hp - self.base_attack
if foe.base_hp <= 0:
print 'foe is dead.'
return foe.base_hp
<commit_msg>Create a collect method for get droppable items<commit_after>class Prepare(object):
"""
Prepare the champions for the battle!
"""
def __init__(self, name, base_attack, base_hp):
self.name = name
self.base_attack = base_attack
self.base_hp = base_hp
@property
def status(self):
return self.__dict__
def attack(self, foe):
if not isinstance(foe, Prepare):
raise TypeError('foe should be a Prepare object')
if foe.base_hp <= 0:
raise Exception('foe is already dead! Stop hit him!')
foe.base_hp = foe.base_hp - self.base_attack
if foe.base_hp <= 0:
print 'foe is dead.'
return foe.base_hp
def collect(self, foe):
if not isinstance(foe, Prepare):
raise TypeError('foe should be a Prepare object')
pass
|
e1edb506113a0fd8104931def710188f5d507f06
|
crispy/gui/widgets/plotwidget.py
|
crispy/gui/widgets/plotwidget.py
|
# coding: utf-8
from silx.gui.plot import PlotWindow
class PlotWidget(PlotWindow):
def __init__(self, *args):
super(PlotWidget, self).__init__()
self.setActiveCurveHandling(False)
self.setGraphXLabel('Energy (eV)')
self.setGraphYLabel('Absorption cross section (a.u.)')
def plot(self, x, y, legend=None):
self.addCurve(x, y, legend=legend)
def clear(self):
super(PlotWidget, self).clear()
|
# coding: utf-8
from silx.gui.plot import PlotWindow
class PlotWidget(PlotWindow):
def __init__(self, *args):
super(PlotWidget, self).__init__(logScale=False, grid=True,
aspectRatio=False, yInverted=False, roi=False, mask=False,
print_=False)
self.setActiveCurveHandling(False)
self.setGraphXLabel('Energy (eV)')
self.setGraphYLabel('Absorption cross section (a.u.)')
def plot(self, x, y, legend=None):
self.addCurve(x, y, legend=legend)
def clear(self):
super(PlotWidget, self).clear()
|
Remove unused icons of the PlotWindow
|
Remove unused icons of the PlotWindow
|
Python
|
mit
|
mretegan/crispy,mretegan/crispy
|
# coding: utf-8
from silx.gui.plot import PlotWindow
class PlotWidget(PlotWindow):
def __init__(self, *args):
super(PlotWidget, self).__init__()
self.setActiveCurveHandling(False)
self.setGraphXLabel('Energy (eV)')
self.setGraphYLabel('Absorption cross section (a.u.)')
def plot(self, x, y, legend=None):
self.addCurve(x, y, legend=legend)
def clear(self):
super(PlotWidget, self).clear()
Remove unused icons of the PlotWindow
|
# coding: utf-8
from silx.gui.plot import PlotWindow
class PlotWidget(PlotWindow):
def __init__(self, *args):
super(PlotWidget, self).__init__(logScale=False, grid=True,
aspectRatio=False, yInverted=False, roi=False, mask=False,
print_=False)
self.setActiveCurveHandling(False)
self.setGraphXLabel('Energy (eV)')
self.setGraphYLabel('Absorption cross section (a.u.)')
def plot(self, x, y, legend=None):
self.addCurve(x, y, legend=legend)
def clear(self):
super(PlotWidget, self).clear()
|
<commit_before># coding: utf-8
from silx.gui.plot import PlotWindow
class PlotWidget(PlotWindow):
def __init__(self, *args):
super(PlotWidget, self).__init__()
self.setActiveCurveHandling(False)
self.setGraphXLabel('Energy (eV)')
self.setGraphYLabel('Absorption cross section (a.u.)')
def plot(self, x, y, legend=None):
self.addCurve(x, y, legend=legend)
def clear(self):
super(PlotWidget, self).clear()
<commit_msg>Remove unused icons of the PlotWindow<commit_after>
|
# coding: utf-8
from silx.gui.plot import PlotWindow
class PlotWidget(PlotWindow):
def __init__(self, *args):
super(PlotWidget, self).__init__(logScale=False, grid=True,
aspectRatio=False, yInverted=False, roi=False, mask=False,
print_=False)
self.setActiveCurveHandling(False)
self.setGraphXLabel('Energy (eV)')
self.setGraphYLabel('Absorption cross section (a.u.)')
def plot(self, x, y, legend=None):
self.addCurve(x, y, legend=legend)
def clear(self):
super(PlotWidget, self).clear()
|
# coding: utf-8
from silx.gui.plot import PlotWindow
class PlotWidget(PlotWindow):
def __init__(self, *args):
super(PlotWidget, self).__init__()
self.setActiveCurveHandling(False)
self.setGraphXLabel('Energy (eV)')
self.setGraphYLabel('Absorption cross section (a.u.)')
def plot(self, x, y, legend=None):
self.addCurve(x, y, legend=legend)
def clear(self):
super(PlotWidget, self).clear()
Remove unused icons of the PlotWindow# coding: utf-8
from silx.gui.plot import PlotWindow
class PlotWidget(PlotWindow):
def __init__(self, *args):
super(PlotWidget, self).__init__(logScale=False, grid=True,
aspectRatio=False, yInverted=False, roi=False, mask=False,
print_=False)
self.setActiveCurveHandling(False)
self.setGraphXLabel('Energy (eV)')
self.setGraphYLabel('Absorption cross section (a.u.)')
def plot(self, x, y, legend=None):
self.addCurve(x, y, legend=legend)
def clear(self):
super(PlotWidget, self).clear()
|
<commit_before># coding: utf-8
from silx.gui.plot import PlotWindow
class PlotWidget(PlotWindow):
def __init__(self, *args):
super(PlotWidget, self).__init__()
self.setActiveCurveHandling(False)
self.setGraphXLabel('Energy (eV)')
self.setGraphYLabel('Absorption cross section (a.u.)')
def plot(self, x, y, legend=None):
self.addCurve(x, y, legend=legend)
def clear(self):
super(PlotWidget, self).clear()
<commit_msg>Remove unused icons of the PlotWindow<commit_after># coding: utf-8
from silx.gui.plot import PlotWindow
class PlotWidget(PlotWindow):
def __init__(self, *args):
super(PlotWidget, self).__init__(logScale=False, grid=True,
aspectRatio=False, yInverted=False, roi=False, mask=False,
print_=False)
self.setActiveCurveHandling(False)
self.setGraphXLabel('Energy (eV)')
self.setGraphYLabel('Absorption cross section (a.u.)')
def plot(self, x, y, legend=None):
self.addCurve(x, y, legend=legend)
def clear(self):
super(PlotWidget, self).clear()
|
4979b1b62be3b6aee300650cee47837a605b6376
|
jenkins.check_copyright.py
|
jenkins.check_copyright.py
|
#-------------------------------------------------------------------------------------------------------
# Copyright (C) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
#-------------------------------------------------------------------------------------------------------
# Python 2.7 and 3.x compatibility for easier testing regardless of python installation
from __future__ import print_function
import sys
import os.path
import re
copyright_lines = [
r'-------------------------------------------------------------------------------------------------------',
r' Copyright \(C\) Microsoft\. All rights reserved\.',
r' Licensed under the MIT license\. See LICENSE\.txt file in the project root for full license information\.'
]
regexes = []
for line in copyright_lines:
pattern = '^.{1,5}%s$' % line
regexes.append(re.compile(pattern))
if len(sys.argv) < 2:
print("Requires passing a filename as an argument.")
exit(1)
file_name = sys.argv[1]
if not os.path.isfile(file_name):
print("File does not exist:", file_name, "(not necessarily an error)")
exit(0)
with open(file_name, 'r') as sourcefile:
for x in range(0,len(copyright_lines)):
# TODO add a check for empty files (dummy.js etc), as they cause the script to crash here
line = next(sourcefile)
line = line.rstrip()
matches = regexes[x].match(line)
if not matches:
print(file_name, "... does not contain a correct Microsoft copyright notice.")
# found a problem so exit and report the problem to the caller
exit(1)
|
#-------------------------------------------------------------------------------------------------------
# Copyright (C) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
#-------------------------------------------------------------------------------------------------------
# Python 2.7 and 3.x compatibility for easier testing regardless of python installation
from __future__ import print_function
import sys
import os.path
import re
copyright_lines = [
r'-------------------------------------------------------------------------------------------------------',
r' Copyright \(C\) Microsoft( Corporation and contributors)?\. All rights reserved\.',
r' Licensed under the MIT license\. See LICENSE\.txt file in the project root for full license information\.'
]
regexes = []
for line in copyright_lines:
pattern = '^.{1,5}%s$' % line
regexes.append(re.compile(pattern))
if len(sys.argv) < 2:
print("Requires passing a filename as an argument.")
exit(1)
file_name = sys.argv[1]
if not os.path.isfile(file_name):
print("File does not exist:", file_name, "(not necessarily an error)")
exit(0)
with open(file_name, 'r') as sourcefile:
for x in range(0,len(copyright_lines)):
# TODO add a check for empty files (dummy.js etc), as they cause the script to crash here
line = next(sourcefile)
line = line.rstrip()
matches = regexes[x].match(line)
if not matches:
print(file_name, "... does not contain a correct Microsoft copyright notice.")
# found a problem so exit and report the problem to the caller
exit(1)
|
Update copyright check to accept a new, more inclusive, copyright.
|
Update copyright check to accept a new, more inclusive, copyright.
In order to address concerns over community copyright of the source code they modify. All authors are still subject to the terms of the CLA they sign when contributing.
This new format was approved by Microsoft legal:
```
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft Corporation and contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
```
In addition to the original format:
```
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
```
The check now allows both so that we don't need to make a disruptive change to every file in the project. The notice can be updated at the author's discretion as they make changes.
|
Python
|
mit
|
mrkmarron/ChakraCore,mrkmarron/ChakraCore,mrkmarron/ChakraCore,Microsoft/ChakraCore,Microsoft/ChakraCore,mrkmarron/ChakraCore,Microsoft/ChakraCore,mrkmarron/ChakraCore,Microsoft/ChakraCore,Microsoft/ChakraCore,mrkmarron/ChakraCore,mrkmarron/ChakraCore,Microsoft/ChakraCore
|
#-------------------------------------------------------------------------------------------------------
# Copyright (C) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
#-------------------------------------------------------------------------------------------------------
# Python 2.7 and 3.x compatibility for easier testing regardless of python installation
from __future__ import print_function
import sys
import os.path
import re
copyright_lines = [
r'-------------------------------------------------------------------------------------------------------',
r' Copyright \(C\) Microsoft\. All rights reserved\.',
r' Licensed under the MIT license\. See LICENSE\.txt file in the project root for full license information\.'
]
regexes = []
for line in copyright_lines:
pattern = '^.{1,5}%s$' % line
regexes.append(re.compile(pattern))
if len(sys.argv) < 2:
print("Requires passing a filename as an argument.")
exit(1)
file_name = sys.argv[1]
if not os.path.isfile(file_name):
print("File does not exist:", file_name, "(not necessarily an error)")
exit(0)
with open(file_name, 'r') as sourcefile:
for x in range(0,len(copyright_lines)):
# TODO add a check for empty files (dummy.js etc), as they cause the script to crash here
line = next(sourcefile)
line = line.rstrip()
matches = regexes[x].match(line)
if not matches:
print(file_name, "... does not contain a correct Microsoft copyright notice.")
# found a problem so exit and report the problem to the caller
exit(1)
Update copyright check to accept a new, more inclusive, copyright.
In order to address concerns over community copyright of the source code they modify. All authors are still subject to the terms of the CLA they sign when contributing.
This new format was approved by Microsoft legal:
```
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft Corporation and contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
```
In addition to the original format:
```
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
```
The check now allows both so that we don't need to make a disruptive change to every file in the project. The notice can be updated at the author's discretion as they make changes.
|
#-------------------------------------------------------------------------------------------------------
# Copyright (C) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
#-------------------------------------------------------------------------------------------------------
# Python 2.7 and 3.x compatibility for easier testing regardless of python installation
from __future__ import print_function
import sys
import os.path
import re
copyright_lines = [
r'-------------------------------------------------------------------------------------------------------',
r' Copyright \(C\) Microsoft( Corporation and contributors)?\. All rights reserved\.',
r' Licensed under the MIT license\. See LICENSE\.txt file in the project root for full license information\.'
]
regexes = []
for line in copyright_lines:
pattern = '^.{1,5}%s$' % line
regexes.append(re.compile(pattern))
if len(sys.argv) < 2:
print("Requires passing a filename as an argument.")
exit(1)
file_name = sys.argv[1]
if not os.path.isfile(file_name):
print("File does not exist:", file_name, "(not necessarily an error)")
exit(0)
with open(file_name, 'r') as sourcefile:
for x in range(0,len(copyright_lines)):
# TODO add a check for empty files (dummy.js etc), as they cause the script to crash here
line = next(sourcefile)
line = line.rstrip()
matches = regexes[x].match(line)
if not matches:
print(file_name, "... does not contain a correct Microsoft copyright notice.")
# found a problem so exit and report the problem to the caller
exit(1)
|
<commit_before>#-------------------------------------------------------------------------------------------------------
# Copyright (C) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
#-------------------------------------------------------------------------------------------------------
# Python 2.7 and 3.x compatibility for easier testing regardless of python installation
from __future__ import print_function
import sys
import os.path
import re
copyright_lines = [
r'-------------------------------------------------------------------------------------------------------',
r' Copyright \(C\) Microsoft\. All rights reserved\.',
r' Licensed under the MIT license\. See LICENSE\.txt file in the project root for full license information\.'
]
regexes = []
for line in copyright_lines:
pattern = '^.{1,5}%s$' % line
regexes.append(re.compile(pattern))
if len(sys.argv) < 2:
print("Requires passing a filename as an argument.")
exit(1)
file_name = sys.argv[1]
if not os.path.isfile(file_name):
print("File does not exist:", file_name, "(not necessarily an error)")
exit(0)
with open(file_name, 'r') as sourcefile:
for x in range(0,len(copyright_lines)):
# TODO add a check for empty files (dummy.js etc), as they cause the script to crash here
line = next(sourcefile)
line = line.rstrip()
matches = regexes[x].match(line)
if not matches:
print(file_name, "... does not contain a correct Microsoft copyright notice.")
# found a problem so exit and report the problem to the caller
exit(1)
<commit_msg>Update copyright check to accept a new, more inclusive, copyright.
In order to address concerns over community copyright of the source code they modify. All authors are still subject to the terms of the CLA they sign when contributing.
This new format was approved by Microsoft legal:
```
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft Corporation and contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
```
In addition to the original format:
```
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
```
The check now allows both so that we don't need to make a disruptive change to every file in the project. The notice can be updated at the author's discretion as they make changes.<commit_after>
|
#-------------------------------------------------------------------------------------------------------
# Copyright (C) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
#-------------------------------------------------------------------------------------------------------
# Python 2.7 and 3.x compatibility for easier testing regardless of python installation
from __future__ import print_function
import sys
import os.path
import re
copyright_lines = [
r'-------------------------------------------------------------------------------------------------------',
r' Copyright \(C\) Microsoft( Corporation and contributors)?\. All rights reserved\.',
r' Licensed under the MIT license\. See LICENSE\.txt file in the project root for full license information\.'
]
regexes = []
for line in copyright_lines:
pattern = '^.{1,5}%s$' % line
regexes.append(re.compile(pattern))
if len(sys.argv) < 2:
print("Requires passing a filename as an argument.")
exit(1)
file_name = sys.argv[1]
if not os.path.isfile(file_name):
print("File does not exist:", file_name, "(not necessarily an error)")
exit(0)
with open(file_name, 'r') as sourcefile:
for x in range(0,len(copyright_lines)):
# TODO add a check for empty files (dummy.js etc), as they cause the script to crash here
line = next(sourcefile)
line = line.rstrip()
matches = regexes[x].match(line)
if not matches:
print(file_name, "... does not contain a correct Microsoft copyright notice.")
# found a problem so exit and report the problem to the caller
exit(1)
|
#-------------------------------------------------------------------------------------------------------
# Copyright (C) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
#-------------------------------------------------------------------------------------------------------
# Python 2.7 and 3.x compatibility for easier testing regardless of python installation
from __future__ import print_function
import sys
import os.path
import re
copyright_lines = [
r'-------------------------------------------------------------------------------------------------------',
r' Copyright \(C\) Microsoft\. All rights reserved\.',
r' Licensed under the MIT license\. See LICENSE\.txt file in the project root for full license information\.'
]
regexes = []
for line in copyright_lines:
pattern = '^.{1,5}%s$' % line
regexes.append(re.compile(pattern))
if len(sys.argv) < 2:
print("Requires passing a filename as an argument.")
exit(1)
file_name = sys.argv[1]
if not os.path.isfile(file_name):
print("File does not exist:", file_name, "(not necessarily an error)")
exit(0)
with open(file_name, 'r') as sourcefile:
for x in range(0,len(copyright_lines)):
# TODO add a check for empty files (dummy.js etc), as they cause the script to crash here
line = next(sourcefile)
line = line.rstrip()
matches = regexes[x].match(line)
if not matches:
print(file_name, "... does not contain a correct Microsoft copyright notice.")
# found a problem so exit and report the problem to the caller
exit(1)
Update copyright check to accept a new, more inclusive, copyright.
In order to address concerns over community copyright of the source code they modify. All authors are still subject to the terms of the CLA they sign when contributing.
This new format was approved by Microsoft legal:
```
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft Corporation and contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
```
In addition to the original format:
```
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
```
The check now allows both so that we don't need to make a disruptive change to every file in the project. The notice can be updated at the author's discretion as they make changes.#-------------------------------------------------------------------------------------------------------
# Copyright (C) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
#-------------------------------------------------------------------------------------------------------
# Python 2.7 and 3.x compatibility for easier testing regardless of python installation
from __future__ import print_function
import sys
import os.path
import re
copyright_lines = [
r'-------------------------------------------------------------------------------------------------------',
r' Copyright \(C\) Microsoft( Corporation and contributors)?\. All rights reserved\.',
r' Licensed under the MIT license\. See LICENSE\.txt file in the project root for full license information\.'
]
regexes = []
for line in copyright_lines:
pattern = '^.{1,5}%s$' % line
regexes.append(re.compile(pattern))
if len(sys.argv) < 2:
print("Requires passing a filename as an argument.")
exit(1)
file_name = sys.argv[1]
if not os.path.isfile(file_name):
print("File does not exist:", file_name, "(not necessarily an error)")
exit(0)
with open(file_name, 'r') as sourcefile:
for x in range(0,len(copyright_lines)):
# TODO add a check for empty files (dummy.js etc), as they cause the script to crash here
line = next(sourcefile)
line = line.rstrip()
matches = regexes[x].match(line)
if not matches:
print(file_name, "... does not contain a correct Microsoft copyright notice.")
# found a problem so exit and report the problem to the caller
exit(1)
|
<commit_before>#-------------------------------------------------------------------------------------------------------
# Copyright (C) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
#-------------------------------------------------------------------------------------------------------
# Python 2.7 and 3.x compatibility for easier testing regardless of python installation
from __future__ import print_function
import sys
import os.path
import re
copyright_lines = [
r'-------------------------------------------------------------------------------------------------------',
r' Copyright \(C\) Microsoft\. All rights reserved\.',
r' Licensed under the MIT license\. See LICENSE\.txt file in the project root for full license information\.'
]
regexes = []
for line in copyright_lines:
pattern = '^.{1,5}%s$' % line
regexes.append(re.compile(pattern))
if len(sys.argv) < 2:
print("Requires passing a filename as an argument.")
exit(1)
file_name = sys.argv[1]
if not os.path.isfile(file_name):
print("File does not exist:", file_name, "(not necessarily an error)")
exit(0)
with open(file_name, 'r') as sourcefile:
for x in range(0,len(copyright_lines)):
# TODO add a check for empty files (dummy.js etc), as they cause the script to crash here
line = next(sourcefile)
line = line.rstrip()
matches = regexes[x].match(line)
if not matches:
print(file_name, "... does not contain a correct Microsoft copyright notice.")
# found a problem so exit and report the problem to the caller
exit(1)
<commit_msg>Update copyright check to accept a new, more inclusive, copyright.
In order to address concerns over community copyright of the source code they modify. All authors are still subject to the terms of the CLA they sign when contributing.
This new format was approved by Microsoft legal:
```
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft Corporation and contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
```
In addition to the original format:
```
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
```
The check now allows both so that we don't need to make a disruptive change to every file in the project. The notice can be updated at the author's discretion as they make changes.<commit_after>#-------------------------------------------------------------------------------------------------------
# Copyright (C) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
#-------------------------------------------------------------------------------------------------------
# Python 2.7 and 3.x compatibility for easier testing regardless of python installation
from __future__ import print_function
import sys
import os.path
import re
copyright_lines = [
r'-------------------------------------------------------------------------------------------------------',
r' Copyright \(C\) Microsoft( Corporation and contributors)?\. All rights reserved\.',
r' Licensed under the MIT license\. See LICENSE\.txt file in the project root for full license information\.'
]
regexes = []
for line in copyright_lines:
pattern = '^.{1,5}%s$' % line
regexes.append(re.compile(pattern))
if len(sys.argv) < 2:
print("Requires passing a filename as an argument.")
exit(1)
file_name = sys.argv[1]
if not os.path.isfile(file_name):
print("File does not exist:", file_name, "(not necessarily an error)")
exit(0)
with open(file_name, 'r') as sourcefile:
for x in range(0,len(copyright_lines)):
# TODO add a check for empty files (dummy.js etc), as they cause the script to crash here
line = next(sourcefile)
line = line.rstrip()
matches = regexes[x].match(line)
if not matches:
print(file_name, "... does not contain a correct Microsoft copyright notice.")
# found a problem so exit and report the problem to the caller
exit(1)
|
d01a5cdf950b7421703e2a018ee0306935e79555
|
sugar/activity/__init__.py
|
sugar/activity/__init__.py
|
import gtk
from sugar.graphics.grid import Grid
settings = gtk.settings_get_default()
grid = Grid()
sizes = 'gtk-large-toolbar=%d, %d' % (grid.dimension(1), grid.dimension(1))
settings.set_string_property('gtk-icon-sizes', sizes, '')
settings.set_string_property('gtk-font-name', 'Sans 14', '')
def get_default_type(activity_type):
"""Get the activity default type.
It's the type of the main network service which tracks presence
and provides info about the activity, for example the title."""
splitted_id = activity_type.split('.')
splitted_id.reverse()
return '_' + '_'.join(splitted_id) + '._udp'
|
import gtk
from sugar.graphics.grid import Grid
settings = gtk.settings_get_default()
grid = Grid()
sizes = 'gtk-large-toolbar=%d, %d' % (grid.dimension(1), grid.dimension(1))
settings.set_string_property('gtk-icon-sizes', sizes, '')
def get_default_type(activity_type):
"""Get the activity default type.
It's the type of the main network service which tracks presence
and provides info about the activity, for example the title."""
splitted_id = activity_type.split('.')
splitted_id.reverse()
return '_' + '_'.join(splitted_id) + '._udp'
|
Move font size in the theme
|
Move font size in the theme
|
Python
|
lgpl-2.1
|
Daksh/sugar-toolkit-gtk3,puneetgkaur/backup_sugar_sugartoolkit,ceibal-tatu/sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,samdroid-apps/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit-gtk3,puneetgkaur/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,tchx84/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,samdroid-apps/sugar-toolkit-gtk3,puneetgkaur/sugar-toolkit-gtk3,manuq/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,quozl/sugar-toolkit-gtk3,tchx84/sugar-toolkit-gtk3,puneetgkaur/backup_sugar_sugartoolkit,godiard/sugar-toolkit-gtk3,manuq/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit,i5o/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit,samdroid-apps/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,manuq/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,ceibal-tatu/sugar-toolkit,sugarlabs/sugar-toolkit-gtk3,Daksh/sugar-toolkit-gtk3,godiard/sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit,samdroid-apps/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit-gtk3,puneetgkaur/backup_sugar_sugartoolkit,tchx84/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,godiard/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit,puneetgkaur/sugar-toolkit-gtk3,Daksh/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit,gusDuarte/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit
|
import gtk
from sugar.graphics.grid import Grid
settings = gtk.settings_get_default()
grid = Grid()
sizes = 'gtk-large-toolbar=%d, %d' % (grid.dimension(1), grid.dimension(1))
settings.set_string_property('gtk-icon-sizes', sizes, '')
settings.set_string_property('gtk-font-name', 'Sans 14', '')
def get_default_type(activity_type):
"""Get the activity default type.
It's the type of the main network service which tracks presence
and provides info about the activity, for example the title."""
splitted_id = activity_type.split('.')
splitted_id.reverse()
return '_' + '_'.join(splitted_id) + '._udp'
Move font size in the theme
|
import gtk
from sugar.graphics.grid import Grid
settings = gtk.settings_get_default()
grid = Grid()
sizes = 'gtk-large-toolbar=%d, %d' % (grid.dimension(1), grid.dimension(1))
settings.set_string_property('gtk-icon-sizes', sizes, '')
def get_default_type(activity_type):
"""Get the activity default type.
It's the type of the main network service which tracks presence
and provides info about the activity, for example the title."""
splitted_id = activity_type.split('.')
splitted_id.reverse()
return '_' + '_'.join(splitted_id) + '._udp'
|
<commit_before>import gtk
from sugar.graphics.grid import Grid
settings = gtk.settings_get_default()
grid = Grid()
sizes = 'gtk-large-toolbar=%d, %d' % (grid.dimension(1), grid.dimension(1))
settings.set_string_property('gtk-icon-sizes', sizes, '')
settings.set_string_property('gtk-font-name', 'Sans 14', '')
def get_default_type(activity_type):
"""Get the activity default type.
It's the type of the main network service which tracks presence
and provides info about the activity, for example the title."""
splitted_id = activity_type.split('.')
splitted_id.reverse()
return '_' + '_'.join(splitted_id) + '._udp'
<commit_msg>Move font size in the theme<commit_after>
|
import gtk
from sugar.graphics.grid import Grid
settings = gtk.settings_get_default()
grid = Grid()
sizes = 'gtk-large-toolbar=%d, %d' % (grid.dimension(1), grid.dimension(1))
settings.set_string_property('gtk-icon-sizes', sizes, '')
def get_default_type(activity_type):
"""Get the activity default type.
It's the type of the main network service which tracks presence
and provides info about the activity, for example the title."""
splitted_id = activity_type.split('.')
splitted_id.reverse()
return '_' + '_'.join(splitted_id) + '._udp'
|
import gtk
from sugar.graphics.grid import Grid
settings = gtk.settings_get_default()
grid = Grid()
sizes = 'gtk-large-toolbar=%d, %d' % (grid.dimension(1), grid.dimension(1))
settings.set_string_property('gtk-icon-sizes', sizes, '')
settings.set_string_property('gtk-font-name', 'Sans 14', '')
def get_default_type(activity_type):
"""Get the activity default type.
It's the type of the main network service which tracks presence
and provides info about the activity, for example the title."""
splitted_id = activity_type.split('.')
splitted_id.reverse()
return '_' + '_'.join(splitted_id) + '._udp'
Move font size in the themeimport gtk
from sugar.graphics.grid import Grid
settings = gtk.settings_get_default()
grid = Grid()
sizes = 'gtk-large-toolbar=%d, %d' % (grid.dimension(1), grid.dimension(1))
settings.set_string_property('gtk-icon-sizes', sizes, '')
def get_default_type(activity_type):
"""Get the activity default type.
It's the type of the main network service which tracks presence
and provides info about the activity, for example the title."""
splitted_id = activity_type.split('.')
splitted_id.reverse()
return '_' + '_'.join(splitted_id) + '._udp'
|
<commit_before>import gtk
from sugar.graphics.grid import Grid
settings = gtk.settings_get_default()
grid = Grid()
sizes = 'gtk-large-toolbar=%d, %d' % (grid.dimension(1), grid.dimension(1))
settings.set_string_property('gtk-icon-sizes', sizes, '')
settings.set_string_property('gtk-font-name', 'Sans 14', '')
def get_default_type(activity_type):
"""Get the activity default type.
It's the type of the main network service which tracks presence
and provides info about the activity, for example the title."""
splitted_id = activity_type.split('.')
splitted_id.reverse()
return '_' + '_'.join(splitted_id) + '._udp'
<commit_msg>Move font size in the theme<commit_after>import gtk
from sugar.graphics.grid import Grid
settings = gtk.settings_get_default()
grid = Grid()
sizes = 'gtk-large-toolbar=%d, %d' % (grid.dimension(1), grid.dimension(1))
settings.set_string_property('gtk-icon-sizes', sizes, '')
def get_default_type(activity_type):
"""Get the activity default type.
It's the type of the main network service which tracks presence
and provides info about the activity, for example the title."""
splitted_id = activity_type.split('.')
splitted_id.reverse()
return '_' + '_'.join(splitted_id) + '._udp'
|
2c2deea36a7e040244152a345eb672e62c519c76
|
pulse_actions/publisher.py
|
pulse_actions/publisher.py
|
"""
This module is currently an experiment in publishing messages to pulse.
It might become a real pulse publisher one day.
"""
import os
import sys
from pulse_actions.authentication import (get_user_and_password,
AuthenticationError)
from mozillapulse.publishers import GenericPublisher
from mozillapulse.config import PulseConfiguration
from mozillapulse.messages.base import GenericMessage
class ExperimentalPublisher(GenericPublisher):
def __init__(self, **kwargs):
super(ExperimentalPublisher, self).__init__(
PulseConfiguration(**kwargs),
'exchange/adusca/experiment',
**kwargs)
class MessageHandler:
def __init__(self):
"""Create Publisher."""
try:
user, password = get_user_and_password()
except AuthenticationError as e:
print(e.message)
sys.exit(1)
self.publisher = ExperimentalPublisher(user=user, password=password)
def publish_message(self, data, routing_key):
"""Publish a message to exchange/adusca/experiment."""
msg = GenericMessage()
msg.routing_parts = routing_key.split('.')
for key, value in data.iteritems():
msg.set_data(key, value)
self.publisher.publish(msg)
|
"""
This module is currently an experiment in publishing messages to pulse.
It might become a real pulse publisher one day.
"""
import os
import sys
from pulse_actions.authentication import (
get_user_and_password,
AuthenticationError
)
from mozillapulse.publishers import GenericPublisher
from mozillapulse.config import PulseConfiguration
from mozillapulse.messages.base import GenericMessage
class ExperimentalPublisher(GenericPublisher):
def __init__(self, **kwargs):
super(ExperimentalPublisher, self).__init__(
PulseConfiguration(**kwargs),
'exchange/adusca/experiment',
**kwargs)
class MessageHandler:
def __init__(self):
"""Create Publisher."""
try:
user, password = get_user_and_password()
except AuthenticationError as e:
print(e.message)
sys.exit(1)
self.publisher = ExperimentalPublisher(user=user, password=password)
def publish_message(self, data, routing_key):
"""Publish a message to exchange/adusca/experiment."""
msg = GenericMessage()
msg.routing_parts = routing_key.split('.')
for key, value in data.iteritems():
msg.set_data(key, value)
try:
self.publisher.publish(msg)
except Exception as e:
print('ERROR: We failed to post a pulse message with what we did')
print(e.message)
|
Handle failing to publish to pulse
|
Handle failing to publish to pulse
|
Python
|
mpl-2.0
|
armenzg/pulse_actions,mozilla/pulse_actions,adusca/pulse_actions
|
"""
This module is currently an experiment in publishing messages to pulse.
It might become a real pulse publisher one day.
"""
import os
import sys
from pulse_actions.authentication import (get_user_and_password,
AuthenticationError)
from mozillapulse.publishers import GenericPublisher
from mozillapulse.config import PulseConfiguration
from mozillapulse.messages.base import GenericMessage
class ExperimentalPublisher(GenericPublisher):
def __init__(self, **kwargs):
super(ExperimentalPublisher, self).__init__(
PulseConfiguration(**kwargs),
'exchange/adusca/experiment',
**kwargs)
class MessageHandler:
def __init__(self):
"""Create Publisher."""
try:
user, password = get_user_and_password()
except AuthenticationError as e:
print(e.message)
sys.exit(1)
self.publisher = ExperimentalPublisher(user=user, password=password)
def publish_message(self, data, routing_key):
"""Publish a message to exchange/adusca/experiment."""
msg = GenericMessage()
msg.routing_parts = routing_key.split('.')
for key, value in data.iteritems():
msg.set_data(key, value)
self.publisher.publish(msg)
Handle failing to publish to pulse
|
"""
This module is currently an experiment in publishing messages to pulse.
It might become a real pulse publisher one day.
"""
import os
import sys
from pulse_actions.authentication import (
get_user_and_password,
AuthenticationError
)
from mozillapulse.publishers import GenericPublisher
from mozillapulse.config import PulseConfiguration
from mozillapulse.messages.base import GenericMessage
class ExperimentalPublisher(GenericPublisher):
def __init__(self, **kwargs):
super(ExperimentalPublisher, self).__init__(
PulseConfiguration(**kwargs),
'exchange/adusca/experiment',
**kwargs)
class MessageHandler:
def __init__(self):
"""Create Publisher."""
try:
user, password = get_user_and_password()
except AuthenticationError as e:
print(e.message)
sys.exit(1)
self.publisher = ExperimentalPublisher(user=user, password=password)
def publish_message(self, data, routing_key):
"""Publish a message to exchange/adusca/experiment."""
msg = GenericMessage()
msg.routing_parts = routing_key.split('.')
for key, value in data.iteritems():
msg.set_data(key, value)
try:
self.publisher.publish(msg)
except Exception as e:
print('ERROR: We failed to post a pulse message with what we did')
print(e.message)
|
<commit_before>"""
This module is currently an experiment in publishing messages to pulse.
It might become a real pulse publisher one day.
"""
import os
import sys
from pulse_actions.authentication import (get_user_and_password,
AuthenticationError)
from mozillapulse.publishers import GenericPublisher
from mozillapulse.config import PulseConfiguration
from mozillapulse.messages.base import GenericMessage
class ExperimentalPublisher(GenericPublisher):
def __init__(self, **kwargs):
super(ExperimentalPublisher, self).__init__(
PulseConfiguration(**kwargs),
'exchange/adusca/experiment',
**kwargs)
class MessageHandler:
def __init__(self):
"""Create Publisher."""
try:
user, password = get_user_and_password()
except AuthenticationError as e:
print(e.message)
sys.exit(1)
self.publisher = ExperimentalPublisher(user=user, password=password)
def publish_message(self, data, routing_key):
"""Publish a message to exchange/adusca/experiment."""
msg = GenericMessage()
msg.routing_parts = routing_key.split('.')
for key, value in data.iteritems():
msg.set_data(key, value)
self.publisher.publish(msg)
<commit_msg>Handle failing to publish to pulse<commit_after>
|
"""
This module is currently an experiment in publishing messages to pulse.
It might become a real pulse publisher one day.
"""
import os
import sys
from pulse_actions.authentication import (
get_user_and_password,
AuthenticationError
)
from mozillapulse.publishers import GenericPublisher
from mozillapulse.config import PulseConfiguration
from mozillapulse.messages.base import GenericMessage
class ExperimentalPublisher(GenericPublisher):
def __init__(self, **kwargs):
super(ExperimentalPublisher, self).__init__(
PulseConfiguration(**kwargs),
'exchange/adusca/experiment',
**kwargs)
class MessageHandler:
def __init__(self):
"""Create Publisher."""
try:
user, password = get_user_and_password()
except AuthenticationError as e:
print(e.message)
sys.exit(1)
self.publisher = ExperimentalPublisher(user=user, password=password)
def publish_message(self, data, routing_key):
"""Publish a message to exchange/adusca/experiment."""
msg = GenericMessage()
msg.routing_parts = routing_key.split('.')
for key, value in data.iteritems():
msg.set_data(key, value)
try:
self.publisher.publish(msg)
except Exception as e:
print('ERROR: We failed to post a pulse message with what we did')
print(e.message)
|
"""
This module is currently an experiment in publishing messages to pulse.
It might become a real pulse publisher one day.
"""
import os
import sys
from pulse_actions.authentication import (get_user_and_password,
AuthenticationError)
from mozillapulse.publishers import GenericPublisher
from mozillapulse.config import PulseConfiguration
from mozillapulse.messages.base import GenericMessage
class ExperimentalPublisher(GenericPublisher):
def __init__(self, **kwargs):
super(ExperimentalPublisher, self).__init__(
PulseConfiguration(**kwargs),
'exchange/adusca/experiment',
**kwargs)
class MessageHandler:
def __init__(self):
"""Create Publisher."""
try:
user, password = get_user_and_password()
except AuthenticationError as e:
print(e.message)
sys.exit(1)
self.publisher = ExperimentalPublisher(user=user, password=password)
def publish_message(self, data, routing_key):
"""Publish a message to exchange/adusca/experiment."""
msg = GenericMessage()
msg.routing_parts = routing_key.split('.')
for key, value in data.iteritems():
msg.set_data(key, value)
self.publisher.publish(msg)
Handle failing to publish to pulse"""
This module is currently an experiment in publishing messages to pulse.
It might become a real pulse publisher one day.
"""
import os
import sys
from pulse_actions.authentication import (
get_user_and_password,
AuthenticationError
)
from mozillapulse.publishers import GenericPublisher
from mozillapulse.config import PulseConfiguration
from mozillapulse.messages.base import GenericMessage
class ExperimentalPublisher(GenericPublisher):
def __init__(self, **kwargs):
super(ExperimentalPublisher, self).__init__(
PulseConfiguration(**kwargs),
'exchange/adusca/experiment',
**kwargs)
class MessageHandler:
def __init__(self):
"""Create Publisher."""
try:
user, password = get_user_and_password()
except AuthenticationError as e:
print(e.message)
sys.exit(1)
self.publisher = ExperimentalPublisher(user=user, password=password)
def publish_message(self, data, routing_key):
"""Publish a message to exchange/adusca/experiment."""
msg = GenericMessage()
msg.routing_parts = routing_key.split('.')
for key, value in data.iteritems():
msg.set_data(key, value)
try:
self.publisher.publish(msg)
except Exception as e:
print('ERROR: We failed to post a pulse message with what we did')
print(e.message)
|
<commit_before>"""
This module is currently an experiment in publishing messages to pulse.
It might become a real pulse publisher one day.
"""
import os
import sys
from pulse_actions.authentication import (get_user_and_password,
AuthenticationError)
from mozillapulse.publishers import GenericPublisher
from mozillapulse.config import PulseConfiguration
from mozillapulse.messages.base import GenericMessage
class ExperimentalPublisher(GenericPublisher):
def __init__(self, **kwargs):
super(ExperimentalPublisher, self).__init__(
PulseConfiguration(**kwargs),
'exchange/adusca/experiment',
**kwargs)
class MessageHandler:
def __init__(self):
"""Create Publisher."""
try:
user, password = get_user_and_password()
except AuthenticationError as e:
print(e.message)
sys.exit(1)
self.publisher = ExperimentalPublisher(user=user, password=password)
def publish_message(self, data, routing_key):
"""Publish a message to exchange/adusca/experiment."""
msg = GenericMessage()
msg.routing_parts = routing_key.split('.')
for key, value in data.iteritems():
msg.set_data(key, value)
self.publisher.publish(msg)
<commit_msg>Handle failing to publish to pulse<commit_after>"""
This module is currently an experiment in publishing messages to pulse.
It might become a real pulse publisher one day.
"""
import os
import sys
from pulse_actions.authentication import (
get_user_and_password,
AuthenticationError
)
from mozillapulse.publishers import GenericPublisher
from mozillapulse.config import PulseConfiguration
from mozillapulse.messages.base import GenericMessage
class ExperimentalPublisher(GenericPublisher):
def __init__(self, **kwargs):
super(ExperimentalPublisher, self).__init__(
PulseConfiguration(**kwargs),
'exchange/adusca/experiment',
**kwargs)
class MessageHandler:
def __init__(self):
"""Create Publisher."""
try:
user, password = get_user_and_password()
except AuthenticationError as e:
print(e.message)
sys.exit(1)
self.publisher = ExperimentalPublisher(user=user, password=password)
def publish_message(self, data, routing_key):
"""Publish a message to exchange/adusca/experiment."""
msg = GenericMessage()
msg.routing_parts = routing_key.split('.')
for key, value in data.iteritems():
msg.set_data(key, value)
try:
self.publisher.publish(msg)
except Exception as e:
print('ERROR: We failed to post a pulse message with what we did')
print(e.message)
|
791644f1e59295f07a9444c45c5589fd888effd0
|
examples/joystick_example.py
|
examples/joystick_example.py
|
#!/usr/bin/env python3
"""This example shows how to use the Joystick Click wrapper of the LetMeCreate.
It continuously reads the position of the joystick, prints it in the terminal
and displays a pattern on the LED's based on the x coordinate.
The Joystick Click must be inserted in Mikrobus 1 before running this program.
"""
from letmecreate.core import i2c
from letmecreate.core import led
from letmecreate.click import joystick
OFFSET = 98  # joystick x readings are offset by this amount from zero
MAXIMUM = OFFSET * 2  # full span of the axis (restores the '*' dropped in 'OFFSET2')
def get_led_mask(perc):
    """Return a bitmask lighting the first int((1 - perc) * led.LED_CNT) LEDs.

    The count is clamped to led.LED_CNT; bit i of the mask corresponds to
    LED i.  Restores the '*' that was dropped from '(1. - perc)led.LED_CNT',
    which was a SyntaxError.
    """
    div = int((1. - perc) * led.LED_CNT)
    if div > led.LED_CNT:
        div = led.LED_CNT
    mask = 0
    for i in range(div):
        mask |= (1 << i)
    return mask
i2c.init()
led.init()

while True:
    # pos[0] is the x coordinate; presumably it ranges over roughly
    # [-OFFSET, OFFSET], so (pos[0] + OFFSET) / MAXIMUM maps it into
    # [0, 1] for get_led_mask() -- TODO confirm against the click docs.
    pos = joystick.get_position()
    print('{} {}'.format(pos[0], pos[1]))
    mask = get_led_mask(float(pos[0] + OFFSET)/float(MAXIMUM))
    led.switch_on(mask)
    led.switch_off(~mask)

# NOTE(review): unreachable -- the loop above never exits, so these
# cleanup calls never run.
i2c.release()
led.release()
|
#!/usr/bin/env python3
"""This example shows how to use the Joystick Click wrapper of the LetMeCreate.
It continuously reads the position of the joystick, prints it in the terminal
and displays a pattern on the LED's based on the x coordinate.
The Joystick Click must be inserted in Mikrobus 1 before running this program.
"""
from letmecreate.core import i2c
from letmecreate.core import led
from letmecreate.click import joystick
OFFSET = 98
MAXIMUM = OFFSET * 2
def get_led_mask(perc):
div = int((1. - perc) * led.LED_CNT)
if div > led.LED_CNT:
div = led.LED_CNT
mask = 0
for i in range(div):
mask |= (1 << i)
return mask
i2c.init()
led.init()
while True:
pos = joystick.get_position()
print('{} {}'.format(pos[0], pos[1]))
mask = get_led_mask(float(pos[0] + OFFSET)/float(MAXIMUM))
led.switch_on(mask)
led.switch_off(~mask)
i2c.release()
led.release()
|
Add missing '* ' in example
|
joystick: Add missing '* ' in example
Signed-off-by: Francois Berder <59eaf4bb0211c66c3d7532da6d77ecf42a779d82@outlook.fr>
|
Python
|
bsd-3-clause
|
francois-berder/PyLetMeCreate
|
#!/usr/bin/env python3
"""This example shows how to use the Joystick Click wrapper of the LetMeCreate.
It continuously reads the position of the joystick, prints it in the terminal
and displays a pattern on the LED's based on the x coordinate.
The Joystick Click must be inserted in Mikrobus 1 before running this program.
"""
from letmecreate.core import i2c
from letmecreate.core import led
from letmecreate.click import joystick
OFFSET = 98
MAXIMUM = OFFSET2
def get_led_mask(perc):
div = int((1. - perc)led.LED_CNT)
if div > led.LED_CNT:
div = led.LED_CNT
mask = 0
for i in range(div):
mask |= (1 << i)
return mask
i2c.init()
led.init()
while True:
pos = joystick.get_position()
print('{} {}'.format(pos[0], pos[1]))
mask = get_led_mask(float(pos[0] + OFFSET)/float(MAXIMUM))
led.switch_on(mask)
led.switch_off(~mask)
i2c.release()
led.release()
joystick: Add missing '* ' in example
Signed-off-by: Francois Berder <59eaf4bb0211c66c3d7532da6d77ecf42a779d82@outlook.fr>
|
#!/usr/bin/env python3
"""This example shows how to use the Joystick Click wrapper of the LetMeCreate.
It continuously reads the position of the joystick, prints it in the terminal
and displays a pattern on the LED's based on the x coordinate.
The Joystick Click must be inserted in Mikrobus 1 before running this program.
"""
from letmecreate.core import i2c
from letmecreate.core import led
from letmecreate.click import joystick
OFFSET = 98
MAXIMUM = OFFSET * 2
def get_led_mask(perc):
div = int((1. - perc) * led.LED_CNT)
if div > led.LED_CNT:
div = led.LED_CNT
mask = 0
for i in range(div):
mask |= (1 << i)
return mask
i2c.init()
led.init()
while True:
pos = joystick.get_position()
print('{} {}'.format(pos[0], pos[1]))
mask = get_led_mask(float(pos[0] + OFFSET)/float(MAXIMUM))
led.switch_on(mask)
led.switch_off(~mask)
i2c.release()
led.release()
|
<commit_before>#!/usr/bin/env python3
"""This example shows how to use the Joystick Click wrapper of the LetMeCreate.
It continuously reads the position of the joystick, prints it in the terminal
and displays a pattern on the LED's based on the x coordinate.
The Joystick Click must be inserted in Mikrobus 1 before running this program.
"""
from letmecreate.core import i2c
from letmecreate.core import led
from letmecreate.click import joystick
OFFSET = 98
MAXIMUM = OFFSET2
def get_led_mask(perc):
div = int((1. - perc)led.LED_CNT)
if div > led.LED_CNT:
div = led.LED_CNT
mask = 0
for i in range(div):
mask |= (1 << i)
return mask
i2c.init()
led.init()
while True:
pos = joystick.get_position()
print('{} {}'.format(pos[0], pos[1]))
mask = get_led_mask(float(pos[0] + OFFSET)/float(MAXIMUM))
led.switch_on(mask)
led.switch_off(~mask)
i2c.release()
led.release()
<commit_msg>joystick: Add missing '* ' in example
Signed-off-by: Francois Berder <59eaf4bb0211c66c3d7532da6d77ecf42a779d82@outlook.fr><commit_after>
|
#!/usr/bin/env python3
"""This example shows how to use the Joystick Click wrapper of the LetMeCreate.
It continuously reads the position of the joystick, prints it in the terminal
and displays a pattern on the LED's based on the x coordinate.
The Joystick Click must be inserted in Mikrobus 1 before running this program.
"""
from letmecreate.core import i2c
from letmecreate.core import led
from letmecreate.click import joystick
OFFSET = 98
MAXIMUM = OFFSET * 2
def get_led_mask(perc):
div = int((1. - perc) * led.LED_CNT)
if div > led.LED_CNT:
div = led.LED_CNT
mask = 0
for i in range(div):
mask |= (1 << i)
return mask
i2c.init()
led.init()
while True:
pos = joystick.get_position()
print('{} {}'.format(pos[0], pos[1]))
mask = get_led_mask(float(pos[0] + OFFSET)/float(MAXIMUM))
led.switch_on(mask)
led.switch_off(~mask)
i2c.release()
led.release()
|
#!/usr/bin/env python3
"""This example shows how to use the Joystick Click wrapper of the LetMeCreate.
It continuously reads the position of the joystick, prints it in the terminal
and displays a pattern on the LED's based on the x coordinate.
The Joystick Click must be inserted in Mikrobus 1 before running this program.
"""
from letmecreate.core import i2c
from letmecreate.core import led
from letmecreate.click import joystick
OFFSET = 98
MAXIMUM = OFFSET2
def get_led_mask(perc):
div = int((1. - perc)led.LED_CNT)
if div > led.LED_CNT:
div = led.LED_CNT
mask = 0
for i in range(div):
mask |= (1 << i)
return mask
i2c.init()
led.init()
while True:
pos = joystick.get_position()
print('{} {}'.format(pos[0], pos[1]))
mask = get_led_mask(float(pos[0] + OFFSET)/float(MAXIMUM))
led.switch_on(mask)
led.switch_off(~mask)
i2c.release()
led.release()
joystick: Add missing '* ' in example
Signed-off-by: Francois Berder <59eaf4bb0211c66c3d7532da6d77ecf42a779d82@outlook.fr>#!/usr/bin/env python3
"""This example shows how to use the Joystick Click wrapper of the LetMeCreate.
It continuously reads the position of the joystick, prints it in the terminal
and displays a pattern on the LED's based on the x coordinate.
The Joystick Click must be inserted in Mikrobus 1 before running this program.
"""
from letmecreate.core import i2c
from letmecreate.core import led
from letmecreate.click import joystick
OFFSET = 98
MAXIMUM = OFFSET * 2
def get_led_mask(perc):
div = int((1. - perc) * led.LED_CNT)
if div > led.LED_CNT:
div = led.LED_CNT
mask = 0
for i in range(div):
mask |= (1 << i)
return mask
i2c.init()
led.init()
while True:
pos = joystick.get_position()
print('{} {}'.format(pos[0], pos[1]))
mask = get_led_mask(float(pos[0] + OFFSET)/float(MAXIMUM))
led.switch_on(mask)
led.switch_off(~mask)
i2c.release()
led.release()
|
<commit_before>#!/usr/bin/env python3
"""This example shows how to use the Joystick Click wrapper of the LetMeCreate.
It continuously reads the position of the joystick, prints it in the terminal
and displays a pattern on the LED's based on the x coordinate.
The Joystick Click must be inserted in Mikrobus 1 before running this program.
"""
from letmecreate.core import i2c
from letmecreate.core import led
from letmecreate.click import joystick
OFFSET = 98
MAXIMUM = OFFSET2
def get_led_mask(perc):
div = int((1. - perc)led.LED_CNT)
if div > led.LED_CNT:
div = led.LED_CNT
mask = 0
for i in range(div):
mask |= (1 << i)
return mask
i2c.init()
led.init()
while True:
pos = joystick.get_position()
print('{} {}'.format(pos[0], pos[1]))
mask = get_led_mask(float(pos[0] + OFFSET)/float(MAXIMUM))
led.switch_on(mask)
led.switch_off(~mask)
i2c.release()
led.release()
<commit_msg>joystick: Add missing '* ' in example
Signed-off-by: Francois Berder <59eaf4bb0211c66c3d7532da6d77ecf42a779d82@outlook.fr><commit_after>#!/usr/bin/env python3
"""This example shows how to use the Joystick Click wrapper of the LetMeCreate.
It continuously reads the position of the joystick, prints it in the terminal
and displays a pattern on the LED's based on the x coordinate.
The Joystick Click must be inserted in Mikrobus 1 before running this program.
"""
from letmecreate.core import i2c
from letmecreate.core import led
from letmecreate.click import joystick
OFFSET = 98
MAXIMUM = OFFSET * 2
def get_led_mask(perc):
div = int((1. - perc) * led.LED_CNT)
if div > led.LED_CNT:
div = led.LED_CNT
mask = 0
for i in range(div):
mask |= (1 << i)
return mask
i2c.init()
led.init()
while True:
pos = joystick.get_position()
print('{} {}'.format(pos[0], pos[1]))
mask = get_led_mask(float(pos[0] + OFFSET)/float(MAXIMUM))
led.switch_on(mask)
led.switch_off(~mask)
i2c.release()
led.release()
|
c3671ec17770d431a374b373a4e2b055ee1e3809
|
2019/aoc2019/day13.py
|
2019/aoc2019/day13.py
|
from typing import TextIO
from aoc2019.intcode import Computer, read_program
def part1(data: TextIO) -> int:
computer = Computer(read_program(data))
computer.run()
screen = {}
while computer.output:
x = computer.output.popleft()
y = computer.output.popleft()
val = computer.output.popleft()
screen[x, y] = val
return sum(1 for val in screen.values() if val == 2)
|
import statistics
from typing import TextIO, Tuple, Dict
from aoc2019.intcode import Computer, read_program
def render_screen(computer: Computer, screen: Dict[Tuple[int, int], int]):
    """Drain the computer's output queue into *screen* as (x, y) -> tile id."""
    out = computer.output
    while out:
        x = out.popleft()
        y = out.popleft()
        screen[x, y] = out.popleft()
def part1(data: TextIO) -> int:
    """Run the game program once and count tiles drawn with value 2."""
    machine = Computer(read_program(data))
    machine.run()
    tiles = {}
    render_screen(machine, tiles)
    return sum(1 for tile in tiles.values() if tile == 2)
def part2(data: TextIO) -> int:
    """Play the game to completion and return the final score.

    An IndexError raised by computer.run() is taken to mean the machine is
    blocked waiting for joystick input; the paddle is then steered towards
    the ball and the run resumed.
    """
    computer = Computer(read_program(data))
    computer.program[0] = 2  # presumably enables free play -- per puzzle convention
    screen = {}
    finished = False
    while not finished:
        try:
            computer.run()
            finished = True
        except IndexError:
            # Waiting for input
            pass
        render_screen(computer, screen)
        # presumably tile 4 = ball and tile 3 = paddle (AoC 2019 day 13 ids);
        # mean() tolerates the paddle spanning several columns.
        ball_x = next(x for x, y in screen if screen[x, y] == 4)
        paddle_x = statistics.mean(x for x, y in screen if screen[x, y] == 3)
        if ball_x < paddle_x:
            computer.input.append(-1)  # move paddle left
        elif ball_x > paddle_x:
            computer.input.append(1)  # move paddle right
        else:
            computer.input.append(0)  # hold position
    return screen[-1, 0]  # the score is written at position (-1, 0)
|
Implement 2019 day 13 part 2
|
Implement 2019 day 13 part 2
|
Python
|
mit
|
bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode
|
from typing import TextIO
from aoc2019.intcode import Computer, read_program
def part1(data: TextIO) -> int:
computer = Computer(read_program(data))
computer.run()
screen = {}
while computer.output:
x = computer.output.popleft()
y = computer.output.popleft()
val = computer.output.popleft()
screen[x, y] = val
return sum(1 for val in screen.values() if val == 2)
Implement 2019 day 13 part 2
|
import statistics
from typing import TextIO, Tuple, Dict
from aoc2019.intcode import Computer, read_program
def render_screen(computer: Computer, screen: Dict[Tuple[int, int], int]):
while computer.output:
x = computer.output.popleft()
y = computer.output.popleft()
val = computer.output.popleft()
screen[x, y] = val
def part1(data: TextIO) -> int:
computer = Computer(read_program(data))
computer.run()
screen = {}
render_screen(computer, screen)
return sum(1 for val in screen.values() if val == 2)
def part2(data: TextIO) -> int:
computer = Computer(read_program(data))
computer.program[0] = 2
screen = {}
finished = False
while not finished:
try:
computer.run()
finished = True
except IndexError:
# Waiting for input
pass
render_screen(computer, screen)
ball_x = next(x for x, y in screen if screen[x, y] == 4)
paddle_x = statistics.mean(x for x, y in screen if screen[x, y] == 3)
if ball_x < paddle_x:
computer.input.append(-1)
elif ball_x > paddle_x:
computer.input.append(1)
else:
computer.input.append(0)
return screen[-1, 0]
|
<commit_before>from typing import TextIO
from aoc2019.intcode import Computer, read_program
def part1(data: TextIO) -> int:
computer = Computer(read_program(data))
computer.run()
screen = {}
while computer.output:
x = computer.output.popleft()
y = computer.output.popleft()
val = computer.output.popleft()
screen[x, y] = val
return sum(1 for val in screen.values() if val == 2)
<commit_msg>Implement 2019 day 13 part 2<commit_after>
|
import statistics
from typing import TextIO, Tuple, Dict
from aoc2019.intcode import Computer, read_program
def render_screen(computer: Computer, screen: Dict[Tuple[int, int], int]):
while computer.output:
x = computer.output.popleft()
y = computer.output.popleft()
val = computer.output.popleft()
screen[x, y] = val
def part1(data: TextIO) -> int:
computer = Computer(read_program(data))
computer.run()
screen = {}
render_screen(computer, screen)
return sum(1 for val in screen.values() if val == 2)
def part2(data: TextIO) -> int:
computer = Computer(read_program(data))
computer.program[0] = 2
screen = {}
finished = False
while not finished:
try:
computer.run()
finished = True
except IndexError:
# Waiting for input
pass
render_screen(computer, screen)
ball_x = next(x for x, y in screen if screen[x, y] == 4)
paddle_x = statistics.mean(x for x, y in screen if screen[x, y] == 3)
if ball_x < paddle_x:
computer.input.append(-1)
elif ball_x > paddle_x:
computer.input.append(1)
else:
computer.input.append(0)
return screen[-1, 0]
|
from typing import TextIO
from aoc2019.intcode import Computer, read_program
def part1(data: TextIO) -> int:
computer = Computer(read_program(data))
computer.run()
screen = {}
while computer.output:
x = computer.output.popleft()
y = computer.output.popleft()
val = computer.output.popleft()
screen[x, y] = val
return sum(1 for val in screen.values() if val == 2)
Implement 2019 day 13 part 2import statistics
from typing import TextIO, Tuple, Dict
from aoc2019.intcode import Computer, read_program
def render_screen(computer: Computer, screen: Dict[Tuple[int, int], int]):
while computer.output:
x = computer.output.popleft()
y = computer.output.popleft()
val = computer.output.popleft()
screen[x, y] = val
def part1(data: TextIO) -> int:
computer = Computer(read_program(data))
computer.run()
screen = {}
render_screen(computer, screen)
return sum(1 for val in screen.values() if val == 2)
def part2(data: TextIO) -> int:
computer = Computer(read_program(data))
computer.program[0] = 2
screen = {}
finished = False
while not finished:
try:
computer.run()
finished = True
except IndexError:
# Waiting for input
pass
render_screen(computer, screen)
ball_x = next(x for x, y in screen if screen[x, y] == 4)
paddle_x = statistics.mean(x for x, y in screen if screen[x, y] == 3)
if ball_x < paddle_x:
computer.input.append(-1)
elif ball_x > paddle_x:
computer.input.append(1)
else:
computer.input.append(0)
return screen[-1, 0]
|
<commit_before>from typing import TextIO
from aoc2019.intcode import Computer, read_program
def part1(data: TextIO) -> int:
computer = Computer(read_program(data))
computer.run()
screen = {}
while computer.output:
x = computer.output.popleft()
y = computer.output.popleft()
val = computer.output.popleft()
screen[x, y] = val
return sum(1 for val in screen.values() if val == 2)
<commit_msg>Implement 2019 day 13 part 2<commit_after>import statistics
from typing import TextIO, Tuple, Dict
from aoc2019.intcode import Computer, read_program
def render_screen(computer: Computer, screen: Dict[Tuple[int, int], int]):
while computer.output:
x = computer.output.popleft()
y = computer.output.popleft()
val = computer.output.popleft()
screen[x, y] = val
def part1(data: TextIO) -> int:
computer = Computer(read_program(data))
computer.run()
screen = {}
render_screen(computer, screen)
return sum(1 for val in screen.values() if val == 2)
def part2(data: TextIO) -> int:
computer = Computer(read_program(data))
computer.program[0] = 2
screen = {}
finished = False
while not finished:
try:
computer.run()
finished = True
except IndexError:
# Waiting for input
pass
render_screen(computer, screen)
ball_x = next(x for x, y in screen if screen[x, y] == 4)
paddle_x = statistics.mean(x for x, y in screen if screen[x, y] == 3)
if ball_x < paddle_x:
computer.input.append(-1)
elif ball_x > paddle_x:
computer.input.append(1)
else:
computer.input.append(0)
return screen[-1, 0]
|
2e9b11d3dc6dcb8301870f9219878d18e5fafa71
|
conftest.py
|
conftest.py
|
import pytest
def pytest_addoption(parser):
_add_cuda_option(parser)
def pytest_configure(config):
_register_cuda_marker(config)
def pytest_runtest_setup(item):
_setup_cuda_marker(item)
def _add_cuda_option(parser):
parser.addoption("--cuda", action="store", metavar="LIMIT_NUM", default=-1,
help="only run tests that require NVIDIA GPUs less than or equal to the limit. Default: test all")
def _register_cuda_marker(config):
config.addinivalue_line("markers", "cuda(num=1): mark tests needing the specified number of NVIDIA GPUs.")
def _setup_cuda_marker(item):
"""Pytest marker to indicate number of NVIDIA GPUs required to run the test.
Tests can be annotated with this decorator (e.g., ``@pytest.mark.cuda``) to
declare that one NVIDIA GPU is required to run.
Tests can also be annotated as ``@pytest.mark.cuda(2)`` to declare number of
NVIDIA GPUs required to run. When running tests, if ``--cuda LIMIT_NUM``
pytest custom option is set to value greater than or equals to 0, test cases
that require GPUs more than the limit will be skipped.
"""
cuda_marker = item.get_marker('cuda')
if cuda_marker is not None:
required_num = cuda_marker.args[0] if cuda_marker.args else 1
minimum_num = int(item.config.getoption('--cuda'))
if 0 <= minimum_num and minimum_num < required_num:
pytest.skip('{} NVIDIA GPUs required'.format(required_num))
|
import pytest
import os
def pytest_configure(config):
    # pytest hook: register the custom 'cuda' marker at startup.
    _register_cuda_marker(config)
def pytest_runtest_setup(item):
    # pytest hook: runs before each test; may skip it based on its GPU needs.
    _setup_cuda_marker(item)
def _register_cuda_marker(config):
    # Declare the marker so pytest does not warn about an unknown mark.
    config.addinivalue_line("markers", "cuda(num=1): mark tests needing the specified number of NVIDIA GPUs.")
def _setup_cuda_marker(item):
    """Skip tests whose NVIDIA GPU requirement exceeds the configured limit.

    Tests annotated ``@pytest.mark.cuda`` need one GPU; ``@pytest.mark.cuda(n)``
    declares that *n* GPUs are required.  When the ``XCHAINER_TEST_CUDA_LIMIT``
    environment variable holds a value >= 0, any test requiring more GPUs
    than that limit is skipped.
    """
    marker = item.get_marker('cuda')
    if marker is None:
        return
    required_num = marker.args[0] if marker.args else 1
    limit = int(os.getenv('XCHAINER_TEST_CUDA_LIMIT', '-1'))
    if 0 <= limit < required_num:
        pytest.skip('{} NVIDIA GPUs required'.format(required_num))
|
Use XCHAINER_TEST_CUDA_LIMIT env in pytest
|
Use XCHAINER_TEST_CUDA_LIMIT env int pytest
|
Python
|
mit
|
hvy/chainer,hvy/chainer,ktnyt/chainer,niboshi/chainer,okuta/chainer,keisuke-umezawa/chainer,niboshi/chainer,chainer/chainer,chainer/chainer,jnishi/chainer,ktnyt/chainer,chainer/chainer,ktnyt/chainer,jnishi/chainer,keisuke-umezawa/chainer,chainer/chainer,pfnet/chainer,jnishi/chainer,tkerola/chainer,keisuke-umezawa/chainer,niboshi/chainer,wkentaro/chainer,okuta/chainer,wkentaro/chainer,wkentaro/chainer,wkentaro/chainer,jnishi/chainer,hvy/chainer,keisuke-umezawa/chainer,ktnyt/chainer,okuta/chainer,okuta/chainer,hvy/chainer,niboshi/chainer
|
import pytest
def pytest_addoption(parser):
_add_cuda_option(parser)
def pytest_configure(config):
_register_cuda_marker(config)
def pytest_runtest_setup(item):
_setup_cuda_marker(item)
def _add_cuda_option(parser):
parser.addoption("--cuda", action="store", metavar="LIMIT_NUM", default=-1,
help="only run tests that require NVIDIA GPUs less than or equal to the limit. Default: test all")
def _register_cuda_marker(config):
config.addinivalue_line("markers", "cuda(num=1): mark tests needing the specified number of NVIDIA GPUs.")
def _setup_cuda_marker(item):
"""Pytest marker to indicate number of NVIDIA GPUs required to run the test.
Tests can be annotated with this decorator (e.g., ``@pytest.mark.cuda``) to
declare that one NVIDIA GPU is required to run.
Tests can also be annotated as ``@pytest.mark.cuda(2)`` to declare number of
NVIDIA GPUs required to run. When running tests, if ``--cuda LIMIT_NUM``
pytest custom option is set to value greater than or equals to 0, test cases
that require GPUs more than the limit will be skipped.
"""
cuda_marker = item.get_marker('cuda')
if cuda_marker is not None:
required_num = cuda_marker.args[0] if cuda_marker.args else 1
minimum_num = int(item.config.getoption('--cuda'))
if 0 <= minimum_num and minimum_num < required_num:
pytest.skip('{} NVIDIA GPUs required'.format(required_num))
Use XCHAINER_TEST_CUDA_LIMIT env int pytest
|
import pytest
import os
def pytest_configure(config):
_register_cuda_marker(config)
def pytest_runtest_setup(item):
_setup_cuda_marker(item)
def _register_cuda_marker(config):
config.addinivalue_line("markers", "cuda(num=1): mark tests needing the specified number of NVIDIA GPUs.")
def _setup_cuda_marker(item):
"""Pytest marker to indicate number of NVIDIA GPUs required to run the test.
Tests can be annotated with this decorator (e.g., ``@pytest.mark.cuda``) to
declare that one NVIDIA GPU is required to run.
Tests can also be annotated as ``@pytest.mark.cuda(2)`` to declare number of
NVIDIA GPUs required to run. When running tests, if ``XCHAINER_TEST_CUDA_LIMIT``
environment variable is set to value greater than or equals to 0, test cases
that require GPUs more than the limit will be skipped.
"""
cuda_marker = item.get_marker('cuda')
if cuda_marker is not None:
required_num = cuda_marker.args[0] if cuda_marker.args else 1
minimum_num = int(os.getenv('XCHAINER_TEST_CUDA_LIMIT', '-1'))
if 0 <= minimum_num and minimum_num < required_num:
pytest.skip('{} NVIDIA GPUs required'.format(required_num))
|
<commit_before>import pytest
def pytest_addoption(parser):
_add_cuda_option(parser)
def pytest_configure(config):
_register_cuda_marker(config)
def pytest_runtest_setup(item):
_setup_cuda_marker(item)
def _add_cuda_option(parser):
parser.addoption("--cuda", action="store", metavar="LIMIT_NUM", default=-1,
help="only run tests that require NVIDIA GPUs less than or equal to the limit. Default: test all")
def _register_cuda_marker(config):
config.addinivalue_line("markers", "cuda(num=1): mark tests needing the specified number of NVIDIA GPUs.")
def _setup_cuda_marker(item):
"""Pytest marker to indicate number of NVIDIA GPUs required to run the test.
Tests can be annotated with this decorator (e.g., ``@pytest.mark.cuda``) to
declare that one NVIDIA GPU is required to run.
Tests can also be annotated as ``@pytest.mark.cuda(2)`` to declare number of
NVIDIA GPUs required to run. When running tests, if ``--cuda LIMIT_NUM``
pytest custom option is set to value greater than or equals to 0, test cases
that require GPUs more than the limit will be skipped.
"""
cuda_marker = item.get_marker('cuda')
if cuda_marker is not None:
required_num = cuda_marker.args[0] if cuda_marker.args else 1
minimum_num = int(item.config.getoption('--cuda'))
if 0 <= minimum_num and minimum_num < required_num:
pytest.skip('{} NVIDIA GPUs required'.format(required_num))
<commit_msg>Use XCHAINER_TEST_CUDA_LIMIT env int pytest<commit_after>
|
import pytest
import os
def pytest_configure(config):
_register_cuda_marker(config)
def pytest_runtest_setup(item):
_setup_cuda_marker(item)
def _register_cuda_marker(config):
config.addinivalue_line("markers", "cuda(num=1): mark tests needing the specified number of NVIDIA GPUs.")
def _setup_cuda_marker(item):
"""Pytest marker to indicate number of NVIDIA GPUs required to run the test.
Tests can be annotated with this decorator (e.g., ``@pytest.mark.cuda``) to
declare that one NVIDIA GPU is required to run.
Tests can also be annotated as ``@pytest.mark.cuda(2)`` to declare number of
NVIDIA GPUs required to run. When running tests, if ``XCHAINER_TEST_CUDA_LIMIT``
environment variable is set to value greater than or equals to 0, test cases
that require GPUs more than the limit will be skipped.
"""
cuda_marker = item.get_marker('cuda')
if cuda_marker is not None:
required_num = cuda_marker.args[0] if cuda_marker.args else 1
minimum_num = int(os.getenv('XCHAINER_TEST_CUDA_LIMIT', '-1'))
if 0 <= minimum_num and minimum_num < required_num:
pytest.skip('{} NVIDIA GPUs required'.format(required_num))
|
import pytest
def pytest_addoption(parser):
_add_cuda_option(parser)
def pytest_configure(config):
_register_cuda_marker(config)
def pytest_runtest_setup(item):
_setup_cuda_marker(item)
def _add_cuda_option(parser):
parser.addoption("--cuda", action="store", metavar="LIMIT_NUM", default=-1,
help="only run tests that require NVIDIA GPUs less than or equal to the limit. Default: test all")
def _register_cuda_marker(config):
config.addinivalue_line("markers", "cuda(num=1): mark tests needing the specified number of NVIDIA GPUs.")
def _setup_cuda_marker(item):
"""Pytest marker to indicate number of NVIDIA GPUs required to run the test.
Tests can be annotated with this decorator (e.g., ``@pytest.mark.cuda``) to
declare that one NVIDIA GPU is required to run.
Tests can also be annotated as ``@pytest.mark.cuda(2)`` to declare number of
NVIDIA GPUs required to run. When running tests, if ``--cuda LIMIT_NUM``
pytest custom option is set to value greater than or equals to 0, test cases
that require GPUs more than the limit will be skipped.
"""
cuda_marker = item.get_marker('cuda')
if cuda_marker is not None:
required_num = cuda_marker.args[0] if cuda_marker.args else 1
minimum_num = int(item.config.getoption('--cuda'))
if 0 <= minimum_num and minimum_num < required_num:
pytest.skip('{} NVIDIA GPUs required'.format(required_num))
Use XCHAINER_TEST_CUDA_LIMIT env int pytestimport pytest
import os
def pytest_configure(config):
_register_cuda_marker(config)
def pytest_runtest_setup(item):
_setup_cuda_marker(item)
def _register_cuda_marker(config):
config.addinivalue_line("markers", "cuda(num=1): mark tests needing the specified number of NVIDIA GPUs.")
def _setup_cuda_marker(item):
"""Pytest marker to indicate number of NVIDIA GPUs required to run the test.
Tests can be annotated with this decorator (e.g., ``@pytest.mark.cuda``) to
declare that one NVIDIA GPU is required to run.
Tests can also be annotated as ``@pytest.mark.cuda(2)`` to declare number of
NVIDIA GPUs required to run. When running tests, if ``XCHAINER_TEST_CUDA_LIMIT``
environment variable is set to value greater than or equals to 0, test cases
that require GPUs more than the limit will be skipped.
"""
cuda_marker = item.get_marker('cuda')
if cuda_marker is not None:
required_num = cuda_marker.args[0] if cuda_marker.args else 1
minimum_num = int(os.getenv('XCHAINER_TEST_CUDA_LIMIT', '-1'))
if 0 <= minimum_num and minimum_num < required_num:
pytest.skip('{} NVIDIA GPUs required'.format(required_num))
|
<commit_before>import pytest
def pytest_addoption(parser):
_add_cuda_option(parser)
def pytest_configure(config):
_register_cuda_marker(config)
def pytest_runtest_setup(item):
_setup_cuda_marker(item)
def _add_cuda_option(parser):
parser.addoption("--cuda", action="store", metavar="LIMIT_NUM", default=-1,
help="only run tests that require NVIDIA GPUs less than or equal to the limit. Default: test all")
def _register_cuda_marker(config):
config.addinivalue_line("markers", "cuda(num=1): mark tests needing the specified number of NVIDIA GPUs.")
def _setup_cuda_marker(item):
"""Pytest marker to indicate number of NVIDIA GPUs required to run the test.
Tests can be annotated with this decorator (e.g., ``@pytest.mark.cuda``) to
declare that one NVIDIA GPU is required to run.
Tests can also be annotated as ``@pytest.mark.cuda(2)`` to declare number of
NVIDIA GPUs required to run. When running tests, if ``--cuda LIMIT_NUM``
pytest custom option is set to value greater than or equals to 0, test cases
that require GPUs more than the limit will be skipped.
"""
cuda_marker = item.get_marker('cuda')
if cuda_marker is not None:
required_num = cuda_marker.args[0] if cuda_marker.args else 1
minimum_num = int(item.config.getoption('--cuda'))
if 0 <= minimum_num and minimum_num < required_num:
pytest.skip('{} NVIDIA GPUs required'.format(required_num))
<commit_msg>Use XCHAINER_TEST_CUDA_LIMIT env int pytest<commit_after>import pytest
import os
def pytest_configure(config):
_register_cuda_marker(config)
def pytest_runtest_setup(item):
_setup_cuda_marker(item)
def _register_cuda_marker(config):
config.addinivalue_line("markers", "cuda(num=1): mark tests needing the specified number of NVIDIA GPUs.")
def _setup_cuda_marker(item):
"""Pytest marker to indicate number of NVIDIA GPUs required to run the test.
Tests can be annotated with this decorator (e.g., ``@pytest.mark.cuda``) to
declare that one NVIDIA GPU is required to run.
Tests can also be annotated as ``@pytest.mark.cuda(2)`` to declare number of
NVIDIA GPUs required to run. When running tests, if ``XCHAINER_TEST_CUDA_LIMIT``
environment variable is set to value greater than or equals to 0, test cases
that require GPUs more than the limit will be skipped.
"""
cuda_marker = item.get_marker('cuda')
if cuda_marker is not None:
required_num = cuda_marker.args[0] if cuda_marker.args else 1
minimum_num = int(os.getenv('XCHAINER_TEST_CUDA_LIMIT', '-1'))
if 0 <= minimum_num and minimum_num < required_num:
pytest.skip('{} NVIDIA GPUs required'.format(required_num))
|
dbb58d2898f9f9a4824ee9596e52da9eaa92cab1
|
examples/get_secure_user_falco_rules.py
|
examples/get_secure_user_falco_rules.py
|
#!/usr/bin/env python
#
# Get the sysdig secure user rules file.
#
import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), '..'))
from sdcclient import SdSecureClient
#
# Parse arguments
#
if len(sys.argv) != 2:
print('usage: %s <sysdig-token>' % sys.argv[0])
print('You can find your token at https://secure.sysdig.com/#/settings/user')
sys.exit(1)
sdc_token = sys.argv[1]
#
# Instantiate the SDC client
#
sdclient = SdSecureClient(sdc_token, 'https://secure.sysdig.com')
#
# Get the configuration
#
ok, res = sdclient.get_user_falco_rules()
#
# Return the result
#
if ok:
sys.stdout.write(res[1]["userRulesFile"]["content"])
else:
print(res)
sys.exit(1)
|
#!/usr/bin/env python
#
# Get the sysdig secure user rules file.
#
import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), '..'))
from sdcclient import SdSecureClient
#
# Parse arguments
#
if len(sys.argv) != 2:
print('usage: %s <sysdig-token>' % sys.argv[0])
print('You can find your token at https://secure.sysdig.com/#/settings/user')
sys.exit(1)
sdc_token = sys.argv[1]
#
# Instantiate the SDC client
#
sdclient = SdSecureClient(sdc_token, 'https://secure.sysdig.com')
#
# Get the configuration
#
ok, res = sdclient.get_user_falco_rules()
#
# Return the result
#
if ok:
sys.stdout.write(res["userRulesFile"]["content"])
else:
print(res)
sys.exit(1)
|
Fix legacy use of action result
|
Fix legacy use of action result
|
Python
|
mit
|
draios/python-sdc-client,draios/python-sdc-client
|
#!/usr/bin/env python
#
# Get the sysdig secure user rules file.
#
import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), '..'))
from sdcclient import SdSecureClient
#
# Parse arguments
#
if len(sys.argv) != 2:
print('usage: %s <sysdig-token>' % sys.argv[0])
print('You can find your token at https://secure.sysdig.com/#/settings/user')
sys.exit(1)
sdc_token = sys.argv[1]
#
# Instantiate the SDC client
#
sdclient = SdSecureClient(sdc_token, 'https://secure.sysdig.com')
#
# Get the configuration
#
ok, res = sdclient.get_user_falco_rules()
#
# Return the result
#
if ok:
sys.stdout.write(res[1]["userRulesFile"]["content"])
else:
print(res)
sys.exit(1)
Fix legacy use of action result
|
#!/usr/bin/env python
#
# Get the sysdig secure user rules file.
#
import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), '..'))
from sdcclient import SdSecureClient
#
# Parse arguments
#
if len(sys.argv) != 2:
print('usage: %s <sysdig-token>' % sys.argv[0])
print('You can find your token at https://secure.sysdig.com/#/settings/user')
sys.exit(1)
sdc_token = sys.argv[1]
#
# Instantiate the SDC client
#
sdclient = SdSecureClient(sdc_token, 'https://secure.sysdig.com')
#
# Get the configuration
#
ok, res = sdclient.get_user_falco_rules()
#
# Return the result
#
if ok:
sys.stdout.write(res["userRulesFile"]["content"])
else:
print(res)
sys.exit(1)
|
<commit_before>#!/usr/bin/env python
#
# Get the sysdig secure user rules file.
#
import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), '..'))
from sdcclient import SdSecureClient
#
# Parse arguments
#
if len(sys.argv) != 2:
print('usage: %s <sysdig-token>' % sys.argv[0])
print('You can find your token at https://secure.sysdig.com/#/settings/user')
sys.exit(1)
sdc_token = sys.argv[1]
#
# Instantiate the SDC client
#
sdclient = SdSecureClient(sdc_token, 'https://secure.sysdig.com')
#
# Get the configuration
#
ok, res = sdclient.get_user_falco_rules()
#
# Return the result
#
if ok:
sys.stdout.write(res[1]["userRulesFile"]["content"])
else:
print(res)
sys.exit(1)
<commit_msg>Fix legacy use of action result<commit_after>
|
#!/usr/bin/env python
#
# Get the sysdig secure user rules file.
#
import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), '..'))
from sdcclient import SdSecureClient
#
# Parse arguments
#
if len(sys.argv) != 2:
print('usage: %s <sysdig-token>' % sys.argv[0])
print('You can find your token at https://secure.sysdig.com/#/settings/user')
sys.exit(1)
sdc_token = sys.argv[1]
#
# Instantiate the SDC client
#
sdclient = SdSecureClient(sdc_token, 'https://secure.sysdig.com')
#
# Get the configuration
#
ok, res = sdclient.get_user_falco_rules()
#
# Return the result
#
if ok:
sys.stdout.write(res["userRulesFile"]["content"])
else:
print(res)
sys.exit(1)
|
#!/usr/bin/env python
#
# Get the sysdig secure user rules file.
#
import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), '..'))
from sdcclient import SdSecureClient
#
# Parse arguments
#
if len(sys.argv) != 2:
print('usage: %s <sysdig-token>' % sys.argv[0])
print('You can find your token at https://secure.sysdig.com/#/settings/user')
sys.exit(1)
sdc_token = sys.argv[1]
#
# Instantiate the SDC client
#
sdclient = SdSecureClient(sdc_token, 'https://secure.sysdig.com')
#
# Get the configuration
#
ok, res = sdclient.get_user_falco_rules()
#
# Return the result
#
if ok:
sys.stdout.write(res[1]["userRulesFile"]["content"])
else:
print(res)
sys.exit(1)
Fix legacy use of action result#!/usr/bin/env python
#
# Get the sysdig secure user rules file.
#
import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), '..'))
from sdcclient import SdSecureClient
#
# Parse arguments
#
if len(sys.argv) != 2:
print('usage: %s <sysdig-token>' % sys.argv[0])
print('You can find your token at https://secure.sysdig.com/#/settings/user')
sys.exit(1)
sdc_token = sys.argv[1]
#
# Instantiate the SDC client
#
sdclient = SdSecureClient(sdc_token, 'https://secure.sysdig.com')
#
# Get the configuration
#
ok, res = sdclient.get_user_falco_rules()
#
# Return the result
#
if ok:
sys.stdout.write(res["userRulesFile"]["content"])
else:
print(res)
sys.exit(1)
|
<commit_before>#!/usr/bin/env python
#
# Get the sysdig secure user rules file.
#
import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), '..'))
from sdcclient import SdSecureClient
#
# Parse arguments
#
if len(sys.argv) != 2:
print('usage: %s <sysdig-token>' % sys.argv[0])
print('You can find your token at https://secure.sysdig.com/#/settings/user')
sys.exit(1)
sdc_token = sys.argv[1]
#
# Instantiate the SDC client
#
sdclient = SdSecureClient(sdc_token, 'https://secure.sysdig.com')
#
# Get the configuration
#
ok, res = sdclient.get_user_falco_rules()
#
# Return the result
#
if ok:
sys.stdout.write(res[1]["userRulesFile"]["content"])
else:
print(res)
sys.exit(1)
<commit_msg>Fix legacy use of action result<commit_after>#!/usr/bin/env python
#
# Get the sysdig secure user rules file.
#
import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), '..'))
from sdcclient import SdSecureClient
#
# Parse arguments
#
if len(sys.argv) != 2:
print('usage: %s <sysdig-token>' % sys.argv[0])
print('You can find your token at https://secure.sysdig.com/#/settings/user')
sys.exit(1)
sdc_token = sys.argv[1]
#
# Instantiate the SDC client
#
sdclient = SdSecureClient(sdc_token, 'https://secure.sysdig.com')
#
# Get the configuration
#
ok, res = sdclient.get_user_falco_rules()
#
# Return the result
#
if ok:
sys.stdout.write(res["userRulesFile"]["content"])
else:
print(res)
sys.exit(1)
|
82dcd51c59eecccac4e7d9ee1dac754b27ff9ed2
|
mzalendo/feedback/views.py
|
mzalendo/feedback/views.py
|
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.views.decorators.csrf import csrf_protect
from models import Feedback
from forms import FeedbackForm
@csrf_protect
def add(request):
"""Gather feedback for a page, and if it is ok show a thanks message and link back to the page."""
submit_was_success = False
return_to_url = None
# If it is a post request try to create the feedback
if request.method == 'POST':
form = FeedbackForm( request.POST )
if form.is_valid():
feedback = Feedback()
feedback.url = form.cleaned_data['url']
feedback.email = form.cleaned_data['email']
feedback.comment = form.cleaned_data['comment']
# if there is any content in the honeypot field then label this comment as spammy
if form.cleaned_data['website']:
feedback.status = 'spammy'
if request.user.is_authenticated():
feedback.user = request.user
feedback.save()
submit_was_success = True
return_to_url = feedback.url or None
else:
# use GET to grab the url if set
form = FeedbackForm(initial=request.GET)
return render_to_response(
'feedback/add.html',
{
'form': form,
'submit_was_success': submit_was_success,
'return_to_url': return_to_url,
},
context_instance=RequestContext(request)
)
|
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.views.decorators.csrf import csrf_protect
from models import Feedback
from forms import FeedbackForm
import re
@csrf_protect
def add(request):
"""Gather feedback for a page, and if it is ok show a thanks message and link back to the page."""
submit_was_success = False
return_to_url = None
# If it is a post request try to create the feedback
if request.method == 'POST':
form = FeedbackForm( request.POST )
if form.is_valid():
feedback = Feedback()
feedback.url = form.cleaned_data['url']
feedback.email = form.cleaned_data['email']
feedback.comment = form.cleaned_data['comment']
# if there is any content in the honeypot field then label this comment as spammy
if form.cleaned_data['website']:
feedback.status = 'spammy'
# if the comment starts with an html tag it is probably spam
if re.search('\A\s*<\w+>', form.cleaned_data['comment']):
feedback.status = 'spammy'
if request.user.is_authenticated():
feedback.user = request.user
feedback.save()
submit_was_success = True
return_to_url = feedback.url or None
else:
# use GET to grab the url if set
form = FeedbackForm(initial=request.GET)
return render_to_response(
'feedback/add.html',
{
'form': form,
'submit_was_success': submit_was_success,
'return_to_url': return_to_url,
},
context_instance=RequestContext(request)
)
|
Mark feedback comments starting with a html tag as spammy
|
Mark feedback comments starting with a html tag as spammy
|
Python
|
agpl-3.0
|
ken-muturi/pombola,mysociety/pombola,hzj123/56th,hzj123/56th,mysociety/pombola,ken-muturi/pombola,hzj123/56th,Hutspace/odekro,geoffkilpin/pombola,patricmutwiri/pombola,mysociety/pombola,patricmutwiri/pombola,patricmutwiri/pombola,hzj123/56th,Hutspace/odekro,ken-muturi/pombola,Hutspace/odekro,patricmutwiri/pombola,Hutspace/odekro,mysociety/pombola,ken-muturi/pombola,geoffkilpin/pombola,geoffkilpin/pombola,patricmutwiri/pombola,hzj123/56th,geoffkilpin/pombola,ken-muturi/pombola,mysociety/pombola,patricmutwiri/pombola,geoffkilpin/pombola,hzj123/56th,Hutspace/odekro,geoffkilpin/pombola,mysociety/pombola,ken-muturi/pombola
|
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.views.decorators.csrf import csrf_protect
from models import Feedback
from forms import FeedbackForm
@csrf_protect
def add(request):
"""Gather feedback for a page, and if it is ok show a thanks message and link back to the page."""
submit_was_success = False
return_to_url = None
# If it is a post request try to create the feedback
if request.method == 'POST':
form = FeedbackForm( request.POST )
if form.is_valid():
feedback = Feedback()
feedback.url = form.cleaned_data['url']
feedback.email = form.cleaned_data['email']
feedback.comment = form.cleaned_data['comment']
# if there is any content in the honeypot field then label this comment as spammy
if form.cleaned_data['website']:
feedback.status = 'spammy'
if request.user.is_authenticated():
feedback.user = request.user
feedback.save()
submit_was_success = True
return_to_url = feedback.url or None
else:
# use GET to grab the url if set
form = FeedbackForm(initial=request.GET)
return render_to_response(
'feedback/add.html',
{
'form': form,
'submit_was_success': submit_was_success,
'return_to_url': return_to_url,
},
context_instance=RequestContext(request)
)
Mark feedback comments starting with a html tag as spammy
|
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.views.decorators.csrf import csrf_protect
from models import Feedback
from forms import FeedbackForm
import re
@csrf_protect
def add(request):
"""Gather feedback for a page, and if it is ok show a thanks message and link back to the page."""
submit_was_success = False
return_to_url = None
# If it is a post request try to create the feedback
if request.method == 'POST':
form = FeedbackForm( request.POST )
if form.is_valid():
feedback = Feedback()
feedback.url = form.cleaned_data['url']
feedback.email = form.cleaned_data['email']
feedback.comment = form.cleaned_data['comment']
# if there is any content in the honeypot field then label this comment as spammy
if form.cleaned_data['website']:
feedback.status = 'spammy'
# if the comment starts with an html tag it is probably spam
if re.search('\A\s*<\w+>', form.cleaned_data['comment']):
feedback.status = 'spammy'
if request.user.is_authenticated():
feedback.user = request.user
feedback.save()
submit_was_success = True
return_to_url = feedback.url or None
else:
# use GET to grab the url if set
form = FeedbackForm(initial=request.GET)
return render_to_response(
'feedback/add.html',
{
'form': form,
'submit_was_success': submit_was_success,
'return_to_url': return_to_url,
},
context_instance=RequestContext(request)
)
|
<commit_before>from django.shortcuts import render_to_response
from django.template import RequestContext
from django.views.decorators.csrf import csrf_protect
from models import Feedback
from forms import FeedbackForm
@csrf_protect
def add(request):
"""Gather feedback for a page, and if it is ok show a thanks message and link back to the page."""
submit_was_success = False
return_to_url = None
# If it is a post request try to create the feedback
if request.method == 'POST':
form = FeedbackForm( request.POST )
if form.is_valid():
feedback = Feedback()
feedback.url = form.cleaned_data['url']
feedback.email = form.cleaned_data['email']
feedback.comment = form.cleaned_data['comment']
# if there is any content in the honeypot field then label this comment as spammy
if form.cleaned_data['website']:
feedback.status = 'spammy'
if request.user.is_authenticated():
feedback.user = request.user
feedback.save()
submit_was_success = True
return_to_url = feedback.url or None
else:
# use GET to grab the url if set
form = FeedbackForm(initial=request.GET)
return render_to_response(
'feedback/add.html',
{
'form': form,
'submit_was_success': submit_was_success,
'return_to_url': return_to_url,
},
context_instance=RequestContext(request)
)
<commit_msg>Mark feedback comments starting with a html tag as spammy<commit_after>
|
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.views.decorators.csrf import csrf_protect
from models import Feedback
from forms import FeedbackForm
import re
@csrf_protect
def add(request):
"""Gather feedback for a page, and if it is ok show a thanks message and link back to the page."""
submit_was_success = False
return_to_url = None
# If it is a post request try to create the feedback
if request.method == 'POST':
form = FeedbackForm( request.POST )
if form.is_valid():
feedback = Feedback()
feedback.url = form.cleaned_data['url']
feedback.email = form.cleaned_data['email']
feedback.comment = form.cleaned_data['comment']
# if there is any content in the honeypot field then label this comment as spammy
if form.cleaned_data['website']:
feedback.status = 'spammy'
# if the comment starts with an html tag it is probably spam
if re.search('\A\s*<\w+>', form.cleaned_data['comment']):
feedback.status = 'spammy'
if request.user.is_authenticated():
feedback.user = request.user
feedback.save()
submit_was_success = True
return_to_url = feedback.url or None
else:
# use GET to grab the url if set
form = FeedbackForm(initial=request.GET)
return render_to_response(
'feedback/add.html',
{
'form': form,
'submit_was_success': submit_was_success,
'return_to_url': return_to_url,
},
context_instance=RequestContext(request)
)
|
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.views.decorators.csrf import csrf_protect
from models import Feedback
from forms import FeedbackForm
@csrf_protect
def add(request):
"""Gather feedback for a page, and if it is ok show a thanks message and link back to the page."""
submit_was_success = False
return_to_url = None
# If it is a post request try to create the feedback
if request.method == 'POST':
form = FeedbackForm( request.POST )
if form.is_valid():
feedback = Feedback()
feedback.url = form.cleaned_data['url']
feedback.email = form.cleaned_data['email']
feedback.comment = form.cleaned_data['comment']
# if there is any content in the honeypot field then label this comment as spammy
if form.cleaned_data['website']:
feedback.status = 'spammy'
if request.user.is_authenticated():
feedback.user = request.user
feedback.save()
submit_was_success = True
return_to_url = feedback.url or None
else:
# use GET to grab the url if set
form = FeedbackForm(initial=request.GET)
return render_to_response(
'feedback/add.html',
{
'form': form,
'submit_was_success': submit_was_success,
'return_to_url': return_to_url,
},
context_instance=RequestContext(request)
)
Mark feedback comments starting with a html tag as spammyfrom django.shortcuts import render_to_response
from django.template import RequestContext
from django.views.decorators.csrf import csrf_protect
from models import Feedback
from forms import FeedbackForm
import re
@csrf_protect
def add(request):
"""Gather feedback for a page, and if it is ok show a thanks message and link back to the page."""
submit_was_success = False
return_to_url = None
# If it is a post request try to create the feedback
if request.method == 'POST':
form = FeedbackForm( request.POST )
if form.is_valid():
feedback = Feedback()
feedback.url = form.cleaned_data['url']
feedback.email = form.cleaned_data['email']
feedback.comment = form.cleaned_data['comment']
# if there is any content in the honeypot field then label this comment as spammy
if form.cleaned_data['website']:
feedback.status = 'spammy'
# if the comment starts with an html tag it is probably spam
if re.search('\A\s*<\w+>', form.cleaned_data['comment']):
feedback.status = 'spammy'
if request.user.is_authenticated():
feedback.user = request.user
feedback.save()
submit_was_success = True
return_to_url = feedback.url or None
else:
# use GET to grab the url if set
form = FeedbackForm(initial=request.GET)
return render_to_response(
'feedback/add.html',
{
'form': form,
'submit_was_success': submit_was_success,
'return_to_url': return_to_url,
},
context_instance=RequestContext(request)
)
|
<commit_before>from django.shortcuts import render_to_response
from django.template import RequestContext
from django.views.decorators.csrf import csrf_protect
from models import Feedback
from forms import FeedbackForm
@csrf_protect
def add(request):
"""Gather feedback for a page, and if it is ok show a thanks message and link back to the page."""
submit_was_success = False
return_to_url = None
# If it is a post request try to create the feedback
if request.method == 'POST':
form = FeedbackForm( request.POST )
if form.is_valid():
feedback = Feedback()
feedback.url = form.cleaned_data['url']
feedback.email = form.cleaned_data['email']
feedback.comment = form.cleaned_data['comment']
# if there is any content in the honeypot field then label this comment as spammy
if form.cleaned_data['website']:
feedback.status = 'spammy'
if request.user.is_authenticated():
feedback.user = request.user
feedback.save()
submit_was_success = True
return_to_url = feedback.url or None
else:
# use GET to grab the url if set
form = FeedbackForm(initial=request.GET)
return render_to_response(
'feedback/add.html',
{
'form': form,
'submit_was_success': submit_was_success,
'return_to_url': return_to_url,
},
context_instance=RequestContext(request)
)
<commit_msg>Mark feedback comments starting with a html tag as spammy<commit_after>from django.shortcuts import render_to_response
from django.template import RequestContext
from django.views.decorators.csrf import csrf_protect
from models import Feedback
from forms import FeedbackForm
import re
@csrf_protect
def add(request):
"""Gather feedback for a page, and if it is ok show a thanks message and link back to the page."""
submit_was_success = False
return_to_url = None
# If it is a post request try to create the feedback
if request.method == 'POST':
form = FeedbackForm( request.POST )
if form.is_valid():
feedback = Feedback()
feedback.url = form.cleaned_data['url']
feedback.email = form.cleaned_data['email']
feedback.comment = form.cleaned_data['comment']
# if there is any content in the honeypot field then label this comment as spammy
if form.cleaned_data['website']:
feedback.status = 'spammy'
# if the comment starts with an html tag it is probably spam
if re.search('\A\s*<\w+>', form.cleaned_data['comment']):
feedback.status = 'spammy'
if request.user.is_authenticated():
feedback.user = request.user
feedback.save()
submit_was_success = True
return_to_url = feedback.url or None
else:
# use GET to grab the url if set
form = FeedbackForm(initial=request.GET)
return render_to_response(
'feedback/add.html',
{
'form': form,
'submit_was_success': submit_was_success,
'return_to_url': return_to_url,
},
context_instance=RequestContext(request)
)
|
23e809db71889cec7b2af03b978ecb339853fe51
|
satchless/cart/views.py
|
satchless/cart/views.py
|
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.views.generic.simple import direct_to_template
from . import models
from . import forms
def cart(request, typ):
cart = models.Cart.objects.get_or_create_from_request(request, typ)
if request.method == 'POST':
formset = forms.CartItemFormSet(instance=cart, data=request.POST)
if formset.is_valid():
formset.save()
return HttpResponseRedirect(reverse('satchless-cart-view', kwargs={'typ': typ}))
else:
formset = forms.CartItemFormSet(instance=cart)
return direct_to_template(request,
'satchless/cart/view.html',
{'cart': cart, 'formset': formset})
|
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from . import models
from . import forms
def cart(request, typ):
cart = models.Cart.objects.get_or_create_from_request(request, typ)
if request.method == 'POST':
formset = forms.CartItemFormSet(instance=cart, data=request.POST)
if formset.is_valid():
formset.save()
return HttpResponseRedirect(reverse('satchless-cart-view', kwargs={'typ': typ}))
else:
formset = forms.CartItemFormSet(instance=cart)
return render_to_response(
['satchless/cart/%s/view.html' % typ, 'satchless/cart/view.html'],
{'cart': cart, 'formset': formset},
context_instance=RequestContext(request))
|
Allow prefixed templates for different cart types
|
Allow prefixed templates for different cart types
|
Python
|
bsd-3-clause
|
taedori81/satchless,fusionbox/satchless,fusionbox/satchless,fusionbox/satchless
|
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.views.generic.simple import direct_to_template
from . import models
from . import forms
def cart(request, typ):
cart = models.Cart.objects.get_or_create_from_request(request, typ)
if request.method == 'POST':
formset = forms.CartItemFormSet(instance=cart, data=request.POST)
if formset.is_valid():
formset.save()
return HttpResponseRedirect(reverse('satchless-cart-view', kwargs={'typ': typ}))
else:
formset = forms.CartItemFormSet(instance=cart)
return direct_to_template(request,
'satchless/cart/view.html',
{'cart': cart, 'formset': formset})
Allow prefixed templates for different cart types
|
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from . import models
from . import forms
def cart(request, typ):
cart = models.Cart.objects.get_or_create_from_request(request, typ)
if request.method == 'POST':
formset = forms.CartItemFormSet(instance=cart, data=request.POST)
if formset.is_valid():
formset.save()
return HttpResponseRedirect(reverse('satchless-cart-view', kwargs={'typ': typ}))
else:
formset = forms.CartItemFormSet(instance=cart)
return render_to_response(
['satchless/cart/%s/view.html' % typ, 'satchless/cart/view.html'],
{'cart': cart, 'formset': formset},
context_instance=RequestContext(request))
|
<commit_before>from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.views.generic.simple import direct_to_template
from . import models
from . import forms
def cart(request, typ):
cart = models.Cart.objects.get_or_create_from_request(request, typ)
if request.method == 'POST':
formset = forms.CartItemFormSet(instance=cart, data=request.POST)
if formset.is_valid():
formset.save()
return HttpResponseRedirect(reverse('satchless-cart-view', kwargs={'typ': typ}))
else:
formset = forms.CartItemFormSet(instance=cart)
return direct_to_template(request,
'satchless/cart/view.html',
{'cart': cart, 'formset': formset})
<commit_msg>Allow prefixed templates for different cart types<commit_after>
|
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from . import models
from . import forms
def cart(request, typ):
cart = models.Cart.objects.get_or_create_from_request(request, typ)
if request.method == 'POST':
formset = forms.CartItemFormSet(instance=cart, data=request.POST)
if formset.is_valid():
formset.save()
return HttpResponseRedirect(reverse('satchless-cart-view', kwargs={'typ': typ}))
else:
formset = forms.CartItemFormSet(instance=cart)
return render_to_response(
['satchless/cart/%s/view.html' % typ, 'satchless/cart/view.html'],
{'cart': cart, 'formset': formset},
context_instance=RequestContext(request))
|
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.views.generic.simple import direct_to_template
from . import models
from . import forms
def cart(request, typ):
cart = models.Cart.objects.get_or_create_from_request(request, typ)
if request.method == 'POST':
formset = forms.CartItemFormSet(instance=cart, data=request.POST)
if formset.is_valid():
formset.save()
return HttpResponseRedirect(reverse('satchless-cart-view', kwargs={'typ': typ}))
else:
formset = forms.CartItemFormSet(instance=cart)
return direct_to_template(request,
'satchless/cart/view.html',
{'cart': cart, 'formset': formset})
Allow prefixed templates for different cart typesfrom django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from . import models
from . import forms
def cart(request, typ):
cart = models.Cart.objects.get_or_create_from_request(request, typ)
if request.method == 'POST':
formset = forms.CartItemFormSet(instance=cart, data=request.POST)
if formset.is_valid():
formset.save()
return HttpResponseRedirect(reverse('satchless-cart-view', kwargs={'typ': typ}))
else:
formset = forms.CartItemFormSet(instance=cart)
return render_to_response(
['satchless/cart/%s/view.html' % typ, 'satchless/cart/view.html'],
{'cart': cart, 'formset': formset},
context_instance=RequestContext(request))
|
<commit_before>from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.views.generic.simple import direct_to_template
from . import models
from . import forms
def cart(request, typ):
cart = models.Cart.objects.get_or_create_from_request(request, typ)
if request.method == 'POST':
formset = forms.CartItemFormSet(instance=cart, data=request.POST)
if formset.is_valid():
formset.save()
return HttpResponseRedirect(reverse('satchless-cart-view', kwargs={'typ': typ}))
else:
formset = forms.CartItemFormSet(instance=cart)
return direct_to_template(request,
'satchless/cart/view.html',
{'cart': cart, 'formset': formset})
<commit_msg>Allow prefixed templates for different cart types<commit_after>from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from . import models
from . import forms
def cart(request, typ):
cart = models.Cart.objects.get_or_create_from_request(request, typ)
if request.method == 'POST':
formset = forms.CartItemFormSet(instance=cart, data=request.POST)
if formset.is_valid():
formset.save()
return HttpResponseRedirect(reverse('satchless-cart-view', kwargs={'typ': typ}))
else:
formset = forms.CartItemFormSet(instance=cart)
return render_to_response(
['satchless/cart/%s/view.html' % typ, 'satchless/cart/view.html'],
{'cart': cart, 'formset': formset},
context_instance=RequestContext(request))
|
db6632820215885b8940112335a07a91b5e79fed
|
dnstwister/worker_atom.py
|
dnstwister/worker_atom.py
|
"""Updates atom feeds."""
import datetime
import time
import db
import tools
# Time in seconds between re-processing a domain.
PERIOD = 60#86400
if __name__ == '__main__':
while True:
try:
start = time.time()
# Pick the oldest domain.
with db.cursor() as cursor:
threshold = (
datetime.datetime.now() -
datetime.timedelta(seconds=PERIOD)
)
# Get the first entry with an updated date older than the
# threshold.
cursor.execute("""
SELECT domain, updated
FROM stored
WHERE updated < (%s)
ORDER BY updated ASC
LIMIT 1
""", (threshold,))
result = cursor.fetchone()
# If we're idle, that's great.
if result is None:
time.sleep(1)
continue
domain, updated = result
age = (datetime.datetime.now() - updated).total_seconds()
# Generate a new report.
latest = {}
for entry in tools.analyse(domain)[1]['fuzzy_domains'][1:]:
ip, error = tools.resolve(entry['domain-name'])
if error or not ip or ip is None:
continue
latest[entry['domain-name']] = ip
# Update the "latest" version of the report.
db.stored_set(domain, latest)
print ','.join(map(str, (
domain, age, time.time() - start
)))
except Exception as ex:
db.DB = None
time.sleep(1)
print 'crashed... {}'.format(ex)
|
"""Updates atom feeds."""
import datetime
import time
import db
import tools
# Time in seconds between re-processing a domain.
PERIOD = 86400
if __name__ == '__main__':
while True:
try:
start = time.time()
# Pick the oldest domain.
with db.cursor() as cursor:
threshold = (
datetime.datetime.now() -
datetime.timedelta(seconds=PERIOD)
)
# Get the first entry with an updated date older than the
# threshold.
cursor.execute("""
SELECT domain, updated
FROM stored
WHERE updated < (%s)
ORDER BY updated ASC
LIMIT 1
""", (threshold,))
result = cursor.fetchone()
# If we're idle, that's great.
if result is None:
time.sleep(1)
continue
domain, updated = result
age = (datetime.datetime.now() - updated).total_seconds()
# Generate a new report.
latest = {}
for entry in tools.analyse(domain)[1]['fuzzy_domains'][1:]:
ip, error = tools.resolve(entry['domain-name'])
if error or not ip or ip is None:
continue
latest[entry['domain-name']] = ip
# Update the "latest" version of the report.
db.stored_set(domain, latest)
print ','.join(map(str, (
domain, age, time.time() - start
)))
except Exception as ex:
db.DB = None
time.sleep(1)
print 'crashed... {}'.format(ex)
|
Set period to 24 hours now that it works
|
Set period to 24 hours now that it works
|
Python
|
unlicense
|
thisismyrobot/dnstwister,thisismyrobot/dnstwister,thisismyrobot/dnstwister
|
"""Updates atom feeds."""
import datetime
import time
import db
import tools
# Time in seconds between re-processing a domain.
PERIOD = 60#86400
if __name__ == '__main__':
while True:
try:
start = time.time()
# Pick the oldest domain.
with db.cursor() as cursor:
threshold = (
datetime.datetime.now() -
datetime.timedelta(seconds=PERIOD)
)
# Get the first entry with an updated date older than the
# threshold.
cursor.execute("""
SELECT domain, updated
FROM stored
WHERE updated < (%s)
ORDER BY updated ASC
LIMIT 1
""", (threshold,))
result = cursor.fetchone()
# If we're idle, that's great.
if result is None:
time.sleep(1)
continue
domain, updated = result
age = (datetime.datetime.now() - updated).total_seconds()
# Generate a new report.
latest = {}
for entry in tools.analyse(domain)[1]['fuzzy_domains'][1:]:
ip, error = tools.resolve(entry['domain-name'])
if error or not ip or ip is None:
continue
latest[entry['domain-name']] = ip
# Update the "latest" version of the report.
db.stored_set(domain, latest)
print ','.join(map(str, (
domain, age, time.time() - start
)))
except Exception as ex:
db.DB = None
time.sleep(1)
print 'crashed... {}'.format(ex)
Set period to 24 hours now that it works
|
"""Updates atom feeds."""
import datetime
import time
import db
import tools
# Time in seconds between re-processing a domain.
PERIOD = 86400
if __name__ == '__main__':
while True:
try:
start = time.time()
# Pick the oldest domain.
with db.cursor() as cursor:
threshold = (
datetime.datetime.now() -
datetime.timedelta(seconds=PERIOD)
)
# Get the first entry with an updated date older than the
# threshold.
cursor.execute("""
SELECT domain, updated
FROM stored
WHERE updated < (%s)
ORDER BY updated ASC
LIMIT 1
""", (threshold,))
result = cursor.fetchone()
# If we're idle, that's great.
if result is None:
time.sleep(1)
continue
domain, updated = result
age = (datetime.datetime.now() - updated).total_seconds()
# Generate a new report.
latest = {}
for entry in tools.analyse(domain)[1]['fuzzy_domains'][1:]:
ip, error = tools.resolve(entry['domain-name'])
if error or not ip or ip is None:
continue
latest[entry['domain-name']] = ip
# Update the "latest" version of the report.
db.stored_set(domain, latest)
print ','.join(map(str, (
domain, age, time.time() - start
)))
except Exception as ex:
db.DB = None
time.sleep(1)
print 'crashed... {}'.format(ex)
|
<commit_before>"""Updates atom feeds."""
import datetime
import time
import db
import tools
# Time in seconds between re-processing a domain.
PERIOD = 60#86400
if __name__ == '__main__':
while True:
try:
start = time.time()
# Pick the oldest domain.
with db.cursor() as cursor:
threshold = (
datetime.datetime.now() -
datetime.timedelta(seconds=PERIOD)
)
# Get the first entry with an updated date older than the
# threshold.
cursor.execute("""
SELECT domain, updated
FROM stored
WHERE updated < (%s)
ORDER BY updated ASC
LIMIT 1
""", (threshold,))
result = cursor.fetchone()
# If we're idle, that's great.
if result is None:
time.sleep(1)
continue
domain, updated = result
age = (datetime.datetime.now() - updated).total_seconds()
# Generate a new report.
latest = {}
for entry in tools.analyse(domain)[1]['fuzzy_domains'][1:]:
ip, error = tools.resolve(entry['domain-name'])
if error or not ip or ip is None:
continue
latest[entry['domain-name']] = ip
# Update the "latest" version of the report.
db.stored_set(domain, latest)
print ','.join(map(str, (
domain, age, time.time() - start
)))
except Exception as ex:
db.DB = None
time.sleep(1)
print 'crashed... {}'.format(ex)
<commit_msg>Set period to 24 hours now that it works<commit_after>
|
"""Updates atom feeds."""
import datetime
import time
import db
import tools
# Time in seconds between re-processing a domain.
PERIOD = 86400
if __name__ == '__main__':
while True:
try:
start = time.time()
# Pick the oldest domain.
with db.cursor() as cursor:
threshold = (
datetime.datetime.now() -
datetime.timedelta(seconds=PERIOD)
)
# Get the first entry with an updated date older than the
# threshold.
cursor.execute("""
SELECT domain, updated
FROM stored
WHERE updated < (%s)
ORDER BY updated ASC
LIMIT 1
""", (threshold,))
result = cursor.fetchone()
# If we're idle, that's great.
if result is None:
time.sleep(1)
continue
domain, updated = result
age = (datetime.datetime.now() - updated).total_seconds()
# Generate a new report.
latest = {}
for entry in tools.analyse(domain)[1]['fuzzy_domains'][1:]:
ip, error = tools.resolve(entry['domain-name'])
if error or not ip or ip is None:
continue
latest[entry['domain-name']] = ip
# Update the "latest" version of the report.
db.stored_set(domain, latest)
print ','.join(map(str, (
domain, age, time.time() - start
)))
except Exception as ex:
db.DB = None
time.sleep(1)
print 'crashed... {}'.format(ex)
|
"""Updates atom feeds."""
import datetime
import time
import db
import tools
# Time in seconds between re-processing a domain.
PERIOD = 60#86400
if __name__ == '__main__':
while True:
try:
start = time.time()
# Pick the oldest domain.
with db.cursor() as cursor:
threshold = (
datetime.datetime.now() -
datetime.timedelta(seconds=PERIOD)
)
# Get the first entry with an updated date older than the
# threshold.
cursor.execute("""
SELECT domain, updated
FROM stored
WHERE updated < (%s)
ORDER BY updated ASC
LIMIT 1
""", (threshold,))
result = cursor.fetchone()
# If we're idle, that's great.
if result is None:
time.sleep(1)
continue
domain, updated = result
age = (datetime.datetime.now() - updated).total_seconds()
# Generate a new report.
latest = {}
for entry in tools.analyse(domain)[1]['fuzzy_domains'][1:]:
ip, error = tools.resolve(entry['domain-name'])
if error or not ip or ip is None:
continue
latest[entry['domain-name']] = ip
# Update the "latest" version of the report.
db.stored_set(domain, latest)
print ','.join(map(str, (
domain, age, time.time() - start
)))
except Exception as ex:
db.DB = None
time.sleep(1)
print 'crashed... {}'.format(ex)
Set period to 24 hours now that it works"""Updates atom feeds."""
import datetime
import time
import db
import tools
# Time in seconds between re-processing a domain.
PERIOD = 86400
if __name__ == '__main__':
while True:
try:
start = time.time()
# Pick the oldest domain.
with db.cursor() as cursor:
threshold = (
datetime.datetime.now() -
datetime.timedelta(seconds=PERIOD)
)
# Get the first entry with an updated date older than the
# threshold.
cursor.execute("""
SELECT domain, updated
FROM stored
WHERE updated < (%s)
ORDER BY updated ASC
LIMIT 1
""", (threshold,))
result = cursor.fetchone()
# If we're idle, that's great.
if result is None:
time.sleep(1)
continue
domain, updated = result
age = (datetime.datetime.now() - updated).total_seconds()
# Generate a new report.
latest = {}
for entry in tools.analyse(domain)[1]['fuzzy_domains'][1:]:
ip, error = tools.resolve(entry['domain-name'])
if error or not ip or ip is None:
continue
latest[entry['domain-name']] = ip
# Update the "latest" version of the report.
db.stored_set(domain, latest)
print ','.join(map(str, (
domain, age, time.time() - start
)))
except Exception as ex:
db.DB = None
time.sleep(1)
print 'crashed... {}'.format(ex)
|
<commit_before>"""Updates atom feeds."""
import datetime
import time
import db
import tools
# Time in seconds between re-processing a domain.
PERIOD = 60#86400
if __name__ == '__main__':
while True:
try:
start = time.time()
# Pick the oldest domain.
with db.cursor() as cursor:
threshold = (
datetime.datetime.now() -
datetime.timedelta(seconds=PERIOD)
)
# Get the first entry with an updated date older than the
# threshold.
cursor.execute("""
SELECT domain, updated
FROM stored
WHERE updated < (%s)
ORDER BY updated ASC
LIMIT 1
""", (threshold,))
result = cursor.fetchone()
# If we're idle, that's great.
if result is None:
time.sleep(1)
continue
domain, updated = result
age = (datetime.datetime.now() - updated).total_seconds()
# Generate a new report.
latest = {}
for entry in tools.analyse(domain)[1]['fuzzy_domains'][1:]:
ip, error = tools.resolve(entry['domain-name'])
if error or not ip or ip is None:
continue
latest[entry['domain-name']] = ip
# Update the "latest" version of the report.
db.stored_set(domain, latest)
print ','.join(map(str, (
domain, age, time.time() - start
)))
except Exception as ex:
db.DB = None
time.sleep(1)
print 'crashed... {}'.format(ex)
<commit_msg>Set period to 24 hours now that it works<commit_after>"""Updates atom feeds."""
import datetime
import time
import db
import tools
# Time in seconds between re-processing a domain.
PERIOD = 86400
if __name__ == '__main__':
while True:
try:
start = time.time()
# Pick the oldest domain.
with db.cursor() as cursor:
threshold = (
datetime.datetime.now() -
datetime.timedelta(seconds=PERIOD)
)
# Get the first entry with an updated date older than the
# threshold.
cursor.execute("""
SELECT domain, updated
FROM stored
WHERE updated < (%s)
ORDER BY updated ASC
LIMIT 1
""", (threshold,))
result = cursor.fetchone()
# If we're idle, that's great.
if result is None:
time.sleep(1)
continue
domain, updated = result
age = (datetime.datetime.now() - updated).total_seconds()
# Generate a new report.
latest = {}
for entry in tools.analyse(domain)[1]['fuzzy_domains'][1:]:
ip, error = tools.resolve(entry['domain-name'])
if error or not ip or ip is None:
continue
latest[entry['domain-name']] = ip
# Update the "latest" version of the report.
db.stored_set(domain, latest)
print ','.join(map(str, (
domain, age, time.time() - start
)))
except Exception as ex:
db.DB = None
time.sleep(1)
print 'crashed... {}'.format(ex)
|
3ddcc3ed5b6288ea6b39b7a11c5d85232a2d224f
|
networkzero/__init__.py
|
networkzero/__init__.py
|
# -*- coding: utf-8 -*-
"""Easy network discovery & messaging
Aimed at a classrom or club situation, networkzero makes it simpler to
have several machines or several processes on one machine discovering
each other and talking across a network. Typical examples would include:
* Sending commands to a robot
* Sending scores to a scoreboard
* Having a remote sensor ping a central controller
* A peer-to-peer chat / instant messenger
Example code:
[Computer 1]:
import networkzero as nw0
echo_address = nw0.advertise("echo")
while True:
name = nw0.wait_for_message(echo_address)
nw0.send_reply(echo_address, "Hello " + name)
[Computer 2]:
import networkzero as nw0
echo_address = nw0.discover("echo")
print(nw0.send_message(echo_address, "Alice"))
print(nw0.send_message(echo_address, "Bob"))
"""
from .discovery import advertise, discover, discover_all
from .messenger import (
send_command, wait_for_command,
send_message, wait_for_message, send_reply,
send_notification, wait_for_notification
)
|
# -*- coding: utf-8 -*-
"""Easy network discovery & messaging
Aimed at a classrom or club situation, networkzero makes it simpler to
have several machines or several processes on one machine discovering
each other and talking across a network. Typical examples would include:
* Sending commands to a robot
* Sending scores to a scoreboard
* Having a remote sensor ping a central controller
* A peer-to-peer chat / instant messenger
To send a message and wait for a reply::
[Computer 1]
import networkzero as nw0
echo_address = nw0.advertise("echo")
while True:
name = nw0.wait_for_message(echo_address)
nw0.send_reply(echo_address, "Hello " + name)
[Computer 2]
import networkzero as nw0
echo_address = nw0.discover("echo")
print(nw0.send_message(echo_address, "Alice"))
print(nw0.send_message(echo_address, "Bob"))
To send a command without waiting for a reply::
[Computer 1]
import networkzero as nw0
address = nw0.advertise("robot")
while True:
command, params = nw0.wait_for_command(address)
if command == "FORWARD":
# ...
elif command == "TURN":
[direction, degrees] = params
# ...
[Computer 2]
import networkzero as nw0
robot = nw0.discover("robot")
nw0.send_command("FORWARD")
nw0.send_command("TURN LEFT 45")
To send notifications::
[Computer 1]
import networkzero as nw0
address = nw0.advertise("hub")
while True:
"""
from .discovery import advertise, discover, discover_all
from .messenger import (
send_command, wait_for_command,
send_message, wait_for_message, send_reply,
send_notification, wait_for_notification
)
|
Improve the examples in the package root docstring
|
Improve the examples in the package root docstring
|
Python
|
mit
|
tjguk/networkzero,tjguk/networkzero,tjguk/networkzero
|
# -*- coding: utf-8 -*-
"""Easy network discovery & messaging
Aimed at a classrom or club situation, networkzero makes it simpler to
have several machines or several processes on one machine discovering
each other and talking across a network. Typical examples would include:
* Sending commands to a robot
* Sending scores to a scoreboard
* Having a remote sensor ping a central controller
* A peer-to-peer chat / instant messenger
Example code:
[Computer 1]:
import networkzero as nw0
echo_address = nw0.advertise("echo")
while True:
name = nw0.wait_for_message(echo_address)
nw0.send_reply(echo_address, "Hello " + name)
[Computer 2]:
import networkzero as nw0
echo_address = nw0.discover("echo")
print(nw0.send_message(echo_address, "Alice"))
print(nw0.send_message(echo_address, "Bob"))
"""
from .discovery import advertise, discover, discover_all
from .messenger import (
send_command, wait_for_command,
send_message, wait_for_message, send_reply,
send_notification, wait_for_notification
)
Improve the examples in the package root docstring
|
# -*- coding: utf-8 -*-
"""Easy network discovery & messaging
Aimed at a classrom or club situation, networkzero makes it simpler to
have several machines or several processes on one machine discovering
each other and talking across a network. Typical examples would include:
* Sending commands to a robot
* Sending scores to a scoreboard
* Having a remote sensor ping a central controller
* A peer-to-peer chat / instant messenger
To send a message and wait for a reply::
[Computer 1]
import networkzero as nw0
echo_address = nw0.advertise("echo")
while True:
name = nw0.wait_for_message(echo_address)
nw0.send_reply(echo_address, "Hello " + name)
[Computer 2]
import networkzero as nw0
echo_address = nw0.discover("echo")
print(nw0.send_message(echo_address, "Alice"))
print(nw0.send_message(echo_address, "Bob"))
To send a command without waiting for a reply::
[Computer 1]
import networkzero as nw0
address = nw0.advertise("robot")
while True:
command, params = nw0.wait_for_command(address)
if command == "FORWARD":
# ...
elif command == "TURN":
[direction, degrees] = params
# ...
[Computer 2]
import networkzero as nw0
robot = nw0.discover("robot")
nw0.send_command("FORWARD")
nw0.send_command("TURN LEFT 45")
To send notifications::
[Computer 1]
import networkzero as nw0
address = nw0.advertise("hub")
while True:
"""
from .discovery import advertise, discover, discover_all
from .messenger import (
send_command, wait_for_command,
send_message, wait_for_message, send_reply,
send_notification, wait_for_notification
)
|
<commit_before># -*- coding: utf-8 -*-
"""Easy network discovery & messaging
Aimed at a classrom or club situation, networkzero makes it simpler to
have several machines or several processes on one machine discovering
each other and talking across a network. Typical examples would include:
* Sending commands to a robot
* Sending scores to a scoreboard
* Having a remote sensor ping a central controller
* A peer-to-peer chat / instant messenger
Example code:
[Computer 1]:
import networkzero as nw0
echo_address = nw0.advertise("echo")
while True:
name = nw0.wait_for_message(echo_address)
nw0.send_reply(echo_address, "Hello " + name)
[Computer 2]:
import networkzero as nw0
echo_address = nw0.discover("echo")
print(nw0.send_message(echo_address, "Alice"))
print(nw0.send_message(echo_address, "Bob"))
"""
from .discovery import advertise, discover, discover_all
from .messenger import (
send_command, wait_for_command,
send_message, wait_for_message, send_reply,
send_notification, wait_for_notification
)
<commit_msg>Improve the examples in the package root docstring<commit_after>
|
# -*- coding: utf-8 -*-
"""Easy network discovery & messaging
Aimed at a classrom or club situation, networkzero makes it simpler to
have several machines or several processes on one machine discovering
each other and talking across a network. Typical examples would include:
* Sending commands to a robot
* Sending scores to a scoreboard
* Having a remote sensor ping a central controller
* A peer-to-peer chat / instant messenger
To send a message and wait for a reply::
[Computer 1]
import networkzero as nw0
echo_address = nw0.advertise("echo")
while True:
name = nw0.wait_for_message(echo_address)
nw0.send_reply(echo_address, "Hello " + name)
[Computer 2]
import networkzero as nw0
echo_address = nw0.discover("echo")
print(nw0.send_message(echo_address, "Alice"))
print(nw0.send_message(echo_address, "Bob"))
To send a command without waiting for a reply::
[Computer 1]
import networkzero as nw0
address = nw0.advertise("robot")
while True:
command, params = nw0.wait_for_command(address)
if command == "FORWARD":
# ...
elif command == "TURN":
[direction, degrees] = params
# ...
[Computer 2]
import networkzero as nw0
robot = nw0.discover("robot")
nw0.send_command("FORWARD")
nw0.send_command("TURN LEFT 45")
To send notifications::
[Computer 1]
import networkzero as nw0
address = nw0.advertise("hub")
while True:
"""
from .discovery import advertise, discover, discover_all
from .messenger import (
send_command, wait_for_command,
send_message, wait_for_message, send_reply,
send_notification, wait_for_notification
)
|
# -*- coding: utf-8 -*-
"""Easy network discovery & messaging
Aimed at a classrom or club situation, networkzero makes it simpler to
have several machines or several processes on one machine discovering
each other and talking across a network. Typical examples would include:
* Sending commands to a robot
* Sending scores to a scoreboard
* Having a remote sensor ping a central controller
* A peer-to-peer chat / instant messenger
Example code:
[Computer 1]:
import networkzero as nw0
echo_address = nw0.advertise("echo")
while True:
name = nw0.wait_for_message(echo_address)
nw0.send_reply(echo_address, "Hello " + name)
[Computer 2]:
import networkzero as nw0
echo_address = nw0.discover("echo")
print(nw0.send_message(echo_address, "Alice"))
print(nw0.send_message(echo_address, "Bob"))
"""
from .discovery import advertise, discover, discover_all
from .messenger import (
send_command, wait_for_command,
send_message, wait_for_message, send_reply,
send_notification, wait_for_notification
)
Improve the examples in the package root docstring# -*- coding: utf-8 -*-
"""Easy network discovery & messaging
Aimed at a classrom or club situation, networkzero makes it simpler to
have several machines or several processes on one machine discovering
each other and talking across a network. Typical examples would include:
* Sending commands to a robot
* Sending scores to a scoreboard
* Having a remote sensor ping a central controller
* A peer-to-peer chat / instant messenger
To send a message and wait for a reply::
[Computer 1]
import networkzero as nw0
echo_address = nw0.advertise("echo")
while True:
name = nw0.wait_for_message(echo_address)
nw0.send_reply(echo_address, "Hello " + name)
[Computer 2]
import networkzero as nw0
echo_address = nw0.discover("echo")
print(nw0.send_message(echo_address, "Alice"))
print(nw0.send_message(echo_address, "Bob"))
To send a command without waiting for a reply::
[Computer 1]
import networkzero as nw0
address = nw0.advertise("robot")
while True:
command, params = nw0.wait_for_command(address)
if command == "FORWARD":
# ...
elif command == "TURN":
[direction, degrees] = params
# ...
[Computer 2]
import networkzero as nw0
robot = nw0.discover("robot")
nw0.send_command("FORWARD")
nw0.send_command("TURN LEFT 45")
To send notifications::
[Computer 1]
import networkzero as nw0
address = nw0.advertise("hub")
while True:
"""
from .discovery import advertise, discover, discover_all
from .messenger import (
send_command, wait_for_command,
send_message, wait_for_message, send_reply,
send_notification, wait_for_notification
)
|
<commit_before># -*- coding: utf-8 -*-
"""Easy network discovery & messaging
Aimed at a classrom or club situation, networkzero makes it simpler to
have several machines or several processes on one machine discovering
each other and talking across a network. Typical examples would include:
* Sending commands to a robot
* Sending scores to a scoreboard
* Having a remote sensor ping a central controller
* A peer-to-peer chat / instant messenger
Example code:
[Computer 1]:
import networkzero as nw0
echo_address = nw0.advertise("echo")
while True:
name = nw0.wait_for_message(echo_address)
nw0.send_reply(echo_address, "Hello " + name)
[Computer 2]:
import networkzero as nw0
echo_address = nw0.discover("echo")
print(nw0.send_message(echo_address, "Alice"))
print(nw0.send_message(echo_address, "Bob"))
"""
from .discovery import advertise, discover, discover_all
from .messenger import (
send_command, wait_for_command,
send_message, wait_for_message, send_reply,
send_notification, wait_for_notification
)
<commit_msg>Improve the examples in the package root docstring<commit_after># -*- coding: utf-8 -*-
"""Easy network discovery & messaging
Aimed at a classrom or club situation, networkzero makes it simpler to
have several machines or several processes on one machine discovering
each other and talking across a network. Typical examples would include:
* Sending commands to a robot
* Sending scores to a scoreboard
* Having a remote sensor ping a central controller
* A peer-to-peer chat / instant messenger
To send a message and wait for a reply::
[Computer 1]
import networkzero as nw0
echo_address = nw0.advertise("echo")
while True:
name = nw0.wait_for_message(echo_address)
nw0.send_reply(echo_address, "Hello " + name)
[Computer 2]
import networkzero as nw0
echo_address = nw0.discover("echo")
print(nw0.send_message(echo_address, "Alice"))
print(nw0.send_message(echo_address, "Bob"))
To send a command without waiting for a reply::
[Computer 1]
import networkzero as nw0
address = nw0.advertise("robot")
while True:
command, params = nw0.wait_for_command(address)
if command == "FORWARD":
# ...
elif command == "TURN":
[direction, degrees] = params
# ...
[Computer 2]
import networkzero as nw0
robot = nw0.discover("robot")
nw0.send_command("FORWARD")
nw0.send_command("TURN LEFT 45")
To send notifications::
[Computer 1]
import networkzero as nw0
address = nw0.advertise("hub")
while True:
"""
from .discovery import advertise, discover, discover_all
from .messenger import (
send_command, wait_for_command,
send_message, wait_for_message, send_reply,
send_notification, wait_for_notification
)
|
d7eb2dc9eb5f391a6a6742bea3692c8ab1d8aa69
|
doc/examples/plot_edge_filter.py
|
doc/examples/plot_edge_filter.py
|
import matplotlib.pyplot as plt
from skimage.data import camera
from skimage.filter import roberts, sobel
image = camera()
edge_roberts = roberts(image)
edge_sobel = sobel(image)
fig, (ax0, ax1) = plt.subplots(ncols=2)
ax0.imshow(edge_roberts, cmap=plt.cm.gray)
ax0.set_title('Roberts Edge Detection')
ax0.axis('off')
ax1.imshow(edge_sobel, cmap=plt.cm.gray)
ax1.set_title('Sobel Edge Detection')
ax1.axis('off')
plt.show()
|
"""
==============
Edge operators
==============
Edge operators are used in image processing within edge detection algorithms.
They are discrete differentiation operators, computing an approximation of the
gradient of the image intensity function.
"""
import matplotlib.pyplot as plt
from skimage.data import camera
from skimage.filter import roberts, sobel
image = camera()
edge_roberts = roberts(image)
edge_sobel = sobel(image)
fig, (ax0, ax1) = plt.subplots(ncols=2)
ax0.imshow(edge_roberts, cmap=plt.cm.gray)
ax0.set_title('Roberts Edge Detection')
ax0.axis('off')
ax1.imshow(edge_sobel, cmap=plt.cm.gray)
ax1.set_title('Sobel Edge Detection')
ax1.axis('off')
plt.show()
|
Add short description to edge filter example
|
Add short description to edge filter example
|
Python
|
bsd-3-clause
|
ClinicalGraphics/scikit-image,pratapvardhan/scikit-image,bsipocz/scikit-image,oew1v07/scikit-image,michaelaye/scikit-image,SamHames/scikit-image,paalge/scikit-image,almarklein/scikit-image,Midafi/scikit-image,chintak/scikit-image,vighneshbirodkar/scikit-image,juliusbierk/scikit-image,rjeli/scikit-image,chintak/scikit-image,SamHames/scikit-image,chriscrosscutler/scikit-image,SamHames/scikit-image,rjeli/scikit-image,ajaybhat/scikit-image,chintak/scikit-image,ClinicalGraphics/scikit-image,blink1073/scikit-image,michaelaye/scikit-image,newville/scikit-image,robintw/scikit-image,warmspringwinds/scikit-image,jwiggins/scikit-image,pratapvardhan/scikit-image,vighneshbirodkar/scikit-image,WarrenWeckesser/scikits-image,jwiggins/scikit-image,bennlich/scikit-image,GaZ3ll3/scikit-image,keflavich/scikit-image,keflavich/scikit-image,almarklein/scikit-image,ofgulban/scikit-image,rjeli/scikit-image,paalge/scikit-image,michaelpacer/scikit-image,almarklein/scikit-image,bennlich/scikit-image,dpshelio/scikit-image,robintw/scikit-image,GaZ3ll3/scikit-image,bsipocz/scikit-image,youprofit/scikit-image,michaelpacer/scikit-image,Britefury/scikit-image,Britefury/scikit-image,dpshelio/scikit-image,chriscrosscutler/scikit-image,WarrenWeckesser/scikits-image,vighneshbirodkar/scikit-image,Hiyorimi/scikit-image,chintak/scikit-image,almarklein/scikit-image,Midafi/scikit-image,blink1073/scikit-image,juliusbierk/scikit-image,Hiyorimi/scikit-image,oew1v07/scikit-image,SamHames/scikit-image,ofgulban/scikit-image,ajaybhat/scikit-image,ofgulban/scikit-image,emon10005/scikit-image,youprofit/scikit-image,warmspringwinds/scikit-image,emon10005/scikit-image,paalge/scikit-image,newville/scikit-image
|
import matplotlib.pyplot as plt
from skimage.data import camera
from skimage.filter import roberts, sobel
image = camera()
edge_roberts = roberts(image)
edge_sobel = sobel(image)
fig, (ax0, ax1) = plt.subplots(ncols=2)
ax0.imshow(edge_roberts, cmap=plt.cm.gray)
ax0.set_title('Roberts Edge Detection')
ax0.axis('off')
ax1.imshow(edge_sobel, cmap=plt.cm.gray)
ax1.set_title('Sobel Edge Detection')
ax1.axis('off')
plt.show()
Add short description to edge filter example
|
"""
==============
Edge operators
==============
Edge operators are used in image processing within edge detection algorithms.
They are discrete differentiation operators, computing an approximation of the
gradient of the image intensity function.
"""
import matplotlib.pyplot as plt
from skimage.data import camera
from skimage.filter import roberts, sobel
image = camera()
edge_roberts = roberts(image)
edge_sobel = sobel(image)
fig, (ax0, ax1) = plt.subplots(ncols=2)
ax0.imshow(edge_roberts, cmap=plt.cm.gray)
ax0.set_title('Roberts Edge Detection')
ax0.axis('off')
ax1.imshow(edge_sobel, cmap=plt.cm.gray)
ax1.set_title('Sobel Edge Detection')
ax1.axis('off')
plt.show()
|
<commit_before>import matplotlib.pyplot as plt
from skimage.data import camera
from skimage.filter import roberts, sobel
image = camera()
edge_roberts = roberts(image)
edge_sobel = sobel(image)
fig, (ax0, ax1) = plt.subplots(ncols=2)
ax0.imshow(edge_roberts, cmap=plt.cm.gray)
ax0.set_title('Roberts Edge Detection')
ax0.axis('off')
ax1.imshow(edge_sobel, cmap=plt.cm.gray)
ax1.set_title('Sobel Edge Detection')
ax1.axis('off')
plt.show()
<commit_msg>Add short description to edge filter example<commit_after>
|
"""
==============
Edge operators
==============
Edge operators are used in image processing within edge detection algorithms.
They are discrete differentiation operators, computing an approximation of the
gradient of the image intensity function.
"""
import matplotlib.pyplot as plt
from skimage.data import camera
from skimage.filter import roberts, sobel
image = camera()
edge_roberts = roberts(image)
edge_sobel = sobel(image)
fig, (ax0, ax1) = plt.subplots(ncols=2)
ax0.imshow(edge_roberts, cmap=plt.cm.gray)
ax0.set_title('Roberts Edge Detection')
ax0.axis('off')
ax1.imshow(edge_sobel, cmap=plt.cm.gray)
ax1.set_title('Sobel Edge Detection')
ax1.axis('off')
plt.show()
|
import matplotlib.pyplot as plt
from skimage.data import camera
from skimage.filter import roberts, sobel
image = camera()
edge_roberts = roberts(image)
edge_sobel = sobel(image)
fig, (ax0, ax1) = plt.subplots(ncols=2)
ax0.imshow(edge_roberts, cmap=plt.cm.gray)
ax0.set_title('Roberts Edge Detection')
ax0.axis('off')
ax1.imshow(edge_sobel, cmap=plt.cm.gray)
ax1.set_title('Sobel Edge Detection')
ax1.axis('off')
plt.show()
Add short description to edge filter example"""
==============
Edge operators
==============
Edge operators are used in image processing within edge detection algorithms.
They are discrete differentiation operators, computing an approximation of the
gradient of the image intensity function.
"""
import matplotlib.pyplot as plt
from skimage.data import camera
from skimage.filter import roberts, sobel
image = camera()
edge_roberts = roberts(image)
edge_sobel = sobel(image)
fig, (ax0, ax1) = plt.subplots(ncols=2)
ax0.imshow(edge_roberts, cmap=plt.cm.gray)
ax0.set_title('Roberts Edge Detection')
ax0.axis('off')
ax1.imshow(edge_sobel, cmap=plt.cm.gray)
ax1.set_title('Sobel Edge Detection')
ax1.axis('off')
plt.show()
|
<commit_before>import matplotlib.pyplot as plt
from skimage.data import camera
from skimage.filter import roberts, sobel
image = camera()
edge_roberts = roberts(image)
edge_sobel = sobel(image)
fig, (ax0, ax1) = plt.subplots(ncols=2)
ax0.imshow(edge_roberts, cmap=plt.cm.gray)
ax0.set_title('Roberts Edge Detection')
ax0.axis('off')
ax1.imshow(edge_sobel, cmap=plt.cm.gray)
ax1.set_title('Sobel Edge Detection')
ax1.axis('off')
plt.show()
<commit_msg>Add short description to edge filter example<commit_after>"""
==============
Edge operators
==============
Edge operators are used in image processing within edge detection algorithms.
They are discrete differentiation operators, computing an approximation of the
gradient of the image intensity function.
"""
import matplotlib.pyplot as plt
from skimage.data import camera
from skimage.filter import roberts, sobel
image = camera()
edge_roberts = roberts(image)
edge_sobel = sobel(image)
fig, (ax0, ax1) = plt.subplots(ncols=2)
ax0.imshow(edge_roberts, cmap=plt.cm.gray)
ax0.set_title('Roberts Edge Detection')
ax0.axis('off')
ax1.imshow(edge_sobel, cmap=plt.cm.gray)
ax1.set_title('Sobel Edge Detection')
ax1.axis('off')
plt.show()
|
a4a1d924686a0d74a080369d81e20a75c4e7d210
|
gem/templatetags/gem_tags.py
|
gem/templatetags/gem_tags.py
|
from django.template import Library
from django.conf import settings
from gem.models import GemSettings
register = Library()
@register.simple_tag()
def get_site_static_prefix():
return settings.SITE_STATIC_PREFIX
@register.filter()
def get_bbm_app_id(request):
return GemSettings.for_site(request.site).bbm_app_id
@register.filter('fieldtype')
def fieldtype(field):
return field.field.widget.__class__.__name__
@register.filter(name='smarttruncatechars')
def smart_truncate_chars(value, max_length):
if len(value) > max_length:
truncd_val = value[:max_length]
if value[max_length] != ' ':
truncd_val = truncd_val[:truncd_val.rfind(' ')]
return truncd_val + '...'
return value
|
from django.template import Library
from django.conf import settings
register = Library()
@register.simple_tag()
def get_site_static_prefix():
return settings.SITE_STATIC_PREFIX
@register.filter('fieldtype')
def fieldtype(field):
return field.field.widget.__class__.__name__
@register.filter(name='smarttruncatechars')
def smart_truncate_chars(value, max_length):
if len(value) > max_length:
truncd_val = value[:max_length]
if value[max_length] != ' ':
truncd_val = truncd_val[:truncd_val.rfind(' ')]
return truncd_val + '...'
return value
|
Revert "Create GEM filter to get BBM App ID"
|
Revert "Create GEM filter to get BBM App ID"
This reverts commit 2805eb26865d7a12cbc0e6f7a71dbd99ba49224e.
|
Python
|
bsd-2-clause
|
praekelt/molo-gem,praekelt/molo-gem,praekelt/molo-gem
|
from django.template import Library
from django.conf import settings
from gem.models import GemSettings
register = Library()
@register.simple_tag()
def get_site_static_prefix():
return settings.SITE_STATIC_PREFIX
@register.filter()
def get_bbm_app_id(request):
return GemSettings.for_site(request.site).bbm_app_id
@register.filter('fieldtype')
def fieldtype(field):
return field.field.widget.__class__.__name__
@register.filter(name='smarttruncatechars')
def smart_truncate_chars(value, max_length):
if len(value) > max_length:
truncd_val = value[:max_length]
if value[max_length] != ' ':
truncd_val = truncd_val[:truncd_val.rfind(' ')]
return truncd_val + '...'
return value
Revert "Create GEM filter to get BBM App ID"
This reverts commit 2805eb26865d7a12cbc0e6f7a71dbd99ba49224e.
|
from django.template import Library
from django.conf import settings
register = Library()
@register.simple_tag()
def get_site_static_prefix():
return settings.SITE_STATIC_PREFIX
@register.filter('fieldtype')
def fieldtype(field):
return field.field.widget.__class__.__name__
@register.filter(name='smarttruncatechars')
def smart_truncate_chars(value, max_length):
if len(value) > max_length:
truncd_val = value[:max_length]
if value[max_length] != ' ':
truncd_val = truncd_val[:truncd_val.rfind(' ')]
return truncd_val + '...'
return value
|
<commit_before>from django.template import Library
from django.conf import settings
from gem.models import GemSettings
register = Library()
@register.simple_tag()
def get_site_static_prefix():
return settings.SITE_STATIC_PREFIX
@register.filter()
def get_bbm_app_id(request):
return GemSettings.for_site(request.site).bbm_app_id
@register.filter('fieldtype')
def fieldtype(field):
return field.field.widget.__class__.__name__
@register.filter(name='smarttruncatechars')
def smart_truncate_chars(value, max_length):
if len(value) > max_length:
truncd_val = value[:max_length]
if value[max_length] != ' ':
truncd_val = truncd_val[:truncd_val.rfind(' ')]
return truncd_val + '...'
return value
<commit_msg>Revert "Create GEM filter to get BBM App ID"
This reverts commit 2805eb26865d7a12cbc0e6f7a71dbd99ba49224e.<commit_after>
|
from django.template import Library
from django.conf import settings
register = Library()
@register.simple_tag()
def get_site_static_prefix():
return settings.SITE_STATIC_PREFIX
@register.filter('fieldtype')
def fieldtype(field):
return field.field.widget.__class__.__name__
@register.filter(name='smarttruncatechars')
def smart_truncate_chars(value, max_length):
if len(value) > max_length:
truncd_val = value[:max_length]
if value[max_length] != ' ':
truncd_val = truncd_val[:truncd_val.rfind(' ')]
return truncd_val + '...'
return value
|
from django.template import Library
from django.conf import settings
from gem.models import GemSettings
register = Library()
@register.simple_tag()
def get_site_static_prefix():
return settings.SITE_STATIC_PREFIX
@register.filter()
def get_bbm_app_id(request):
return GemSettings.for_site(request.site).bbm_app_id
@register.filter('fieldtype')
def fieldtype(field):
return field.field.widget.__class__.__name__
@register.filter(name='smarttruncatechars')
def smart_truncate_chars(value, max_length):
if len(value) > max_length:
truncd_val = value[:max_length]
if value[max_length] != ' ':
truncd_val = truncd_val[:truncd_val.rfind(' ')]
return truncd_val + '...'
return value
Revert "Create GEM filter to get BBM App ID"
This reverts commit 2805eb26865d7a12cbc0e6f7a71dbd99ba49224e.from django.template import Library
from django.conf import settings
register = Library()
@register.simple_tag()
def get_site_static_prefix():
return settings.SITE_STATIC_PREFIX
@register.filter('fieldtype')
def fieldtype(field):
return field.field.widget.__class__.__name__
@register.filter(name='smarttruncatechars')
def smart_truncate_chars(value, max_length):
if len(value) > max_length:
truncd_val = value[:max_length]
if value[max_length] != ' ':
truncd_val = truncd_val[:truncd_val.rfind(' ')]
return truncd_val + '...'
return value
|
<commit_before>from django.template import Library
from django.conf import settings
from gem.models import GemSettings
register = Library()
@register.simple_tag()
def get_site_static_prefix():
return settings.SITE_STATIC_PREFIX
@register.filter()
def get_bbm_app_id(request):
return GemSettings.for_site(request.site).bbm_app_id
@register.filter('fieldtype')
def fieldtype(field):
return field.field.widget.__class__.__name__
@register.filter(name='smarttruncatechars')
def smart_truncate_chars(value, max_length):
if len(value) > max_length:
truncd_val = value[:max_length]
if value[max_length] != ' ':
truncd_val = truncd_val[:truncd_val.rfind(' ')]
return truncd_val + '...'
return value
<commit_msg>Revert "Create GEM filter to get BBM App ID"
This reverts commit 2805eb26865d7a12cbc0e6f7a71dbd99ba49224e.<commit_after>from django.template import Library
from django.conf import settings
register = Library()
@register.simple_tag()
def get_site_static_prefix():
return settings.SITE_STATIC_PREFIX
@register.filter('fieldtype')
def fieldtype(field):
return field.field.widget.__class__.__name__
@register.filter(name='smarttruncatechars')
def smart_truncate_chars(value, max_length):
if len(value) > max_length:
truncd_val = value[:max_length]
if value[max_length] != ' ':
truncd_val = truncd_val[:truncd_val.rfind(' ')]
return truncd_val + '...'
return value
|
0e9c2fead2c8ad0194f1174ea7d5ad6acd74a12c
|
private_storage/storage.py
|
private_storage/storage.py
|
"""
Django Storage interface
"""
from django.core.files.storage import FileSystemStorage
from django.core.urlresolvers import reverse_lazy
from . import appconfig
__all__ = (
'private_storage',
'PrivateStorage',
)
class PrivateStorage(FileSystemStorage):
"""
Interface to the Django storage system,
storing the files in a private folder.
"""
def __init__(self, location=None, base_url=None, **kwargs):
if location is None:
location = appconfig.PRIVATE_STORAGE_ROOT
super(PrivateStorage, self).__init__(
location=location,
base_url=base_url,
**kwargs
)
if base_url is None:
# When base_url is not given, it's autodetected.
# However, as the super method checks for base_url.endswith('/'),
# the attribute is overwritten here to avoid breaking lazy evaluation.
self.base_url = reverse_lazy('serve_private_file', kwargs={'path': ''})
# Singleton instance.
private_storage = PrivateStorage()
|
"""
Django Storage interface
"""
from django.core.files.storage import FileSystemStorage
from django.core.urlresolvers import reverse_lazy
from django.utils.encoding import force_text
from . import appconfig
__all__ = (
'private_storage',
'PrivateStorage',
)
class PrivateStorage(FileSystemStorage):
"""
Interface to the Django storage system,
storing the files in a private folder.
"""
def __init__(self, location=None, base_url=None, **kwargs):
if location is None:
location = appconfig.PRIVATE_STORAGE_ROOT
super(PrivateStorage, self).__init__(
location=location,
base_url=base_url,
**kwargs
)
if base_url is None:
# When base_url is not given, it's autodetected.
# However, as the super method checks for base_url.endswith('/'),
# the attribute is overwritten here to avoid breaking lazy evaluation.
self.base_url = reverse_lazy('serve_private_file', kwargs={'path': ''})
def url(self, name):
# Make sure reverse_lazy() is evaluated, as Python 3 won't do this here.
self.base_url = force_text(self.base_url)
return super(PrivateStorage, self).url(name)
# Singleton instance.
private_storage = PrivateStorage()
|
Fix url reversing in Python 3
|
Fix url reversing in Python 3
|
Python
|
apache-2.0
|
edoburu/django-private-storage
|
"""
Django Storage interface
"""
from django.core.files.storage import FileSystemStorage
from django.core.urlresolvers import reverse_lazy
from . import appconfig
__all__ = (
'private_storage',
'PrivateStorage',
)
class PrivateStorage(FileSystemStorage):
"""
Interface to the Django storage system,
storing the files in a private folder.
"""
def __init__(self, location=None, base_url=None, **kwargs):
if location is None:
location = appconfig.PRIVATE_STORAGE_ROOT
super(PrivateStorage, self).__init__(
location=location,
base_url=base_url,
**kwargs
)
if base_url is None:
# When base_url is not given, it's autodetected.
# However, as the super method checks for base_url.endswith('/'),
# the attribute is overwritten here to avoid breaking lazy evaluation.
self.base_url = reverse_lazy('serve_private_file', kwargs={'path': ''})
# Singleton instance.
private_storage = PrivateStorage()
Fix url reversing in Python 3
|
"""
Django Storage interface
"""
from django.core.files.storage import FileSystemStorage
from django.core.urlresolvers import reverse_lazy
from django.utils.encoding import force_text
from . import appconfig
__all__ = (
'private_storage',
'PrivateStorage',
)
class PrivateStorage(FileSystemStorage):
"""
Interface to the Django storage system,
storing the files in a private folder.
"""
def __init__(self, location=None, base_url=None, **kwargs):
if location is None:
location = appconfig.PRIVATE_STORAGE_ROOT
super(PrivateStorage, self).__init__(
location=location,
base_url=base_url,
**kwargs
)
if base_url is None:
# When base_url is not given, it's autodetected.
# However, as the super method checks for base_url.endswith('/'),
# the attribute is overwritten here to avoid breaking lazy evaluation.
self.base_url = reverse_lazy('serve_private_file', kwargs={'path': ''})
def url(self, name):
# Make sure reverse_lazy() is evaluated, as Python 3 won't do this here.
self.base_url = force_text(self.base_url)
return super(PrivateStorage, self).url(name)
# Singleton instance.
private_storage = PrivateStorage()
|
<commit_before>"""
Django Storage interface
"""
from django.core.files.storage import FileSystemStorage
from django.core.urlresolvers import reverse_lazy
from . import appconfig
__all__ = (
'private_storage',
'PrivateStorage',
)
class PrivateStorage(FileSystemStorage):
"""
Interface to the Django storage system,
storing the files in a private folder.
"""
def __init__(self, location=None, base_url=None, **kwargs):
if location is None:
location = appconfig.PRIVATE_STORAGE_ROOT
super(PrivateStorage, self).__init__(
location=location,
base_url=base_url,
**kwargs
)
if base_url is None:
# When base_url is not given, it's autodetected.
# However, as the super method checks for base_url.endswith('/'),
# the attribute is overwritten here to avoid breaking lazy evaluation.
self.base_url = reverse_lazy('serve_private_file', kwargs={'path': ''})
# Singleton instance.
private_storage = PrivateStorage()
<commit_msg>Fix url reversing in Python 3<commit_after>
|
"""
Django Storage interface
"""
from django.core.files.storage import FileSystemStorage
from django.core.urlresolvers import reverse_lazy
from django.utils.encoding import force_text
from . import appconfig
__all__ = (
'private_storage',
'PrivateStorage',
)
class PrivateStorage(FileSystemStorage):
"""
Interface to the Django storage system,
storing the files in a private folder.
"""
def __init__(self, location=None, base_url=None, **kwargs):
if location is None:
location = appconfig.PRIVATE_STORAGE_ROOT
super(PrivateStorage, self).__init__(
location=location,
base_url=base_url,
**kwargs
)
if base_url is None:
# When base_url is not given, it's autodetected.
# However, as the super method checks for base_url.endswith('/'),
# the attribute is overwritten here to avoid breaking lazy evaluation.
self.base_url = reverse_lazy('serve_private_file', kwargs={'path': ''})
def url(self, name):
# Make sure reverse_lazy() is evaluated, as Python 3 won't do this here.
self.base_url = force_text(self.base_url)
return super(PrivateStorage, self).url(name)
# Singleton instance.
private_storage = PrivateStorage()
|
"""
Django Storage interface
"""
from django.core.files.storage import FileSystemStorage
from django.core.urlresolvers import reverse_lazy
from . import appconfig
__all__ = (
'private_storage',
'PrivateStorage',
)
class PrivateStorage(FileSystemStorage):
"""
Interface to the Django storage system,
storing the files in a private folder.
"""
def __init__(self, location=None, base_url=None, **kwargs):
if location is None:
location = appconfig.PRIVATE_STORAGE_ROOT
super(PrivateStorage, self).__init__(
location=location,
base_url=base_url,
**kwargs
)
if base_url is None:
# When base_url is not given, it's autodetected.
# However, as the super method checks for base_url.endswith('/'),
# the attribute is overwritten here to avoid breaking lazy evaluation.
self.base_url = reverse_lazy('serve_private_file', kwargs={'path': ''})
# Singleton instance.
private_storage = PrivateStorage()
Fix url reversing in Python 3"""
Django Storage interface
"""
from django.core.files.storage import FileSystemStorage
from django.core.urlresolvers import reverse_lazy
from django.utils.encoding import force_text
from . import appconfig
__all__ = (
'private_storage',
'PrivateStorage',
)
class PrivateStorage(FileSystemStorage):
"""
Interface to the Django storage system,
storing the files in a private folder.
"""
def __init__(self, location=None, base_url=None, **kwargs):
if location is None:
location = appconfig.PRIVATE_STORAGE_ROOT
super(PrivateStorage, self).__init__(
location=location,
base_url=base_url,
**kwargs
)
if base_url is None:
# When base_url is not given, it's autodetected.
# However, as the super method checks for base_url.endswith('/'),
# the attribute is overwritten here to avoid breaking lazy evaluation.
self.base_url = reverse_lazy('serve_private_file', kwargs={'path': ''})
def url(self, name):
# Make sure reverse_lazy() is evaluated, as Python 3 won't do this here.
self.base_url = force_text(self.base_url)
return super(PrivateStorage, self).url(name)
# Singleton instance.
private_storage = PrivateStorage()
|
<commit_before>"""
Django Storage interface
"""
from django.core.files.storage import FileSystemStorage
from django.core.urlresolvers import reverse_lazy
from . import appconfig
__all__ = (
'private_storage',
'PrivateStorage',
)
class PrivateStorage(FileSystemStorage):
"""
Interface to the Django storage system,
storing the files in a private folder.
"""
def __init__(self, location=None, base_url=None, **kwargs):
if location is None:
location = appconfig.PRIVATE_STORAGE_ROOT
super(PrivateStorage, self).__init__(
location=location,
base_url=base_url,
**kwargs
)
if base_url is None:
# When base_url is not given, it's autodetected.
# However, as the super method checks for base_url.endswith('/'),
# the attribute is overwritten here to avoid breaking lazy evaluation.
self.base_url = reverse_lazy('serve_private_file', kwargs={'path': ''})
# Singleton instance.
private_storage = PrivateStorage()
<commit_msg>Fix url reversing in Python 3<commit_after>"""
Django Storage interface
"""
from django.core.files.storage import FileSystemStorage
from django.core.urlresolvers import reverse_lazy
from django.utils.encoding import force_text
from . import appconfig
__all__ = (
'private_storage',
'PrivateStorage',
)
class PrivateStorage(FileSystemStorage):
"""
Interface to the Django storage system,
storing the files in a private folder.
"""
def __init__(self, location=None, base_url=None, **kwargs):
if location is None:
location = appconfig.PRIVATE_STORAGE_ROOT
super(PrivateStorage, self).__init__(
location=location,
base_url=base_url,
**kwargs
)
if base_url is None:
# When base_url is not given, it's autodetected.
# However, as the super method checks for base_url.endswith('/'),
# the attribute is overwritten here to avoid breaking lazy evaluation.
self.base_url = reverse_lazy('serve_private_file', kwargs={'path': ''})
def url(self, name):
# Make sure reverse_lazy() is evaluated, as Python 3 won't do this here.
self.base_url = force_text(self.base_url)
return super(PrivateStorage, self).url(name)
# Singleton instance.
private_storage = PrivateStorage()
|
8b2f393135c89a0a005a34cd57807d37b1b9d64e
|
docker/gunicorn_config.py
|
docker/gunicorn_config.py
|
import multiprocessing
from os import getenv
bind = '127.0.0.1:8001'
workers = multiprocessing.cpu_count() * 3
# graceful_timeout = 600
# timeout = 60
threads = multiprocessing.cpu_count() * 3
# max_requests = 300
pidfile = '/var/run/gunicorn.pid'
# max_requests_jitter = 50
errorlog = '/var/log/gunicorn/gunicorn-error.log'
loglevel = 'critical'
# Read the DEBUG setting from env var
try:
if getenv('DOCKER_SAL_DEBUG').lower() == 'true':
accesslog = '/var/log/gunicorn/gunicorn-access.log'
loglevel = 'info'
except Exception:
pass
# Protect against memory leaks by restarting each worker every 1000
# requests, with a randomized jitter of 0-50 requests.
max_requests = 1000
max_requests_jitter = 50
worker_class = 'gevent'
|
import multiprocessing
from os import getenv
bind = '127.0.0.1:8001'
workers = multiprocessing.cpu_count() * 3
graceful_timeout = 15
timeout = 30
threads = multiprocessing.cpu_count() * 3
pidfile = '/var/run/gunicorn.pid'
errorlog = '/var/log/gunicorn/gunicorn-error.log'
loglevel = 'critical'
# Read the DEBUG setting from env var
try:
if getenv('DOCKER_SAL_DEBUG').lower() == 'true':
accesslog = '/var/log/gunicorn/gunicorn-access.log'
loglevel = 'info'
except Exception:
pass
# Protect against memory leaks by restarting each worker every 1000
# requests, with a randomized jitter of 0-50 requests.
max_requests = 1000
max_requests_jitter = 50
worker_class = 'gevent'
|
Configure graceful_timeout to actually do something
|
Configure graceful_timeout to actually do something
|
Python
|
apache-2.0
|
salopensource/sal,sheagcraig/sal,salopensource/sal,sheagcraig/sal,sheagcraig/sal,sheagcraig/sal,salopensource/sal,salopensource/sal
|
import multiprocessing
from os import getenv
bind = '127.0.0.1:8001'
workers = multiprocessing.cpu_count() * 3
# graceful_timeout = 600
# timeout = 60
threads = multiprocessing.cpu_count() * 3
# max_requests = 300
pidfile = '/var/run/gunicorn.pid'
# max_requests_jitter = 50
errorlog = '/var/log/gunicorn/gunicorn-error.log'
loglevel = 'critical'
# Read the DEBUG setting from env var
try:
if getenv('DOCKER_SAL_DEBUG').lower() == 'true':
accesslog = '/var/log/gunicorn/gunicorn-access.log'
loglevel = 'info'
except Exception:
pass
# Protect against memory leaks by restarting each worker every 1000
# requests, with a randomized jitter of 0-50 requests.
max_requests = 1000
max_requests_jitter = 50
worker_class = 'gevent'
Configure graceful_timeout to actually do something
|
import multiprocessing
from os import getenv
bind = '127.0.0.1:8001'
workers = multiprocessing.cpu_count() * 3
graceful_timeout = 15
timeout = 30
threads = multiprocessing.cpu_count() * 3
pidfile = '/var/run/gunicorn.pid'
errorlog = '/var/log/gunicorn/gunicorn-error.log'
loglevel = 'critical'
# Read the DEBUG setting from env var
try:
if getenv('DOCKER_SAL_DEBUG').lower() == 'true':
accesslog = '/var/log/gunicorn/gunicorn-access.log'
loglevel = 'info'
except Exception:
pass
# Protect against memory leaks by restarting each worker every 1000
# requests, with a randomized jitter of 0-50 requests.
max_requests = 1000
max_requests_jitter = 50
worker_class = 'gevent'
|
<commit_before>import multiprocessing
from os import getenv
bind = '127.0.0.1:8001'
workers = multiprocessing.cpu_count() * 3
# graceful_timeout = 600
# timeout = 60
threads = multiprocessing.cpu_count() * 3
# max_requests = 300
pidfile = '/var/run/gunicorn.pid'
# max_requests_jitter = 50
errorlog = '/var/log/gunicorn/gunicorn-error.log'
loglevel = 'critical'
# Read the DEBUG setting from env var
try:
if getenv('DOCKER_SAL_DEBUG').lower() == 'true':
accesslog = '/var/log/gunicorn/gunicorn-access.log'
loglevel = 'info'
except Exception:
pass
# Protect against memory leaks by restarting each worker every 1000
# requests, with a randomized jitter of 0-50 requests.
max_requests = 1000
max_requests_jitter = 50
worker_class = 'gevent'
<commit_msg>Configure graceful_timeout to actually do something<commit_after>
|
import multiprocessing
from os import getenv
bind = '127.0.0.1:8001'
workers = multiprocessing.cpu_count() * 3
graceful_timeout = 15
timeout = 30
threads = multiprocessing.cpu_count() * 3
pidfile = '/var/run/gunicorn.pid'
errorlog = '/var/log/gunicorn/gunicorn-error.log'
loglevel = 'critical'
# Read the DEBUG setting from env var
try:
if getenv('DOCKER_SAL_DEBUG').lower() == 'true':
accesslog = '/var/log/gunicorn/gunicorn-access.log'
loglevel = 'info'
except Exception:
pass
# Protect against memory leaks by restarting each worker every 1000
# requests, with a randomized jitter of 0-50 requests.
max_requests = 1000
max_requests_jitter = 50
worker_class = 'gevent'
|
import multiprocessing
from os import getenv
bind = '127.0.0.1:8001'
workers = multiprocessing.cpu_count() * 3
# graceful_timeout = 600
# timeout = 60
threads = multiprocessing.cpu_count() * 3
# max_requests = 300
pidfile = '/var/run/gunicorn.pid'
# max_requests_jitter = 50
errorlog = '/var/log/gunicorn/gunicorn-error.log'
loglevel = 'critical'
# Read the DEBUG setting from env var
try:
if getenv('DOCKER_SAL_DEBUG').lower() == 'true':
accesslog = '/var/log/gunicorn/gunicorn-access.log'
loglevel = 'info'
except Exception:
pass
# Protect against memory leaks by restarting each worker every 1000
# requests, with a randomized jitter of 0-50 requests.
max_requests = 1000
max_requests_jitter = 50
worker_class = 'gevent'
Configure graceful_timeout to actually do somethingimport multiprocessing
from os import getenv
bind = '127.0.0.1:8001'
workers = multiprocessing.cpu_count() * 3
graceful_timeout = 15
timeout = 30
threads = multiprocessing.cpu_count() * 3
pidfile = '/var/run/gunicorn.pid'
errorlog = '/var/log/gunicorn/gunicorn-error.log'
loglevel = 'critical'
# Read the DEBUG setting from env var
try:
if getenv('DOCKER_SAL_DEBUG').lower() == 'true':
accesslog = '/var/log/gunicorn/gunicorn-access.log'
loglevel = 'info'
except Exception:
pass
# Protect against memory leaks by restarting each worker every 1000
# requests, with a randomized jitter of 0-50 requests.
max_requests = 1000
max_requests_jitter = 50
worker_class = 'gevent'
|
<commit_before>import multiprocessing
from os import getenv
bind = '127.0.0.1:8001'
workers = multiprocessing.cpu_count() * 3
# graceful_timeout = 600
# timeout = 60
threads = multiprocessing.cpu_count() * 3
# max_requests = 300
pidfile = '/var/run/gunicorn.pid'
# max_requests_jitter = 50
errorlog = '/var/log/gunicorn/gunicorn-error.log'
loglevel = 'critical'
# Read the DEBUG setting from env var
try:
if getenv('DOCKER_SAL_DEBUG').lower() == 'true':
accesslog = '/var/log/gunicorn/gunicorn-access.log'
loglevel = 'info'
except Exception:
pass
# Protect against memory leaks by restarting each worker every 1000
# requests, with a randomized jitter of 0-50 requests.
max_requests = 1000
max_requests_jitter = 50
worker_class = 'gevent'
<commit_msg>Configure graceful_timeout to actually do something<commit_after>import multiprocessing
from os import getenv
bind = '127.0.0.1:8001'
workers = multiprocessing.cpu_count() * 3
graceful_timeout = 15
timeout = 30
threads = multiprocessing.cpu_count() * 3
pidfile = '/var/run/gunicorn.pid'
errorlog = '/var/log/gunicorn/gunicorn-error.log'
loglevel = 'critical'
# Read the DEBUG setting from env var
try:
if getenv('DOCKER_SAL_DEBUG').lower() == 'true':
accesslog = '/var/log/gunicorn/gunicorn-access.log'
loglevel = 'info'
except Exception:
pass
# Protect against memory leaks by restarting each worker every 1000
# requests, with a randomized jitter of 0-50 requests.
max_requests = 1000
max_requests_jitter = 50
worker_class = 'gevent'
|
4ec431669134f8ac01fe5ef1d883bc4dc31fd6d7
|
number_to_words_test.py
|
number_to_words_test.py
|
import unittest
from number_to_words import NumberToWords
class TestNumberToWords(unittest.TestCase):
def setUp(self):
self.n2w = NumberToWords()
def tearDown(self):
self.n2w = None
def test_zero_and_single_digits(self):
NUMBERS = {
0: 'zero', 1: 'one', 2: 'two', 3: 'three', 4: 'four', 5: 'five',
6: 'six', 7: 'seven', 8: 'eight', 9: 'nine'
}
self.assert_numbers_equal_to_strings(NUMBERS)
def assert_numbers_equal_to_strings(self, numbers):
for number, string in numbers.iteritems():
self.assertEqual(string, self.n2w.convert(number))
if __name__ == '__main__':
unittest.main()
|
import unittest
from number_to_words import NumberToWords
class TestNumberToWords(unittest.TestCase):
def setUp(self):
self.n2w = NumberToWords()
def tearDown(self):
self.n2w = None
def test_zero_and_single_digits(self):
NUMBERS = {
0: 'zero', 1: 'one', 2: 'two', 3: 'three', 4: 'four', 5: 'five',
6: 'six', 7: 'seven', 8: 'eight', 9: 'nine'
}
self.assert_numbers_equal_to_strings(NUMBERS)
def test_eleven_to_nineteen(self):
NUMBERS = {
11: 'eleven', 12: 'twelve', 13: 'thirteen', 14: 'fourteen',
15: 'fifteen', 16: 'sixteen', 17: 'seventeen', 18: 'eighteen',
19: 'nineteen'
}
self.assert_numbers_equal_to_strings(NUMBERS)
def assert_numbers_equal_to_strings(self, numbers):
for number, string in numbers.iteritems():
self.assertEqual(string, self.n2w.convert(number))
if __name__ == '__main__':
unittest.main()
|
Add tests for numbers 11 to 19
|
Add tests for numbers 11 to 19
|
Python
|
mit
|
ianfieldhouse/number_to_words
|
import unittest
from number_to_words import NumberToWords
class TestNumberToWords(unittest.TestCase):
def setUp(self):
self.n2w = NumberToWords()
def tearDown(self):
self.n2w = None
def test_zero_and_single_digits(self):
NUMBERS = {
0: 'zero', 1: 'one', 2: 'two', 3: 'three', 4: 'four', 5: 'five',
6: 'six', 7: 'seven', 8: 'eight', 9: 'nine'
}
self.assert_numbers_equal_to_strings(NUMBERS)
def assert_numbers_equal_to_strings(self, numbers):
for number, string in numbers.iteritems():
self.assertEqual(string, self.n2w.convert(number))
if __name__ == '__main__':
unittest.main()
Add tests for numbers 11 to 19
|
import unittest

from number_to_words import NumberToWords


class TestNumberToWords(unittest.TestCase):
    """Unit tests for NumberToWords.convert() on small integers."""

    def setUp(self):
        self.n2w = NumberToWords()

    def tearDown(self):
        self.n2w = None

    def test_zero_and_single_digits(self):
        NUMBERS = {
            0: 'zero', 1: 'one', 2: 'two', 3: 'three', 4: 'four', 5: 'five',
            6: 'six', 7: 'seven', 8: 'eight', 9: 'nine'
        }
        self.assert_numbers_equal_to_strings(NUMBERS)

    def test_eleven_to_nineteen(self):
        NUMBERS = {
            11: 'eleven', 12: 'twelve', 13: 'thirteen', 14: 'fourteen',
            15: 'fifteen', 16: 'sixteen', 17: 'seventeen', 18: 'eighteen',
            19: 'nineteen'
        }
        self.assert_numbers_equal_to_strings(NUMBERS)

    def assert_numbers_equal_to_strings(self, numbers):
        """Assert convert(number) == string for every (number, string) pair.

        Uses .items() rather than the Python-2-only .iteritems() so the
        suite also runs under Python 3; iteration behavior is identical.
        """
        for number, string in numbers.items():
            self.assertEqual(string, self.n2w.convert(number))


if __name__ == '__main__':
    unittest.main()
|
<commit_before>import unittest
from number_to_words import NumberToWords
class TestNumberToWords(unittest.TestCase):
def setUp(self):
self.n2w = NumberToWords()
def tearDown(self):
self.n2w = None
def test_zero_and_single_digits(self):
NUMBERS = {
0: 'zero', 1: 'one', 2: 'two', 3: 'three', 4: 'four', 5: 'five',
6: 'six', 7: 'seven', 8: 'eight', 9: 'nine'
}
self.assert_numbers_equal_to_strings(NUMBERS)
def assert_numbers_equal_to_strings(self, numbers):
for number, string in numbers.iteritems():
self.assertEqual(string, self.n2w.convert(number))
if __name__ == '__main__':
unittest.main()
<commit_msg>Add tests for numbers 11 to 19<commit_after>
|
import unittest
from number_to_words import NumberToWords
class TestNumberToWords(unittest.TestCase):
def setUp(self):
self.n2w = NumberToWords()
def tearDown(self):
self.n2w = None
def test_zero_and_single_digits(self):
NUMBERS = {
0: 'zero', 1: 'one', 2: 'two', 3: 'three', 4: 'four', 5: 'five',
6: 'six', 7: 'seven', 8: 'eight', 9: 'nine'
}
self.assert_numbers_equal_to_strings(NUMBERS)
def test_eleven_to_nineteen(self):
NUMBERS = {
11: 'eleven', 12: 'twelve', 13: 'thirteen', 14: 'fourteen',
15: 'fifteen', 16: 'sixteen', 17: 'seventeen', 18: 'eighteen',
19: 'nineteen'
}
self.assert_numbers_equal_to_strings(NUMBERS)
def assert_numbers_equal_to_strings(self, numbers):
for number, string in numbers.iteritems():
self.assertEqual(string, self.n2w.convert(number))
if __name__ == '__main__':
unittest.main()
|
import unittest
from number_to_words import NumberToWords
class TestNumberToWords(unittest.TestCase):
def setUp(self):
self.n2w = NumberToWords()
def tearDown(self):
self.n2w = None
def test_zero_and_single_digits(self):
NUMBERS = {
0: 'zero', 1: 'one', 2: 'two', 3: 'three', 4: 'four', 5: 'five',
6: 'six', 7: 'seven', 8: 'eight', 9: 'nine'
}
self.assert_numbers_equal_to_strings(NUMBERS)
def assert_numbers_equal_to_strings(self, numbers):
for number, string in numbers.iteritems():
self.assertEqual(string, self.n2w.convert(number))
if __name__ == '__main__':
unittest.main()
Add tests for numbers 11 to 19import unittest
from number_to_words import NumberToWords
class TestNumberToWords(unittest.TestCase):
def setUp(self):
self.n2w = NumberToWords()
def tearDown(self):
self.n2w = None
def test_zero_and_single_digits(self):
NUMBERS = {
0: 'zero', 1: 'one', 2: 'two', 3: 'three', 4: 'four', 5: 'five',
6: 'six', 7: 'seven', 8: 'eight', 9: 'nine'
}
self.assert_numbers_equal_to_strings(NUMBERS)
def test_eleven_to_nineteen(self):
NUMBERS = {
11: 'eleven', 12: 'twelve', 13: 'thirteen', 14: 'fourteen',
15: 'fifteen', 16: 'sixteen', 17: 'seventeen', 18: 'eighteen',
19: 'nineteen'
}
self.assert_numbers_equal_to_strings(NUMBERS)
def assert_numbers_equal_to_strings(self, numbers):
for number, string in numbers.iteritems():
self.assertEqual(string, self.n2w.convert(number))
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
from number_to_words import NumberToWords
class TestNumberToWords(unittest.TestCase):
def setUp(self):
self.n2w = NumberToWords()
def tearDown(self):
self.n2w = None
def test_zero_and_single_digits(self):
NUMBERS = {
0: 'zero', 1: 'one', 2: 'two', 3: 'three', 4: 'four', 5: 'five',
6: 'six', 7: 'seven', 8: 'eight', 9: 'nine'
}
self.assert_numbers_equal_to_strings(NUMBERS)
def assert_numbers_equal_to_strings(self, numbers):
for number, string in numbers.iteritems():
self.assertEqual(string, self.n2w.convert(number))
if __name__ == '__main__':
unittest.main()
<commit_msg>Add tests for numbers 11 to 19<commit_after>import unittest
from number_to_words import NumberToWords
class TestNumberToWords(unittest.TestCase):
def setUp(self):
self.n2w = NumberToWords()
def tearDown(self):
self.n2w = None
def test_zero_and_single_digits(self):
NUMBERS = {
0: 'zero', 1: 'one', 2: 'two', 3: 'three', 4: 'four', 5: 'five',
6: 'six', 7: 'seven', 8: 'eight', 9: 'nine'
}
self.assert_numbers_equal_to_strings(NUMBERS)
def test_eleven_to_nineteen(self):
NUMBERS = {
11: 'eleven', 12: 'twelve', 13: 'thirteen', 14: 'fourteen',
15: 'fifteen', 16: 'sixteen', 17: 'seventeen', 18: 'eighteen',
19: 'nineteen'
}
self.assert_numbers_equal_to_strings(NUMBERS)
def assert_numbers_equal_to_strings(self, numbers):
for number, string in numbers.iteritems():
self.assertEqual(string, self.n2w.convert(number))
if __name__ == '__main__':
unittest.main()
|
954c38e9242da5bd2f8c036dd7c774c942860978
|
geotrek/api/mobile/urls.py
|
geotrek/api/mobile/urls.py
|
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import url, include
from rest_framework import routers
from geotrek.api.mobile import views as api_mobile
router = routers.DefaultRouter()
if 'geotrek.flatpages' in settings.INSTALLED_APPS:
router.register(r'flatpages', api_mobile.FlatPageViewSet, base_name='flatpage')
if 'geotrek.trekking' in settings.INSTALLED_APPS:
router.register(r'treks', api_mobile.TrekViewSet, base_name='treks')
urlpatterns = [
url(r'^$', api_mobile.SwaggerSchemaView.as_view(), name="schema"),
url(r'^', include(router.urls)),
]
if 'geotrek.flatpages' and 'geotrek.trekking' and 'geotrek.tourism':
urlpatterns += url(r'^settings/$', api_mobile.SettingsView.as_view(), name='settings'),
|
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import url, include
from rest_framework import routers
if 'geotrek.flatpages' and 'geotrek.trekking' and 'geotrek.tourism':
from geotrek.api.mobile import views as api_mobile
router = routers.DefaultRouter()
urlpatterns = [
url(r'^$', api_mobile.SwaggerSchemaView.as_view(), name="schema"),
url(r'^', include(router.urls)),
]
urlpatterns += url(r'^settings/$', api_mobile.SettingsView.as_view(), name='settings'),
if 'geotrek.flatpages' in settings.INSTALLED_APPS:
router.register(r'flatpages', api_mobile.FlatPageViewSet, base_name='flatpage')
if 'geotrek.trekking' in settings.INSTALLED_APPS:
router.register(r'treks', api_mobile.TrekViewSet, base_name='treks')
|
Fix api mobile only with geotrek flatpages trekking tourism
|
Fix api mobile only with geotrek flatpages trekking tourism
|
Python
|
bsd-2-clause
|
makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek
|
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import url, include
from rest_framework import routers
from geotrek.api.mobile import views as api_mobile
router = routers.DefaultRouter()
if 'geotrek.flatpages' in settings.INSTALLED_APPS:
router.register(r'flatpages', api_mobile.FlatPageViewSet, base_name='flatpage')
if 'geotrek.trekking' in settings.INSTALLED_APPS:
router.register(r'treks', api_mobile.TrekViewSet, base_name='treks')
urlpatterns = [
url(r'^$', api_mobile.SwaggerSchemaView.as_view(), name="schema"),
url(r'^', include(router.urls)),
]
if 'geotrek.flatpages' and 'geotrek.trekking' and 'geotrek.tourism':
urlpatterns += url(r'^settings/$', api_mobile.SettingsView.as_view(), name='settings'),
Fix api mobile only with geotrek flatpages trekking tourism
|
from __future__ import unicode_literals

from django.conf import settings
from django.conf.urls import url, include
from rest_framework import routers

# Expose the mobile API only when every app it depends on is installed.
# NOTE: the original guard was `if 'geotrek.flatpages' and ...:` — bare
# non-empty string literals, which is always truthy; membership in
# INSTALLED_APPS is what was intended.
if all(app in settings.INSTALLED_APPS
       for app in ('geotrek.flatpages', 'geotrek.trekking', 'geotrek.tourism')):
    from geotrek.api.mobile import views as api_mobile

    router = routers.DefaultRouter()
    # Register viewsets BEFORE include(router.urls) below: include() consumes
    # the router's URL list at that point, so registrations made afterwards
    # (as in the original ordering) never appear in the URLconf.
    router.register(r'flatpages', api_mobile.FlatPageViewSet, base_name='flatpage')
    router.register(r'treks', api_mobile.TrekViewSet, base_name='treks')

    urlpatterns = [
        url(r'^$', api_mobile.SwaggerSchemaView.as_view(), name="schema"),
        url(r'^', include(router.urls)),
    ]
    urlpatterns += url(r'^settings/$', api_mobile.SettingsView.as_view(), name='settings'),
|
<commit_before>from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import url, include
from rest_framework import routers
from geotrek.api.mobile import views as api_mobile
router = routers.DefaultRouter()
if 'geotrek.flatpages' in settings.INSTALLED_APPS:
router.register(r'flatpages', api_mobile.FlatPageViewSet, base_name='flatpage')
if 'geotrek.trekking' in settings.INSTALLED_APPS:
router.register(r'treks', api_mobile.TrekViewSet, base_name='treks')
urlpatterns = [
url(r'^$', api_mobile.SwaggerSchemaView.as_view(), name="schema"),
url(r'^', include(router.urls)),
]
if 'geotrek.flatpages' and 'geotrek.trekking' and 'geotrek.tourism':
urlpatterns += url(r'^settings/$', api_mobile.SettingsView.as_view(), name='settings'),
<commit_msg>Fix api mobile only with geotrek flatpages trekking tourism<commit_after>
|
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import url, include
from rest_framework import routers
if 'geotrek.flatpages' and 'geotrek.trekking' and 'geotrek.tourism':
from geotrek.api.mobile import views as api_mobile
router = routers.DefaultRouter()
urlpatterns = [
url(r'^$', api_mobile.SwaggerSchemaView.as_view(), name="schema"),
url(r'^', include(router.urls)),
]
urlpatterns += url(r'^settings/$', api_mobile.SettingsView.as_view(), name='settings'),
if 'geotrek.flatpages' in settings.INSTALLED_APPS:
router.register(r'flatpages', api_mobile.FlatPageViewSet, base_name='flatpage')
if 'geotrek.trekking' in settings.INSTALLED_APPS:
router.register(r'treks', api_mobile.TrekViewSet, base_name='treks')
|
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import url, include
from rest_framework import routers
from geotrek.api.mobile import views as api_mobile
router = routers.DefaultRouter()
if 'geotrek.flatpages' in settings.INSTALLED_APPS:
router.register(r'flatpages', api_mobile.FlatPageViewSet, base_name='flatpage')
if 'geotrek.trekking' in settings.INSTALLED_APPS:
router.register(r'treks', api_mobile.TrekViewSet, base_name='treks')
urlpatterns = [
url(r'^$', api_mobile.SwaggerSchemaView.as_view(), name="schema"),
url(r'^', include(router.urls)),
]
if 'geotrek.flatpages' and 'geotrek.trekking' and 'geotrek.tourism':
urlpatterns += url(r'^settings/$', api_mobile.SettingsView.as_view(), name='settings'),
Fix api mobile only with geotrek flatpages trekking tourismfrom __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import url, include
from rest_framework import routers
if 'geotrek.flatpages' and 'geotrek.trekking' and 'geotrek.tourism':
from geotrek.api.mobile import views as api_mobile
router = routers.DefaultRouter()
urlpatterns = [
url(r'^$', api_mobile.SwaggerSchemaView.as_view(), name="schema"),
url(r'^', include(router.urls)),
]
urlpatterns += url(r'^settings/$', api_mobile.SettingsView.as_view(), name='settings'),
if 'geotrek.flatpages' in settings.INSTALLED_APPS:
router.register(r'flatpages', api_mobile.FlatPageViewSet, base_name='flatpage')
if 'geotrek.trekking' in settings.INSTALLED_APPS:
router.register(r'treks', api_mobile.TrekViewSet, base_name='treks')
|
<commit_before>from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import url, include
from rest_framework import routers
from geotrek.api.mobile import views as api_mobile
router = routers.DefaultRouter()
if 'geotrek.flatpages' in settings.INSTALLED_APPS:
router.register(r'flatpages', api_mobile.FlatPageViewSet, base_name='flatpage')
if 'geotrek.trekking' in settings.INSTALLED_APPS:
router.register(r'treks', api_mobile.TrekViewSet, base_name='treks')
urlpatterns = [
url(r'^$', api_mobile.SwaggerSchemaView.as_view(), name="schema"),
url(r'^', include(router.urls)),
]
if 'geotrek.flatpages' and 'geotrek.trekking' and 'geotrek.tourism':
urlpatterns += url(r'^settings/$', api_mobile.SettingsView.as_view(), name='settings'),
<commit_msg>Fix api mobile only with geotrek flatpages trekking tourism<commit_after>from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import url, include
from rest_framework import routers
if 'geotrek.flatpages' and 'geotrek.trekking' and 'geotrek.tourism':
from geotrek.api.mobile import views as api_mobile
router = routers.DefaultRouter()
urlpatterns = [
url(r'^$', api_mobile.SwaggerSchemaView.as_view(), name="schema"),
url(r'^', include(router.urls)),
]
urlpatterns += url(r'^settings/$', api_mobile.SettingsView.as_view(), name='settings'),
if 'geotrek.flatpages' in settings.INSTALLED_APPS:
router.register(r'flatpages', api_mobile.FlatPageViewSet, base_name='flatpage')
if 'geotrek.trekking' in settings.INSTALLED_APPS:
router.register(r'treks', api_mobile.TrekViewSet, base_name='treks')
|
f5e4c1c0429ca06365778db7668296c266335628
|
nbs/api/hr.py
|
nbs/api/hr.py
|
# -*- coding: utf-8 -*-
from flask import jsonify, request, url_for
from nbs.models import Employee, AttendanceRecord
from nbs.schema import EmployeeSchema, AttendanceRecordSchema
from nbs.utils.api import ResourceApi, NestedApi, route, build_result
from nbs.utils.args import get_args, build_args
employee_s = EmployeeSchema()
class EmployeeApi(ResourceApi):
route_base = 'employees'
@classmethod
def get_obj(cls, id):
return Employee.query.get_or_404(id)
def index(self):
return build_result(Employee.query, employee_s)
|
# -*- coding: utf-8 -*-
from flask import jsonify, request, url_for
from nbs.models import db, Employee, AttendanceRecord
from nbs.schema import EmployeeSchema, AttendanceRecordSchema
from nbs.utils.api import ResourceApi, NestedApi, route, build_result
from nbs.utils.args import get_args, build_args
employee_s = EmployeeSchema()
class EmployeeApi(ResourceApi):
route_base = 'employees'
@classmethod
def get_obj(cls, id):
return Employee.query.get_or_404(id)
def index(self):
return build_result(Employee.query, employee_s)
@route('<int:id>')
def get(self, id):
employee = self.get_obj(id)
return build_result(employee, employee_s)
@route('<int:id>', methods=['DELETE'])
def delete(self, id):
employee = self.get_obj(id)
db.session.delete(employee)
db.session.commit()
# TODO: Remove related attendance records
return '', 204
|
Add some methods to employee api, delete needs more work
|
Add some methods to employee api, delete needs more work
|
Python
|
mit
|
coyotevz/nobix-app
|
# -*- coding: utf-8 -*-
from flask import jsonify, request, url_for
from nbs.models import Employee, AttendanceRecord
from nbs.schema import EmployeeSchema, AttendanceRecordSchema
from nbs.utils.api import ResourceApi, NestedApi, route, build_result
from nbs.utils.args import get_args, build_args
employee_s = EmployeeSchema()
class EmployeeApi(ResourceApi):
route_base = 'employees'
@classmethod
def get_obj(cls, id):
return Employee.query.get_or_404(id)
def index(self):
return build_result(Employee.query, employee_s)
Add some methods to employee api, delete needs more work
|
# -*- coding: utf-8 -*-

from flask import jsonify, request, url_for

from nbs.models import db, Employee, AttendanceRecord
from nbs.schema import EmployeeSchema, AttendanceRecordSchema
from nbs.utils.api import ResourceApi, NestedApi, route, build_result
from nbs.utils.args import get_args, build_args

employee_s = EmployeeSchema()


class EmployeeApi(ResourceApi):
    """REST resource for employees: listing, retrieval and deletion."""

    route_base = 'employees'

    @classmethod
    def get_obj(cls, id):
        # Aborts with a 404 response when no employee has this primary key.
        return Employee.query.get_or_404(id)

    def index(self):
        # Serialize the whole employee collection.
        return build_result(Employee.query, employee_s)

    @route('<int:id>')
    def get(self, id):
        # Serialize a single employee looked up by primary key.
        return build_result(self.get_obj(id), employee_s)

    @route('<int:id>', methods=['DELETE'])
    def delete(self, id):
        # Remove the employee row and persist immediately.
        db.session.delete(self.get_obj(id))
        db.session.commit()
        # TODO: Remove related attendance records
        return '', 204
|
<commit_before># -*- coding: utf-8 -*-
from flask import jsonify, request, url_for
from nbs.models import Employee, AttendanceRecord
from nbs.schema import EmployeeSchema, AttendanceRecordSchema
from nbs.utils.api import ResourceApi, NestedApi, route, build_result
from nbs.utils.args import get_args, build_args
employee_s = EmployeeSchema()
class EmployeeApi(ResourceApi):
route_base = 'employees'
@classmethod
def get_obj(cls, id):
return Employee.query.get_or_404(id)
def index(self):
return build_result(Employee.query, employee_s)
<commit_msg>Add some methods to employee api, delete needs more work<commit_after>
|
# -*- coding: utf-8 -*-
from flask import jsonify, request, url_for
from nbs.models import db, Employee, AttendanceRecord
from nbs.schema import EmployeeSchema, AttendanceRecordSchema
from nbs.utils.api import ResourceApi, NestedApi, route, build_result
from nbs.utils.args import get_args, build_args
employee_s = EmployeeSchema()
class EmployeeApi(ResourceApi):
route_base = 'employees'
@classmethod
def get_obj(cls, id):
return Employee.query.get_or_404(id)
def index(self):
return build_result(Employee.query, employee_s)
@route('<int:id>')
def get(self, id):
employee = self.get_obj(id)
return build_result(employee, employee_s)
@route('<int:id>', methods=['DELETE'])
def delete(self, id):
employee = self.get_obj(id)
db.session.delete(employee)
db.session.commit()
# TODO: Remove related attendance records
return '', 204
|
# -*- coding: utf-8 -*-
from flask import jsonify, request, url_for
from nbs.models import Employee, AttendanceRecord
from nbs.schema import EmployeeSchema, AttendanceRecordSchema
from nbs.utils.api import ResourceApi, NestedApi, route, build_result
from nbs.utils.args import get_args, build_args
employee_s = EmployeeSchema()
class EmployeeApi(ResourceApi):
route_base = 'employees'
@classmethod
def get_obj(cls, id):
return Employee.query.get_or_404(id)
def index(self):
return build_result(Employee.query, employee_s)
Add some methods to employee api, delete needs more work# -*- coding: utf-8 -*-
from flask import jsonify, request, url_for
from nbs.models import db, Employee, AttendanceRecord
from nbs.schema import EmployeeSchema, AttendanceRecordSchema
from nbs.utils.api import ResourceApi, NestedApi, route, build_result
from nbs.utils.args import get_args, build_args
employee_s = EmployeeSchema()
class EmployeeApi(ResourceApi):
route_base = 'employees'
@classmethod
def get_obj(cls, id):
return Employee.query.get_or_404(id)
def index(self):
return build_result(Employee.query, employee_s)
@route('<int:id>')
def get(self, id):
employee = self.get_obj(id)
return build_result(employee, employee_s)
@route('<int:id>', methods=['DELETE'])
def delete(self, id):
employee = self.get_obj(id)
db.session.delete(employee)
db.session.commit()
# TODO: Remove related attendance records
return '', 204
|
<commit_before># -*- coding: utf-8 -*-
from flask import jsonify, request, url_for
from nbs.models import Employee, AttendanceRecord
from nbs.schema import EmployeeSchema, AttendanceRecordSchema
from nbs.utils.api import ResourceApi, NestedApi, route, build_result
from nbs.utils.args import get_args, build_args
employee_s = EmployeeSchema()
class EmployeeApi(ResourceApi):
route_base = 'employees'
@classmethod
def get_obj(cls, id):
return Employee.query.get_or_404(id)
def index(self):
return build_result(Employee.query, employee_s)
<commit_msg>Add some methods to employee api, delete needs more work<commit_after># -*- coding: utf-8 -*-
from flask import jsonify, request, url_for
from nbs.models import db, Employee, AttendanceRecord
from nbs.schema import EmployeeSchema, AttendanceRecordSchema
from nbs.utils.api import ResourceApi, NestedApi, route, build_result
from nbs.utils.args import get_args, build_args
employee_s = EmployeeSchema()
class EmployeeApi(ResourceApi):
route_base = 'employees'
@classmethod
def get_obj(cls, id):
return Employee.query.get_or_404(id)
def index(self):
return build_result(Employee.query, employee_s)
@route('<int:id>')
def get(self, id):
employee = self.get_obj(id)
return build_result(employee, employee_s)
@route('<int:id>', methods=['DELETE'])
def delete(self, id):
employee = self.get_obj(id)
db.session.delete(employee)
db.session.commit()
# TODO: Remove related attendance records
return '', 204
|
b07c9ae13f80cb0afbe787543b28e15d546763e6
|
elevator/db.py
|
elevator/db.py
|
import os
import md5
import leveldb
class DatabasesHandler(dict):
def __init__(self, dest, *args, **kwargs):
self['index'] = {}
self.dest = dest
self._init_default_db()
def _init_default_db(self):
self.add('default')
def load(self):
# Retrieving every databases from database store on fs,
# and adding them to backend databases handler.
for db_name in os.listdir(self.store_path):
if db_name != 'default':
db_path = os.path.join(self.store_path, db_name)
db_uid = md5.new(db_name).digest()
self['index'].update({db_name: db_uid})
self.update({db_uid: leveldb.LevelDB(db_path)})
def add(self, db_name):
new_db_name = db_name
new_db_uid = md5.new(new_db_name).digest()
new_db_dest = os.path.join(self.dest, new_db_name)
self['index'].update({new_db_name: new_db_uid})
self.update({new_db_uid: leveldb.LevelDB(new_db_dest)})
|
import os
import md5
import leveldb
class DatabasesHandler(dict):
def __init__(self, dest, *args, **kwargs):
self['index'] = {}
self.dest = dest
self._init_default_db()
def _init_default_db(self):
self.add('default')
def load(self):
# Retrieving every databases from database store on fs,
# and adding them to backend databases handler.
for db_name in os.listdir(self.store_path):
if db_name != 'default':
db_path = os.path.join(self.store_path, db_name)
db_uid = md5.new(db_name).digest()
self['index'].update({db_name: db_uid})
self.update({db_uid: leveldb.LevelDB(db_path)})
def add(self, db_name):
new_db_name = db_name
new_db_uid = md5.new(new_db_name).digest()
new_db_dest = os.path.join(self.dest, new_db_name)
self['index'].update({new_db_name: new_db_uid})
self.update({new_db_uid: leveldb.LevelDB(new_db_dest)})
def drop(self, db_name):
db_uid = self['index'].pop(db_name)
del self['db_uid']
os.remove(os.path.join(self.dest, db_name))
self.pop(db_uid)
def list(self):
return [db_name for db_name in self['index'].itervalues()]
|
Add : database handler list and del methods
|
Add : database handler list and del methods
|
Python
|
mit
|
oleiade/Elevator
|
import os
import md5
import leveldb
class DatabasesHandler(dict):
def __init__(self, dest, *args, **kwargs):
self['index'] = {}
self.dest = dest
self._init_default_db()
def _init_default_db(self):
self.add('default')
def load(self):
# Retrieving every databases from database store on fs,
# and adding them to backend databases handler.
for db_name in os.listdir(self.store_path):
if db_name != 'default':
db_path = os.path.join(self.store_path, db_name)
db_uid = md5.new(db_name).digest()
self['index'].update({db_name: db_uid})
self.update({db_uid: leveldb.LevelDB(db_path)})
def add(self, db_name):
new_db_name = db_name
new_db_uid = md5.new(new_db_name).digest()
new_db_dest = os.path.join(self.dest, new_db_name)
self['index'].update({new_db_name: new_db_uid})
self.update({new_db_uid: leveldb.LevelDB(new_db_dest)})
Add : database handler list and del methods
|
import os
import md5

import leveldb


class DatabasesHandler(dict):
    """Registry of open LevelDB handles.

    Maps md5 uid -> leveldb.LevelDB handle; the special key ``'index'``
    holds a name -> uid mapping used for lookups by database name.
    """

    def __init__(self, dest, *args, **kwargs):
        self['index'] = {}
        self.dest = dest
        self._init_default_db()

    def _init_default_db(self):
        # Every handler always exposes a 'default' database.
        self.add('default')

    def load(self):
        # Retrieving every databases from database store on fs,
        # and adding them to backend databases handler.
        # NOTE(review): self.store_path is never assigned in __init__ —
        # presumably set elsewhere; confirm before relying on load().
        for db_name in os.listdir(self.store_path):
            if db_name != 'default':
                db_path = os.path.join(self.store_path, db_name)
                db_uid = md5.new(db_name).digest()
                self['index'][db_name] = db_uid
                self[db_uid] = leveldb.LevelDB(db_path)

    def add(self, db_name):
        """Open (creating if needed) the named database and register it."""
        new_db_uid = md5.new(db_name).digest()
        new_db_dest = os.path.join(self.dest, db_name)
        self['index'][db_name] = new_db_uid
        self[new_db_uid] = leveldb.LevelDB(new_db_dest)

    def drop(self, db_name):
        """Unregister the named database and delete its on-disk store.

        The original did ``del self['db_uid']`` — a string literal, which
        always raised KeyError before any cleanup ran; the uid popped from
        the index is what must be removed.
        """
        db_uid = self['index'].pop(db_name)
        self.pop(db_uid)
        # NOTE(review): a LevelDB database is a *directory*; os.remove()
        # fails on directories — shutil.rmtree is probably intended. TODO confirm.
        os.remove(os.path.join(self.dest, db_name))

    def list(self):
        """Return the registered database names.

        Names are the index *keys*; the original iterated itervalues(),
        which yields the md5 uids instead of names.
        """
        return [db_name for db_name in self['index'].iterkeys()]
|
<commit_before>import os
import md5
import leveldb
class DatabasesHandler(dict):
def __init__(self, dest, *args, **kwargs):
self['index'] = {}
self.dest = dest
self._init_default_db()
def _init_default_db(self):
self.add('default')
def load(self):
# Retrieving every databases from database store on fs,
# and adding them to backend databases handler.
for db_name in os.listdir(self.store_path):
if db_name != 'default':
db_path = os.path.join(self.store_path, db_name)
db_uid = md5.new(db_name).digest()
self['index'].update({db_name: db_uid})
self.update({db_uid: leveldb.LevelDB(db_path)})
def add(self, db_name):
new_db_name = db_name
new_db_uid = md5.new(new_db_name).digest()
new_db_dest = os.path.join(self.dest, new_db_name)
self['index'].update({new_db_name: new_db_uid})
self.update({new_db_uid: leveldb.LevelDB(new_db_dest)})
<commit_msg>Add : database handler list and del methods<commit_after>
|
import os
import md5
import leveldb
class DatabasesHandler(dict):
def __init__(self, dest, *args, **kwargs):
self['index'] = {}
self.dest = dest
self._init_default_db()
def _init_default_db(self):
self.add('default')
def load(self):
# Retrieving every databases from database store on fs,
# and adding them to backend databases handler.
for db_name in os.listdir(self.store_path):
if db_name != 'default':
db_path = os.path.join(self.store_path, db_name)
db_uid = md5.new(db_name).digest()
self['index'].update({db_name: db_uid})
self.update({db_uid: leveldb.LevelDB(db_path)})
def add(self, db_name):
new_db_name = db_name
new_db_uid = md5.new(new_db_name).digest()
new_db_dest = os.path.join(self.dest, new_db_name)
self['index'].update({new_db_name: new_db_uid})
self.update({new_db_uid: leveldb.LevelDB(new_db_dest)})
def drop(self, db_name):
db_uid = self['index'].pop(db_name)
del self['db_uid']
os.remove(os.path.join(self.dest, db_name))
self.pop(db_uid)
def list(self):
return [db_name for db_name in self['index'].itervalues()]
|
import os
import md5
import leveldb
class DatabasesHandler(dict):
def __init__(self, dest, *args, **kwargs):
self['index'] = {}
self.dest = dest
self._init_default_db()
def _init_default_db(self):
self.add('default')
def load(self):
# Retrieving every databases from database store on fs,
# and adding them to backend databases handler.
for db_name in os.listdir(self.store_path):
if db_name != 'default':
db_path = os.path.join(self.store_path, db_name)
db_uid = md5.new(db_name).digest()
self['index'].update({db_name: db_uid})
self.update({db_uid: leveldb.LevelDB(db_path)})
def add(self, db_name):
new_db_name = db_name
new_db_uid = md5.new(new_db_name).digest()
new_db_dest = os.path.join(self.dest, new_db_name)
self['index'].update({new_db_name: new_db_uid})
self.update({new_db_uid: leveldb.LevelDB(new_db_dest)})
Add : database handler list and del methodsimport os
import md5
import leveldb
class DatabasesHandler(dict):
def __init__(self, dest, *args, **kwargs):
self['index'] = {}
self.dest = dest
self._init_default_db()
def _init_default_db(self):
self.add('default')
def load(self):
# Retrieving every databases from database store on fs,
# and adding them to backend databases handler.
for db_name in os.listdir(self.store_path):
if db_name != 'default':
db_path = os.path.join(self.store_path, db_name)
db_uid = md5.new(db_name).digest()
self['index'].update({db_name: db_uid})
self.update({db_uid: leveldb.LevelDB(db_path)})
def add(self, db_name):
new_db_name = db_name
new_db_uid = md5.new(new_db_name).digest()
new_db_dest = os.path.join(self.dest, new_db_name)
self['index'].update({new_db_name: new_db_uid})
self.update({new_db_uid: leveldb.LevelDB(new_db_dest)})
def drop(self, db_name):
db_uid = self['index'].pop(db_name)
del self['db_uid']
os.remove(os.path.join(self.dest, db_name))
self.pop(db_uid)
def list(self):
return [db_name for db_name in self['index'].itervalues()]
|
<commit_before>import os
import md5
import leveldb
class DatabasesHandler(dict):
def __init__(self, dest, *args, **kwargs):
self['index'] = {}
self.dest = dest
self._init_default_db()
def _init_default_db(self):
self.add('default')
def load(self):
# Retrieving every databases from database store on fs,
# and adding them to backend databases handler.
for db_name in os.listdir(self.store_path):
if db_name != 'default':
db_path = os.path.join(self.store_path, db_name)
db_uid = md5.new(db_name).digest()
self['index'].update({db_name: db_uid})
self.update({db_uid: leveldb.LevelDB(db_path)})
def add(self, db_name):
new_db_name = db_name
new_db_uid = md5.new(new_db_name).digest()
new_db_dest = os.path.join(self.dest, new_db_name)
self['index'].update({new_db_name: new_db_uid})
self.update({new_db_uid: leveldb.LevelDB(new_db_dest)})
<commit_msg>Add : database handler list and del methods<commit_after>import os
import md5
import leveldb
class DatabasesHandler(dict):
def __init__(self, dest, *args, **kwargs):
self['index'] = {}
self.dest = dest
self._init_default_db()
def _init_default_db(self):
self.add('default')
def load(self):
# Retrieving every databases from database store on fs,
# and adding them to backend databases handler.
for db_name in os.listdir(self.store_path):
if db_name != 'default':
db_path = os.path.join(self.store_path, db_name)
db_uid = md5.new(db_name).digest()
self['index'].update({db_name: db_uid})
self.update({db_uid: leveldb.LevelDB(db_path)})
def add(self, db_name):
new_db_name = db_name
new_db_uid = md5.new(new_db_name).digest()
new_db_dest = os.path.join(self.dest, new_db_name)
self['index'].update({new_db_name: new_db_uid})
self.update({new_db_uid: leveldb.LevelDB(new_db_dest)})
def drop(self, db_name):
db_uid = self['index'].pop(db_name)
del self['db_uid']
os.remove(os.path.join(self.dest, db_name))
self.pop(db_uid)
def list(self):
return [db_name for db_name in self['index'].itervalues()]
|
dffcc683ebd1d228bd56e394ffeae69d8c8529c7
|
detectem/cli.py
|
detectem/cli.py
|
import logging
import click
from detectem.response import get_har
from detectem.plugin import load_plugins
from detectem.core import Detector
# Set up logging
logger = logging.getLogger('detectem')
ch = logging.StreamHandler()
logger.setLevel(logging.DEBUG)
logger.addHandler(ch)
@click.command()
@click.option(
'--debug',
default=False,
is_flag=True,
help='Include this flag to enable debug messages.'
)
@click.option(
'--format',
default=None,
type=click.Choice(['json']),
help='Set the format of the results.'
)
@click.option(
'--metadata',
default=False,
is_flag=True,
help='Include this flag to return plugin metadata.'
)
@click.argument('url')
def main(debug, format, metadata, url):
if debug:
click.echo("[+] Enabling debug mode.")
ch.setLevel(logging.DEBUG)
else:
ch.setLevel(logging.ERROR)
print(get_detection_results(url, format, metadata))
def get_detection_results(url, format, metadata):
har_data = get_har(url)
plugins = load_plugins()
det = Detector(har_data, plugins, url)
det.start_detection()
return det.get_results(format=format, metadata=metadata)
if __name__ == "__main__":
main()
|
import logging

import click

from detectem.response import get_har
from detectem.plugin import load_plugins
from detectem.core import Detector

# Set up logging: errors only by default; --debug lowers the handler level.
logger = logging.getLogger('detectem')
ch = logging.StreamHandler()
logger.setLevel(logging.ERROR)
logger.addHandler(ch)


@click.command()
@click.option(
    '--debug',
    default=False,
    is_flag=True,
    help='Include this flag to enable debug messages.'
)
@click.option(
    '--format',
    default=None,
    type=click.Choice(['json']),
    help='Set the format of the results.'
)
@click.option(
    '--metadata',
    default=False,
    is_flag=True,
    help='Include this flag to return plugin metadata.'
)
@click.argument('url')
def main(debug, format, metadata, url):
    """CLI entry point: run detection against *url* and print the results."""
    if debug:
        click.echo("[+] Enabling debug mode.")
        ch.setLevel(logging.DEBUG)
    else:
        ch.setLevel(logging.ERROR)

    print(get_detection_results(url, format, metadata))


def get_detection_results(url, format, metadata):
    """Fetch the HAR for *url*, run all plugins and return the findings."""
    har_data = get_har(url)
    plugins = load_plugins()
    det = Detector(har_data, plugins, url)
    det.start_detection()

    return det.get_results(format=format, metadata=metadata)


if __name__ == "__main__":
    main()
|
Change the default logging level to error
|
Change the default logging level to error
|
Python
|
mit
|
spectresearch/detectem
|
import logging
import click
from detectem.response import get_har
from detectem.plugin import load_plugins
from detectem.core import Detector
# Set up logging
logger = logging.getLogger('detectem')
ch = logging.StreamHandler()
logger.setLevel(logging.DEBUG)
logger.addHandler(ch)
@click.command()
@click.option(
'--debug',
default=False,
is_flag=True,
help='Include this flag to enable debug messages.'
)
@click.option(
'--format',
default=None,
type=click.Choice(['json']),
help='Set the format of the results.'
)
@click.option(
'--metadata',
default=False,
is_flag=True,
help='Include this flag to return plugin metadata.'
)
@click.argument('url')
def main(debug, format, metadata, url):
if debug:
click.echo("[+] Enabling debug mode.")
ch.setLevel(logging.DEBUG)
else:
ch.setLevel(logging.ERROR)
print(get_detection_results(url, format, metadata))
def get_detection_results(url, format, metadata):
har_data = get_har(url)
plugins = load_plugins()
det = Detector(har_data, plugins, url)
det.start_detection()
return det.get_results(format=format, metadata=metadata)
if __name__ == "__main__":
main()
Change the default logging level to error
|
import logging
import click
from detectem.response import get_har
from detectem.plugin import load_plugins
from detectem.core import Detector
# Set up logging
logger = logging.getLogger('detectem')
ch = logging.StreamHandler()
logger.setLevel(logging.ERROR)
logger.addHandler(ch)
@click.command()
@click.option(
'--debug',
default=False,
is_flag=True,
help='Include this flag to enable debug messages.'
)
@click.option(
'--format',
default=None,
type=click.Choice(['json']),
help='Set the format of the results.'
)
@click.option(
'--metadata',
default=False,
is_flag=True,
help='Include this flag to return plugin metadata.'
)
@click.argument('url')
def main(debug, format, metadata, url):
if debug:
click.echo("[+] Enabling debug mode.")
ch.setLevel(logging.DEBUG)
else:
ch.setLevel(logging.ERROR)
print(get_detection_results(url, format, metadata))
def get_detection_results(url, format, metadata):
har_data = get_har(url)
plugins = load_plugins()
det = Detector(har_data, plugins, url)
det.start_detection()
return det.get_results(format=format, metadata=metadata)
if __name__ == "__main__":
main()
|
<commit_before>import logging
import click
from detectem.response import get_har
from detectem.plugin import load_plugins
from detectem.core import Detector
# Set up logging
logger = logging.getLogger('detectem')
ch = logging.StreamHandler()
logger.setLevel(logging.DEBUG)
logger.addHandler(ch)
@click.command()
@click.option(
'--debug',
default=False,
is_flag=True,
help='Include this flag to enable debug messages.'
)
@click.option(
'--format',
default=None,
type=click.Choice(['json']),
help='Set the format of the results.'
)
@click.option(
'--metadata',
default=False,
is_flag=True,
help='Include this flag to return plugin metadata.'
)
@click.argument('url')
def main(debug, format, metadata, url):
if debug:
click.echo("[+] Enabling debug mode.")
ch.setLevel(logging.DEBUG)
else:
ch.setLevel(logging.ERROR)
print(get_detection_results(url, format, metadata))
def get_detection_results(url, format, metadata):
har_data = get_har(url)
plugins = load_plugins()
det = Detector(har_data, plugins, url)
det.start_detection()
return det.get_results(format=format, metadata=metadata)
if __name__ == "__main__":
main()
<commit_msg>Change the default logging level to error<commit_after>
|
import logging
import click
from detectem.response import get_har
from detectem.plugin import load_plugins
from detectem.core import Detector
# Set up logging
logger = logging.getLogger('detectem')
ch = logging.StreamHandler()
logger.setLevel(logging.ERROR)
logger.addHandler(ch)
@click.command()
@click.option(
'--debug',
default=False,
is_flag=True,
help='Include this flag to enable debug messages.'
)
@click.option(
'--format',
default=None,
type=click.Choice(['json']),
help='Set the format of the results.'
)
@click.option(
'--metadata',
default=False,
is_flag=True,
help='Include this flag to return plugin metadata.'
)
@click.argument('url')
def main(debug, format, metadata, url):
if debug:
click.echo("[+] Enabling debug mode.")
ch.setLevel(logging.DEBUG)
else:
ch.setLevel(logging.ERROR)
print(get_detection_results(url, format, metadata))
def get_detection_results(url, format, metadata):
har_data = get_har(url)
plugins = load_plugins()
det = Detector(har_data, plugins, url)
det.start_detection()
return det.get_results(format=format, metadata=metadata)
if __name__ == "__main__":
main()
|
import logging
import click
from detectem.response import get_har
from detectem.plugin import load_plugins
from detectem.core import Detector
# Set up logging
logger = logging.getLogger('detectem')
ch = logging.StreamHandler()
logger.setLevel(logging.DEBUG)
logger.addHandler(ch)
@click.command()
@click.option(
'--debug',
default=False,
is_flag=True,
help='Include this flag to enable debug messages.'
)
@click.option(
'--format',
default=None,
type=click.Choice(['json']),
help='Set the format of the results.'
)
@click.option(
'--metadata',
default=False,
is_flag=True,
help='Include this flag to return plugin metadata.'
)
@click.argument('url')
def main(debug, format, metadata, url):
if debug:
click.echo("[+] Enabling debug mode.")
ch.setLevel(logging.DEBUG)
else:
ch.setLevel(logging.ERROR)
print(get_detection_results(url, format, metadata))
def get_detection_results(url, format, metadata):
har_data = get_har(url)
plugins = load_plugins()
det = Detector(har_data, plugins, url)
det.start_detection()
return det.get_results(format=format, metadata=metadata)
if __name__ == "__main__":
main()
Change the default logging level to errorimport logging
import click
from detectem.response import get_har
from detectem.plugin import load_plugins
from detectem.core import Detector
# Set up logging
logger = logging.getLogger('detectem')
ch = logging.StreamHandler()
logger.setLevel(logging.ERROR)
logger.addHandler(ch)
@click.command()
@click.option(
'--debug',
default=False,
is_flag=True,
help='Include this flag to enable debug messages.'
)
@click.option(
'--format',
default=None,
type=click.Choice(['json']),
help='Set the format of the results.'
)
@click.option(
'--metadata',
default=False,
is_flag=True,
help='Include this flag to return plugin metadata.'
)
@click.argument('url')
def main(debug, format, metadata, url):
if debug:
click.echo("[+] Enabling debug mode.")
ch.setLevel(logging.DEBUG)
else:
ch.setLevel(logging.ERROR)
print(get_detection_results(url, format, metadata))
def get_detection_results(url, format, metadata):
har_data = get_har(url)
plugins = load_plugins()
det = Detector(har_data, plugins, url)
det.start_detection()
return det.get_results(format=format, metadata=metadata)
if __name__ == "__main__":
main()
|
<commit_before>import logging
import click
from detectem.response import get_har
from detectem.plugin import load_plugins
from detectem.core import Detector
# Set up logging
logger = logging.getLogger('detectem')
ch = logging.StreamHandler()
logger.setLevel(logging.DEBUG)
logger.addHandler(ch)
@click.command()
@click.option(
'--debug',
default=False,
is_flag=True,
help='Include this flag to enable debug messages.'
)
@click.option(
'--format',
default=None,
type=click.Choice(['json']),
help='Set the format of the results.'
)
@click.option(
'--metadata',
default=False,
is_flag=True,
help='Include this flag to return plugin metadata.'
)
@click.argument('url')
def main(debug, format, metadata, url):
if debug:
click.echo("[+] Enabling debug mode.")
ch.setLevel(logging.DEBUG)
else:
ch.setLevel(logging.ERROR)
print(get_detection_results(url, format, metadata))
def get_detection_results(url, format, metadata):
har_data = get_har(url)
plugins = load_plugins()
det = Detector(har_data, plugins, url)
det.start_detection()
return det.get_results(format=format, metadata=metadata)
if __name__ == "__main__":
main()
<commit_msg>Change the default logging level to error<commit_after>import logging
import click
from detectem.response import get_har
from detectem.plugin import load_plugins
from detectem.core import Detector
# Set up logging
logger = logging.getLogger('detectem')
ch = logging.StreamHandler()
logger.setLevel(logging.ERROR)
logger.addHandler(ch)
@click.command()
@click.option(
'--debug',
default=False,
is_flag=True,
help='Include this flag to enable debug messages.'
)
@click.option(
'--format',
default=None,
type=click.Choice(['json']),
help='Set the format of the results.'
)
@click.option(
'--metadata',
default=False,
is_flag=True,
help='Include this flag to return plugin metadata.'
)
@click.argument('url')
def main(debug, format, metadata, url):
if debug:
click.echo("[+] Enabling debug mode.")
ch.setLevel(logging.DEBUG)
else:
ch.setLevel(logging.ERROR)
print(get_detection_results(url, format, metadata))
def get_detection_results(url, format, metadata):
har_data = get_har(url)
plugins = load_plugins()
det = Detector(har_data, plugins, url)
det.start_detection()
return det.get_results(format=format, metadata=metadata)
if __name__ == "__main__":
main()
|
97abbb8b38ff38b7b150bb2c4b5e9243856ede02
|
dork/dns.py
|
dork/dns.py
|
"""
Dynamic host file management.
"""
import docker
from subprocess import call
import re
def refresh():
"""
Ensure that all running containers have a valid entry in /etc/hosts.
"""
containers = docker.containers()
hosts = '\n'.join(['%s %s' % (c.address, c.domain) for c in [d for d in containers if d.running]])
hosts = '# DORK START\n%s\n# DORK END' % hosts
expr = re.compile('# DORK START\n(.*\n)*# DORK END')
with open('/etc/hosts', 'r') as f:
content = f.read()
if len(expr.findall(content)) > 0:
content = expr.sub(hosts, content)
else:
content += hosts + '\n'
with open('/etc/hosts', 'w') as f:
f.write(content)
call(['sudo', 'service', 'dnsmasq', 'restart'])
|
"""
Dynamic host file management.
"""
import docker
from subprocess import call
import re
def refresh():
"""
Ensure that all running containers have a valid entry in /etc/hosts.
"""
containers = docker.containers()
hosts = '\n'.join(['%s %s.%s.dork %s' % (c.address, c.project, c.instance, c.domain) for c in [d for d in containers if d.running]])
hosts = '# DORK START\n%s\n# DORK END' % hosts
expr = re.compile('# DORK START\n(.*\n)*# DORK END')
with open('/etc/hosts', 'r') as f:
content = f.read()
if len(expr.findall(content)) > 0:
content = expr.sub(hosts, content)
else:
content += hosts + '\n'
with open('/etc/hosts', 'w') as f:
f.write(content)
call(['sudo', 'service', 'dnsmasq', 'restart'])
|
Add both domains to hostsfile.
|
Add both domains to hostsfile.
|
Python
|
mit
|
iamdork/dork
|
"""
Dynamic host file management.
"""
import docker
from subprocess import call
import re
def refresh():
"""
Ensure that all running containers have a valid entry in /etc/hosts.
"""
containers = docker.containers()
hosts = '\n'.join(['%s %s' % (c.address, c.domain) for c in [d for d in containers if d.running]])
hosts = '# DORK START\n%s\n# DORK END' % hosts
expr = re.compile('# DORK START\n(.*\n)*# DORK END')
with open('/etc/hosts', 'r') as f:
content = f.read()
if len(expr.findall(content)) > 0:
content = expr.sub(hosts, content)
else:
content += hosts + '\n'
with open('/etc/hosts', 'w') as f:
f.write(content)
call(['sudo', 'service', 'dnsmasq', 'restart'])
Add both domains to hostsfile.
|
"""
Dynamic host file management.
"""
import docker
from subprocess import call
import re
def refresh():
"""
Ensure that all running containers have a valid entry in /etc/hosts.
"""
containers = docker.containers()
hosts = '\n'.join(['%s %s.%s.dork %s' % (c.address, c.project, c.instance, c.domain) for c in [d for d in containers if d.running]])
hosts = '# DORK START\n%s\n# DORK END' % hosts
expr = re.compile('# DORK START\n(.*\n)*# DORK END')
with open('/etc/hosts', 'r') as f:
content = f.read()
if len(expr.findall(content)) > 0:
content = expr.sub(hosts, content)
else:
content += hosts + '\n'
with open('/etc/hosts', 'w') as f:
f.write(content)
call(['sudo', 'service', 'dnsmasq', 'restart'])
|
<commit_before>"""
Dynamic host file management.
"""
import docker
from subprocess import call
import re
def refresh():
"""
Ensure that all running containers have a valid entry in /etc/hosts.
"""
containers = docker.containers()
hosts = '\n'.join(['%s %s' % (c.address, c.domain) for c in [d for d in containers if d.running]])
hosts = '# DORK START\n%s\n# DORK END' % hosts
expr = re.compile('# DORK START\n(.*\n)*# DORK END')
with open('/etc/hosts', 'r') as f:
content = f.read()
if len(expr.findall(content)) > 0:
content = expr.sub(hosts, content)
else:
content += hosts + '\n'
with open('/etc/hosts', 'w') as f:
f.write(content)
call(['sudo', 'service', 'dnsmasq', 'restart'])
<commit_msg>Add both domains to hostsfile.<commit_after>
|
"""
Dynamic host file management.
"""
import docker
from subprocess import call
import re
def refresh():
"""
Ensure that all running containers have a valid entry in /etc/hosts.
"""
containers = docker.containers()
hosts = '\n'.join(['%s %s.%s.dork %s' % (c.address, c.project, c.instance, c.domain) for c in [d for d in containers if d.running]])
hosts = '# DORK START\n%s\n# DORK END' % hosts
expr = re.compile('# DORK START\n(.*\n)*# DORK END')
with open('/etc/hosts', 'r') as f:
content = f.read()
if len(expr.findall(content)) > 0:
content = expr.sub(hosts, content)
else:
content += hosts + '\n'
with open('/etc/hosts', 'w') as f:
f.write(content)
call(['sudo', 'service', 'dnsmasq', 'restart'])
|
"""
Dynamic host file management.
"""
import docker
from subprocess import call
import re
def refresh():
"""
Ensure that all running containers have a valid entry in /etc/hosts.
"""
containers = docker.containers()
hosts = '\n'.join(['%s %s' % (c.address, c.domain) for c in [d for d in containers if d.running]])
hosts = '# DORK START\n%s\n# DORK END' % hosts
expr = re.compile('# DORK START\n(.*\n)*# DORK END')
with open('/etc/hosts', 'r') as f:
content = f.read()
if len(expr.findall(content)) > 0:
content = expr.sub(hosts, content)
else:
content += hosts + '\n'
with open('/etc/hosts', 'w') as f:
f.write(content)
call(['sudo', 'service', 'dnsmasq', 'restart'])
Add both domains to hostsfile."""
Dynamic host file management.
"""
import docker
from subprocess import call
import re
def refresh():
"""
Ensure that all running containers have a valid entry in /etc/hosts.
"""
containers = docker.containers()
hosts = '\n'.join(['%s %s.%s.dork %s' % (c.address, c.project, c.instance, c.domain) for c in [d for d in containers if d.running]])
hosts = '# DORK START\n%s\n# DORK END' % hosts
expr = re.compile('# DORK START\n(.*\n)*# DORK END')
with open('/etc/hosts', 'r') as f:
content = f.read()
if len(expr.findall(content)) > 0:
content = expr.sub(hosts, content)
else:
content += hosts + '\n'
with open('/etc/hosts', 'w') as f:
f.write(content)
call(['sudo', 'service', 'dnsmasq', 'restart'])
|
<commit_before>"""
Dynamic host file management.
"""
import docker
from subprocess import call
import re
def refresh():
"""
Ensure that all running containers have a valid entry in /etc/hosts.
"""
containers = docker.containers()
hosts = '\n'.join(['%s %s' % (c.address, c.domain) for c in [d for d in containers if d.running]])
hosts = '# DORK START\n%s\n# DORK END' % hosts
expr = re.compile('# DORK START\n(.*\n)*# DORK END')
with open('/etc/hosts', 'r') as f:
content = f.read()
if len(expr.findall(content)) > 0:
content = expr.sub(hosts, content)
else:
content += hosts + '\n'
with open('/etc/hosts', 'w') as f:
f.write(content)
call(['sudo', 'service', 'dnsmasq', 'restart'])
<commit_msg>Add both domains to hostsfile.<commit_after>"""
Dynamic host file management.
"""
import docker
from subprocess import call
import re
def refresh():
"""
Ensure that all running containers have a valid entry in /etc/hosts.
"""
containers = docker.containers()
hosts = '\n'.join(['%s %s.%s.dork %s' % (c.address, c.project, c.instance, c.domain) for c in [d for d in containers if d.running]])
hosts = '# DORK START\n%s\n# DORK END' % hosts
expr = re.compile('# DORK START\n(.*\n)*# DORK END')
with open('/etc/hosts', 'r') as f:
content = f.read()
if len(expr.findall(content)) > 0:
content = expr.sub(hosts, content)
else:
content += hosts + '\n'
with open('/etc/hosts', 'w') as f:
f.write(content)
call(['sudo', 'service', 'dnsmasq', 'restart'])
|
cbc69077016885ebf2b481eebd2f11511c8184ce
|
nbgrader/tests/apps/test_nbgrader.py
|
nbgrader/tests/apps/test_nbgrader.py
|
import os
import sys
from .. import run_nbgrader, run_command
from .base import BaseTestApp
class TestNbGrader(BaseTestApp):
def test_help(self):
"""Does the help display without error?"""
run_nbgrader(["--help-all"])
def test_no_subapp(self):
"""Is the help displayed when no subapp is given?"""
run_nbgrader([], retcode=0)
def test_check_version(self, capfd):
"""Is the version the same regardless of how we run nbgrader?"""
out1 = '\n'.join(
run_command([sys.executable, "-m", "nbgrader", "--version"]).splitlines()[-3:]
).strip()
out2 = '\n'.join(
run_nbgrader(["--version"], stdout=True).splitlines()[-3:]
).strip()
assert out1 == out2
def test_logfile(self):
# by default, there should be no logfile created
files_before = set(os.listdir())
run_nbgrader([])
files_after = set(os.listdir())
assert files_before == files_after
# if we specify a logfile, it should get used
run_nbgrader(["--NbGrader.logfile=log.txt"])
assert os.path.exists("log.txt")
|
import os
import sys
from .. import run_nbgrader, run_command
from .base import BaseTestApp
class TestNbGrader(BaseTestApp):
def test_help(self):
"""Does the help display without error?"""
run_nbgrader(["--help-all"])
def test_no_subapp(self):
"""Is the help displayed when no subapp is given?"""
run_nbgrader([], retcode=0)
def test_check_version(self, capfd):
"""Is the version the same regardless of how we run nbgrader?"""
out1 = '\n'.join(
run_command([sys.executable, "-m", "nbgrader", "--version"]).splitlines()[-3:]
).strip()
out2 = '\n'.join(
run_nbgrader(["--version"], stdout=True).splitlines()[-3:]
).strip()
assert out1 == out2
def test_logfile(self):
# by default, there should be no logfile created
cwd = os.getcwd()
files_before = set(os.listdir(cwd))
run_nbgrader([])
files_after = set(os.listdir(cwd))
assert files_before == files_after
# if we specify a logfile, it should get used
run_nbgrader(["--NbGrader.logfile=log.txt"])
assert os.path.exists("log.txt")
|
Include directory name for python 2 compatibility
|
Include directory name for python 2 compatibility
|
Python
|
bsd-3-clause
|
jhamrick/nbgrader,jhamrick/nbgrader,jupyter/nbgrader,jupyter/nbgrader,jupyter/nbgrader,jhamrick/nbgrader,jupyter/nbgrader,jhamrick/nbgrader,jupyter/nbgrader
|
import os
import sys
from .. import run_nbgrader, run_command
from .base import BaseTestApp
class TestNbGrader(BaseTestApp):
def test_help(self):
"""Does the help display without error?"""
run_nbgrader(["--help-all"])
def test_no_subapp(self):
"""Is the help displayed when no subapp is given?"""
run_nbgrader([], retcode=0)
def test_check_version(self, capfd):
"""Is the version the same regardless of how we run nbgrader?"""
out1 = '\n'.join(
run_command([sys.executable, "-m", "nbgrader", "--version"]).splitlines()[-3:]
).strip()
out2 = '\n'.join(
run_nbgrader(["--version"], stdout=True).splitlines()[-3:]
).strip()
assert out1 == out2
def test_logfile(self):
# by default, there should be no logfile created
files_before = set(os.listdir())
run_nbgrader([])
files_after = set(os.listdir())
assert files_before == files_after
# if we specify a logfile, it should get used
run_nbgrader(["--NbGrader.logfile=log.txt"])
assert os.path.exists("log.txt")
Include directory name for python 2 compatibility
|
import os
import sys
from .. import run_nbgrader, run_command
from .base import BaseTestApp
class TestNbGrader(BaseTestApp):
def test_help(self):
"""Does the help display without error?"""
run_nbgrader(["--help-all"])
def test_no_subapp(self):
"""Is the help displayed when no subapp is given?"""
run_nbgrader([], retcode=0)
def test_check_version(self, capfd):
"""Is the version the same regardless of how we run nbgrader?"""
out1 = '\n'.join(
run_command([sys.executable, "-m", "nbgrader", "--version"]).splitlines()[-3:]
).strip()
out2 = '\n'.join(
run_nbgrader(["--version"], stdout=True).splitlines()[-3:]
).strip()
assert out1 == out2
def test_logfile(self):
# by default, there should be no logfile created
cwd = os.getcwd()
files_before = set(os.listdir(cwd))
run_nbgrader([])
files_after = set(os.listdir(cwd))
assert files_before == files_after
# if we specify a logfile, it should get used
run_nbgrader(["--NbGrader.logfile=log.txt"])
assert os.path.exists("log.txt")
|
<commit_before>import os
import sys
from .. import run_nbgrader, run_command
from .base import BaseTestApp
class TestNbGrader(BaseTestApp):
def test_help(self):
"""Does the help display without error?"""
run_nbgrader(["--help-all"])
def test_no_subapp(self):
"""Is the help displayed when no subapp is given?"""
run_nbgrader([], retcode=0)
def test_check_version(self, capfd):
"""Is the version the same regardless of how we run nbgrader?"""
out1 = '\n'.join(
run_command([sys.executable, "-m", "nbgrader", "--version"]).splitlines()[-3:]
).strip()
out2 = '\n'.join(
run_nbgrader(["--version"], stdout=True).splitlines()[-3:]
).strip()
assert out1 == out2
def test_logfile(self):
# by default, there should be no logfile created
files_before = set(os.listdir())
run_nbgrader([])
files_after = set(os.listdir())
assert files_before == files_after
# if we specify a logfile, it should get used
run_nbgrader(["--NbGrader.logfile=log.txt"])
assert os.path.exists("log.txt")
<commit_msg>Include directory name for python 2 compatibility<commit_after>
|
import os
import sys
from .. import run_nbgrader, run_command
from .base import BaseTestApp
class TestNbGrader(BaseTestApp):
def test_help(self):
"""Does the help display without error?"""
run_nbgrader(["--help-all"])
def test_no_subapp(self):
"""Is the help displayed when no subapp is given?"""
run_nbgrader([], retcode=0)
def test_check_version(self, capfd):
"""Is the version the same regardless of how we run nbgrader?"""
out1 = '\n'.join(
run_command([sys.executable, "-m", "nbgrader", "--version"]).splitlines()[-3:]
).strip()
out2 = '\n'.join(
run_nbgrader(["--version"], stdout=True).splitlines()[-3:]
).strip()
assert out1 == out2
def test_logfile(self):
# by default, there should be no logfile created
cwd = os.getcwd()
files_before = set(os.listdir(cwd))
run_nbgrader([])
files_after = set(os.listdir(cwd))
assert files_before == files_after
# if we specify a logfile, it should get used
run_nbgrader(["--NbGrader.logfile=log.txt"])
assert os.path.exists("log.txt")
|
import os
import sys
from .. import run_nbgrader, run_command
from .base import BaseTestApp
class TestNbGrader(BaseTestApp):
def test_help(self):
"""Does the help display without error?"""
run_nbgrader(["--help-all"])
def test_no_subapp(self):
"""Is the help displayed when no subapp is given?"""
run_nbgrader([], retcode=0)
def test_check_version(self, capfd):
"""Is the version the same regardless of how we run nbgrader?"""
out1 = '\n'.join(
run_command([sys.executable, "-m", "nbgrader", "--version"]).splitlines()[-3:]
).strip()
out2 = '\n'.join(
run_nbgrader(["--version"], stdout=True).splitlines()[-3:]
).strip()
assert out1 == out2
def test_logfile(self):
# by default, there should be no logfile created
files_before = set(os.listdir())
run_nbgrader([])
files_after = set(os.listdir())
assert files_before == files_after
# if we specify a logfile, it should get used
run_nbgrader(["--NbGrader.logfile=log.txt"])
assert os.path.exists("log.txt")
Include directory name for python 2 compatibilityimport os
import sys
from .. import run_nbgrader, run_command
from .base import BaseTestApp
class TestNbGrader(BaseTestApp):
def test_help(self):
"""Does the help display without error?"""
run_nbgrader(["--help-all"])
def test_no_subapp(self):
"""Is the help displayed when no subapp is given?"""
run_nbgrader([], retcode=0)
def test_check_version(self, capfd):
"""Is the version the same regardless of how we run nbgrader?"""
out1 = '\n'.join(
run_command([sys.executable, "-m", "nbgrader", "--version"]).splitlines()[-3:]
).strip()
out2 = '\n'.join(
run_nbgrader(["--version"], stdout=True).splitlines()[-3:]
).strip()
assert out1 == out2
def test_logfile(self):
# by default, there should be no logfile created
cwd = os.getcwd()
files_before = set(os.listdir(cwd))
run_nbgrader([])
files_after = set(os.listdir(cwd))
assert files_before == files_after
# if we specify a logfile, it should get used
run_nbgrader(["--NbGrader.logfile=log.txt"])
assert os.path.exists("log.txt")
|
<commit_before>import os
import sys
from .. import run_nbgrader, run_command
from .base import BaseTestApp
class TestNbGrader(BaseTestApp):
def test_help(self):
"""Does the help display without error?"""
run_nbgrader(["--help-all"])
def test_no_subapp(self):
"""Is the help displayed when no subapp is given?"""
run_nbgrader([], retcode=0)
def test_check_version(self, capfd):
"""Is the version the same regardless of how we run nbgrader?"""
out1 = '\n'.join(
run_command([sys.executable, "-m", "nbgrader", "--version"]).splitlines()[-3:]
).strip()
out2 = '\n'.join(
run_nbgrader(["--version"], stdout=True).splitlines()[-3:]
).strip()
assert out1 == out2
def test_logfile(self):
# by default, there should be no logfile created
files_before = set(os.listdir())
run_nbgrader([])
files_after = set(os.listdir())
assert files_before == files_after
# if we specify a logfile, it should get used
run_nbgrader(["--NbGrader.logfile=log.txt"])
assert os.path.exists("log.txt")
<commit_msg>Include directory name for python 2 compatibility<commit_after>import os
import sys
from .. import run_nbgrader, run_command
from .base import BaseTestApp
class TestNbGrader(BaseTestApp):
def test_help(self):
"""Does the help display without error?"""
run_nbgrader(["--help-all"])
def test_no_subapp(self):
"""Is the help displayed when no subapp is given?"""
run_nbgrader([], retcode=0)
def test_check_version(self, capfd):
"""Is the version the same regardless of how we run nbgrader?"""
out1 = '\n'.join(
run_command([sys.executable, "-m", "nbgrader", "--version"]).splitlines()[-3:]
).strip()
out2 = '\n'.join(
run_nbgrader(["--version"], stdout=True).splitlines()[-3:]
).strip()
assert out1 == out2
def test_logfile(self):
# by default, there should be no logfile created
cwd = os.getcwd()
files_before = set(os.listdir(cwd))
run_nbgrader([])
files_after = set(os.listdir(cwd))
assert files_before == files_after
# if we specify a logfile, it should get used
run_nbgrader(["--NbGrader.logfile=log.txt"])
assert os.path.exists("log.txt")
|
3421caaceb54d30ccb070f9959978f196c72bb1b
|
responsive_wrapper/urls.py
|
responsive_wrapper/urls.py
|
from django.conf.urls import patterns, url
from views import ajax_render
urlpatterns = patterns(
'',
url(r'^render/(?P<plugin_id>[-\w]+)/$', ajax_render, name='ajax-render')
)
|
from django.conf.urls import patterns, url
from .views import ajax_render
urlpatterns = patterns(
'',
url(r'^render/(?P<plugin_id>[-\w]+)/$', ajax_render, name='ajax-render')
)
|
Use .views instead of views
|
Use .views instead of views
|
Python
|
bsd-3-clause
|
mishbahr/djangocms-responsive-wrapper,mishbahr/djangocms-responsive-wrapper,mishbahr/djangocms-responsive-wrapper
|
from django.conf.urls import patterns, url
from views import ajax_render
urlpatterns = patterns(
'',
url(r'^render/(?P<plugin_id>[-\w]+)/$', ajax_render, name='ajax-render')
)
Use .views instead of views
|
from django.conf.urls import patterns, url
from .views import ajax_render
urlpatterns = patterns(
'',
url(r'^render/(?P<plugin_id>[-\w]+)/$', ajax_render, name='ajax-render')
)
|
<commit_before>from django.conf.urls import patterns, url
from views import ajax_render
urlpatterns = patterns(
'',
url(r'^render/(?P<plugin_id>[-\w]+)/$', ajax_render, name='ajax-render')
)
<commit_msg>Use .views instead of views<commit_after>
|
from django.conf.urls import patterns, url
from .views import ajax_render
urlpatterns = patterns(
'',
url(r'^render/(?P<plugin_id>[-\w]+)/$', ajax_render, name='ajax-render')
)
|
from django.conf.urls import patterns, url
from views import ajax_render
urlpatterns = patterns(
'',
url(r'^render/(?P<plugin_id>[-\w]+)/$', ajax_render, name='ajax-render')
)
Use .views instead of viewsfrom django.conf.urls import patterns, url
from .views import ajax_render
urlpatterns = patterns(
'',
url(r'^render/(?P<plugin_id>[-\w]+)/$', ajax_render, name='ajax-render')
)
|
<commit_before>from django.conf.urls import patterns, url
from views import ajax_render
urlpatterns = patterns(
'',
url(r'^render/(?P<plugin_id>[-\w]+)/$', ajax_render, name='ajax-render')
)
<commit_msg>Use .views instead of views<commit_after>from django.conf.urls import patterns, url
from .views import ajax_render
urlpatterns = patterns(
'',
url(r'^render/(?P<plugin_id>[-\w]+)/$', ajax_render, name='ajax-render')
)
|
96777198fda175de0f53b8a2a36cc693fe4f50a3
|
scipy_base/__init__.py
|
scipy_base/__init__.py
|
from info_scipy_base import __doc__
from scipy_base_version import scipy_base_version as __version__
from ppimport import ppimport, ppimport_attr
# The following statement is equivalent to
#
# from Matrix import Matrix as mat
#
# but avoids expensive LinearAlgebra import when
# Matrix is not used.
mat = ppimport_attr(ppimport('Matrix'), 'Matrix')
# Force Numeric to use scipy_base.fastumath instead of Numeric.umath.
import fastumath # no need to use scipy_base.fastumath
import sys as _sys
_sys.modules['umath'] = fastumath
import Numeric
from Numeric import *
import limits
from type_check import *
from index_tricks import *
from function_base import *
from shape_base import *
from matrix_base import *
from polynomial import *
from scimath import *
from machar import *
from pexec import *
Inf = inf = fastumath.PINF
try:
NAN = NaN = nan = fastumath.NAN
except AttributeError:
NaN = NAN = nan = fastumath.PINF/fastumath.PINF
from scipy_test.testing import ScipyTest
test = ScipyTest('scipy_base').test
if _sys.modules.has_key('scipy_base.Matrix') \
and _sys.modules['scipy_base.Matrix'] is None:
del _sys.modules['scipy_base.Matrix']
|
from info_scipy_base import __doc__
from scipy_base_version import scipy_base_version as __version__
from ppimport import ppimport, ppimport_attr
# The following statement is equivalent to
#
# from Matrix import Matrix as mat
#
# but avoids expensive LinearAlgebra import when
# Matrix is not used.
mat = ppimport_attr(ppimport('Matrix'), 'Matrix')
# Force Numeric to use scipy_base.fastumath instead of Numeric.umath.
import fastumath # no need to use scipy_base.fastumath
import sys as _sys
_sys.modules['umath'] = fastumath
import Numeric
from Numeric import *
import limits
from type_check import *
from index_tricks import *
from function_base import *
from shape_base import *
from matrix_base import *
from polynomial import *
from scimath import *
from machar import *
from pexec import *
if Numeric.__version__ < '23.5':
matrixmultiply=dot
Inf = inf = fastumath.PINF
try:
NAN = NaN = nan = fastumath.NAN
except AttributeError:
NaN = NAN = nan = fastumath.PINF/fastumath.PINF
from scipy_test.testing import ScipyTest
test = ScipyTest('scipy_base').test
if _sys.modules.has_key('scipy_base.Matrix') \
and _sys.modules['scipy_base.Matrix'] is None:
del _sys.modules['scipy_base.Matrix']
|
Fix for matrixmultiply != dot on Numeric < 23.4
|
Fix for matrixmultiply != dot on Numeric < 23.4
|
Python
|
bsd-3-clause
|
ddasilva/numpy,behzadnouri/numpy,joferkington/numpy,bertrand-l/numpy,groutr/numpy,Eric89GXL/numpy,jakirkham/numpy,matthew-brett/numpy,cowlicks/numpy,githubmlai/numpy,musically-ut/numpy,dimasad/numpy,jakirkham/numpy,stefanv/numpy,tynn/numpy,ahaldane/numpy,utke1/numpy,pelson/numpy,mingwpy/numpy,skwbc/numpy,Dapid/numpy,charris/numpy,has2k1/numpy,SiccarPoint/numpy,Dapid/numpy,dwf/numpy,ViralLeadership/numpy,jakirkham/numpy,felipebetancur/numpy,dwf/numpy,GaZ3ll3/numpy,ajdawson/numpy,stuarteberg/numpy,dimasad/numpy,bmorris3/numpy,AustereCuriosity/numpy,stefanv/numpy,sonnyhu/numpy,jankoslavic/numpy,nguyentu1602/numpy,matthew-brett/numpy,jorisvandenbossche/numpy,Srisai85/numpy,cowlicks/numpy,BabeNovelty/numpy,rmcgibbo/numpy,mathdd/numpy,simongibbons/numpy,musically-ut/numpy,chiffa/numpy,matthew-brett/numpy,ContinuumIO/numpy,pelson/numpy,MSeifert04/numpy,ogrisel/numpy,bmorris3/numpy,immerrr/numpy,WarrenWeckesser/numpy,drasmuss/numpy,solarjoe/numpy,leifdenby/numpy,rgommers/numpy,rhythmsosad/numpy,embray/numpy,chatcannon/numpy,tdsmith/numpy,embray/numpy,ewmoore/numpy,tdsmith/numpy,mathdd/numpy,leifdenby/numpy,argriffing/numpy,tacaswell/numpy,BMJHayward/numpy,sinhrks/numpy,bertrand-l/numpy,WarrenWeckesser/numpy,musically-ut/numpy,moreati/numpy,ekalosak/numpy,ogrisel/numpy,empeeu/numpy,endolith/numpy,tynn/numpy,abalkin/numpy,astrofrog/numpy,BabeNovelty/numpy,Linkid/numpy,solarjoe/numpy,joferkington/numpy,MSeifert04/numpy,gmcastil/numpy,GrimDerp/numpy,has2k1/numpy,andsor/numpy,ChristopherHogan/numpy,madphysicist/numpy,mhvk/numpy,ahaldane/numpy,ogrisel/numpy,BabeNovelty/numpy,maniteja123/numpy,CMartelLML/numpy,jankoslavic/numpy,WillieMaddox/numpy,behzadnouri/numpy,cowlicks/numpy,larsmans/numpy,mattip/numpy,nguyentu1602/numpy,dimasad/numpy,MichaelAquilina/numpy,astrofrog/numpy,SunghanKim/numpy,grlee77/numpy,ogrisel/numpy,cjermain/numpy,ddasilva/numpy,pdebuyl/numpy,mhvk/numpy,dwf/numpy,Srisai85/numpy,kiwifb/numpy,gmcastil/numpy,gfyoung/numpy,gfyoung/numpy,madphysicist/numpy,rudimeie
r/numpy,ChanderG/numpy,SunghanKim/numpy,ewmoore/numpy,nguyentu1602/numpy,BabeNovelty/numpy,ddasilva/numpy,mortada/numpy,embray/numpy,ssanderson/numpy,has2k1/numpy,tynn/numpy,MaPePeR/numpy,mwiebe/numpy,mhvk/numpy,matthew-brett/numpy,mingwpy/numpy,sigma-random/numpy,abalkin/numpy,skymanaditya1/numpy,bmorris3/numpy,numpy/numpy-refactor,KaelChen/numpy,mortada/numpy,shoyer/numpy,Anwesh43/numpy,numpy/numpy,mingwpy/numpy,nbeaver/numpy,ajdawson/numpy,dimasad/numpy,empeeu/numpy,numpy/numpy,ESSS/numpy,jorisvandenbossche/numpy,ChristopherHogan/numpy,brandon-rhodes/numpy,astrofrog/numpy,mindw/numpy,sonnyhu/numpy,rajathkumarmp/numpy,anntzer/numpy,Anwesh43/numpy,njase/numpy,endolith/numpy,pelson/numpy,yiakwy/numpy,tacaswell/numpy,Linkid/numpy,stefanv/numpy,ssanderson/numpy,rgommers/numpy,ekalosak/numpy,MaPePeR/numpy,CMartelLML/numpy,immerrr/numpy,larsmans/numpy,mindw/numpy,pbrod/numpy,dwillmer/numpy,pbrod/numpy,jankoslavic/numpy,Srisai85/numpy,larsmans/numpy,ajdawson/numpy,sigma-random/numpy,KaelChen/numpy,ESSS/numpy,kiwifb/numpy,yiakwy/numpy,stuarteberg/numpy,mathdd/numpy,felipebetancur/numpy,jonathanunderwood/numpy,hainm/numpy,MSeifert04/numpy,SiccarPoint/numpy,drasmuss/numpy,SunghanKim/numpy,madphysicist/numpy,stuarteberg/numpy,shoyer/numpy,b-carter/numpy,nguyentu1602/numpy,tdsmith/numpy,MSeifert04/numpy,ahaldane/numpy,jschueller/numpy,WarrenWeckesser/numpy,Dapid/numpy,naritta/numpy,ekalosak/numpy,ogrisel/numpy,charris/numpy,ahaldane/numpy,mattip/numpy,mattip/numpy,GaZ3ll3/numpy,kirillzhuravlev/numpy,Yusa95/numpy,argriffing/numpy,mwiebe/numpy,empeeu/numpy,ViralLeadership/numpy,kirillzhuravlev/numpy,mortada/numpy,pizzathief/numpy,MSeifert04/numpy,SiccarPoint/numpy,dch312/numpy,dato-code/numpy,numpy/numpy-refactor,jakirkham/numpy,NextThought/pypy-numpy,numpy/numpy,stuarteberg/numpy,mhvk/numpy,ViralLeadership/numpy,hainm/numpy,astrofrog/numpy,hainm/numpy,grlee77/numpy,grlee77/numpy,pbrod/numpy,naritta/numpy,drasmuss/numpy,grlee77/numpy,BMJHayward/numpy,stefanv/numpy,dwf/numpy,ajd
awson/numpy,dato-code/numpy,NextThought/pypy-numpy,skwbc/numpy,gmcastil/numpy,anntzer/numpy,ChristopherHogan/numpy,hainm/numpy,Srisai85/numpy,charris/numpy,embray/numpy,jschueller/numpy,ChanderG/numpy,larsmans/numpy,chatcannon/numpy,cjermain/numpy,jonathanunderwood/numpy,dch312/numpy,mortada/numpy,trankmichael/numpy,ewmoore/numpy,jorisvandenbossche/numpy,sigma-random/numpy,andsor/numpy,jorisvandenbossche/numpy,jonathanunderwood/numpy,kiwifb/numpy,grlee77/numpy,simongibbons/numpy,SunghanKim/numpy,numpy/numpy,GrimDerp/numpy,ahaldane/numpy,brandon-rhodes/numpy,mingwpy/numpy,moreati/numpy,mathdd/numpy,bringingheavendown/numpy,CMartelLML/numpy,pdebuyl/numpy,BMJHayward/numpy,b-carter/numpy,charris/numpy,skymanaditya1/numpy,githubmlai/numpy,solarjoe/numpy,dwillmer/numpy,MaPePeR/numpy,shoyer/numpy,trankmichael/numpy,jorisvandenbossche/numpy,immerrr/numpy,bertrand-l/numpy,kirillzhuravlev/numpy,Yusa95/numpy,joferkington/numpy,b-carter/numpy,rgommers/numpy,pyparallel/numpy,ewmoore/numpy,rhythmsosad/numpy,SiccarPoint/numpy,Yusa95/numpy,rhythmsosad/numpy,ekalosak/numpy,tdsmith/numpy,sigma-random/numpy,WarrenWeckesser/numpy,KaelChen/numpy,sonnyhu/numpy,dwillmer/numpy,pizzathief/numpy,Eric89GXL/numpy,rgommers/numpy,CMartelLML/numpy,MichaelAquilina/numpy,chiffa/numpy,GrimDerp/numpy,ChristopherHogan/numpy,abalkin/numpy,NextThought/pypy-numpy,AustereCuriosity/numpy,seberg/numpy,nbeaver/numpy,utke1/numpy,shoyer/numpy,bringingheavendown/numpy,rmcgibbo/numpy,numpy/numpy-refactor,brandon-rhodes/numpy,sonnyhu/numpy,GaZ3ll3/numpy,dwillmer/numpy,stefanv/numpy,cowlicks/numpy,matthew-brett/numpy,simongibbons/numpy,dato-code/numpy,WarrenWeckesser/numpy,pizzathief/numpy,MichaelAquilina/numpy,ContinuumIO/numpy,rudimeier/numpy,immerrr/numpy,pbrod/numpy,njase/numpy,pdebuyl/numpy,gfyoung/numpy,seberg/numpy,Yusa95/numpy,leifdenby/numpy,rmcgibbo/numpy,skwbc/numpy,mindw/numpy,utke1/numpy,dwf/numpy,sinhrks/numpy,skymanaditya1/numpy,rudimeier/numpy,felipebetancur/numpy,naritta/numpy,endolith/numpy,embra
y/numpy,brandon-rhodes/numpy,Linkid/numpy,andsor/numpy,WillieMaddox/numpy,seberg/numpy,rhythmsosad/numpy,simongibbons/numpy,madphysicist/numpy,Eric89GXL/numpy,ChanderG/numpy,jschueller/numpy,seberg/numpy,mwiebe/numpy,argriffing/numpy,Anwesh43/numpy,rajathkumarmp/numpy,groutr/numpy,Linkid/numpy,tacaswell/numpy,sinhrks/numpy,mindw/numpy,joferkington/numpy,AustereCuriosity/numpy,mhvk/numpy,maniteja123/numpy,trankmichael/numpy,sinhrks/numpy,rherault-insa/numpy,nbeaver/numpy,groutr/numpy,rherault-insa/numpy,pdebuyl/numpy,Eric89GXL/numpy,shoyer/numpy,mattip/numpy,NextThought/pypy-numpy,chiffa/numpy,Anwesh43/numpy,ewmoore/numpy,BMJHayward/numpy,jakirkham/numpy,dch312/numpy,felipebetancur/numpy,has2k1/numpy,WillieMaddox/numpy,moreati/numpy,ContinuumIO/numpy,bmorris3/numpy,dch312/numpy,githubmlai/numpy,pyparallel/numpy,madphysicist/numpy,MaPePeR/numpy,bringingheavendown/numpy,simongibbons/numpy,njase/numpy,behzadnouri/numpy,GaZ3ll3/numpy,pbrod/numpy,ESSS/numpy,empeeu/numpy,KaelChen/numpy,pyparallel/numpy,pizzathief/numpy,githubmlai/numpy,dato-code/numpy,rherault-insa/numpy,yiakwy/numpy,anntzer/numpy,rudimeier/numpy,yiakwy/numpy,cjermain/numpy,MichaelAquilina/numpy,maniteja123/numpy,rajathkumarmp/numpy,pizzathief/numpy,pelson/numpy,astrofrog/numpy,kirillzhuravlev/numpy,numpy/numpy-refactor,rmcgibbo/numpy,skymanaditya1/numpy,pelson/numpy,jankoslavic/numpy,naritta/numpy,cjermain/numpy,trankmichael/numpy,chatcannon/numpy,musically-ut/numpy,jschueller/numpy,numpy/numpy-refactor,endolith/numpy,ssanderson/numpy,andsor/numpy,ChanderG/numpy,rajathkumarmp/numpy,GrimDerp/numpy,anntzer/numpy
|
from info_scipy_base import __doc__
from scipy_base_version import scipy_base_version as __version__
from ppimport import ppimport, ppimport_attr
# The following statement is equivalent to
#
# from Matrix import Matrix as mat
#
# but avoids expensive LinearAlgebra import when
# Matrix is not used.
mat = ppimport_attr(ppimport('Matrix'), 'Matrix')
# Force Numeric to use scipy_base.fastumath instead of Numeric.umath.
import fastumath # no need to use scipy_base.fastumath
import sys as _sys
_sys.modules['umath'] = fastumath
import Numeric
from Numeric import *
import limits
from type_check import *
from index_tricks import *
from function_base import *
from shape_base import *
from matrix_base import *
from polynomial import *
from scimath import *
from machar import *
from pexec import *
Inf = inf = fastumath.PINF
try:
NAN = NaN = nan = fastumath.NAN
except AttributeError:
NaN = NAN = nan = fastumath.PINF/fastumath.PINF
from scipy_test.testing import ScipyTest
test = ScipyTest('scipy_base').test
if _sys.modules.has_key('scipy_base.Matrix') \
and _sys.modules['scipy_base.Matrix'] is None:
del _sys.modules['scipy_base.Matrix']
Fix for matrixmultiply != dot on Numeric < 23.4
|
from info_scipy_base import __doc__
from scipy_base_version import scipy_base_version as __version__
from ppimport import ppimport, ppimport_attr
# The following statement is equivalent to
#
# from Matrix import Matrix as mat
#
# but avoids expensive LinearAlgebra import when
# Matrix is not used.
mat = ppimport_attr(ppimport('Matrix'), 'Matrix')
# Force Numeric to use scipy_base.fastumath instead of Numeric.umath.
import fastumath # no need to use scipy_base.fastumath
import sys as _sys
_sys.modules['umath'] = fastumath
import Numeric
from Numeric import *
import limits
from type_check import *
from index_tricks import *
from function_base import *
from shape_base import *
from matrix_base import *
from polynomial import *
from scimath import *
from machar import *
from pexec import *
if Numeric.__version__ < '23.5':
matrixmultiply=dot
Inf = inf = fastumath.PINF
try:
NAN = NaN = nan = fastumath.NAN
except AttributeError:
NaN = NAN = nan = fastumath.PINF/fastumath.PINF
from scipy_test.testing import ScipyTest
test = ScipyTest('scipy_base').test
if _sys.modules.has_key('scipy_base.Matrix') \
and _sys.modules['scipy_base.Matrix'] is None:
del _sys.modules['scipy_base.Matrix']
|
<commit_before>
from info_scipy_base import __doc__
from scipy_base_version import scipy_base_version as __version__
from ppimport import ppimport, ppimport_attr
# The following statement is equivalent to
#
# from Matrix import Matrix as mat
#
# but avoids expensive LinearAlgebra import when
# Matrix is not used.
mat = ppimport_attr(ppimport('Matrix'), 'Matrix')
# Force Numeric to use scipy_base.fastumath instead of Numeric.umath.
import fastumath # no need to use scipy_base.fastumath
import sys as _sys
_sys.modules['umath'] = fastumath
import Numeric
from Numeric import *
import limits
from type_check import *
from index_tricks import *
from function_base import *
from shape_base import *
from matrix_base import *
from polynomial import *
from scimath import *
from machar import *
from pexec import *
Inf = inf = fastumath.PINF
try:
NAN = NaN = nan = fastumath.NAN
except AttributeError:
NaN = NAN = nan = fastumath.PINF/fastumath.PINF
from scipy_test.testing import ScipyTest
test = ScipyTest('scipy_base').test
if _sys.modules.has_key('scipy_base.Matrix') \
and _sys.modules['scipy_base.Matrix'] is None:
del _sys.modules['scipy_base.Matrix']
<commit_msg>Fix for matrixmultiply != dot on Numeric < 23.4<commit_after>
|
from info_scipy_base import __doc__
from scipy_base_version import scipy_base_version as __version__
from ppimport import ppimport, ppimport_attr
# The following statement is equivalent to
#
# from Matrix import Matrix as mat
#
# but avoids expensive LinearAlgebra import when
# Matrix is not used.
mat = ppimport_attr(ppimport('Matrix'), 'Matrix')
# Force Numeric to use scipy_base.fastumath instead of Numeric.umath.
import fastumath # no need to use scipy_base.fastumath
import sys as _sys
_sys.modules['umath'] = fastumath
import Numeric
from Numeric import *
import limits
from type_check import *
from index_tricks import *
from function_base import *
from shape_base import *
from matrix_base import *
from polynomial import *
from scimath import *
from machar import *
from pexec import *
if Numeric.__version__ < '23.5':
matrixmultiply=dot
Inf = inf = fastumath.PINF
try:
NAN = NaN = nan = fastumath.NAN
except AttributeError:
NaN = NAN = nan = fastumath.PINF/fastumath.PINF
from scipy_test.testing import ScipyTest
test = ScipyTest('scipy_base').test
if _sys.modules.has_key('scipy_base.Matrix') \
and _sys.modules['scipy_base.Matrix'] is None:
del _sys.modules['scipy_base.Matrix']
|
from info_scipy_base import __doc__
from scipy_base_version import scipy_base_version as __version__
from ppimport import ppimport, ppimport_attr
# The following statement is equivalent to
#
# from Matrix import Matrix as mat
#
# but avoids expensive LinearAlgebra import when
# Matrix is not used.
mat = ppimport_attr(ppimport('Matrix'), 'Matrix')
# Force Numeric to use scipy_base.fastumath instead of Numeric.umath.
import fastumath # no need to use scipy_base.fastumath
import sys as _sys
_sys.modules['umath'] = fastumath
import Numeric
from Numeric import *
import limits
from type_check import *
from index_tricks import *
from function_base import *
from shape_base import *
from matrix_base import *
from polynomial import *
from scimath import *
from machar import *
from pexec import *
Inf = inf = fastumath.PINF
try:
NAN = NaN = nan = fastumath.NAN
except AttributeError:
NaN = NAN = nan = fastumath.PINF/fastumath.PINF
from scipy_test.testing import ScipyTest
test = ScipyTest('scipy_base').test
if _sys.modules.has_key('scipy_base.Matrix') \
and _sys.modules['scipy_base.Matrix'] is None:
del _sys.modules['scipy_base.Matrix']
Fix for matrixmultiply != dot on Numeric < 23.4
from info_scipy_base import __doc__
from scipy_base_version import scipy_base_version as __version__
from ppimport import ppimport, ppimport_attr
# The following statement is equivalent to
#
# from Matrix import Matrix as mat
#
# but avoids expensive LinearAlgebra import when
# Matrix is not used.
mat = ppimport_attr(ppimport('Matrix'), 'Matrix')
# Force Numeric to use scipy_base.fastumath instead of Numeric.umath.
import fastumath # no need to use scipy_base.fastumath
import sys as _sys
_sys.modules['umath'] = fastumath
import Numeric
from Numeric import *
import limits
from type_check import *
from index_tricks import *
from function_base import *
from shape_base import *
from matrix_base import *
from polynomial import *
from scimath import *
from machar import *
from pexec import *
if Numeric.__version__ < '23.5':
matrixmultiply=dot
Inf = inf = fastumath.PINF
try:
NAN = NaN = nan = fastumath.NAN
except AttributeError:
NaN = NAN = nan = fastumath.PINF/fastumath.PINF
from scipy_test.testing import ScipyTest
test = ScipyTest('scipy_base').test
if _sys.modules.has_key('scipy_base.Matrix') \
and _sys.modules['scipy_base.Matrix'] is None:
del _sys.modules['scipy_base.Matrix']
|
<commit_before>
from info_scipy_base import __doc__
from scipy_base_version import scipy_base_version as __version__
from ppimport import ppimport, ppimport_attr
# The following statement is equivalent to
#
# from Matrix import Matrix as mat
#
# but avoids expensive LinearAlgebra import when
# Matrix is not used.
mat = ppimport_attr(ppimport('Matrix'), 'Matrix')
# Force Numeric to use scipy_base.fastumath instead of Numeric.umath.
import fastumath # no need to use scipy_base.fastumath
import sys as _sys
_sys.modules['umath'] = fastumath
import Numeric
from Numeric import *
import limits
from type_check import *
from index_tricks import *
from function_base import *
from shape_base import *
from matrix_base import *
from polynomial import *
from scimath import *
from machar import *
from pexec import *
Inf = inf = fastumath.PINF
try:
NAN = NaN = nan = fastumath.NAN
except AttributeError:
NaN = NAN = nan = fastumath.PINF/fastumath.PINF
from scipy_test.testing import ScipyTest
test = ScipyTest('scipy_base').test
if _sys.modules.has_key('scipy_base.Matrix') \
and _sys.modules['scipy_base.Matrix'] is None:
del _sys.modules['scipy_base.Matrix']
<commit_msg>Fix for matrixmultiply != dot on Numeric < 23.4<commit_after>
from info_scipy_base import __doc__
from scipy_base_version import scipy_base_version as __version__
from ppimport import ppimport, ppimport_attr
# The following statement is equivalent to
#
# from Matrix import Matrix as mat
#
# but avoids expensive LinearAlgebra import when
# Matrix is not used.
mat = ppimport_attr(ppimport('Matrix'), 'Matrix')
# Force Numeric to use scipy_base.fastumath instead of Numeric.umath.
import fastumath # no need to use scipy_base.fastumath
import sys as _sys
_sys.modules['umath'] = fastumath
import Numeric
from Numeric import *
import limits
from type_check import *
from index_tricks import *
from function_base import *
from shape_base import *
from matrix_base import *
from polynomial import *
from scimath import *
from machar import *
from pexec import *
if Numeric.__version__ < '23.5':
matrixmultiply=dot
Inf = inf = fastumath.PINF
try:
NAN = NaN = nan = fastumath.NAN
except AttributeError:
NaN = NAN = nan = fastumath.PINF/fastumath.PINF
from scipy_test.testing import ScipyTest
test = ScipyTest('scipy_base').test
if _sys.modules.has_key('scipy_base.Matrix') \
and _sys.modules['scipy_base.Matrix'] is None:
del _sys.modules['scipy_base.Matrix']
|
30459a1552b5b90ec5469bbae85510ef3224ccac
|
stored_messages/models.py
|
stored_messages/models.py
|
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils import timezone
from .compat import AUTH_USER_MODEL
from .settings import stored_messages_settings
INBOX_EXPIRE_DAYS = 30 # TODO move to settings
@python_2_unicode_compatible
class Message(models.Model):
"""
TODO: docstring
"""
message = models.TextField()
level = models.IntegerField()
tags = models.TextField()
date = models.DateTimeField(default=timezone.now)
def __str__(self):
return self.message
@python_2_unicode_compatible
class MessageArchive(models.Model):
"""
TODO: docstring
"""
user = models.ForeignKey(AUTH_USER_MODEL)
message = models.ForeignKey(Message)
def __str__(self):
return "[%s] %s" % (self.user, self.message)
@python_2_unicode_compatible
class Inbox(models.Model):
"""
TODO: docstring
"""
user = models.ForeignKey(AUTH_USER_MODEL)
message = models.ForeignKey(Message)
def expired(self):
expiration_date = self.message.date + timezone.timedelta(
days=stored_messages_settings.INBOX_EXPIRE_DAYS)
return expiration_date <= timezone.now()
expired.boolean = True # show a nifty icon in the admin
|
from django.db import models
from django.utils import timezone
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from .compat import AUTH_USER_MODEL
from .settings import stored_messages_settings
INBOX_EXPIRE_DAYS = 30 # TODO move to settings
@python_2_unicode_compatible
class Message(models.Model):
"""
TODO: docstring
"""
message = models.TextField()
level = models.IntegerField()
tags = models.TextField()
date = models.DateTimeField(default=timezone.now)
def __str__(self):
return self.message
@python_2_unicode_compatible
class MessageArchive(models.Model):
"""
TODO: docstring
"""
user = models.ForeignKey(AUTH_USER_MODEL)
message = models.ForeignKey(Message)
def __str__(self):
return "[%s] %s" % (self.user, self.message)
@python_2_unicode_compatible
class Inbox(models.Model):
"""
TODO: docstring
"""
user = models.ForeignKey(AUTH_USER_MODEL)
message = models.ForeignKey(Message)
class Meta:
verbose_name_plural = _('inboxes')
def expired(self):
expiration_date = self.message.date + timezone.timedelta(
days=stored_messages_settings.INBOX_EXPIRE_DAYS)
return expiration_date <= timezone.now()
expired.boolean = True # show a nifty icon in the admin
def __str__(self):
return "[%s] %s" % (self.user, self.message)
|
Add Verbose name plurar for inbox and __str__ method
|
Add Verbose name plurar for inbox and __str__ method
|
Python
|
bsd-3-clause
|
nthall/django-stored-messages,evonove/django-stored-messages,evonove/django-stored-messages,xrmx/django-stored-messages,b0bbywan/django-stored-messages,nthall/django-stored-messages,b0bbywan/django-stored-messages,xrmx/django-stored-messages
|
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils import timezone
from .compat import AUTH_USER_MODEL
from .settings import stored_messages_settings
INBOX_EXPIRE_DAYS = 30 # TODO move to settings
@python_2_unicode_compatible
class Message(models.Model):
"""
TODO: docstring
"""
message = models.TextField()
level = models.IntegerField()
tags = models.TextField()
date = models.DateTimeField(default=timezone.now)
def __str__(self):
return self.message
@python_2_unicode_compatible
class MessageArchive(models.Model):
"""
TODO: docstring
"""
user = models.ForeignKey(AUTH_USER_MODEL)
message = models.ForeignKey(Message)
def __str__(self):
return "[%s] %s" % (self.user, self.message)
@python_2_unicode_compatible
class Inbox(models.Model):
"""
TODO: docstring
"""
user = models.ForeignKey(AUTH_USER_MODEL)
message = models.ForeignKey(Message)
def expired(self):
expiration_date = self.message.date + timezone.timedelta(
days=stored_messages_settings.INBOX_EXPIRE_DAYS)
return expiration_date <= timezone.now()
expired.boolean = True # show a nifty icon in the admin
Add Verbose name plurar for inbox and __str__ method
|
from django.db import models
from django.utils import timezone
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from .compat import AUTH_USER_MODEL
from .settings import stored_messages_settings
INBOX_EXPIRE_DAYS = 30 # TODO move to settings
@python_2_unicode_compatible
class Message(models.Model):
"""
TODO: docstring
"""
message = models.TextField()
level = models.IntegerField()
tags = models.TextField()
date = models.DateTimeField(default=timezone.now)
def __str__(self):
return self.message
@python_2_unicode_compatible
class MessageArchive(models.Model):
"""
TODO: docstring
"""
user = models.ForeignKey(AUTH_USER_MODEL)
message = models.ForeignKey(Message)
def __str__(self):
return "[%s] %s" % (self.user, self.message)
@python_2_unicode_compatible
class Inbox(models.Model):
"""
TODO: docstring
"""
user = models.ForeignKey(AUTH_USER_MODEL)
message = models.ForeignKey(Message)
class Meta:
verbose_name_plural = _('inboxes')
def expired(self):
expiration_date = self.message.date + timezone.timedelta(
days=stored_messages_settings.INBOX_EXPIRE_DAYS)
return expiration_date <= timezone.now()
expired.boolean = True # show a nifty icon in the admin
def __str__(self):
return "[%s] %s" % (self.user, self.message)
|
<commit_before>from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils import timezone
from .compat import AUTH_USER_MODEL
from .settings import stored_messages_settings
INBOX_EXPIRE_DAYS = 30 # TODO move to settings
@python_2_unicode_compatible
class Message(models.Model):
"""
TODO: docstring
"""
message = models.TextField()
level = models.IntegerField()
tags = models.TextField()
date = models.DateTimeField(default=timezone.now)
def __str__(self):
return self.message
@python_2_unicode_compatible
class MessageArchive(models.Model):
"""
TODO: docstring
"""
user = models.ForeignKey(AUTH_USER_MODEL)
message = models.ForeignKey(Message)
def __str__(self):
return "[%s] %s" % (self.user, self.message)
@python_2_unicode_compatible
class Inbox(models.Model):
"""
TODO: docstring
"""
user = models.ForeignKey(AUTH_USER_MODEL)
message = models.ForeignKey(Message)
def expired(self):
expiration_date = self.message.date + timezone.timedelta(
days=stored_messages_settings.INBOX_EXPIRE_DAYS)
return expiration_date <= timezone.now()
expired.boolean = True # show a nifty icon in the admin
<commit_msg>Add Verbose name plurar for inbox and __str__ method<commit_after>
|
from django.db import models
from django.utils import timezone
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from .compat import AUTH_USER_MODEL
from .settings import stored_messages_settings
INBOX_EXPIRE_DAYS = 30 # TODO move to settings
@python_2_unicode_compatible
class Message(models.Model):
"""
TODO: docstring
"""
message = models.TextField()
level = models.IntegerField()
tags = models.TextField()
date = models.DateTimeField(default=timezone.now)
def __str__(self):
return self.message
@python_2_unicode_compatible
class MessageArchive(models.Model):
"""
TODO: docstring
"""
user = models.ForeignKey(AUTH_USER_MODEL)
message = models.ForeignKey(Message)
def __str__(self):
return "[%s] %s" % (self.user, self.message)
@python_2_unicode_compatible
class Inbox(models.Model):
"""
TODO: docstring
"""
user = models.ForeignKey(AUTH_USER_MODEL)
message = models.ForeignKey(Message)
class Meta:
verbose_name_plural = _('inboxes')
def expired(self):
expiration_date = self.message.date + timezone.timedelta(
days=stored_messages_settings.INBOX_EXPIRE_DAYS)
return expiration_date <= timezone.now()
expired.boolean = True # show a nifty icon in the admin
def __str__(self):
return "[%s] %s" % (self.user, self.message)
|
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils import timezone
from .compat import AUTH_USER_MODEL
from .settings import stored_messages_settings
INBOX_EXPIRE_DAYS = 30 # TODO move to settings
@python_2_unicode_compatible
class Message(models.Model):
"""
TODO: docstring
"""
message = models.TextField()
level = models.IntegerField()
tags = models.TextField()
date = models.DateTimeField(default=timezone.now)
def __str__(self):
return self.message
@python_2_unicode_compatible
class MessageArchive(models.Model):
"""
TODO: docstring
"""
user = models.ForeignKey(AUTH_USER_MODEL)
message = models.ForeignKey(Message)
def __str__(self):
return "[%s] %s" % (self.user, self.message)
@python_2_unicode_compatible
class Inbox(models.Model):
"""
TODO: docstring
"""
user = models.ForeignKey(AUTH_USER_MODEL)
message = models.ForeignKey(Message)
def expired(self):
expiration_date = self.message.date + timezone.timedelta(
days=stored_messages_settings.INBOX_EXPIRE_DAYS)
return expiration_date <= timezone.now()
expired.boolean = True # show a nifty icon in the admin
Add Verbose name plurar for inbox and __str__ methodfrom django.db import models
from django.utils import timezone
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from .compat import AUTH_USER_MODEL
from .settings import stored_messages_settings
INBOX_EXPIRE_DAYS = 30 # TODO move to settings
@python_2_unicode_compatible
class Message(models.Model):
"""
TODO: docstring
"""
message = models.TextField()
level = models.IntegerField()
tags = models.TextField()
date = models.DateTimeField(default=timezone.now)
def __str__(self):
return self.message
@python_2_unicode_compatible
class MessageArchive(models.Model):
"""
TODO: docstring
"""
user = models.ForeignKey(AUTH_USER_MODEL)
message = models.ForeignKey(Message)
def __str__(self):
return "[%s] %s" % (self.user, self.message)
@python_2_unicode_compatible
class Inbox(models.Model):
"""
TODO: docstring
"""
user = models.ForeignKey(AUTH_USER_MODEL)
message = models.ForeignKey(Message)
class Meta:
verbose_name_plural = _('inboxes')
def expired(self):
expiration_date = self.message.date + timezone.timedelta(
days=stored_messages_settings.INBOX_EXPIRE_DAYS)
return expiration_date <= timezone.now()
expired.boolean = True # show a nifty icon in the admin
def __str__(self):
return "[%s] %s" % (self.user, self.message)
|
<commit_before>from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils import timezone
from .compat import AUTH_USER_MODEL
from .settings import stored_messages_settings
INBOX_EXPIRE_DAYS = 30 # TODO move to settings
@python_2_unicode_compatible
class Message(models.Model):
"""
TODO: docstring
"""
message = models.TextField()
level = models.IntegerField()
tags = models.TextField()
date = models.DateTimeField(default=timezone.now)
def __str__(self):
return self.message
@python_2_unicode_compatible
class MessageArchive(models.Model):
"""
TODO: docstring
"""
user = models.ForeignKey(AUTH_USER_MODEL)
message = models.ForeignKey(Message)
def __str__(self):
return "[%s] %s" % (self.user, self.message)
@python_2_unicode_compatible
class Inbox(models.Model):
"""
TODO: docstring
"""
user = models.ForeignKey(AUTH_USER_MODEL)
message = models.ForeignKey(Message)
def expired(self):
expiration_date = self.message.date + timezone.timedelta(
days=stored_messages_settings.INBOX_EXPIRE_DAYS)
return expiration_date <= timezone.now()
expired.boolean = True # show a nifty icon in the admin
<commit_msg>Add Verbose name plurar for inbox and __str__ method<commit_after>from django.db import models
from django.utils import timezone
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from .compat import AUTH_USER_MODEL
from .settings import stored_messages_settings
INBOX_EXPIRE_DAYS = 30 # TODO move to settings
@python_2_unicode_compatible
class Message(models.Model):
"""
TODO: docstring
"""
message = models.TextField()
level = models.IntegerField()
tags = models.TextField()
date = models.DateTimeField(default=timezone.now)
def __str__(self):
return self.message
@python_2_unicode_compatible
class MessageArchive(models.Model):
"""
TODO: docstring
"""
user = models.ForeignKey(AUTH_USER_MODEL)
message = models.ForeignKey(Message)
def __str__(self):
return "[%s] %s" % (self.user, self.message)
@python_2_unicode_compatible
class Inbox(models.Model):
"""
TODO: docstring
"""
user = models.ForeignKey(AUTH_USER_MODEL)
message = models.ForeignKey(Message)
class Meta:
verbose_name_plural = _('inboxes')
def expired(self):
expiration_date = self.message.date + timezone.timedelta(
days=stored_messages_settings.INBOX_EXPIRE_DAYS)
return expiration_date <= timezone.now()
expired.boolean = True # show a nifty icon in the admin
def __str__(self):
return "[%s] %s" % (self.user, self.message)
|
a9f51a3e8eacc360d4f353a1fbe649809f88e4ce
|
astropy/io/misc/asdf/tags/time/tests/test_timedelta.py
|
astropy/io/misc/asdf/tags/time/tests/test_timedelta.py
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
import pytest
from asdf.tests.helpers import assert_roundtrip_tree
from astropy.time import Time
@pytest.mark.parametrize('fmt', Time.FORMATS.keys())
def test_timedelta(fmt, tmpdir):
t1 = Time(Time.now(), format=fmt)
t2 = Time(Time.now(), format=fmt)
td = t2 - t1
tree = dict(timedelta=td)
assert_roundtrip_tree(tree, tmpdir)
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
import pytest
asdf = pytest.importorskip('asdf')
from asdf.tests.helpers import assert_roundtrip_tree
from astropy.time import Time
@pytest.mark.parametrize('fmt', Time.FORMATS.keys())
def test_timedelta(fmt, tmpdir):
t1 = Time(Time.now(), format=fmt)
t2 = Time(Time.now(), format=fmt)
td = t2 - t1
tree = dict(timedelta=td)
assert_roundtrip_tree(tree, tmpdir)
|
Add importorskip('asdf') to test for TimeDelta tag
|
Add importorskip('asdf') to test for TimeDelta tag
|
Python
|
bsd-3-clause
|
dhomeier/astropy,pllim/astropy,bsipocz/astropy,MSeifert04/astropy,StuartLittlefair/astropy,pllim/astropy,aleksandr-bakanov/astropy,saimn/astropy,bsipocz/astropy,pllim/astropy,saimn/astropy,aleksandr-bakanov/astropy,StuartLittlefair/astropy,astropy/astropy,saimn/astropy,larrybradley/astropy,MSeifert04/astropy,pllim/astropy,lpsinger/astropy,MSeifert04/astropy,stargaser/astropy,StuartLittlefair/astropy,mhvk/astropy,stargaser/astropy,saimn/astropy,stargaser/astropy,mhvk/astropy,dhomeier/astropy,MSeifert04/astropy,astropy/astropy,lpsinger/astropy,mhvk/astropy,aleksandr-bakanov/astropy,mhvk/astropy,dhomeier/astropy,pllim/astropy,StuartLittlefair/astropy,dhomeier/astropy,larrybradley/astropy,saimn/astropy,astropy/astropy,lpsinger/astropy,larrybradley/astropy,StuartLittlefair/astropy,astropy/astropy,aleksandr-bakanov/astropy,larrybradley/astropy,stargaser/astropy,bsipocz/astropy,bsipocz/astropy,lpsinger/astropy,dhomeier/astropy,larrybradley/astropy,mhvk/astropy,lpsinger/astropy,astropy/astropy
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
import pytest
from asdf.tests.helpers import assert_roundtrip_tree
from astropy.time import Time
@pytest.mark.parametrize('fmt', Time.FORMATS.keys())
def test_timedelta(fmt, tmpdir):
t1 = Time(Time.now(), format=fmt)
t2 = Time(Time.now(), format=fmt)
td = t2 - t1
tree = dict(timedelta=td)
assert_roundtrip_tree(tree, tmpdir)
Add importorskip('asdf') to test for TimeDelta tag
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
import pytest
asdf = pytest.importorskip('asdf')
from asdf.tests.helpers import assert_roundtrip_tree
from astropy.time import Time
@pytest.mark.parametrize('fmt', Time.FORMATS.keys())
def test_timedelta(fmt, tmpdir):
t1 = Time(Time.now(), format=fmt)
t2 = Time(Time.now(), format=fmt)
td = t2 - t1
tree = dict(timedelta=td)
assert_roundtrip_tree(tree, tmpdir)
|
<commit_before># Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
import pytest
from asdf.tests.helpers import assert_roundtrip_tree
from astropy.time import Time
@pytest.mark.parametrize('fmt', Time.FORMATS.keys())
def test_timedelta(fmt, tmpdir):
t1 = Time(Time.now(), format=fmt)
t2 = Time(Time.now(), format=fmt)
td = t2 - t1
tree = dict(timedelta=td)
assert_roundtrip_tree(tree, tmpdir)
<commit_msg>Add importorskip('asdf') to test for TimeDelta tag<commit_after>
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
import pytest
asdf = pytest.importorskip('asdf')
from asdf.tests.helpers import assert_roundtrip_tree
from astropy.time import Time
@pytest.mark.parametrize('fmt', Time.FORMATS.keys())
def test_timedelta(fmt, tmpdir):
t1 = Time(Time.now(), format=fmt)
t2 = Time(Time.now(), format=fmt)
td = t2 - t1
tree = dict(timedelta=td)
assert_roundtrip_tree(tree, tmpdir)
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
import pytest
from asdf.tests.helpers import assert_roundtrip_tree
from astropy.time import Time
@pytest.mark.parametrize('fmt', Time.FORMATS.keys())
def test_timedelta(fmt, tmpdir):
t1 = Time(Time.now(), format=fmt)
t2 = Time(Time.now(), format=fmt)
td = t2 - t1
tree = dict(timedelta=td)
assert_roundtrip_tree(tree, tmpdir)
Add importorskip('asdf') to test for TimeDelta tag# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
import pytest
asdf = pytest.importorskip('asdf')
from asdf.tests.helpers import assert_roundtrip_tree
from astropy.time import Time
@pytest.mark.parametrize('fmt', Time.FORMATS.keys())
def test_timedelta(fmt, tmpdir):
t1 = Time(Time.now(), format=fmt)
t2 = Time(Time.now(), format=fmt)
td = t2 - t1
tree = dict(timedelta=td)
assert_roundtrip_tree(tree, tmpdir)
|
<commit_before># Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
import pytest
from asdf.tests.helpers import assert_roundtrip_tree
from astropy.time import Time
@pytest.mark.parametrize('fmt', Time.FORMATS.keys())
def test_timedelta(fmt, tmpdir):
t1 = Time(Time.now(), format=fmt)
t2 = Time(Time.now(), format=fmt)
td = t2 - t1
tree = dict(timedelta=td)
assert_roundtrip_tree(tree, tmpdir)
<commit_msg>Add importorskip('asdf') to test for TimeDelta tag<commit_after># Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
import pytest
asdf = pytest.importorskip('asdf')
from asdf.tests.helpers import assert_roundtrip_tree
from astropy.time import Time
@pytest.mark.parametrize('fmt', Time.FORMATS.keys())
def test_timedelta(fmt, tmpdir):
t1 = Time(Time.now(), format=fmt)
t2 = Time(Time.now(), format=fmt)
td = t2 - t1
tree = dict(timedelta=td)
assert_roundtrip_tree(tree, tmpdir)
|
a938128b1e6b7654f93047883c90bf7b80ee564e
|
pentai/t_all.py
|
pentai/t_all.py
|
#!/usr/bin/python
import unittest
import pentai.base.t_all as b_t
import pentai.ai.t_all as ai_t
import pentai.db.t_all as db_t
import pentai.db.zodb_dict as z_m
import os
def suite():
    """Aggregate the base, AI and DB suites into one TestSuite.

    The result is also stored in the module-level ``all_tests`` global,
    preserving the original side effect for any caller that inspects it.
    """
    global all_tests
    all_tests = unittest.TestSuite()
    for sub_suite in (b_t.suite(), ai_t.suite(), db_t.suite()):
        all_tests.addTest(sub_suite)
    return all_tests
def main():
    """Run the full test suite against a scratch database, then clean up.

    The scratch DB files are removed in a ``finally`` block so they do not
    linger if the run raises, and each unlink tolerates a missing file
    (e.g. when ZODB never created that side file).
    """
    z_m.set_db("test.db")
    try:
        unittest.TextTestRunner().run(suite())
    finally:
        # Remove the scratch DB plus its ZODB side files; any of them may
        # be absent, which previously made os.unlink raise and abort the
        # remaining cleanup.
        for suffix in ("", ".lock", ".tmp", ".index"):
            try:
                os.unlink("test.db" + suffix)
            except OSError:
                pass
if __name__ == "__main__":
main()
|
#!/usr/bin/python
import unittest
import pentai.base.t_all as b_t
import pentai.ai.t_all as ai_t
import pentai.db.t_all as db_t
import pentai.db.zodb_dict as z_m
import pentai.db.test_db as tdb_m
def suite():
global all_tests
all_tests = unittest.TestSuite()
all_tests.addTest(b_t.suite())
all_tests.addTest(ai_t.suite())
all_tests.addTest(db_t.suite())
return all_tests
def main():
unittest.TextTestRunner().run(suite())
if __name__ == "__main__":
main()
tdb_m.delete_test_db()
|
Delete test db after a run
|
Delete test db after a run
|
Python
|
mit
|
cropleyb/pentai,cropleyb/pentai,cropleyb/pentai
|
#!/usr/bin/python
import unittest
import pentai.base.t_all as b_t
import pentai.ai.t_all as ai_t
import pentai.db.t_all as db_t
import pentai.db.zodb_dict as z_m
import os
def suite():
global all_tests
all_tests = unittest.TestSuite()
all_tests.addTest(b_t.suite())
all_tests.addTest(ai_t.suite())
all_tests.addTest(db_t.suite())
return all_tests
def main():
z_m.set_db("test.db")
unittest.TextTestRunner().run(suite())
os.unlink("test.db")
os.unlink("test.db.lock")
os.unlink("test.db.tmp")
os.unlink("test.db.index")
if __name__ == "__main__":
main()
Delete test db after a run
|
#!/usr/bin/python
import unittest
import pentai.base.t_all as b_t
import pentai.ai.t_all as ai_t
import pentai.db.t_all as db_t
import pentai.db.zodb_dict as z_m
import pentai.db.test_db as tdb_m
def suite():
global all_tests
all_tests = unittest.TestSuite()
all_tests.addTest(b_t.suite())
all_tests.addTest(ai_t.suite())
all_tests.addTest(db_t.suite())
return all_tests
def main():
unittest.TextTestRunner().run(suite())
if __name__ == "__main__":
main()
tdb_m.delete_test_db()
|
<commit_before>#!/usr/bin/python
import unittest
import pentai.base.t_all as b_t
import pentai.ai.t_all as ai_t
import pentai.db.t_all as db_t
import pentai.db.zodb_dict as z_m
import os
def suite():
global all_tests
all_tests = unittest.TestSuite()
all_tests.addTest(b_t.suite())
all_tests.addTest(ai_t.suite())
all_tests.addTest(db_t.suite())
return all_tests
def main():
z_m.set_db("test.db")
unittest.TextTestRunner().run(suite())
os.unlink("test.db")
os.unlink("test.db.lock")
os.unlink("test.db.tmp")
os.unlink("test.db.index")
if __name__ == "__main__":
main()
<commit_msg>Delete test db after a run<commit_after>
|
#!/usr/bin/python
import unittest
import pentai.base.t_all as b_t
import pentai.ai.t_all as ai_t
import pentai.db.t_all as db_t
import pentai.db.zodb_dict as z_m
import pentai.db.test_db as tdb_m
def suite():
global all_tests
all_tests = unittest.TestSuite()
all_tests.addTest(b_t.suite())
all_tests.addTest(ai_t.suite())
all_tests.addTest(db_t.suite())
return all_tests
def main():
unittest.TextTestRunner().run(suite())
if __name__ == "__main__":
main()
tdb_m.delete_test_db()
|
#!/usr/bin/python
import unittest
import pentai.base.t_all as b_t
import pentai.ai.t_all as ai_t
import pentai.db.t_all as db_t
import pentai.db.zodb_dict as z_m
import os
def suite():
global all_tests
all_tests = unittest.TestSuite()
all_tests.addTest(b_t.suite())
all_tests.addTest(ai_t.suite())
all_tests.addTest(db_t.suite())
return all_tests
def main():
z_m.set_db("test.db")
unittest.TextTestRunner().run(suite())
os.unlink("test.db")
os.unlink("test.db.lock")
os.unlink("test.db.tmp")
os.unlink("test.db.index")
if __name__ == "__main__":
main()
Delete test db after a run#!/usr/bin/python
import unittest
import pentai.base.t_all as b_t
import pentai.ai.t_all as ai_t
import pentai.db.t_all as db_t
import pentai.db.zodb_dict as z_m
import pentai.db.test_db as tdb_m
def suite():
global all_tests
all_tests = unittest.TestSuite()
all_tests.addTest(b_t.suite())
all_tests.addTest(ai_t.suite())
all_tests.addTest(db_t.suite())
return all_tests
def main():
unittest.TextTestRunner().run(suite())
if __name__ == "__main__":
main()
tdb_m.delete_test_db()
|
<commit_before>#!/usr/bin/python
import unittest
import pentai.base.t_all as b_t
import pentai.ai.t_all as ai_t
import pentai.db.t_all as db_t
import pentai.db.zodb_dict as z_m
import os
def suite():
global all_tests
all_tests = unittest.TestSuite()
all_tests.addTest(b_t.suite())
all_tests.addTest(ai_t.suite())
all_tests.addTest(db_t.suite())
return all_tests
def main():
z_m.set_db("test.db")
unittest.TextTestRunner().run(suite())
os.unlink("test.db")
os.unlink("test.db.lock")
os.unlink("test.db.tmp")
os.unlink("test.db.index")
if __name__ == "__main__":
main()
<commit_msg>Delete test db after a run<commit_after>#!/usr/bin/python
import unittest
import pentai.base.t_all as b_t
import pentai.ai.t_all as ai_t
import pentai.db.t_all as db_t
import pentai.db.zodb_dict as z_m
import pentai.db.test_db as tdb_m
def suite():
global all_tests
all_tests = unittest.TestSuite()
all_tests.addTest(b_t.suite())
all_tests.addTest(ai_t.suite())
all_tests.addTest(db_t.suite())
return all_tests
def main():
unittest.TextTestRunner().run(suite())
if __name__ == "__main__":
main()
tdb_m.delete_test_db()
|
03c479bce1d135e9d1c4acfbb085340b14679feb
|
fedmsg.d/fmn.py
|
fedmsg.d/fmn.py
|
import socket
hostname = socket.gethostname().split('.')[-1]
config = {
# Consumer stuff
"fmn.consumer.enabled": True,
"fmn.sqlalchemy.uri": "sqlite:////var/tmp/fmn-dev-db.sqlite",
## Backend stuff ##
# Email
"fmn.email.mailserver": "127.0.0.1:25",
"fmn.email.from_address": "fedmsg-notifications@fedoraproject.org",
# IRC
"fmn.irc.network": "irc.freenode.net",
"fmn.irc.nickname": "threebot",
"fmn.irc.port": 6667,
"fmn.irc.timeout": 120,
# GCM
"fmn.gcm.post_url": "wat",
"fmn.gcm.api_key": "wat",
# Generic stuff
"endpoints": {
"fmn.%s" % hostname: [
"tcp://127.0.0.1:3041",
],
},
"logging": dict(
loggers=dict(
fmn={
"level": "DEBUG",
"propagate": False,
"handlers": ["console"],
},
),
),
}
|
"""fedmsg configuration for FMN (FedMsg Notifications) development."""
import socket
# NOTE(review): this keeps only the LAST dot-separated label of the host
# name (".split('.')[-1]"), not the leading short hostname — confirm that
# is intended for the endpoint key below.
hostname = socket.gethostname().split('.')[-1]
import fmn.lib
config = {
    # General stuff
    # Preload the valid filter code paths so consumers can validate rules.
    "fmn.valid_code_paths": fmn.lib.load_filters(),
    # Consumer stuff
    "fmn.consumer.enabled": True,
    "fmn.sqlalchemy.uri": "sqlite:////var/tmp/fmn-dev-db.sqlite",
    ## Backend stuff ##
    # Email
    "fmn.email.mailserver": "127.0.0.1:25",
    "fmn.email.from_address": "fedmsg-notifications@fedoraproject.org",
    # IRC
    "fmn.irc.network": "irc.freenode.net",
    "fmn.irc.nickname": "threebot",
    "fmn.irc.port": 6667,
    "fmn.irc.timeout": 120,
    # GCM — "wat" values are presumably placeholders; real credentials
    # must be supplied before the GCM backend can work.
    "fmn.gcm.post_url": "wat",
    "fmn.gcm.api_key": "wat",
    # Generic stuff
    "endpoints": {
        "fmn.%s" % hostname: [
            "tcp://127.0.0.1:3041",
        ],
    },
    "logging": dict(
        loggers=dict(
            fmn={
                "level": "DEBUG",
                "propagate": False,
                "handlers": ["console"],
            },
        ),
    ),
}
|
Load code paths into the fedmsg dict.
|
Load code paths into the fedmsg dict.
|
Python
|
lgpl-2.1
|
jeremycline/fmn,jeremycline/fmn,jeremycline/fmn
|
import socket
hostname = socket.gethostname().split('.')[-1]
config = {
# Consumer stuff
"fmn.consumer.enabled": True,
"fmn.sqlalchemy.uri": "sqlite:////var/tmp/fmn-dev-db.sqlite",
## Backend stuff ##
# Email
"fmn.email.mailserver": "127.0.0.1:25",
"fmn.email.from_address": "fedmsg-notifications@fedoraproject.org",
# IRC
"fmn.irc.network": "irc.freenode.net",
"fmn.irc.nickname": "threebot",
"fmn.irc.port": 6667,
"fmn.irc.timeout": 120,
# GCM
"fmn.gcm.post_url": "wat",
"fmn.gcm.api_key": "wat",
# Generic stuff
"endpoints": {
"fmn.%s" % hostname: [
"tcp://127.0.0.1:3041",
],
},
"logging": dict(
loggers=dict(
fmn={
"level": "DEBUG",
"propagate": False,
"handlers": ["console"],
},
),
),
}
Load code paths into the fedmsg dict.
|
import socket
hostname = socket.gethostname().split('.')[-1]
import fmn.lib
config = {
# General stuff
"fmn.valid_code_paths": fmn.lib.load_filters(),
# Consumer stuff
"fmn.consumer.enabled": True,
"fmn.sqlalchemy.uri": "sqlite:////var/tmp/fmn-dev-db.sqlite",
## Backend stuff ##
# Email
"fmn.email.mailserver": "127.0.0.1:25",
"fmn.email.from_address": "fedmsg-notifications@fedoraproject.org",
# IRC
"fmn.irc.network": "irc.freenode.net",
"fmn.irc.nickname": "threebot",
"fmn.irc.port": 6667,
"fmn.irc.timeout": 120,
# GCM
"fmn.gcm.post_url": "wat",
"fmn.gcm.api_key": "wat",
# Generic stuff
"endpoints": {
"fmn.%s" % hostname: [
"tcp://127.0.0.1:3041",
],
},
"logging": dict(
loggers=dict(
fmn={
"level": "DEBUG",
"propagate": False,
"handlers": ["console"],
},
),
),
}
|
<commit_before>import socket
hostname = socket.gethostname().split('.')[-1]
config = {
# Consumer stuff
"fmn.consumer.enabled": True,
"fmn.sqlalchemy.uri": "sqlite:////var/tmp/fmn-dev-db.sqlite",
## Backend stuff ##
# Email
"fmn.email.mailserver": "127.0.0.1:25",
"fmn.email.from_address": "fedmsg-notifications@fedoraproject.org",
# IRC
"fmn.irc.network": "irc.freenode.net",
"fmn.irc.nickname": "threebot",
"fmn.irc.port": 6667,
"fmn.irc.timeout": 120,
# GCM
"fmn.gcm.post_url": "wat",
"fmn.gcm.api_key": "wat",
# Generic stuff
"endpoints": {
"fmn.%s" % hostname: [
"tcp://127.0.0.1:3041",
],
},
"logging": dict(
loggers=dict(
fmn={
"level": "DEBUG",
"propagate": False,
"handlers": ["console"],
},
),
),
}
<commit_msg>Load code paths into the fedmsg dict.<commit_after>
|
import socket
hostname = socket.gethostname().split('.')[-1]
import fmn.lib
config = {
# General stuff
"fmn.valid_code_paths": fmn.lib.load_filters(),
# Consumer stuff
"fmn.consumer.enabled": True,
"fmn.sqlalchemy.uri": "sqlite:////var/tmp/fmn-dev-db.sqlite",
## Backend stuff ##
# Email
"fmn.email.mailserver": "127.0.0.1:25",
"fmn.email.from_address": "fedmsg-notifications@fedoraproject.org",
# IRC
"fmn.irc.network": "irc.freenode.net",
"fmn.irc.nickname": "threebot",
"fmn.irc.port": 6667,
"fmn.irc.timeout": 120,
# GCM
"fmn.gcm.post_url": "wat",
"fmn.gcm.api_key": "wat",
# Generic stuff
"endpoints": {
"fmn.%s" % hostname: [
"tcp://127.0.0.1:3041",
],
},
"logging": dict(
loggers=dict(
fmn={
"level": "DEBUG",
"propagate": False,
"handlers": ["console"],
},
),
),
}
|
import socket
hostname = socket.gethostname().split('.')[-1]
config = {
# Consumer stuff
"fmn.consumer.enabled": True,
"fmn.sqlalchemy.uri": "sqlite:////var/tmp/fmn-dev-db.sqlite",
## Backend stuff ##
# Email
"fmn.email.mailserver": "127.0.0.1:25",
"fmn.email.from_address": "fedmsg-notifications@fedoraproject.org",
# IRC
"fmn.irc.network": "irc.freenode.net",
"fmn.irc.nickname": "threebot",
"fmn.irc.port": 6667,
"fmn.irc.timeout": 120,
# GCM
"fmn.gcm.post_url": "wat",
"fmn.gcm.api_key": "wat",
# Generic stuff
"endpoints": {
"fmn.%s" % hostname: [
"tcp://127.0.0.1:3041",
],
},
"logging": dict(
loggers=dict(
fmn={
"level": "DEBUG",
"propagate": False,
"handlers": ["console"],
},
),
),
}
Load code paths into the fedmsg dict.import socket
hostname = socket.gethostname().split('.')[-1]
import fmn.lib
config = {
# General stuff
"fmn.valid_code_paths": fmn.lib.load_filters(),
# Consumer stuff
"fmn.consumer.enabled": True,
"fmn.sqlalchemy.uri": "sqlite:////var/tmp/fmn-dev-db.sqlite",
## Backend stuff ##
# Email
"fmn.email.mailserver": "127.0.0.1:25",
"fmn.email.from_address": "fedmsg-notifications@fedoraproject.org",
# IRC
"fmn.irc.network": "irc.freenode.net",
"fmn.irc.nickname": "threebot",
"fmn.irc.port": 6667,
"fmn.irc.timeout": 120,
# GCM
"fmn.gcm.post_url": "wat",
"fmn.gcm.api_key": "wat",
# Generic stuff
"endpoints": {
"fmn.%s" % hostname: [
"tcp://127.0.0.1:3041",
],
},
"logging": dict(
loggers=dict(
fmn={
"level": "DEBUG",
"propagate": False,
"handlers": ["console"],
},
),
),
}
|
<commit_before>import socket
hostname = socket.gethostname().split('.')[-1]
config = {
# Consumer stuff
"fmn.consumer.enabled": True,
"fmn.sqlalchemy.uri": "sqlite:////var/tmp/fmn-dev-db.sqlite",
## Backend stuff ##
# Email
"fmn.email.mailserver": "127.0.0.1:25",
"fmn.email.from_address": "fedmsg-notifications@fedoraproject.org",
# IRC
"fmn.irc.network": "irc.freenode.net",
"fmn.irc.nickname": "threebot",
"fmn.irc.port": 6667,
"fmn.irc.timeout": 120,
# GCM
"fmn.gcm.post_url": "wat",
"fmn.gcm.api_key": "wat",
# Generic stuff
"endpoints": {
"fmn.%s" % hostname: [
"tcp://127.0.0.1:3041",
],
},
"logging": dict(
loggers=dict(
fmn={
"level": "DEBUG",
"propagate": False,
"handlers": ["console"],
},
),
),
}
<commit_msg>Load code paths into the fedmsg dict.<commit_after>import socket
hostname = socket.gethostname().split('.')[-1]
import fmn.lib
config = {
# General stuff
"fmn.valid_code_paths": fmn.lib.load_filters(),
# Consumer stuff
"fmn.consumer.enabled": True,
"fmn.sqlalchemy.uri": "sqlite:////var/tmp/fmn-dev-db.sqlite",
## Backend stuff ##
# Email
"fmn.email.mailserver": "127.0.0.1:25",
"fmn.email.from_address": "fedmsg-notifications@fedoraproject.org",
# IRC
"fmn.irc.network": "irc.freenode.net",
"fmn.irc.nickname": "threebot",
"fmn.irc.port": 6667,
"fmn.irc.timeout": 120,
# GCM
"fmn.gcm.post_url": "wat",
"fmn.gcm.api_key": "wat",
# Generic stuff
"endpoints": {
"fmn.%s" % hostname: [
"tcp://127.0.0.1:3041",
],
},
"logging": dict(
loggers=dict(
fmn={
"level": "DEBUG",
"propagate": False,
"handlers": ["console"],
},
),
),
}
|
827bb2fb8025fcd882c58b7b9136bc68231319dd
|
src/randbot.py
|
src/randbot.py
|
__author__ = 'Antony Cherepanov'
import tweepy
from src import dbhandler
from src import generator
class RandBot(object):
def __init__(self):
self.db = dbhandler.DBHandler()
self.auth = tweepy.OAuthHandler(*(self.db.get_consumer_data()))
self.auth.set_access_token(*(self.db.get_access_token_data()))
self.api = tweepy.API(self.auth)
def run(self):
public_tweets = self.api.home_timeline()
for tweet in public_tweets:
print(tweet.text)
if __name__ == '__main__':
print("Start RandBot")
bot = RandBot()
bot.run()
|
__author__ = 'Antony Cherepanov'
import tweepy
from src import dbhandler
from src import generator
class RandBot(object):
    """Twitter bot that replies to mentions with a per-user number.

    Credentials come from the DB handler; replies are queued in
    ``self.tweets`` during a run and flushed by ``__send_tweets``.
    """

    def __init__(self):
        # Replies queued for sending at the end of run().
        self.tweets = list()
        self.db = dbhandler.DBHandler()
        self.auth = tweepy.OAuthHandler(*(self.db.get_consumer_data()))
        self.auth.set_access_token(*(self.db.get_access_token_data()))
        self.api = tweepy.API(self.auth)

    def run(self):
        """Process new mentions and search results, then send replies."""
        self.__process_last_mentions()
        self.__process_search()
        self.__send_tweets()

    def __process_last_mentions(self):
        """Fetch up to 10 mentions newer than the last handled message."""
        msg_id = self.db.get_last_msg_id()
        if msg_id is None:
            mentions = self.api.mentions_timeline(count=10)
        else:
            mentions = self.api.mentions_timeline(since_id=msg_id, count=10)
        for tweet in mentions:
            print(tweet.text)
            user_data = self.db.get_user_data(tweet.author.id_str)
            if user_data is None:
                self.__process_new_user(tweet)
            else:
                # BUG FIX: the closing parenthesis was misplaced, so
                # str.format() received one argument for two placeholders
                # (IndexError) and list.append() received two arguments
                # (TypeError). Both format arguments now go to format().
                self.tweets.append("Your number, @{0}, is {1}".format(
                    user_data['name'], user_data['number']))

    def __process_new_user(self, tweet):
        """Generate a number for a first-time user and queue a greeting."""
        if tweet is None:
            print("Invalid tweet - it is empty!")
            return
        gen = generator.Generator()
        number = gen.generate(tweet)
        if number is None:
            # Generator declined to produce a number; nothing to reply.
            return
        # user_id = tweet.author.id_str
        user_name = tweet.author.screen_name
        # user_data = {'user_id': user_id, 'name': user_name, 'number': number}
        # self.db.add_user(user_data)
        self.tweets.append("Hi @{0}. I have a number for you: {1}".format(
            user_name, number))

    def __process_search(self):
        # Placeholder: searching for candidate tweets is not implemented yet.
        pass

    def __send_tweets(self):
        # Currently only prints the queued replies instead of posting them.
        for tweet in self.tweets:
            print(tweet)
if __name__ == '__main__':
print("Start RandBot")
bot = RandBot()
bot.run()
|
Add functions to process tweets
|
Add functions to process tweets
|
Python
|
mit
|
iamantony/randbot
|
__author__ = 'Antony Cherepanov'
import tweepy
from src import dbhandler
from src import generator
class RandBot(object):
def __init__(self):
self.db = dbhandler.DBHandler()
self.auth = tweepy.OAuthHandler(*(self.db.get_consumer_data()))
self.auth.set_access_token(*(self.db.get_access_token_data()))
self.api = tweepy.API(self.auth)
def run(self):
public_tweets = self.api.home_timeline()
for tweet in public_tweets:
print(tweet.text)
if __name__ == '__main__':
print("Start RandBot")
bot = RandBot()
bot.run()
Add functions to process tweets
|
__author__ = 'Antony Cherepanov'
import tweepy
from src import dbhandler
from src import generator
class RandBot(object):
def __init__(self):
self.tweets = list()
self.db = dbhandler.DBHandler()
self.auth = tweepy.OAuthHandler(*(self.db.get_consumer_data()))
self.auth.set_access_token(*(self.db.get_access_token_data()))
self.api = tweepy.API(self.auth)
def run(self):
self.__process_last_mentions()
self.__process_search()
self.__send_tweets()
def __process_last_mentions(self):
mentions = list()
msg_id = self.db.get_last_msg_id()
if msg_id is None:
mentions = self.api.mentions_timeline(count=10)
else:
mentions = self.api.mentions_timeline(since_id=msg_id, count=10)
for tweet in mentions:
print(tweet.text)
user_data = self.db.get_user_data(tweet.author.id_str)
if user_data is None:
self.__process_new_user(tweet)
else:
self.tweets.append("Your number, @{0}, is {1}".format(
user_data['name']), user_data['number'])
def __process_new_user(self, tweet):
if tweet is None:
print("Invalid tweet - it is empty!")
return
gen = generator.Generator()
number = gen.generate(tweet)
if number is None:
return
# user_id = tweet.author.id_str
user_name = tweet.author.screen_name
# user_data = {'user_id': user_id, 'name': user_name, 'number': number}
# self.db.add_user(user_data)
self.tweets.append("Hi @{0}. I have a number for you: {1}".format(
user_name, number))
def __process_search(self):
pass
def __send_tweets(self):
for tweet in self.tweets:
print(tweet)
if __name__ == '__main__':
print("Start RandBot")
bot = RandBot()
bot.run()
|
<commit_before>__author__ = 'Antony Cherepanov'
import tweepy
from src import dbhandler
from src import generator
class RandBot(object):
def __init__(self):
self.db = dbhandler.DBHandler()
self.auth = tweepy.OAuthHandler(*(self.db.get_consumer_data()))
self.auth.set_access_token(*(self.db.get_access_token_data()))
self.api = tweepy.API(self.auth)
def run(self):
public_tweets = self.api.home_timeline()
for tweet in public_tweets:
print(tweet.text)
if __name__ == '__main__':
print("Start RandBot")
bot = RandBot()
bot.run()
<commit_msg>Add functions to process tweets<commit_after>
|
__author__ = 'Antony Cherepanov'
import tweepy
from src import dbhandler
from src import generator
class RandBot(object):
def __init__(self):
self.tweets = list()
self.db = dbhandler.DBHandler()
self.auth = tweepy.OAuthHandler(*(self.db.get_consumer_data()))
self.auth.set_access_token(*(self.db.get_access_token_data()))
self.api = tweepy.API(self.auth)
def run(self):
self.__process_last_mentions()
self.__process_search()
self.__send_tweets()
def __process_last_mentions(self):
mentions = list()
msg_id = self.db.get_last_msg_id()
if msg_id is None:
mentions = self.api.mentions_timeline(count=10)
else:
mentions = self.api.mentions_timeline(since_id=msg_id, count=10)
for tweet in mentions:
print(tweet.text)
user_data = self.db.get_user_data(tweet.author.id_str)
if user_data is None:
self.__process_new_user(tweet)
else:
self.tweets.append("Your number, @{0}, is {1}".format(
user_data['name']), user_data['number'])
def __process_new_user(self, tweet):
if tweet is None:
print("Invalid tweet - it is empty!")
return
gen = generator.Generator()
number = gen.generate(tweet)
if number is None:
return
# user_id = tweet.author.id_str
user_name = tweet.author.screen_name
# user_data = {'user_id': user_id, 'name': user_name, 'number': number}
# self.db.add_user(user_data)
self.tweets.append("Hi @{0}. I have a number for you: {1}".format(
user_name, number))
def __process_search(self):
pass
def __send_tweets(self):
for tweet in self.tweets:
print(tweet)
if __name__ == '__main__':
print("Start RandBot")
bot = RandBot()
bot.run()
|
__author__ = 'Antony Cherepanov'
import tweepy
from src import dbhandler
from src import generator
class RandBot(object):
def __init__(self):
self.db = dbhandler.DBHandler()
self.auth = tweepy.OAuthHandler(*(self.db.get_consumer_data()))
self.auth.set_access_token(*(self.db.get_access_token_data()))
self.api = tweepy.API(self.auth)
def run(self):
public_tweets = self.api.home_timeline()
for tweet in public_tweets:
print(tweet.text)
if __name__ == '__main__':
print("Start RandBot")
bot = RandBot()
bot.run()
Add functions to process tweets__author__ = 'Antony Cherepanov'
import tweepy
from src import dbhandler
from src import generator
class RandBot(object):
def __init__(self):
self.tweets = list()
self.db = dbhandler.DBHandler()
self.auth = tweepy.OAuthHandler(*(self.db.get_consumer_data()))
self.auth.set_access_token(*(self.db.get_access_token_data()))
self.api = tweepy.API(self.auth)
def run(self):
self.__process_last_mentions()
self.__process_search()
self.__send_tweets()
def __process_last_mentions(self):
mentions = list()
msg_id = self.db.get_last_msg_id()
if msg_id is None:
mentions = self.api.mentions_timeline(count=10)
else:
mentions = self.api.mentions_timeline(since_id=msg_id, count=10)
for tweet in mentions:
print(tweet.text)
user_data = self.db.get_user_data(tweet.author.id_str)
if user_data is None:
self.__process_new_user(tweet)
else:
self.tweets.append("Your number, @{0}, is {1}".format(
user_data['name']), user_data['number'])
def __process_new_user(self, tweet):
if tweet is None:
print("Invalid tweet - it is empty!")
return
gen = generator.Generator()
number = gen.generate(tweet)
if number is None:
return
# user_id = tweet.author.id_str
user_name = tweet.author.screen_name
# user_data = {'user_id': user_id, 'name': user_name, 'number': number}
# self.db.add_user(user_data)
self.tweets.append("Hi @{0}. I have a number for you: {1}".format(
user_name, number))
def __process_search(self):
pass
def __send_tweets(self):
for tweet in self.tweets:
print(tweet)
if __name__ == '__main__':
print("Start RandBot")
bot = RandBot()
bot.run()
|
<commit_before>__author__ = 'Antony Cherepanov'
import tweepy
from src import dbhandler
from src import generator
class RandBot(object):
def __init__(self):
self.db = dbhandler.DBHandler()
self.auth = tweepy.OAuthHandler(*(self.db.get_consumer_data()))
self.auth.set_access_token(*(self.db.get_access_token_data()))
self.api = tweepy.API(self.auth)
def run(self):
public_tweets = self.api.home_timeline()
for tweet in public_tweets:
print(tweet.text)
if __name__ == '__main__':
print("Start RandBot")
bot = RandBot()
bot.run()
<commit_msg>Add functions to process tweets<commit_after>__author__ = 'Antony Cherepanov'
import tweepy
from src import dbhandler
from src import generator
class RandBot(object):
def __init__(self):
self.tweets = list()
self.db = dbhandler.DBHandler()
self.auth = tweepy.OAuthHandler(*(self.db.get_consumer_data()))
self.auth.set_access_token(*(self.db.get_access_token_data()))
self.api = tweepy.API(self.auth)
def run(self):
self.__process_last_mentions()
self.__process_search()
self.__send_tweets()
def __process_last_mentions(self):
mentions = list()
msg_id = self.db.get_last_msg_id()
if msg_id is None:
mentions = self.api.mentions_timeline(count=10)
else:
mentions = self.api.mentions_timeline(since_id=msg_id, count=10)
for tweet in mentions:
print(tweet.text)
user_data = self.db.get_user_data(tweet.author.id_str)
if user_data is None:
self.__process_new_user(tweet)
else:
self.tweets.append("Your number, @{0}, is {1}".format(
user_data['name']), user_data['number'])
def __process_new_user(self, tweet):
if tweet is None:
print("Invalid tweet - it is empty!")
return
gen = generator.Generator()
number = gen.generate(tweet)
if number is None:
return
# user_id = tweet.author.id_str
user_name = tweet.author.screen_name
# user_data = {'user_id': user_id, 'name': user_name, 'number': number}
# self.db.add_user(user_data)
self.tweets.append("Hi @{0}. I have a number for you: {1}".format(
user_name, number))
def __process_search(self):
pass
def __send_tweets(self):
for tweet in self.tweets:
print(tweet)
if __name__ == '__main__':
print("Start RandBot")
bot = RandBot()
bot.run()
|
28d8e67420b64a126db2c14e5532323b0782575b
|
linter.py
|
linter.py
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ben Edwards
# Copyright (c) 2015 Ben Edwards
#
# License: MIT
#
"""This module exports the PugLint plugin class."""
from SublimeLinter.lint import NodeLinter, util, highlight
class PugLint(NodeLinter):
"""Provides an interface to pug-lint."""
npm_name = 'pug-lint'
cmd = 'pug-lint @ *'
executable = None
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 2.1.1'
regex = r'^.+?:(?P<line>\d+)(:(?P<col>\d+) | )(?P<message>.+)'
multiline = False
tempfile_suffix = 'pug'
error_stream = util.STREAM_BOTH
config_file = ('--config', '.pug-lintrc', '.pug-lint.json', '.jade-lintrc', '.jade-lint.json', '~')
defaults = {
'selector': 'text.pug, source.pypug, text.jade',
'--reporter=': 'inline'
}
default_type = highlight.WARNING
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ben Edwards
# Copyright (c) 2015 Ben Edwards
#
# License: MIT
#
"""This module exports the PugLint plugin class."""
from SublimeLinter.lint import NodeLinter, util, highlight
class PugLint(NodeLinter):
    """Provides an interface to pug-lint."""

    # Command template handed to SublimeLinter; presumably '@' is replaced
    # with the file being linted and '*' with extra user args — confirm
    # against the SublimeLinter NodeLinter docs.
    cmd = 'pug-lint @ *'
    # Parses output of the form "path:line:col message" or "path:line message";
    # captures line, optional column, and the message text.
    regex = r'^.+?:(?P<line>\d+)(:(?P<col>\d+) | )(?P<message>.+)'
    # Each problem is reported on a single output line.
    multiline = False
    # Lint a temporary copy of the buffer saved with a .pug extension.
    tempfile_suffix = 'pug'
    # Read linter output from both stdout and stderr.
    error_stream = util.STREAM_BOTH
    defaults = {
        # Syntax scopes this linter applies to (Pug/Jade variants).
        'selector': 'text.pug, source.pypug, text.jade',
        # Use pug-lint's inline reporter so output matches `regex` above.
        '--reporter=': 'inline'
    }
    # Report all problems as warnings rather than errors.
    default_type = highlight.WARNING
|
Remove lines that cause error in Sublime Text console
|
Remove lines that cause error in Sublime Text console
Logs:
puglint: Defining 'cls.version_args' has no effect. Please cleanup and remove this setting.
puglint: Defining 'cls.version_re' has no effect. Please cleanup and remove this setting.
puglint: Defining 'cls.version_requirement' has no effect. Please cleanup and remove this setting.
puglint: Defining 'cls.npm_name' has no effect. Please cleanup and remove this setting.
puglint: Defining 'cls.executable' has no effect. Please cleanup and remove this setting.
puglint: Defining 'cls.config_file' has no effect. Please cleanup and remove this setting.
|
Python
|
mit
|
benedfit/SublimeLinter-contrib-jade-lint,benedfit/SublimeLinter-contrib-pug-lint
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ben Edwards
# Copyright (c) 2015 Ben Edwards
#
# License: MIT
#
"""This module exports the PugLint plugin class."""
from SublimeLinter.lint import NodeLinter, util, highlight
class PugLint(NodeLinter):
"""Provides an interface to pug-lint."""
npm_name = 'pug-lint'
cmd = 'pug-lint @ *'
executable = None
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 2.1.1'
regex = r'^.+?:(?P<line>\d+)(:(?P<col>\d+) | )(?P<message>.+)'
multiline = False
tempfile_suffix = 'pug'
error_stream = util.STREAM_BOTH
config_file = ('--config', '.pug-lintrc', '.pug-lint.json', '.jade-lintrc', '.jade-lint.json', '~')
defaults = {
'selector': 'text.pug, source.pypug, text.jade',
'--reporter=': 'inline'
}
default_type = highlight.WARNING
Remove lines that cause error in Sublime Text console
Logs:
puglint: Defining 'cls.version_args' has no effect. Please cleanup and remove this setting.
puglint: Defining 'cls.version_re' has no effect. Please cleanup and remove this setting.
puglint: Defining 'cls.version_requirement' has no effect. Please cleanup and remove this setting.
puglint: Defining 'cls.npm_name' has no effect. Please cleanup and remove this setting.
puglint: Defining 'cls.executable' has no effect. Please cleanup and remove this setting.
puglint: Defining 'cls.config_file' has no effect. Please cleanup and remove this setting.
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ben Edwards
# Copyright (c) 2015 Ben Edwards
#
# License: MIT
#
"""This module exports the PugLint plugin class."""
from SublimeLinter.lint import NodeLinter, util, highlight
class PugLint(NodeLinter):
"""Provides an interface to pug-lint."""
cmd = 'pug-lint @ *'
regex = r'^.+?:(?P<line>\d+)(:(?P<col>\d+) | )(?P<message>.+)'
multiline = False
tempfile_suffix = 'pug'
error_stream = util.STREAM_BOTH
defaults = {
'selector': 'text.pug, source.pypug, text.jade',
'--reporter=': 'inline'
}
default_type = highlight.WARNING
|
<commit_before>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ben Edwards
# Copyright (c) 2015 Ben Edwards
#
# License: MIT
#
"""This module exports the PugLint plugin class."""
from SublimeLinter.lint import NodeLinter, util, highlight
class PugLint(NodeLinter):
"""Provides an interface to pug-lint."""
npm_name = 'pug-lint'
cmd = 'pug-lint @ *'
executable = None
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 2.1.1'
regex = r'^.+?:(?P<line>\d+)(:(?P<col>\d+) | )(?P<message>.+)'
multiline = False
tempfile_suffix = 'pug'
error_stream = util.STREAM_BOTH
config_file = ('--config', '.pug-lintrc', '.pug-lint.json', '.jade-lintrc', '.jade-lint.json', '~')
defaults = {
'selector': 'text.pug, source.pypug, text.jade',
'--reporter=': 'inline'
}
default_type = highlight.WARNING
<commit_msg>Remove lines that cause error in Sublime Text console
Logs:
puglint: Defining 'cls.version_args' has no effect. Please cleanup and remove this setting.
puglint: Defining 'cls.version_re' has no effect. Please cleanup and remove this setting.
puglint: Defining 'cls.version_requirement' has no effect. Please cleanup and remove this setting.
puglint: Defining 'cls.npm_name' has no effect. Please cleanup and remove this setting.
puglint: Defining 'cls.executable' has no effect. Please cleanup and remove this setting.
puglint: Defining 'cls.config_file' has no effect. Please cleanup and remove this setting.<commit_after>
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ben Edwards
# Copyright (c) 2015 Ben Edwards
#
# License: MIT
#
"""This module exports the PugLint plugin class."""
from SublimeLinter.lint import NodeLinter, util, highlight
class PugLint(NodeLinter):
"""Provides an interface to pug-lint."""
cmd = 'pug-lint @ *'
regex = r'^.+?:(?P<line>\d+)(:(?P<col>\d+) | )(?P<message>.+)'
multiline = False
tempfile_suffix = 'pug'
error_stream = util.STREAM_BOTH
defaults = {
'selector': 'text.pug, source.pypug, text.jade',
'--reporter=': 'inline'
}
default_type = highlight.WARNING
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ben Edwards
# Copyright (c) 2015 Ben Edwards
#
# License: MIT
#
"""This module exports the PugLint plugin class."""
from SublimeLinter.lint import NodeLinter, util, highlight
class PugLint(NodeLinter):
"""Provides an interface to pug-lint."""
npm_name = 'pug-lint'
cmd = 'pug-lint @ *'
executable = None
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 2.1.1'
regex = r'^.+?:(?P<line>\d+)(:(?P<col>\d+) | )(?P<message>.+)'
multiline = False
tempfile_suffix = 'pug'
error_stream = util.STREAM_BOTH
config_file = ('--config', '.pug-lintrc', '.pug-lint.json', '.jade-lintrc', '.jade-lint.json', '~')
defaults = {
'selector': 'text.pug, source.pypug, text.jade',
'--reporter=': 'inline'
}
default_type = highlight.WARNING
Remove lines that cause error in Sublime Text console
Logs:
puglint: Defining 'cls.version_args' has no effect. Please cleanup and remove this setting.
puglint: Defining 'cls.version_re' has no effect. Please cleanup and remove this setting.
puglint: Defining 'cls.version_requirement' has no effect. Please cleanup and remove this setting.
puglint: Defining 'cls.npm_name' has no effect. Please cleanup and remove this setting.
puglint: Defining 'cls.executable' has no effect. Please cleanup and remove this setting.
puglint: Defining 'cls.config_file' has no effect. Please cleanup and remove this setting.#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ben Edwards
# Copyright (c) 2015 Ben Edwards
#
# License: MIT
#
"""This module exports the PugLint plugin class."""
from SublimeLinter.lint import NodeLinter, util, highlight
class PugLint(NodeLinter):
"""Provides an interface to pug-lint."""
cmd = 'pug-lint @ *'
regex = r'^.+?:(?P<line>\d+)(:(?P<col>\d+) | )(?P<message>.+)'
multiline = False
tempfile_suffix = 'pug'
error_stream = util.STREAM_BOTH
defaults = {
'selector': 'text.pug, source.pypug, text.jade',
'--reporter=': 'inline'
}
default_type = highlight.WARNING
|
<commit_before>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ben Edwards
# Copyright (c) 2015 Ben Edwards
#
# License: MIT
#
"""This module exports the PugLint plugin class."""
from SublimeLinter.lint import NodeLinter, util, highlight
class PugLint(NodeLinter):
"""Provides an interface to pug-lint."""
npm_name = 'pug-lint'
cmd = 'pug-lint @ *'
executable = None
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 2.1.1'
regex = r'^.+?:(?P<line>\d+)(:(?P<col>\d+) | )(?P<message>.+)'
multiline = False
tempfile_suffix = 'pug'
error_stream = util.STREAM_BOTH
config_file = ('--config', '.pug-lintrc', '.pug-lint.json', '.jade-lintrc', '.jade-lint.json', '~')
defaults = {
'selector': 'text.pug, source.pypug, text.jade',
'--reporter=': 'inline'
}
default_type = highlight.WARNING
<commit_msg>Remove lines that cause error in Sublime Text console
Logs:
puglint: Defining 'cls.version_args' has no effect. Please cleanup and remove this setting.
puglint: Defining 'cls.version_re' has no effect. Please cleanup and remove this setting.
puglint: Defining 'cls.version_requirement' has no effect. Please cleanup and remove this setting.
puglint: Defining 'cls.npm_name' has no effect. Please cleanup and remove this setting.
puglint: Defining 'cls.executable' has no effect. Please cleanup and remove this setting.
puglint: Defining 'cls.config_file' has no effect. Please cleanup and remove this setting.<commit_after>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ben Edwards
# Copyright (c) 2015 Ben Edwards
#
# License: MIT
#
"""This module exports the PugLint plugin class."""
from SublimeLinter.lint import NodeLinter, util, highlight
class PugLint(NodeLinter):
"""Provides an interface to pug-lint."""
cmd = 'pug-lint @ *'
regex = r'^.+?:(?P<line>\d+)(:(?P<col>\d+) | )(?P<message>.+)'
multiline = False
tempfile_suffix = 'pug'
error_stream = util.STREAM_BOTH
defaults = {
'selector': 'text.pug, source.pypug, text.jade',
'--reporter=': 'inline'
}
default_type = highlight.WARNING
|
805c6097b3dc7e7e2468235a9c28d159cb99f187
|
satchless/cart/__init__.py
|
satchless/cart/__init__.py
|
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from .handler import AddToCartHandler
add_to_cart_handler = AddToCartHandler('cart')
if not getattr(settings, 'SATCHLESS_DEFAULT_CURRENCY', None):
raise ImproperlyConfigured('You need to configure '
'SATCHLESS_DEFAULT_CURRENCY')
|
class InvalidQuantityException(Exception):
def __init__(self, reason, quantity_delta):
self.reason = reason
self.quantity_delta = quantity_delta
def __str__(self):
return self.reason
|
Add cart quantity exception, remove old handler
|
Add cart quantity exception, remove old handler
|
Python
|
bsd-3-clause
|
taedori81/satchless
|
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from .handler import AddToCartHandler
add_to_cart_handler = AddToCartHandler('cart')
if not getattr(settings, 'SATCHLESS_DEFAULT_CURRENCY', None):
raise ImproperlyConfigured('You need to configure '
'SATCHLESS_DEFAULT_CURRENCY')Add cart quantity exception, remove old handler
|
class InvalidQuantityException(Exception):
def __init__(self, reason, quantity_delta):
self.reason = reason
self.quantity_delta = quantity_delta
def __str__(self):
return self.reason
|
<commit_before>from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from .handler import AddToCartHandler
add_to_cart_handler = AddToCartHandler('cart')
if not getattr(settings, 'SATCHLESS_DEFAULT_CURRENCY', None):
raise ImproperlyConfigured('You need to configure '
'SATCHLESS_DEFAULT_CURRENCY')<commit_msg>Add cart quantity exception, remove old handler<commit_after>
|
class InvalidQuantityException(Exception):
def __init__(self, reason, quantity_delta):
self.reason = reason
self.quantity_delta = quantity_delta
def __str__(self):
return self.reason
|
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from .handler import AddToCartHandler
add_to_cart_handler = AddToCartHandler('cart')
if not getattr(settings, 'SATCHLESS_DEFAULT_CURRENCY', None):
raise ImproperlyConfigured('You need to configure '
'SATCHLESS_DEFAULT_CURRENCY')Add cart quantity exception, remove old handlerclass InvalidQuantityException(Exception):
def __init__(self, reason, quantity_delta):
self.reason = reason
self.quantity_delta = quantity_delta
def __str__(self):
return self.reason
|
<commit_before>from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from .handler import AddToCartHandler
add_to_cart_handler = AddToCartHandler('cart')
if not getattr(settings, 'SATCHLESS_DEFAULT_CURRENCY', None):
raise ImproperlyConfigured('You need to configure '
'SATCHLESS_DEFAULT_CURRENCY')<commit_msg>Add cart quantity exception, remove old handler<commit_after>class InvalidQuantityException(Exception):
def __init__(self, reason, quantity_delta):
self.reason = reason
self.quantity_delta = quantity_delta
def __str__(self):
return self.reason
|
0829ca1bb133841efd9ff1753384b0895c1be924
|
nightbus/utils.py
|
nightbus/utils.py
|
# Copyright 2017 Codethink Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Utility functions.'''
import itertools
def ensure_list(string_or_list_or_none, separator=None):
if isinstance(string_or_list_or_none, str):
if separator:
return string_or_list_or_none.split(separator)
else:
return [string_or_list_or_none]
else:
if separator:
return list(itertools.chain.from_iterable(
item.split(separator) for item in string_or_list_or_none))
else:
return string_or_list_or_none or []
|
# Copyright 2017 Codethink Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Utility functions.'''
import itertools
def ensure_list(string_or_list_or_none, separator=None):
if isinstance(string_or_list_or_none, str):
if separator:
return string_or_list_or_none.split(separator)
else:
return [string_or_list_or_none]
elif string_or_list_or_none is not None:
if separator:
return list(itertools.chain.from_iterable(
item.split(separator) for item in string_or_list_or_none))
else:
return string_or_list_or_none
else:
return []
|
Fix crash when no commandline args are passed
|
Fix crash when no commandline args are passed
Regression from 3513da19363b8b4564f199b469d13486996ede57
|
Python
|
apache-2.0
|
ssssam/nightbus,ssssam/nightbus
|
# Copyright 2017 Codethink Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Utility functions.'''
import itertools
def ensure_list(string_or_list_or_none, separator=None):
if isinstance(string_or_list_or_none, str):
if separator:
return string_or_list_or_none.split(separator)
else:
return [string_or_list_or_none]
else:
if separator:
return list(itertools.chain.from_iterable(
item.split(separator) for item in string_or_list_or_none))
else:
return string_or_list_or_none or []
Fix crash when no commandline args are passed
Regression from 3513da19363b8b4564f199b469d13486996ede57
|
# Copyright 2017 Codethink Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Utility functions.'''
import itertools
def ensure_list(string_or_list_or_none, separator=None):
if isinstance(string_or_list_or_none, str):
if separator:
return string_or_list_or_none.split(separator)
else:
return [string_or_list_or_none]
elif string_or_list_or_none is not None:
if separator:
return list(itertools.chain.from_iterable(
item.split(separator) for item in string_or_list_or_none))
else:
return string_or_list_or_none
else:
return []
|
<commit_before># Copyright 2017 Codethink Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Utility functions.'''
import itertools
def ensure_list(string_or_list_or_none, separator=None):
if isinstance(string_or_list_or_none, str):
if separator:
return string_or_list_or_none.split(separator)
else:
return [string_or_list_or_none]
else:
if separator:
return list(itertools.chain.from_iterable(
item.split(separator) for item in string_or_list_or_none))
else:
return string_or_list_or_none or []
<commit_msg>Fix crash when no commandline args are passed
Regression from 3513da19363b8b4564f199b469d13486996ede57<commit_after>
|
# Copyright 2017 Codethink Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Utility functions.'''
import itertools
def ensure_list(string_or_list_or_none, separator=None):
if isinstance(string_or_list_or_none, str):
if separator:
return string_or_list_or_none.split(separator)
else:
return [string_or_list_or_none]
elif string_or_list_or_none is not None:
if separator:
return list(itertools.chain.from_iterable(
item.split(separator) for item in string_or_list_or_none))
else:
return string_or_list_or_none
else:
return []
|
# Copyright 2017 Codethink Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Utility functions.'''
import itertools
def ensure_list(string_or_list_or_none, separator=None):
if isinstance(string_or_list_or_none, str):
if separator:
return string_or_list_or_none.split(separator)
else:
return [string_or_list_or_none]
else:
if separator:
return list(itertools.chain.from_iterable(
item.split(separator) for item in string_or_list_or_none))
else:
return string_or_list_or_none or []
Fix crash when no commandline args are passed
Regression from 3513da19363b8b4564f199b469d13486996ede57# Copyright 2017 Codethink Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Utility functions.'''
import itertools
def ensure_list(string_or_list_or_none, separator=None):
if isinstance(string_or_list_or_none, str):
if separator:
return string_or_list_or_none.split(separator)
else:
return [string_or_list_or_none]
elif string_or_list_or_none is not None:
if separator:
return list(itertools.chain.from_iterable(
item.split(separator) for item in string_or_list_or_none))
else:
return string_or_list_or_none
else:
return []
|
<commit_before># Copyright 2017 Codethink Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Utility functions.'''
import itertools
def ensure_list(string_or_list_or_none, separator=None):
if isinstance(string_or_list_or_none, str):
if separator:
return string_or_list_or_none.split(separator)
else:
return [string_or_list_or_none]
else:
if separator:
return list(itertools.chain.from_iterable(
item.split(separator) for item in string_or_list_or_none))
else:
return string_or_list_or_none or []
<commit_msg>Fix crash when no commandline args are passed
Regression from 3513da19363b8b4564f199b469d13486996ede57<commit_after># Copyright 2017 Codethink Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Utility functions.'''
import itertools
def ensure_list(string_or_list_or_none, separator=None):
if isinstance(string_or_list_or_none, str):
if separator:
return string_or_list_or_none.split(separator)
else:
return [string_or_list_or_none]
elif string_or_list_or_none is not None:
if separator:
return list(itertools.chain.from_iterable(
item.split(separator) for item in string_or_list_or_none))
else:
return string_or_list_or_none
else:
return []
|
06ab217f49e00bd52c9f8b632db904e1ebe7256d
|
pycroft/helpers/date.py
|
pycroft/helpers/date.py
|
from datetime import timedelta
from datetime import date
def diff_month(d1: date, d2: date) -> int:
"""Calculate the difference in months ignoring the days
If d1 > d2, the result is positive.
"""
return (d1.year - d2.year) * 12 + d1.month - d2.month
def last_day_of_month(d):
next_month = d.replace(day=28) + timedelta(4)
return d.replace(day=(next_month - timedelta(days=next_month.day)).day)
|
from calendar import monthrange
from datetime import date
def diff_month(d1: date, d2: date) -> int:
"""Calculate the difference in months ignoring the days
If d1 > d2, the result is positive.
"""
return (d1.year - d2.year) * 12 + d1.month - d2.month
def last_day_of_month(d: date) -> date:
_, num_days = monthrange(d.year, d.month)
return d.replace(day=num_days)
|
Use builtin function to find last month
|
Use builtin function to find last month
|
Python
|
apache-2.0
|
agdsn/pycroft,lukasjuhrich/pycroft,agdsn/pycroft,agdsn/pycroft,lukasjuhrich/pycroft,lukasjuhrich/pycroft,lukasjuhrich/pycroft,agdsn/pycroft,agdsn/pycroft
|
from datetime import timedelta
from datetime import date
def diff_month(d1: date, d2: date) -> int:
"""Calculate the difference in months ignoring the days
If d1 > d2, the result is positive.
"""
return (d1.year - d2.year) * 12 + d1.month - d2.month
def last_day_of_month(d):
next_month = d.replace(day=28) + timedelta(4)
return d.replace(day=(next_month - timedelta(days=next_month.day)).day)
Use builtin function to find last month
|
from calendar import monthrange
from datetime import date
def diff_month(d1: date, d2: date) -> int:
"""Calculate the difference in months ignoring the days
If d1 > d2, the result is positive.
"""
return (d1.year - d2.year) * 12 + d1.month - d2.month
def last_day_of_month(d: date) -> date:
_, num_days = monthrange(d.year, d.month)
return d.replace(day=num_days)
|
<commit_before>from datetime import timedelta
from datetime import date
def diff_month(d1: date, d2: date) -> int:
"""Calculate the difference in months ignoring the days
If d1 > d2, the result is positive.
"""
return (d1.year - d2.year) * 12 + d1.month - d2.month
def last_day_of_month(d):
next_month = d.replace(day=28) + timedelta(4)
return d.replace(day=(next_month - timedelta(days=next_month.day)).day)
<commit_msg>Use builtin function to find last month<commit_after>
|
from calendar import monthrange
from datetime import date
def diff_month(d1: date, d2: date) -> int:
"""Calculate the difference in months ignoring the days
If d1 > d2, the result is positive.
"""
return (d1.year - d2.year) * 12 + d1.month - d2.month
def last_day_of_month(d: date) -> date:
_, num_days = monthrange(d.year, d.month)
return d.replace(day=num_days)
|
from datetime import timedelta
from datetime import date
def diff_month(d1: date, d2: date) -> int:
"""Calculate the difference in months ignoring the days
If d1 > d2, the result is positive.
"""
return (d1.year - d2.year) * 12 + d1.month - d2.month
def last_day_of_month(d):
next_month = d.replace(day=28) + timedelta(4)
return d.replace(day=(next_month - timedelta(days=next_month.day)).day)
Use builtin function to find last monthfrom calendar import monthrange
from datetime import date
def diff_month(d1: date, d2: date) -> int:
"""Calculate the difference in months ignoring the days
If d1 > d2, the result is positive.
"""
return (d1.year - d2.year) * 12 + d1.month - d2.month
def last_day_of_month(d: date) -> date:
_, num_days = monthrange(d.year, d.month)
return d.replace(day=num_days)
|
<commit_before>from datetime import timedelta
from datetime import date
def diff_month(d1: date, d2: date) -> int:
"""Calculate the difference in months ignoring the days
If d1 > d2, the result is positive.
"""
return (d1.year - d2.year) * 12 + d1.month - d2.month
def last_day_of_month(d):
next_month = d.replace(day=28) + timedelta(4)
return d.replace(day=(next_month - timedelta(days=next_month.day)).day)
<commit_msg>Use builtin function to find last month<commit_after>from calendar import monthrange
from datetime import date
def diff_month(d1: date, d2: date) -> int:
"""Calculate the difference in months ignoring the days
If d1 > d2, the result is positive.
"""
return (d1.year - d2.year) * 12 + d1.month - d2.month
def last_day_of_month(d: date) -> date:
_, num_days = monthrange(d.year, d.month)
return d.replace(day=num_days)
|
b47b7bfe3a193c57b69ff4d85069c08d00c0dd35
|
models.py
|
models.py
|
from django import dispatch
from django.db import transaction
from django.db.models import signals as model_signals
from . import client
from .pool import pool
from resolwe.flow import models as flow_models, serializers as flow_serializers, views as flow_views
# Register all viewsets with the query observer pool.
# TODO: This should be moved to a separate application.
pool.register_viewset(flow_views.ProjectViewSet)
pool.register_viewset(flow_views.DataViewSet)
# Setup model notifications.
observer_client = client.QueryObserverClient()
@dispatch.receiver(model_signals.post_save)
def model_post_save(sender, instance, created=False, **kwargs):
"""
Signal emitted after any model is saved via Django ORM.
:param sender: Model class that was saved
:param instance: The actual instance that was saved
:param created: True if a new row was created
"""
def notify():
table = sender._meta.db_table
if created:
observer_client.notify_table_insert(table)
else:
observer_client.notify_table_update(table)
transaction.on_commit(notify)
@dispatch.receiver(model_signals.post_delete)
def model_post_delete(sender, instance, **kwargs):
"""
Signal emitted after any model is deleted via Django ORM.
:param sender: Model class that was deleted
:param instance: The actual instance that was removed
"""
def notify():
table = sender._meta.db_table
observer_client.notify_table_remove(table)
transaction.on_commit(notify)
|
from django import dispatch
from django.db import transaction
from django.db.models import signals as model_signals
from . import client
from .pool import pool
from resolwe.flow import models as flow_models, serializers as flow_serializers, views as flow_views
# Register all viewsets with the query observer pool.
# TODO: This should be moved to a separate application.
pool.register_viewset(flow_views.CollectionViewSet)
pool.register_viewset(flow_views.DataViewSet)
# Setup model notifications.
observer_client = client.QueryObserverClient()
@dispatch.receiver(model_signals.post_save)
def model_post_save(sender, instance, created=False, **kwargs):
"""
Signal emitted after any model is saved via Django ORM.
:param sender: Model class that was saved
:param instance: The actual instance that was saved
:param created: True if a new row was created
"""
def notify():
table = sender._meta.db_table
if created:
observer_client.notify_table_insert(table)
else:
observer_client.notify_table_update(table)
transaction.on_commit(notify)
@dispatch.receiver(model_signals.post_delete)
def model_post_delete(sender, instance, **kwargs):
"""
Signal emitted after any model is deleted via Django ORM.
:param sender: Model class that was deleted
:param instance: The actual instance that was removed
"""
def notify():
table = sender._meta.db_table
observer_client.notify_table_remove(table)
transaction.on_commit(notify)
|
Use collection instead of project
|
Use collection instead of project
|
Python
|
apache-2.0
|
genialis/django-rest-framework-reactive,genialis/django-rest-framework-reactive
|
from django import dispatch
from django.db import transaction
from django.db.models import signals as model_signals
from . import client
from .pool import pool
from resolwe.flow import models as flow_models, serializers as flow_serializers, views as flow_views
# Register all viewsets with the query observer pool.
# TODO: This should be moved to a separate application.
pool.register_viewset(flow_views.ProjectViewSet)
pool.register_viewset(flow_views.DataViewSet)
# Setup model notifications.
observer_client = client.QueryObserverClient()
@dispatch.receiver(model_signals.post_save)
def model_post_save(sender, instance, created=False, **kwargs):
"""
Signal emitted after any model is saved via Django ORM.
:param sender: Model class that was saved
:param instance: The actual instance that was saved
:param created: True if a new row was created
"""
def notify():
table = sender._meta.db_table
if created:
observer_client.notify_table_insert(table)
else:
observer_client.notify_table_update(table)
transaction.on_commit(notify)
@dispatch.receiver(model_signals.post_delete)
def model_post_delete(sender, instance, **kwargs):
"""
Signal emitted after any model is deleted via Django ORM.
:param sender: Model class that was deleted
:param instance: The actual instance that was removed
"""
def notify():
table = sender._meta.db_table
observer_client.notify_table_remove(table)
transaction.on_commit(notify)
Use collection instead of project
|
from django import dispatch
from django.db import transaction
from django.db.models import signals as model_signals
from . import client
from .pool import pool
from resolwe.flow import models as flow_models, serializers as flow_serializers, views as flow_views
# Register all viewsets with the query observer pool.
# TODO: This should be moved to a separate application.
pool.register_viewset(flow_views.CollectionViewSet)
pool.register_viewset(flow_views.DataViewSet)
# Setup model notifications.
observer_client = client.QueryObserverClient()
@dispatch.receiver(model_signals.post_save)
def model_post_save(sender, instance, created=False, **kwargs):
"""
Signal emitted after any model is saved via Django ORM.
:param sender: Model class that was saved
:param instance: The actual instance that was saved
:param created: True if a new row was created
"""
def notify():
table = sender._meta.db_table
if created:
observer_client.notify_table_insert(table)
else:
observer_client.notify_table_update(table)
transaction.on_commit(notify)
@dispatch.receiver(model_signals.post_delete)
def model_post_delete(sender, instance, **kwargs):
"""
Signal emitted after any model is deleted via Django ORM.
:param sender: Model class that was deleted
:param instance: The actual instance that was removed
"""
def notify():
table = sender._meta.db_table
observer_client.notify_table_remove(table)
transaction.on_commit(notify)
|
<commit_before>from django import dispatch
from django.db import transaction
from django.db.models import signals as model_signals
from . import client
from .pool import pool
from resolwe.flow import models as flow_models, serializers as flow_serializers, views as flow_views
# Register all viewsets with the query observer pool.
# TODO: This should be moved to a separate application.
pool.register_viewset(flow_views.ProjectViewSet)
pool.register_viewset(flow_views.DataViewSet)
# Setup model notifications.
observer_client = client.QueryObserverClient()
@dispatch.receiver(model_signals.post_save)
def model_post_save(sender, instance, created=False, **kwargs):
"""
Signal emitted after any model is saved via Django ORM.
:param sender: Model class that was saved
:param instance: The actual instance that was saved
:param created: True if a new row was created
"""
def notify():
table = sender._meta.db_table
if created:
observer_client.notify_table_insert(table)
else:
observer_client.notify_table_update(table)
transaction.on_commit(notify)
@dispatch.receiver(model_signals.post_delete)
def model_post_delete(sender, instance, **kwargs):
"""
Signal emitted after any model is deleted via Django ORM.
:param sender: Model class that was deleted
:param instance: The actual instance that was removed
"""
def notify():
table = sender._meta.db_table
observer_client.notify_table_remove(table)
transaction.on_commit(notify)
<commit_msg>Use collection instead of project<commit_after>
|
from django import dispatch
from django.db import transaction
from django.db.models import signals as model_signals
from . import client
from .pool import pool
from resolwe.flow import models as flow_models, serializers as flow_serializers, views as flow_views
# Register all viewsets with the query observer pool.
# TODO: This should be moved to a separate application.
pool.register_viewset(flow_views.CollectionViewSet)
pool.register_viewset(flow_views.DataViewSet)
# Setup model notifications.
observer_client = client.QueryObserverClient()
@dispatch.receiver(model_signals.post_save)
def model_post_save(sender, instance, created=False, **kwargs):
"""
Signal emitted after any model is saved via Django ORM.
:param sender: Model class that was saved
:param instance: The actual instance that was saved
:param created: True if a new row was created
"""
def notify():
table = sender._meta.db_table
if created:
observer_client.notify_table_insert(table)
else:
observer_client.notify_table_update(table)
transaction.on_commit(notify)
@dispatch.receiver(model_signals.post_delete)
def model_post_delete(sender, instance, **kwargs):
"""
Signal emitted after any model is deleted via Django ORM.
:param sender: Model class that was deleted
:param instance: The actual instance that was removed
"""
def notify():
table = sender._meta.db_table
observer_client.notify_table_remove(table)
transaction.on_commit(notify)
|
from django import dispatch
from django.db import transaction
from django.db.models import signals as model_signals
from . import client
from .pool import pool
from resolwe.flow import models as flow_models, serializers as flow_serializers, views as flow_views
# Register all viewsets with the query observer pool.
# TODO: This should be moved to a separate application.
pool.register_viewset(flow_views.ProjectViewSet)
pool.register_viewset(flow_views.DataViewSet)
# Setup model notifications.
observer_client = client.QueryObserverClient()
@dispatch.receiver(model_signals.post_save)
def model_post_save(sender, instance, created=False, **kwargs):
"""
Signal emitted after any model is saved via Django ORM.
:param sender: Model class that was saved
:param instance: The actual instance that was saved
:param created: True if a new row was created
"""
def notify():
table = sender._meta.db_table
if created:
observer_client.notify_table_insert(table)
else:
observer_client.notify_table_update(table)
transaction.on_commit(notify)
@dispatch.receiver(model_signals.post_delete)
def model_post_delete(sender, instance, **kwargs):
"""
Signal emitted after any model is deleted via Django ORM.
:param sender: Model class that was deleted
:param instance: The actual instance that was removed
"""
def notify():
table = sender._meta.db_table
observer_client.notify_table_remove(table)
transaction.on_commit(notify)
Use collection instead of projectfrom django import dispatch
from django.db import transaction
from django.db.models import signals as model_signals
from . import client
from .pool import pool
from resolwe.flow import models as flow_models, serializers as flow_serializers, views as flow_views
# Register all viewsets with the query observer pool.
# TODO: This should be moved to a separate application.
pool.register_viewset(flow_views.CollectionViewSet)
pool.register_viewset(flow_views.DataViewSet)
# Setup model notifications.
observer_client = client.QueryObserverClient()
@dispatch.receiver(model_signals.post_save)
def model_post_save(sender, instance, created=False, **kwargs):
"""
Signal emitted after any model is saved via Django ORM.
:param sender: Model class that was saved
:param instance: The actual instance that was saved
:param created: True if a new row was created
"""
def notify():
table = sender._meta.db_table
if created:
observer_client.notify_table_insert(table)
else:
observer_client.notify_table_update(table)
transaction.on_commit(notify)
@dispatch.receiver(model_signals.post_delete)
def model_post_delete(sender, instance, **kwargs):
"""
Signal emitted after any model is deleted via Django ORM.
:param sender: Model class that was deleted
:param instance: The actual instance that was removed
"""
def notify():
table = sender._meta.db_table
observer_client.notify_table_remove(table)
transaction.on_commit(notify)
|
<commit_before>from django import dispatch
from django.db import transaction
from django.db.models import signals as model_signals
from . import client
from .pool import pool
from resolwe.flow import models as flow_models, serializers as flow_serializers, views as flow_views
# Register all viewsets with the query observer pool.
# TODO: This should be moved to a separate application.
pool.register_viewset(flow_views.ProjectViewSet)
pool.register_viewset(flow_views.DataViewSet)
# Setup model notifications.
observer_client = client.QueryObserverClient()
@dispatch.receiver(model_signals.post_save)
def model_post_save(sender, instance, created=False, **kwargs):
"""
Signal emitted after any model is saved via Django ORM.
:param sender: Model class that was saved
:param instance: The actual instance that was saved
:param created: True if a new row was created
"""
def notify():
table = sender._meta.db_table
if created:
observer_client.notify_table_insert(table)
else:
observer_client.notify_table_update(table)
transaction.on_commit(notify)
@dispatch.receiver(model_signals.post_delete)
def model_post_delete(sender, instance, **kwargs):
"""
Signal emitted after any model is deleted via Django ORM.
:param sender: Model class that was deleted
:param instance: The actual instance that was removed
"""
def notify():
table = sender._meta.db_table
observer_client.notify_table_remove(table)
transaction.on_commit(notify)
<commit_msg>Use collection instead of project<commit_after>from django import dispatch
from django.db import transaction
from django.db.models import signals as model_signals
from . import client
from .pool import pool
from resolwe.flow import models as flow_models, serializers as flow_serializers, views as flow_views
# Register all viewsets with the query observer pool.
# TODO: This should be moved to a separate application.
pool.register_viewset(flow_views.CollectionViewSet)
pool.register_viewset(flow_views.DataViewSet)
# Setup model notifications.
observer_client = client.QueryObserverClient()
@dispatch.receiver(model_signals.post_save)
def model_post_save(sender, instance, created=False, **kwargs):
"""
Signal emitted after any model is saved via Django ORM.
:param sender: Model class that was saved
:param instance: The actual instance that was saved
:param created: True if a new row was created
"""
def notify():
table = sender._meta.db_table
if created:
observer_client.notify_table_insert(table)
else:
observer_client.notify_table_update(table)
transaction.on_commit(notify)
@dispatch.receiver(model_signals.post_delete)
def model_post_delete(sender, instance, **kwargs):
"""
Signal emitted after any model is deleted via Django ORM.
:param sender: Model class that was deleted
:param instance: The actual instance that was removed
"""
def notify():
table = sender._meta.db_table
observer_client.notify_table_remove(table)
transaction.on_commit(notify)
|
c9efa5897426b6c1f8f0e99185e8b15878a9abd2
|
gallery/urls.py
|
gallery/urls.py
|
from django.conf.urls import url
from . import views
app_name = 'gallery'
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^(?P<slug>[-\w]+)/$', views.GalleryView.as_view(), name='gallery'),
url(r'^(?P<gallery_slug>[-\w]+)/(?P<slug>[-\w]+)/$', views.GalleryImageView.as_view(), name='gallery_image'),
]
|
from django.urls import include, path
from . import views
app_name = 'gallery'
urlpatterns = [
path('', views.IndexView.as_view(), name='index'),
path('<slug:slug>/', views.GalleryView.as_view(), name='gallery'),
path('<slug:gallery_slug>/<slug:slug>/', views.GalleryImageView.as_view(), name='gallery_image'),
]
|
Move gallery urlpatterns to Django 2.0 preferred method
|
Move gallery urlpatterns to Django 2.0 preferred method
|
Python
|
mit
|
evanepio/dotmanca,evanepio/dotmanca,evanepio/dotmanca
|
from django.conf.urls import url
from . import views
app_name = 'gallery'
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^(?P<slug>[-\w]+)/$', views.GalleryView.as_view(), name='gallery'),
url(r'^(?P<gallery_slug>[-\w]+)/(?P<slug>[-\w]+)/$', views.GalleryImageView.as_view(), name='gallery_image'),
]
Move gallery urlpatterns to Django 2.0 preferred method
|
from django.urls import include, path
from . import views
app_name = 'gallery'
urlpatterns = [
path('', views.IndexView.as_view(), name='index'),
path('<slug:slug>/', views.GalleryView.as_view(), name='gallery'),
path('<slug:gallery_slug>/<slug:slug>/', views.GalleryImageView.as_view(), name='gallery_image'),
]
|
<commit_before>from django.conf.urls import url
from . import views
app_name = 'gallery'
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^(?P<slug>[-\w]+)/$', views.GalleryView.as_view(), name='gallery'),
url(r'^(?P<gallery_slug>[-\w]+)/(?P<slug>[-\w]+)/$', views.GalleryImageView.as_view(), name='gallery_image'),
]
<commit_msg>Move gallery urlpatterns to Django 2.0 preferred method<commit_after>
|
from django.urls import include, path
from . import views
app_name = 'gallery'
urlpatterns = [
path('', views.IndexView.as_view(), name='index'),
path('<slug:slug>/', views.GalleryView.as_view(), name='gallery'),
path('<slug:gallery_slug>/<slug:slug>/', views.GalleryImageView.as_view(), name='gallery_image'),
]
|
from django.conf.urls import url
from . import views
app_name = 'gallery'
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^(?P<slug>[-\w]+)/$', views.GalleryView.as_view(), name='gallery'),
url(r'^(?P<gallery_slug>[-\w]+)/(?P<slug>[-\w]+)/$', views.GalleryImageView.as_view(), name='gallery_image'),
]
Move gallery urlpatterns to Django 2.0 preferred methodfrom django.urls import include, path
from . import views
app_name = 'gallery'
urlpatterns = [
path('', views.IndexView.as_view(), name='index'),
path('<slug:slug>/', views.GalleryView.as_view(), name='gallery'),
path('<slug:gallery_slug>/<slug:slug>/', views.GalleryImageView.as_view(), name='gallery_image'),
]
|
<commit_before>from django.conf.urls import url
from . import views
app_name = 'gallery'
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^(?P<slug>[-\w]+)/$', views.GalleryView.as_view(), name='gallery'),
url(r'^(?P<gallery_slug>[-\w]+)/(?P<slug>[-\w]+)/$', views.GalleryImageView.as_view(), name='gallery_image'),
]
<commit_msg>Move gallery urlpatterns to Django 2.0 preferred method<commit_after>from django.urls import include, path
from . import views
app_name = 'gallery'
urlpatterns = [
path('', views.IndexView.as_view(), name='index'),
path('<slug:slug>/', views.GalleryView.as_view(), name='gallery'),
path('<slug:gallery_slug>/<slug:slug>/', views.GalleryImageView.as_view(), name='gallery_image'),
]
|
7f78484fbefc0c193668fffd03b38bf8523e89f6
|
pyecore/notification.py
|
pyecore/notification.py
|
class ENotifer(object):
def notify(self, notification):
notification.notifier = notification.notifier or self
for listener in self._eternal_listener + self.listeners:
listener.notifyChanged(notification)
def enum(enumName, *listValueNames):
"""Clever implementation of an enum like in python
Shameless copy from: http://sametmax.com/faire-des-enums-en-python/
"""
listValueNumbers = range(len(listValueNames))
dictAttrib = dict(zip(listValueNames, listValueNumbers))
dictReverse = dict(zip(listValueNumbers, listValueNames))
dictAttrib["dictReverse"] = dictReverse
mainType = type(enumName, (), dictAttrib)
return mainType
Kind = enum('Kind',
'ADD',
'ADD_MANY',
'MOVE',
'REMOVE',
'REMOVE_MANY',
'SET',
'UNSET')
class Notification(object):
def __init__(self, notifier=None, kind=None, old=None, new=None,
feature=None):
self.notifier = notifier
self.kind = kind
self.old = old
self.new = new
self.feature = feature
def __repr__(self):
return ('[{0}] old={1} new={2} obj={3} #{4}'
.format(Kind.dictReverse[self.kind],
self.old,
self.new,
self.notifier,
self.feature))
class EObserver(object):
def __init__(self, notifier=None, notifyChanged=None):
if notifier:
notifier.listeners.append(self)
if notifyChanged:
self.notifyChanged = notifyChanged
def observe(self, notifier):
notifier.listeners.append(self)
def notifyChanged(self, notification):
pass
|
from enum import Enum, unique
class ENotifer(object):
def notify(self, notification):
notification.notifier = notification.notifier or self
for listener in self._eternal_listener + self.listeners:
listener.notifyChanged(notification)
@unique
class Kind(Enum):
ADD = 0
ADD_MANY = 1
MOVE = 2
REMOVE = 3
REMOVE_MANY = 4
SET = 5
UNSET = 6
class Notification(object):
def __init__(self, notifier=None, kind=None, old=None, new=None,
feature=None):
self.notifier = notifier
self.kind = kind
self.old = old
self.new = new
self.feature = feature
def __repr__(self):
return ('[{0}] old={1} new={2} obj={3} #{4}'
.format(self.kind.name,
self.old,
self.new,
self.notifier,
self.feature))
class EObserver(object):
def __init__(self, notifier=None, notifyChanged=None):
if notifier:
notifier.listeners.append(self)
if notifyChanged:
self.notifyChanged = notifyChanged
def observe(self, notifier):
notifier.listeners.append(self)
def notifyChanged(self, notification):
pass
|
Add better enumeration for Notification
|
Add better enumeration for Notification
The previous 'Kind' enumeration for Notification were using a
home-made-cooked way of dealing with enumeration. The code were got
from an article from the http://sametmax.com/ website (great website
by the way). This new version uses the python 3.4 enumeration module,
but as this module had been introduced in Python 3.4, it means that
the Python 3.3 compatibility is broken.
|
Python
|
bsd-3-clause
|
aranega/pyecore,pyecore/pyecore
|
class ENotifer(object):
def notify(self, notification):
notification.notifier = notification.notifier or self
for listener in self._eternal_listener + self.listeners:
listener.notifyChanged(notification)
def enum(enumName, *listValueNames):
"""Clever implementation of an enum like in python
Shameless copy from: http://sametmax.com/faire-des-enums-en-python/
"""
listValueNumbers = range(len(listValueNames))
dictAttrib = dict(zip(listValueNames, listValueNumbers))
dictReverse = dict(zip(listValueNumbers, listValueNames))
dictAttrib["dictReverse"] = dictReverse
mainType = type(enumName, (), dictAttrib)
return mainType
Kind = enum('Kind',
'ADD',
'ADD_MANY',
'MOVE',
'REMOVE',
'REMOVE_MANY',
'SET',
'UNSET')
class Notification(object):
def __init__(self, notifier=None, kind=None, old=None, new=None,
feature=None):
self.notifier = notifier
self.kind = kind
self.old = old
self.new = new
self.feature = feature
def __repr__(self):
return ('[{0}] old={1} new={2} obj={3} #{4}'
.format(Kind.dictReverse[self.kind],
self.old,
self.new,
self.notifier,
self.feature))
class EObserver(object):
def __init__(self, notifier=None, notifyChanged=None):
if notifier:
notifier.listeners.append(self)
if notifyChanged:
self.notifyChanged = notifyChanged
def observe(self, notifier):
notifier.listeners.append(self)
def notifyChanged(self, notification):
pass
Add better enumeration for Notification
The previous 'Kind' enumeration for Notification were using a
home-made-cooked way of dealing with enumeration. The code were got
from an article from the http://sametmax.com/ website (great website
by the way). This new version uses the python 3.4 enumeration module,
but as this module had been introduced in Python 3.4, it means that
the Python 3.3 compatibility is broken.
|
from enum import Enum, unique
class ENotifer(object):
def notify(self, notification):
notification.notifier = notification.notifier or self
for listener in self._eternal_listener + self.listeners:
listener.notifyChanged(notification)
@unique
class Kind(Enum):
ADD = 0
ADD_MANY = 1
MOVE = 2
REMOVE = 3
REMOVE_MANY = 4
SET = 5
UNSET = 6
class Notification(object):
def __init__(self, notifier=None, kind=None, old=None, new=None,
feature=None):
self.notifier = notifier
self.kind = kind
self.old = old
self.new = new
self.feature = feature
def __repr__(self):
return ('[{0}] old={1} new={2} obj={3} #{4}'
.format(self.kind.name,
self.old,
self.new,
self.notifier,
self.feature))
class EObserver(object):
def __init__(self, notifier=None, notifyChanged=None):
if notifier:
notifier.listeners.append(self)
if notifyChanged:
self.notifyChanged = notifyChanged
def observe(self, notifier):
notifier.listeners.append(self)
def notifyChanged(self, notification):
pass
|
<commit_before>
class ENotifer(object):
def notify(self, notification):
notification.notifier = notification.notifier or self
for listener in self._eternal_listener + self.listeners:
listener.notifyChanged(notification)
def enum(enumName, *listValueNames):
"""Clever implementation of an enum like in python
Shameless copy from: http://sametmax.com/faire-des-enums-en-python/
"""
listValueNumbers = range(len(listValueNames))
dictAttrib = dict(zip(listValueNames, listValueNumbers))
dictReverse = dict(zip(listValueNumbers, listValueNames))
dictAttrib["dictReverse"] = dictReverse
mainType = type(enumName, (), dictAttrib)
return mainType
Kind = enum('Kind',
'ADD',
'ADD_MANY',
'MOVE',
'REMOVE',
'REMOVE_MANY',
'SET',
'UNSET')
class Notification(object):
def __init__(self, notifier=None, kind=None, old=None, new=None,
feature=None):
self.notifier = notifier
self.kind = kind
self.old = old
self.new = new
self.feature = feature
def __repr__(self):
return ('[{0}] old={1} new={2} obj={3} #{4}'
.format(Kind.dictReverse[self.kind],
self.old,
self.new,
self.notifier,
self.feature))
class EObserver(object):
def __init__(self, notifier=None, notifyChanged=None):
if notifier:
notifier.listeners.append(self)
if notifyChanged:
self.notifyChanged = notifyChanged
def observe(self, notifier):
notifier.listeners.append(self)
def notifyChanged(self, notification):
pass
<commit_msg>Add better enumeration for Notification
The previous 'Kind' enumeration for Notification were using a
home-made-cooked way of dealing with enumeration. The code were got
from an article from the http://sametmax.com/ website (great website
by the way). This new version uses the python 3.4 enumeration module,
but as this module had been introduced in Python 3.4, it means that
the Python 3.3 compatibility is broken.<commit_after>
|
from enum import Enum, unique
class ENotifer(object):
def notify(self, notification):
notification.notifier = notification.notifier or self
for listener in self._eternal_listener + self.listeners:
listener.notifyChanged(notification)
@unique
class Kind(Enum):
ADD = 0
ADD_MANY = 1
MOVE = 2
REMOVE = 3
REMOVE_MANY = 4
SET = 5
UNSET = 6
class Notification(object):
def __init__(self, notifier=None, kind=None, old=None, new=None,
feature=None):
self.notifier = notifier
self.kind = kind
self.old = old
self.new = new
self.feature = feature
def __repr__(self):
return ('[{0}] old={1} new={2} obj={3} #{4}'
.format(self.kind.name,
self.old,
self.new,
self.notifier,
self.feature))
class EObserver(object):
def __init__(self, notifier=None, notifyChanged=None):
if notifier:
notifier.listeners.append(self)
if notifyChanged:
self.notifyChanged = notifyChanged
def observe(self, notifier):
notifier.listeners.append(self)
def notifyChanged(self, notification):
pass
|
class ENotifer(object):
def notify(self, notification):
notification.notifier = notification.notifier or self
for listener in self._eternal_listener + self.listeners:
listener.notifyChanged(notification)
def enum(enumName, *listValueNames):
"""Clever implementation of an enum like in python
Shameless copy from: http://sametmax.com/faire-des-enums-en-python/
"""
listValueNumbers = range(len(listValueNames))
dictAttrib = dict(zip(listValueNames, listValueNumbers))
dictReverse = dict(zip(listValueNumbers, listValueNames))
dictAttrib["dictReverse"] = dictReverse
mainType = type(enumName, (), dictAttrib)
return mainType
Kind = enum('Kind',
'ADD',
'ADD_MANY',
'MOVE',
'REMOVE',
'REMOVE_MANY',
'SET',
'UNSET')
class Notification(object):
def __init__(self, notifier=None, kind=None, old=None, new=None,
feature=None):
self.notifier = notifier
self.kind = kind
self.old = old
self.new = new
self.feature = feature
def __repr__(self):
return ('[{0}] old={1} new={2} obj={3} #{4}'
.format(Kind.dictReverse[self.kind],
self.old,
self.new,
self.notifier,
self.feature))
class EObserver(object):
def __init__(self, notifier=None, notifyChanged=None):
if notifier:
notifier.listeners.append(self)
if notifyChanged:
self.notifyChanged = notifyChanged
def observe(self, notifier):
notifier.listeners.append(self)
def notifyChanged(self, notification):
pass
Add better enumeration for Notification
The previous 'Kind' enumeration for Notification were using a
home-made-cooked way of dealing with enumeration. The code were got
from an article from the http://sametmax.com/ website (great website
by the way). This new version uses the python 3.4 enumeration module,
but as this module had been introduced in Python 3.4, it means that
the Python 3.3 compatibility is broken.from enum import Enum, unique
class ENotifer(object):
def notify(self, notification):
notification.notifier = notification.notifier or self
for listener in self._eternal_listener + self.listeners:
listener.notifyChanged(notification)
@unique
class Kind(Enum):
ADD = 0
ADD_MANY = 1
MOVE = 2
REMOVE = 3
REMOVE_MANY = 4
SET = 5
UNSET = 6
class Notification(object):
def __init__(self, notifier=None, kind=None, old=None, new=None,
feature=None):
self.notifier = notifier
self.kind = kind
self.old = old
self.new = new
self.feature = feature
def __repr__(self):
return ('[{0}] old={1} new={2} obj={3} #{4}'
.format(self.kind.name,
self.old,
self.new,
self.notifier,
self.feature))
class EObserver(object):
def __init__(self, notifier=None, notifyChanged=None):
if notifier:
notifier.listeners.append(self)
if notifyChanged:
self.notifyChanged = notifyChanged
def observe(self, notifier):
notifier.listeners.append(self)
def notifyChanged(self, notification):
pass
|
<commit_before>
class ENotifer(object):
def notify(self, notification):
notification.notifier = notification.notifier or self
for listener in self._eternal_listener + self.listeners:
listener.notifyChanged(notification)
def enum(enumName, *listValueNames):
"""Clever implementation of an enum like in python
Shameless copy from: http://sametmax.com/faire-des-enums-en-python/
"""
listValueNumbers = range(len(listValueNames))
dictAttrib = dict(zip(listValueNames, listValueNumbers))
dictReverse = dict(zip(listValueNumbers, listValueNames))
dictAttrib["dictReverse"] = dictReverse
mainType = type(enumName, (), dictAttrib)
return mainType
Kind = enum('Kind',
'ADD',
'ADD_MANY',
'MOVE',
'REMOVE',
'REMOVE_MANY',
'SET',
'UNSET')
class Notification(object):
def __init__(self, notifier=None, kind=None, old=None, new=None,
feature=None):
self.notifier = notifier
self.kind = kind
self.old = old
self.new = new
self.feature = feature
def __repr__(self):
return ('[{0}] old={1} new={2} obj={3} #{4}'
.format(Kind.dictReverse[self.kind],
self.old,
self.new,
self.notifier,
self.feature))
class EObserver(object):
def __init__(self, notifier=None, notifyChanged=None):
if notifier:
notifier.listeners.append(self)
if notifyChanged:
self.notifyChanged = notifyChanged
def observe(self, notifier):
notifier.listeners.append(self)
def notifyChanged(self, notification):
pass
<commit_msg>Add better enumeration for Notification
The previous 'Kind' enumeration for Notification were using a
home-made-cooked way of dealing with enumeration. The code were got
from an article from the http://sametmax.com/ website (great website
by the way). This new version uses the python 3.4 enumeration module,
but as this module had been introduced in Python 3.4, it means that
the Python 3.3 compatibility is broken.<commit_after>from enum import Enum, unique
class ENotifer(object):
def notify(self, notification):
notification.notifier = notification.notifier or self
for listener in self._eternal_listener + self.listeners:
listener.notifyChanged(notification)
@unique
class Kind(Enum):
ADD = 0
ADD_MANY = 1
MOVE = 2
REMOVE = 3
REMOVE_MANY = 4
SET = 5
UNSET = 6
class Notification(object):
def __init__(self, notifier=None, kind=None, old=None, new=None,
feature=None):
self.notifier = notifier
self.kind = kind
self.old = old
self.new = new
self.feature = feature
def __repr__(self):
return ('[{0}] old={1} new={2} obj={3} #{4}'
.format(self.kind.name,
self.old,
self.new,
self.notifier,
self.feature))
class EObserver(object):
def __init__(self, notifier=None, notifyChanged=None):
if notifier:
notifier.listeners.append(self)
if notifyChanged:
self.notifyChanged = notifyChanged
def observe(self, notifier):
notifier.listeners.append(self)
def notifyChanged(self, notification):
pass
|
006423b8975fa9e9bc3758e5c2e82002f0838ca7
|
scripts/link-python-apt.py
|
scripts/link-python-apt.py
|
"""
Make python-apt available in the Python virtual environment without using the
system site-packages support built into Travis CI because this doesn't work
for Python 3.4, 3.7 and PyPy. See the following failed build:
https://travis-ci.org/xolox/python-deb-pkg-tools/builds/581437417
"""
import os
import sys
from distutils.sysconfig import get_python_lib
src = "/usr/lib/python%s/dist-packages/apt" % ("2.7" if sys.version_info[0] == 2 else "3")
dst = os.path.join(get_python_lib(), "apt")
assert os.path.isdir(src)
os.symlink(src, dst)
|
"""
Workaround to enable python-apt on Travis CI.
Make python-apt available in the Python virtual environment without using the
system site-packages support built into Travis CI because this doesn't work
for Python 3.4, 3.7 and PyPy. See the following failed build:
https://travis-ci.org/xolox/python-deb-pkg-tools/builds/581437417
"""
import os
import sys
import subprocess
from distutils.sysconfig import get_python_lib
src = subprocess.check_output(
"/usr/bin/python%i" % sys.version_info[0], "-c", "import apt_pkg; print(apt_pkg.__file__)"
).strip()
assert os.path.isfile(src)
dst = os.path.join(get_python_lib(), os.path.basename(src))
print("Linking %s -> %s .." % (dst, src))
os.symlink(src, dst)
|
Change s/apt/apt_pkg/g (follow up to previous commit)
|
Change s/apt/apt_pkg/g (follow up to previous commit)
|
Python
|
mit
|
xolox/python-deb-pkg-tools,xolox/python-deb-pkg-tools
|
"""
Make python-apt available in the Python virtual environment without using the
system site-packages support built into Travis CI because this doesn't work
for Python 3.4, 3.7 and PyPy. See the following failed build:
https://travis-ci.org/xolox/python-deb-pkg-tools/builds/581437417
"""
import os
import sys
from distutils.sysconfig import get_python_lib
src = "/usr/lib/python%s/dist-packages/apt" % ("2.7" if sys.version_info[0] == 2 else "3")
dst = os.path.join(get_python_lib(), "apt")
assert os.path.isdir(src)
os.symlink(src, dst)
Change s/apt/apt_pkg/g (follow up to previous commit)
|
"""
Workaround to enable python-apt on Travis CI.
Make python-apt available in the Python virtual environment without using the
system site-packages support built into Travis CI because this doesn't work
for Python 3.4, 3.7 and PyPy. See the following failed build:
https://travis-ci.org/xolox/python-deb-pkg-tools/builds/581437417
"""
import os
import sys
import subprocess
from distutils.sysconfig import get_python_lib
src = subprocess.check_output(
"/usr/bin/python%i" % sys.version_info[0], "-c", "import apt_pkg; print(apt_pkg.__file__)"
).strip()
assert os.path.isfile(src)
dst = os.path.join(get_python_lib(), os.path.basename(src))
print("Linking %s -> %s .." % (dst, src))
os.symlink(src, dst)
|
<commit_before>"""
Make python-apt available in the Python virtual environment without using the
system site-packages support built into Travis CI because this doesn't work
for Python 3.4, 3.7 and PyPy. See the following failed build:
https://travis-ci.org/xolox/python-deb-pkg-tools/builds/581437417
"""
import os
import sys
from distutils.sysconfig import get_python_lib
src = "/usr/lib/python%s/dist-packages/apt" % ("2.7" if sys.version_info[0] == 2 else "3")
dst = os.path.join(get_python_lib(), "apt")
assert os.path.isdir(src)
os.symlink(src, dst)
<commit_msg>Change s/apt/apt_pkg/g (follow up to previous commit)<commit_after>
|
"""
Workaround to enable python-apt on Travis CI.
Make python-apt available in the Python virtual environment without using the
system site-packages support built into Travis CI because this doesn't work
for Python 3.4, 3.7 and PyPy. See the following failed build:
https://travis-ci.org/xolox/python-deb-pkg-tools/builds/581437417
"""
import os
import sys
import subprocess
from distutils.sysconfig import get_python_lib
src = subprocess.check_output(
"/usr/bin/python%i" % sys.version_info[0], "-c", "import apt_pkg; print(apt_pkg.__file__)"
).strip()
assert os.path.isfile(src)
dst = os.path.join(get_python_lib(), os.path.basename(src))
print("Linking %s -> %s .." % (dst, src))
os.symlink(src, dst)
|
"""
Make python-apt available in the Python virtual environment without using the
system site-packages support built into Travis CI because this doesn't work
for Python 3.4, 3.7 and PyPy. See the following failed build:
https://travis-ci.org/xolox/python-deb-pkg-tools/builds/581437417
"""
import os
import sys
from distutils.sysconfig import get_python_lib
src = "/usr/lib/python%s/dist-packages/apt" % ("2.7" if sys.version_info[0] == 2 else "3")
dst = os.path.join(get_python_lib(), "apt")
assert os.path.isdir(src)
os.symlink(src, dst)
Change s/apt/apt_pkg/g (follow up to previous commit)"""
Workaround to enable python-apt on Travis CI.
Make python-apt available in the Python virtual environment without using the
system site-packages support built into Travis CI because this doesn't work
for Python 3.4, 3.7 and PyPy. See the following failed build:
https://travis-ci.org/xolox/python-deb-pkg-tools/builds/581437417
"""
import os
import sys
import subprocess
from distutils.sysconfig import get_python_lib
src = subprocess.check_output(
"/usr/bin/python%i" % sys.version_info[0], "-c", "import apt_pkg; print(apt_pkg.__file__)"
).strip()
assert os.path.isfile(src)
dst = os.path.join(get_python_lib(), os.path.basename(src))
print("Linking %s -> %s .." % (dst, src))
os.symlink(src, dst)
|
<commit_before>"""
Make python-apt available in the Python virtual environment without using the
system site-packages support built into Travis CI because this doesn't work
for Python 3.4, 3.7 and PyPy. See the following failed build:
https://travis-ci.org/xolox/python-deb-pkg-tools/builds/581437417
"""
import os
import sys
from distutils.sysconfig import get_python_lib
src = "/usr/lib/python%s/dist-packages/apt" % ("2.7" if sys.version_info[0] == 2 else "3")
dst = os.path.join(get_python_lib(), "apt")
assert os.path.isdir(src)
os.symlink(src, dst)
<commit_msg>Change s/apt/apt_pkg/g (follow up to previous commit)<commit_after>"""
Workaround to enable python-apt on Travis CI.
Make python-apt available in the Python virtual environment without using the
system site-packages support built into Travis CI because this doesn't work
for Python 3.4, 3.7 and PyPy. See the following failed build:
https://travis-ci.org/xolox/python-deb-pkg-tools/builds/581437417
"""
import os
import sys
import subprocess
from distutils.sysconfig import get_python_lib
src = subprocess.check_output(
"/usr/bin/python%i" % sys.version_info[0], "-c", "import apt_pkg; print(apt_pkg.__file__)"
).strip()
assert os.path.isfile(src)
dst = os.path.join(get_python_lib(), os.path.basename(src))
print("Linking %s -> %s .." % (dst, src))
os.symlink(src, dst)
|
353728aba17695396c6167543e74181f9f853fdc
|
examples/template_render.py
|
examples/template_render.py
|
import django.template.loader
import django.conf
import sys
sys.path.append('django_test')
django.conf.settings.configure(INSTALLED_APPS=(), TEMPLATE_DIRS=('.', 'examples',))
for x in range(0,100):
django.template.loader.render_to_string('template.html')
|
import django.template.loader
import django.conf
import sys, os
os.chdir(os.path.dirname(__file__))
django.conf.settings.configure(
INSTALLED_APPS=(),
TEMPLATES=[{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": ['.']
}],
)
django.setup()
for x in range(0,100):
django.template.loader.render_to_string('template.html')
|
Update template render example for Django 1.8+
|
Update template render example for Django 1.8+
|
Python
|
bsd-3-clause
|
joerick/pyinstrument,joerick/pyinstrument,joerick/pyinstrument,joerick/pyinstrument,joerick/pyinstrument,joerick/pyinstrument
|
import django.template.loader
import django.conf
import sys
sys.path.append('django_test')
django.conf.settings.configure(INSTALLED_APPS=(), TEMPLATE_DIRS=('.', 'examples',))
for x in range(0,100):
django.template.loader.render_to_string('template.html')
Update template render example for Django 1.8+
|
import django.template.loader
import django.conf
import sys, os
os.chdir(os.path.dirname(__file__))
django.conf.settings.configure(
INSTALLED_APPS=(),
TEMPLATES=[{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": ['.']
}],
)
django.setup()
for x in range(0,100):
django.template.loader.render_to_string('template.html')
|
<commit_before>import django.template.loader
import django.conf
import sys
sys.path.append('django_test')
django.conf.settings.configure(INSTALLED_APPS=(), TEMPLATE_DIRS=('.', 'examples',))
for x in range(0,100):
django.template.loader.render_to_string('template.html')
<commit_msg>Update template render example for Django 1.8+<commit_after>
|
import django.template.loader
import django.conf
import sys, os
os.chdir(os.path.dirname(__file__))
django.conf.settings.configure(
INSTALLED_APPS=(),
TEMPLATES=[{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": ['.']
}],
)
django.setup()
for x in range(0,100):
django.template.loader.render_to_string('template.html')
|
import django.template.loader
import django.conf
import sys
sys.path.append('django_test')
django.conf.settings.configure(INSTALLED_APPS=(), TEMPLATE_DIRS=('.', 'examples',))
for x in range(0,100):
django.template.loader.render_to_string('template.html')
Update template render example for Django 1.8+import django.template.loader
import django.conf
import sys, os
os.chdir(os.path.dirname(__file__))
django.conf.settings.configure(
INSTALLED_APPS=(),
TEMPLATES=[{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": ['.']
}],
)
django.setup()
for x in range(0,100):
django.template.loader.render_to_string('template.html')
|
<commit_before>import django.template.loader
import django.conf
import sys
sys.path.append('django_test')
django.conf.settings.configure(INSTALLED_APPS=(), TEMPLATE_DIRS=('.', 'examples',))
for x in range(0,100):
django.template.loader.render_to_string('template.html')
<commit_msg>Update template render example for Django 1.8+<commit_after>import django.template.loader
import django.conf
import sys, os
os.chdir(os.path.dirname(__file__))
django.conf.settings.configure(
INSTALLED_APPS=(),
TEMPLATES=[{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": ['.']
}],
)
django.setup()
for x in range(0,100):
django.template.loader.render_to_string('template.html')
|
5ef233bf3bb3bd1346bb64a9da2ed5542c0e40df
|
regparser/layer/meta.py
|
regparser/layer/meta.py
|
import re
from regparser.layer.layer import Layer
import settings
class Meta(Layer):
shorthand = 'meta'
def __init__(self, tree, cfr_title, version, **context):
super(Meta, self).__init__(tree, **context)
self.cfr_title = cfr_title
self.version = version
def process(self, node):
"""If this is the root element, add some 'meta' information about
this regulation, including its cfr title, effective date, and any
configured info"""
if len(node.label) != 1:
return
layer = {
'cfr_title_number': self.cfr_title,
'cfr_title_text': settings.CFR_TITLES[self.cfr_title]
}
if node.title:
# up till the paren
match = re.search('part \d+[^\w]*([^\(]*)', node.title, re.I)
if match:
layer['statutory_name'] = match.group(1).strip()
match = re.search('\(regulation (\w+)\)', node.title, re.I)
if match:
layer['reg_letter'] = match.group(1)
effective_date = self.effective_date()
if effective_date:
layer['effective_date'] = effective_date
return [dict(layer.items() + settings.META.items())]
def effective_date(self):
if self.version and self.version.effective:
return self.version.effective.isoformat()
|
import re
from regparser.layer.layer import Layer
import settings
class Meta(Layer):
shorthand = 'meta'
def __init__(self, tree, cfr_title, version, **context):
super(Meta, self).__init__(tree, **context)
self.cfr_title = cfr_title
self.version = version
def process(self, node):
"""If this is the root element, add some 'meta' information about
this regulation, including its cfr title, effective date, and any
configured info"""
if len(node.label) != 1:
return
layer = {
'cfr_title_number': self.cfr_title,
'cfr_title_text': settings.CFR_TITLES[self.cfr_title]
}
if node.title:
# up till the paren
match = re.search('part \d+[^\w]*([^\(]*)', node.title, re.I)
if match:
layer['statutory_name'] = match.group(1).strip()
match = re.search('\(regulation (\w+)\)', node.title, re.I)
if match:
layer['reg_letter'] = match.group(1)
effective_date = self.effective_date()
if effective_date:
layer['effective_date'] = effective_date
result = {}
result.update(layer)
result.update(settings.META)
return [result]
def effective_date(self):
if self.version and self.version.effective:
return self.version.effective.isoformat()
|
Use dictionary update instead of addition
|
Use dictionary update instead of addition
|
Python
|
cc0-1.0
|
eregs/regulations-parser,tadhg-ohiggins/regulations-parser,tadhg-ohiggins/regulations-parser,eregs/regulations-parser
|
import re
from regparser.layer.layer import Layer
import settings
class Meta(Layer):
shorthand = 'meta'
def __init__(self, tree, cfr_title, version, **context):
super(Meta, self).__init__(tree, **context)
self.cfr_title = cfr_title
self.version = version
def process(self, node):
"""If this is the root element, add some 'meta' information about
this regulation, including its cfr title, effective date, and any
configured info"""
if len(node.label) != 1:
return
layer = {
'cfr_title_number': self.cfr_title,
'cfr_title_text': settings.CFR_TITLES[self.cfr_title]
}
if node.title:
# up till the paren
match = re.search('part \d+[^\w]*([^\(]*)', node.title, re.I)
if match:
layer['statutory_name'] = match.group(1).strip()
match = re.search('\(regulation (\w+)\)', node.title, re.I)
if match:
layer['reg_letter'] = match.group(1)
effective_date = self.effective_date()
if effective_date:
layer['effective_date'] = effective_date
return [dict(layer.items() + settings.META.items())]
def effective_date(self):
if self.version and self.version.effective:
return self.version.effective.isoformat()
Use dictionary update instead of addition
|
import re
from regparser.layer.layer import Layer
import settings
class Meta(Layer):
shorthand = 'meta'
def __init__(self, tree, cfr_title, version, **context):
super(Meta, self).__init__(tree, **context)
self.cfr_title = cfr_title
self.version = version
def process(self, node):
"""If this is the root element, add some 'meta' information about
this regulation, including its cfr title, effective date, and any
configured info"""
if len(node.label) != 1:
return
layer = {
'cfr_title_number': self.cfr_title,
'cfr_title_text': settings.CFR_TITLES[self.cfr_title]
}
if node.title:
# up till the paren
match = re.search('part \d+[^\w]*([^\(]*)', node.title, re.I)
if match:
layer['statutory_name'] = match.group(1).strip()
match = re.search('\(regulation (\w+)\)', node.title, re.I)
if match:
layer['reg_letter'] = match.group(1)
effective_date = self.effective_date()
if effective_date:
layer['effective_date'] = effective_date
result = {}
result.update(layer)
result.update(settings.META)
return [result]
def effective_date(self):
if self.version and self.version.effective:
return self.version.effective.isoformat()
|
<commit_before>import re
from regparser.layer.layer import Layer
import settings
class Meta(Layer):
shorthand = 'meta'
def __init__(self, tree, cfr_title, version, **context):
super(Meta, self).__init__(tree, **context)
self.cfr_title = cfr_title
self.version = version
def process(self, node):
"""If this is the root element, add some 'meta' information about
this regulation, including its cfr title, effective date, and any
configured info"""
if len(node.label) != 1:
return
layer = {
'cfr_title_number': self.cfr_title,
'cfr_title_text': settings.CFR_TITLES[self.cfr_title]
}
if node.title:
# up till the paren
match = re.search('part \d+[^\w]*([^\(]*)', node.title, re.I)
if match:
layer['statutory_name'] = match.group(1).strip()
match = re.search('\(regulation (\w+)\)', node.title, re.I)
if match:
layer['reg_letter'] = match.group(1)
effective_date = self.effective_date()
if effective_date:
layer['effective_date'] = effective_date
return [dict(layer.items() + settings.META.items())]
def effective_date(self):
if self.version and self.version.effective:
return self.version.effective.isoformat()
<commit_msg>Use dictionary update instead of addition<commit_after>
|
import re
from regparser.layer.layer import Layer
import settings
class Meta(Layer):
shorthand = 'meta'
def __init__(self, tree, cfr_title, version, **context):
super(Meta, self).__init__(tree, **context)
self.cfr_title = cfr_title
self.version = version
def process(self, node):
"""If this is the root element, add some 'meta' information about
this regulation, including its cfr title, effective date, and any
configured info"""
if len(node.label) != 1:
return
layer = {
'cfr_title_number': self.cfr_title,
'cfr_title_text': settings.CFR_TITLES[self.cfr_title]
}
if node.title:
# up till the paren
match = re.search('part \d+[^\w]*([^\(]*)', node.title, re.I)
if match:
layer['statutory_name'] = match.group(1).strip()
match = re.search('\(regulation (\w+)\)', node.title, re.I)
if match:
layer['reg_letter'] = match.group(1)
effective_date = self.effective_date()
if effective_date:
layer['effective_date'] = effective_date
result = {}
result.update(layer)
result.update(settings.META)
return [result]
def effective_date(self):
if self.version and self.version.effective:
return self.version.effective.isoformat()
|
import re
from regparser.layer.layer import Layer
import settings
class Meta(Layer):
shorthand = 'meta'
def __init__(self, tree, cfr_title, version, **context):
super(Meta, self).__init__(tree, **context)
self.cfr_title = cfr_title
self.version = version
def process(self, node):
"""If this is the root element, add some 'meta' information about
this regulation, including its cfr title, effective date, and any
configured info"""
if len(node.label) != 1:
return
layer = {
'cfr_title_number': self.cfr_title,
'cfr_title_text': settings.CFR_TITLES[self.cfr_title]
}
if node.title:
# up till the paren
match = re.search('part \d+[^\w]*([^\(]*)', node.title, re.I)
if match:
layer['statutory_name'] = match.group(1).strip()
match = re.search('\(regulation (\w+)\)', node.title, re.I)
if match:
layer['reg_letter'] = match.group(1)
effective_date = self.effective_date()
if effective_date:
layer['effective_date'] = effective_date
return [dict(layer.items() + settings.META.items())]
def effective_date(self):
if self.version and self.version.effective:
return self.version.effective.isoformat()
Use dictionary update instead of additionimport re
from regparser.layer.layer import Layer
import settings
class Meta(Layer):
shorthand = 'meta'
def __init__(self, tree, cfr_title, version, **context):
super(Meta, self).__init__(tree, **context)
self.cfr_title = cfr_title
self.version = version
def process(self, node):
"""If this is the root element, add some 'meta' information about
this regulation, including its cfr title, effective date, and any
configured info"""
if len(node.label) != 1:
return
layer = {
'cfr_title_number': self.cfr_title,
'cfr_title_text': settings.CFR_TITLES[self.cfr_title]
}
if node.title:
# up till the paren
match = re.search('part \d+[^\w]*([^\(]*)', node.title, re.I)
if match:
layer['statutory_name'] = match.group(1).strip()
match = re.search('\(regulation (\w+)\)', node.title, re.I)
if match:
layer['reg_letter'] = match.group(1)
effective_date = self.effective_date()
if effective_date:
layer['effective_date'] = effective_date
result = {}
result.update(layer)
result.update(settings.META)
return [result]
def effective_date(self):
if self.version and self.version.effective:
return self.version.effective.isoformat()
|
<commit_before>import re
from regparser.layer.layer import Layer
import settings
class Meta(Layer):
shorthand = 'meta'
def __init__(self, tree, cfr_title, version, **context):
super(Meta, self).__init__(tree, **context)
self.cfr_title = cfr_title
self.version = version
def process(self, node):
"""If this is the root element, add some 'meta' information about
this regulation, including its cfr title, effective date, and any
configured info"""
if len(node.label) != 1:
return
layer = {
'cfr_title_number': self.cfr_title,
'cfr_title_text': settings.CFR_TITLES[self.cfr_title]
}
if node.title:
# up till the paren
match = re.search('part \d+[^\w]*([^\(]*)', node.title, re.I)
if match:
layer['statutory_name'] = match.group(1).strip()
match = re.search('\(regulation (\w+)\)', node.title, re.I)
if match:
layer['reg_letter'] = match.group(1)
effective_date = self.effective_date()
if effective_date:
layer['effective_date'] = effective_date
return [dict(layer.items() + settings.META.items())]
def effective_date(self):
if self.version and self.version.effective:
return self.version.effective.isoformat()
<commit_msg>Use dictionary update instead of addition<commit_after>import re
from regparser.layer.layer import Layer
import settings
class Meta(Layer):
shorthand = 'meta'
def __init__(self, tree, cfr_title, version, **context):
super(Meta, self).__init__(tree, **context)
self.cfr_title = cfr_title
self.version = version
def process(self, node):
"""If this is the root element, add some 'meta' information about
this regulation, including its cfr title, effective date, and any
configured info"""
if len(node.label) != 1:
return
layer = {
'cfr_title_number': self.cfr_title,
'cfr_title_text': settings.CFR_TITLES[self.cfr_title]
}
if node.title:
# up till the paren
match = re.search('part \d+[^\w]*([^\(]*)', node.title, re.I)
if match:
layer['statutory_name'] = match.group(1).strip()
match = re.search('\(regulation (\w+)\)', node.title, re.I)
if match:
layer['reg_letter'] = match.group(1)
effective_date = self.effective_date()
if effective_date:
layer['effective_date'] = effective_date
result = {}
result.update(layer)
result.update(settings.META)
return [result]
def effective_date(self):
if self.version and self.version.effective:
return self.version.effective.isoformat()
|
e980aaf833b6f289069ee9ae9c2d3571ae297246
|
tools/publish_all_pkgs.py
|
tools/publish_all_pkgs.py
|
#!/usr/bin/env python
#
# Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
#
# Upload all packages in pkg/ (other than a few that should be explicitly
# excluded), plus sdk/lib/_internal/compiler .
#
# Usage: publish_all_pkgs.py
#
# "pub" must be in PATH.
import os
import os.path
import subprocess
import sys
def Main(argv):
pkgs_to_publish = []
for name in os.listdir('pkg'):
if os.path.isdir(os.path.join('pkg', name)):
if (name != '.svn' and name != 'fixnum' and name != 'expect'):
pkgs_to_publish.append(os.path.join('pkg', name))
# Publish dart2js as an "unsupported" package.
pkgs_to_publish.append(
os.path.join('sdk', 'lib', '_internal', 'compiler'))
for pkg in pkgs_to_publish:
print "\n\nPublishing [32m%s[0m:\n-------------------------------" % pkg
subprocess.call(['python', 'tools/publish_pkg.py', pkg])
if __name__ == '__main__':
sys.exit(Main(sys.argv))
|
#!/usr/bin/env python
#
# Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
#
# Upload all packages in pkg/ (other than a few that should be explicitly
# excluded), plus sdk/lib/_internal/compiler .
#
# Usage: publish_all_pkgs.py
#
# "pub" must be in PATH.
import os
import os.path
import subprocess
import sys
def Main(argv):
pkgs_to_publish = []
for name in os.listdir('pkg'):
if os.path.isdir(os.path.join('pkg', name)):
if (name != '.svn' and name != 'expect'):
pkgs_to_publish.append(os.path.join('pkg', name))
for pkg in pkgs_to_publish:
print "\n\nPublishing [32m%s[0m:\n-------------------------------" % pkg
subprocess.call(['python', 'tools/publish_pkg.py', pkg])
if __name__ == '__main__':
sys.exit(Main(sys.argv))
|
Stop publishing compiler_unsupported. Start publishing fixnum.
|
Stop publishing compiler_unsupported.
Start publishing fixnum.
R=sigmund@google.com
Review URL: https://codereview.chromium.org//19757010
git-svn-id: c93d8a2297af3b929165606efe145742a534bc71@25217 260f80e4-7a28-3924-810f-c04153c831b5
|
Python
|
bsd-3-clause
|
dartino/dart-sdk,dart-archive/dart-sdk,dart-lang/sdk,dart-lang/sdk,dartino/dart-sdk,dartino/dart-sdk,dart-lang/sdk,dart-lang/sdk,dart-archive/dart-sdk,dart-lang/sdk,dart-lang/sdk,dart-archive/dart-sdk,dart-archive/dart-sdk,dart-archive/dart-sdk,dart-lang/sdk,dartino/dart-sdk,dart-archive/dart-sdk,dart-archive/dart-sdk,dart-archive/dart-sdk,dartino/dart-sdk,dartino/dart-sdk,dartino/dart-sdk,dartino/dart-sdk,dart-archive/dart-sdk,dartino/dart-sdk,dart-lang/sdk
|
#!/usr/bin/env python
#
# Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
#
# Upload all packages in pkg/ (other than a few that should be explicitly
# excluded), plus sdk/lib/_internal/compiler .
#
# Usage: publish_all_pkgs.py
#
# "pub" must be in PATH.
import os
import os.path
import subprocess
import sys
def Main(argv):
pkgs_to_publish = []
for name in os.listdir('pkg'):
if os.path.isdir(os.path.join('pkg', name)):
if (name != '.svn' and name != 'fixnum' and name != 'expect'):
pkgs_to_publish.append(os.path.join('pkg', name))
# Publish dart2js as an "unsupported" package.
pkgs_to_publish.append(
os.path.join('sdk', 'lib', '_internal', 'compiler'))
for pkg in pkgs_to_publish:
print "\n\nPublishing [32m%s[0m:\n-------------------------------" % pkg
subprocess.call(['python', 'tools/publish_pkg.py', pkg])
if __name__ == '__main__':
sys.exit(Main(sys.argv))
Stop publishing compiler_unsupported.
Start publishing fixnum.
R=sigmund@google.com
Review URL: https://codereview.chromium.org//19757010
git-svn-id: c93d8a2297af3b929165606efe145742a534bc71@25217 260f80e4-7a28-3924-810f-c04153c831b5
|
#!/usr/bin/env python
#
# Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
#
# Upload all packages in pkg/ (other than a few that should be explicitly
# excluded), plus sdk/lib/_internal/compiler .
#
# Usage: publish_all_pkgs.py
#
# "pub" must be in PATH.
import os
import os.path
import subprocess
import sys
def Main(argv):
pkgs_to_publish = []
for name in os.listdir('pkg'):
if os.path.isdir(os.path.join('pkg', name)):
if (name != '.svn' and name != 'expect'):
pkgs_to_publish.append(os.path.join('pkg', name))
for pkg in pkgs_to_publish:
print "\n\nPublishing [32m%s[0m:\n-------------------------------" % pkg
subprocess.call(['python', 'tools/publish_pkg.py', pkg])
if __name__ == '__main__':
sys.exit(Main(sys.argv))
|
<commit_before>#!/usr/bin/env python
#
# Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
#
# Upload all packages in pkg/ (other than a few that should be explicitly
# excluded), plus sdk/lib/_internal/compiler .
#
# Usage: publish_all_pkgs.py
#
# "pub" must be in PATH.
import os
import os.path
import subprocess
import sys
def Main(argv):
pkgs_to_publish = []
for name in os.listdir('pkg'):
if os.path.isdir(os.path.join('pkg', name)):
if (name != '.svn' and name != 'fixnum' and name != 'expect'):
pkgs_to_publish.append(os.path.join('pkg', name))
# Publish dart2js as an "unsupported" package.
pkgs_to_publish.append(
os.path.join('sdk', 'lib', '_internal', 'compiler'))
for pkg in pkgs_to_publish:
print "\n\nPublishing [32m%s[0m:\n-------------------------------" % pkg
subprocess.call(['python', 'tools/publish_pkg.py', pkg])
if __name__ == '__main__':
sys.exit(Main(sys.argv))
<commit_msg>Stop publishing compiler_unsupported.
Start publishing fixnum.
R=sigmund@google.com
Review URL: https://codereview.chromium.org//19757010
git-svn-id: c93d8a2297af3b929165606efe145742a534bc71@25217 260f80e4-7a28-3924-810f-c04153c831b5<commit_after>
|
#!/usr/bin/env python
#
# Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
#
# Upload all packages in pkg/ (other than a few that should be explicitly
# excluded), plus sdk/lib/_internal/compiler .
#
# Usage: publish_all_pkgs.py
#
# "pub" must be in PATH.
import os
import os.path
import subprocess
import sys
def Main(argv):
pkgs_to_publish = []
for name in os.listdir('pkg'):
if os.path.isdir(os.path.join('pkg', name)):
if (name != '.svn' and name != 'expect'):
pkgs_to_publish.append(os.path.join('pkg', name))
for pkg in pkgs_to_publish:
print "\n\nPublishing [32m%s[0m:\n-------------------------------" % pkg
subprocess.call(['python', 'tools/publish_pkg.py', pkg])
if __name__ == '__main__':
sys.exit(Main(sys.argv))
|
#!/usr/bin/env python
#
# Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
#
# Upload all packages in pkg/ (other than a few that should be explicitly
# excluded), plus sdk/lib/_internal/compiler .
#
# Usage: publish_all_pkgs.py
#
# "pub" must be in PATH.
import os
import os.path
import subprocess
import sys
def Main(argv):
pkgs_to_publish = []
for name in os.listdir('pkg'):
if os.path.isdir(os.path.join('pkg', name)):
if (name != '.svn' and name != 'fixnum' and name != 'expect'):
pkgs_to_publish.append(os.path.join('pkg', name))
# Publish dart2js as an "unsupported" package.
pkgs_to_publish.append(
os.path.join('sdk', 'lib', '_internal', 'compiler'))
for pkg in pkgs_to_publish:
print "\n\nPublishing [32m%s[0m:\n-------------------------------" % pkg
subprocess.call(['python', 'tools/publish_pkg.py', pkg])
if __name__ == '__main__':
sys.exit(Main(sys.argv))
Stop publishing compiler_unsupported.
Start publishing fixnum.
R=sigmund@google.com
Review URL: https://codereview.chromium.org//19757010
git-svn-id: c93d8a2297af3b929165606efe145742a534bc71@25217 260f80e4-7a28-3924-810f-c04153c831b5#!/usr/bin/env python
#
# Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
#
# Upload all packages in pkg/ (other than a few that should be explicitly
# excluded), plus sdk/lib/_internal/compiler .
#
# Usage: publish_all_pkgs.py
#
# "pub" must be in PATH.
import os
import os.path
import subprocess
import sys
def Main(argv):
pkgs_to_publish = []
for name in os.listdir('pkg'):
if os.path.isdir(os.path.join('pkg', name)):
if (name != '.svn' and name != 'expect'):
pkgs_to_publish.append(os.path.join('pkg', name))
for pkg in pkgs_to_publish:
print "\n\nPublishing [32m%s[0m:\n-------------------------------" % pkg
subprocess.call(['python', 'tools/publish_pkg.py', pkg])
if __name__ == '__main__':
sys.exit(Main(sys.argv))
|
<commit_before>#!/usr/bin/env python
#
# Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
#
# Upload all packages in pkg/ (other than a few that should be explicitly
# excluded), plus sdk/lib/_internal/compiler .
#
# Usage: publish_all_pkgs.py
#
# "pub" must be in PATH.
import os
import os.path
import subprocess
import sys
def Main(argv):
pkgs_to_publish = []
for name in os.listdir('pkg'):
if os.path.isdir(os.path.join('pkg', name)):
if (name != '.svn' and name != 'fixnum' and name != 'expect'):
pkgs_to_publish.append(os.path.join('pkg', name))
# Publish dart2js as an "unsupported" package.
pkgs_to_publish.append(
os.path.join('sdk', 'lib', '_internal', 'compiler'))
for pkg in pkgs_to_publish:
print "\n\nPublishing [32m%s[0m:\n-------------------------------" % pkg
subprocess.call(['python', 'tools/publish_pkg.py', pkg])
if __name__ == '__main__':
sys.exit(Main(sys.argv))
<commit_msg>Stop publishing compiler_unsupported.
Start publishing fixnum.
R=sigmund@google.com
Review URL: https://codereview.chromium.org//19757010
git-svn-id: c93d8a2297af3b929165606efe145742a534bc71@25217 260f80e4-7a28-3924-810f-c04153c831b5<commit_after>#!/usr/bin/env python
#
# Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
#
# Upload all packages in pkg/ (other than a few that should be explicitly
# excluded), plus sdk/lib/_internal/compiler .
#
# Usage: publish_all_pkgs.py
#
# "pub" must be in PATH.
import os
import os.path
import subprocess
import sys
def Main(argv):
pkgs_to_publish = []
for name in os.listdir('pkg'):
if os.path.isdir(os.path.join('pkg', name)):
if (name != '.svn' and name != 'expect'):
pkgs_to_publish.append(os.path.join('pkg', name))
for pkg in pkgs_to_publish:
print "\n\nPublishing [32m%s[0m:\n-------------------------------" % pkg
subprocess.call(['python', 'tools/publish_pkg.py', pkg])
if __name__ == '__main__':
sys.exit(Main(sys.argv))
|
9ff75ff858681665141650d4e1ef310265956f35
|
tools/workplace_status.py
|
tools/workplace_status.py
|
#! /usr/bin/env python3
# Note that this file should work in both Python 2 and 3.
from __future__ import print_function
from subprocess import Popen, PIPE
dirty = Popen(["git", "diff-index", "--quiet", "HEAD"], stdout=PIPE).wait() != 0
commit_process = Popen(["git", "describe", "--tags", "--abbrev=0"], stdout=PIPE)
(version, err) = commit_process.communicate()
print("STABLE_GIT_VERSION %s%s" % (
version.decode("utf-8").replace("\n", ""),
"-dirty" if dirty else "")
)
|
#! /usr/bin/env python3
# Note that this file should work in both Python 2 and 3.
from __future__ import print_function
from subprocess import Popen, PIPE
dirty = Popen(["git", "diff-index", "--quiet", "HEAD"], stdout=PIPE).wait() != 0
commit_process = Popen(["git", "describe", "--always", "--tags", "--abbrev=0"], stdout=PIPE)
(version, err) = commit_process.communicate()
print("STABLE_GIT_VERSION %s%s" % (
version.decode("utf-8").replace("\n", ""),
"-dirty" if dirty else "")
)
|
Make git describe --always return a value
|
Make git describe --always return a value
This means that the latest commit will be stamped if there are no tags.
|
Python
|
apache-2.0
|
bazelbuild/bazel-watcher,bazelbuild/bazel-watcher,bazelbuild/bazel-watcher,bazelbuild/bazel-watcher
|
#! /usr/bin/env python3
# Note that this file should work in both Python 2 and 3.
from __future__ import print_function
from subprocess import Popen, PIPE
dirty = Popen(["git", "diff-index", "--quiet", "HEAD"], stdout=PIPE).wait() != 0
commit_process = Popen(["git", "describe", "--tags", "--abbrev=0"], stdout=PIPE)
(version, err) = commit_process.communicate()
print("STABLE_GIT_VERSION %s%s" % (
version.decode("utf-8").replace("\n", ""),
"-dirty" if dirty else "")
)
Make git describe --always return a value
This means that the latest commit will be stamped if there are no tags.
|
#! /usr/bin/env python3
# Note that this file should work in both Python 2 and 3.
from __future__ import print_function
from subprocess import Popen, PIPE
dirty = Popen(["git", "diff-index", "--quiet", "HEAD"], stdout=PIPE).wait() != 0
commit_process = Popen(["git", "describe", "--always", "--tags", "--abbrev=0"], stdout=PIPE)
(version, err) = commit_process.communicate()
print("STABLE_GIT_VERSION %s%s" % (
version.decode("utf-8").replace("\n", ""),
"-dirty" if dirty else "")
)
|
<commit_before>#! /usr/bin/env python3
# Note that this file should work in both Python 2 and 3.
from __future__ import print_function
from subprocess import Popen, PIPE
dirty = Popen(["git", "diff-index", "--quiet", "HEAD"], stdout=PIPE).wait() != 0
commit_process = Popen(["git", "describe", "--tags", "--abbrev=0"], stdout=PIPE)
(version, err) = commit_process.communicate()
print("STABLE_GIT_VERSION %s%s" % (
version.decode("utf-8").replace("\n", ""),
"-dirty" if dirty else "")
)
<commit_msg>Make git describe --always return a value
This means that the latest commit will be stamped if there are no tags.<commit_after>
|
#! /usr/bin/env python3
# Note that this file should work in both Python 2 and 3.
from __future__ import print_function
from subprocess import Popen, PIPE
dirty = Popen(["git", "diff-index", "--quiet", "HEAD"], stdout=PIPE).wait() != 0
commit_process = Popen(["git", "describe", "--always", "--tags", "--abbrev=0"], stdout=PIPE)
(version, err) = commit_process.communicate()
print("STABLE_GIT_VERSION %s%s" % (
version.decode("utf-8").replace("\n", ""),
"-dirty" if dirty else "")
)
|
#! /usr/bin/env python3
# Note that this file should work in both Python 2 and 3.
from __future__ import print_function
from subprocess import Popen, PIPE
dirty = Popen(["git", "diff-index", "--quiet", "HEAD"], stdout=PIPE).wait() != 0
commit_process = Popen(["git", "describe", "--tags", "--abbrev=0"], stdout=PIPE)
(version, err) = commit_process.communicate()
print("STABLE_GIT_VERSION %s%s" % (
version.decode("utf-8").replace("\n", ""),
"-dirty" if dirty else "")
)
Make git describe --always return a value
This means that the latest commit will be stamped if there are no tags.#! /usr/bin/env python3
# Note that this file should work in both Python 2 and 3.
from __future__ import print_function
from subprocess import Popen, PIPE
dirty = Popen(["git", "diff-index", "--quiet", "HEAD"], stdout=PIPE).wait() != 0
commit_process = Popen(["git", "describe", "--always", "--tags", "--abbrev=0"], stdout=PIPE)
(version, err) = commit_process.communicate()
print("STABLE_GIT_VERSION %s%s" % (
version.decode("utf-8").replace("\n", ""),
"-dirty" if dirty else "")
)
|
<commit_before>#! /usr/bin/env python3
# Note that this file should work in both Python 2 and 3.
from __future__ import print_function
from subprocess import Popen, PIPE
dirty = Popen(["git", "diff-index", "--quiet", "HEAD"], stdout=PIPE).wait() != 0
commit_process = Popen(["git", "describe", "--tags", "--abbrev=0"], stdout=PIPE)
(version, err) = commit_process.communicate()
print("STABLE_GIT_VERSION %s%s" % (
version.decode("utf-8").replace("\n", ""),
"-dirty" if dirty else "")
)
<commit_msg>Make git describe --always return a value
This means that the latest commit will be stamped if there are no tags.<commit_after>#! /usr/bin/env python3
# Note that this file should work in both Python 2 and 3.
from __future__ import print_function
from subprocess import Popen, PIPE
dirty = Popen(["git", "diff-index", "--quiet", "HEAD"], stdout=PIPE).wait() != 0
commit_process = Popen(["git", "describe", "--always", "--tags", "--abbrev=0"], stdout=PIPE)
(version, err) = commit_process.communicate()
print("STABLE_GIT_VERSION %s%s" % (
version.decode("utf-8").replace("\n", ""),
"-dirty" if dirty else "")
)
|
3681b5a485662656d6419d95ad89f1fbdb7a2a50
|
myuw/context_processors.py
|
myuw/context_processors.py
|
# Determins if the requesting device is a native hybrid app (android/ios)
def is_hybrid(request):
return {
'is_hybrid': 'HTTP_MYUW_HYBRID' in request.META
}
|
# Determins if the requesting device is a native hybrid app (android/ios)
def is_hybrid(request):
return {
'is_hybrid': 'MyUW_Hybrid/1.0' in request.META['HTTP_USER_AGENT']
}
|
Update context processer to check for custom hybrid user agent.
|
Update context processer to check for custom hybrid user agent.
|
Python
|
apache-2.0
|
uw-it-aca/myuw,uw-it-aca/myuw,uw-it-aca/myuw,uw-it-aca/myuw
|
# Determins if the requesting device is a native hybrid app (android/ios)
def is_hybrid(request):
return {
'is_hybrid': 'HTTP_MYUW_HYBRID' in request.META
}
Update context processer to check for custom hybrid user agent.
|
# Determins if the requesting device is a native hybrid app (android/ios)
def is_hybrid(request):
return {
'is_hybrid': 'MyUW_Hybrid/1.0' in request.META['HTTP_USER_AGENT']
}
|
<commit_before># Determins if the requesting device is a native hybrid app (android/ios)
def is_hybrid(request):
return {
'is_hybrid': 'HTTP_MYUW_HYBRID' in request.META
}
<commit_msg>Update context processer to check for custom hybrid user agent.<commit_after>
|
# Determins if the requesting device is a native hybrid app (android/ios)
def is_hybrid(request):
return {
'is_hybrid': 'MyUW_Hybrid/1.0' in request.META['HTTP_USER_AGENT']
}
|
# Determins if the requesting device is a native hybrid app (android/ios)
def is_hybrid(request):
return {
'is_hybrid': 'HTTP_MYUW_HYBRID' in request.META
}
Update context processer to check for custom hybrid user agent.# Determins if the requesting device is a native hybrid app (android/ios)
def is_hybrid(request):
return {
'is_hybrid': 'MyUW_Hybrid/1.0' in request.META['HTTP_USER_AGENT']
}
|
<commit_before># Determins if the requesting device is a native hybrid app (android/ios)
def is_hybrid(request):
return {
'is_hybrid': 'HTTP_MYUW_HYBRID' in request.META
}
<commit_msg>Update context processer to check for custom hybrid user agent.<commit_after># Determins if the requesting device is a native hybrid app (android/ios)
def is_hybrid(request):
return {
'is_hybrid': 'MyUW_Hybrid/1.0' in request.META['HTTP_USER_AGENT']
}
|
1fd43c6b87db9599c73b7cb26856e99404b2e0f7
|
corehq/apps/data_interfaces/tests/test_xform_management.py
|
corehq/apps/data_interfaces/tests/test_xform_management.py
|
from django.contrib.sessions.middleware import SessionMiddleware
from django.http import HttpRequest, QueryDict
from django.test import TestCase, Client
from corehq.apps.data_interfaces.views import XFormManagementView
from corehq.apps.domain.shortcuts import create_domain
from corehq.apps.users.models import WebUser
class XFormManagementTest(TestCase):
@classmethod
def setUpClass(cls):
cls.domain = create_domain('xform-management-test')
cls.web_user = WebUser.create('xform-management-test', 'test', 'test',
is_superuser=True)
Client().force_login(cls.web_user.get_django_user())
@classmethod
def tearDownClass(cls):
cls.web_user.delete()
cls.domain.delete()
def test_get_xform_ids__sanity_check(self):
# This helper has to mock a request in a brittle way.
# If permissions are wrong, instead of returning a list,
# it will return an HttpResponse containing the permission error.
# This can break when permissions change.
# So, just test that we aren't hitting that situation and that the response is a list.
request = HttpRequest()
request.POST = QueryDict('select_all=')
request.couch_user = self.web_user
SessionMiddleware().process_request(request)
view = XFormManagementView()
view.args = (self.domain.name,)
view.request = request
assert isinstance(view.get_xform_ids(request), list)
|
from django.contrib.sessions.middleware import SessionMiddleware
from django.http import HttpRequest, QueryDict
from django.test import TestCase, Client
from corehq.apps.data_interfaces.views import XFormManagementView
from corehq.apps.domain.shortcuts import create_domain
from corehq.apps.users.models import WebUser
from corehq.pillows.mappings.xform_mapping import XFORM_INDEX_INFO
from corehq.util.elastic import reset_es_index
class XFormManagementTest(TestCase):
@classmethod
def setUpClass(cls):
reset_es_index(XFORM_INDEX_INFO)
cls.domain = create_domain('xform-management-test')
cls.web_user = WebUser.create('xform-management-test', 'test', 'test',
is_superuser=True)
Client().force_login(cls.web_user.get_django_user())
@classmethod
def tearDownClass(cls):
cls.web_user.delete()
cls.domain.delete()
def test_get_xform_ids__sanity_check(self):
# This helper has to mock a request in a brittle way.
# If permissions are wrong, instead of returning a list,
# it will return an HttpResponse containing the permission error.
# This can break when permissions change.
# So, just test that we aren't hitting that situation and that the response is a list.
request = HttpRequest()
request.POST = QueryDict('select_all=')
request.couch_user = self.web_user
SessionMiddleware().process_request(request)
view = XFormManagementView()
view.args = (self.domain.name,)
view.request = request
assert isinstance(view.get_xform_ids(request), list)
|
Fix ES index setup in XFormManagementTest
|
Fix ES index setup in XFormManagementTest
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
from django.contrib.sessions.middleware import SessionMiddleware
from django.http import HttpRequest, QueryDict
from django.test import TestCase, Client
from corehq.apps.data_interfaces.views import XFormManagementView
from corehq.apps.domain.shortcuts import create_domain
from corehq.apps.users.models import WebUser
class XFormManagementTest(TestCase):
@classmethod
def setUpClass(cls):
cls.domain = create_domain('xform-management-test')
cls.web_user = WebUser.create('xform-management-test', 'test', 'test',
is_superuser=True)
Client().force_login(cls.web_user.get_django_user())
@classmethod
def tearDownClass(cls):
cls.web_user.delete()
cls.domain.delete()
def test_get_xform_ids__sanity_check(self):
# This helper has to mock a request in a brittle way.
# If permissions are wrong, instead of returning a list,
# it will return an HttpResponse containing the permission error.
# This can break when permissions change.
# So, just test that we aren't hitting that situation and that the response is a list.
request = HttpRequest()
request.POST = QueryDict('select_all=')
request.couch_user = self.web_user
SessionMiddleware().process_request(request)
view = XFormManagementView()
view.args = (self.domain.name,)
view.request = request
assert isinstance(view.get_xform_ids(request), list)
Fix ES index setup in XFormManagementTest
|
from django.contrib.sessions.middleware import SessionMiddleware
from django.http import HttpRequest, QueryDict
from django.test import TestCase, Client
from corehq.apps.data_interfaces.views import XFormManagementView
from corehq.apps.domain.shortcuts import create_domain
from corehq.apps.users.models import WebUser
from corehq.pillows.mappings.xform_mapping import XFORM_INDEX_INFO
from corehq.util.elastic import reset_es_index
class XFormManagementTest(TestCase):
@classmethod
def setUpClass(cls):
reset_es_index(XFORM_INDEX_INFO)
cls.domain = create_domain('xform-management-test')
cls.web_user = WebUser.create('xform-management-test', 'test', 'test',
is_superuser=True)
Client().force_login(cls.web_user.get_django_user())
@classmethod
def tearDownClass(cls):
cls.web_user.delete()
cls.domain.delete()
def test_get_xform_ids__sanity_check(self):
# This helper has to mock a request in a brittle way.
# If permissions are wrong, instead of returning a list,
# it will return an HttpResponse containing the permission error.
# This can break when permissions change.
# So, just test that we aren't hitting that situation and that the response is a list.
request = HttpRequest()
request.POST = QueryDict('select_all=')
request.couch_user = self.web_user
SessionMiddleware().process_request(request)
view = XFormManagementView()
view.args = (self.domain.name,)
view.request = request
assert isinstance(view.get_xform_ids(request), list)
|
<commit_before>from django.contrib.sessions.middleware import SessionMiddleware
from django.http import HttpRequest, QueryDict
from django.test import TestCase, Client
from corehq.apps.data_interfaces.views import XFormManagementView
from corehq.apps.domain.shortcuts import create_domain
from corehq.apps.users.models import WebUser
class XFormManagementTest(TestCase):
@classmethod
def setUpClass(cls):
cls.domain = create_domain('xform-management-test')
cls.web_user = WebUser.create('xform-management-test', 'test', 'test',
is_superuser=True)
Client().force_login(cls.web_user.get_django_user())
@classmethod
def tearDownClass(cls):
cls.web_user.delete()
cls.domain.delete()
def test_get_xform_ids__sanity_check(self):
# This helper has to mock a request in a brittle way.
# If permissions are wrong, instead of returning a list,
# it will return an HttpResponse containing the permission error.
# This can break when permissions change.
# So, just test that we aren't hitting that situation and that the response is a list.
request = HttpRequest()
request.POST = QueryDict('select_all=')
request.couch_user = self.web_user
SessionMiddleware().process_request(request)
view = XFormManagementView()
view.args = (self.domain.name,)
view.request = request
assert isinstance(view.get_xform_ids(request), list)
<commit_msg>Fix ES index setup in XFormManagementTest<commit_after>
|
from django.contrib.sessions.middleware import SessionMiddleware
from django.http import HttpRequest, QueryDict
from django.test import TestCase, Client
from corehq.apps.data_interfaces.views import XFormManagementView
from corehq.apps.domain.shortcuts import create_domain
from corehq.apps.users.models import WebUser
from corehq.pillows.mappings.xform_mapping import XFORM_INDEX_INFO
from corehq.util.elastic import reset_es_index
class XFormManagementTest(TestCase):
@classmethod
def setUpClass(cls):
reset_es_index(XFORM_INDEX_INFO)
cls.domain = create_domain('xform-management-test')
cls.web_user = WebUser.create('xform-management-test', 'test', 'test',
is_superuser=True)
Client().force_login(cls.web_user.get_django_user())
@classmethod
def tearDownClass(cls):
cls.web_user.delete()
cls.domain.delete()
def test_get_xform_ids__sanity_check(self):
# This helper has to mock a request in a brittle way.
# If permissions are wrong, instead of returning a list,
# it will return an HttpResponse containing the permission error.
# This can break when permissions change.
# So, just test that we aren't hitting that situation and that the response is a list.
request = HttpRequest()
request.POST = QueryDict('select_all=')
request.couch_user = self.web_user
SessionMiddleware().process_request(request)
view = XFormManagementView()
view.args = (self.domain.name,)
view.request = request
assert isinstance(view.get_xform_ids(request), list)
|
from django.contrib.sessions.middleware import SessionMiddleware
from django.http import HttpRequest, QueryDict
from django.test import TestCase, Client
from corehq.apps.data_interfaces.views import XFormManagementView
from corehq.apps.domain.shortcuts import create_domain
from corehq.apps.users.models import WebUser
class XFormManagementTest(TestCase):
@classmethod
def setUpClass(cls):
cls.domain = create_domain('xform-management-test')
cls.web_user = WebUser.create('xform-management-test', 'test', 'test',
is_superuser=True)
Client().force_login(cls.web_user.get_django_user())
@classmethod
def tearDownClass(cls):
cls.web_user.delete()
cls.domain.delete()
def test_get_xform_ids__sanity_check(self):
# This helper has to mock a request in a brittle way.
# If permissions are wrong, instead of returning a list,
# it will return an HttpResponse containing the permission error.
# This can break when permissions change.
# So, just test that we aren't hitting that situation and that the response is a list.
request = HttpRequest()
request.POST = QueryDict('select_all=')
request.couch_user = self.web_user
SessionMiddleware().process_request(request)
view = XFormManagementView()
view.args = (self.domain.name,)
view.request = request
assert isinstance(view.get_xform_ids(request), list)
Fix ES index setup in XFormManagementTestfrom django.contrib.sessions.middleware import SessionMiddleware
from django.http import HttpRequest, QueryDict
from django.test import TestCase, Client
from corehq.apps.data_interfaces.views import XFormManagementView
from corehq.apps.domain.shortcuts import create_domain
from corehq.apps.users.models import WebUser
from corehq.pillows.mappings.xform_mapping import XFORM_INDEX_INFO
from corehq.util.elastic import reset_es_index
class XFormManagementTest(TestCase):
@classmethod
def setUpClass(cls):
reset_es_index(XFORM_INDEX_INFO)
cls.domain = create_domain('xform-management-test')
cls.web_user = WebUser.create('xform-management-test', 'test', 'test',
is_superuser=True)
Client().force_login(cls.web_user.get_django_user())
@classmethod
def tearDownClass(cls):
cls.web_user.delete()
cls.domain.delete()
def test_get_xform_ids__sanity_check(self):
# This helper has to mock a request in a brittle way.
# If permissions are wrong, instead of returning a list,
# it will return an HttpResponse containing the permission error.
# This can break when permissions change.
# So, just test that we aren't hitting that situation and that the response is a list.
request = HttpRequest()
request.POST = QueryDict('select_all=')
request.couch_user = self.web_user
SessionMiddleware().process_request(request)
view = XFormManagementView()
view.args = (self.domain.name,)
view.request = request
assert isinstance(view.get_xform_ids(request), list)
|
<commit_before>from django.contrib.sessions.middleware import SessionMiddleware
from django.http import HttpRequest, QueryDict
from django.test import TestCase, Client
from corehq.apps.data_interfaces.views import XFormManagementView
from corehq.apps.domain.shortcuts import create_domain
from corehq.apps.users.models import WebUser
class XFormManagementTest(TestCase):
@classmethod
def setUpClass(cls):
cls.domain = create_domain('xform-management-test')
cls.web_user = WebUser.create('xform-management-test', 'test', 'test',
is_superuser=True)
Client().force_login(cls.web_user.get_django_user())
@classmethod
def tearDownClass(cls):
cls.web_user.delete()
cls.domain.delete()
def test_get_xform_ids__sanity_check(self):
# This helper has to mock a request in a brittle way.
# If permissions are wrong, instead of returning a list,
# it will return an HttpResponse containing the permission error.
# This can break when permissions change.
# So, just test that we aren't hitting that situation and that the response is a list.
request = HttpRequest()
request.POST = QueryDict('select_all=')
request.couch_user = self.web_user
SessionMiddleware().process_request(request)
view = XFormManagementView()
view.args = (self.domain.name,)
view.request = request
assert isinstance(view.get_xform_ids(request), list)
<commit_msg>Fix ES index setup in XFormManagementTest<commit_after>from django.contrib.sessions.middleware import SessionMiddleware
from django.http import HttpRequest, QueryDict
from django.test import TestCase, Client
from corehq.apps.data_interfaces.views import XFormManagementView
from corehq.apps.domain.shortcuts import create_domain
from corehq.apps.users.models import WebUser
from corehq.pillows.mappings.xform_mapping import XFORM_INDEX_INFO
from corehq.util.elastic import reset_es_index
class XFormManagementTest(TestCase):
@classmethod
def setUpClass(cls):
reset_es_index(XFORM_INDEX_INFO)
cls.domain = create_domain('xform-management-test')
cls.web_user = WebUser.create('xform-management-test', 'test', 'test',
is_superuser=True)
Client().force_login(cls.web_user.get_django_user())
@classmethod
def tearDownClass(cls):
cls.web_user.delete()
cls.domain.delete()
def test_get_xform_ids__sanity_check(self):
# This helper has to mock a request in a brittle way.
# If permissions are wrong, instead of returning a list,
# it will return an HttpResponse containing the permission error.
# This can break when permissions change.
# So, just test that we aren't hitting that situation and that the response is a list.
request = HttpRequest()
request.POST = QueryDict('select_all=')
request.couch_user = self.web_user
SessionMiddleware().process_request(request)
view = XFormManagementView()
view.args = (self.domain.name,)
view.request = request
assert isinstance(view.get_xform_ids(request), list)
|
ebce20c36007f5665d927b860a64f45de5f128c4
|
uptests/web/responding.py
|
uptests/web/responding.py
|
#!/usr/bin/env python
import urllib2
import argparse
import portend
parser = argparse.ArgumentParser()
parser.add_argument('host')
parser.add_argument('port', type=int)
args = parser.parse_args()
portend.occupied(args.host, args.port, timeout=3)
root = 'http://{host}:{port}/'.format(**vars(args))
urllib2.urlopen(root)
|
#!/usr/bin/env python3
import urllib.request
import argparse
import portend
parser = argparse.ArgumentParser()
parser.add_argument('host')
parser.add_argument('port', type=int)
args = parser.parse_args()
portend.occupied(args.host, args.port, timeout=3)
root = 'http://{host}:{port}/'.format(**vars(args))
urllib.request.urlopen(root)
|
Update uptest for Python 3
|
Update uptest for Python 3
|
Python
|
mit
|
yougov/librarypaste,yougov/librarypaste
|
#!/usr/bin/env python
import urllib2
import argparse
import portend
parser = argparse.ArgumentParser()
parser.add_argument('host')
parser.add_argument('port', type=int)
args = parser.parse_args()
portend.occupied(args.host, args.port, timeout=3)
root = 'http://{host}:{port}/'.format(**vars(args))
urllib2.urlopen(root)
Update uptest for Python 3
|
#!/usr/bin/env python3
import urllib.request
import argparse
import portend
parser = argparse.ArgumentParser()
parser.add_argument('host')
parser.add_argument('port', type=int)
args = parser.parse_args()
portend.occupied(args.host, args.port, timeout=3)
root = 'http://{host}:{port}/'.format(**vars(args))
urllib.request.urlopen(root)
|
<commit_before>#!/usr/bin/env python
import urllib2
import argparse
import portend
parser = argparse.ArgumentParser()
parser.add_argument('host')
parser.add_argument('port', type=int)
args = parser.parse_args()
portend.occupied(args.host, args.port, timeout=3)
root = 'http://{host}:{port}/'.format(**vars(args))
urllib2.urlopen(root)
<commit_msg>Update uptest for Python 3<commit_after>
|
#!/usr/bin/env python3
import urllib.request
import argparse
import portend
parser = argparse.ArgumentParser()
parser.add_argument('host')
parser.add_argument('port', type=int)
args = parser.parse_args()
portend.occupied(args.host, args.port, timeout=3)
root = 'http://{host}:{port}/'.format(**vars(args))
urllib.request.urlopen(root)
|
#!/usr/bin/env python
import urllib2
import argparse
import portend
parser = argparse.ArgumentParser()
parser.add_argument('host')
parser.add_argument('port', type=int)
args = parser.parse_args()
portend.occupied(args.host, args.port, timeout=3)
root = 'http://{host}:{port}/'.format(**vars(args))
urllib2.urlopen(root)
Update uptest for Python 3#!/usr/bin/env python3
import urllib.request
import argparse
import portend
parser = argparse.ArgumentParser()
parser.add_argument('host')
parser.add_argument('port', type=int)
args = parser.parse_args()
portend.occupied(args.host, args.port, timeout=3)
root = 'http://{host}:{port}/'.format(**vars(args))
urllib.request.urlopen(root)
|
<commit_before>#!/usr/bin/env python
import urllib2
import argparse
import portend
parser = argparse.ArgumentParser()
parser.add_argument('host')
parser.add_argument('port', type=int)
args = parser.parse_args()
portend.occupied(args.host, args.port, timeout=3)
root = 'http://{host}:{port}/'.format(**vars(args))
urllib2.urlopen(root)
<commit_msg>Update uptest for Python 3<commit_after>#!/usr/bin/env python3
import urllib.request
import argparse
import portend
parser = argparse.ArgumentParser()
parser.add_argument('host')
parser.add_argument('port', type=int)
args = parser.parse_args()
portend.occupied(args.host, args.port, timeout=3)
root = 'http://{host}:{port}/'.format(**vars(args))
urllib.request.urlopen(root)
|
56e3f571196bdc0ab8882f56ed66192d54ff8cad
|
gmt/clib/tests/test_functions.py
|
gmt/clib/tests/test_functions.py
|
"""
Test the wrappers for the API functions
"""
import os
from .. import create_session, call_module
def test_create_session():
"Test that create_session is called without errors"
session = create_session()
assert session is not None
def test_call_module():
"Run a psbasemap call to see if the module works"
module = 'psbasemap'
args = '-R10/70/-3/8 -JX4i/3i -Ba -P ->tmp.ps'
session = create_session()
call_module(session, module, args)
assert os.path.exists('tmp.ps')
# Not the most ideal test. Just check if no segfaults or exceptions occur.
|
"""
Test the wrappers for the API functions
"""
import os
from .. import create_session, call_module
def test_create_session():
"Test that create_session is called without errors"
session = create_session()
assert session is not None
def test_call_module():
"Run a psbasemap call to see if the module works"
module = 'psbasemap'
args = '-R10/70/-3/8 -JX4i/3i -Ba -P ->tmp.ps'
session = create_session()
call_module(session, module, args)
assert os.path.exists('tmp.ps')
os.remove('tmp.ps')
# Not the most ideal test. Just check if no segfaults or exceptions occur.
|
Remove tmp file created by test
|
Remove tmp file created by test
|
Python
|
bsd-3-clause
|
GenericMappingTools/gmt-python,GenericMappingTools/gmt-python
|
"""
Test the wrappers for the API functions
"""
import os
from .. import create_session, call_module
def test_create_session():
"Test that create_session is called without errors"
session = create_session()
assert session is not None
def test_call_module():
"Run a psbasemap call to see if the module works"
module = 'psbasemap'
args = '-R10/70/-3/8 -JX4i/3i -Ba -P ->tmp.ps'
session = create_session()
call_module(session, module, args)
assert os.path.exists('tmp.ps')
# Not the most ideal test. Just check if no segfaults or exceptions occur.
Remove tmp file created by test
|
"""
Test the wrappers for the API functions
"""
import os
from .. import create_session, call_module
def test_create_session():
"Test that create_session is called without errors"
session = create_session()
assert session is not None
def test_call_module():
"Run a psbasemap call to see if the module works"
module = 'psbasemap'
args = '-R10/70/-3/8 -JX4i/3i -Ba -P ->tmp.ps'
session = create_session()
call_module(session, module, args)
assert os.path.exists('tmp.ps')
os.remove('tmp.ps')
# Not the most ideal test. Just check if no segfaults or exceptions occur.
|
<commit_before>"""
Test the wrappers for the API functions
"""
import os
from .. import create_session, call_module
def test_create_session():
"Test that create_session is called without errors"
session = create_session()
assert session is not None
def test_call_module():
"Run a psbasemap call to see if the module works"
module = 'psbasemap'
args = '-R10/70/-3/8 -JX4i/3i -Ba -P ->tmp.ps'
session = create_session()
call_module(session, module, args)
assert os.path.exists('tmp.ps')
# Not the most ideal test. Just check if no segfaults or exceptions occur.
<commit_msg>Remove tmp file created by test<commit_after>
|
"""
Test the wrappers for the API functions
"""
import os
from .. import create_session, call_module
def test_create_session():
"Test that create_session is called without errors"
session = create_session()
assert session is not None
def test_call_module():
"Run a psbasemap call to see if the module works"
module = 'psbasemap'
args = '-R10/70/-3/8 -JX4i/3i -Ba -P ->tmp.ps'
session = create_session()
call_module(session, module, args)
assert os.path.exists('tmp.ps')
os.remove('tmp.ps')
# Not the most ideal test. Just check if no segfaults or exceptions occur.
|
"""
Test the wrappers for the API functions
"""
import os
from .. import create_session, call_module
def test_create_session():
"Test that create_session is called without errors"
session = create_session()
assert session is not None
def test_call_module():
"Run a psbasemap call to see if the module works"
module = 'psbasemap'
args = '-R10/70/-3/8 -JX4i/3i -Ba -P ->tmp.ps'
session = create_session()
call_module(session, module, args)
assert os.path.exists('tmp.ps')
# Not the most ideal test. Just check if no segfaults or exceptions occur.
Remove tmp file created by test"""
Test the wrappers for the API functions
"""
import os
from .. import create_session, call_module
def test_create_session():
"Test that create_session is called without errors"
session = create_session()
assert session is not None
def test_call_module():
"Run a psbasemap call to see if the module works"
module = 'psbasemap'
args = '-R10/70/-3/8 -JX4i/3i -Ba -P ->tmp.ps'
session = create_session()
call_module(session, module, args)
assert os.path.exists('tmp.ps')
os.remove('tmp.ps')
# Not the most ideal test. Just check if no segfaults or exceptions occur.
|
<commit_before>"""
Test the wrappers for the API functions
"""
import os
from .. import create_session, call_module
def test_create_session():
"Test that create_session is called without errors"
session = create_session()
assert session is not None
def test_call_module():
"Run a psbasemap call to see if the module works"
module = 'psbasemap'
args = '-R10/70/-3/8 -JX4i/3i -Ba -P ->tmp.ps'
session = create_session()
call_module(session, module, args)
assert os.path.exists('tmp.ps')
# Not the most ideal test. Just check if no segfaults or exceptions occur.
<commit_msg>Remove tmp file created by test<commit_after>"""
Test the wrappers for the API functions
"""
import os
from .. import create_session, call_module
def test_create_session():
"Test that create_session is called without errors"
session = create_session()
assert session is not None
def test_call_module():
"Run a psbasemap call to see if the module works"
module = 'psbasemap'
args = '-R10/70/-3/8 -JX4i/3i -Ba -P ->tmp.ps'
session = create_session()
call_module(session, module, args)
assert os.path.exists('tmp.ps')
os.remove('tmp.ps')
# Not the most ideal test. Just check if no segfaults or exceptions occur.
|
f0709f9e90e929daf4a918cc0376f0ba2af0e8f1
|
sandbox/sandbox/urls.py
|
sandbox/sandbox/urls.py
|
from django.conf import settings
from django.contrib import admin
from django.conf.urls.static import static
from django.conf.urls import patterns, include, url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from oscar.app import shop
from stores.app import application as stores_app
from stores.dashboard.app import application as dashboard_app
admin.autodiscover()
js_info_dict = {
'packages': ('stores',),
}
urlpatterns = patterns('',
url(r'^dashboard/stores/', include(dashboard_app.urls)),
url(r'^stores/', include(stores_app.urls)),
url(r'^', include(shop.urls)),
url(r'^admin/', include(admin.site.urls)),
url(r'^i18n/', include('django.conf.urls.i18n')),
url(r'^jsi18n/$', 'django.views.i18n.javascript_catalog', js_info_dict),
url(r'^i18n/', include('django.conf.urls.i18n')),
)
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(
settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
from django.conf import settings
from django.conf.urls.static import static
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from oscar.app import application
from stores.app import application as stores_app
from stores.dashboard.app import application as dashboard_app
admin.autodiscover()
js_info_dict = {
'packages': ('stores',),
}
urlpatterns = [
url(r'^dashboard/stores/', include(dashboard_app.urls)),
url(r'^stores/', include(stores_app.urls)),
url(r'^', include(application.urls)),
url(r'^admin/', include(admin.site.urls)),
url(r'^i18n/', include('django.conf.urls.i18n')),
url(r'^jsi18n/$', 'django.views.i18n.javascript_catalog', js_info_dict),
url(r'^i18n/', include('django.conf.urls.i18n')),
]
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(
settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
Fix import error (oscar.app.shop doesn't exist anymore)
|
Fix import error (oscar.app.shop doesn't exist anymore)
|
Python
|
bsd-3-clause
|
django-oscar/django-oscar-stores,django-oscar/django-oscar-stores,django-oscar/django-oscar-stores
|
from django.conf import settings
from django.contrib import admin
from django.conf.urls.static import static
from django.conf.urls import patterns, include, url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from oscar.app import shop
from stores.app import application as stores_app
from stores.dashboard.app import application as dashboard_app
admin.autodiscover()
js_info_dict = {
'packages': ('stores',),
}
urlpatterns = patterns('',
url(r'^dashboard/stores/', include(dashboard_app.urls)),
url(r'^stores/', include(stores_app.urls)),
url(r'^', include(shop.urls)),
url(r'^admin/', include(admin.site.urls)),
url(r'^i18n/', include('django.conf.urls.i18n')),
url(r'^jsi18n/$', 'django.views.i18n.javascript_catalog', js_info_dict),
url(r'^i18n/', include('django.conf.urls.i18n')),
)
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(
settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
Fix import error (oscar.app.shop doesn't exist anymore)
|
from django.conf import settings
from django.conf.urls.static import static
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from oscar.app import application
from stores.app import application as stores_app
from stores.dashboard.app import application as dashboard_app
admin.autodiscover()
js_info_dict = {
'packages': ('stores',),
}
urlpatterns = [
url(r'^dashboard/stores/', include(dashboard_app.urls)),
url(r'^stores/', include(stores_app.urls)),
url(r'^', include(application.urls)),
url(r'^admin/', include(admin.site.urls)),
url(r'^i18n/', include('django.conf.urls.i18n')),
url(r'^jsi18n/$', 'django.views.i18n.javascript_catalog', js_info_dict),
url(r'^i18n/', include('django.conf.urls.i18n')),
]
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(
settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
<commit_before>from django.conf import settings
from django.contrib import admin
from django.conf.urls.static import static
from django.conf.urls import patterns, include, url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from oscar.app import shop
from stores.app import application as stores_app
from stores.dashboard.app import application as dashboard_app
admin.autodiscover()
js_info_dict = {
'packages': ('stores',),
}
urlpatterns = patterns('',
url(r'^dashboard/stores/', include(dashboard_app.urls)),
url(r'^stores/', include(stores_app.urls)),
url(r'^', include(shop.urls)),
url(r'^admin/', include(admin.site.urls)),
url(r'^i18n/', include('django.conf.urls.i18n')),
url(r'^jsi18n/$', 'django.views.i18n.javascript_catalog', js_info_dict),
url(r'^i18n/', include('django.conf.urls.i18n')),
)
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(
settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
<commit_msg>Fix import error (oscar.app.shop doesn't exist anymore)<commit_after>
|
from django.conf import settings
from django.conf.urls.static import static
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from oscar.app import application
from stores.app import application as stores_app
from stores.dashboard.app import application as dashboard_app
admin.autodiscover()
js_info_dict = {
'packages': ('stores',),
}
urlpatterns = [
url(r'^dashboard/stores/', include(dashboard_app.urls)),
url(r'^stores/', include(stores_app.urls)),
url(r'^', include(application.urls)),
url(r'^admin/', include(admin.site.urls)),
url(r'^i18n/', include('django.conf.urls.i18n')),
url(r'^jsi18n/$', 'django.views.i18n.javascript_catalog', js_info_dict),
url(r'^i18n/', include('django.conf.urls.i18n')),
]
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(
settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
from django.conf import settings
from django.contrib import admin
from django.conf.urls.static import static
from django.conf.urls import patterns, include, url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from oscar.app import shop
from stores.app import application as stores_app
from stores.dashboard.app import application as dashboard_app
admin.autodiscover()
js_info_dict = {
'packages': ('stores',),
}
urlpatterns = patterns('',
url(r'^dashboard/stores/', include(dashboard_app.urls)),
url(r'^stores/', include(stores_app.urls)),
url(r'^', include(shop.urls)),
url(r'^admin/', include(admin.site.urls)),
url(r'^i18n/', include('django.conf.urls.i18n')),
url(r'^jsi18n/$', 'django.views.i18n.javascript_catalog', js_info_dict),
url(r'^i18n/', include('django.conf.urls.i18n')),
)
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(
settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
Fix import error (oscar.app.shop doesn't exist anymore)from django.conf import settings
from django.conf.urls.static import static
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from oscar.app import application
from stores.app import application as stores_app
from stores.dashboard.app import application as dashboard_app
admin.autodiscover()
js_info_dict = {
'packages': ('stores',),
}
urlpatterns = [
url(r'^dashboard/stores/', include(dashboard_app.urls)),
url(r'^stores/', include(stores_app.urls)),
url(r'^', include(application.urls)),
url(r'^admin/', include(admin.site.urls)),
url(r'^i18n/', include('django.conf.urls.i18n')),
url(r'^jsi18n/$', 'django.views.i18n.javascript_catalog', js_info_dict),
url(r'^i18n/', include('django.conf.urls.i18n')),
]
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(
settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
<commit_before>from django.conf import settings
from django.contrib import admin
from django.conf.urls.static import static
from django.conf.urls import patterns, include, url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from oscar.app import shop
from stores.app import application as stores_app
from stores.dashboard.app import application as dashboard_app
admin.autodiscover()
js_info_dict = {
'packages': ('stores',),
}
urlpatterns = patterns('',
url(r'^dashboard/stores/', include(dashboard_app.urls)),
url(r'^stores/', include(stores_app.urls)),
url(r'^', include(shop.urls)),
url(r'^admin/', include(admin.site.urls)),
url(r'^i18n/', include('django.conf.urls.i18n')),
url(r'^jsi18n/$', 'django.views.i18n.javascript_catalog', js_info_dict),
url(r'^i18n/', include('django.conf.urls.i18n')),
)
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(
settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
<commit_msg>Fix import error (oscar.app.shop doesn't exist anymore)<commit_after>from django.conf import settings
from django.conf.urls.static import static
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from oscar.app import application
from stores.app import application as stores_app
from stores.dashboard.app import application as dashboard_app
admin.autodiscover()
js_info_dict = {
'packages': ('stores',),
}
urlpatterns = [
url(r'^dashboard/stores/', include(dashboard_app.urls)),
url(r'^stores/', include(stores_app.urls)),
url(r'^', include(application.urls)),
url(r'^admin/', include(admin.site.urls)),
url(r'^i18n/', include('django.conf.urls.i18n')),
url(r'^jsi18n/$', 'django.views.i18n.javascript_catalog', js_info_dict),
url(r'^i18n/', include('django.conf.urls.i18n')),
]
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(
settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
c4d58ef971b850d3f201903bb6091d159241112f
|
histomicstk/features/__init__.py
|
histomicstk/features/__init__.py
|
from .ReinhardNorm import ReinhardNorm
from .ReinhardSample import ReinhardSample
__all__ = (
'FeatureExtraction'
)
|
__all__ = (
'FeatureExtraction'
)
|
Resolve import issue in color_normalization_test
|
Resolve import issue in color_normalization_test
|
Python
|
apache-2.0
|
DigitalSlideArchive/HistomicsTK,DigitalSlideArchive/HistomicsTK
|
from .ReinhardNorm import ReinhardNorm
from .ReinhardSample import ReinhardSample
__all__ = (
'FeatureExtraction'
)
Resolve import issue in color_normalization_test
|
__all__ = (
'FeatureExtraction'
)
|
<commit_before>from .ReinhardNorm import ReinhardNorm
from .ReinhardSample import ReinhardSample
__all__ = (
'FeatureExtraction'
)
<commit_msg>Resolve import issue in color_normalization_test<commit_after>
|
__all__ = (
'FeatureExtraction'
)
|
from .ReinhardNorm import ReinhardNorm
from .ReinhardSample import ReinhardSample
__all__ = (
'FeatureExtraction'
)
Resolve import issue in color_normalization_test__all__ = (
'FeatureExtraction'
)
|
<commit_before>from .ReinhardNorm import ReinhardNorm
from .ReinhardSample import ReinhardSample
__all__ = (
'FeatureExtraction'
)
<commit_msg>Resolve import issue in color_normalization_test<commit_after>__all__ = (
'FeatureExtraction'
)
|
2156f7c0b045c5c4c3fb035485dd44854776c464
|
app.py
|
app.py
|
from flask import Flask, jsonify, render_template, url_for
app = Flask(__name__)
@app.route("/")
def index():
return render_template('index.html')
if __name__ == '__main__':
app.run(debug=True, threaded=True, host='0.0.0.0')
|
from flask import Flask, jsonify, render_template, url_for, request
app = Flask(__name__)
@app.route("/")
def index():
return render_template('index.html')
@app.route("/login", methods=['POST'])
def login():
params = request.get_json()
print params
return jsonify({'status':True})
@app.route("/signup", methods=['POST'])
def signup():
params = request.get_json()
print params
return jsonify({'status':True})
@app.route("/logout", methods=['POST'])
def logout():
return True
if __name__ == '__main__':
app.run(debug=True, threaded=True, host='0.0.0.0')
|
Add routes for login,signup and logout in backend
|
Add routes for login,signup and logout in backend
|
Python
|
mit
|
sreecodeslayer/todo-flask,sreecodeslayer/todo-flask,sreecodeslayer/todo-flask
|
from flask import Flask, jsonify, render_template, url_for
app = Flask(__name__)
@app.route("/")
def index():
return render_template('index.html')
if __name__ == '__main__':
app.run(debug=True, threaded=True, host='0.0.0.0')Add routes for login,signup and logout in backend
|
from flask import Flask, jsonify, render_template, url_for, request
app = Flask(__name__)
@app.route("/")
def index():
return render_template('index.html')
@app.route("/login", methods=['POST'])
def login():
params = request.get_json()
print params
return jsonify({'status':True})
@app.route("/signup", methods=['POST'])
def signup():
params = request.get_json()
print params
return jsonify({'status':True})
@app.route("/logout", methods=['POST'])
def logout():
return True
if __name__ == '__main__':
app.run(debug=True, threaded=True, host='0.0.0.0')
|
<commit_before>from flask import Flask, jsonify, render_template, url_for
app = Flask(__name__)
@app.route("/")
def index():
return render_template('index.html')
if __name__ == '__main__':
app.run(debug=True, threaded=True, host='0.0.0.0')<commit_msg>Add routes for login,signup and logout in backend<commit_after>
|
from flask import Flask, jsonify, render_template, url_for, request
app = Flask(__name__)
@app.route("/")
def index():
return render_template('index.html')
@app.route("/login", methods=['POST'])
def login():
params = request.get_json()
print params
return jsonify({'status':True})
@app.route("/signup", methods=['POST'])
def signup():
params = request.get_json()
print params
return jsonify({'status':True})
@app.route("/logout", methods=['POST'])
def logout():
return True
if __name__ == '__main__':
app.run(debug=True, threaded=True, host='0.0.0.0')
|
from flask import Flask, jsonify, render_template, url_for
app = Flask(__name__)
@app.route("/")
def index():
return render_template('index.html')
if __name__ == '__main__':
app.run(debug=True, threaded=True, host='0.0.0.0')Add routes for login,signup and logout in backendfrom flask import Flask, jsonify, render_template, url_for, request
app = Flask(__name__)
@app.route("/")
def index():
return render_template('index.html')
@app.route("/login", methods=['POST'])
def login():
params = request.get_json()
print params
return jsonify({'status':True})
@app.route("/signup", methods=['POST'])
def signup():
params = request.get_json()
print params
return jsonify({'status':True})
@app.route("/logout", methods=['POST'])
def logout():
return True
if __name__ == '__main__':
app.run(debug=True, threaded=True, host='0.0.0.0')
|
<commit_before>from flask import Flask, jsonify, render_template, url_for
app = Flask(__name__)
@app.route("/")
def index():
return render_template('index.html')
if __name__ == '__main__':
app.run(debug=True, threaded=True, host='0.0.0.0')<commit_msg>Add routes for login,signup and logout in backend<commit_after>from flask import Flask, jsonify, render_template, url_for, request
app = Flask(__name__)
@app.route("/")
def index():
return render_template('index.html')
@app.route("/login", methods=['POST'])
def login():
params = request.get_json()
print params
return jsonify({'status':True})
@app.route("/signup", methods=['POST'])
def signup():
params = request.get_json()
print params
return jsonify({'status':True})
@app.route("/logout", methods=['POST'])
def logout():
return True
if __name__ == '__main__':
app.run(debug=True, threaded=True, host='0.0.0.0')
|
c4278b404b313c4fa5fad67a5703b7368d1c4428
|
fileapi/tests/test_qunit.py
|
fileapi/tests/test_qunit.py
|
import os
from django.conf import settings
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.test.utils import override_settings
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.ui import WebDriverWait
@override_settings(STATICFILES_DIRS=(os.path.join(os.path.dirname(__file__), 'static'), ))
class QunitTests(StaticLiveServerTestCase):
"""Iteractive tests with selenium."""
@classmethod
def setUpClass(cls):
cls.browser = webdriver.PhantomJS()
super().setUpClass()
@classmethod
def tearDownClass(cls):
cls.browser.quit()
super().tearDownClass()
def test_qunit(self):
"""Load the QUnit tests and check for failures."""
self.browser.get(self.live_server_url + settings.STATIC_URL + 'index.html')
results = WebDriverWait(self.browser, 5).until(
expected_conditions.visibility_of_element_located((By.ID, 'qunit-testresult')))
total = int(results.find_element_by_class_name('total').text)
failed = int(results.find_element_by_class_name('failed').text)
self.assertTrue(total and not failed, results.text)
|
import os
from django.conf import settings
from django.contrib.staticfiles import finders, storage
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.test.utils import override_settings
from django.utils.functional import empty
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.ui import WebDriverWait
@override_settings(STATICFILES_DIRS=(os.path.join(os.path.dirname(__file__), 'static'), ))
class QunitTests(StaticLiveServerTestCase):
"""Iteractive tests with selenium."""
@classmethod
def setUpClass(cls):
cls.browser = webdriver.PhantomJS()
super().setUpClass()
@classmethod
def tearDownClass(cls):
cls.browser.quit()
super().tearDownClass()
def setUp(self):
# Clear the cache versions of the staticfiles finders and storage
# See https://code.djangoproject.com/ticket/24197
storage.staticfiles_storage._wrapped = empty
finders.get_finder.cache_clear()
def test_qunit(self):
"""Load the QUnit tests and check for failures."""
self.browser.get(self.live_server_url + settings.STATIC_URL + 'index.html')
results = WebDriverWait(self.browser, 5).until(
expected_conditions.visibility_of_element_located((By.ID, 'qunit-testresult')))
total = int(results.find_element_by_class_name('total').text)
failed = int(results.find_element_by_class_name('failed').text)
self.assertTrue(total and not failed, results.text)
|
Clear global state/caching handled by Django so the test passes when run in the full suite.
|
Clear global state/caching handled by Django so the test passes when run in the full suite.
|
Python
|
bsd-2-clause
|
mlavin/fileapi,mlavin/fileapi,mlavin/fileapi
|
import os
from django.conf import settings
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.test.utils import override_settings
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.ui import WebDriverWait
@override_settings(STATICFILES_DIRS=(os.path.join(os.path.dirname(__file__), 'static'), ))
class QunitTests(StaticLiveServerTestCase):
"""Iteractive tests with selenium."""
@classmethod
def setUpClass(cls):
cls.browser = webdriver.PhantomJS()
super().setUpClass()
@classmethod
def tearDownClass(cls):
cls.browser.quit()
super().tearDownClass()
def test_qunit(self):
"""Load the QUnit tests and check for failures."""
self.browser.get(self.live_server_url + settings.STATIC_URL + 'index.html')
results = WebDriverWait(self.browser, 5).until(
expected_conditions.visibility_of_element_located((By.ID, 'qunit-testresult')))
total = int(results.find_element_by_class_name('total').text)
failed = int(results.find_element_by_class_name('failed').text)
self.assertTrue(total and not failed, results.text)
Clear global state/caching handled by Django so the test passes when run in the full suite.
|
import os
from django.conf import settings
from django.contrib.staticfiles import finders, storage
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.test.utils import override_settings
from django.utils.functional import empty
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.ui import WebDriverWait
@override_settings(STATICFILES_DIRS=(os.path.join(os.path.dirname(__file__), 'static'), ))
class QunitTests(StaticLiveServerTestCase):
"""Iteractive tests with selenium."""
@classmethod
def setUpClass(cls):
cls.browser = webdriver.PhantomJS()
super().setUpClass()
@classmethod
def tearDownClass(cls):
cls.browser.quit()
super().tearDownClass()
def setUp(self):
# Clear the cache versions of the staticfiles finders and storage
# See https://code.djangoproject.com/ticket/24197
storage.staticfiles_storage._wrapped = empty
finders.get_finder.cache_clear()
def test_qunit(self):
"""Load the QUnit tests and check for failures."""
self.browser.get(self.live_server_url + settings.STATIC_URL + 'index.html')
results = WebDriverWait(self.browser, 5).until(
expected_conditions.visibility_of_element_located((By.ID, 'qunit-testresult')))
total = int(results.find_element_by_class_name('total').text)
failed = int(results.find_element_by_class_name('failed').text)
self.assertTrue(total and not failed, results.text)
|
<commit_before>import os
from django.conf import settings
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.test.utils import override_settings
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.ui import WebDriverWait
@override_settings(STATICFILES_DIRS=(os.path.join(os.path.dirname(__file__), 'static'), ))
class QunitTests(StaticLiveServerTestCase):
"""Iteractive tests with selenium."""
@classmethod
def setUpClass(cls):
cls.browser = webdriver.PhantomJS()
super().setUpClass()
@classmethod
def tearDownClass(cls):
cls.browser.quit()
super().tearDownClass()
def test_qunit(self):
"""Load the QUnit tests and check for failures."""
self.browser.get(self.live_server_url + settings.STATIC_URL + 'index.html')
results = WebDriverWait(self.browser, 5).until(
expected_conditions.visibility_of_element_located((By.ID, 'qunit-testresult')))
total = int(results.find_element_by_class_name('total').text)
failed = int(results.find_element_by_class_name('failed').text)
self.assertTrue(total and not failed, results.text)
<commit_msg>Clear global state/caching handled by Django so the test passes when run in the full suite.<commit_after>
|
import os
from django.conf import settings
from django.contrib.staticfiles import finders, storage
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.test.utils import override_settings
from django.utils.functional import empty
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.ui import WebDriverWait
@override_settings(STATICFILES_DIRS=(os.path.join(os.path.dirname(__file__), 'static'), ))
class QunitTests(StaticLiveServerTestCase):
"""Iteractive tests with selenium."""
@classmethod
def setUpClass(cls):
cls.browser = webdriver.PhantomJS()
super().setUpClass()
@classmethod
def tearDownClass(cls):
cls.browser.quit()
super().tearDownClass()
def setUp(self):
# Clear the cache versions of the staticfiles finders and storage
# See https://code.djangoproject.com/ticket/24197
storage.staticfiles_storage._wrapped = empty
finders.get_finder.cache_clear()
def test_qunit(self):
"""Load the QUnit tests and check for failures."""
self.browser.get(self.live_server_url + settings.STATIC_URL + 'index.html')
results = WebDriverWait(self.browser, 5).until(
expected_conditions.visibility_of_element_located((By.ID, 'qunit-testresult')))
total = int(results.find_element_by_class_name('total').text)
failed = int(results.find_element_by_class_name('failed').text)
self.assertTrue(total and not failed, results.text)
|
import os
from django.conf import settings
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.test.utils import override_settings
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.ui import WebDriverWait
@override_settings(STATICFILES_DIRS=(os.path.join(os.path.dirname(__file__), 'static'), ))
class QunitTests(StaticLiveServerTestCase):
"""Iteractive tests with selenium."""
@classmethod
def setUpClass(cls):
cls.browser = webdriver.PhantomJS()
super().setUpClass()
@classmethod
def tearDownClass(cls):
cls.browser.quit()
super().tearDownClass()
def test_qunit(self):
"""Load the QUnit tests and check for failures."""
self.browser.get(self.live_server_url + settings.STATIC_URL + 'index.html')
results = WebDriverWait(self.browser, 5).until(
expected_conditions.visibility_of_element_located((By.ID, 'qunit-testresult')))
total = int(results.find_element_by_class_name('total').text)
failed = int(results.find_element_by_class_name('failed').text)
self.assertTrue(total and not failed, results.text)
Clear global state/caching handled by Django so the test passes when run in the full suite.import os
from django.conf import settings
from django.contrib.staticfiles import finders, storage
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.test.utils import override_settings
from django.utils.functional import empty
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.ui import WebDriverWait
@override_settings(STATICFILES_DIRS=(os.path.join(os.path.dirname(__file__), 'static'), ))
class QunitTests(StaticLiveServerTestCase):
"""Iteractive tests with selenium."""
@classmethod
def setUpClass(cls):
cls.browser = webdriver.PhantomJS()
super().setUpClass()
@classmethod
def tearDownClass(cls):
cls.browser.quit()
super().tearDownClass()
def setUp(self):
# Clear the cache versions of the staticfiles finders and storage
# See https://code.djangoproject.com/ticket/24197
storage.staticfiles_storage._wrapped = empty
finders.get_finder.cache_clear()
def test_qunit(self):
"""Load the QUnit tests and check for failures."""
self.browser.get(self.live_server_url + settings.STATIC_URL + 'index.html')
results = WebDriverWait(self.browser, 5).until(
expected_conditions.visibility_of_element_located((By.ID, 'qunit-testresult')))
total = int(results.find_element_by_class_name('total').text)
failed = int(results.find_element_by_class_name('failed').text)
self.assertTrue(total and not failed, results.text)
|
<commit_before>import os
from django.conf import settings
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.test.utils import override_settings
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.ui import WebDriverWait
@override_settings(STATICFILES_DIRS=(os.path.join(os.path.dirname(__file__), 'static'), ))
class QunitTests(StaticLiveServerTestCase):
"""Iteractive tests with selenium."""
@classmethod
def setUpClass(cls):
cls.browser = webdriver.PhantomJS()
super().setUpClass()
@classmethod
def tearDownClass(cls):
cls.browser.quit()
super().tearDownClass()
def test_qunit(self):
"""Load the QUnit tests and check for failures."""
self.browser.get(self.live_server_url + settings.STATIC_URL + 'index.html')
results = WebDriverWait(self.browser, 5).until(
expected_conditions.visibility_of_element_located((By.ID, 'qunit-testresult')))
total = int(results.find_element_by_class_name('total').text)
failed = int(results.find_element_by_class_name('failed').text)
self.assertTrue(total and not failed, results.text)
<commit_msg>Clear global state/caching handled by Django so the test passes when run in the full suite.<commit_after>import os
from django.conf import settings
from django.contrib.staticfiles import finders, storage
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.test.utils import override_settings
from django.utils.functional import empty
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.ui import WebDriverWait
@override_settings(STATICFILES_DIRS=(os.path.join(os.path.dirname(__file__), 'static'), ))
class QunitTests(StaticLiveServerTestCase):
"""Iteractive tests with selenium."""
@classmethod
def setUpClass(cls):
cls.browser = webdriver.PhantomJS()
super().setUpClass()
@classmethod
def tearDownClass(cls):
cls.browser.quit()
super().tearDownClass()
def setUp(self):
# Clear the cache versions of the staticfiles finders and storage
# See https://code.djangoproject.com/ticket/24197
storage.staticfiles_storage._wrapped = empty
finders.get_finder.cache_clear()
def test_qunit(self):
"""Load the QUnit tests and check for failures."""
self.browser.get(self.live_server_url + settings.STATIC_URL + 'index.html')
results = WebDriverWait(self.browser, 5).until(
expected_conditions.visibility_of_element_located((By.ID, 'qunit-testresult')))
total = int(results.find_element_by_class_name('total').text)
failed = int(results.find_element_by_class_name('failed').text)
self.assertTrue(total and not failed, results.text)
|
4bf0c58f8c8349239352d6153899fd7858df3436
|
faker/compat.py
|
faker/compat.py
|
# Python 2/3 compat
import sys
if sys.version < '3':
import codecs
def u(x):
return codecs.unicode_escape_decode(x)[0]
else:
def u(x):
return x
|
# Python 2/3 compat
import sys
if sys.version_info < (3, 0):
import codecs
def u(x):
return codecs.unicode_escape_decode(x)[0]
else:
def u(x):
return x
|
Change Python version check to use sys.version_info
|
Change Python version check to use sys.version_info
|
Python
|
mit
|
deepthawtz/faker
|
# Python 2/3 compat
import sys
if sys.version < '3':
import codecs
def u(x):
return codecs.unicode_escape_decode(x)[0]
else:
def u(x):
return x
Change Python version check to use sys.version_info
|
# Python 2/3 compat
import sys
if sys.version_info < (3, 0):
import codecs
def u(x):
return codecs.unicode_escape_decode(x)[0]
else:
def u(x):
return x
|
<commit_before># Python 2/3 compat
import sys
if sys.version < '3':
import codecs
def u(x):
return codecs.unicode_escape_decode(x)[0]
else:
def u(x):
return x
<commit_msg>Change Python version check to use sys.version_info<commit_after>
|
# Python 2/3 compat
import sys
if sys.version_info < (3, 0):
import codecs
def u(x):
return codecs.unicode_escape_decode(x)[0]
else:
def u(x):
return x
|
# Python 2/3 compat
import sys
if sys.version < '3':
import codecs
def u(x):
return codecs.unicode_escape_decode(x)[0]
else:
def u(x):
return x
Change Python version check to use sys.version_info# Python 2/3 compat
import sys
if sys.version_info < (3, 0):
import codecs
def u(x):
return codecs.unicode_escape_decode(x)[0]
else:
def u(x):
return x
|
<commit_before># Python 2/3 compat
import sys
if sys.version < '3':
import codecs
def u(x):
return codecs.unicode_escape_decode(x)[0]
else:
def u(x):
return x
<commit_msg>Change Python version check to use sys.version_info<commit_after># Python 2/3 compat
import sys
if sys.version_info < (3, 0):
import codecs
def u(x):
return codecs.unicode_escape_decode(x)[0]
else:
def u(x):
return x
|
26b978206d37c69b053fd2f66fdfd1a3face2c31
|
indico/modules/networks/views.py
|
indico/modules/networks/views.py
|
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from MaKaC.webinterface.pages.admins import WPAdminsBase
from MaKaC.webinterface.pages.base import WPJinjaMixin
class WPNetworksAdmin(WPJinjaMixin, WPAdminsBase):
template_prefix = 'networks/'
sidemenu_option = 'networks'
|
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from MaKaC.webinterface.pages.admins import WPAdminsBase
from MaKaC.webinterface.pages.base import WPJinjaMixin
class WPNetworksAdmin(WPJinjaMixin, WPAdminsBase):
template_prefix = 'networks/'
sidemenu_option = 'ip_networks'
|
Fix highlighting of "IP Networks" menu entry
|
Fix highlighting of "IP Networks" menu entry
|
Python
|
mit
|
OmeGak/indico,mic4ael/indico,pferreir/indico,pferreir/indico,mic4ael/indico,DirkHoffmann/indico,mvidalgarcia/indico,indico/indico,mvidalgarcia/indico,ThiefMaster/indico,mic4ael/indico,mvidalgarcia/indico,indico/indico,mic4ael/indico,indico/indico,ThiefMaster/indico,DirkHoffmann/indico,OmeGak/indico,OmeGak/indico,DirkHoffmann/indico,mvidalgarcia/indico,pferreir/indico,ThiefMaster/indico,ThiefMaster/indico,pferreir/indico,OmeGak/indico,DirkHoffmann/indico,indico/indico
|
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from MaKaC.webinterface.pages.admins import WPAdminsBase
from MaKaC.webinterface.pages.base import WPJinjaMixin
class WPNetworksAdmin(WPJinjaMixin, WPAdminsBase):
template_prefix = 'networks/'
sidemenu_option = 'networks'
Fix highlighting of "IP Networks" menu entry
|
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from MaKaC.webinterface.pages.admins import WPAdminsBase
from MaKaC.webinterface.pages.base import WPJinjaMixin
class WPNetworksAdmin(WPJinjaMixin, WPAdminsBase):
template_prefix = 'networks/'
sidemenu_option = 'ip_networks'
|
<commit_before># This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from MaKaC.webinterface.pages.admins import WPAdminsBase
from MaKaC.webinterface.pages.base import WPJinjaMixin
class WPNetworksAdmin(WPJinjaMixin, WPAdminsBase):
template_prefix = 'networks/'
sidemenu_option = 'networks'
<commit_msg>Fix highlighting of "IP Networks" menu entry<commit_after>
|
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from MaKaC.webinterface.pages.admins import WPAdminsBase
from MaKaC.webinterface.pages.base import WPJinjaMixin
class WPNetworksAdmin(WPJinjaMixin, WPAdminsBase):
template_prefix = 'networks/'
sidemenu_option = 'ip_networks'
|
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from MaKaC.webinterface.pages.admins import WPAdminsBase
from MaKaC.webinterface.pages.base import WPJinjaMixin
class WPNetworksAdmin(WPJinjaMixin, WPAdminsBase):
template_prefix = 'networks/'
sidemenu_option = 'networks'
Fix highlighting of "IP Networks" menu entry# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from MaKaC.webinterface.pages.admins import WPAdminsBase
from MaKaC.webinterface.pages.base import WPJinjaMixin
class WPNetworksAdmin(WPJinjaMixin, WPAdminsBase):
template_prefix = 'networks/'
sidemenu_option = 'ip_networks'
|
<commit_before># This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from MaKaC.webinterface.pages.admins import WPAdminsBase
from MaKaC.webinterface.pages.base import WPJinjaMixin
class WPNetworksAdmin(WPJinjaMixin, WPAdminsBase):
template_prefix = 'networks/'
sidemenu_option = 'networks'
<commit_msg>Fix highlighting of "IP Networks" menu entry<commit_after># This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from MaKaC.webinterface.pages.admins import WPAdminsBase
from MaKaC.webinterface.pages.base import WPJinjaMixin
class WPNetworksAdmin(WPJinjaMixin, WPAdminsBase):
template_prefix = 'networks/'
sidemenu_option = 'ip_networks'
|
4720809f31c559e14ca69f6205766265d1095f44
|
vk/__init__.py
|
vk/__init__.py
|
# coding=utf-8
from .auth import *
from .groups import *
from .messages import *
from .users import *
|
# coding=utf-8
from .auth import *
from .error import VKError
from .groups import *
from .messages import *
from .users import *
|
Use `vk.VKError` instead of `vk.error.VKError`
|
Use `vk.VKError` instead of `vk.error.VKError`
|
Python
|
mit
|
sgaynetdinov/py-vkontakte
|
# coding=utf-8
from .auth import *
from .groups import *
from .messages import *
from .users import *
Use `vk.VKError` instead of `vk.error.VKError`
|
# coding=utf-8
from .auth import *
from .error import VKError
from .groups import *
from .messages import *
from .users import *
|
<commit_before># coding=utf-8
from .auth import *
from .groups import *
from .messages import *
from .users import *
<commit_msg>Use `vk.VKError` instead of `vk.error.VKError`<commit_after>
|
# coding=utf-8
from .auth import *
from .error import VKError
from .groups import *
from .messages import *
from .users import *
|
# coding=utf-8
from .auth import *
from .groups import *
from .messages import *
from .users import *
Use `vk.VKError` instead of `vk.error.VKError`# coding=utf-8
from .auth import *
from .error import VKError
from .groups import *
from .messages import *
from .users import *
|
<commit_before># coding=utf-8
from .auth import *
from .groups import *
from .messages import *
from .users import *
<commit_msg>Use `vk.VKError` instead of `vk.error.VKError`<commit_after># coding=utf-8
from .auth import *
from .error import VKError
from .groups import *
from .messages import *
from .users import *
|
e57bd1eeb551cf05a220b18ec1e3fafa311d9d78
|
MetaTools/buildChangeLog.py
|
MetaTools/buildChangeLog.py
|
#! /usr/bin/env python
import os, sys
fontToolsDir = os.path.dirname(os.path.dirname(os.path.normpath(
os.path.join(os.getcwd(), sys.argv[0]))))
os.chdir(fontToolsDir)
os.system("svn2cl -o Doc/ChangeLog https://fonttools.svn.sourceforge.net/svnroot/fonttools/trunk")
print "done."
|
#! /usr/bin/env python
import os, sys
fontToolsDir = os.path.dirname(os.path.dirname(os.path.normpath(
os.path.join(os.getcwd(), sys.argv[0]))))
os.chdir(fontToolsDir)
os.system("svn2cl -o Doc/ChangeLog https://svn.code.sf.net/p/fonttools/code/trunk")
print "done."
|
Fix the location of the SVN repository
|
Fix the location of the SVN repository
git-svn-id: 05b73559aeb8bace4cf49b5ea964569f1305eff8@618 4cde692c-a291-49d1-8350-778aa11640f8
|
Python
|
mit
|
fonttools/fonttools,googlefonts/fonttools
|
#! /usr/bin/env python
import os, sys
fontToolsDir = os.path.dirname(os.path.dirname(os.path.normpath(
os.path.join(os.getcwd(), sys.argv[0]))))
os.chdir(fontToolsDir)
os.system("svn2cl -o Doc/ChangeLog https://fonttools.svn.sourceforge.net/svnroot/fonttools/trunk")
print "done."
Fix the location of the SVN repository
git-svn-id: 05b73559aeb8bace4cf49b5ea964569f1305eff8@618 4cde692c-a291-49d1-8350-778aa11640f8
|
#! /usr/bin/env python
import os, sys
fontToolsDir = os.path.dirname(os.path.dirname(os.path.normpath(
os.path.join(os.getcwd(), sys.argv[0]))))
os.chdir(fontToolsDir)
os.system("svn2cl -o Doc/ChangeLog https://svn.code.sf.net/p/fonttools/code/trunk")
print "done."
|
<commit_before>#! /usr/bin/env python
import os, sys
fontToolsDir = os.path.dirname(os.path.dirname(os.path.normpath(
os.path.join(os.getcwd(), sys.argv[0]))))
os.chdir(fontToolsDir)
os.system("svn2cl -o Doc/ChangeLog https://fonttools.svn.sourceforge.net/svnroot/fonttools/trunk")
print "done."
<commit_msg>Fix the location of the SVN repository
git-svn-id: 05b73559aeb8bace4cf49b5ea964569f1305eff8@618 4cde692c-a291-49d1-8350-778aa11640f8<commit_after>
|
#! /usr/bin/env python
import os, sys
fontToolsDir = os.path.dirname(os.path.dirname(os.path.normpath(
os.path.join(os.getcwd(), sys.argv[0]))))
os.chdir(fontToolsDir)
os.system("svn2cl -o Doc/ChangeLog https://svn.code.sf.net/p/fonttools/code/trunk")
print "done."
|
#! /usr/bin/env python
import os, sys
fontToolsDir = os.path.dirname(os.path.dirname(os.path.normpath(
os.path.join(os.getcwd(), sys.argv[0]))))
os.chdir(fontToolsDir)
os.system("svn2cl -o Doc/ChangeLog https://fonttools.svn.sourceforge.net/svnroot/fonttools/trunk")
print "done."
Fix the location of the SVN repository
git-svn-id: 05b73559aeb8bace4cf49b5ea964569f1305eff8@618 4cde692c-a291-49d1-8350-778aa11640f8#! /usr/bin/env python
import os, sys
fontToolsDir = os.path.dirname(os.path.dirname(os.path.normpath(
os.path.join(os.getcwd(), sys.argv[0]))))
os.chdir(fontToolsDir)
os.system("svn2cl -o Doc/ChangeLog https://svn.code.sf.net/p/fonttools/code/trunk")
print "done."
|
<commit_before>#! /usr/bin/env python
import os, sys
fontToolsDir = os.path.dirname(os.path.dirname(os.path.normpath(
os.path.join(os.getcwd(), sys.argv[0]))))
os.chdir(fontToolsDir)
os.system("svn2cl -o Doc/ChangeLog https://fonttools.svn.sourceforge.net/svnroot/fonttools/trunk")
print "done."
<commit_msg>Fix the location of the SVN repository
git-svn-id: 05b73559aeb8bace4cf49b5ea964569f1305eff8@618 4cde692c-a291-49d1-8350-778aa11640f8<commit_after>#! /usr/bin/env python
import os, sys
fontToolsDir = os.path.dirname(os.path.dirname(os.path.normpath(
os.path.join(os.getcwd(), sys.argv[0]))))
os.chdir(fontToolsDir)
os.system("svn2cl -o Doc/ChangeLog https://svn.code.sf.net/p/fonttools/code/trunk")
print "done."
|
1e8b0c6d2255c38497db94a8f6e11ee7cfd2a3ec
|
tests/speed.py
|
tests/speed.py
|
#!/usr/bin/env python
from fastcomp import compare
import random
import string
def randomstr(minlen=5, maxlen=7):
charset = '01'
length = random.randint(minlen, maxlen)
return ''.join(random.choice(charset) for i in range(length))
if __name__ == "__main__":
import timeit
# Set up conditions
setup = """
from __main__ import compare, randomstr
cases = [(randomstr(), randomstr()) for x in range(1000)]"""
main = "for case in cases: compare(*case)"
loops = 100
# Run timeit
timer = timeit.Timer(main, setup=setup)
result = timer.repeat(number=loops)
# Result
best = round(min(result)*1000/loops, 2)
print('{} loops, best of 3: {} msec per loop'.format(loops, best))
|
#!/usr/bin/env python
from fastcomp import compare
import random
import string
def randomstr(minlen=5, maxlen=8):
charset = '01'
length = random.randint(minlen, maxlen)
return ''.join(random.choice(charset) for i in range(length))
if __name__ == "__main__":
import timeit
# Set up conditions
setup = """
from __main__ import compare, randomstr
cases = [(randomstr(), randomstr()) for x in range(1000)]"""
main = "for case in cases: compare(*case)"
loops = 100
# Run timeit
timer = timeit.Timer(main, setup=setup)
result = timer.repeat(number=loops)
# Result
best = round(min(result)*1000/loops, 2)
print('{} loops, best of 3: {} msec per loop'.format(loops, best))
|
Increment the max length of random strings.
|
Increment the max length of random strings.
|
Python
|
mit
|
fujimotos/fastcomp
|
#!/usr/bin/env python
from fastcomp import compare
import random
import string
def randomstr(minlen=5, maxlen=7):
charset = '01'
length = random.randint(minlen, maxlen)
return ''.join(random.choice(charset) for i in range(length))
if __name__ == "__main__":
import timeit
# Set up conditions
setup = """
from __main__ import compare, randomstr
cases = [(randomstr(), randomstr()) for x in range(1000)]"""
main = "for case in cases: compare(*case)"
loops = 100
# Run timeit
timer = timeit.Timer(main, setup=setup)
result = timer.repeat(number=loops)
# Result
best = round(min(result)*1000/loops, 2)
print('{} loops, best of 3: {} msec per loop'.format(loops, best))
Increment the max length of random strings.
|
#!/usr/bin/env python
from fastcomp import compare
import random
import string
def randomstr(minlen=5, maxlen=8):
charset = '01'
length = random.randint(minlen, maxlen)
return ''.join(random.choice(charset) for i in range(length))
if __name__ == "__main__":
import timeit
# Set up conditions
setup = """
from __main__ import compare, randomstr
cases = [(randomstr(), randomstr()) for x in range(1000)]"""
main = "for case in cases: compare(*case)"
loops = 100
# Run timeit
timer = timeit.Timer(main, setup=setup)
result = timer.repeat(number=loops)
# Result
best = round(min(result)*1000/loops, 2)
print('{} loops, best of 3: {} msec per loop'.format(loops, best))
|
<commit_before>#!/usr/bin/env python
from fastcomp import compare
import random
import string
def randomstr(minlen=5, maxlen=7):
charset = '01'
length = random.randint(minlen, maxlen)
return ''.join(random.choice(charset) for i in range(length))
if __name__ == "__main__":
import timeit
# Set up conditions
setup = """
from __main__ import compare, randomstr
cases = [(randomstr(), randomstr()) for x in range(1000)]"""
main = "for case in cases: compare(*case)"
loops = 100
# Run timeit
timer = timeit.Timer(main, setup=setup)
result = timer.repeat(number=loops)
# Result
best = round(min(result)*1000/loops, 2)
print('{} loops, best of 3: {} msec per loop'.format(loops, best))
<commit_msg>Increment the max length of random strings.<commit_after>
|
#!/usr/bin/env python
from fastcomp import compare
import random
import string
def randomstr(minlen=5, maxlen=8):
charset = '01'
length = random.randint(minlen, maxlen)
return ''.join(random.choice(charset) for i in range(length))
if __name__ == "__main__":
import timeit
# Set up conditions
setup = """
from __main__ import compare, randomstr
cases = [(randomstr(), randomstr()) for x in range(1000)]"""
main = "for case in cases: compare(*case)"
loops = 100
# Run timeit
timer = timeit.Timer(main, setup=setup)
result = timer.repeat(number=loops)
# Result
best = round(min(result)*1000/loops, 2)
print('{} loops, best of 3: {} msec per loop'.format(loops, best))
|
#!/usr/bin/env python
from fastcomp import compare
import random
import string
def randomstr(minlen=5, maxlen=7):
charset = '01'
length = random.randint(minlen, maxlen)
return ''.join(random.choice(charset) for i in range(length))
if __name__ == "__main__":
import timeit
# Set up conditions
setup = """
from __main__ import compare, randomstr
cases = [(randomstr(), randomstr()) for x in range(1000)]"""
main = "for case in cases: compare(*case)"
loops = 100
# Run timeit
timer = timeit.Timer(main, setup=setup)
result = timer.repeat(number=loops)
# Result
best = round(min(result)*1000/loops, 2)
print('{} loops, best of 3: {} msec per loop'.format(loops, best))
Increment the max length of random strings.#!/usr/bin/env python
from fastcomp import compare
import random
import string
def randomstr(minlen=5, maxlen=8):
charset = '01'
length = random.randint(minlen, maxlen)
return ''.join(random.choice(charset) for i in range(length))
if __name__ == "__main__":
import timeit
# Set up conditions
setup = """
from __main__ import compare, randomstr
cases = [(randomstr(), randomstr()) for x in range(1000)]"""
main = "for case in cases: compare(*case)"
loops = 100
# Run timeit
timer = timeit.Timer(main, setup=setup)
result = timer.repeat(number=loops)
# Result
best = round(min(result)*1000/loops, 2)
print('{} loops, best of 3: {} msec per loop'.format(loops, best))
|
<commit_before>#!/usr/bin/env python
from fastcomp import compare
import random
import string
def randomstr(minlen=5, maxlen=7):
charset = '01'
length = random.randint(minlen, maxlen)
return ''.join(random.choice(charset) for i in range(length))
if __name__ == "__main__":
import timeit
# Set up conditions
setup = """
from __main__ import compare, randomstr
cases = [(randomstr(), randomstr()) for x in range(1000)]"""
main = "for case in cases: compare(*case)"
loops = 100
# Run timeit
timer = timeit.Timer(main, setup=setup)
result = timer.repeat(number=loops)
# Result
best = round(min(result)*1000/loops, 2)
print('{} loops, best of 3: {} msec per loop'.format(loops, best))
<commit_msg>Increment the max length of random strings.<commit_after>#!/usr/bin/env python
from fastcomp import compare
import random
import string
def randomstr(minlen=5, maxlen=8):
charset = '01'
length = random.randint(minlen, maxlen)
return ''.join(random.choice(charset) for i in range(length))
if __name__ == "__main__":
import timeit
# Set up conditions
setup = """
from __main__ import compare, randomstr
cases = [(randomstr(), randomstr()) for x in range(1000)]"""
main = "for case in cases: compare(*case)"
loops = 100
# Run timeit
timer = timeit.Timer(main, setup=setup)
result = timer.repeat(number=loops)
# Result
best = round(min(result)*1000/loops, 2)
print('{} loops, best of 3: {} msec per loop'.format(loops, best))
|
4c52e331052a2b5f11ce56b0a6c1b6e3d2f18cdf
|
partner_communication_switzerland/controllers/b2s_image.py
|
partner_communication_switzerland/controllers/b2s_image.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
import logging
from odoo import http
from odoo.http import request
from odoo.addons.sbc_compassion.controllers.b2s_image import RestController
_logger = logging.getLogger(__name__)
class B2sControllerSwitzerland(RestController):
@http.route('/b2s_image', type='http', auth='public', methods=['GET'])
def handler_b2s_image(self, image_id=None):
"""
URL for downloading a correspondence PDF
(or ZIP when multiple letters are attached).
Find the associated communication and mark all related letters
as opened and read.
:param image_id: uuid of the correspondence holding the data.
:return: file data for user
"""
res = super(B2sControllerSwitzerland, self).handler_b2s_image(image_id)
correspondence_obj = request.env['correspondence'].sudo()
correspondence = correspondence_obj.search([('uuid', '=', image_id)])
if correspondence.communication_id:
all_letters = correspondence.communication_id.get_objects()
all_letters.write({
'letter_delivered': True,
'email_read': True
})
return res
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
import logging
from odoo import http
from odoo.http import request
from odoo.addons.sbc_compassion.controllers.b2s_image import RestController
_logger = logging.getLogger(__name__)
class B2sControllerSwitzerland(RestController):
@http.route('/b2s_image', type='http', auth='public', methods=['GET'])
def handler_b2s_image(self, id=None):
"""
URL for downloading a correspondence PDF
(or ZIP when multiple letters are attached).
Find the associated communication and mark all related letters
as opened and read.
:param image_id: uuid of the correspondence holding the data.
:return: file data for user
"""
res = super(B2sControllerSwitzerland, self).handler_b2s_image(id)
correspondence_obj = request.env['correspondence'].sudo()
correspondence = correspondence_obj.search([('uuid', '=', id)])
if correspondence.communication_id:
all_letters = correspondence.communication_id.get_objects()
all_letters.write({
'letter_delivered': True,
'email_read': True
})
return res
|
Revert bad renaming of route url parameter
|
Revert bad renaming of route url parameter
|
Python
|
agpl-3.0
|
CompassionCH/compassion-switzerland,eicher31/compassion-switzerland,ecino/compassion-switzerland,eicher31/compassion-switzerland,ecino/compassion-switzerland,CompassionCH/compassion-switzerland,CompassionCH/compassion-switzerland,ecino/compassion-switzerland,eicher31/compassion-switzerland
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
import logging
from odoo import http
from odoo.http import request
from odoo.addons.sbc_compassion.controllers.b2s_image import RestController
_logger = logging.getLogger(__name__)
class B2sControllerSwitzerland(RestController):
@http.route('/b2s_image', type='http', auth='public', methods=['GET'])
def handler_b2s_image(self, image_id=None):
"""
URL for downloading a correspondence PDF
(or ZIP when multiple letters are attached).
Find the associated communication and mark all related letters
as opened and read.
:param image_id: uuid of the correspondence holding the data.
:return: file data for user
"""
res = super(B2sControllerSwitzerland, self).handler_b2s_image(image_id)
correspondence_obj = request.env['correspondence'].sudo()
correspondence = correspondence_obj.search([('uuid', '=', image_id)])
if correspondence.communication_id:
all_letters = correspondence.communication_id.get_objects()
all_letters.write({
'letter_delivered': True,
'email_read': True
})
return res
Revert bad renaming of route url parameter
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
import logging
from odoo import http
from odoo.http import request
from odoo.addons.sbc_compassion.controllers.b2s_image import RestController
_logger = logging.getLogger(__name__)
class B2sControllerSwitzerland(RestController):
@http.route('/b2s_image', type='http', auth='public', methods=['GET'])
def handler_b2s_image(self, id=None):
"""
URL for downloading a correspondence PDF
(or ZIP when multiple letters are attached).
Find the associated communication and mark all related letters
as opened and read.
:param image_id: uuid of the correspondence holding the data.
:return: file data for user
"""
res = super(B2sControllerSwitzerland, self).handler_b2s_image(id)
correspondence_obj = request.env['correspondence'].sudo()
correspondence = correspondence_obj.search([('uuid', '=', id)])
if correspondence.communication_id:
all_letters = correspondence.communication_id.get_objects()
all_letters.write({
'letter_delivered': True,
'email_read': True
})
return res
|
<commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
import logging
from odoo import http
from odoo.http import request
from odoo.addons.sbc_compassion.controllers.b2s_image import RestController
_logger = logging.getLogger(__name__)
class B2sControllerSwitzerland(RestController):
@http.route('/b2s_image', type='http', auth='public', methods=['GET'])
def handler_b2s_image(self, image_id=None):
"""
URL for downloading a correspondence PDF
(or ZIP when multiple letters are attached).
Find the associated communication and mark all related letters
as opened and read.
:param image_id: uuid of the correspondence holding the data.
:return: file data for user
"""
res = super(B2sControllerSwitzerland, self).handler_b2s_image(image_id)
correspondence_obj = request.env['correspondence'].sudo()
correspondence = correspondence_obj.search([('uuid', '=', image_id)])
if correspondence.communication_id:
all_letters = correspondence.communication_id.get_objects()
all_letters.write({
'letter_delivered': True,
'email_read': True
})
return res
<commit_msg>Revert bad renaming of route url parameter<commit_after>
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
import logging
from odoo import http
from odoo.http import request
from odoo.addons.sbc_compassion.controllers.b2s_image import RestController
_logger = logging.getLogger(__name__)
class B2sControllerSwitzerland(RestController):
@http.route('/b2s_image', type='http', auth='public', methods=['GET'])
def handler_b2s_image(self, id=None):
"""
URL for downloading a correspondence PDF
(or ZIP when multiple letters are attached).
Find the associated communication and mark all related letters
as opened and read.
:param image_id: uuid of the correspondence holding the data.
:return: file data for user
"""
res = super(B2sControllerSwitzerland, self).handler_b2s_image(id)
correspondence_obj = request.env['correspondence'].sudo()
correspondence = correspondence_obj.search([('uuid', '=', id)])
if correspondence.communication_id:
all_letters = correspondence.communication_id.get_objects()
all_letters.write({
'letter_delivered': True,
'email_read': True
})
return res
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
import logging
from odoo import http
from odoo.http import request
from odoo.addons.sbc_compassion.controllers.b2s_image import RestController
_logger = logging.getLogger(__name__)
class B2sControllerSwitzerland(RestController):
@http.route('/b2s_image', type='http', auth='public', methods=['GET'])
def handler_b2s_image(self, image_id=None):
"""
URL for downloading a correspondence PDF
(or ZIP when multiple letters are attached).
Find the associated communication and mark all related letters
as opened and read.
:param image_id: uuid of the correspondence holding the data.
:return: file data for user
"""
res = super(B2sControllerSwitzerland, self).handler_b2s_image(image_id)
correspondence_obj = request.env['correspondence'].sudo()
correspondence = correspondence_obj.search([('uuid', '=', image_id)])
if correspondence.communication_id:
all_letters = correspondence.communication_id.get_objects()
all_letters.write({
'letter_delivered': True,
'email_read': True
})
return res
Revert bad renaming of route url parameter# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
import logging
from odoo import http
from odoo.http import request
from odoo.addons.sbc_compassion.controllers.b2s_image import RestController
_logger = logging.getLogger(__name__)
class B2sControllerSwitzerland(RestController):
@http.route('/b2s_image', type='http', auth='public', methods=['GET'])
def handler_b2s_image(self, id=None):
"""
URL for downloading a correspondence PDF
(or ZIP when multiple letters are attached).
Find the associated communication and mark all related letters
as opened and read.
:param image_id: uuid of the correspondence holding the data.
:return: file data for user
"""
res = super(B2sControllerSwitzerland, self).handler_b2s_image(id)
correspondence_obj = request.env['correspondence'].sudo()
correspondence = correspondence_obj.search([('uuid', '=', id)])
if correspondence.communication_id:
all_letters = correspondence.communication_id.get_objects()
all_letters.write({
'letter_delivered': True,
'email_read': True
})
return res
|
<commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
import logging
from odoo import http
from odoo.http import request
from odoo.addons.sbc_compassion.controllers.b2s_image import RestController
_logger = logging.getLogger(__name__)
class B2sControllerSwitzerland(RestController):
@http.route('/b2s_image', type='http', auth='public', methods=['GET'])
def handler_b2s_image(self, image_id=None):
"""
URL for downloading a correspondence PDF
(or ZIP when multiple letters are attached).
Find the associated communication and mark all related letters
as opened and read.
:param image_id: uuid of the correspondence holding the data.
:return: file data for user
"""
res = super(B2sControllerSwitzerland, self).handler_b2s_image(image_id)
correspondence_obj = request.env['correspondence'].sudo()
correspondence = correspondence_obj.search([('uuid', '=', image_id)])
if correspondence.communication_id:
all_letters = correspondence.communication_id.get_objects()
all_letters.write({
'letter_delivered': True,
'email_read': True
})
return res
<commit_msg>Revert bad renaming of route url parameter<commit_after># -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
import logging
from odoo import http
from odoo.http import request
from odoo.addons.sbc_compassion.controllers.b2s_image import RestController
_logger = logging.getLogger(__name__)
class B2sControllerSwitzerland(RestController):
@http.route('/b2s_image', type='http', auth='public', methods=['GET'])
def handler_b2s_image(self, id=None):
"""
URL for downloading a correspondence PDF
(or ZIP when multiple letters are attached).
Find the associated communication and mark all related letters
as opened and read.
:param image_id: uuid of the correspondence holding the data.
:return: file data for user
"""
res = super(B2sControllerSwitzerland, self).handler_b2s_image(id)
correspondence_obj = request.env['correspondence'].sudo()
correspondence = correspondence_obj.search([('uuid', '=', id)])
if correspondence.communication_id:
all_letters = correspondence.communication_id.get_objects()
all_letters.write({
'letter_delivered': True,
'email_read': True
})
return res
|
445b9637300dc8dc25c2335c6f650de6c07bfb41
|
account_payment_partner/wizard/payment_order_create.py
|
account_payment_partner/wizard/payment_order_create.py
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Account Payment Partner module for OpenERP
# Copyright (C) 2014 Akretion (http://www.akretion.com)
# @author Alexis de Lattre <alexis.delattre@akretion.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api
class PaymentOrderCreate(models.TransientModel):
_inherit = 'payment.order.create'
@api.model
def extend_payment_order_domain(self, payment_order, domain):
super(PaymentOrderCreate, self).extend_payment_order_domain(
payment_order, domain)
domain += ['|', '|',
('invoice', '=', False),
('invoice.payment_mode_id', '=', False),
('invoice.payment_mode_id', '=', payment_order.mode.id)]
return True
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Account Payment Partner module for OpenERP
# Copyright (C) 2014 Akretion (http://www.akretion.com)
# @author Alexis de Lattre <alexis.delattre@akretion.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api
class PaymentOrderCreate(models.TransientModel):
_inherit = 'payment.order.create'
@api.model
def extend_payment_order_domain(self, payment_order, domain):
res = super(PaymentOrderCreate, self).extend_payment_order_domain(
payment_order, domain)
domain += ['|', '|',
('invoice', '=', False),
('invoice.payment_mode_id', '=', False),
('invoice.payment_mode_id', '=', payment_order.mode.id)]
return res
|
Return res in inherit (even if res is empty in this case)
|
Return res in inherit (even if res is empty in this case)
|
Python
|
agpl-3.0
|
rlizana/bank-payment,damdam-s/bank-payment,CompassionCH/bank-payment,acsone/bank-payment,sergio-teruel/bank-payment,David-Amaro/bank-payment,sergiocorato/bank-payment,hbrunn/bank-payment,incaser/bank-payment,rlizana/bank-payment,CompassionCH/bank-payment,ndtran/bank-payment,ndtran/bank-payment,damdam-s/bank-payment,sergio-incaser/bank-payment,David-Amaro/bank-payment,syci/bank-payment,open-synergy/bank-payment,sergiocorato/bank-payment,sergio-incaser/bank-payment,sergio-teruel/bank-payment,syci/bank-payment,diagramsoftware/bank-payment,Antiun/bank-payment,Antiun/bank-payment
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Account Payment Partner module for OpenERP
# Copyright (C) 2014 Akretion (http://www.akretion.com)
# @author Alexis de Lattre <alexis.delattre@akretion.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api
class PaymentOrderCreate(models.TransientModel):
_inherit = 'payment.order.create'
@api.model
def extend_payment_order_domain(self, payment_order, domain):
super(PaymentOrderCreate, self).extend_payment_order_domain(
payment_order, domain)
domain += ['|', '|',
('invoice', '=', False),
('invoice.payment_mode_id', '=', False),
('invoice.payment_mode_id', '=', payment_order.mode.id)]
return True
Return res in inherit (even if res is empty in this case)
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Account Payment Partner module for OpenERP
# Copyright (C) 2014 Akretion (http://www.akretion.com)
# @author Alexis de Lattre <alexis.delattre@akretion.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api
class PaymentOrderCreate(models.TransientModel):
_inherit = 'payment.order.create'
@api.model
def extend_payment_order_domain(self, payment_order, domain):
res = super(PaymentOrderCreate, self).extend_payment_order_domain(
payment_order, domain)
domain += ['|', '|',
('invoice', '=', False),
('invoice.payment_mode_id', '=', False),
('invoice.payment_mode_id', '=', payment_order.mode.id)]
return res
|
<commit_before># -*- encoding: utf-8 -*-
##############################################################################
#
# Account Payment Partner module for OpenERP
# Copyright (C) 2014 Akretion (http://www.akretion.com)
# @author Alexis de Lattre <alexis.delattre@akretion.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api
class PaymentOrderCreate(models.TransientModel):
_inherit = 'payment.order.create'
@api.model
def extend_payment_order_domain(self, payment_order, domain):
super(PaymentOrderCreate, self).extend_payment_order_domain(
payment_order, domain)
domain += ['|', '|',
('invoice', '=', False),
('invoice.payment_mode_id', '=', False),
('invoice.payment_mode_id', '=', payment_order.mode.id)]
return True
<commit_msg>Return res in inherit (even if res is empty in this case)<commit_after>
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Account Payment Partner module for OpenERP
# Copyright (C) 2014 Akretion (http://www.akretion.com)
# @author Alexis de Lattre <alexis.delattre@akretion.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api
class PaymentOrderCreate(models.TransientModel):
_inherit = 'payment.order.create'
@api.model
def extend_payment_order_domain(self, payment_order, domain):
res = super(PaymentOrderCreate, self).extend_payment_order_domain(
payment_order, domain)
domain += ['|', '|',
('invoice', '=', False),
('invoice.payment_mode_id', '=', False),
('invoice.payment_mode_id', '=', payment_order.mode.id)]
return res
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Account Payment Partner module for OpenERP
# Copyright (C) 2014 Akretion (http://www.akretion.com)
# @author Alexis de Lattre <alexis.delattre@akretion.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api
class PaymentOrderCreate(models.TransientModel):
_inherit = 'payment.order.create'
@api.model
def extend_payment_order_domain(self, payment_order, domain):
super(PaymentOrderCreate, self).extend_payment_order_domain(
payment_order, domain)
domain += ['|', '|',
('invoice', '=', False),
('invoice.payment_mode_id', '=', False),
('invoice.payment_mode_id', '=', payment_order.mode.id)]
return True
Return res in inherit (even if res is empty in this case)# -*- encoding: utf-8 -*-
##############################################################################
#
# Account Payment Partner module for OpenERP
# Copyright (C) 2014 Akretion (http://www.akretion.com)
# @author Alexis de Lattre <alexis.delattre@akretion.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api
class PaymentOrderCreate(models.TransientModel):
_inherit = 'payment.order.create'
@api.model
def extend_payment_order_domain(self, payment_order, domain):
res = super(PaymentOrderCreate, self).extend_payment_order_domain(
payment_order, domain)
domain += ['|', '|',
('invoice', '=', False),
('invoice.payment_mode_id', '=', False),
('invoice.payment_mode_id', '=', payment_order.mode.id)]
return res
|
<commit_before># -*- encoding: utf-8 -*-
##############################################################################
#
# Account Payment Partner module for OpenERP
# Copyright (C) 2014 Akretion (http://www.akretion.com)
# @author Alexis de Lattre <alexis.delattre@akretion.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api
class PaymentOrderCreate(models.TransientModel):
_inherit = 'payment.order.create'
@api.model
def extend_payment_order_domain(self, payment_order, domain):
super(PaymentOrderCreate, self).extend_payment_order_domain(
payment_order, domain)
domain += ['|', '|',
('invoice', '=', False),
('invoice.payment_mode_id', '=', False),
('invoice.payment_mode_id', '=', payment_order.mode.id)]
return True
<commit_msg>Return res in inherit (even if res is empty in this case)<commit_after># -*- encoding: utf-8 -*-
##############################################################################
#
# Account Payment Partner module for OpenERP
# Copyright (C) 2014 Akretion (http://www.akretion.com)
# @author Alexis de Lattre <alexis.delattre@akretion.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api
class PaymentOrderCreate(models.TransientModel):
_inherit = 'payment.order.create'
@api.model
def extend_payment_order_domain(self, payment_order, domain):
res = super(PaymentOrderCreate, self).extend_payment_order_domain(
payment_order, domain)
domain += ['|', '|',
('invoice', '=', False),
('invoice.payment_mode_id', '=', False),
('invoice.payment_mode_id', '=', payment_order.mode.id)]
return res
|
5a680d25a5e5a697440f17639d1a0617b903aa06
|
opps/__init__.py
|
opps/__init__.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
VERSION = (0, 0, 1)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Opps CMS websites magazines and high-traffic"
__author__ = u"Thiago Avelino"
__credits__ = []
__email__ = u"opps-developers@googlegroups.com"
__license__ = u"BSD"
__copyright__ = u"Copyright 2013, YACOWS"
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf import settings
VERSION = (0, 0, 1)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Opps CMS websites magazines and high-traffic"
__author__ = u"Thiago Avelino"
__credits__ = []
__email__ = u"opps-developers@googlegroups.com"
__license__ = u"BSD"
__copyright__ = u"Copyright 2013, YACOWS"
settings.INSTALLED_APPS += ('opps.article',
'opps.image',
'opps.channel',
'opps.source',
'redactor',
'tagging',)
settings.REDACTOR_OPTIONS = {'lang': 'en'}
settings.REDACTOR_UPLOAD = 'uploads/'
|
Add installed app on opps init
|
Add installed app on opps init
|
Python
|
mit
|
jeanmask/opps,jeanmask/opps,opps/opps,opps/opps,jeanmask/opps,williamroot/opps,YACOWS/opps,opps/opps,YACOWS/opps,williamroot/opps,YACOWS/opps,williamroot/opps,YACOWS/opps,williamroot/opps,opps/opps,jeanmask/opps
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
VERSION = (0, 0, 1)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Opps CMS websites magazines and high-traffic"
__author__ = u"Thiago Avelino"
__credits__ = []
__email__ = u"opps-developers@googlegroups.com"
__license__ = u"BSD"
__copyright__ = u"Copyright 2013, YACOWS"
Add installed app on opps init
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf import settings
VERSION = (0, 0, 1)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Opps CMS websites magazines and high-traffic"
__author__ = u"Thiago Avelino"
__credits__ = []
__email__ = u"opps-developers@googlegroups.com"
__license__ = u"BSD"
__copyright__ = u"Copyright 2013, YACOWS"
settings.INSTALLED_APPS += ('opps.article',
'opps.image',
'opps.channel',
'opps.source',
'redactor',
'tagging',)
settings.REDACTOR_OPTIONS = {'lang': 'en'}
settings.REDACTOR_UPLOAD = 'uploads/'
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
VERSION = (0, 0, 1)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Opps CMS websites magazines and high-traffic"
__author__ = u"Thiago Avelino"
__credits__ = []
__email__ = u"opps-developers@googlegroups.com"
__license__ = u"BSD"
__copyright__ = u"Copyright 2013, YACOWS"
<commit_msg>Add installed app on opps init<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf import settings
VERSION = (0, 0, 1)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Opps CMS websites magazines and high-traffic"
__author__ = u"Thiago Avelino"
__credits__ = []
__email__ = u"opps-developers@googlegroups.com"
__license__ = u"BSD"
__copyright__ = u"Copyright 2013, YACOWS"
settings.INSTALLED_APPS += ('opps.article',
'opps.image',
'opps.channel',
'opps.source',
'redactor',
'tagging',)
settings.REDACTOR_OPTIONS = {'lang': 'en'}
settings.REDACTOR_UPLOAD = 'uploads/'
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
VERSION = (0, 0, 1)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Opps CMS websites magazines and high-traffic"
__author__ = u"Thiago Avelino"
__credits__ = []
__email__ = u"opps-developers@googlegroups.com"
__license__ = u"BSD"
__copyright__ = u"Copyright 2013, YACOWS"
Add installed app on opps init#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf import settings
VERSION = (0, 0, 1)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Opps CMS websites magazines and high-traffic"
__author__ = u"Thiago Avelino"
__credits__ = []
__email__ = u"opps-developers@googlegroups.com"
__license__ = u"BSD"
__copyright__ = u"Copyright 2013, YACOWS"
settings.INSTALLED_APPS += ('opps.article',
'opps.image',
'opps.channel',
'opps.source',
'redactor',
'tagging',)
settings.REDACTOR_OPTIONS = {'lang': 'en'}
settings.REDACTOR_UPLOAD = 'uploads/'
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
VERSION = (0, 0, 1)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Opps CMS websites magazines and high-traffic"
__author__ = u"Thiago Avelino"
__credits__ = []
__email__ = u"opps-developers@googlegroups.com"
__license__ = u"BSD"
__copyright__ = u"Copyright 2013, YACOWS"
<commit_msg>Add installed app on opps init<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf import settings
VERSION = (0, 0, 1)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Opps CMS websites magazines and high-traffic"
__author__ = u"Thiago Avelino"
__credits__ = []
__email__ = u"opps-developers@googlegroups.com"
__license__ = u"BSD"
__copyright__ = u"Copyright 2013, YACOWS"
settings.INSTALLED_APPS += ('opps.article',
'opps.image',
'opps.channel',
'opps.source',
'redactor',
'tagging',)
settings.REDACTOR_OPTIONS = {'lang': 'en'}
settings.REDACTOR_UPLOAD = 'uploads/'
|
02f35718c6f6c3b18851b94e232031738629684e
|
promgen/sender/__init__.py
|
promgen/sender/__init__.py
|
import logging
from promgen.models import Project, Service
logger = logging.getLogger(__name__)
class SenderBase(object):
def send(self, data):
for alert in data['alerts']:
if 'project' in alert['labels']:
sent = 0
for project in Project.objects.filter(name=alert['labels']['project']):
for sender in project.sender.all():
if self._send(sender.value, alert, data):
sent += 1
if 'service' in alert['labels']:
for service in Service.objects.filter(name=alert['labels']['service']):
for sender in service.sender.all():
if self._send(sender.value, alert, data):
sent += 1
if sent == 0:
logger.debug('No senders configured for project or service %s', alert['labels']['project'])
return sent
def test(self, target, alert):
logger.debug('Sending test message to %s', target)
self._send(target, alert, {'externalURL': ''})
|
import logging
from promgen.models import Project, Service
logger = logging.getLogger(__name__)
class SenderBase(object):
def send(self, data):
sent = 0
for alert in data['alerts']:
if 'project' in alert['labels']:
logger.debug('Checking for projects')
for project in Project.objects.filter(name=alert['labels']['project']):
logger.debug('Checking %s', project)
for sender in project.sender.all():
logger.debug('Sending to %s', sender)
if self._send(sender.value, alert, data):
sent += 1
if 'service' in alert['labels']:
logger.debug('Checking for service')
for service in Service.objects.filter(name=alert['labels']['service']):
logger.debug('Checking %s', service)
for sender in service.sender.all():
logger.debug('Sending to %s', sender)
if self._send(sender.value, alert, data):
sent += 1
if sent == 0:
logger.debug('No senders configured for project or service %s', alert['labels']['project'])
return sent
def test(self, target, alert):
logger.debug('Sending test message to %s', target)
self._send(target, alert, {'externalURL': ''})
|
Fix send count and add debug logging
|
Fix send count and add debug logging
|
Python
|
mit
|
kfdm/promgen,kfdm/promgen,kfdm/promgen,kfdm/promgen
|
import logging
from promgen.models import Project, Service
logger = logging.getLogger(__name__)
class SenderBase(object):
def send(self, data):
for alert in data['alerts']:
if 'project' in alert['labels']:
sent = 0
for project in Project.objects.filter(name=alert['labels']['project']):
for sender in project.sender.all():
if self._send(sender.value, alert, data):
sent += 1
if 'service' in alert['labels']:
for service in Service.objects.filter(name=alert['labels']['service']):
for sender in service.sender.all():
if self._send(sender.value, alert, data):
sent += 1
if sent == 0:
logger.debug('No senders configured for project or service %s', alert['labels']['project'])
return sent
def test(self, target, alert):
logger.debug('Sending test message to %s', target)
self._send(target, alert, {'externalURL': ''})
Fix send count and add debug logging
|
import logging
from promgen.models import Project, Service
logger = logging.getLogger(__name__)
class SenderBase(object):
def send(self, data):
sent = 0
for alert in data['alerts']:
if 'project' in alert['labels']:
logger.debug('Checking for projects')
for project in Project.objects.filter(name=alert['labels']['project']):
logger.debug('Checking %s', project)
for sender in project.sender.all():
logger.debug('Sending to %s', sender)
if self._send(sender.value, alert, data):
sent += 1
if 'service' in alert['labels']:
logger.debug('Checking for service')
for service in Service.objects.filter(name=alert['labels']['service']):
logger.debug('Checking %s', service)
for sender in service.sender.all():
logger.debug('Sending to %s', sender)
if self._send(sender.value, alert, data):
sent += 1
if sent == 0:
logger.debug('No senders configured for project or service %s', alert['labels']['project'])
return sent
def test(self, target, alert):
logger.debug('Sending test message to %s', target)
self._send(target, alert, {'externalURL': ''})
|
<commit_before>import logging
from promgen.models import Project, Service
logger = logging.getLogger(__name__)
class SenderBase(object):
def send(self, data):
for alert in data['alerts']:
if 'project' in alert['labels']:
sent = 0
for project in Project.objects.filter(name=alert['labels']['project']):
for sender in project.sender.all():
if self._send(sender.value, alert, data):
sent += 1
if 'service' in alert['labels']:
for service in Service.objects.filter(name=alert['labels']['service']):
for sender in service.sender.all():
if self._send(sender.value, alert, data):
sent += 1
if sent == 0:
logger.debug('No senders configured for project or service %s', alert['labels']['project'])
return sent
def test(self, target, alert):
logger.debug('Sending test message to %s', target)
self._send(target, alert, {'externalURL': ''})
<commit_msg>Fix send count and add debug logging<commit_after>
|
import logging
from promgen.models import Project, Service
logger = logging.getLogger(__name__)
class SenderBase(object):
def send(self, data):
sent = 0
for alert in data['alerts']:
if 'project' in alert['labels']:
logger.debug('Checking for projects')
for project in Project.objects.filter(name=alert['labels']['project']):
logger.debug('Checking %s', project)
for sender in project.sender.all():
logger.debug('Sending to %s', sender)
if self._send(sender.value, alert, data):
sent += 1
if 'service' in alert['labels']:
logger.debug('Checking for service')
for service in Service.objects.filter(name=alert['labels']['service']):
logger.debug('Checking %s', service)
for sender in service.sender.all():
logger.debug('Sending to %s', sender)
if self._send(sender.value, alert, data):
sent += 1
if sent == 0:
logger.debug('No senders configured for project or service %s', alert['labels']['project'])
return sent
def test(self, target, alert):
logger.debug('Sending test message to %s', target)
self._send(target, alert, {'externalURL': ''})
|
import logging
from promgen.models import Project, Service
logger = logging.getLogger(__name__)
class SenderBase(object):
def send(self, data):
for alert in data['alerts']:
if 'project' in alert['labels']:
sent = 0
for project in Project.objects.filter(name=alert['labels']['project']):
for sender in project.sender.all():
if self._send(sender.value, alert, data):
sent += 1
if 'service' in alert['labels']:
for service in Service.objects.filter(name=alert['labels']['service']):
for sender in service.sender.all():
if self._send(sender.value, alert, data):
sent += 1
if sent == 0:
logger.debug('No senders configured for project or service %s', alert['labels']['project'])
return sent
def test(self, target, alert):
logger.debug('Sending test message to %s', target)
self._send(target, alert, {'externalURL': ''})
Fix send count and add debug loggingimport logging
from promgen.models import Project, Service
logger = logging.getLogger(__name__)
class SenderBase(object):
def send(self, data):
sent = 0
for alert in data['alerts']:
if 'project' in alert['labels']:
logger.debug('Checking for projects')
for project in Project.objects.filter(name=alert['labels']['project']):
logger.debug('Checking %s', project)
for sender in project.sender.all():
logger.debug('Sending to %s', sender)
if self._send(sender.value, alert, data):
sent += 1
if 'service' in alert['labels']:
logger.debug('Checking for service')
for service in Service.objects.filter(name=alert['labels']['service']):
logger.debug('Checking %s', service)
for sender in service.sender.all():
logger.debug('Sending to %s', sender)
if self._send(sender.value, alert, data):
sent += 1
if sent == 0:
logger.debug('No senders configured for project or service %s', alert['labels']['project'])
return sent
def test(self, target, alert):
logger.debug('Sending test message to %s', target)
self._send(target, alert, {'externalURL': ''})
|
<commit_before>import logging
from promgen.models import Project, Service
logger = logging.getLogger(__name__)
class SenderBase(object):
def send(self, data):
for alert in data['alerts']:
if 'project' in alert['labels']:
sent = 0
for project in Project.objects.filter(name=alert['labels']['project']):
for sender in project.sender.all():
if self._send(sender.value, alert, data):
sent += 1
if 'service' in alert['labels']:
for service in Service.objects.filter(name=alert['labels']['service']):
for sender in service.sender.all():
if self._send(sender.value, alert, data):
sent += 1
if sent == 0:
logger.debug('No senders configured for project or service %s', alert['labels']['project'])
return sent
def test(self, target, alert):
logger.debug('Sending test message to %s', target)
self._send(target, alert, {'externalURL': ''})
<commit_msg>Fix send count and add debug logging<commit_after>import logging
from promgen.models import Project, Service
logger = logging.getLogger(__name__)
class SenderBase(object):
def send(self, data):
sent = 0
for alert in data['alerts']:
if 'project' in alert['labels']:
logger.debug('Checking for projects')
for project in Project.objects.filter(name=alert['labels']['project']):
logger.debug('Checking %s', project)
for sender in project.sender.all():
logger.debug('Sending to %s', sender)
if self._send(sender.value, alert, data):
sent += 1
if 'service' in alert['labels']:
logger.debug('Checking for service')
for service in Service.objects.filter(name=alert['labels']['service']):
logger.debug('Checking %s', service)
for sender in service.sender.all():
logger.debug('Sending to %s', sender)
if self._send(sender.value, alert, data):
sent += 1
if sent == 0:
logger.debug('No senders configured for project or service %s', alert['labels']['project'])
return sent
def test(self, target, alert):
logger.debug('Sending test message to %s', target)
self._send(target, alert, {'externalURL': ''})
|
991f37a5946f2fdf821ab7de367f3ced7b68a635
|
segmentation/segment_pool/signals.py
|
segmentation/segment_pool/signals.py
|
# -*- coding: utf-8 -*-
from django.db.models.signals import post_save, pre_delete
from django.dispatch import receiver
from django.core.exceptions import ImproperlyConfigured
from cms.exceptions import PluginAlreadyRegistered, PluginNotRegistered
from .segment_pool import segment_pool
@receiver(post_save)
def register_segment(sender, instance, created, **kwargs):
'''
Ensure that saving changes in the model results in the de-registering (if
necessary) and registering of this segment plugin.
'''
#
# NOTE: Removed the test if instance is the right type from here, as it is
# already the first thing that happens in the (un)register_plugin()
# methods. Its not these signal handlers' job to decide who gets to be
# registered and who doesn't.
#
if not created:
try:
segment_pool.unregister_segment_plugin(instance)
except (PluginAlreadyRegistered, ImproperlyConfigured):
pass
# Either way, we register it.
try:
segment_pool.register_segment_plugin(instance)
except (PluginAlreadyRegistered, ImproperlyConfigured):
pass
@receiver(pre_delete)
def unregister_segment(sender, instance, **kwargs):
'''
Listens for signals that a SegmentPlugin instance is to be deleted, and
un-registers it from the segment_pool.
'''
# NOTE: See note in register_segment()
try:
segment_pool.unregister_segment_plugin(instance)
except (PluginNotRegistered, ImproperlyConfigured):
pass
|
# -*- coding: utf-8 -*-
from django.db.models.signals import post_save, pre_delete
from django.dispatch import receiver
from django.core.exceptions import ImproperlyConfigured
from cms.exceptions import PluginAlreadyRegistered, PluginNotRegistered
from .segment_pool import segment_pool
from ..models import SegmentBasePluginModel
@receiver(post_save)
def register_segment(sender, instance, created, **kwargs):
'''
Ensure that saving changes in the model results in the de-registering (if
necessary) and registering of this segment plugin.
'''
if isinstance(instance, SegmentBasePluginModel):
if not created:
try:
segment_pool.unregister_segment_plugin(instance)
except (PluginAlreadyRegistered, ImproperlyConfigured):
pass
# Either way, we register it.
try:
segment_pool.register_segment_plugin(instance)
except (PluginAlreadyRegistered, ImproperlyConfigured):
pass
@receiver(pre_delete)
def unregister_segment(sender, instance, **kwargs):
'''
Listens for signals that a SegmentPlugin instance is to be deleted, and
un-registers it from the segment_pool.
'''
if isinstance(instance, SegmentBasePluginModel):
try:
segment_pool.unregister_segment_plugin(instance)
except (PluginNotRegistered, ImproperlyConfigured):
pass
|
Use a broad-brush filter for the signal handlers
|
Use a broad-brush filter for the signal handlers
|
Python
|
bsd-3-clause
|
aldryn/aldryn-segmentation,aldryn/aldryn-segmentation
|
# -*- coding: utf-8 -*-
from django.db.models.signals import post_save, pre_delete
from django.dispatch import receiver
from django.core.exceptions import ImproperlyConfigured
from cms.exceptions import PluginAlreadyRegistered, PluginNotRegistered
from .segment_pool import segment_pool
@receiver(post_save)
def register_segment(sender, instance, created, **kwargs):
'''
Ensure that saving changes in the model results in the de-registering (if
necessary) and registering of this segment plugin.
'''
#
# NOTE: Removed the test if instance is the right type from here, as it is
# already the first thing that happens in the (un)register_plugin()
# methods. Its not these signal handlers' job to decide who gets to be
# registered and who doesn't.
#
if not created:
try:
segment_pool.unregister_segment_plugin(instance)
except (PluginAlreadyRegistered, ImproperlyConfigured):
pass
# Either way, we register it.
try:
segment_pool.register_segment_plugin(instance)
except (PluginAlreadyRegistered, ImproperlyConfigured):
pass
@receiver(pre_delete)
def unregister_segment(sender, instance, **kwargs):
'''
Listens for signals that a SegmentPlugin instance is to be deleted, and
un-registers it from the segment_pool.
'''
# NOTE: See note in register_segment()
try:
segment_pool.unregister_segment_plugin(instance)
except (PluginNotRegistered, ImproperlyConfigured):
pass
Use a broad-brush filter for the signal handlers
|
# -*- coding: utf-8 -*-
from django.db.models.signals import post_save, pre_delete
from django.dispatch import receiver
from django.core.exceptions import ImproperlyConfigured
from cms.exceptions import PluginAlreadyRegistered, PluginNotRegistered
from .segment_pool import segment_pool
from ..models import SegmentBasePluginModel
@receiver(post_save)
def register_segment(sender, instance, created, **kwargs):
'''
Ensure that saving changes in the model results in the de-registering (if
necessary) and registering of this segment plugin.
'''
if isinstance(instance, SegmentBasePluginModel):
if not created:
try:
segment_pool.unregister_segment_plugin(instance)
except (PluginAlreadyRegistered, ImproperlyConfigured):
pass
# Either way, we register it.
try:
segment_pool.register_segment_plugin(instance)
except (PluginAlreadyRegistered, ImproperlyConfigured):
pass
@receiver(pre_delete)
def unregister_segment(sender, instance, **kwargs):
'''
Listens for signals that a SegmentPlugin instance is to be deleted, and
un-registers it from the segment_pool.
'''
if isinstance(instance, SegmentBasePluginModel):
try:
segment_pool.unregister_segment_plugin(instance)
except (PluginNotRegistered, ImproperlyConfigured):
pass
|
<commit_before># -*- coding: utf-8 -*-
from django.db.models.signals import post_save, pre_delete
from django.dispatch import receiver
from django.core.exceptions import ImproperlyConfigured
from cms.exceptions import PluginAlreadyRegistered, PluginNotRegistered
from .segment_pool import segment_pool
@receiver(post_save)
def register_segment(sender, instance, created, **kwargs):
'''
Ensure that saving changes in the model results in the de-registering (if
necessary) and registering of this segment plugin.
'''
#
# NOTE: Removed the test if instance is the right type from here, as it is
# already the first thing that happens in the (un)register_plugin()
# methods. Its not these signal handlers' job to decide who gets to be
# registered and who doesn't.
#
if not created:
try:
segment_pool.unregister_segment_plugin(instance)
except (PluginAlreadyRegistered, ImproperlyConfigured):
pass
# Either way, we register it.
try:
segment_pool.register_segment_plugin(instance)
except (PluginAlreadyRegistered, ImproperlyConfigured):
pass
@receiver(pre_delete)
def unregister_segment(sender, instance, **kwargs):
'''
Listens for signals that a SegmentPlugin instance is to be deleted, and
un-registers it from the segment_pool.
'''
# NOTE: See note in register_segment()
try:
segment_pool.unregister_segment_plugin(instance)
except (PluginNotRegistered, ImproperlyConfigured):
pass
<commit_msg>Use a broad-brush filter for the signal handlers<commit_after>
|
# -*- coding: utf-8 -*-
from django.db.models.signals import post_save, pre_delete
from django.dispatch import receiver
from django.core.exceptions import ImproperlyConfigured
from cms.exceptions import PluginAlreadyRegistered, PluginNotRegistered
from .segment_pool import segment_pool
from ..models import SegmentBasePluginModel
@receiver(post_save)
def register_segment(sender, instance, created, **kwargs):
'''
Ensure that saving changes in the model results in the de-registering (if
necessary) and registering of this segment plugin.
'''
if isinstance(instance, SegmentBasePluginModel):
if not created:
try:
segment_pool.unregister_segment_plugin(instance)
except (PluginAlreadyRegistered, ImproperlyConfigured):
pass
# Either way, we register it.
try:
segment_pool.register_segment_plugin(instance)
except (PluginAlreadyRegistered, ImproperlyConfigured):
pass
@receiver(pre_delete)
def unregister_segment(sender, instance, **kwargs):
'''
Listens for signals that a SegmentPlugin instance is to be deleted, and
un-registers it from the segment_pool.
'''
if isinstance(instance, SegmentBasePluginModel):
try:
segment_pool.unregister_segment_plugin(instance)
except (PluginNotRegistered, ImproperlyConfigured):
pass
|
# -*- coding: utf-8 -*-
from django.db.models.signals import post_save, pre_delete
from django.dispatch import receiver
from django.core.exceptions import ImproperlyConfigured
from cms.exceptions import PluginAlreadyRegistered, PluginNotRegistered
from .segment_pool import segment_pool
@receiver(post_save)
def register_segment(sender, instance, created, **kwargs):
'''
Ensure that saving changes in the model results in the de-registering (if
necessary) and registering of this segment plugin.
'''
#
# NOTE: Removed the test if instance is the right type from here, as it is
# already the first thing that happens in the (un)register_plugin()
# methods. Its not these signal handlers' job to decide who gets to be
# registered and who doesn't.
#
if not created:
try:
segment_pool.unregister_segment_plugin(instance)
except (PluginAlreadyRegistered, ImproperlyConfigured):
pass
# Either way, we register it.
try:
segment_pool.register_segment_plugin(instance)
except (PluginAlreadyRegistered, ImproperlyConfigured):
pass
@receiver(pre_delete)
def unregister_segment(sender, instance, **kwargs):
'''
Listens for signals that a SegmentPlugin instance is to be deleted, and
un-registers it from the segment_pool.
'''
# NOTE: See note in register_segment()
try:
segment_pool.unregister_segment_plugin(instance)
except (PluginNotRegistered, ImproperlyConfigured):
pass
Use a broad-brush filter for the signal handlers# -*- coding: utf-8 -*-
from django.db.models.signals import post_save, pre_delete
from django.dispatch import receiver
from django.core.exceptions import ImproperlyConfigured
from cms.exceptions import PluginAlreadyRegistered, PluginNotRegistered
from .segment_pool import segment_pool
from ..models import SegmentBasePluginModel
@receiver(post_save)
def register_segment(sender, instance, created, **kwargs):
'''
Ensure that saving changes in the model results in the de-registering (if
necessary) and registering of this segment plugin.
'''
if isinstance(instance, SegmentBasePluginModel):
if not created:
try:
segment_pool.unregister_segment_plugin(instance)
except (PluginAlreadyRegistered, ImproperlyConfigured):
pass
# Either way, we register it.
try:
segment_pool.register_segment_plugin(instance)
except (PluginAlreadyRegistered, ImproperlyConfigured):
pass
@receiver(pre_delete)
def unregister_segment(sender, instance, **kwargs):
'''
Listens for signals that a SegmentPlugin instance is to be deleted, and
un-registers it from the segment_pool.
'''
if isinstance(instance, SegmentBasePluginModel):
try:
segment_pool.unregister_segment_plugin(instance)
except (PluginNotRegistered, ImproperlyConfigured):
pass
|
<commit_before># -*- coding: utf-8 -*-
from django.db.models.signals import post_save, pre_delete
from django.dispatch import receiver
from django.core.exceptions import ImproperlyConfigured
from cms.exceptions import PluginAlreadyRegistered, PluginNotRegistered
from .segment_pool import segment_pool
@receiver(post_save)
def register_segment(sender, instance, created, **kwargs):
'''
Ensure that saving changes in the model results in the de-registering (if
necessary) and registering of this segment plugin.
'''
#
# NOTE: Removed the test if instance is the right type from here, as it is
# already the first thing that happens in the (un)register_plugin()
# methods. Its not these signal handlers' job to decide who gets to be
# registered and who doesn't.
#
if not created:
try:
segment_pool.unregister_segment_plugin(instance)
except (PluginAlreadyRegistered, ImproperlyConfigured):
pass
# Either way, we register it.
try:
segment_pool.register_segment_plugin(instance)
except (PluginAlreadyRegistered, ImproperlyConfigured):
pass
@receiver(pre_delete)
def unregister_segment(sender, instance, **kwargs):
'''
Listens for signals that a SegmentPlugin instance is to be deleted, and
un-registers it from the segment_pool.
'''
# NOTE: See note in register_segment()
try:
segment_pool.unregister_segment_plugin(instance)
except (PluginNotRegistered, ImproperlyConfigured):
pass
<commit_msg>Use a broad-brush filter for the signal handlers<commit_after># -*- coding: utf-8 -*-
from django.db.models.signals import post_save, pre_delete
from django.dispatch import receiver
from django.core.exceptions import ImproperlyConfigured
from cms.exceptions import PluginAlreadyRegistered, PluginNotRegistered
from .segment_pool import segment_pool
from ..models import SegmentBasePluginModel
@receiver(post_save)
def register_segment(sender, instance, created, **kwargs):
'''
Ensure that saving changes in the model results in the de-registering (if
necessary) and registering of this segment plugin.
'''
if isinstance(instance, SegmentBasePluginModel):
if not created:
try:
segment_pool.unregister_segment_plugin(instance)
except (PluginAlreadyRegistered, ImproperlyConfigured):
pass
# Either way, we register it.
try:
segment_pool.register_segment_plugin(instance)
except (PluginAlreadyRegistered, ImproperlyConfigured):
pass
@receiver(pre_delete)
def unregister_segment(sender, instance, **kwargs):
'''
Listens for signals that a SegmentPlugin instance is to be deleted, and
un-registers it from the segment_pool.
'''
if isinstance(instance, SegmentBasePluginModel):
try:
segment_pool.unregister_segment_plugin(instance)
except (PluginNotRegistered, ImproperlyConfigured):
pass
|
5ad09d329b331c4c50a192a76b1c450e6340f508
|
distarray/core/tests/test_distributed_array_protocol.py
|
distarray/core/tests/test_distributed_array_protocol.py
|
import unittest
import distarray as da
from distarray.mpi.mpibase import create_comm_of_size, InvalidCommSizeError
class TestDistributedArrayProtocol(unittest.TestCase):
def setUp(self):
try:
comm = create_comm_of_size(4)
except InvalidCommSizeError:
raise unittest.SkipTest('Must run with comm size > 4.')
else:
self.larr = da.LocalArray((16,16),
grid_shape=(4,),
comm=comm, buf=None, offset=0)
def test_has_export(self):
self.assertTrue(hasattr(self.larr, '__distarray__'))
def test_export_well_formedness(self):
required_keys = set(("buffer", "dimdata"))
export = self.larr.__distarray__()
exported_keys = set(export.keys())
self.assertEqual(required_keys, exported_keys)
def test_round_trip(self):
new_larr = da.localarray(self.larr)
self.assertEqual(new_larr.local_array, self.larr.local_array)
if __name__ == '__main__':
try:
unittest.main()
except SystemExit:
pass
|
import unittest
import distarray as da
from distarray.mpi.mpibase import create_comm_of_size, InvalidCommSizeError
class TestDistributedArrayProtocol(unittest.TestCase):
def setUp(self):
try:
comm = create_comm_of_size(4)
except InvalidCommSizeError:
raise unittest.SkipTest('Must run with comm size > 4.')
else:
self.larr = da.LocalArray((16,16),
grid_shape=(4,),
comm=comm, buf=None, offset=0)
def test_has_export(self):
self.assertTrue(hasattr(self.larr, '__distarray__'))
def test_export_keys(self):
required_keys = set(("buffer", "dimdata"))
export_data = self.larr.__distarray__()
exported_keys = set(export_data.keys())
self.assertEqual(required_keys, exported_keys)
def test_round_trip(self):
new_larr = da.localarray(self.larr)
self.assertEqual(new_larr.local_array, self.larr.local_array)
if __name__ == '__main__':
try:
unittest.main()
except SystemExit:
pass
|
Test keys and values separately.
|
Test keys and values separately.
|
Python
|
bsd-3-clause
|
RaoUmer/distarray,enthought/distarray,RaoUmer/distarray,enthought/distarray
|
import unittest
import distarray as da
from distarray.mpi.mpibase import create_comm_of_size, InvalidCommSizeError
class TestDistributedArrayProtocol(unittest.TestCase):
def setUp(self):
try:
comm = create_comm_of_size(4)
except InvalidCommSizeError:
raise unittest.SkipTest('Must run with comm size > 4.')
else:
self.larr = da.LocalArray((16,16),
grid_shape=(4,),
comm=comm, buf=None, offset=0)
def test_has_export(self):
self.assertTrue(hasattr(self.larr, '__distarray__'))
def test_export_well_formedness(self):
required_keys = set(("buffer", "dimdata"))
export = self.larr.__distarray__()
exported_keys = set(export.keys())
self.assertEqual(required_keys, exported_keys)
def test_round_trip(self):
new_larr = da.localarray(self.larr)
self.assertEqual(new_larr.local_array, self.larr.local_array)
if __name__ == '__main__':
try:
unittest.main()
except SystemExit:
pass
Test keys and values separately.
|
import unittest
import distarray as da
from distarray.mpi.mpibase import create_comm_of_size, InvalidCommSizeError
class TestDistributedArrayProtocol(unittest.TestCase):
def setUp(self):
try:
comm = create_comm_of_size(4)
except InvalidCommSizeError:
raise unittest.SkipTest('Must run with comm size > 4.')
else:
self.larr = da.LocalArray((16,16),
grid_shape=(4,),
comm=comm, buf=None, offset=0)
def test_has_export(self):
self.assertTrue(hasattr(self.larr, '__distarray__'))
def test_export_keys(self):
required_keys = set(("buffer", "dimdata"))
export_data = self.larr.__distarray__()
exported_keys = set(export_data.keys())
self.assertEqual(required_keys, exported_keys)
def test_round_trip(self):
new_larr = da.localarray(self.larr)
self.assertEqual(new_larr.local_array, self.larr.local_array)
if __name__ == '__main__':
try:
unittest.main()
except SystemExit:
pass
|
<commit_before>import unittest
import distarray as da
from distarray.mpi.mpibase import create_comm_of_size, InvalidCommSizeError
class TestDistributedArrayProtocol(unittest.TestCase):
def setUp(self):
try:
comm = create_comm_of_size(4)
except InvalidCommSizeError:
raise unittest.SkipTest('Must run with comm size > 4.')
else:
self.larr = da.LocalArray((16,16),
grid_shape=(4,),
comm=comm, buf=None, offset=0)
def test_has_export(self):
self.assertTrue(hasattr(self.larr, '__distarray__'))
def test_export_well_formedness(self):
required_keys = set(("buffer", "dimdata"))
export = self.larr.__distarray__()
exported_keys = set(export.keys())
self.assertEqual(required_keys, exported_keys)
def test_round_trip(self):
new_larr = da.localarray(self.larr)
self.assertEqual(new_larr.local_array, self.larr.local_array)
if __name__ == '__main__':
try:
unittest.main()
except SystemExit:
pass
<commit_msg>Test keys and values separately.<commit_after>
|
import unittest
import distarray as da
from distarray.mpi.mpibase import create_comm_of_size, InvalidCommSizeError
class TestDistributedArrayProtocol(unittest.TestCase):
def setUp(self):
try:
comm = create_comm_of_size(4)
except InvalidCommSizeError:
raise unittest.SkipTest('Must run with comm size > 4.')
else:
self.larr = da.LocalArray((16,16),
grid_shape=(4,),
comm=comm, buf=None, offset=0)
def test_has_export(self):
self.assertTrue(hasattr(self.larr, '__distarray__'))
def test_export_keys(self):
required_keys = set(("buffer", "dimdata"))
export_data = self.larr.__distarray__()
exported_keys = set(export_data.keys())
self.assertEqual(required_keys, exported_keys)
def test_round_trip(self):
new_larr = da.localarray(self.larr)
self.assertEqual(new_larr.local_array, self.larr.local_array)
if __name__ == '__main__':
try:
unittest.main()
except SystemExit:
pass
|
import unittest
import distarray as da
from distarray.mpi.mpibase import create_comm_of_size, InvalidCommSizeError
class TestDistributedArrayProtocol(unittest.TestCase):
def setUp(self):
try:
comm = create_comm_of_size(4)
except InvalidCommSizeError:
raise unittest.SkipTest('Must run with comm size > 4.')
else:
self.larr = da.LocalArray((16,16),
grid_shape=(4,),
comm=comm, buf=None, offset=0)
def test_has_export(self):
self.assertTrue(hasattr(self.larr, '__distarray__'))
def test_export_well_formedness(self):
required_keys = set(("buffer", "dimdata"))
export = self.larr.__distarray__()
exported_keys = set(export.keys())
self.assertEqual(required_keys, exported_keys)
def test_round_trip(self):
new_larr = da.localarray(self.larr)
self.assertEqual(new_larr.local_array, self.larr.local_array)
if __name__ == '__main__':
try:
unittest.main()
except SystemExit:
pass
Test keys and values separately.import unittest
import distarray as da
from distarray.mpi.mpibase import create_comm_of_size, InvalidCommSizeError
class TestDistributedArrayProtocol(unittest.TestCase):
def setUp(self):
try:
comm = create_comm_of_size(4)
except InvalidCommSizeError:
raise unittest.SkipTest('Must run with comm size > 4.')
else:
self.larr = da.LocalArray((16,16),
grid_shape=(4,),
comm=comm, buf=None, offset=0)
def test_has_export(self):
self.assertTrue(hasattr(self.larr, '__distarray__'))
def test_export_keys(self):
required_keys = set(("buffer", "dimdata"))
export_data = self.larr.__distarray__()
exported_keys = set(export_data.keys())
self.assertEqual(required_keys, exported_keys)
def test_round_trip(self):
new_larr = da.localarray(self.larr)
self.assertEqual(new_larr.local_array, self.larr.local_array)
if __name__ == '__main__':
try:
unittest.main()
except SystemExit:
pass
|
<commit_before>import unittest
import distarray as da
from distarray.mpi.mpibase import create_comm_of_size, InvalidCommSizeError
class TestDistributedArrayProtocol(unittest.TestCase):
def setUp(self):
try:
comm = create_comm_of_size(4)
except InvalidCommSizeError:
raise unittest.SkipTest('Must run with comm size > 4.')
else:
self.larr = da.LocalArray((16,16),
grid_shape=(4,),
comm=comm, buf=None, offset=0)
def test_has_export(self):
self.assertTrue(hasattr(self.larr, '__distarray__'))
def test_export_well_formedness(self):
required_keys = set(("buffer", "dimdata"))
export = self.larr.__distarray__()
exported_keys = set(export.keys())
self.assertEqual(required_keys, exported_keys)
def test_round_trip(self):
new_larr = da.localarray(self.larr)
self.assertEqual(new_larr.local_array, self.larr.local_array)
if __name__ == '__main__':
try:
unittest.main()
except SystemExit:
pass
<commit_msg>Test keys and values separately.<commit_after>import unittest
import distarray as da
from distarray.mpi.mpibase import create_comm_of_size, InvalidCommSizeError
class TestDistributedArrayProtocol(unittest.TestCase):
def setUp(self):
try:
comm = create_comm_of_size(4)
except InvalidCommSizeError:
raise unittest.SkipTest('Must run with comm size > 4.')
else:
self.larr = da.LocalArray((16,16),
grid_shape=(4,),
comm=comm, buf=None, offset=0)
def test_has_export(self):
self.assertTrue(hasattr(self.larr, '__distarray__'))
def test_export_keys(self):
required_keys = set(("buffer", "dimdata"))
export_data = self.larr.__distarray__()
exported_keys = set(export_data.keys())
self.assertEqual(required_keys, exported_keys)
def test_round_trip(self):
new_larr = da.localarray(self.larr)
self.assertEqual(new_larr.local_array, self.larr.local_array)
if __name__ == '__main__':
try:
unittest.main()
except SystemExit:
pass
|
44ef488bbe25576ba25ca5855b726fa16fffa8bc
|
fireplace/cards/blackrock/collectible.py
|
fireplace/cards/blackrock/collectible.py
|
from ..utils import *
##
# Spells
# Dragon's Breath
class DragonsBreath:
action = [Hit(TARGET, 4)]
def cost(self, value):
return value - self.game.minionsKilledThisTurn
|
from ..utils import *
##
# Minions
# Flamewaker
class BRM_002:
events = [
OWN_SPELL_PLAY.after(Hit(RANDOM_ENEMY_MINION, 1) * 2)
]
##
# Spells
# Dragon's Breath
class BRM_003:
action = [Hit(TARGET, 4)]
def cost(self, value):
return value - self.game.minionsKilledThisTurn
|
Implement Flamewaker and correct Dragon's Breath id
|
Implement Flamewaker and correct Dragon's Breath id
|
Python
|
agpl-3.0
|
jleclanche/fireplace,Ragowit/fireplace,butozerca/fireplace,amw2104/fireplace,Ragowit/fireplace,oftc-ftw/fireplace,Meerkov/fireplace,smallnamespace/fireplace,amw2104/fireplace,liujimj/fireplace,oftc-ftw/fireplace,beheh/fireplace,smallnamespace/fireplace,liujimj/fireplace,butozerca/fireplace,NightKev/fireplace,Meerkov/fireplace
|
from ..utils import *
##
# Spells
# Dragon's Breath
class DragonsBreath:
action = [Hit(TARGET, 4)]
def cost(self, value):
return value - self.game.minionsKilledThisTurn
Implement Flamewaker and correct Dragon's Breath id
|
from ..utils import *
##
# Minions
# Flamewaker
class BRM_002:
events = [
OWN_SPELL_PLAY.after(Hit(RANDOM_ENEMY_MINION, 1) * 2)
]
##
# Spells
# Dragon's Breath
class BRM_003:
action = [Hit(TARGET, 4)]
def cost(self, value):
return value - self.game.minionsKilledThisTurn
|
<commit_before>from ..utils import *
##
# Spells
# Dragon's Breath
class DragonsBreath:
action = [Hit(TARGET, 4)]
def cost(self, value):
return value - self.game.minionsKilledThisTurn
<commit_msg>Implement Flamewaker and correct Dragon's Breath id<commit_after>
|
from ..utils import *
##
# Minions
# Flamewaker
class BRM_002:
events = [
OWN_SPELL_PLAY.after(Hit(RANDOM_ENEMY_MINION, 1) * 2)
]
##
# Spells
# Dragon's Breath
class BRM_003:
action = [Hit(TARGET, 4)]
def cost(self, value):
return value - self.game.minionsKilledThisTurn
|
from ..utils import *
##
# Spells
# Dragon's Breath
class DragonsBreath:
action = [Hit(TARGET, 4)]
def cost(self, value):
return value - self.game.minionsKilledThisTurn
Implement Flamewaker and correct Dragon's Breath idfrom ..utils import *
##
# Minions
# Flamewaker
class BRM_002:
events = [
OWN_SPELL_PLAY.after(Hit(RANDOM_ENEMY_MINION, 1) * 2)
]
##
# Spells
# Dragon's Breath
class BRM_003:
action = [Hit(TARGET, 4)]
def cost(self, value):
return value - self.game.minionsKilledThisTurn
|
<commit_before>from ..utils import *
##
# Spells
# Dragon's Breath
class DragonsBreath:
action = [Hit(TARGET, 4)]
def cost(self, value):
return value - self.game.minionsKilledThisTurn
<commit_msg>Implement Flamewaker and correct Dragon's Breath id<commit_after>from ..utils import *
##
# Minions
# Flamewaker
class BRM_002:
events = [
OWN_SPELL_PLAY.after(Hit(RANDOM_ENEMY_MINION, 1) * 2)
]
##
# Spells
# Dragon's Breath
class BRM_003:
action = [Hit(TARGET, 4)]
def cost(self, value):
return value - self.game.minionsKilledThisTurn
|
f9ffd5021f8af96df503c8a2743e97c8f1a17be0
|
infupy/backends/common.py
|
infupy/backends/common.py
|
def printerr(msg, e=''):
print(msg.format(e), file=sys.stderr)
class CommunicationError(Exception):
def __str__(self):
return "Communication error: {}".format(self.args)
class CommandError(Exception):
def __str__(self):
return "Command error: {}".format(self.args)
class Syringe():
_events = set()
def __init__(self):
pass
def execRawCommand(self, msg):
"""
Send command and read reply.
"""
pass
# Read Perfusion related values
def readRate(self):
return 0
def readVolume(self):
return 0
# Infusion control
def setRate(self, rate):
pass
def bolus(self, volume, rate):
pass
# Events
def registerEvent(self, event):
self._events |= set([event])
def unregisterEvent(self, event):
self._events -= set([event])
def clearEvents(self):
self._events = set()
|
def printerr(msg, e=''):
msg = "Backend: " + str(msg)
print(msg.format(e), file=sys.stderr)
class CommunicationError(Exception):
def __str__(self):
return "Communication error: {}".format(self.args)
class CommandError(Exception):
def __str__(self):
return "Command error: {}".format(self.args)
class Syringe():
_events = set()
def __init__(self):
pass
def execRawCommand(self, msg):
"""
Send command and read reply.
"""
pass
# Read Perfusion related values
def readRate(self):
return 0
def readVolume(self):
return 0
# Infusion control
def setRate(self, rate):
pass
def bolus(self, volume, rate):
pass
# Events
def registerEvent(self, event):
self._events |= set([event])
def unregisterEvent(self, event):
self._events -= set([event])
def clearEvents(self):
self._events = set()
|
Add marker to indicate backend error
|
Add marker to indicate backend error
|
Python
|
isc
|
jaj42/infupy
|
def printerr(msg, e=''):
print(msg.format(e), file=sys.stderr)
class CommunicationError(Exception):
def __str__(self):
return "Communication error: {}".format(self.args)
class CommandError(Exception):
def __str__(self):
return "Command error: {}".format(self.args)
class Syringe():
_events = set()
def __init__(self):
pass
def execRawCommand(self, msg):
"""
Send command and read reply.
"""
pass
# Read Perfusion related values
def readRate(self):
return 0
def readVolume(self):
return 0
# Infusion control
def setRate(self, rate):
pass
def bolus(self, volume, rate):
pass
# Events
def registerEvent(self, event):
self._events |= set([event])
def unregisterEvent(self, event):
self._events -= set([event])
def clearEvents(self):
self._events = set()
Add marker to indicate backend error
|
def printerr(msg, e=''):
msg = "Backend: " + str(msg)
print(msg.format(e), file=sys.stderr)
class CommunicationError(Exception):
def __str__(self):
return "Communication error: {}".format(self.args)
class CommandError(Exception):
def __str__(self):
return "Command error: {}".format(self.args)
class Syringe():
_events = set()
def __init__(self):
pass
def execRawCommand(self, msg):
"""
Send command and read reply.
"""
pass
# Read Perfusion related values
def readRate(self):
return 0
def readVolume(self):
return 0
# Infusion control
def setRate(self, rate):
pass
def bolus(self, volume, rate):
pass
# Events
def registerEvent(self, event):
self._events |= set([event])
def unregisterEvent(self, event):
self._events -= set([event])
def clearEvents(self):
self._events = set()
|
<commit_before>def printerr(msg, e=''):
print(msg.format(e), file=sys.stderr)
class CommunicationError(Exception):
def __str__(self):
return "Communication error: {}".format(self.args)
class CommandError(Exception):
def __str__(self):
return "Command error: {}".format(self.args)
class Syringe():
_events = set()
def __init__(self):
pass
def execRawCommand(self, msg):
"""
Send command and read reply.
"""
pass
# Read Perfusion related values
def readRate(self):
return 0
def readVolume(self):
return 0
# Infusion control
def setRate(self, rate):
pass
def bolus(self, volume, rate):
pass
# Events
def registerEvent(self, event):
self._events |= set([event])
def unregisterEvent(self, event):
self._events -= set([event])
def clearEvents(self):
self._events = set()
<commit_msg>Add marker to indicate backend error<commit_after>
|
def printerr(msg, e=''):
msg = "Backend: " + str(msg)
print(msg.format(e), file=sys.stderr)
class CommunicationError(Exception):
def __str__(self):
return "Communication error: {}".format(self.args)
class CommandError(Exception):
def __str__(self):
return "Command error: {}".format(self.args)
class Syringe():
_events = set()
def __init__(self):
pass
def execRawCommand(self, msg):
"""
Send command and read reply.
"""
pass
# Read Perfusion related values
def readRate(self):
return 0
def readVolume(self):
return 0
# Infusion control
def setRate(self, rate):
pass
def bolus(self, volume, rate):
pass
# Events
def registerEvent(self, event):
self._events |= set([event])
def unregisterEvent(self, event):
self._events -= set([event])
def clearEvents(self):
self._events = set()
|
def printerr(msg, e=''):
print(msg.format(e), file=sys.stderr)
class CommunicationError(Exception):
def __str__(self):
return "Communication error: {}".format(self.args)
class CommandError(Exception):
def __str__(self):
return "Command error: {}".format(self.args)
class Syringe():
_events = set()
def __init__(self):
pass
def execRawCommand(self, msg):
"""
Send command and read reply.
"""
pass
# Read Perfusion related values
def readRate(self):
return 0
def readVolume(self):
return 0
# Infusion control
def setRate(self, rate):
pass
def bolus(self, volume, rate):
pass
# Events
def registerEvent(self, event):
self._events |= set([event])
def unregisterEvent(self, event):
self._events -= set([event])
def clearEvents(self):
self._events = set()
Add marker to indicate backend errordef printerr(msg, e=''):
msg = "Backend: " + str(msg)
print(msg.format(e), file=sys.stderr)
class CommunicationError(Exception):
def __str__(self):
return "Communication error: {}".format(self.args)
class CommandError(Exception):
def __str__(self):
return "Command error: {}".format(self.args)
class Syringe():
_events = set()
def __init__(self):
pass
def execRawCommand(self, msg):
"""
Send command and read reply.
"""
pass
# Read Perfusion related values
def readRate(self):
return 0
def readVolume(self):
return 0
# Infusion control
def setRate(self, rate):
pass
def bolus(self, volume, rate):
pass
# Events
def registerEvent(self, event):
self._events |= set([event])
def unregisterEvent(self, event):
self._events -= set([event])
def clearEvents(self):
self._events = set()
|
<commit_before>def printerr(msg, e=''):
print(msg.format(e), file=sys.stderr)
class CommunicationError(Exception):
def __str__(self):
return "Communication error: {}".format(self.args)
class CommandError(Exception):
def __str__(self):
return "Command error: {}".format(self.args)
class Syringe():
_events = set()
def __init__(self):
pass
def execRawCommand(self, msg):
"""
Send command and read reply.
"""
pass
# Read Perfusion related values
def readRate(self):
return 0
def readVolume(self):
return 0
# Infusion control
def setRate(self, rate):
pass
def bolus(self, volume, rate):
pass
# Events
def registerEvent(self, event):
self._events |= set([event])
def unregisterEvent(self, event):
self._events -= set([event])
def clearEvents(self):
self._events = set()
<commit_msg>Add marker to indicate backend error<commit_after>def printerr(msg, e=''):
msg = "Backend: " + str(msg)
print(msg.format(e), file=sys.stderr)
class CommunicationError(Exception):
def __str__(self):
return "Communication error: {}".format(self.args)
class CommandError(Exception):
def __str__(self):
return "Command error: {}".format(self.args)
class Syringe():
_events = set()
def __init__(self):
pass
def execRawCommand(self, msg):
"""
Send command and read reply.
"""
pass
# Read Perfusion related values
def readRate(self):
return 0
def readVolume(self):
return 0
# Infusion control
def setRate(self, rate):
pass
def bolus(self, volume, rate):
pass
# Events
def registerEvent(self, event):
self._events |= set([event])
def unregisterEvent(self, event):
self._events -= set([event])
def clearEvents(self):
self._events = set()
|
55c24a4e47dfd6eab1dcceef8989a2a326322a14
|
osmABTS/trips.py
|
osmABTS/trips.py
|
"""
Trip generation
===============
"""
|
"""
Trip generation
===============
This module can be roughtly devided into two parts, the trip description and
trip generation. The trip description part contains mostly class definitions
that can be used to describe kinds of trips, while the trip generation contains
the main driver function to generate a large list of trips based on the
travellers and places. This module is kind of at the centre of the simulation.
"""
import random
import collections
#
# Trip description
# ----------------
#
# The trips can be roughtly described by two data structures, Location and
# Trip. A location is a location in the ways of a trip, and a trip is a series
# of locations with a mean frequency and variation.
#
# The location can be an attribute of the traveller or a random selection in a
# category of places. It is stored in the ``source`` attribute as one of the
# two constant symbols in this module. And a trip has a frequency stored in the
# ``freq`` attribute in unit of times per week, and ``var`` stores the
# corresponding variation. The list of locations are given in the ``locations``
# attribute, while the actual route is given in the route attribute as a list
# of zero-based indices in the locations list.
#
# constants for the two kinds of locations
TRAVELLER_ATTR = 1
RANDOM_FROM_CAT = 2
# Location class definition
Location = collections.namedtuple(
'Location',
['source', 'value']
)
Trip = collections.namedtuple(
'Trip',
['freq', 'var', 'locations', 'route']
)
# The default trip
DEFAULT_TRIP = [
# Commuting to work
Trip(
freq=5.0, var=1.0,
locations=[
Location(source=TRAVELLER_ATTR, value='home'),
Location(source=TRAVELLER_ATTR, value='work'),
],
route=[0, 1, 0]
),
# Go to a leisure place
Trip(
freq=2.0, var=0.5,
locations=[
Location(source=TRAVELLER_ATTR, value='home'),
Location(source=RANDOM_FROM_CAT, value='leisure'),
],
route=[0, 1, 0]
),
# Go to a restaurant
Trip(
freq=4.0, var=1.0,
locations=[
Location(source=TRAVELLER_ATTR, value='home'),
Location(source=RANDOM_FROM_CAT, value='restaurant'),
],
route=[0, 1, 0]
),
# Go to a church
Trip(
freq=1.0, var=0.5,
locations=[
Location(source=TRAVELLER_ATTR, value='home'),
Location(source=TRAVELLER_ATTR, value='church'),
],
route=[0, 1, 0]
),
]
|
Implement the trip description classes
|
Implement the trip description classes
And a default list for trips has also been written.
|
Python
|
mit
|
tschijnmo/osmABTS
|
"""
Trip generation
===============
"""
Implement the trip description classes
And a default list for trips has also been written.
|
"""
Trip generation
===============
This module can be roughtly devided into two parts, the trip description and
trip generation. The trip description part contains mostly class definitions
that can be used to describe kinds of trips, while the trip generation contains
the main driver function to generate a large list of trips based on the
travellers and places. This module is kind of at the centre of the simulation.
"""
import random
import collections
#
# Trip description
# ----------------
#
# The trips can be roughtly described by two data structures, Location and
# Trip. A location is a location in the ways of a trip, and a trip is a series
# of locations with a mean frequency and variation.
#
# The location can be an attribute of the traveller or a random selection in a
# category of places. It is stored in the ``source`` attribute as one of the
# two constant symbols in this module. And a trip has a frequency stored in the
# ``freq`` attribute in unit of times per week, and ``var`` stores the
# corresponding variation. The list of locations are given in the ``locations``
# attribute, while the actual route is given in the route attribute as a list
# of zero-based indices in the locations list.
#
# constants for the two kinds of locations
TRAVELLER_ATTR = 1
RANDOM_FROM_CAT = 2
# Location class definition
Location = collections.namedtuple(
'Location',
['source', 'value']
)
Trip = collections.namedtuple(
'Trip',
['freq', 'var', 'locations', 'route']
)
# The default trip
DEFAULT_TRIP = [
# Commuting to work
Trip(
freq=5.0, var=1.0,
locations=[
Location(source=TRAVELLER_ATTR, value='home'),
Location(source=TRAVELLER_ATTR, value='work'),
],
route=[0, 1, 0]
),
# Go to a leisure place
Trip(
freq=2.0, var=0.5,
locations=[
Location(source=TRAVELLER_ATTR, value='home'),
Location(source=RANDOM_FROM_CAT, value='leisure'),
],
route=[0, 1, 0]
),
# Go to a restaurant
Trip(
freq=4.0, var=1.0,
locations=[
Location(source=TRAVELLER_ATTR, value='home'),
Location(source=RANDOM_FROM_CAT, value='restaurant'),
],
route=[0, 1, 0]
),
# Go to a church
Trip(
freq=1.0, var=0.5,
locations=[
Location(source=TRAVELLER_ATTR, value='home'),
Location(source=TRAVELLER_ATTR, value='church'),
],
route=[0, 1, 0]
),
]
|
<commit_before>"""
Trip generation
===============
"""
<commit_msg>Implement the trip description classes
And a default list for trips has also been written.<commit_after>
|
"""
Trip generation
===============
This module can be roughtly devided into two parts, the trip description and
trip generation. The trip description part contains mostly class definitions
that can be used to describe kinds of trips, while the trip generation contains
the main driver function to generate a large list of trips based on the
travellers and places. This module is kind of at the centre of the simulation.
"""
import random
import collections
#
# Trip description
# ----------------
#
# The trips can be roughtly described by two data structures, Location and
# Trip. A location is a location in the ways of a trip, and a trip is a series
# of locations with a mean frequency and variation.
#
# The location can be an attribute of the traveller or a random selection in a
# category of places. It is stored in the ``source`` attribute as one of the
# two constant symbols in this module. And a trip has a frequency stored in the
# ``freq`` attribute in unit of times per week, and ``var`` stores the
# corresponding variation. The list of locations are given in the ``locations``
# attribute, while the actual route is given in the route attribute as a list
# of zero-based indices in the locations list.
#
# constants for the two kinds of locations
TRAVELLER_ATTR = 1
RANDOM_FROM_CAT = 2
# Location class definition
Location = collections.namedtuple(
'Location',
['source', 'value']
)
Trip = collections.namedtuple(
'Trip',
['freq', 'var', 'locations', 'route']
)
# The default trip
DEFAULT_TRIP = [
# Commuting to work
Trip(
freq=5.0, var=1.0,
locations=[
Location(source=TRAVELLER_ATTR, value='home'),
Location(source=TRAVELLER_ATTR, value='work'),
],
route=[0, 1, 0]
),
# Go to a leisure place
Trip(
freq=2.0, var=0.5,
locations=[
Location(source=TRAVELLER_ATTR, value='home'),
Location(source=RANDOM_FROM_CAT, value='leisure'),
],
route=[0, 1, 0]
),
# Go to a restaurant
Trip(
freq=4.0, var=1.0,
locations=[
Location(source=TRAVELLER_ATTR, value='home'),
Location(source=RANDOM_FROM_CAT, value='restaurant'),
],
route=[0, 1, 0]
),
# Go to a church
Trip(
freq=1.0, var=0.5,
locations=[
Location(source=TRAVELLER_ATTR, value='home'),
Location(source=TRAVELLER_ATTR, value='church'),
],
route=[0, 1, 0]
),
]
|
"""
Trip generation
===============
"""
Implement the trip description classes
And a default list for trips has also been written."""
Trip generation
===============
This module can be roughtly devided into two parts, the trip description and
trip generation. The trip description part contains mostly class definitions
that can be used to describe kinds of trips, while the trip generation contains
the main driver function to generate a large list of trips based on the
travellers and places. This module is kind of at the centre of the simulation.
"""
import random
import collections
#
# Trip description
# ----------------
#
# The trips can be roughtly described by two data structures, Location and
# Trip. A location is a location in the ways of a trip, and a trip is a series
# of locations with a mean frequency and variation.
#
# The location can be an attribute of the traveller or a random selection in a
# category of places. It is stored in the ``source`` attribute as one of the
# two constant symbols in this module. And a trip has a frequency stored in the
# ``freq`` attribute in unit of times per week, and ``var`` stores the
# corresponding variation. The list of locations are given in the ``locations``
# attribute, while the actual route is given in the route attribute as a list
# of zero-based indices in the locations list.
#
# constants for the two kinds of locations
TRAVELLER_ATTR = 1
RANDOM_FROM_CAT = 2
# Location class definition
Location = collections.namedtuple(
'Location',
['source', 'value']
)
Trip = collections.namedtuple(
'Trip',
['freq', 'var', 'locations', 'route']
)
# The default trip
DEFAULT_TRIP = [
# Commuting to work
Trip(
freq=5.0, var=1.0,
locations=[
Location(source=TRAVELLER_ATTR, value='home'),
Location(source=TRAVELLER_ATTR, value='work'),
],
route=[0, 1, 0]
),
# Go to a leisure place
Trip(
freq=2.0, var=0.5,
locations=[
Location(source=TRAVELLER_ATTR, value='home'),
Location(source=RANDOM_FROM_CAT, value='leisure'),
],
route=[0, 1, 0]
),
# Go to a restaurant
Trip(
freq=4.0, var=1.0,
locations=[
Location(source=TRAVELLER_ATTR, value='home'),
Location(source=RANDOM_FROM_CAT, value='restaurant'),
],
route=[0, 1, 0]
),
# Go to a church
Trip(
freq=1.0, var=0.5,
locations=[
Location(source=TRAVELLER_ATTR, value='home'),
Location(source=TRAVELLER_ATTR, value='church'),
],
route=[0, 1, 0]
),
]
|
<commit_before>"""
Trip generation
===============
"""
<commit_msg>Implement the trip description classes
And a default list for trips has also been written.<commit_after>"""
Trip generation
===============
This module can be roughtly devided into two parts, the trip description and
trip generation. The trip description part contains mostly class definitions
that can be used to describe kinds of trips, while the trip generation contains
the main driver function to generate a large list of trips based on the
travellers and places. This module is kind of at the centre of the simulation.
"""
import random
import collections
#
# Trip description
# ----------------
#
# The trips can be roughtly described by two data structures, Location and
# Trip. A location is a location in the ways of a trip, and a trip is a series
# of locations with a mean frequency and variation.
#
# The location can be an attribute of the traveller or a random selection in a
# category of places. It is stored in the ``source`` attribute as one of the
# two constant symbols in this module. And a trip has a frequency stored in the
# ``freq`` attribute in unit of times per week, and ``var`` stores the
# corresponding variation. The list of locations are given in the ``locations``
# attribute, while the actual route is given in the route attribute as a list
# of zero-based indices in the locations list.
#
# constants for the two kinds of locations
TRAVELLER_ATTR = 1
RANDOM_FROM_CAT = 2
# Location class definition
Location = collections.namedtuple(
'Location',
['source', 'value']
)
Trip = collections.namedtuple(
'Trip',
['freq', 'var', 'locations', 'route']
)
# The default trip
DEFAULT_TRIP = [
# Commuting to work
Trip(
freq=5.0, var=1.0,
locations=[
Location(source=TRAVELLER_ATTR, value='home'),
Location(source=TRAVELLER_ATTR, value='work'),
],
route=[0, 1, 0]
),
# Go to a leisure place
Trip(
freq=2.0, var=0.5,
locations=[
Location(source=TRAVELLER_ATTR, value='home'),
Location(source=RANDOM_FROM_CAT, value='leisure'),
],
route=[0, 1, 0]
),
# Go to a restaurant
Trip(
freq=4.0, var=1.0,
locations=[
Location(source=TRAVELLER_ATTR, value='home'),
Location(source=RANDOM_FROM_CAT, value='restaurant'),
],
route=[0, 1, 0]
),
# Go to a church
Trip(
freq=1.0, var=0.5,
locations=[
Location(source=TRAVELLER_ATTR, value='home'),
Location(source=TRAVELLER_ATTR, value='church'),
],
route=[0, 1, 0]
),
]
|
dc8099d10028266411c928befe9c690fe75ff391
|
tools/bundle.py
|
tools/bundle.py
|
#!/usr/bin/env python
import os
import sys
import glob
import getopt
def file_list(path):
files = []
if os.path.isfile(path):
return [path]
for f in os.listdir(path):
new_dir = os.path.join(path, f)
if os.path.isdir(new_dir) and not os.path.islink(new_dir):
files.extend(file_list(new_dir))
else:
if f.endswith('.lua'):
files.append(path + '/' + f)
return files
def generate_bundle_map(module_name, path, is_base=False):
t = []
for os_filename in file_list(path):
bundle_filename = (os_filename.replace(path, '')[1:])
if is_base:
bundle_filename = 'modules/' + bundle_filename
else:
bundle_filename = module_name + '/' + bundle_filename
t.append({ 'os_filename': os_filename, 'bundle_filename': bundle_filename })
return t
try:
opts, args = getopt.getopt(sys.argv[1:], 'lb', [])
except:
sys.exit(2)
if __name__ == '__main__':
module_path = args[0]
module_name = os.path.basename(module_path)
for o, a in opts:
if o == '-l':
for path in args:
print('\n'.join(file_list(path)))
elif o == '-b':
for path in args:
print(generate_bundle_map(module_name, path))
|
#!/usr/bin/env python
import os
import sys
import glob
import getopt
def file_list(path):
files = []
if os.path.isfile(path):
return [path]
for f in os.listdir(path):
new_dir = path + '/' + f
if os.path.isdir(new_dir) and not os.path.islink(new_dir):
files.extend(file_list(new_dir))
else:
if f.endswith('.lua'):
files.append(path + '/' + f)
return files
def generate_bundle_map(module_name, path, is_base=False):
t = []
for os_filename in file_list(path):
bundle_filename = (os_filename.replace(path, '')[1:])
if is_base:
bundle_filename = 'modules/' + bundle_filename
else:
bundle_filename = module_name + '/' + bundle_filename
t.append({ 'os_filename': os_filename, 'bundle_filename': bundle_filename })
return t
try:
opts, args = getopt.getopt(sys.argv[1:], 'lb', [])
except:
sys.exit(2)
if __name__ == '__main__':
module_path = args[0]
module_name = os.path.basename(module_path)
for o, a in opts:
if o == '-l':
for path in args:
print('\n'.join(file_list(path)))
elif o == '-b':
for path in args:
print(generate_bundle_map(module_name, path))
|
Stop using os.path.join, because Visual Studio can actually handle forward slash style paths, and the os.path method was creating mixed \\ and / style paths, b0rking everything.
|
Stop using os.path.join, because Visual Studio can actually handle forward
slash style paths, and the os.path method was creating mixed \\ and /
style paths, b0rking everything.
|
Python
|
apache-2.0
|
cp16net/virgo-base,AlphaStaxLLC/rackspace-monitoring-agent,cp16net/virgo-base,christopherjwang/rackspace-monitoring-agent,kaustavha/rackspace-monitoring-agent,christopherjwang/rackspace-monitoring-agent,cp16net/virgo-base,virgo-agent-toolkit/rackspace-monitoring-agent,kaustavha/rackspace-monitoring-agent,AlphaStaxLLC/rackspace-monitoring-agent,cp16net/virgo-base,cp16net/virgo-base,virgo-agent-toolkit/rackspace-monitoring-agent
|
#!/usr/bin/env python
import os
import sys
import glob
import getopt
def file_list(path):
files = []
if os.path.isfile(path):
return [path]
for f in os.listdir(path):
new_dir = os.path.join(path, f)
if os.path.isdir(new_dir) and not os.path.islink(new_dir):
files.extend(file_list(new_dir))
else:
if f.endswith('.lua'):
files.append(path + '/' + f)
return files
def generate_bundle_map(module_name, path, is_base=False):
t = []
for os_filename in file_list(path):
bundle_filename = (os_filename.replace(path, '')[1:])
if is_base:
bundle_filename = 'modules/' + bundle_filename
else:
bundle_filename = module_name + '/' + bundle_filename
t.append({ 'os_filename': os_filename, 'bundle_filename': bundle_filename })
return t
try:
opts, args = getopt.getopt(sys.argv[1:], 'lb', [])
except:
sys.exit(2)
if __name__ == '__main__':
module_path = args[0]
module_name = os.path.basename(module_path)
for o, a in opts:
if o == '-l':
for path in args:
print('\n'.join(file_list(path)))
elif o == '-b':
for path in args:
print(generate_bundle_map(module_name, path))
Stop using os.path.join, because Visual Studio can actually handle forward
slash style paths, and the os.path method was creating mixed \\ and /
style paths, b0rking everything.
|
#!/usr/bin/env python
import os
import sys
import glob
import getopt
def file_list(path):
files = []
if os.path.isfile(path):
return [path]
for f in os.listdir(path):
new_dir = path + '/' + f
if os.path.isdir(new_dir) and not os.path.islink(new_dir):
files.extend(file_list(new_dir))
else:
if f.endswith('.lua'):
files.append(path + '/' + f)
return files
def generate_bundle_map(module_name, path, is_base=False):
t = []
for os_filename in file_list(path):
bundle_filename = (os_filename.replace(path, '')[1:])
if is_base:
bundle_filename = 'modules/' + bundle_filename
else:
bundle_filename = module_name + '/' + bundle_filename
t.append({ 'os_filename': os_filename, 'bundle_filename': bundle_filename })
return t
try:
opts, args = getopt.getopt(sys.argv[1:], 'lb', [])
except:
sys.exit(2)
if __name__ == '__main__':
module_path = args[0]
module_name = os.path.basename(module_path)
for o, a in opts:
if o == '-l':
for path in args:
print('\n'.join(file_list(path)))
elif o == '-b':
for path in args:
print(generate_bundle_map(module_name, path))
|
<commit_before>#!/usr/bin/env python
import os
import sys
import glob
import getopt
def file_list(path):
files = []
if os.path.isfile(path):
return [path]
for f in os.listdir(path):
new_dir = os.path.join(path, f)
if os.path.isdir(new_dir) and not os.path.islink(new_dir):
files.extend(file_list(new_dir))
else:
if f.endswith('.lua'):
files.append(path + '/' + f)
return files
def generate_bundle_map(module_name, path, is_base=False):
t = []
for os_filename in file_list(path):
bundle_filename = (os_filename.replace(path, '')[1:])
if is_base:
bundle_filename = 'modules/' + bundle_filename
else:
bundle_filename = module_name + '/' + bundle_filename
t.append({ 'os_filename': os_filename, 'bundle_filename': bundle_filename })
return t
try:
opts, args = getopt.getopt(sys.argv[1:], 'lb', [])
except:
sys.exit(2)
if __name__ == '__main__':
module_path = args[0]
module_name = os.path.basename(module_path)
for o, a in opts:
if o == '-l':
for path in args:
print('\n'.join(file_list(path)))
elif o == '-b':
for path in args:
print(generate_bundle_map(module_name, path))
<commit_msg>Stop using os.path.join, because Visual Studio can actually handle forward
slash style paths, and the os.path method was creating mixed \\ and /
style paths, b0rking everything.<commit_after>
|
#!/usr/bin/env python
import os
import sys
import glob
import getopt
def file_list(path):
files = []
if os.path.isfile(path):
return [path]
for f in os.listdir(path):
new_dir = path + '/' + f
if os.path.isdir(new_dir) and not os.path.islink(new_dir):
files.extend(file_list(new_dir))
else:
if f.endswith('.lua'):
files.append(path + '/' + f)
return files
def generate_bundle_map(module_name, path, is_base=False):
t = []
for os_filename in file_list(path):
bundle_filename = (os_filename.replace(path, '')[1:])
if is_base:
bundle_filename = 'modules/' + bundle_filename
else:
bundle_filename = module_name + '/' + bundle_filename
t.append({ 'os_filename': os_filename, 'bundle_filename': bundle_filename })
return t
try:
opts, args = getopt.getopt(sys.argv[1:], 'lb', [])
except:
sys.exit(2)
if __name__ == '__main__':
module_path = args[0]
module_name = os.path.basename(module_path)
for o, a in opts:
if o == '-l':
for path in args:
print('\n'.join(file_list(path)))
elif o == '-b':
for path in args:
print(generate_bundle_map(module_name, path))
|
#!/usr/bin/env python
import os
import sys
import glob
import getopt
def file_list(path):
files = []
if os.path.isfile(path):
return [path]
for f in os.listdir(path):
new_dir = os.path.join(path, f)
if os.path.isdir(new_dir) and not os.path.islink(new_dir):
files.extend(file_list(new_dir))
else:
if f.endswith('.lua'):
files.append(path + '/' + f)
return files
def generate_bundle_map(module_name, path, is_base=False):
t = []
for os_filename in file_list(path):
bundle_filename = (os_filename.replace(path, '')[1:])
if is_base:
bundle_filename = 'modules/' + bundle_filename
else:
bundle_filename = module_name + '/' + bundle_filename
t.append({ 'os_filename': os_filename, 'bundle_filename': bundle_filename })
return t
try:
opts, args = getopt.getopt(sys.argv[1:], 'lb', [])
except:
sys.exit(2)
if __name__ == '__main__':
module_path = args[0]
module_name = os.path.basename(module_path)
for o, a in opts:
if o == '-l':
for path in args:
print('\n'.join(file_list(path)))
elif o == '-b':
for path in args:
print(generate_bundle_map(module_name, path))
Stop using os.path.join, because Visual Studio can actually handle forward
slash style paths, and the os.path method was creating mixed \\ and /
style paths, b0rking everything.#!/usr/bin/env python
import os
import sys
import glob
import getopt
def file_list(path):
files = []
if os.path.isfile(path):
return [path]
for f in os.listdir(path):
new_dir = path + '/' + f
if os.path.isdir(new_dir) and not os.path.islink(new_dir):
files.extend(file_list(new_dir))
else:
if f.endswith('.lua'):
files.append(path + '/' + f)
return files
def generate_bundle_map(module_name, path, is_base=False):
t = []
for os_filename in file_list(path):
bundle_filename = (os_filename.replace(path, '')[1:])
if is_base:
bundle_filename = 'modules/' + bundle_filename
else:
bundle_filename = module_name + '/' + bundle_filename
t.append({ 'os_filename': os_filename, 'bundle_filename': bundle_filename })
return t
try:
opts, args = getopt.getopt(sys.argv[1:], 'lb', [])
except:
sys.exit(2)
if __name__ == '__main__':
module_path = args[0]
module_name = os.path.basename(module_path)
for o, a in opts:
if o == '-l':
for path in args:
print('\n'.join(file_list(path)))
elif o == '-b':
for path in args:
print(generate_bundle_map(module_name, path))
|
<commit_before>#!/usr/bin/env python
import os
import sys
import glob
import getopt
def file_list(path):
files = []
if os.path.isfile(path):
return [path]
for f in os.listdir(path):
new_dir = os.path.join(path, f)
if os.path.isdir(new_dir) and not os.path.islink(new_dir):
files.extend(file_list(new_dir))
else:
if f.endswith('.lua'):
files.append(path + '/' + f)
return files
def generate_bundle_map(module_name, path, is_base=False):
t = []
for os_filename in file_list(path):
bundle_filename = (os_filename.replace(path, '')[1:])
if is_base:
bundle_filename = 'modules/' + bundle_filename
else:
bundle_filename = module_name + '/' + bundle_filename
t.append({ 'os_filename': os_filename, 'bundle_filename': bundle_filename })
return t
try:
opts, args = getopt.getopt(sys.argv[1:], 'lb', [])
except:
sys.exit(2)
if __name__ == '__main__':
module_path = args[0]
module_name = os.path.basename(module_path)
for o, a in opts:
if o == '-l':
for path in args:
print('\n'.join(file_list(path)))
elif o == '-b':
for path in args:
print(generate_bundle_map(module_name, path))
<commit_msg>Stop using os.path.join, because Visual Studio can actually handle forward
slash style paths, and the os.path method was creating mixed \\ and /
style paths, b0rking everything.<commit_after>#!/usr/bin/env python
import os
import sys
import glob
import getopt
def file_list(path):
files = []
if os.path.isfile(path):
return [path]
for f in os.listdir(path):
new_dir = path + '/' + f
if os.path.isdir(new_dir) and not os.path.islink(new_dir):
files.extend(file_list(new_dir))
else:
if f.endswith('.lua'):
files.append(path + '/' + f)
return files
def generate_bundle_map(module_name, path, is_base=False):
t = []
for os_filename in file_list(path):
bundle_filename = (os_filename.replace(path, '')[1:])
if is_base:
bundle_filename = 'modules/' + bundle_filename
else:
bundle_filename = module_name + '/' + bundle_filename
t.append({ 'os_filename': os_filename, 'bundle_filename': bundle_filename })
return t
try:
opts, args = getopt.getopt(sys.argv[1:], 'lb', [])
except:
sys.exit(2)
if __name__ == '__main__':
module_path = args[0]
module_name = os.path.basename(module_path)
for o, a in opts:
if o == '-l':
for path in args:
print('\n'.join(file_list(path)))
elif o == '-b':
for path in args:
print(generate_bundle_map(module_name, path))
|
cb2a9032e1ffef5020b24a28079ecc127cc178b7
|
comparch/__init__.py
|
comparch/__init__.py
|
from .implicit import implicit
from .registry import ClassRegistry, Registry
from .lookup import Lookup, CachedLookup
|
from .implicit import implicit
from .registry import ClassRegistry, Registry
from .lookup import Lookup, CachedLookup
from .interface import Interface
|
Include Interface in public API.
|
Include Interface in public API.
|
Python
|
bsd-3-clause
|
taschini/reg,morepath/reg
|
from .implicit import implicit
from .registry import ClassRegistry, Registry
from .lookup import Lookup, CachedLookup
Include Interface in public API.
|
from .implicit import implicit
from .registry import ClassRegistry, Registry
from .lookup import Lookup, CachedLookup
from .interface import Interface
|
<commit_before>from .implicit import implicit
from .registry import ClassRegistry, Registry
from .lookup import Lookup, CachedLookup
<commit_msg>Include Interface in public API.<commit_after>
|
from .implicit import implicit
from .registry import ClassRegistry, Registry
from .lookup import Lookup, CachedLookup
from .interface import Interface
|
from .implicit import implicit
from .registry import ClassRegistry, Registry
from .lookup import Lookup, CachedLookup
Include Interface in public API.from .implicit import implicit
from .registry import ClassRegistry, Registry
from .lookup import Lookup, CachedLookup
from .interface import Interface
|
<commit_before>from .implicit import implicit
from .registry import ClassRegistry, Registry
from .lookup import Lookup, CachedLookup
<commit_msg>Include Interface in public API.<commit_after>from .implicit import implicit
from .registry import ClassRegistry, Registry
from .lookup import Lookup, CachedLookup
from .interface import Interface
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.