commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
766b2165cf17506c4c28f858915929511df5c5ba
|
nutsurv/dashboard/serializers.py
|
nutsurv/dashboard/serializers.py
|
from rest_framework import serializers
from django.contrib.auth.models import User
from .models import Alert, HouseholdSurveyJSON, TeamMember
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = [ 'url', 'username', 'email']
class SimpleUserSerializer(UserSerializer):
class Meta:
model = User
fields = [ 'username', 'email' ]
class TeamMemberSerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='teammember-detail',
lookup_field="member_id")
class Meta:
model = TeamMember
fields = [ 'url', 'member_id', 'name', 'phone', 'email']
class HouseholdSurveyJSONSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = HouseholdSurveyJSON
class AlertSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Alert
fields = (
'url',
'id',
# fields
'category',
'text',
'archived',
'created',
'completed',
# TODO fields still in json
'team_id',
'team_name',
'cluster_id',
'location',
'type',
'survey_id',
)
|
from rest_framework import serializers
from django.contrib.auth.models import User
from .models import Alert, HouseholdSurveyJSON, TeamMember
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = [ 'url', 'username', 'email']
class SimpleUserSerializer(UserSerializer):
class Meta:
model = User
fields = [ 'username', 'email' ]
class TeamMemberSerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='teammember-detail',
lookup_field="member_id")
class Meta:
model = TeamMember
fields = [ 'url', 'member_id', 'name', 'phone', 'email']
class HouseholdSurveyJSONSerializer(serializers.HyperlinkedModelSerializer):
team_lead = serializers.HyperlinkedIdentityField(view_name='teammember-detail',
lookup_field="member_id")
team_assistant = serializers.HyperlinkedIdentityField(view_name='teammember-detail',
lookup_field="member_id")
team_anthropometrist = serializers.HyperlinkedIdentityField(view_name='teammember-detail',
lookup_field="member_id")
class Meta:
model = HouseholdSurveyJSON
class AlertSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Alert
fields = (
'url',
'id',
# fields
'category',
'text',
'archived',
'created',
'completed',
# TODO fields still in json
'team_id',
'team_name',
'cluster_id',
'location',
'type',
'survey_id',
)
|
Fix lookup in survey view
|
Fix lookup in survey view
|
Python
|
agpl-3.0
|
johanneswilm/eha-nutsurv-django,eHealthAfrica/nutsurv,johanneswilm/eha-nutsurv-django,johanneswilm/eha-nutsurv-django,eHealthAfrica/nutsurv,eHealthAfrica/nutsurv
|
from rest_framework import serializers
from django.contrib.auth.models import User
from .models import Alert, HouseholdSurveyJSON, TeamMember
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = [ 'url', 'username', 'email']
class SimpleUserSerializer(UserSerializer):
class Meta:
model = User
fields = [ 'username', 'email' ]
class TeamMemberSerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='teammember-detail',
lookup_field="member_id")
class Meta:
model = TeamMember
fields = [ 'url', 'member_id', 'name', 'phone', 'email']
class HouseholdSurveyJSONSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = HouseholdSurveyJSON
class AlertSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Alert
fields = (
'url',
'id',
# fields
'category',
'text',
'archived',
'created',
'completed',
# TODO fields still in json
'team_id',
'team_name',
'cluster_id',
'location',
'type',
'survey_id',
)
Fix lookup in survey view
|
from rest_framework import serializers
from django.contrib.auth.models import User
from .models import Alert, HouseholdSurveyJSON, TeamMember
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = [ 'url', 'username', 'email']
class SimpleUserSerializer(UserSerializer):
class Meta:
model = User
fields = [ 'username', 'email' ]
class TeamMemberSerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='teammember-detail',
lookup_field="member_id")
class Meta:
model = TeamMember
fields = [ 'url', 'member_id', 'name', 'phone', 'email']
class HouseholdSurveyJSONSerializer(serializers.HyperlinkedModelSerializer):
team_lead = serializers.HyperlinkedIdentityField(view_name='teammember-detail',
lookup_field="member_id")
team_assistant = serializers.HyperlinkedIdentityField(view_name='teammember-detail',
lookup_field="member_id")
team_anthropometrist = serializers.HyperlinkedIdentityField(view_name='teammember-detail',
lookup_field="member_id")
class Meta:
model = HouseholdSurveyJSON
class AlertSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Alert
fields = (
'url',
'id',
# fields
'category',
'text',
'archived',
'created',
'completed',
# TODO fields still in json
'team_id',
'team_name',
'cluster_id',
'location',
'type',
'survey_id',
)
|
<commit_before>from rest_framework import serializers
from django.contrib.auth.models import User
from .models import Alert, HouseholdSurveyJSON, TeamMember
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = [ 'url', 'username', 'email']
class SimpleUserSerializer(UserSerializer):
class Meta:
model = User
fields = [ 'username', 'email' ]
class TeamMemberSerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='teammember-detail',
lookup_field="member_id")
class Meta:
model = TeamMember
fields = [ 'url', 'member_id', 'name', 'phone', 'email']
class HouseholdSurveyJSONSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = HouseholdSurveyJSON
class AlertSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Alert
fields = (
'url',
'id',
# fields
'category',
'text',
'archived',
'created',
'completed',
# TODO fields still in json
'team_id',
'team_name',
'cluster_id',
'location',
'type',
'survey_id',
)
<commit_msg>Fix lookup in survey view<commit_after>
|
from rest_framework import serializers
from django.contrib.auth.models import User
from .models import Alert, HouseholdSurveyJSON, TeamMember
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = [ 'url', 'username', 'email']
class SimpleUserSerializer(UserSerializer):
class Meta:
model = User
fields = [ 'username', 'email' ]
class TeamMemberSerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='teammember-detail',
lookup_field="member_id")
class Meta:
model = TeamMember
fields = [ 'url', 'member_id', 'name', 'phone', 'email']
class HouseholdSurveyJSONSerializer(serializers.HyperlinkedModelSerializer):
team_lead = serializers.HyperlinkedIdentityField(view_name='teammember-detail',
lookup_field="member_id")
team_assistant = serializers.HyperlinkedIdentityField(view_name='teammember-detail',
lookup_field="member_id")
team_anthropometrist = serializers.HyperlinkedIdentityField(view_name='teammember-detail',
lookup_field="member_id")
class Meta:
model = HouseholdSurveyJSON
class AlertSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Alert
fields = (
'url',
'id',
# fields
'category',
'text',
'archived',
'created',
'completed',
# TODO fields still in json
'team_id',
'team_name',
'cluster_id',
'location',
'type',
'survey_id',
)
|
from rest_framework import serializers
from django.contrib.auth.models import User
from .models import Alert, HouseholdSurveyJSON, TeamMember
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = [ 'url', 'username', 'email']
class SimpleUserSerializer(UserSerializer):
class Meta:
model = User
fields = [ 'username', 'email' ]
class TeamMemberSerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='teammember-detail',
lookup_field="member_id")
class Meta:
model = TeamMember
fields = [ 'url', 'member_id', 'name', 'phone', 'email']
class HouseholdSurveyJSONSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = HouseholdSurveyJSON
class AlertSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Alert
fields = (
'url',
'id',
# fields
'category',
'text',
'archived',
'created',
'completed',
# TODO fields still in json
'team_id',
'team_name',
'cluster_id',
'location',
'type',
'survey_id',
)
Fix lookup in survey viewfrom rest_framework import serializers
from django.contrib.auth.models import User
from .models import Alert, HouseholdSurveyJSON, TeamMember
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = [ 'url', 'username', 'email']
class SimpleUserSerializer(UserSerializer):
class Meta:
model = User
fields = [ 'username', 'email' ]
class TeamMemberSerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='teammember-detail',
lookup_field="member_id")
class Meta:
model = TeamMember
fields = [ 'url', 'member_id', 'name', 'phone', 'email']
class HouseholdSurveyJSONSerializer(serializers.HyperlinkedModelSerializer):
team_lead = serializers.HyperlinkedIdentityField(view_name='teammember-detail',
lookup_field="member_id")
team_assistant = serializers.HyperlinkedIdentityField(view_name='teammember-detail',
lookup_field="member_id")
team_anthropometrist = serializers.HyperlinkedIdentityField(view_name='teammember-detail',
lookup_field="member_id")
class Meta:
model = HouseholdSurveyJSON
class AlertSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Alert
fields = (
'url',
'id',
# fields
'category',
'text',
'archived',
'created',
'completed',
# TODO fields still in json
'team_id',
'team_name',
'cluster_id',
'location',
'type',
'survey_id',
)
|
<commit_before>from rest_framework import serializers
from django.contrib.auth.models import User
from .models import Alert, HouseholdSurveyJSON, TeamMember
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = [ 'url', 'username', 'email']
class SimpleUserSerializer(UserSerializer):
class Meta:
model = User
fields = [ 'username', 'email' ]
class TeamMemberSerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='teammember-detail',
lookup_field="member_id")
class Meta:
model = TeamMember
fields = [ 'url', 'member_id', 'name', 'phone', 'email']
class HouseholdSurveyJSONSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = HouseholdSurveyJSON
class AlertSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Alert
fields = (
'url',
'id',
# fields
'category',
'text',
'archived',
'created',
'completed',
# TODO fields still in json
'team_id',
'team_name',
'cluster_id',
'location',
'type',
'survey_id',
)
<commit_msg>Fix lookup in survey view<commit_after>from rest_framework import serializers
from django.contrib.auth.models import User
from .models import Alert, HouseholdSurveyJSON, TeamMember
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = [ 'url', 'username', 'email']
class SimpleUserSerializer(UserSerializer):
class Meta:
model = User
fields = [ 'username', 'email' ]
class TeamMemberSerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='teammember-detail',
lookup_field="member_id")
class Meta:
model = TeamMember
fields = [ 'url', 'member_id', 'name', 'phone', 'email']
class HouseholdSurveyJSONSerializer(serializers.HyperlinkedModelSerializer):
team_lead = serializers.HyperlinkedIdentityField(view_name='teammember-detail',
lookup_field="member_id")
team_assistant = serializers.HyperlinkedIdentityField(view_name='teammember-detail',
lookup_field="member_id")
team_anthropometrist = serializers.HyperlinkedIdentityField(view_name='teammember-detail',
lookup_field="member_id")
class Meta:
model = HouseholdSurveyJSON
class AlertSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Alert
fields = (
'url',
'id',
# fields
'category',
'text',
'archived',
'created',
'completed',
# TODO fields still in json
'team_id',
'team_name',
'cluster_id',
'location',
'type',
'survey_id',
)
|
6696451b7c7a9b2de5b624b47159efae8fcf06b7
|
opwen_email_server/api/lokole.py
|
opwen_email_server/api/lokole.py
|
def upload(upload_info):
"""
:type upload_info: dict
"""
client_id = upload_info['client_id']
resource_id = upload_info['resource_id']
resource_type = upload_info['resource_type']
raise NotImplementedError
def download(client_id):
"""
:type client_id: str
:rtype dict
"""
return {
'resource_id': None,
'resource_type': None,
}
|
def upload(upload_info):
"""
:type upload_info: dict
"""
client_id = upload_info['client_id'] # noqa: F841
resource_id = upload_info['resource_id'] # noqa: F841
resource_type = upload_info['resource_type'] # noqa: F841
raise NotImplementedError
def download(client_id): # noqa: F841
"""
:type client_id: str
:rtype dict
"""
return {
'resource_id': None,
'resource_type': None,
}
|
Disable linter in in-progress code
|
Disable linter in in-progress code
|
Python
|
apache-2.0
|
ascoderu/opwen-cloudserver,ascoderu/opwen-cloudserver
|
def upload(upload_info):
"""
:type upload_info: dict
"""
client_id = upload_info['client_id']
resource_id = upload_info['resource_id']
resource_type = upload_info['resource_type']
raise NotImplementedError
def download(client_id):
"""
:type client_id: str
:rtype dict
"""
return {
'resource_id': None,
'resource_type': None,
}
Disable linter in in-progress code
|
def upload(upload_info):
"""
:type upload_info: dict
"""
client_id = upload_info['client_id'] # noqa: F841
resource_id = upload_info['resource_id'] # noqa: F841
resource_type = upload_info['resource_type'] # noqa: F841
raise NotImplementedError
def download(client_id): # noqa: F841
"""
:type client_id: str
:rtype dict
"""
return {
'resource_id': None,
'resource_type': None,
}
|
<commit_before>def upload(upload_info):
"""
:type upload_info: dict
"""
client_id = upload_info['client_id']
resource_id = upload_info['resource_id']
resource_type = upload_info['resource_type']
raise NotImplementedError
def download(client_id):
"""
:type client_id: str
:rtype dict
"""
return {
'resource_id': None,
'resource_type': None,
}
<commit_msg>Disable linter in in-progress code<commit_after>
|
def upload(upload_info):
"""
:type upload_info: dict
"""
client_id = upload_info['client_id'] # noqa: F841
resource_id = upload_info['resource_id'] # noqa: F841
resource_type = upload_info['resource_type'] # noqa: F841
raise NotImplementedError
def download(client_id): # noqa: F841
"""
:type client_id: str
:rtype dict
"""
return {
'resource_id': None,
'resource_type': None,
}
|
def upload(upload_info):
"""
:type upload_info: dict
"""
client_id = upload_info['client_id']
resource_id = upload_info['resource_id']
resource_type = upload_info['resource_type']
raise NotImplementedError
def download(client_id):
"""
:type client_id: str
:rtype dict
"""
return {
'resource_id': None,
'resource_type': None,
}
Disable linter in in-progress codedef upload(upload_info):
"""
:type upload_info: dict
"""
client_id = upload_info['client_id'] # noqa: F841
resource_id = upload_info['resource_id'] # noqa: F841
resource_type = upload_info['resource_type'] # noqa: F841
raise NotImplementedError
def download(client_id): # noqa: F841
"""
:type client_id: str
:rtype dict
"""
return {
'resource_id': None,
'resource_type': None,
}
|
<commit_before>def upload(upload_info):
"""
:type upload_info: dict
"""
client_id = upload_info['client_id']
resource_id = upload_info['resource_id']
resource_type = upload_info['resource_type']
raise NotImplementedError
def download(client_id):
"""
:type client_id: str
:rtype dict
"""
return {
'resource_id': None,
'resource_type': None,
}
<commit_msg>Disable linter in in-progress code<commit_after>def upload(upload_info):
"""
:type upload_info: dict
"""
client_id = upload_info['client_id'] # noqa: F841
resource_id = upload_info['resource_id'] # noqa: F841
resource_type = upload_info['resource_type'] # noqa: F841
raise NotImplementedError
def download(client_id): # noqa: F841
"""
:type client_id: str
:rtype dict
"""
return {
'resource_id': None,
'resource_type': None,
}
|
199caafc817e4e007b2eedd307cb7bff06c029c6
|
imagersite/imager_images/tests.py
|
imagersite/imager_images/tests.py
|
from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.test import TestCase
import factory
from faker import Faker
from imager_profile.models import ImagerProfile
from .models import Album, Photo
# Create your tests here.
|
from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.test import TestCase
import factory
from faker import Faker
from imager_profile.models import ImagerProfile
from .models import Album, Pho
# Create your tests here.
fake = Faker()
class UserFactory(factory.Factory):
"""Create a fake user."""
class Meta:
model = User
username = factory.Sequence(lambda n: 'user{}'.format(n))
first_name = fake.first_name()
last_name = fake.last_name()
email = fake.email()
|
Add a UserFactory for images test
|
Add a UserFactory for images test
|
Python
|
mit
|
jesseklein406/django-imager,jesseklein406/django-imager,jesseklein406/django-imager
|
from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.test import TestCase
import factory
from faker import Faker
from imager_profile.models import ImagerProfile
from .models import Album, Photo
# Create your tests here.
Add a UserFactory for images test
|
from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.test import TestCase
import factory
from faker import Faker
from imager_profile.models import ImagerProfile
from .models import Album, Pho
# Create your tests here.
fake = Faker()
class UserFactory(factory.Factory):
"""Create a fake user."""
class Meta:
model = User
username = factory.Sequence(lambda n: 'user{}'.format(n))
first_name = fake.first_name()
last_name = fake.last_name()
email = fake.email()
|
<commit_before>from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.test import TestCase
import factory
from faker import Faker
from imager_profile.models import ImagerProfile
from .models import Album, Photo
# Create your tests here.
<commit_msg>Add a UserFactory for images test<commit_after>
|
from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.test import TestCase
import factory
from faker import Faker
from imager_profile.models import ImagerProfile
from .models import Album, Pho
# Create your tests here.
fake = Faker()
class UserFactory(factory.Factory):
"""Create a fake user."""
class Meta:
model = User
username = factory.Sequence(lambda n: 'user{}'.format(n))
first_name = fake.first_name()
last_name = fake.last_name()
email = fake.email()
|
from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.test import TestCase
import factory
from faker import Faker
from imager_profile.models import ImagerProfile
from .models import Album, Photo
# Create your tests here.
Add a UserFactory for images testfrom __future__ import unicode_literals
from django.contrib.auth.models import User
from django.test import TestCase
import factory
from faker import Faker
from imager_profile.models import ImagerProfile
from .models import Album, Pho
# Create your tests here.
fake = Faker()
class UserFactory(factory.Factory):
"""Create a fake user."""
class Meta:
model = User
username = factory.Sequence(lambda n: 'user{}'.format(n))
first_name = fake.first_name()
last_name = fake.last_name()
email = fake.email()
|
<commit_before>from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.test import TestCase
import factory
from faker import Faker
from imager_profile.models import ImagerProfile
from .models import Album, Photo
# Create your tests here.
<commit_msg>Add a UserFactory for images test<commit_after>from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.test import TestCase
import factory
from faker import Faker
from imager_profile.models import ImagerProfile
from .models import Album, Pho
# Create your tests here.
fake = Faker()
class UserFactory(factory.Factory):
"""Create a fake user."""
class Meta:
model = User
username = factory.Sequence(lambda n: 'user{}'.format(n))
first_name = fake.first_name()
last_name = fake.last_name()
email = fake.email()
|
de42731ab97a7d4272c44cc750891906aa5b4417
|
buildlet/runner/ipythonparallel.py
|
buildlet/runner/ipythonparallel.py
|
"""
Task runner using IPython parallel interface.
See `The IPython task interface`_ and `IPython Documentation`_
in `IPython Documentation`_.
.. _The IPython task interface:
http://ipython.org/ipython-doc/dev/parallel/parallel_task.html
.. _DAG Dependencies:
http://ipython.org/ipython-doc/dev/parallel/dag_dependencies.html
.. _IPython Documentation:
http://ipython.org/ipython-doc/dev/
"""
import IPython.parallel
from .simple import SimpleRunner
from .mixinparallel import MixInParallelRunner
class IPythonParallelRunner(MixInParallelRunner, SimpleRunner):
def submit_tasks(self):
self.client = IPython.parallel.Client()
self.view = view = self.client.load_balanced_view()
self.results = results = {}
for node in self.sorted_nodes():
deps = [results[n] for n in self.graph.predecessors(node)]
with view.temp_flags(after=deps):
results[node] = view.apply_async(self.run_func,
self.nodetaskmap[node])
def wait_tasks(self):
self.view.wait(self.results.values())
|
"""
Task runner using IPython parallel interface.
See `The IPython task interface`_ and `IPython Documentation`_
in `IPython Documentation`_.
.. _The IPython task interface:
http://ipython.org/ipython-doc/dev/parallel/parallel_task.html
.. _DAG Dependencies:
http://ipython.org/ipython-doc/dev/parallel/dag_dependencies.html
.. _IPython Documentation:
http://ipython.org/ipython-doc/dev/
"""
import IPython.parallel
from .simple import SimpleRunner
from .mixinparallel import MixInParallelRunner
class IPythonParallelRunner(MixInParallelRunner, SimpleRunner):
def submit_tasks(self):
self.client = IPython.parallel.Client()
self.view = view = self.client.load_balanced_view()
self.results = results = {}
for node in self.sorted_nodes():
deps = [results[n] for n in self.graph.predecessors(node)]
with view.temp_flags(after=deps):
results[node] = view.apply_async(self.run_func,
self.nodetaskmap[node])
def wait_tasks(self):
for r in self.results.values():
r.get()
|
Raise error if any in IPythonParallelRunner.wait_tasks
|
Raise error if any in IPythonParallelRunner.wait_tasks
|
Python
|
bsd-3-clause
|
tkf/buildlet
|
"""
Task runner using IPython parallel interface.
See `The IPython task interface`_ and `IPython Documentation`_
in `IPython Documentation`_.
.. _The IPython task interface:
http://ipython.org/ipython-doc/dev/parallel/parallel_task.html
.. _DAG Dependencies:
http://ipython.org/ipython-doc/dev/parallel/dag_dependencies.html
.. _IPython Documentation:
http://ipython.org/ipython-doc/dev/
"""
import IPython.parallel
from .simple import SimpleRunner
from .mixinparallel import MixInParallelRunner
class IPythonParallelRunner(MixInParallelRunner, SimpleRunner):
def submit_tasks(self):
self.client = IPython.parallel.Client()
self.view = view = self.client.load_balanced_view()
self.results = results = {}
for node in self.sorted_nodes():
deps = [results[n] for n in self.graph.predecessors(node)]
with view.temp_flags(after=deps):
results[node] = view.apply_async(self.run_func,
self.nodetaskmap[node])
def wait_tasks(self):
self.view.wait(self.results.values())
Raise error if any in IPythonParallelRunner.wait_tasks
|
"""
Task runner using IPython parallel interface.
See `The IPython task interface`_ and `IPython Documentation`_
in `IPython Documentation`_.
.. _The IPython task interface:
http://ipython.org/ipython-doc/dev/parallel/parallel_task.html
.. _DAG Dependencies:
http://ipython.org/ipython-doc/dev/parallel/dag_dependencies.html
.. _IPython Documentation:
http://ipython.org/ipython-doc/dev/
"""
import IPython.parallel
from .simple import SimpleRunner
from .mixinparallel import MixInParallelRunner
class IPythonParallelRunner(MixInParallelRunner, SimpleRunner):
def submit_tasks(self):
self.client = IPython.parallel.Client()
self.view = view = self.client.load_balanced_view()
self.results = results = {}
for node in self.sorted_nodes():
deps = [results[n] for n in self.graph.predecessors(node)]
with view.temp_flags(after=deps):
results[node] = view.apply_async(self.run_func,
self.nodetaskmap[node])
def wait_tasks(self):
for r in self.results.values():
r.get()
|
<commit_before>"""
Task runner using IPython parallel interface.
See `The IPython task interface`_ and `IPython Documentation`_
in `IPython Documentation`_.
.. _The IPython task interface:
http://ipython.org/ipython-doc/dev/parallel/parallel_task.html
.. _DAG Dependencies:
http://ipython.org/ipython-doc/dev/parallel/dag_dependencies.html
.. _IPython Documentation:
http://ipython.org/ipython-doc/dev/
"""
import IPython.parallel
from .simple import SimpleRunner
from .mixinparallel import MixInParallelRunner
class IPythonParallelRunner(MixInParallelRunner, SimpleRunner):
def submit_tasks(self):
self.client = IPython.parallel.Client()
self.view = view = self.client.load_balanced_view()
self.results = results = {}
for node in self.sorted_nodes():
deps = [results[n] for n in self.graph.predecessors(node)]
with view.temp_flags(after=deps):
results[node] = view.apply_async(self.run_func,
self.nodetaskmap[node])
def wait_tasks(self):
self.view.wait(self.results.values())
<commit_msg>Raise error if any in IPythonParallelRunner.wait_tasks<commit_after>
|
"""
Task runner using IPython parallel interface.
See `The IPython task interface`_ and `IPython Documentation`_
in `IPython Documentation`_.
.. _The IPython task interface:
http://ipython.org/ipython-doc/dev/parallel/parallel_task.html
.. _DAG Dependencies:
http://ipython.org/ipython-doc/dev/parallel/dag_dependencies.html
.. _IPython Documentation:
http://ipython.org/ipython-doc/dev/
"""
import IPython.parallel
from .simple import SimpleRunner
from .mixinparallel import MixInParallelRunner
class IPythonParallelRunner(MixInParallelRunner, SimpleRunner):
def submit_tasks(self):
self.client = IPython.parallel.Client()
self.view = view = self.client.load_balanced_view()
self.results = results = {}
for node in self.sorted_nodes():
deps = [results[n] for n in self.graph.predecessors(node)]
with view.temp_flags(after=deps):
results[node] = view.apply_async(self.run_func,
self.nodetaskmap[node])
def wait_tasks(self):
for r in self.results.values():
r.get()
|
"""
Task runner using IPython parallel interface.
See `The IPython task interface`_ and `IPython Documentation`_
in `IPython Documentation`_.
.. _The IPython task interface:
http://ipython.org/ipython-doc/dev/parallel/parallel_task.html
.. _DAG Dependencies:
http://ipython.org/ipython-doc/dev/parallel/dag_dependencies.html
.. _IPython Documentation:
http://ipython.org/ipython-doc/dev/
"""
import IPython.parallel
from .simple import SimpleRunner
from .mixinparallel import MixInParallelRunner
class IPythonParallelRunner(MixInParallelRunner, SimpleRunner):
def submit_tasks(self):
self.client = IPython.parallel.Client()
self.view = view = self.client.load_balanced_view()
self.results = results = {}
for node in self.sorted_nodes():
deps = [results[n] for n in self.graph.predecessors(node)]
with view.temp_flags(after=deps):
results[node] = view.apply_async(self.run_func,
self.nodetaskmap[node])
def wait_tasks(self):
self.view.wait(self.results.values())
Raise error if any in IPythonParallelRunner.wait_tasks"""
Task runner using IPython parallel interface.
See `The IPython task interface`_ and `IPython Documentation`_
in `IPython Documentation`_.
.. _The IPython task interface:
http://ipython.org/ipython-doc/dev/parallel/parallel_task.html
.. _DAG Dependencies:
http://ipython.org/ipython-doc/dev/parallel/dag_dependencies.html
.. _IPython Documentation:
http://ipython.org/ipython-doc/dev/
"""
import IPython.parallel
from .simple import SimpleRunner
from .mixinparallel import MixInParallelRunner
class IPythonParallelRunner(MixInParallelRunner, SimpleRunner):
def submit_tasks(self):
self.client = IPython.parallel.Client()
self.view = view = self.client.load_balanced_view()
self.results = results = {}
for node in self.sorted_nodes():
deps = [results[n] for n in self.graph.predecessors(node)]
with view.temp_flags(after=deps):
results[node] = view.apply_async(self.run_func,
self.nodetaskmap[node])
def wait_tasks(self):
for r in self.results.values():
r.get()
|
<commit_before>"""
Task runner using IPython parallel interface.
See `The IPython task interface`_ and `IPython Documentation`_
in `IPython Documentation`_.
.. _The IPython task interface:
http://ipython.org/ipython-doc/dev/parallel/parallel_task.html
.. _DAG Dependencies:
http://ipython.org/ipython-doc/dev/parallel/dag_dependencies.html
.. _IPython Documentation:
http://ipython.org/ipython-doc/dev/
"""
import IPython.parallel
from .simple import SimpleRunner
from .mixinparallel import MixInParallelRunner
class IPythonParallelRunner(MixInParallelRunner, SimpleRunner):
def submit_tasks(self):
self.client = IPython.parallel.Client()
self.view = view = self.client.load_balanced_view()
self.results = results = {}
for node in self.sorted_nodes():
deps = [results[n] for n in self.graph.predecessors(node)]
with view.temp_flags(after=deps):
results[node] = view.apply_async(self.run_func,
self.nodetaskmap[node])
def wait_tasks(self):
self.view.wait(self.results.values())
<commit_msg>Raise error if any in IPythonParallelRunner.wait_tasks<commit_after>"""
Task runner using IPython parallel interface.
See `The IPython task interface`_ and `IPython Documentation`_
in `IPython Documentation`_.
.. _The IPython task interface:
http://ipython.org/ipython-doc/dev/parallel/parallel_task.html
.. _DAG Dependencies:
http://ipython.org/ipython-doc/dev/parallel/dag_dependencies.html
.. _IPython Documentation:
http://ipython.org/ipython-doc/dev/
"""
import IPython.parallel
from .simple import SimpleRunner
from .mixinparallel import MixInParallelRunner
class IPythonParallelRunner(MixInParallelRunner, SimpleRunner):
def submit_tasks(self):
self.client = IPython.parallel.Client()
self.view = view = self.client.load_balanced_view()
self.results = results = {}
for node in self.sorted_nodes():
deps = [results[n] for n in self.graph.predecessors(node)]
with view.temp_flags(after=deps):
results[node] = view.apply_async(self.run_func,
self.nodetaskmap[node])
def wait_tasks(self):
for r in self.results.values():
r.get()
|
b198dd91082dd5ae2fdddb7f7bd6ef05c35ba4f4
|
jdleden/local_settings_example.py
|
jdleden/local_settings_example.py
|
LDAP_NAME = 'ldap://'
LDAP_PASSWORD = ''
LDAP_DN = 'cn=writeuser,ou=sysUsers,dc=jd,dc=nl'
SECRET_KEY = ''
DATABASES = {
"default": {
# Add "postgresql_psycopg2", "mysql", "sqlite3" or "oracle".
"ENGINE": "django.db.backends.sqlite3",
# DB name or path to database file if using sqlite3.
"NAME": "dev.db",
# Not used with sqlite3.
"USER": "",
# Not used with sqlite3.
"PASSWORD": "",
# Set to empty string for localhost. Not used with sqlite3.
"HOST": "",
# Set to empty string for default. Not used with sqlite3.
"PORT": "",
}
}
|
DATABASES = {
"default": {
# Add "postgresql_psycopg2", "mysql", "sqlite3" or "oracle".
"ENGINE": "django.db.backends.sqlite3",
# DB name or path to database file if using sqlite3.
"NAME": "dev.db",
# Not used with sqlite3.
"USER": "",
# Not used with sqlite3.
"PASSWORD": "",
# Set to empty string for localhost. Not used with sqlite3.
"HOST": "",
# Set to empty string for default. Not used with sqlite3.
"PORT": "",
}
}
#################
# LDAP SETTINGS #
#################
LDAP_NAME = 'ldap://127.0.0.1:389/'
LDAP_DN = 'cn=writeall,ou=sysUsers,dc=jd,dc=nl'
LDAP_PASSWORD = ''
###################
# JANEUS SETTINGS #
###################
JANEUS_SERVER = "ldap://127.0.0.1:389/"
JANEUS_DN = "cn=readall,ou=sysUsers,dc=jd,dc=nl"
JANEUS_PASS = ""
|
Add Janeus settings to local_settings example
|
Add Janeus settings to local_settings example
|
Python
|
mit
|
jonge-democraten/jdleden,jonge-democraten/jdleden
|
LDAP_NAME = 'ldap://'
LDAP_PASSWORD = ''
LDAP_DN = 'cn=writeuser,ou=sysUsers,dc=jd,dc=nl'
SECRET_KEY = ''
DATABASES = {
"default": {
# Add "postgresql_psycopg2", "mysql", "sqlite3" or "oracle".
"ENGINE": "django.db.backends.sqlite3",
# DB name or path to database file if using sqlite3.
"NAME": "dev.db",
# Not used with sqlite3.
"USER": "",
# Not used with sqlite3.
"PASSWORD": "",
# Set to empty string for localhost. Not used with sqlite3.
"HOST": "",
# Set to empty string for default. Not used with sqlite3.
"PORT": "",
}
}
Add Janeus settings to local_settings example
|
DATABASES = {
"default": {
# Add "postgresql_psycopg2", "mysql", "sqlite3" or "oracle".
"ENGINE": "django.db.backends.sqlite3",
# DB name or path to database file if using sqlite3.
"NAME": "dev.db",
# Not used with sqlite3.
"USER": "",
# Not used with sqlite3.
"PASSWORD": "",
# Set to empty string for localhost. Not used with sqlite3.
"HOST": "",
# Set to empty string for default. Not used with sqlite3.
"PORT": "",
}
}
#################
# LDAP SETTINGS #
#################
LDAP_NAME = 'ldap://127.0.0.1:389/'
LDAP_DN = 'cn=writeall,ou=sysUsers,dc=jd,dc=nl'
LDAP_PASSWORD = ''
###################
# JANEUS SETTINGS #
###################
JANEUS_SERVER = "ldap://127.0.0.1:389/"
JANEUS_DN = "cn=readall,ou=sysUsers,dc=jd,dc=nl"
JANEUS_PASS = ""
|
<commit_before>
LDAP_NAME = 'ldap://'
LDAP_PASSWORD = ''
LDAP_DN = 'cn=writeuser,ou=sysUsers,dc=jd,dc=nl'
SECRET_KEY = ''
DATABASES = {
"default": {
# Add "postgresql_psycopg2", "mysql", "sqlite3" or "oracle".
"ENGINE": "django.db.backends.sqlite3",
# DB name or path to database file if using sqlite3.
"NAME": "dev.db",
# Not used with sqlite3.
"USER": "",
# Not used with sqlite3.
"PASSWORD": "",
# Set to empty string for localhost. Not used with sqlite3.
"HOST": "",
# Set to empty string for default. Not used with sqlite3.
"PORT": "",
}
}
<commit_msg>Add Janeus settings to local_settings example<commit_after>
|
DATABASES = {
"default": {
# Add "postgresql_psycopg2", "mysql", "sqlite3" or "oracle".
"ENGINE": "django.db.backends.sqlite3",
# DB name or path to database file if using sqlite3.
"NAME": "dev.db",
# Not used with sqlite3.
"USER": "",
# Not used with sqlite3.
"PASSWORD": "",
# Set to empty string for localhost. Not used with sqlite3.
"HOST": "",
# Set to empty string for default. Not used with sqlite3.
"PORT": "",
}
}
#################
# LDAP SETTINGS #
#################
LDAP_NAME = 'ldap://127.0.0.1:389/'
LDAP_DN = 'cn=writeall,ou=sysUsers,dc=jd,dc=nl'
LDAP_PASSWORD = ''
###################
# JANEUS SETTINGS #
###################
JANEUS_SERVER = "ldap://127.0.0.1:389/"
JANEUS_DN = "cn=readall,ou=sysUsers,dc=jd,dc=nl"
JANEUS_PASS = ""
|
LDAP_NAME = 'ldap://'
LDAP_PASSWORD = ''
LDAP_DN = 'cn=writeuser,ou=sysUsers,dc=jd,dc=nl'
SECRET_KEY = ''
DATABASES = {
"default": {
# Add "postgresql_psycopg2", "mysql", "sqlite3" or "oracle".
"ENGINE": "django.db.backends.sqlite3",
# DB name or path to database file if using sqlite3.
"NAME": "dev.db",
# Not used with sqlite3.
"USER": "",
# Not used with sqlite3.
"PASSWORD": "",
# Set to empty string for localhost. Not used with sqlite3.
"HOST": "",
# Set to empty string for default. Not used with sqlite3.
"PORT": "",
}
}
Add Janeus settings to local_settings example
DATABASES = {
"default": {
# Add "postgresql_psycopg2", "mysql", "sqlite3" or "oracle".
"ENGINE": "django.db.backends.sqlite3",
# DB name or path to database file if using sqlite3.
"NAME": "dev.db",
# Not used with sqlite3.
"USER": "",
# Not used with sqlite3.
"PASSWORD": "",
# Set to empty string for localhost. Not used with sqlite3.
"HOST": "",
# Set to empty string for default. Not used with sqlite3.
"PORT": "",
}
}
#################
# LDAP SETTINGS #
#################
LDAP_NAME = 'ldap://127.0.0.1:389/'
LDAP_DN = 'cn=writeall,ou=sysUsers,dc=jd,dc=nl'
LDAP_PASSWORD = ''
###################
# JANEUS SETTINGS #
###################
JANEUS_SERVER = "ldap://127.0.0.1:389/"
JANEUS_DN = "cn=readall,ou=sysUsers,dc=jd,dc=nl"
JANEUS_PASS = ""
|
<commit_before>
LDAP_NAME = 'ldap://'
LDAP_PASSWORD = ''
LDAP_DN = 'cn=writeuser,ou=sysUsers,dc=jd,dc=nl'
SECRET_KEY = ''
DATABASES = {
"default": {
# Add "postgresql_psycopg2", "mysql", "sqlite3" or "oracle".
"ENGINE": "django.db.backends.sqlite3",
# DB name or path to database file if using sqlite3.
"NAME": "dev.db",
# Not used with sqlite3.
"USER": "",
# Not used with sqlite3.
"PASSWORD": "",
# Set to empty string for localhost. Not used with sqlite3.
"HOST": "",
# Set to empty string for default. Not used with sqlite3.
"PORT": "",
}
}
<commit_msg>Add Janeus settings to local_settings example<commit_after>
DATABASES = {
"default": {
# Add "postgresql_psycopg2", "mysql", "sqlite3" or "oracle".
"ENGINE": "django.db.backends.sqlite3",
# DB name or path to database file if using sqlite3.
"NAME": "dev.db",
# Not used with sqlite3.
"USER": "",
# Not used with sqlite3.
"PASSWORD": "",
# Set to empty string for localhost. Not used with sqlite3.
"HOST": "",
# Set to empty string for default. Not used with sqlite3.
"PORT": "",
}
}
#################
# LDAP SETTINGS #
#################
LDAP_NAME = 'ldap://127.0.0.1:389/'
LDAP_DN = 'cn=writeall,ou=sysUsers,dc=jd,dc=nl'
LDAP_PASSWORD = ''
###################
# JANEUS SETTINGS #
###################
JANEUS_SERVER = "ldap://127.0.0.1:389/"
JANEUS_DN = "cn=readall,ou=sysUsers,dc=jd,dc=nl"
JANEUS_PASS = ""
|
9e406380196a51a2502878a641ea90a11d6a19c3
|
comrade/core/context_processors.py
|
comrade/core/context_processors.py
|
from django.conf import settings
from django.contrib.sites.models import Site
from settings import DeploymentType
def default(request):
context = {}
context['DEPLOYMENT'] = settings.DEPLOYMENT
context['current_site'] = Site.objects.get_current()
if settings.DEPLOYMENT != DeploymentType.PRODUCTION:
context['GIT_COMMIT'] = settings.GIT_COMMIT
context['site_email'] = settings.CONTACT_EMAIL
if request.is_secure():
context['protocol'] = 'https://'
else:
context['protocol'] = 'http://'
context['current_site_url'] = (context['protocol'] +
context['current_site'].domain)
return context
def ssl_media(request):
if request.is_secure():
ssl_media_url = settings.MEDIA_URL.replace('http://','https://')
else:
ssl_media_url = settings.MEDIA_URL
return {'MEDIA_URL': ssl_media_url}
|
from django.conf import settings
from django.contrib.sites.models import Site
from settings import DeploymentType
def default(request):
context = {}
context['DEPLOYMENT'] = settings.DEPLOYMENT
context['current_site'] = Site.objects.get_current()
if settings.DEPLOYMENT != DeploymentType.PRODUCTION:
context['GIT_COMMIT'] = settings.GIT_COMMIT
context['site_email'] = settings.CONTACT_EMAIL
if request.is_secure():
context['protocol'] = 'https://'
else:
context['protocol'] = 'http://'
context['current_site_url'] = (context['protocol'] +
context['current_site'].domain)
return context
def profile(request):
context = {}
if request.user.is_authenticated():
context['profile'] = request.user.get_profile()
return context
def ssl_media(request):
if request.is_secure():
ssl_media_url = settings.MEDIA_URL.replace('http://','https://')
else:
ssl_media_url = settings.MEDIA_URL
return {'MEDIA_URL': ssl_media_url}
|
Add a context processor that adds the UserProfile to each context.
|
Add a context processor that adds the UserProfile to each context.
|
Python
|
mit
|
bueda/django-comrade
|
from django.conf import settings
from django.contrib.sites.models import Site
from settings import DeploymentType
def default(request):
context = {}
context['DEPLOYMENT'] = settings.DEPLOYMENT
context['current_site'] = Site.objects.get_current()
if settings.DEPLOYMENT != DeploymentType.PRODUCTION:
context['GIT_COMMIT'] = settings.GIT_COMMIT
context['site_email'] = settings.CONTACT_EMAIL
if request.is_secure():
context['protocol'] = 'https://'
else:
context['protocol'] = 'http://'
context['current_site_url'] = (context['protocol'] +
context['current_site'].domain)
return context
def ssl_media(request):
if request.is_secure():
ssl_media_url = settings.MEDIA_URL.replace('http://','https://')
else:
ssl_media_url = settings.MEDIA_URL
return {'MEDIA_URL': ssl_media_url}
Add a context processor that adds the UserProfile to each context.
|
from django.conf import settings
from django.contrib.sites.models import Site
from settings import DeploymentType
def default(request):
context = {}
context['DEPLOYMENT'] = settings.DEPLOYMENT
context['current_site'] = Site.objects.get_current()
if settings.DEPLOYMENT != DeploymentType.PRODUCTION:
context['GIT_COMMIT'] = settings.GIT_COMMIT
context['site_email'] = settings.CONTACT_EMAIL
if request.is_secure():
context['protocol'] = 'https://'
else:
context['protocol'] = 'http://'
context['current_site_url'] = (context['protocol'] +
context['current_site'].domain)
return context
def profile(request):
context = {}
if request.user.is_authenticated():
context['profile'] = request.user.get_profile()
return context
def ssl_media(request):
if request.is_secure():
ssl_media_url = settings.MEDIA_URL.replace('http://','https://')
else:
ssl_media_url = settings.MEDIA_URL
return {'MEDIA_URL': ssl_media_url}
|
<commit_before>from django.conf import settings
from django.contrib.sites.models import Site
from settings import DeploymentType
def default(request):
context = {}
context['DEPLOYMENT'] = settings.DEPLOYMENT
context['current_site'] = Site.objects.get_current()
if settings.DEPLOYMENT != DeploymentType.PRODUCTION:
context['GIT_COMMIT'] = settings.GIT_COMMIT
context['site_email'] = settings.CONTACT_EMAIL
if request.is_secure():
context['protocol'] = 'https://'
else:
context['protocol'] = 'http://'
context['current_site_url'] = (context['protocol'] +
context['current_site'].domain)
return context
def ssl_media(request):
if request.is_secure():
ssl_media_url = settings.MEDIA_URL.replace('http://','https://')
else:
ssl_media_url = settings.MEDIA_URL
return {'MEDIA_URL': ssl_media_url}
<commit_msg>Add a context processor that adds the UserProfile to each context.<commit_after>
|
from django.conf import settings
from django.contrib.sites.models import Site
from settings import DeploymentType
def default(request):
context = {}
context['DEPLOYMENT'] = settings.DEPLOYMENT
context['current_site'] = Site.objects.get_current()
if settings.DEPLOYMENT != DeploymentType.PRODUCTION:
context['GIT_COMMIT'] = settings.GIT_COMMIT
context['site_email'] = settings.CONTACT_EMAIL
if request.is_secure():
context['protocol'] = 'https://'
else:
context['protocol'] = 'http://'
context['current_site_url'] = (context['protocol'] +
context['current_site'].domain)
return context
def profile(request):
context = {}
if request.user.is_authenticated():
context['profile'] = request.user.get_profile()
return context
def ssl_media(request):
if request.is_secure():
ssl_media_url = settings.MEDIA_URL.replace('http://','https://')
else:
ssl_media_url = settings.MEDIA_URL
return {'MEDIA_URL': ssl_media_url}
|
from django.conf import settings
from django.contrib.sites.models import Site
from settings import DeploymentType
def default(request):
context = {}
context['DEPLOYMENT'] = settings.DEPLOYMENT
context['current_site'] = Site.objects.get_current()
if settings.DEPLOYMENT != DeploymentType.PRODUCTION:
context['GIT_COMMIT'] = settings.GIT_COMMIT
context['site_email'] = settings.CONTACT_EMAIL
if request.is_secure():
context['protocol'] = 'https://'
else:
context['protocol'] = 'http://'
context['current_site_url'] = (context['protocol'] +
context['current_site'].domain)
return context
def ssl_media(request):
if request.is_secure():
ssl_media_url = settings.MEDIA_URL.replace('http://','https://')
else:
ssl_media_url = settings.MEDIA_URL
return {'MEDIA_URL': ssl_media_url}
Add a context processor that adds the UserProfile to each context.from django.conf import settings
from django.contrib.sites.models import Site
from settings import DeploymentType
def default(request):
context = {}
context['DEPLOYMENT'] = settings.DEPLOYMENT
context['current_site'] = Site.objects.get_current()
if settings.DEPLOYMENT != DeploymentType.PRODUCTION:
context['GIT_COMMIT'] = settings.GIT_COMMIT
context['site_email'] = settings.CONTACT_EMAIL
if request.is_secure():
context['protocol'] = 'https://'
else:
context['protocol'] = 'http://'
context['current_site_url'] = (context['protocol'] +
context['current_site'].domain)
return context
def profile(request):
context = {}
if request.user.is_authenticated():
context['profile'] = request.user.get_profile()
return context
def ssl_media(request):
if request.is_secure():
ssl_media_url = settings.MEDIA_URL.replace('http://','https://')
else:
ssl_media_url = settings.MEDIA_URL
return {'MEDIA_URL': ssl_media_url}
|
<commit_before>from django.conf import settings
from django.contrib.sites.models import Site
from settings import DeploymentType
def default(request):
context = {}
context['DEPLOYMENT'] = settings.DEPLOYMENT
context['current_site'] = Site.objects.get_current()
if settings.DEPLOYMENT != DeploymentType.PRODUCTION:
context['GIT_COMMIT'] = settings.GIT_COMMIT
context['site_email'] = settings.CONTACT_EMAIL
if request.is_secure():
context['protocol'] = 'https://'
else:
context['protocol'] = 'http://'
context['current_site_url'] = (context['protocol'] +
context['current_site'].domain)
return context
def ssl_media(request):
if request.is_secure():
ssl_media_url = settings.MEDIA_URL.replace('http://','https://')
else:
ssl_media_url = settings.MEDIA_URL
return {'MEDIA_URL': ssl_media_url}
<commit_msg>Add a context processor that adds the UserProfile to each context.<commit_after>from django.conf import settings
from django.contrib.sites.models import Site
from settings import DeploymentType
def default(request):
context = {}
context['DEPLOYMENT'] = settings.DEPLOYMENT
context['current_site'] = Site.objects.get_current()
if settings.DEPLOYMENT != DeploymentType.PRODUCTION:
context['GIT_COMMIT'] = settings.GIT_COMMIT
context['site_email'] = settings.CONTACT_EMAIL
if request.is_secure():
context['protocol'] = 'https://'
else:
context['protocol'] = 'http://'
context['current_site_url'] = (context['protocol'] +
context['current_site'].domain)
return context
def profile(request):
context = {}
if request.user.is_authenticated():
context['profile'] = request.user.get_profile()
return context
def ssl_media(request):
if request.is_secure():
ssl_media_url = settings.MEDIA_URL.replace('http://','https://')
else:
ssl_media_url = settings.MEDIA_URL
return {'MEDIA_URL': ssl_media_url}
|
2a71b48fb3ff2ec720ace74e30a83102c31863dc
|
labonneboite/common/email_util.py
|
labonneboite/common/email_util.py
|
# coding: utf8
import json
import logging
from labonneboite.conf import settings
logger = logging.getLogger('main')
class MailNoSendException(Exception):
pass
class EmailClient(object):
to = settings.FORM_EMAIL
from_email = settings.ADMIN_EMAIL
subject = 'nouveau message entreprise LBB'
class MandrillClient(EmailClient):
def __init__(self, mandrill):
self.mandrill = mandrill
def send(self, html):
from_email = self.from_email
to_email = self.to
response = self.mandrill.send_email(
subject=self.subject,
to=[{'email': to_email}],
html=html,
from_email=from_email)
content = json.loads(response.content.decode())
if content[0]["status"] != "sent":
raise MailNoSendException("email was not sent from %s to %s" % (from_email, to_email))
return response
|
# coding: utf8
import json
import logging
from urllib.error import HTTPError
from labonneboite.conf import settings
logger = logging.getLogger('main')
class MailNoSendException(Exception):
pass
class EmailClient(object):
to = settings.FORM_EMAIL
from_email = settings.ADMIN_EMAIL
subject = 'nouveau message entreprise LBB'
class MandrillClient(EmailClient):
def __init__(self, mandrill):
self.mandrill = mandrill
def send(self, html):
from_email = self.from_email
to_email = self.to
try:
response = self.mandrill.send_email(
subject=self.subject,
to=[{'email': to_email}],
html=html,
from_email=from_email)
content = json.loads(response.content.decode())
if content[0]["status"] != "sent":
raise MailNoSendException("email was not sent from %s to %s" % (from_email, to_email))
except HTTPError:
raise MailNoSendException("email was not sent from %s to %s" % (from_email, to_email))
return response
|
Handle HttpError when sending email
|
Handle HttpError when sending email
|
Python
|
agpl-3.0
|
StartupsPoleEmploi/labonneboite,StartupsPoleEmploi/labonneboite,StartupsPoleEmploi/labonneboite,StartupsPoleEmploi/labonneboite
|
# coding: utf8
import json
import logging
from labonneboite.conf import settings
logger = logging.getLogger('main')
class MailNoSendException(Exception):
pass
class EmailClient(object):
to = settings.FORM_EMAIL
from_email = settings.ADMIN_EMAIL
subject = 'nouveau message entreprise LBB'
class MandrillClient(EmailClient):
def __init__(self, mandrill):
self.mandrill = mandrill
def send(self, html):
from_email = self.from_email
to_email = self.to
response = self.mandrill.send_email(
subject=self.subject,
to=[{'email': to_email}],
html=html,
from_email=from_email)
content = json.loads(response.content.decode())
if content[0]["status"] != "sent":
raise MailNoSendException("email was not sent from %s to %s" % (from_email, to_email))
return response
Handle HttpError when sending email
|
# coding: utf8
import json
import logging
from urllib.error import HTTPError
from labonneboite.conf import settings
logger = logging.getLogger('main')
class MailNoSendException(Exception):
pass
class EmailClient(object):
to = settings.FORM_EMAIL
from_email = settings.ADMIN_EMAIL
subject = 'nouveau message entreprise LBB'
class MandrillClient(EmailClient):
def __init__(self, mandrill):
self.mandrill = mandrill
def send(self, html):
from_email = self.from_email
to_email = self.to
try:
response = self.mandrill.send_email(
subject=self.subject,
to=[{'email': to_email}],
html=html,
from_email=from_email)
content = json.loads(response.content.decode())
if content[0]["status"] != "sent":
raise MailNoSendException("email was not sent from %s to %s" % (from_email, to_email))
except HTTPError:
raise MailNoSendException("email was not sent from %s to %s" % (from_email, to_email))
return response
|
<commit_before># coding: utf8
import json
import logging
from labonneboite.conf import settings
logger = logging.getLogger('main')
class MailNoSendException(Exception):
pass
class EmailClient(object):
to = settings.FORM_EMAIL
from_email = settings.ADMIN_EMAIL
subject = 'nouveau message entreprise LBB'
class MandrillClient(EmailClient):
def __init__(self, mandrill):
self.mandrill = mandrill
def send(self, html):
from_email = self.from_email
to_email = self.to
response = self.mandrill.send_email(
subject=self.subject,
to=[{'email': to_email}],
html=html,
from_email=from_email)
content = json.loads(response.content.decode())
if content[0]["status"] != "sent":
raise MailNoSendException("email was not sent from %s to %s" % (from_email, to_email))
return response
<commit_msg>Handle HttpError when sending email<commit_after>
|
# coding: utf8
import json
import logging
from urllib.error import HTTPError
from labonneboite.conf import settings
logger = logging.getLogger('main')
class MailNoSendException(Exception):
pass
class EmailClient(object):
to = settings.FORM_EMAIL
from_email = settings.ADMIN_EMAIL
subject = 'nouveau message entreprise LBB'
class MandrillClient(EmailClient):
def __init__(self, mandrill):
self.mandrill = mandrill
def send(self, html):
from_email = self.from_email
to_email = self.to
try:
response = self.mandrill.send_email(
subject=self.subject,
to=[{'email': to_email}],
html=html,
from_email=from_email)
content = json.loads(response.content.decode())
if content[0]["status"] != "sent":
raise MailNoSendException("email was not sent from %s to %s" % (from_email, to_email))
except HTTPError:
raise MailNoSendException("email was not sent from %s to %s" % (from_email, to_email))
return response
|
# coding: utf8
import json
import logging
from labonneboite.conf import settings
logger = logging.getLogger('main')
class MailNoSendException(Exception):
pass
class EmailClient(object):
to = settings.FORM_EMAIL
from_email = settings.ADMIN_EMAIL
subject = 'nouveau message entreprise LBB'
class MandrillClient(EmailClient):
def __init__(self, mandrill):
self.mandrill = mandrill
def send(self, html):
from_email = self.from_email
to_email = self.to
response = self.mandrill.send_email(
subject=self.subject,
to=[{'email': to_email}],
html=html,
from_email=from_email)
content = json.loads(response.content.decode())
if content[0]["status"] != "sent":
raise MailNoSendException("email was not sent from %s to %s" % (from_email, to_email))
return response
Handle HttpError when sending email# coding: utf8
import json
import logging
from urllib.error import HTTPError
from labonneboite.conf import settings
logger = logging.getLogger('main')
class MailNoSendException(Exception):
pass
class EmailClient(object):
to = settings.FORM_EMAIL
from_email = settings.ADMIN_EMAIL
subject = 'nouveau message entreprise LBB'
class MandrillClient(EmailClient):
def __init__(self, mandrill):
self.mandrill = mandrill
def send(self, html):
from_email = self.from_email
to_email = self.to
try:
response = self.mandrill.send_email(
subject=self.subject,
to=[{'email': to_email}],
html=html,
from_email=from_email)
content = json.loads(response.content.decode())
if content[0]["status"] != "sent":
raise MailNoSendException("email was not sent from %s to %s" % (from_email, to_email))
except HTTPError:
raise MailNoSendException("email was not sent from %s to %s" % (from_email, to_email))
return response
|
<commit_before># coding: utf8
import json
import logging
from labonneboite.conf import settings
logger = logging.getLogger('main')
class MailNoSendException(Exception):
pass
class EmailClient(object):
to = settings.FORM_EMAIL
from_email = settings.ADMIN_EMAIL
subject = 'nouveau message entreprise LBB'
class MandrillClient(EmailClient):
def __init__(self, mandrill):
self.mandrill = mandrill
def send(self, html):
from_email = self.from_email
to_email = self.to
response = self.mandrill.send_email(
subject=self.subject,
to=[{'email': to_email}],
html=html,
from_email=from_email)
content = json.loads(response.content.decode())
if content[0]["status"] != "sent":
raise MailNoSendException("email was not sent from %s to %s" % (from_email, to_email))
return response
<commit_msg>Handle HttpError when sending email<commit_after># coding: utf8
import json
import logging
from urllib.error import HTTPError
from labonneboite.conf import settings
logger = logging.getLogger('main')
class MailNoSendException(Exception):
pass
class EmailClient(object):
to = settings.FORM_EMAIL
from_email = settings.ADMIN_EMAIL
subject = 'nouveau message entreprise LBB'
class MandrillClient(EmailClient):
def __init__(self, mandrill):
self.mandrill = mandrill
def send(self, html):
from_email = self.from_email
to_email = self.to
try:
response = self.mandrill.send_email(
subject=self.subject,
to=[{'email': to_email}],
html=html,
from_email=from_email)
content = json.loads(response.content.decode())
if content[0]["status"] != "sent":
raise MailNoSendException("email was not sent from %s to %s" % (from_email, to_email))
except HTTPError:
raise MailNoSendException("email was not sent from %s to %s" % (from_email, to_email))
return response
|
60352e8a3c41ec804ac1bd6b9f3af4bf611edc0b
|
profiles/views.py
|
profiles/views.py
|
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.contrib.sites.models import Site
from django.core.urlresolvers import reverse
from django.views.generic import FormView, TemplateView
from django.utils.datastructures import MultiValueDictKeyError
from incuna.utils import get_class_from_path
from profiles.models import Profile
from profiles.utils import class_view_decorator
try:
ProfileForm = get_class_from_path(settings.PROFILE_FORM_CLASS)
except AttributeError:
from forms import ProfileForm
@class_view_decorator(login_required)
class ProfileView(TemplateView):
template_name = 'profiles/profile.html'
@class_view_decorator(login_required)
class ProfileEdit(FormView):
form_class = ProfileForm
template_name = 'profiles/profile_form.html'
def form_valid(self, form):
instance = super(ProfileEdit, self).form_valid(form)
self.request.user.message_set.create(message='Your profile has been updated.')
return instance
def get_context_data(self, **kwargs):
context = super(ProfileEdit, self).get_context_data(**kwargs)
context['site'] = Site.objects.get_current()
return context
def get_object(self):
if isinstance(self.request.user, Profile):
return self.request.user
return self.request.user.profile
def get_success_url(self):
try:
return self.request.GET['next']
except MultiValueDictKeyError:
return reverse('profile')
|
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.contrib.sites.models import Site
from django.core.urlresolvers import reverse
from django.utils.datastructures import MultiValueDictKeyError
from django.views.generic import TemplateView, UpdateView
from incuna.utils import get_class_from_path
from profiles.models import Profile
from profiles.utils import class_view_decorator
try:
ProfileForm = get_class_from_path(settings.PROFILE_FORM_CLASS)
except AttributeError:
from forms import ProfileForm
@class_view_decorator(login_required)
class ProfileView(TemplateView):
template_name = 'profiles/profile.html'
@class_view_decorator(login_required)
class ProfileEdit(UpdateView):
form_class = ProfileForm
template_name = 'profiles/profile_form.html'
def form_valid(self, form):
instance = super(ProfileEdit, self).form_valid(form)
self.request.user.message_set.create(message='Your profile has been updated.')
return instance
def get_context_data(self, **kwargs):
context = super(ProfileEdit, self).get_context_data(**kwargs)
context['site'] = Site.objects.get_current()
return context
def get_object(self):
if isinstance(self.request.user, Profile):
return self.request.user
return self.request.user.profile
def get_success_url(self):
try:
return self.request.GET['next']
except MultiValueDictKeyError:
return reverse('profile')
|
Use an update view instead of form view
|
Use an update view instead of form view
|
Python
|
bsd-2-clause
|
incuna/django-extensible-profiles
|
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.contrib.sites.models import Site
from django.core.urlresolvers import reverse
from django.views.generic import FormView, TemplateView
from django.utils.datastructures import MultiValueDictKeyError
from incuna.utils import get_class_from_path
from profiles.models import Profile
from profiles.utils import class_view_decorator
try:
ProfileForm = get_class_from_path(settings.PROFILE_FORM_CLASS)
except AttributeError:
from forms import ProfileForm
@class_view_decorator(login_required)
class ProfileView(TemplateView):
template_name = 'profiles/profile.html'
@class_view_decorator(login_required)
class ProfileEdit(FormView):
form_class = ProfileForm
template_name = 'profiles/profile_form.html'
def form_valid(self, form):
instance = super(ProfileEdit, self).form_valid(form)
self.request.user.message_set.create(message='Your profile has been updated.')
return instance
def get_context_data(self, **kwargs):
context = super(ProfileEdit, self).get_context_data(**kwargs)
context['site'] = Site.objects.get_current()
return context
def get_object(self):
if isinstance(self.request.user, Profile):
return self.request.user
return self.request.user.profile
def get_success_url(self):
try:
return self.request.GET['next']
except MultiValueDictKeyError:
return reverse('profile')
Use an update view instead of form view
|
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.contrib.sites.models import Site
from django.core.urlresolvers import reverse
from django.utils.datastructures import MultiValueDictKeyError
from django.views.generic import TemplateView, UpdateView
from incuna.utils import get_class_from_path
from profiles.models import Profile
from profiles.utils import class_view_decorator
try:
ProfileForm = get_class_from_path(settings.PROFILE_FORM_CLASS)
except AttributeError:
from forms import ProfileForm
@class_view_decorator(login_required)
class ProfileView(TemplateView):
template_name = 'profiles/profile.html'
@class_view_decorator(login_required)
class ProfileEdit(UpdateView):
form_class = ProfileForm
template_name = 'profiles/profile_form.html'
def form_valid(self, form):
instance = super(ProfileEdit, self).form_valid(form)
self.request.user.message_set.create(message='Your profile has been updated.')
return instance
def get_context_data(self, **kwargs):
context = super(ProfileEdit, self).get_context_data(**kwargs)
context['site'] = Site.objects.get_current()
return context
def get_object(self):
if isinstance(self.request.user, Profile):
return self.request.user
return self.request.user.profile
def get_success_url(self):
try:
return self.request.GET['next']
except MultiValueDictKeyError:
return reverse('profile')
|
<commit_before>from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.contrib.sites.models import Site
from django.core.urlresolvers import reverse
from django.views.generic import FormView, TemplateView
from django.utils.datastructures import MultiValueDictKeyError
from incuna.utils import get_class_from_path
from profiles.models import Profile
from profiles.utils import class_view_decorator
try:
ProfileForm = get_class_from_path(settings.PROFILE_FORM_CLASS)
except AttributeError:
from forms import ProfileForm
@class_view_decorator(login_required)
class ProfileView(TemplateView):
template_name = 'profiles/profile.html'
@class_view_decorator(login_required)
class ProfileEdit(FormView):
form_class = ProfileForm
template_name = 'profiles/profile_form.html'
def form_valid(self, form):
instance = super(ProfileEdit, self).form_valid(form)
self.request.user.message_set.create(message='Your profile has been updated.')
return instance
def get_context_data(self, **kwargs):
context = super(ProfileEdit, self).get_context_data(**kwargs)
context['site'] = Site.objects.get_current()
return context
def get_object(self):
if isinstance(self.request.user, Profile):
return self.request.user
return self.request.user.profile
def get_success_url(self):
try:
return self.request.GET['next']
except MultiValueDictKeyError:
return reverse('profile')
<commit_msg>Use an update view instead of form view<commit_after>
|
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.contrib.sites.models import Site
from django.core.urlresolvers import reverse
from django.utils.datastructures import MultiValueDictKeyError
from django.views.generic import TemplateView, UpdateView
from incuna.utils import get_class_from_path
from profiles.models import Profile
from profiles.utils import class_view_decorator
try:
ProfileForm = get_class_from_path(settings.PROFILE_FORM_CLASS)
except AttributeError:
from forms import ProfileForm
@class_view_decorator(login_required)
class ProfileView(TemplateView):
template_name = 'profiles/profile.html'
@class_view_decorator(login_required)
class ProfileEdit(UpdateView):
form_class = ProfileForm
template_name = 'profiles/profile_form.html'
def form_valid(self, form):
instance = super(ProfileEdit, self).form_valid(form)
self.request.user.message_set.create(message='Your profile has been updated.')
return instance
def get_context_data(self, **kwargs):
context = super(ProfileEdit, self).get_context_data(**kwargs)
context['site'] = Site.objects.get_current()
return context
def get_object(self):
if isinstance(self.request.user, Profile):
return self.request.user
return self.request.user.profile
def get_success_url(self):
try:
return self.request.GET['next']
except MultiValueDictKeyError:
return reverse('profile')
|
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.contrib.sites.models import Site
from django.core.urlresolvers import reverse
from django.views.generic import FormView, TemplateView
from django.utils.datastructures import MultiValueDictKeyError
from incuna.utils import get_class_from_path
from profiles.models import Profile
from profiles.utils import class_view_decorator
try:
ProfileForm = get_class_from_path(settings.PROFILE_FORM_CLASS)
except AttributeError:
from forms import ProfileForm
@class_view_decorator(login_required)
class ProfileView(TemplateView):
template_name = 'profiles/profile.html'
@class_view_decorator(login_required)
class ProfileEdit(FormView):
form_class = ProfileForm
template_name = 'profiles/profile_form.html'
def form_valid(self, form):
instance = super(ProfileEdit, self).form_valid(form)
self.request.user.message_set.create(message='Your profile has been updated.')
return instance
def get_context_data(self, **kwargs):
context = super(ProfileEdit, self).get_context_data(**kwargs)
context['site'] = Site.objects.get_current()
return context
def get_object(self):
if isinstance(self.request.user, Profile):
return self.request.user
return self.request.user.profile
def get_success_url(self):
try:
return self.request.GET['next']
except MultiValueDictKeyError:
return reverse('profile')
Use an update view instead of form viewfrom django.conf import settings
from django.contrib.auth.decorators import login_required
from django.contrib.sites.models import Site
from django.core.urlresolvers import reverse
from django.utils.datastructures import MultiValueDictKeyError
from django.views.generic import TemplateView, UpdateView
from incuna.utils import get_class_from_path
from profiles.models import Profile
from profiles.utils import class_view_decorator
try:
ProfileForm = get_class_from_path(settings.PROFILE_FORM_CLASS)
except AttributeError:
from forms import ProfileForm
@class_view_decorator(login_required)
class ProfileView(TemplateView):
template_name = 'profiles/profile.html'
@class_view_decorator(login_required)
class ProfileEdit(UpdateView):
form_class = ProfileForm
template_name = 'profiles/profile_form.html'
def form_valid(self, form):
instance = super(ProfileEdit, self).form_valid(form)
self.request.user.message_set.create(message='Your profile has been updated.')
return instance
def get_context_data(self, **kwargs):
context = super(ProfileEdit, self).get_context_data(**kwargs)
context['site'] = Site.objects.get_current()
return context
def get_object(self):
if isinstance(self.request.user, Profile):
return self.request.user
return self.request.user.profile
def get_success_url(self):
try:
return self.request.GET['next']
except MultiValueDictKeyError:
return reverse('profile')
|
<commit_before>from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.contrib.sites.models import Site
from django.core.urlresolvers import reverse
from django.views.generic import FormView, TemplateView
from django.utils.datastructures import MultiValueDictKeyError
from incuna.utils import get_class_from_path
from profiles.models import Profile
from profiles.utils import class_view_decorator
try:
ProfileForm = get_class_from_path(settings.PROFILE_FORM_CLASS)
except AttributeError:
from forms import ProfileForm
@class_view_decorator(login_required)
class ProfileView(TemplateView):
template_name = 'profiles/profile.html'
@class_view_decorator(login_required)
class ProfileEdit(FormView):
form_class = ProfileForm
template_name = 'profiles/profile_form.html'
def form_valid(self, form):
instance = super(ProfileEdit, self).form_valid(form)
self.request.user.message_set.create(message='Your profile has been updated.')
return instance
def get_context_data(self, **kwargs):
context = super(ProfileEdit, self).get_context_data(**kwargs)
context['site'] = Site.objects.get_current()
return context
def get_object(self):
if isinstance(self.request.user, Profile):
return self.request.user
return self.request.user.profile
def get_success_url(self):
try:
return self.request.GET['next']
except MultiValueDictKeyError:
return reverse('profile')
<commit_msg>Use an update view instead of form view<commit_after>from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.contrib.sites.models import Site
from django.core.urlresolvers import reverse
from django.utils.datastructures import MultiValueDictKeyError
from django.views.generic import TemplateView, UpdateView
from incuna.utils import get_class_from_path
from profiles.models import Profile
from profiles.utils import class_view_decorator
try:
ProfileForm = get_class_from_path(settings.PROFILE_FORM_CLASS)
except AttributeError:
from forms import ProfileForm
@class_view_decorator(login_required)
class ProfileView(TemplateView):
template_name = 'profiles/profile.html'
@class_view_decorator(login_required)
class ProfileEdit(UpdateView):
form_class = ProfileForm
template_name = 'profiles/profile_form.html'
def form_valid(self, form):
instance = super(ProfileEdit, self).form_valid(form)
self.request.user.message_set.create(message='Your profile has been updated.')
return instance
def get_context_data(self, **kwargs):
context = super(ProfileEdit, self).get_context_data(**kwargs)
context['site'] = Site.objects.get_current()
return context
def get_object(self):
if isinstance(self.request.user, Profile):
return self.request.user
return self.request.user.profile
def get_success_url(self):
try:
return self.request.GET['next']
except MultiValueDictKeyError:
return reverse('profile')
|
89e749f88be822af2f6292a90211fb90fe95479a
|
ckanext/ckanext-apicatalog_routes/ckanext/apicatalog_routes/tests/test_plugin.py
|
ckanext/ckanext-apicatalog_routes/ckanext/apicatalog_routes/tests/test_plugin.py
|
"""Tests for plugin.py."""
import pytest
from ckan.tests import factories
import ckan.tests.helpers as helpers
from ckan.plugins.toolkit import NotAuthorized
@pytest.mark.ckan_config('ckan.plugins', 'apicatalog_routes')
@pytest.mark.usefixtures('clean_db', 'with_plugins', 'with_request_context')
class TestApicatalogRoutes(object):
def test_non_sysadmins_should_not_be_able_to_delete_subsystems(self):
user = factories.User()
org_users = [{"name": user["name"], "capacity": "admin"}]
org = factories.Organization(users=org_users)
subsystem = factories.Dataset(
owner_org=org["id"]
)
context = {'ignore_auth': False, 'user': user['name']}
with pytest.raises(NotAuthorized):
helpers.call_action('package_delete', context, name=subsystem['name'])
|
"""Tests for plugin.py."""
import pytest
from ckan.tests import factories
import ckan.tests.helpers as helpers
from ckan.plugins.toolkit import NotAuthorized
@pytest.mark.ckan_config('ckan.plugins', 'apicatalog_routes')
@pytest.mark.usefixtures('clean_db', 'with_plugins', 'with_request_context')
class TestApicatalogRoutes(object):
def test_non_sysadmins_should_not_be_able_to_delete_subsystems(self):
user = factories.User()
org_users = [{"name": user["name"], "capacity": "admin"}]
org = factories.Organization(users=org_users)
subsystem = factories.Dataset(
owner_org=org["id"]
)
context = {'ignore_auth': False, 'user': user['name']}
with pytest.raises(NotAuthorized):
helpers.call_action('package_delete', context, id=subsystem['name'])
|
Use id instead of name
|
Use id instead of name
|
Python
|
mit
|
vrk-kpa/api-catalog,vrk-kpa/api-catalog,vrk-kpa/api-catalog,vrk-kpa/api-catalog
|
"""Tests for plugin.py."""
import pytest
from ckan.tests import factories
import ckan.tests.helpers as helpers
from ckan.plugins.toolkit import NotAuthorized
@pytest.mark.ckan_config('ckan.plugins', 'apicatalog_routes')
@pytest.mark.usefixtures('clean_db', 'with_plugins', 'with_request_context')
class TestApicatalogRoutes(object):
def test_non_sysadmins_should_not_be_able_to_delete_subsystems(self):
user = factories.User()
org_users = [{"name": user["name"], "capacity": "admin"}]
org = factories.Organization(users=org_users)
subsystem = factories.Dataset(
owner_org=org["id"]
)
context = {'ignore_auth': False, 'user': user['name']}
with pytest.raises(NotAuthorized):
helpers.call_action('package_delete', context, name=subsystem['name'])
Use id instead of name
|
"""Tests for plugin.py."""
import pytest
from ckan.tests import factories
import ckan.tests.helpers as helpers
from ckan.plugins.toolkit import NotAuthorized
@pytest.mark.ckan_config('ckan.plugins', 'apicatalog_routes')
@pytest.mark.usefixtures('clean_db', 'with_plugins', 'with_request_context')
class TestApicatalogRoutes(object):
def test_non_sysadmins_should_not_be_able_to_delete_subsystems(self):
user = factories.User()
org_users = [{"name": user["name"], "capacity": "admin"}]
org = factories.Organization(users=org_users)
subsystem = factories.Dataset(
owner_org=org["id"]
)
context = {'ignore_auth': False, 'user': user['name']}
with pytest.raises(NotAuthorized):
helpers.call_action('package_delete', context, id=subsystem['name'])
|
<commit_before>"""Tests for plugin.py."""
import pytest
from ckan.tests import factories
import ckan.tests.helpers as helpers
from ckan.plugins.toolkit import NotAuthorized
@pytest.mark.ckan_config('ckan.plugins', 'apicatalog_routes')
@pytest.mark.usefixtures('clean_db', 'with_plugins', 'with_request_context')
class TestApicatalogRoutes(object):
def test_non_sysadmins_should_not_be_able_to_delete_subsystems(self):
user = factories.User()
org_users = [{"name": user["name"], "capacity": "admin"}]
org = factories.Organization(users=org_users)
subsystem = factories.Dataset(
owner_org=org["id"]
)
context = {'ignore_auth': False, 'user': user['name']}
with pytest.raises(NotAuthorized):
helpers.call_action('package_delete', context, name=subsystem['name'])
<commit_msg>Use id instead of name<commit_after>
|
"""Tests for plugin.py."""
import pytest
from ckan.tests import factories
import ckan.tests.helpers as helpers
from ckan.plugins.toolkit import NotAuthorized
@pytest.mark.ckan_config('ckan.plugins', 'apicatalog_routes')
@pytest.mark.usefixtures('clean_db', 'with_plugins', 'with_request_context')
class TestApicatalogRoutes(object):
def test_non_sysadmins_should_not_be_able_to_delete_subsystems(self):
user = factories.User()
org_users = [{"name": user["name"], "capacity": "admin"}]
org = factories.Organization(users=org_users)
subsystem = factories.Dataset(
owner_org=org["id"]
)
context = {'ignore_auth': False, 'user': user['name']}
with pytest.raises(NotAuthorized):
helpers.call_action('package_delete', context, id=subsystem['name'])
|
"""Tests for plugin.py."""
import pytest
from ckan.tests import factories
import ckan.tests.helpers as helpers
from ckan.plugins.toolkit import NotAuthorized
@pytest.mark.ckan_config('ckan.plugins', 'apicatalog_routes')
@pytest.mark.usefixtures('clean_db', 'with_plugins', 'with_request_context')
class TestApicatalogRoutes(object):
def test_non_sysadmins_should_not_be_able_to_delete_subsystems(self):
user = factories.User()
org_users = [{"name": user["name"], "capacity": "admin"}]
org = factories.Organization(users=org_users)
subsystem = factories.Dataset(
owner_org=org["id"]
)
context = {'ignore_auth': False, 'user': user['name']}
with pytest.raises(NotAuthorized):
helpers.call_action('package_delete', context, name=subsystem['name'])
Use id instead of name"""Tests for plugin.py."""
import pytest
from ckan.tests import factories
import ckan.tests.helpers as helpers
from ckan.plugins.toolkit import NotAuthorized
@pytest.mark.ckan_config('ckan.plugins', 'apicatalog_routes')
@pytest.mark.usefixtures('clean_db', 'with_plugins', 'with_request_context')
class TestApicatalogRoutes(object):
def test_non_sysadmins_should_not_be_able_to_delete_subsystems(self):
user = factories.User()
org_users = [{"name": user["name"], "capacity": "admin"}]
org = factories.Organization(users=org_users)
subsystem = factories.Dataset(
owner_org=org["id"]
)
context = {'ignore_auth': False, 'user': user['name']}
with pytest.raises(NotAuthorized):
helpers.call_action('package_delete', context, id=subsystem['name'])
|
<commit_before>"""Tests for plugin.py."""
import pytest
from ckan.tests import factories
import ckan.tests.helpers as helpers
from ckan.plugins.toolkit import NotAuthorized
@pytest.mark.ckan_config('ckan.plugins', 'apicatalog_routes')
@pytest.mark.usefixtures('clean_db', 'with_plugins', 'with_request_context')
class TestApicatalogRoutes(object):
def test_non_sysadmins_should_not_be_able_to_delete_subsystems(self):
user = factories.User()
org_users = [{"name": user["name"], "capacity": "admin"}]
org = factories.Organization(users=org_users)
subsystem = factories.Dataset(
owner_org=org["id"]
)
context = {'ignore_auth': False, 'user': user['name']}
with pytest.raises(NotAuthorized):
helpers.call_action('package_delete', context, name=subsystem['name'])
<commit_msg>Use id instead of name<commit_after>"""Tests for plugin.py."""
import pytest
from ckan.tests import factories
import ckan.tests.helpers as helpers
from ckan.plugins.toolkit import NotAuthorized
@pytest.mark.ckan_config('ckan.plugins', 'apicatalog_routes')
@pytest.mark.usefixtures('clean_db', 'with_plugins', 'with_request_context')
class TestApicatalogRoutes(object):
def test_non_sysadmins_should_not_be_able_to_delete_subsystems(self):
user = factories.User()
org_users = [{"name": user["name"], "capacity": "admin"}]
org = factories.Organization(users=org_users)
subsystem = factories.Dataset(
owner_org=org["id"]
)
context = {'ignore_auth': False, 'user': user['name']}
with pytest.raises(NotAuthorized):
helpers.call_action('package_delete', context, id=subsystem['name'])
|
b89716c4e7ba69c36a04bca00da20cfa8bb6a5e7
|
ideascube/conf/idb_col_llavedelsaber.py
|
ideascube/conf/idb_col_llavedelsaber.py
|
"""Configuration for Llave Del Saber, Colombia"""
from .idb import * # noqa
LANGUAGE_CODE = 'es'
DOMAIN = 'bibliotecamovil.lan'
ALLOWED_HOSTS = ['.bibliotecamovil.lan', 'localhost']
|
"""Configuration for Llave Del Saber, Colombia"""
from .idb import * # noqa
LANGUAGE_CODE = 'es'
DOMAIN = 'bibliotecamovil.lan'
ALLOWED_HOSTS = ['.bibliotecamovil.lan', 'localhost']
USER_FORM_FIELDS = USER_FORM_FIELDS + (
(_('Personal informations'), ['extra']),
)
USER_EXTRA_FIELD_LABEL = 'Etnicidad'
|
Add a custom 'extra' field to idb_col_llavadelsaber
|
Add a custom 'extra' field to idb_col_llavadelsaber
|
Python
|
agpl-3.0
|
ideascube/ideascube,ideascube/ideascube,ideascube/ideascube,ideascube/ideascube
|
"""Configuration for Llave Del Saber, Colombia"""
from .idb import * # noqa
LANGUAGE_CODE = 'es'
DOMAIN = 'bibliotecamovil.lan'
ALLOWED_HOSTS = ['.bibliotecamovil.lan', 'localhost']
Add a custom 'extra' field to idb_col_llavadelsaber
|
"""Configuration for Llave Del Saber, Colombia"""
from .idb import * # noqa
LANGUAGE_CODE = 'es'
DOMAIN = 'bibliotecamovil.lan'
ALLOWED_HOSTS = ['.bibliotecamovil.lan', 'localhost']
USER_FORM_FIELDS = USER_FORM_FIELDS + (
(_('Personal informations'), ['extra']),
)
USER_EXTRA_FIELD_LABEL = 'Etnicidad'
|
<commit_before>"""Configuration for Llave Del Saber, Colombia"""
from .idb import * # noqa
LANGUAGE_CODE = 'es'
DOMAIN = 'bibliotecamovil.lan'
ALLOWED_HOSTS = ['.bibliotecamovil.lan', 'localhost']
<commit_msg>Add a custom 'extra' field to idb_col_llavadelsaber<commit_after>
|
"""Configuration for Llave Del Saber, Colombia"""
from .idb import * # noqa
LANGUAGE_CODE = 'es'
DOMAIN = 'bibliotecamovil.lan'
ALLOWED_HOSTS = ['.bibliotecamovil.lan', 'localhost']
USER_FORM_FIELDS = USER_FORM_FIELDS + (
(_('Personal informations'), ['extra']),
)
USER_EXTRA_FIELD_LABEL = 'Etnicidad'
|
"""Configuration for Llave Del Saber, Colombia"""
from .idb import * # noqa
LANGUAGE_CODE = 'es'
DOMAIN = 'bibliotecamovil.lan'
ALLOWED_HOSTS = ['.bibliotecamovil.lan', 'localhost']
Add a custom 'extra' field to idb_col_llavadelsaber"""Configuration for Llave Del Saber, Colombia"""
from .idb import * # noqa
LANGUAGE_CODE = 'es'
DOMAIN = 'bibliotecamovil.lan'
ALLOWED_HOSTS = ['.bibliotecamovil.lan', 'localhost']
USER_FORM_FIELDS = USER_FORM_FIELDS + (
(_('Personal informations'), ['extra']),
)
USER_EXTRA_FIELD_LABEL = 'Etnicidad'
|
<commit_before>"""Configuration for Llave Del Saber, Colombia"""
from .idb import * # noqa
LANGUAGE_CODE = 'es'
DOMAIN = 'bibliotecamovil.lan'
ALLOWED_HOSTS = ['.bibliotecamovil.lan', 'localhost']
<commit_msg>Add a custom 'extra' field to idb_col_llavadelsaber<commit_after>"""Configuration for Llave Del Saber, Colombia"""
from .idb import * # noqa
LANGUAGE_CODE = 'es'
DOMAIN = 'bibliotecamovil.lan'
ALLOWED_HOSTS = ['.bibliotecamovil.lan', 'localhost']
USER_FORM_FIELDS = USER_FORM_FIELDS + (
(_('Personal informations'), ['extra']),
)
USER_EXTRA_FIELD_LABEL = 'Etnicidad'
|
220df047067ee1be995cfad7db4a192093c3ac9b
|
SNParray/Create_SQLtable_From_DesignVCF.py
|
SNParray/Create_SQLtable_From_DesignVCF.py
|
#!/usr/bin/python
import vcf
import os
from optparse import OptionParser
parser = OptionParser()
parser.add_option("--vcf", dest="vcf_file", help="Path to VCF to convert", default=False)
#parser.add_option("--conf", dest="config_file", help="Path to DataBase config file", default=False)
(options, args) = parser.parse_args()
if not os.path.exists(options.vcf_file):
print "Invalid VCF file"
exit(0)
TABLE_NAME = os.path.basename(options.vcf_file).replace("_design.vcf","")
VCF_READER = vcf.Reader(open(options.vcf_file, 'r'))
SNP_TEMPLATE = """%s tinyint(3) NOT NULL"""
SQL_TEMPLATE = """CREATE TABLE %s (
Sample varchar(100) NOT NULL,
%s,
PRIMARY KEY (Sample)
);
"""
SNPids = []
for vcf_record in VCF_READER:
SNPids.append(vcf_record.ID)
print SQL_TEMPLATE%(TABLE_NAME,",\n".join([SNP_TEMPLATE%(x) for x in SNPids]))
|
#!/usr/bin/python
import vcf
import os
from optparse import OptionParser
parser = OptionParser()
parser.add_option("--vcf", dest="vcf_file", help="Path to VCF to convert", default="")
#parser.add_option("--conf", dest="config_file", help="Path to DataBase config file", default=False)
(options, args) = parser.parse_args()
if not os.path.exists(options.vcf_file):
print "Invalid VCF file"
exit(0)
TABLE_NAME = os.path.basename(options.vcf_file).replace("_design.vcf","")
VCF_READER = vcf.Reader(open(options.vcf_file, 'r'))
SNP_TEMPLATE = """%s tinyint(3) NOT NULL"""
SQL_TEMPLATE = """CREATE TABLE %s (
Sample varchar(100) NOT NULL,
%s,
PRIMARY KEY (Sample)
);
"""
SNPids = []
for vcf_record in VCF_READER:
SNPids.append(vcf_record.ID)
print SQL_TEMPLATE%(TABLE_NAME,",\n".join([SNP_TEMPLATE%(x) for x in SNPids]))
|
Fix the assumption that options.vcf_file is a string.
|
Fix the assumption that options.vcf_file is a string.
|
Python
|
mit
|
CuppenResearch/Genetics,jdeligt/Genetics,jdeligt/Genetics,jdeligt/Genetics,CuppenResearch/Genetics,CuppenResearch/Genetics
|
#!/usr/bin/python
import vcf
import os
from optparse import OptionParser
parser = OptionParser()
parser.add_option("--vcf", dest="vcf_file", help="Path to VCF to convert", default=False)
#parser.add_option("--conf", dest="config_file", help="Path to DataBase config file", default=False)
(options, args) = parser.parse_args()
if not os.path.exists(options.vcf_file):
print "Invalid VCF file"
exit(0)
TABLE_NAME = os.path.basename(options.vcf_file).replace("_design.vcf","")
VCF_READER = vcf.Reader(open(options.vcf_file, 'r'))
SNP_TEMPLATE = """%s tinyint(3) NOT NULL"""
SQL_TEMPLATE = """CREATE TABLE %s (
Sample varchar(100) NOT NULL,
%s,
PRIMARY KEY (Sample)
);
"""
SNPids = []
for vcf_record in VCF_READER:
SNPids.append(vcf_record.ID)
print SQL_TEMPLATE%(TABLE_NAME,",\n".join([SNP_TEMPLATE%(x) for x in SNPids]))
Fix the assumption that options.vcf_file is a string.
|
#!/usr/bin/python
import vcf
import os
from optparse import OptionParser
parser = OptionParser()
parser.add_option("--vcf", dest="vcf_file", help="Path to VCF to convert", default="")
#parser.add_option("--conf", dest="config_file", help="Path to DataBase config file", default=False)
(options, args) = parser.parse_args()
if not os.path.exists(options.vcf_file):
print "Invalid VCF file"
exit(0)
TABLE_NAME = os.path.basename(options.vcf_file).replace("_design.vcf","")
VCF_READER = vcf.Reader(open(options.vcf_file, 'r'))
SNP_TEMPLATE = """%s tinyint(3) NOT NULL"""
SQL_TEMPLATE = """CREATE TABLE %s (
Sample varchar(100) NOT NULL,
%s,
PRIMARY KEY (Sample)
);
"""
SNPids = []
for vcf_record in VCF_READER:
SNPids.append(vcf_record.ID)
print SQL_TEMPLATE%(TABLE_NAME,",\n".join([SNP_TEMPLATE%(x) for x in SNPids]))
|
<commit_before>#!/usr/bin/python
import vcf
import os
from optparse import OptionParser
parser = OptionParser()
parser.add_option("--vcf", dest="vcf_file", help="Path to VCF to convert", default=False)
#parser.add_option("--conf", dest="config_file", help="Path to DataBase config file", default=False)
(options, args) = parser.parse_args()
if not os.path.exists(options.vcf_file):
print "Invalid VCF file"
exit(0)
TABLE_NAME = os.path.basename(options.vcf_file).replace("_design.vcf","")
VCF_READER = vcf.Reader(open(options.vcf_file, 'r'))
SNP_TEMPLATE = """%s tinyint(3) NOT NULL"""
SQL_TEMPLATE = """CREATE TABLE %s (
Sample varchar(100) NOT NULL,
%s,
PRIMARY KEY (Sample)
);
"""
SNPids = []
for vcf_record in VCF_READER:
SNPids.append(vcf_record.ID)
print SQL_TEMPLATE%(TABLE_NAME,",\n".join([SNP_TEMPLATE%(x) for x in SNPids]))
<commit_msg>Fix the assumption that options.vcf_file is a string.<commit_after>
|
#!/usr/bin/python
import vcf
import os
from optparse import OptionParser
parser = OptionParser()
parser.add_option("--vcf", dest="vcf_file", help="Path to VCF to convert", default="")
#parser.add_option("--conf", dest="config_file", help="Path to DataBase config file", default=False)
(options, args) = parser.parse_args()
if not os.path.exists(options.vcf_file):
print "Invalid VCF file"
exit(0)
TABLE_NAME = os.path.basename(options.vcf_file).replace("_design.vcf","")
VCF_READER = vcf.Reader(open(options.vcf_file, 'r'))
SNP_TEMPLATE = """%s tinyint(3) NOT NULL"""
SQL_TEMPLATE = """CREATE TABLE %s (
Sample varchar(100) NOT NULL,
%s,
PRIMARY KEY (Sample)
);
"""
SNPids = []
for vcf_record in VCF_READER:
SNPids.append(vcf_record.ID)
print SQL_TEMPLATE%(TABLE_NAME,",\n".join([SNP_TEMPLATE%(x) for x in SNPids]))
|
#!/usr/bin/python
import vcf
import os
from optparse import OptionParser
parser = OptionParser()
parser.add_option("--vcf", dest="vcf_file", help="Path to VCF to convert", default=False)
#parser.add_option("--conf", dest="config_file", help="Path to DataBase config file", default=False)
(options, args) = parser.parse_args()
if not os.path.exists(options.vcf_file):
print "Invalid VCF file"
exit(0)
TABLE_NAME = os.path.basename(options.vcf_file).replace("_design.vcf","")
VCF_READER = vcf.Reader(open(options.vcf_file, 'r'))
SNP_TEMPLATE = """%s tinyint(3) NOT NULL"""
SQL_TEMPLATE = """CREATE TABLE %s (
Sample varchar(100) NOT NULL,
%s,
PRIMARY KEY (Sample)
);
"""
SNPids = []
for vcf_record in VCF_READER:
SNPids.append(vcf_record.ID)
print SQL_TEMPLATE%(TABLE_NAME,",\n".join([SNP_TEMPLATE%(x) for x in SNPids]))
Fix the assumption that options.vcf_file is a string.#!/usr/bin/python
import vcf
import os
from optparse import OptionParser
parser = OptionParser()
parser.add_option("--vcf", dest="vcf_file", help="Path to VCF to convert", default="")
#parser.add_option("--conf", dest="config_file", help="Path to DataBase config file", default=False)
(options, args) = parser.parse_args()
if not os.path.exists(options.vcf_file):
print "Invalid VCF file"
exit(0)
TABLE_NAME = os.path.basename(options.vcf_file).replace("_design.vcf","")
VCF_READER = vcf.Reader(open(options.vcf_file, 'r'))
SNP_TEMPLATE = """%s tinyint(3) NOT NULL"""
SQL_TEMPLATE = """CREATE TABLE %s (
Sample varchar(100) NOT NULL,
%s,
PRIMARY KEY (Sample)
);
"""
SNPids = []
for vcf_record in VCF_READER:
SNPids.append(vcf_record.ID)
print SQL_TEMPLATE%(TABLE_NAME,",\n".join([SNP_TEMPLATE%(x) for x in SNPids]))
|
<commit_before>#!/usr/bin/python
import vcf
import os
from optparse import OptionParser
parser = OptionParser()
parser.add_option("--vcf", dest="vcf_file", help="Path to VCF to convert", default=False)
#parser.add_option("--conf", dest="config_file", help="Path to DataBase config file", default=False)
(options, args) = parser.parse_args()
if not os.path.exists(options.vcf_file):
print "Invalid VCF file"
exit(0)
TABLE_NAME = os.path.basename(options.vcf_file).replace("_design.vcf","")
VCF_READER = vcf.Reader(open(options.vcf_file, 'r'))
SNP_TEMPLATE = """%s tinyint(3) NOT NULL"""
SQL_TEMPLATE = """CREATE TABLE %s (
Sample varchar(100) NOT NULL,
%s,
PRIMARY KEY (Sample)
);
"""
SNPids = []
for vcf_record in VCF_READER:
SNPids.append(vcf_record.ID)
print SQL_TEMPLATE%(TABLE_NAME,",\n".join([SNP_TEMPLATE%(x) for x in SNPids]))
<commit_msg>Fix the assumption that options.vcf_file is a string.<commit_after>#!/usr/bin/python
import vcf
import os
from optparse import OptionParser
parser = OptionParser()
parser.add_option("--vcf", dest="vcf_file", help="Path to VCF to convert", default="")
#parser.add_option("--conf", dest="config_file", help="Path to DataBase config file", default=False)
(options, args) = parser.parse_args()
if not os.path.exists(options.vcf_file):
print "Invalid VCF file"
exit(0)
TABLE_NAME = os.path.basename(options.vcf_file).replace("_design.vcf","")
VCF_READER = vcf.Reader(open(options.vcf_file, 'r'))
SNP_TEMPLATE = """%s tinyint(3) NOT NULL"""
SQL_TEMPLATE = """CREATE TABLE %s (
Sample varchar(100) NOT NULL,
%s,
PRIMARY KEY (Sample)
);
"""
SNPids = []
for vcf_record in VCF_READER:
SNPids.append(vcf_record.ID)
print SQL_TEMPLATE%(TABLE_NAME,",\n".join([SNP_TEMPLATE%(x) for x in SNPids]))
|
263e31a5d87b8134a25df97eee06f1fe9c1e94bc
|
django_countries/release.py
|
django_countries/release.py
|
"""
This file provides zest.releaser entrypoints using when releasing new
django-countries versions.
"""
import os
from txclib.commands import cmd_pull
from txclib.utils import find_dot_tx
from txclib.log import logger
from zest.releaser.utils import ask, execute_command
from django.core.management import call_command
import django_countries
def translations(data):
if data["name"] != "django-countries":
return
if not ask("Pull translations from transifex and compile", default=True):
return
_handlers = logger.handlers
logger.handlers = []
try:
cmd_pull(argv=["-a", "--minimum-perc=60"], path_to_tx=find_dot_tx())
finally:
logger.handlers = _handlers
_cwd = os.getcwd()
os.chdir(os.path.dirname(django_countries.__file__))
try:
call_command("compilemessages")
execute_command(["git", "add", "locale"])
finally:
os.chdir(_cwd)
|
"""
This file provides zest.releaser entrypoints using when releasing new
django-countries versions.
"""
import os
import re
import shutil
from txclib.commands import cmd_pull
from txclib.utils import find_dot_tx
from txclib.log import logger
from zest.releaser.utils import ask, execute_command
from django.core.management import call_command
import django_countries
def translations(data):
if data["name"] != "django-countries":
return
if not ask("Pull translations from transifex and compile", default=True):
return
_handlers = logger.handlers
logger.handlers = []
try:
cmd_pull(argv=["-a", "--minimum-perc=60"], path_to_tx=find_dot_tx())
finally:
logger.handlers = _handlers
fix_locale_paths()
_cwd = os.getcwd()
os.chdir(os.path.dirname(django_countries.__file__))
try:
call_command("compilemessages")
execute_command(["git", "add", "locale"])
finally:
os.chdir(_cwd)
def fix_locale_paths():
lpath = os.path.join(os.path.dirname(django_countries.__file__), "locale")
for name in os.listdir(lpath):
if re.match(r"\w\w-\w{3}", name):
new_path = os.path.join(lpath, name.replace("-", "_", 1))
if os.path.exists(new_path):
shutil.rmtree(new_path)
os.rename(os.path.join(lpath, name), new_path)
|
Fix locale paths when pulling from transifex
|
Fix locale paths when pulling from transifex
|
Python
|
mit
|
SmileyChris/django-countries
|
"""
This file provides zest.releaser entrypoints using when releasing new
django-countries versions.
"""
import os
from txclib.commands import cmd_pull
from txclib.utils import find_dot_tx
from txclib.log import logger
from zest.releaser.utils import ask, execute_command
from django.core.management import call_command
import django_countries
def translations(data):
if data["name"] != "django-countries":
return
if not ask("Pull translations from transifex and compile", default=True):
return
_handlers = logger.handlers
logger.handlers = []
try:
cmd_pull(argv=["-a", "--minimum-perc=60"], path_to_tx=find_dot_tx())
finally:
logger.handlers = _handlers
_cwd = os.getcwd()
os.chdir(os.path.dirname(django_countries.__file__))
try:
call_command("compilemessages")
execute_command(["git", "add", "locale"])
finally:
os.chdir(_cwd)
Fix locale paths when pulling from transifex
|
"""
This file provides zest.releaser entrypoints using when releasing new
django-countries versions.
"""
import os
import re
import shutil
from txclib.commands import cmd_pull
from txclib.utils import find_dot_tx
from txclib.log import logger
from zest.releaser.utils import ask, execute_command
from django.core.management import call_command
import django_countries
def translations(data):
if data["name"] != "django-countries":
return
if not ask("Pull translations from transifex and compile", default=True):
return
_handlers = logger.handlers
logger.handlers = []
try:
cmd_pull(argv=["-a", "--minimum-perc=60"], path_to_tx=find_dot_tx())
finally:
logger.handlers = _handlers
fix_locale_paths()
_cwd = os.getcwd()
os.chdir(os.path.dirname(django_countries.__file__))
try:
call_command("compilemessages")
execute_command(["git", "add", "locale"])
finally:
os.chdir(_cwd)
def fix_locale_paths():
lpath = os.path.join(os.path.dirname(django_countries.__file__), "locale")
for name in os.listdir(lpath):
if re.match(r"\w\w-\w{3}", name):
new_path = os.path.join(lpath, name.replace("-", "_", 1))
if os.path.exists(new_path):
shutil.rmtree(new_path)
os.rename(os.path.join(lpath, name), new_path)
|
<commit_before>"""
This file provides zest.releaser entrypoints using when releasing new
django-countries versions.
"""
import os
from txclib.commands import cmd_pull
from txclib.utils import find_dot_tx
from txclib.log import logger
from zest.releaser.utils import ask, execute_command
from django.core.management import call_command
import django_countries
def translations(data):
if data["name"] != "django-countries":
return
if not ask("Pull translations from transifex and compile", default=True):
return
_handlers = logger.handlers
logger.handlers = []
try:
cmd_pull(argv=["-a", "--minimum-perc=60"], path_to_tx=find_dot_tx())
finally:
logger.handlers = _handlers
_cwd = os.getcwd()
os.chdir(os.path.dirname(django_countries.__file__))
try:
call_command("compilemessages")
execute_command(["git", "add", "locale"])
finally:
os.chdir(_cwd)
<commit_msg>Fix locale paths when pulling from transifex<commit_after>
|
"""
This file provides zest.releaser entrypoints using when releasing new
django-countries versions.
"""
import os
import re
import shutil
from txclib.commands import cmd_pull
from txclib.utils import find_dot_tx
from txclib.log import logger
from zest.releaser.utils import ask, execute_command
from django.core.management import call_command
import django_countries
def translations(data):
if data["name"] != "django-countries":
return
if not ask("Pull translations from transifex and compile", default=True):
return
_handlers = logger.handlers
logger.handlers = []
try:
cmd_pull(argv=["-a", "--minimum-perc=60"], path_to_tx=find_dot_tx())
finally:
logger.handlers = _handlers
fix_locale_paths()
_cwd = os.getcwd()
os.chdir(os.path.dirname(django_countries.__file__))
try:
call_command("compilemessages")
execute_command(["git", "add", "locale"])
finally:
os.chdir(_cwd)
def fix_locale_paths():
lpath = os.path.join(os.path.dirname(django_countries.__file__), "locale")
for name in os.listdir(lpath):
if re.match(r"\w\w-\w{3}", name):
new_path = os.path.join(lpath, name.replace("-", "_", 1))
if os.path.exists(new_path):
shutil.rmtree(new_path)
os.rename(os.path.join(lpath, name), new_path)
|
"""
This file provides zest.releaser entrypoints using when releasing new
django-countries versions.
"""
import os
from txclib.commands import cmd_pull
from txclib.utils import find_dot_tx
from txclib.log import logger
from zest.releaser.utils import ask, execute_command
from django.core.management import call_command
import django_countries
def translations(data):
if data["name"] != "django-countries":
return
if not ask("Pull translations from transifex and compile", default=True):
return
_handlers = logger.handlers
logger.handlers = []
try:
cmd_pull(argv=["-a", "--minimum-perc=60"], path_to_tx=find_dot_tx())
finally:
logger.handlers = _handlers
_cwd = os.getcwd()
os.chdir(os.path.dirname(django_countries.__file__))
try:
call_command("compilemessages")
execute_command(["git", "add", "locale"])
finally:
os.chdir(_cwd)
Fix locale paths when pulling from transifex"""
This file provides zest.releaser entrypoints using when releasing new
django-countries versions.
"""
import os
import re
import shutil
from txclib.commands import cmd_pull
from txclib.utils import find_dot_tx
from txclib.log import logger
from zest.releaser.utils import ask, execute_command
from django.core.management import call_command
import django_countries
def translations(data):
if data["name"] != "django-countries":
return
if not ask("Pull translations from transifex and compile", default=True):
return
_handlers = logger.handlers
logger.handlers = []
try:
cmd_pull(argv=["-a", "--minimum-perc=60"], path_to_tx=find_dot_tx())
finally:
logger.handlers = _handlers
fix_locale_paths()
_cwd = os.getcwd()
os.chdir(os.path.dirname(django_countries.__file__))
try:
call_command("compilemessages")
execute_command(["git", "add", "locale"])
finally:
os.chdir(_cwd)
def fix_locale_paths():
lpath = os.path.join(os.path.dirname(django_countries.__file__), "locale")
for name in os.listdir(lpath):
if re.match(r"\w\w-\w{3}", name):
new_path = os.path.join(lpath, name.replace("-", "_", 1))
if os.path.exists(new_path):
shutil.rmtree(new_path)
os.rename(os.path.join(lpath, name), new_path)
|
<commit_before>"""
This file provides zest.releaser entrypoints using when releasing new
django-countries versions.
"""
import os
from txclib.commands import cmd_pull
from txclib.utils import find_dot_tx
from txclib.log import logger
from zest.releaser.utils import ask, execute_command
from django.core.management import call_command
import django_countries
def translations(data):
if data["name"] != "django-countries":
return
if not ask("Pull translations from transifex and compile", default=True):
return
_handlers = logger.handlers
logger.handlers = []
try:
cmd_pull(argv=["-a", "--minimum-perc=60"], path_to_tx=find_dot_tx())
finally:
logger.handlers = _handlers
_cwd = os.getcwd()
os.chdir(os.path.dirname(django_countries.__file__))
try:
call_command("compilemessages")
execute_command(["git", "add", "locale"])
finally:
os.chdir(_cwd)
<commit_msg>Fix locale paths when pulling from transifex<commit_after>"""
This file provides zest.releaser entrypoints using when releasing new
django-countries versions.
"""
import os
import re
import shutil
from txclib.commands import cmd_pull
from txclib.utils import find_dot_tx
from txclib.log import logger
from zest.releaser.utils import ask, execute_command
from django.core.management import call_command
import django_countries
def translations(data):
if data["name"] != "django-countries":
return
if not ask("Pull translations from transifex and compile", default=True):
return
_handlers = logger.handlers
logger.handlers = []
try:
cmd_pull(argv=["-a", "--minimum-perc=60"], path_to_tx=find_dot_tx())
finally:
logger.handlers = _handlers
fix_locale_paths()
_cwd = os.getcwd()
os.chdir(os.path.dirname(django_countries.__file__))
try:
call_command("compilemessages")
execute_command(["git", "add", "locale"])
finally:
os.chdir(_cwd)
def fix_locale_paths():
lpath = os.path.join(os.path.dirname(django_countries.__file__), "locale")
for name in os.listdir(lpath):
if re.match(r"\w\w-\w{3}", name):
new_path = os.path.join(lpath, name.replace("-", "_", 1))
if os.path.exists(new_path):
shutil.rmtree(new_path)
os.rename(os.path.join(lpath, name), new_path)
|
6448691ed77be2fd74761e056eeb5f16a881fd54
|
test_settings.py
|
test_settings.py
|
from foundry.settings import *
# We cannot use ssqlite or spatialite because it cannot handle the 'distinct'
# in admin.py.
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'competition',
'USER': 'test',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
SOUTH_TESTS_MIGRATE = False
|
from foundry.settings import *
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'competition',
'USER': 'test',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
# Need this last line until django-setuptest is improved.
|
Adjust test settings to be in line with jmbo-skeleton
|
Adjust test settings to be in line with jmbo-skeleton
|
Python
|
bsd-3-clause
|
praekelt/jmbo-competition,praekelt/jmbo-competition
|
from foundry.settings import *
# We cannot use ssqlite or spatialite because it cannot handle the 'distinct'
# in admin.py.
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'competition',
'USER': 'test',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
SOUTH_TESTS_MIGRATE = False
Adjust test settings to be in line with jmbo-skeleton
|
from foundry.settings import *
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'competition',
'USER': 'test',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
# Need this last line until django-setuptest is improved.
|
<commit_before>from foundry.settings import *
# We cannot use ssqlite or spatialite because it cannot handle the 'distinct'
# in admin.py.
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'competition',
'USER': 'test',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
SOUTH_TESTS_MIGRATE = False
<commit_msg>Adjust test settings to be in line with jmbo-skeleton<commit_after>
|
from foundry.settings import *
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'competition',
'USER': 'test',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
# Need this last line until django-setuptest is improved.
|
from foundry.settings import *
# We cannot use ssqlite or spatialite because it cannot handle the 'distinct'
# in admin.py.
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'competition',
'USER': 'test',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
SOUTH_TESTS_MIGRATE = False
Adjust test settings to be in line with jmbo-skeletonfrom foundry.settings import *
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'competition',
'USER': 'test',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
# Need this last line until django-setuptest is improved.
|
<commit_before>from foundry.settings import *
# We cannot use ssqlite or spatialite because it cannot handle the 'distinct'
# in admin.py.
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'competition',
'USER': 'test',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
SOUTH_TESTS_MIGRATE = False
<commit_msg>Adjust test settings to be in line with jmbo-skeleton<commit_after>from foundry.settings import *
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'competition',
'USER': 'test',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
# Need this last line until django-setuptest is improved.
|
66a1fb19aadfcf90d5627b36755d700291cef4b4
|
py/desisurvey/test/test_rules.py
|
py/desisurvey/test/test_rules.py
|
import unittest
import numpy as np
import desisurvey.tiles
from desisurvey.test.base import Tester
from desisurvey.rules import Rules
class TestRules(Tester):
def test_rules(self):
rules = Rules()
tiles = desisurvey.tiles.get_tiles()
completed = np.ones(tiles.ntiles, bool)
rules.apply(completed)
completed[:] = False
rules.apply(completed)
gen = np.random.RandomState(123)
for i in range(10):
completed[gen.choice(tiles.ntiles, tiles.ntiles // 10, replace=False)] = True
rules.apply(completed)
def test_suite():
"""Allows testing of only this module with the command::
python setup.py test -m <modulename>
"""
return unittest.defaultTestLoader.loadTestsFromName(__name__)
|
import unittest
import numpy as np
import desisurvey.tiles
from desisurvey.test.base import Tester
from desisurvey.rules import Rules
class TestRules(Tester):
def test_rules(self):
rules = Rules('rules-layers.yaml')
tiles = desisurvey.tiles.get_tiles()
completed = np.ones(tiles.ntiles, bool)
rules.apply(completed)
completed[:] = False
rules.apply(completed)
gen = np.random.RandomState(123)
for i in range(10):
completed[gen.choice(tiles.ntiles, tiles.ntiles // 10, replace=False)] = True
rules.apply(completed)
def test_suite():
"""Allows testing of only this module with the command::
python setup.py test -m <modulename>
"""
return unittest.defaultTestLoader.loadTestsFromName(__name__)
|
Use simpler rules file for testing with tiles subset
|
Use simpler rules file for testing with tiles subset
|
Python
|
bsd-3-clause
|
desihub/desisurvey,desihub/desisurvey
|
import unittest
import numpy as np
import desisurvey.tiles
from desisurvey.test.base import Tester
from desisurvey.rules import Rules
class TestRules(Tester):
def test_rules(self):
rules = Rules()
tiles = desisurvey.tiles.get_tiles()
completed = np.ones(tiles.ntiles, bool)
rules.apply(completed)
completed[:] = False
rules.apply(completed)
gen = np.random.RandomState(123)
for i in range(10):
completed[gen.choice(tiles.ntiles, tiles.ntiles // 10, replace=False)] = True
rules.apply(completed)
def test_suite():
"""Allows testing of only this module with the command::
python setup.py test -m <modulename>
"""
return unittest.defaultTestLoader.loadTestsFromName(__name__)
Use simpler rules file for testing with tiles subset
|
import unittest
import numpy as np
import desisurvey.tiles
from desisurvey.test.base import Tester
from desisurvey.rules import Rules
class TestRules(Tester):
def test_rules(self):
rules = Rules('rules-layers.yaml')
tiles = desisurvey.tiles.get_tiles()
completed = np.ones(tiles.ntiles, bool)
rules.apply(completed)
completed[:] = False
rules.apply(completed)
gen = np.random.RandomState(123)
for i in range(10):
completed[gen.choice(tiles.ntiles, tiles.ntiles // 10, replace=False)] = True
rules.apply(completed)
def test_suite():
"""Allows testing of only this module with the command::
python setup.py test -m <modulename>
"""
return unittest.defaultTestLoader.loadTestsFromName(__name__)
|
<commit_before>import unittest
import numpy as np
import desisurvey.tiles
from desisurvey.test.base import Tester
from desisurvey.rules import Rules
class TestRules(Tester):
def test_rules(self):
rules = Rules()
tiles = desisurvey.tiles.get_tiles()
completed = np.ones(tiles.ntiles, bool)
rules.apply(completed)
completed[:] = False
rules.apply(completed)
gen = np.random.RandomState(123)
for i in range(10):
completed[gen.choice(tiles.ntiles, tiles.ntiles // 10, replace=False)] = True
rules.apply(completed)
def test_suite():
"""Allows testing of only this module with the command::
python setup.py test -m <modulename>
"""
return unittest.defaultTestLoader.loadTestsFromName(__name__)
<commit_msg>Use simpler rules file for testing with tiles subset<commit_after>
|
import unittest
import numpy as np
import desisurvey.tiles
from desisurvey.test.base import Tester
from desisurvey.rules import Rules
class TestRules(Tester):
def test_rules(self):
rules = Rules('rules-layers.yaml')
tiles = desisurvey.tiles.get_tiles()
completed = np.ones(tiles.ntiles, bool)
rules.apply(completed)
completed[:] = False
rules.apply(completed)
gen = np.random.RandomState(123)
for i in range(10):
completed[gen.choice(tiles.ntiles, tiles.ntiles // 10, replace=False)] = True
rules.apply(completed)
def test_suite():
"""Allows testing of only this module with the command::
python setup.py test -m <modulename>
"""
return unittest.defaultTestLoader.loadTestsFromName(__name__)
|
import unittest
import numpy as np
import desisurvey.tiles
from desisurvey.test.base import Tester
from desisurvey.rules import Rules
class TestRules(Tester):
def test_rules(self):
rules = Rules()
tiles = desisurvey.tiles.get_tiles()
completed = np.ones(tiles.ntiles, bool)
rules.apply(completed)
completed[:] = False
rules.apply(completed)
gen = np.random.RandomState(123)
for i in range(10):
completed[gen.choice(tiles.ntiles, tiles.ntiles // 10, replace=False)] = True
rules.apply(completed)
def test_suite():
"""Allows testing of only this module with the command::
python setup.py test -m <modulename>
"""
return unittest.defaultTestLoader.loadTestsFromName(__name__)
Use simpler rules file for testing with tiles subsetimport unittest
import numpy as np
import desisurvey.tiles
from desisurvey.test.base import Tester
from desisurvey.rules import Rules
class TestRules(Tester):
def test_rules(self):
rules = Rules('rules-layers.yaml')
tiles = desisurvey.tiles.get_tiles()
completed = np.ones(tiles.ntiles, bool)
rules.apply(completed)
completed[:] = False
rules.apply(completed)
gen = np.random.RandomState(123)
for i in range(10):
completed[gen.choice(tiles.ntiles, tiles.ntiles // 10, replace=False)] = True
rules.apply(completed)
def test_suite():
"""Allows testing of only this module with the command::
python setup.py test -m <modulename>
"""
return unittest.defaultTestLoader.loadTestsFromName(__name__)
|
<commit_before>import unittest
import numpy as np
import desisurvey.tiles
from desisurvey.test.base import Tester
from desisurvey.rules import Rules
class TestRules(Tester):
def test_rules(self):
rules = Rules()
tiles = desisurvey.tiles.get_tiles()
completed = np.ones(tiles.ntiles, bool)
rules.apply(completed)
completed[:] = False
rules.apply(completed)
gen = np.random.RandomState(123)
for i in range(10):
completed[gen.choice(tiles.ntiles, tiles.ntiles // 10, replace=False)] = True
rules.apply(completed)
def test_suite():
"""Allows testing of only this module with the command::
python setup.py test -m <modulename>
"""
return unittest.defaultTestLoader.loadTestsFromName(__name__)
<commit_msg>Use simpler rules file for testing with tiles subset<commit_after>import unittest
import numpy as np
import desisurvey.tiles
from desisurvey.test.base import Tester
from desisurvey.rules import Rules
class TestRules(Tester):
def test_rules(self):
rules = Rules('rules-layers.yaml')
tiles = desisurvey.tiles.get_tiles()
completed = np.ones(tiles.ntiles, bool)
rules.apply(completed)
completed[:] = False
rules.apply(completed)
gen = np.random.RandomState(123)
for i in range(10):
completed[gen.choice(tiles.ntiles, tiles.ntiles // 10, replace=False)] = True
rules.apply(completed)
def test_suite():
"""Allows testing of only this module with the command::
python setup.py test -m <modulename>
"""
return unittest.defaultTestLoader.loadTestsFromName(__name__)
|
6471e9422b654a9e96c4f51cfbd65e06bc9e0400
|
app/access_control.py
|
app/access_control.py
|
from functools import wraps
from flask import flash, redirect, url_for, session
from app import views
def login_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if 'logged_in' in session:
return f(*args, **kwargs)
else:
flash("Please login to continue.", "danger")
return redirect(url_for("login"))
return decorated_function
def for_guests(f):
@wraps(f)
def decorated_function(*args, **kwrags):
if not 'logged_in' in session:
return f(*args, **kwargs)
else:
flash("Invalid Action.", "danger")
return redirect(url_for("dashboard"))
return decorated_function
|
from functools import wraps
from flask import flash, redirect, url_for, session
from app import views
def login_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if 'logged_in' in session:
return f(*args, **kwargs)
else:
flash("Please login to continue.", "danger")
return redirect(url_for("login"))
return decorated_function
def for_guests(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if not 'logged_in' in session:
return f(*args, **kwargs)
else:
flash("Invalid Action.", "danger")
return redirect(url_for("dashboard"))
return decorated_function
|
Fix typo in for_guest function.
|
Fix typo in for_guest function.
`**kwrags**` should be `**kwargs**`
|
Python
|
mit
|
alchermd/flask-todo-app,alchermd/flask-todo-app
|
from functools import wraps
from flask import flash, redirect, url_for, session
from app import views
def login_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if 'logged_in' in session:
return f(*args, **kwargs)
else:
flash("Please login to continue.", "danger")
return redirect(url_for("login"))
return decorated_function
def for_guests(f):
@wraps(f)
def decorated_function(*args, **kwrags):
if not 'logged_in' in session:
return f(*args, **kwargs)
else:
flash("Invalid Action.", "danger")
return redirect(url_for("dashboard"))
return decorated_functionFix typo in for_guest function.
`**kwrags**` should be `**kwargs**`
|
from functools import wraps
from flask import flash, redirect, url_for, session
from app import views
def login_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if 'logged_in' in session:
return f(*args, **kwargs)
else:
flash("Please login to continue.", "danger")
return redirect(url_for("login"))
return decorated_function
def for_guests(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if not 'logged_in' in session:
return f(*args, **kwargs)
else:
flash("Invalid Action.", "danger")
return redirect(url_for("dashboard"))
return decorated_function
|
<commit_before>from functools import wraps
from flask import flash, redirect, url_for, session
from app import views
def login_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if 'logged_in' in session:
return f(*args, **kwargs)
else:
flash("Please login to continue.", "danger")
return redirect(url_for("login"))
return decorated_function
def for_guests(f):
@wraps(f)
def decorated_function(*args, **kwrags):
if not 'logged_in' in session:
return f(*args, **kwargs)
else:
flash("Invalid Action.", "danger")
return redirect(url_for("dashboard"))
return decorated_function<commit_msg>Fix typo in for_guest function.
`**kwrags**` should be `**kwargs**`<commit_after>
|
from functools import wraps
from flask import flash, redirect, url_for, session
from app import views
def login_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if 'logged_in' in session:
return f(*args, **kwargs)
else:
flash("Please login to continue.", "danger")
return redirect(url_for("login"))
return decorated_function
def for_guests(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if not 'logged_in' in session:
return f(*args, **kwargs)
else:
flash("Invalid Action.", "danger")
return redirect(url_for("dashboard"))
return decorated_function
|
from functools import wraps
from flask import flash, redirect, url_for, session
from app import views
def login_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if 'logged_in' in session:
return f(*args, **kwargs)
else:
flash("Please login to continue.", "danger")
return redirect(url_for("login"))
return decorated_function
def for_guests(f):
@wraps(f)
def decorated_function(*args, **kwrags):
if not 'logged_in' in session:
return f(*args, **kwargs)
else:
flash("Invalid Action.", "danger")
return redirect(url_for("dashboard"))
return decorated_functionFix typo in for_guest function.
`**kwrags**` should be `**kwargs**`from functools import wraps
from flask import flash, redirect, url_for, session
from app import views
def login_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if 'logged_in' in session:
return f(*args, **kwargs)
else:
flash("Please login to continue.", "danger")
return redirect(url_for("login"))
return decorated_function
def for_guests(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if not 'logged_in' in session:
return f(*args, **kwargs)
else:
flash("Invalid Action.", "danger")
return redirect(url_for("dashboard"))
return decorated_function
|
<commit_before>from functools import wraps
from flask import flash, redirect, url_for, session
from app import views
def login_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if 'logged_in' in session:
return f(*args, **kwargs)
else:
flash("Please login to continue.", "danger")
return redirect(url_for("login"))
return decorated_function
def for_guests(f):
@wraps(f)
def decorated_function(*args, **kwrags):
if not 'logged_in' in session:
return f(*args, **kwargs)
else:
flash("Invalid Action.", "danger")
return redirect(url_for("dashboard"))
return decorated_function<commit_msg>Fix typo in for_guest function.
`**kwrags**` should be `**kwargs**`<commit_after>from functools import wraps
from flask import flash, redirect, url_for, session
from app import views
def login_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if 'logged_in' in session:
return f(*args, **kwargs)
else:
flash("Please login to continue.", "danger")
return redirect(url_for("login"))
return decorated_function
def for_guests(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if not 'logged_in' in session:
return f(*args, **kwargs)
else:
flash("Invalid Action.", "danger")
return redirect(url_for("dashboard"))
return decorated_function
|
2dfe0b180e31026e30e8a0ba6a59310e1fbad289
|
opbeat_pyramid/__init__.py
|
opbeat_pyramid/__init__.py
|
__VERSION__ = '1.0.8'
import pyramid.tweens
def includeme(config, module_name='opbeat_pyramid'):
""" Extensibility function for using this module with any Pyramid app. """
config.scan(module_name)
config.add_tween(
'opbeat_pyramid.subscribers.opbeat_tween_factory',
over=[
pyramid.tweens.EXCVIEW,
# if pyramid_tm is in the pipeline we want to track errors caused
# by commit/abort so we try to place ourselves over it
'pyramid_tm.tm_tween_factory',
]
)
|
__VERSION__ = '1.0.8'
def includeme(config, module_name='opbeat_pyramid'):
""" Extensibility function for using this module with any Pyramid app. """
config.scan(module_name)
# TODO: Is there a decorator for this?
config.add_tween(
'opbeat_pyramid.subscribers.opbeat_tween_factory',
over=[
'pyramid.tweens.EXCVIEW',
'pyramid_tm.tm_tween_factory',
]
)
|
Use strings to allow setup.py to import this.
|
Use strings to allow setup.py to import this.
This should never import depencencies, or else setup.py will fail when
getting the version because the dependencies don't exist yet.
|
Python
|
mit
|
monokrome/opbeat_pyramid,britco/opbeat_pyramid
|
__VERSION__ = '1.0.8'
import pyramid.tweens
def includeme(config, module_name='opbeat_pyramid'):
""" Extensibility function for using this module with any Pyramid app. """
config.scan(module_name)
config.add_tween(
'opbeat_pyramid.subscribers.opbeat_tween_factory',
over=[
pyramid.tweens.EXCVIEW,
# if pyramid_tm is in the pipeline we want to track errors caused
# by commit/abort so we try to place ourselves over it
'pyramid_tm.tm_tween_factory',
]
)
Use strings to allow setup.py to import this.
This should never import depencencies, or else setup.py will fail when
getting the version because the dependencies don't exist yet.
|
__VERSION__ = '1.0.8'
def includeme(config, module_name='opbeat_pyramid'):
""" Extensibility function for using this module with any Pyramid app. """
config.scan(module_name)
# TODO: Is there a decorator for this?
config.add_tween(
'opbeat_pyramid.subscribers.opbeat_tween_factory',
over=[
'pyramid.tweens.EXCVIEW',
'pyramid_tm.tm_tween_factory',
]
)
|
<commit_before>__VERSION__ = '1.0.8'
import pyramid.tweens
def includeme(config, module_name='opbeat_pyramid'):
""" Extensibility function for using this module with any Pyramid app. """
config.scan(module_name)
config.add_tween(
'opbeat_pyramid.subscribers.opbeat_tween_factory',
over=[
pyramid.tweens.EXCVIEW,
# if pyramid_tm is in the pipeline we want to track errors caused
# by commit/abort so we try to place ourselves over it
'pyramid_tm.tm_tween_factory',
]
)
<commit_msg>Use strings to allow setup.py to import this.
This should never import depencencies, or else setup.py will fail when
getting the version because the dependencies don't exist yet.<commit_after>
|
__VERSION__ = '1.0.8'
def includeme(config, module_name='opbeat_pyramid'):
""" Extensibility function for using this module with any Pyramid app. """
config.scan(module_name)
# TODO: Is there a decorator for this?
config.add_tween(
'opbeat_pyramid.subscribers.opbeat_tween_factory',
over=[
'pyramid.tweens.EXCVIEW',
'pyramid_tm.tm_tween_factory',
]
)
|
__VERSION__ = '1.0.8'
import pyramid.tweens
def includeme(config, module_name='opbeat_pyramid'):
""" Extensibility function for using this module with any Pyramid app. """
config.scan(module_name)
config.add_tween(
'opbeat_pyramid.subscribers.opbeat_tween_factory',
over=[
pyramid.tweens.EXCVIEW,
# if pyramid_tm is in the pipeline we want to track errors caused
# by commit/abort so we try to place ourselves over it
'pyramid_tm.tm_tween_factory',
]
)
Use strings to allow setup.py to import this.
This should never import depencencies, or else setup.py will fail when
getting the version because the dependencies don't exist yet.__VERSION__ = '1.0.8'
def includeme(config, module_name='opbeat_pyramid'):
""" Extensibility function for using this module with any Pyramid app. """
config.scan(module_name)
# TODO: Is there a decorator for this?
config.add_tween(
'opbeat_pyramid.subscribers.opbeat_tween_factory',
over=[
'pyramid.tweens.EXCVIEW',
'pyramid_tm.tm_tween_factory',
]
)
|
<commit_before>__VERSION__ = '1.0.8'
import pyramid.tweens
def includeme(config, module_name='opbeat_pyramid'):
""" Extensibility function for using this module with any Pyramid app. """
config.scan(module_name)
config.add_tween(
'opbeat_pyramid.subscribers.opbeat_tween_factory',
over=[
pyramid.tweens.EXCVIEW,
# if pyramid_tm is in the pipeline we want to track errors caused
# by commit/abort so we try to place ourselves over it
'pyramid_tm.tm_tween_factory',
]
)
<commit_msg>Use strings to allow setup.py to import this.
This should never import depencencies, or else setup.py will fail when
getting the version because the dependencies don't exist yet.<commit_after>__VERSION__ = '1.0.8'
def includeme(config, module_name='opbeat_pyramid'):
""" Extensibility function for using this module with any Pyramid app. """
config.scan(module_name)
# TODO: Is there a decorator for this?
config.add_tween(
'opbeat_pyramid.subscribers.opbeat_tween_factory',
over=[
'pyramid.tweens.EXCVIEW',
'pyramid_tm.tm_tween_factory',
]
)
|
44dce5edc73199ffb0c151280cfe9e75acb73c5e
|
polling/tests/factories.py
|
polling/tests/factories.py
|
from datetime import datetime
import factory
from polling.models import State
from polling.models import CANDIDATE_NONE
class StateFactory(factory.DjangoModelFactory):
class Meta:
model = State
name = factory.Sequence(lambda n: "state-%d" % n)
updated = factory.LazyFunction(datetime.now)
abbv = factory.LazyAttribute(lambda obj: obj.name[-2:])
tipping_point_rank = factory.Sequence(lambda n: int(n))
safe_for = CANDIDATE_NONE
safe_rank = -1
leans = CANDIDATE_NONE
lean_rank = -1
|
from datetime import datetime
import us
import factory
from polling.models import State
from polling.models import CANDIDATE_NONE
class StateFactory(factory.DjangoModelFactory):
class Meta:
model = State
name = factory.Sequence(lambda n: us.STATES[n])
updated = factory.LazyFunction(datetime.now)
abbv = factory.LazyAttribute(
lambda obj: us.states.lookup(unicode(obj.name)).abbr)
tipping_point_rank = factory.Sequence(lambda n: int(n))
safe_for = CANDIDATE_NONE
safe_rank = -1
leans = CANDIDATE_NONE
lean_rank = -1
|
Use real states in StateFactory
|
Use real states in StateFactory
|
Python
|
mit
|
sbuss/voteswap,sbuss/voteswap,sbuss/voteswap,sbuss/voteswap
|
from datetime import datetime
import factory
from polling.models import State
from polling.models import CANDIDATE_NONE
class StateFactory(factory.DjangoModelFactory):
class Meta:
model = State
name = factory.Sequence(lambda n: "state-%d" % n)
updated = factory.LazyFunction(datetime.now)
abbv = factory.LazyAttribute(lambda obj: obj.name[-2:])
tipping_point_rank = factory.Sequence(lambda n: int(n))
safe_for = CANDIDATE_NONE
safe_rank = -1
leans = CANDIDATE_NONE
lean_rank = -1
Use real states in StateFactory
|
from datetime import datetime
import us
import factory
from polling.models import State
from polling.models import CANDIDATE_NONE
class StateFactory(factory.DjangoModelFactory):
class Meta:
model = State
name = factory.Sequence(lambda n: us.STATES[n])
updated = factory.LazyFunction(datetime.now)
abbv = factory.LazyAttribute(
lambda obj: us.states.lookup(unicode(obj.name)).abbr)
tipping_point_rank = factory.Sequence(lambda n: int(n))
safe_for = CANDIDATE_NONE
safe_rank = -1
leans = CANDIDATE_NONE
lean_rank = -1
|
<commit_before>from datetime import datetime
import factory
from polling.models import State
from polling.models import CANDIDATE_NONE
class StateFactory(factory.DjangoModelFactory):
class Meta:
model = State
name = factory.Sequence(lambda n: "state-%d" % n)
updated = factory.LazyFunction(datetime.now)
abbv = factory.LazyAttribute(lambda obj: obj.name[-2:])
tipping_point_rank = factory.Sequence(lambda n: int(n))
safe_for = CANDIDATE_NONE
safe_rank = -1
leans = CANDIDATE_NONE
lean_rank = -1
<commit_msg>Use real states in StateFactory<commit_after>
|
from datetime import datetime
import us
import factory
from polling.models import State
from polling.models import CANDIDATE_NONE
class StateFactory(factory.DjangoModelFactory):
class Meta:
model = State
name = factory.Sequence(lambda n: us.STATES[n])
updated = factory.LazyFunction(datetime.now)
abbv = factory.LazyAttribute(
lambda obj: us.states.lookup(unicode(obj.name)).abbr)
tipping_point_rank = factory.Sequence(lambda n: int(n))
safe_for = CANDIDATE_NONE
safe_rank = -1
leans = CANDIDATE_NONE
lean_rank = -1
|
from datetime import datetime
import factory
from polling.models import State
from polling.models import CANDIDATE_NONE
class StateFactory(factory.DjangoModelFactory):
class Meta:
model = State
name = factory.Sequence(lambda n: "state-%d" % n)
updated = factory.LazyFunction(datetime.now)
abbv = factory.LazyAttribute(lambda obj: obj.name[-2:])
tipping_point_rank = factory.Sequence(lambda n: int(n))
safe_for = CANDIDATE_NONE
safe_rank = -1
leans = CANDIDATE_NONE
lean_rank = -1
Use real states in StateFactoryfrom datetime import datetime
import us
import factory
from polling.models import State
from polling.models import CANDIDATE_NONE
class StateFactory(factory.DjangoModelFactory):
class Meta:
model = State
name = factory.Sequence(lambda n: us.STATES[n])
updated = factory.LazyFunction(datetime.now)
abbv = factory.LazyAttribute(
lambda obj: us.states.lookup(unicode(obj.name)).abbr)
tipping_point_rank = factory.Sequence(lambda n: int(n))
safe_for = CANDIDATE_NONE
safe_rank = -1
leans = CANDIDATE_NONE
lean_rank = -1
|
<commit_before>from datetime import datetime
import factory
from polling.models import State
from polling.models import CANDIDATE_NONE
class StateFactory(factory.DjangoModelFactory):
class Meta:
model = State
name = factory.Sequence(lambda n: "state-%d" % n)
updated = factory.LazyFunction(datetime.now)
abbv = factory.LazyAttribute(lambda obj: obj.name[-2:])
tipping_point_rank = factory.Sequence(lambda n: int(n))
safe_for = CANDIDATE_NONE
safe_rank = -1
leans = CANDIDATE_NONE
lean_rank = -1
<commit_msg>Use real states in StateFactory<commit_after>from datetime import datetime
import us
import factory
from polling.models import State
from polling.models import CANDIDATE_NONE
class StateFactory(factory.DjangoModelFactory):
class Meta:
model = State
name = factory.Sequence(lambda n: us.STATES[n])
updated = factory.LazyFunction(datetime.now)
abbv = factory.LazyAttribute(
lambda obj: us.states.lookup(unicode(obj.name)).abbr)
tipping_point_rank = factory.Sequence(lambda n: int(n))
safe_for = CANDIDATE_NONE
safe_rank = -1
leans = CANDIDATE_NONE
lean_rank = -1
|
57015bec555ca2a3f2e5893158d00f2dd2ca441c
|
errs.py
|
errs.py
|
import sys
class ConfigError(Exception):
def __init__(self, message):
self.message = message
sys.stdout.write("\nERROR: " + str(message) + "\n\n")
class ParseError(Exception):
def __init__(self, message):
self.message = message
sys.stdout.write("\nERROR: " + str(message) + "\n\n")
|
import sys
class GenericException(Exception):
def __init__(self, message):
self.message = message
sys.stdout.write("\nERROR: " + str(message) + "\n\n")
class ConfigError(GenericException):
pass
class ParseError(GenericException):
pass
|
Make errors a bit easier to copy
|
Make errors a bit easier to copy
|
Python
|
agpl-3.0
|
OpenTechStrategies/anvil
|
import sys
class ConfigError(Exception):
def __init__(self, message):
self.message = message
sys.stdout.write("\nERROR: " + str(message) + "\n\n")
class ParseError(Exception):
def __init__(self, message):
self.message = message
sys.stdout.write("\nERROR: " + str(message) + "\n\n")
Make errors a bit easier to copy
|
import sys
class GenericException(Exception):
def __init__(self, message):
self.message = message
sys.stdout.write("\nERROR: " + str(message) + "\n\n")
class ConfigError(GenericException):
pass
class ParseError(GenericException):
pass
|
<commit_before>import sys
class ConfigError(Exception):
def __init__(self, message):
self.message = message
sys.stdout.write("\nERROR: " + str(message) + "\n\n")
class ParseError(Exception):
def __init__(self, message):
self.message = message
sys.stdout.write("\nERROR: " + str(message) + "\n\n")
<commit_msg>Make errors a bit easier to copy<commit_after>
|
import sys
class GenericException(Exception):
def __init__(self, message):
self.message = message
sys.stdout.write("\nERROR: " + str(message) + "\n\n")
class ConfigError(GenericException):
pass
class ParseError(GenericException):
pass
|
import sys
class ConfigError(Exception):
def __init__(self, message):
self.message = message
sys.stdout.write("\nERROR: " + str(message) + "\n\n")
class ParseError(Exception):
def __init__(self, message):
self.message = message
sys.stdout.write("\nERROR: " + str(message) + "\n\n")
Make errors a bit easier to copyimport sys
class GenericException(Exception):
def __init__(self, message):
self.message = message
sys.stdout.write("\nERROR: " + str(message) + "\n\n")
class ConfigError(GenericException):
pass
class ParseError(GenericException):
pass
|
<commit_before>import sys
class ConfigError(Exception):
def __init__(self, message):
self.message = message
sys.stdout.write("\nERROR: " + str(message) + "\n\n")
class ParseError(Exception):
def __init__(self, message):
self.message = message
sys.stdout.write("\nERROR: " + str(message) + "\n\n")
<commit_msg>Make errors a bit easier to copy<commit_after>import sys
class GenericException(Exception):
def __init__(self, message):
self.message = message
sys.stdout.write("\nERROR: " + str(message) + "\n\n")
class ConfigError(GenericException):
pass
class ParseError(GenericException):
pass
|
5d8228f44c8aa6fa8d07b3a4b6cb662ccb764bd2
|
cv/urls.py
|
cv/urls.py
|
"""cv URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^djadm/', admin.site.urls),
url(r'^$', include('main.urls')),
]
|
"""cv URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^djadm/', admin.site.urls),
url(r'^', include('main.urls')),
]
|
Fix url pattern for main
|
Fix url pattern for main
|
Python
|
mit
|
cthtuf/django-cv,cthtuf/django-cv,cthtuf/django-cv
|
"""cv URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^djadm/', admin.site.urls),
url(r'^$', include('main.urls')),
]
Fix url pattern for main
|
"""cv URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^djadm/', admin.site.urls),
url(r'^', include('main.urls')),
]
|
<commit_before>"""cv URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^djadm/', admin.site.urls),
url(r'^$', include('main.urls')),
]
<commit_msg>Fix url pattern for main<commit_after>
|
"""cv URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^djadm/', admin.site.urls),
url(r'^', include('main.urls')),
]
|
"""cv URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^djadm/', admin.site.urls),
url(r'^$', include('main.urls')),
]
Fix url pattern for main"""cv URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^djadm/', admin.site.urls),
url(r'^', include('main.urls')),
]
|
<commit_before>"""cv URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^djadm/', admin.site.urls),
url(r'^$', include('main.urls')),
]
<commit_msg>Fix url pattern for main<commit_after>"""cv URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^djadm/', admin.site.urls),
url(r'^', include('main.urls')),
]
|
7e14b99bd26b804805afb9f52dbdaf1b4d6e5a5c
|
fsspec/implementations/tests/test_memory.py
|
fsspec/implementations/tests/test_memory.py
|
import pytest
import sys
def test_1(m):
m.touch("/somefile") # NB: is found with or without initial /
m.touch("afiles/and/anothers")
files = m.find("")
if "somefile" in files:
assert files == ["afiles/and/anothers", "somefile"]
else:
assert files == ["/somefile", "afiles/and/anothers"]
files = list(m.get_mapper(""))
if "somefile" in files:
assert files == ["afiles/and/anothers", "somefile"]
else:
assert files == ["/somefile", "afiles/and/anothers"]
@pytest.mark.xfail(
sys.version_info < (3, 6),
reason="py35 error, see https://github.com/intake/filesystem_spec/issues/148",
)
def test_ls(m):
m.touch("/dir/afile")
m.touch("/dir/dir1/bfile")
m.touch("/dir/dir1/cfile")
assert m.ls("/", False) == ["/dir/"]
assert m.ls("/dir", False) == ["/dir/afile", "/dir/dir1/"]
assert m.ls("/dir", True)[0]["type"] == "file"
assert m.ls("/dir", True)[1]["type"] == "directory"
assert len(m.ls("/dir/dir1")) == 2
|
import pytest
import sys
def test_1(m):
m.touch("/somefile") # NB: is found with or without initial /
m.touch("afiles/and/anothers")
files = m.find("")
if "somefile" in files:
assert files == ["afiles/and/anothers", "somefile"]
else:
assert files == ["/somefile", "afiles/and/anothers"]
files = sorted(m.get_mapper(""))
if "somefile" in files:
assert files == ["afiles/and/anothers", "somefile"]
else:
assert files == ["/somefile", "afiles/and/anothers"]
@pytest.mark.xfail(
sys.version_info < (3, 6),
reason="py35 error, see https://github.com/intake/filesystem_spec/issues/148",
)
def test_ls(m):
m.touch("/dir/afile")
m.touch("/dir/dir1/bfile")
m.touch("/dir/dir1/cfile")
assert m.ls("/", False) == ["/dir/"]
assert m.ls("/dir", False) == ["/dir/afile", "/dir/dir1/"]
assert m.ls("/dir", True)[0]["type"] == "file"
assert m.ls("/dir", True)[1]["type"] == "directory"
assert len(m.ls("/dir/dir1")) == 2
|
Fix ordering issue in test
|
Fix ordering issue in test
|
Python
|
bsd-3-clause
|
fsspec/filesystem_spec,fsspec/filesystem_spec,intake/filesystem_spec
|
import pytest
import sys
def test_1(m):
m.touch("/somefile") # NB: is found with or without initial /
m.touch("afiles/and/anothers")
files = m.find("")
if "somefile" in files:
assert files == ["afiles/and/anothers", "somefile"]
else:
assert files == ["/somefile", "afiles/and/anothers"]
files = list(m.get_mapper(""))
if "somefile" in files:
assert files == ["afiles/and/anothers", "somefile"]
else:
assert files == ["/somefile", "afiles/and/anothers"]
@pytest.mark.xfail(
sys.version_info < (3, 6),
reason="py35 error, see https://github.com/intake/filesystem_spec/issues/148",
)
def test_ls(m):
m.touch("/dir/afile")
m.touch("/dir/dir1/bfile")
m.touch("/dir/dir1/cfile")
assert m.ls("/", False) == ["/dir/"]
assert m.ls("/dir", False) == ["/dir/afile", "/dir/dir1/"]
assert m.ls("/dir", True)[0]["type"] == "file"
assert m.ls("/dir", True)[1]["type"] == "directory"
assert len(m.ls("/dir/dir1")) == 2
Fix ordering issue in test
|
import pytest
import sys
def test_1(m):
m.touch("/somefile") # NB: is found with or without initial /
m.touch("afiles/and/anothers")
files = m.find("")
if "somefile" in files:
assert files == ["afiles/and/anothers", "somefile"]
else:
assert files == ["/somefile", "afiles/and/anothers"]
files = sorted(m.get_mapper(""))
if "somefile" in files:
assert files == ["afiles/and/anothers", "somefile"]
else:
assert files == ["/somefile", "afiles/and/anothers"]
@pytest.mark.xfail(
sys.version_info < (3, 6),
reason="py35 error, see https://github.com/intake/filesystem_spec/issues/148",
)
def test_ls(m):
m.touch("/dir/afile")
m.touch("/dir/dir1/bfile")
m.touch("/dir/dir1/cfile")
assert m.ls("/", False) == ["/dir/"]
assert m.ls("/dir", False) == ["/dir/afile", "/dir/dir1/"]
assert m.ls("/dir", True)[0]["type"] == "file"
assert m.ls("/dir", True)[1]["type"] == "directory"
assert len(m.ls("/dir/dir1")) == 2
|
<commit_before>import pytest
import sys
def test_1(m):
m.touch("/somefile") # NB: is found with or without initial /
m.touch("afiles/and/anothers")
files = m.find("")
if "somefile" in files:
assert files == ["afiles/and/anothers", "somefile"]
else:
assert files == ["/somefile", "afiles/and/anothers"]
files = list(m.get_mapper(""))
if "somefile" in files:
assert files == ["afiles/and/anothers", "somefile"]
else:
assert files == ["/somefile", "afiles/and/anothers"]
@pytest.mark.xfail(
sys.version_info < (3, 6),
reason="py35 error, see https://github.com/intake/filesystem_spec/issues/148",
)
def test_ls(m):
m.touch("/dir/afile")
m.touch("/dir/dir1/bfile")
m.touch("/dir/dir1/cfile")
assert m.ls("/", False) == ["/dir/"]
assert m.ls("/dir", False) == ["/dir/afile", "/dir/dir1/"]
assert m.ls("/dir", True)[0]["type"] == "file"
assert m.ls("/dir", True)[1]["type"] == "directory"
assert len(m.ls("/dir/dir1")) == 2
<commit_msg>Fix ordering issue in test<commit_after>
|
import pytest
import sys
def test_1(m):
m.touch("/somefile") # NB: is found with or without initial /
m.touch("afiles/and/anothers")
files = m.find("")
if "somefile" in files:
assert files == ["afiles/and/anothers", "somefile"]
else:
assert files == ["/somefile", "afiles/and/anothers"]
files = sorted(m.get_mapper(""))
if "somefile" in files:
assert files == ["afiles/and/anothers", "somefile"]
else:
assert files == ["/somefile", "afiles/and/anothers"]
@pytest.mark.xfail(
sys.version_info < (3, 6),
reason="py35 error, see https://github.com/intake/filesystem_spec/issues/148",
)
def test_ls(m):
m.touch("/dir/afile")
m.touch("/dir/dir1/bfile")
m.touch("/dir/dir1/cfile")
assert m.ls("/", False) == ["/dir/"]
assert m.ls("/dir", False) == ["/dir/afile", "/dir/dir1/"]
assert m.ls("/dir", True)[0]["type"] == "file"
assert m.ls("/dir", True)[1]["type"] == "directory"
assert len(m.ls("/dir/dir1")) == 2
|
import pytest
import sys
def test_1(m):
m.touch("/somefile") # NB: is found with or without initial /
m.touch("afiles/and/anothers")
files = m.find("")
if "somefile" in files:
assert files == ["afiles/and/anothers", "somefile"]
else:
assert files == ["/somefile", "afiles/and/anothers"]
files = list(m.get_mapper(""))
if "somefile" in files:
assert files == ["afiles/and/anothers", "somefile"]
else:
assert files == ["/somefile", "afiles/and/anothers"]
@pytest.mark.xfail(
sys.version_info < (3, 6),
reason="py35 error, see https://github.com/intake/filesystem_spec/issues/148",
)
def test_ls(m):
m.touch("/dir/afile")
m.touch("/dir/dir1/bfile")
m.touch("/dir/dir1/cfile")
assert m.ls("/", False) == ["/dir/"]
assert m.ls("/dir", False) == ["/dir/afile", "/dir/dir1/"]
assert m.ls("/dir", True)[0]["type"] == "file"
assert m.ls("/dir", True)[1]["type"] == "directory"
assert len(m.ls("/dir/dir1")) == 2
Fix ordering issue in testimport pytest
import sys
def test_1(m):
m.touch("/somefile") # NB: is found with or without initial /
m.touch("afiles/and/anothers")
files = m.find("")
if "somefile" in files:
assert files == ["afiles/and/anothers", "somefile"]
else:
assert files == ["/somefile", "afiles/and/anothers"]
files = sorted(m.get_mapper(""))
if "somefile" in files:
assert files == ["afiles/and/anothers", "somefile"]
else:
assert files == ["/somefile", "afiles/and/anothers"]
@pytest.mark.xfail(
sys.version_info < (3, 6),
reason="py35 error, see https://github.com/intake/filesystem_spec/issues/148",
)
def test_ls(m):
m.touch("/dir/afile")
m.touch("/dir/dir1/bfile")
m.touch("/dir/dir1/cfile")
assert m.ls("/", False) == ["/dir/"]
assert m.ls("/dir", False) == ["/dir/afile", "/dir/dir1/"]
assert m.ls("/dir", True)[0]["type"] == "file"
assert m.ls("/dir", True)[1]["type"] == "directory"
assert len(m.ls("/dir/dir1")) == 2
|
<commit_before>import pytest
import sys
def test_1(m):
m.touch("/somefile") # NB: is found with or without initial /
m.touch("afiles/and/anothers")
files = m.find("")
if "somefile" in files:
assert files == ["afiles/and/anothers", "somefile"]
else:
assert files == ["/somefile", "afiles/and/anothers"]
files = list(m.get_mapper(""))
if "somefile" in files:
assert files == ["afiles/and/anothers", "somefile"]
else:
assert files == ["/somefile", "afiles/and/anothers"]
@pytest.mark.xfail(
sys.version_info < (3, 6),
reason="py35 error, see https://github.com/intake/filesystem_spec/issues/148",
)
def test_ls(m):
m.touch("/dir/afile")
m.touch("/dir/dir1/bfile")
m.touch("/dir/dir1/cfile")
assert m.ls("/", False) == ["/dir/"]
assert m.ls("/dir", False) == ["/dir/afile", "/dir/dir1/"]
assert m.ls("/dir", True)[0]["type"] == "file"
assert m.ls("/dir", True)[1]["type"] == "directory"
assert len(m.ls("/dir/dir1")) == 2
<commit_msg>Fix ordering issue in test<commit_after>import pytest
import sys
def test_1(m):
m.touch("/somefile") # NB: is found with or without initial /
m.touch("afiles/and/anothers")
files = m.find("")
if "somefile" in files:
assert files == ["afiles/and/anothers", "somefile"]
else:
assert files == ["/somefile", "afiles/and/anothers"]
files = sorted(m.get_mapper(""))
if "somefile" in files:
assert files == ["afiles/and/anothers", "somefile"]
else:
assert files == ["/somefile", "afiles/and/anothers"]
@pytest.mark.xfail(
sys.version_info < (3, 6),
reason="py35 error, see https://github.com/intake/filesystem_spec/issues/148",
)
def test_ls(m):
m.touch("/dir/afile")
m.touch("/dir/dir1/bfile")
m.touch("/dir/dir1/cfile")
assert m.ls("/", False) == ["/dir/"]
assert m.ls("/dir", False) == ["/dir/afile", "/dir/dir1/"]
assert m.ls("/dir", True)[0]["type"] == "file"
assert m.ls("/dir", True)[1]["type"] == "directory"
assert len(m.ls("/dir/dir1")) == 2
|
0f9884e884751aab6be342f68d917afafa61ea54
|
marten/__init__.py
|
marten/__init__.py
|
"""Stupid simple Python configuration management"""
from __future__ import absolute_import
import os as _os
__version__ = '0.5.1'
# Attempt to auto-load a default configuration from files in <cwd>/.marten/ based on the MARTEN_ENV env variable
# MARTEN_ENV defaults to 'default'
_marten_dir = _os.path.join(_os.getcwd(), '.marten')
_os.environ.setdefault('MARTEN_ENV', 'default')
if _os.path.isdir(_marten_dir):
from .configurations import parse_directory as _parse_directory
config = _parse_directory(_marten_dir, _os.environ['MARTEN_ENV'])
else:
from .configurations import Configuration as _Configuration
config = _Configuration({})
|
"""Stupid simple Python configuration management"""
from __future__ import absolute_import
import os as _os
__version__ = '0.5.2'
# Attempt to auto-load a default configuration from files in <cwd>/.marten/ based on the MARTEN_ENV env variable
# MARTEN_ENV defaults to 'default'
_marten_dir = _os.path.join(_os.getcwd(), '.marten')
_os.environ.setdefault('MARTEN_ENV', 'default')
try:
if _os.path.isdir(_marten_dir):
from .configurations import parse_directory as _parse_directory
config = _parse_directory(_marten_dir, _os.environ['MARTEN_ENV'])
else:
from .configurations import Configuration as _Configuration
config = _Configuration({})
except ImportError:
config = None
|
Handle ImportError at package root
|
Handle ImportError at package root
|
Python
|
mit
|
nick-allen/marten
|
"""Stupid simple Python configuration management"""
from __future__ import absolute_import
import os as _os
__version__ = '0.5.1'
# Attempt to auto-load a default configuration from files in <cwd>/.marten/ based on the MARTEN_ENV env variable
# MARTEN_ENV defaults to 'default'
_marten_dir = _os.path.join(_os.getcwd(), '.marten')
_os.environ.setdefault('MARTEN_ENV', 'default')
if _os.path.isdir(_marten_dir):
from .configurations import parse_directory as _parse_directory
config = _parse_directory(_marten_dir, _os.environ['MARTEN_ENV'])
else:
from .configurations import Configuration as _Configuration
config = _Configuration({})
Handle ImportError at package root
|
"""Stupid simple Python configuration management"""
from __future__ import absolute_import
import os as _os
__version__ = '0.5.2'
# Attempt to auto-load a default configuration from files in <cwd>/.marten/ based on the MARTEN_ENV env variable
# MARTEN_ENV defaults to 'default'
_marten_dir = _os.path.join(_os.getcwd(), '.marten')
_os.environ.setdefault('MARTEN_ENV', 'default')
try:
if _os.path.isdir(_marten_dir):
from .configurations import parse_directory as _parse_directory
config = _parse_directory(_marten_dir, _os.environ['MARTEN_ENV'])
else:
from .configurations import Configuration as _Configuration
config = _Configuration({})
except ImportError:
config = None
|
<commit_before>"""Stupid simple Python configuration management"""
from __future__ import absolute_import
import os as _os
__version__ = '0.5.1'
# Attempt to auto-load a default configuration from files in <cwd>/.marten/ based on the MARTEN_ENV env variable
# MARTEN_ENV defaults to 'default'
_marten_dir = _os.path.join(_os.getcwd(), '.marten')
_os.environ.setdefault('MARTEN_ENV', 'default')
if _os.path.isdir(_marten_dir):
from .configurations import parse_directory as _parse_directory
config = _parse_directory(_marten_dir, _os.environ['MARTEN_ENV'])
else:
from .configurations import Configuration as _Configuration
config = _Configuration({})
<commit_msg>Handle ImportError at package root<commit_after>
|
"""Stupid simple Python configuration management"""
from __future__ import absolute_import
import os as _os
__version__ = '0.5.2'
# Attempt to auto-load a default configuration from files in <cwd>/.marten/ based on the MARTEN_ENV env variable
# MARTEN_ENV defaults to 'default'
_marten_dir = _os.path.join(_os.getcwd(), '.marten')
_os.environ.setdefault('MARTEN_ENV', 'default')
try:
if _os.path.isdir(_marten_dir):
from .configurations import parse_directory as _parse_directory
config = _parse_directory(_marten_dir, _os.environ['MARTEN_ENV'])
else:
from .configurations import Configuration as _Configuration
config = _Configuration({})
except ImportError:
config = None
|
"""Stupid simple Python configuration management"""
from __future__ import absolute_import
import os as _os
__version__ = '0.5.1'
# Attempt to auto-load a default configuration from files in <cwd>/.marten/ based on the MARTEN_ENV env variable
# MARTEN_ENV defaults to 'default'
_marten_dir = _os.path.join(_os.getcwd(), '.marten')
_os.environ.setdefault('MARTEN_ENV', 'default')
if _os.path.isdir(_marten_dir):
from .configurations import parse_directory as _parse_directory
config = _parse_directory(_marten_dir, _os.environ['MARTEN_ENV'])
else:
from .configurations import Configuration as _Configuration
config = _Configuration({})
Handle ImportError at package root"""Stupid simple Python configuration management"""
from __future__ import absolute_import
import os as _os
__version__ = '0.5.2'
# Attempt to auto-load a default configuration from files in <cwd>/.marten/ based on the MARTEN_ENV env variable
# MARTEN_ENV defaults to 'default'
_marten_dir = _os.path.join(_os.getcwd(), '.marten')
_os.environ.setdefault('MARTEN_ENV', 'default')
try:
if _os.path.isdir(_marten_dir):
from .configurations import parse_directory as _parse_directory
config = _parse_directory(_marten_dir, _os.environ['MARTEN_ENV'])
else:
from .configurations import Configuration as _Configuration
config = _Configuration({})
except ImportError:
config = None
|
<commit_before>"""Stupid simple Python configuration management"""
from __future__ import absolute_import
import os as _os
__version__ = '0.5.1'
# Attempt to auto-load a default configuration from files in <cwd>/.marten/ based on the MARTEN_ENV env variable
# MARTEN_ENV defaults to 'default'
_marten_dir = _os.path.join(_os.getcwd(), '.marten')
_os.environ.setdefault('MARTEN_ENV', 'default')
if _os.path.isdir(_marten_dir):
from .configurations import parse_directory as _parse_directory
config = _parse_directory(_marten_dir, _os.environ['MARTEN_ENV'])
else:
from .configurations import Configuration as _Configuration
config = _Configuration({})
<commit_msg>Handle ImportError at package root<commit_after>"""Stupid simple Python configuration management"""
from __future__ import absolute_import
import os as _os
__version__ = '0.5.2'
# Attempt to auto-load a default configuration from files in <cwd>/.marten/ based on the MARTEN_ENV env variable
# MARTEN_ENV defaults to 'default'
_marten_dir = _os.path.join(_os.getcwd(), '.marten')
_os.environ.setdefault('MARTEN_ENV', 'default')
try:
if _os.path.isdir(_marten_dir):
from .configurations import parse_directory as _parse_directory
config = _parse_directory(_marten_dir, _os.environ['MARTEN_ENV'])
else:
from .configurations import Configuration as _Configuration
config = _Configuration({})
except ImportError:
config = None
|
25d909d95fe4a065d91eec49f4c3e0fa810233e5
|
DownloadData/download_data.py
|
DownloadData/download_data.py
|
import ocpaccess.download
ocpaccess.download.get_data(
token = "kasthuri11", zoom = 1,
x_start = 0, x_stop = 10752,
y_start = 0, y_stop = 13312,
z_start = 1, z_stop = 1850,
location = "data"
)
# You may want to run the code below (a much smaller sample) first,
# in order to be sure that your environment is set up correctly and
# the server is responding correctly.
"""
ocpaccess.download.get_data(
token = "kasthuri11", zoom = 1,
x_start = 5000, x_stop = 6000,
y_start = 5000, y_stop = 6000,
z_start = 1, z_stop = 185,
location = "sample_data"
)
"""
|
import ocpaccess.download
"""
Two sets of tokens are supplied below -- `common` and `full`.
common The most common data download.)
full LARGE FILE WARNING
For the compressed data, visit [our website].
"""
# Common
common = ['kasthuri11',
'kat11vesicles',
'kat11segments',
'kat11synapses',
'kat11mito']
# Full
full = ['kasthuri11',
'kasthuri11cc',
'kasthuri14Maine',
'kasthuri14s1colEM',
'kasthuri14s1colANNO',
'kat11mojocylinder',
'kat11redcylinder',
'kat11greencylinder',
'ac3',
'ac4',
'kat11vesicles',
'kat11segments',
'kat11synapses',
'kat11mito']
download_tokens = # put `common` or `full`
# download_tokens = common
for t in download_tokens:
ocpaccess.download.get_data(
token = t, zoom = 1,
start = 0, x_stop = 10752,
y_start = 0, y_stop = 13312,
z_start = 1, z_stop = 1850,
location = t
)
# You may want to run the code below (a much smaller sample) first,
# in order to be sure that your environment is set up correctly and
# the server is responding correctly.
"""
ocpaccess.download.get_data(
token = "kasthuri11", zoom = 1,
x_start = 5000, x_stop = 6000,
y_start = 5000, y_stop = 6000,
z_start = 1, z_stop = 185,
location = "sample_data"
)
"""
|
Add script to download by 'set'
|
Add script to download by 'set'
|
Python
|
apache-2.0
|
openconnectome/kasthuri2015,neurodata/kasthuri2015,openconnectome/kasthuri2015,openconnectome/Kasthuri-et-al-2014,neurodata/kasthuri2015,openconnectome/Kasthuri-et-al-2014,openconnectome/Kasthuri-et-al-2014,neurodata/kasthuri2015,openconnectome/kasthuri2015
|
import ocpaccess.download
ocpaccess.download.get_data(
token = "kasthuri11", zoom = 1,
x_start = 0, x_stop = 10752,
y_start = 0, y_stop = 13312,
z_start = 1, z_stop = 1850,
location = "data"
)
# You may want to run the code below (a much smaller sample) first,
# in order to be sure that your environment is set up correctly and
# the server is responding correctly.
"""
ocpaccess.download.get_data(
token = "kasthuri11", zoom = 1,
x_start = 5000, x_stop = 6000,
y_start = 5000, y_stop = 6000,
z_start = 1, z_stop = 185,
location = "sample_data"
)
"""
Add script to download by 'set'
|
import ocpaccess.download

"""
Download Kasthuri data by 'set'.

Two sets of tokens are supplied below -- `common` and `full`.
common : The most common data download.
full   : LARGE FILE WARNING.
For the compressed data, visit [our website].
"""

# Common: the handful of tokens most users need.
common = ['kasthuri11',
          'kat11vesicles',
          'kat11segments',
          'kat11synapses',
          'kat11mito']

# Full: every available token. LARGE FILE WARNING.
full = ['kasthuri11',
        'kasthuri11cc',
        'kasthuri14Maine',
        'kasthuri14s1colEM',
        'kasthuri14s1colANNO',
        'kat11mojocylinder',
        'kat11redcylinder',
        'kat11greencylinder',
        'ac3',
        'ac4',
        'kat11vesicles',
        'kat11segments',
        'kat11synapses',
        'kat11mito']

# Choose which set to download: `common` or `full`.
# (The original left the right-hand side empty, which is a SyntaxError.)
download_tokens = common

for t in download_tokens:
    ocpaccess.download.get_data(
        token = t, zoom = 1,
        # Was `start = 0`: get_data takes x_start (see every other call
        # to it in this file), so the bare `start` kwarg would raise.
        x_start = 0, x_stop = 10752,
        y_start = 0, y_stop = 13312,
        z_start = 1, z_stop = 1850,
        location = t
    )

# You may want to run the code below (a much smaller sample) first,
# in order to be sure that your environment is set up correctly and
# the server is responding correctly.
"""
ocpaccess.download.get_data(
    token = "kasthuri11", zoom = 1,
    x_start = 5000, x_stop = 6000,
    y_start = 5000, y_stop = 6000,
    z_start = 1, z_stop = 185,
    location = "sample_data"
)
"""
|
<commit_before>import ocpaccess.download
ocpaccess.download.get_data(
token = "kasthuri11", zoom = 1,
x_start = 0, x_stop = 10752,
y_start = 0, y_stop = 13312,
z_start = 1, z_stop = 1850,
location = "data"
)
# You may want to run the code below (a much smaller sample) first,
# in order to be sure that your environment is set up correctly and
# the server is responding correctly.
"""
ocpaccess.download.get_data(
token = "kasthuri11", zoom = 1,
x_start = 5000, x_stop = 6000,
y_start = 5000, y_stop = 6000,
z_start = 1, z_stop = 185,
location = "sample_data"
)
"""
<commit_msg>Add script to download by 'set'<commit_after>
|
import ocpaccess.download
"""
Two sets of tokens are supplied below -- `common` and `full`.
common The most common data download.)
full LARGE FILE WARNING
For the compressed data, visit [our website].
"""
# Common
common = ['kasthuri11',
'kat11vesicles',
'kat11segments',
'kat11synapses',
'kat11mito']
# Full
full = ['kasthuri11',
'kasthuri11cc',
'kasthuri14Maine',
'kasthuri14s1colEM',
'kasthuri14s1colANNO',
'kat11mojocylinder',
'kat11redcylinder',
'kat11greencylinder',
'ac3',
'ac4',
'kat11vesicles',
'kat11segments',
'kat11synapses',
'kat11mito']
download_tokens = # put `common` or `full`
# download_tokens = common
for t in download_tokens:
ocpaccess.download.get_data(
token = t, zoom = 1,
start = 0, x_stop = 10752,
y_start = 0, y_stop = 13312,
z_start = 1, z_stop = 1850,
location = t
)
# You may want to run the code below (a much smaller sample) first,
# in order to be sure that your environment is set up correctly and
# the server is responding correctly.
"""
ocpaccess.download.get_data(
token = "kasthuri11", zoom = 1,
x_start = 5000, x_stop = 6000,
y_start = 5000, y_stop = 6000,
z_start = 1, z_stop = 185,
location = "sample_data"
)
"""
|
import ocpaccess.download
ocpaccess.download.get_data(
token = "kasthuri11", zoom = 1,
x_start = 0, x_stop = 10752,
y_start = 0, y_stop = 13312,
z_start = 1, z_stop = 1850,
location = "data"
)
# You may want to run the code below (a much smaller sample) first,
# in order to be sure that your environment is set up correctly and
# the server is responding correctly.
"""
ocpaccess.download.get_data(
token = "kasthuri11", zoom = 1,
x_start = 5000, x_stop = 6000,
y_start = 5000, y_stop = 6000,
z_start = 1, z_stop = 185,
location = "sample_data"
)
"""
Add script to download by 'set'import ocpaccess.download
"""
Two sets of tokens are supplied below -- `common` and `full`.
common The most common data download.)
full LARGE FILE WARNING
For the compressed data, visit [our website].
"""
# Common
common = ['kasthuri11',
'kat11vesicles',
'kat11segments',
'kat11synapses',
'kat11mito']
# Full
full = ['kasthuri11',
'kasthuri11cc',
'kasthuri14Maine',
'kasthuri14s1colEM',
'kasthuri14s1colANNO',
'kat11mojocylinder',
'kat11redcylinder',
'kat11greencylinder',
'ac3',
'ac4',
'kat11vesicles',
'kat11segments',
'kat11synapses',
'kat11mito']
download_tokens = # put `common` or `full`
# download_tokens = common
for t in download_tokens:
ocpaccess.download.get_data(
token = t, zoom = 1,
start = 0, x_stop = 10752,
y_start = 0, y_stop = 13312,
z_start = 1, z_stop = 1850,
location = t
)
# You may want to run the code below (a much smaller sample) first,
# in order to be sure that your environment is set up correctly and
# the server is responding correctly.
"""
ocpaccess.download.get_data(
token = "kasthuri11", zoom = 1,
x_start = 5000, x_stop = 6000,
y_start = 5000, y_stop = 6000,
z_start = 1, z_stop = 185,
location = "sample_data"
)
"""
|
<commit_before>import ocpaccess.download
ocpaccess.download.get_data(
token = "kasthuri11", zoom = 1,
x_start = 0, x_stop = 10752,
y_start = 0, y_stop = 13312,
z_start = 1, z_stop = 1850,
location = "data"
)
# You may want to run the code below (a much smaller sample) first,
# in order to be sure that your environment is set up correctly and
# the server is responding correctly.
"""
ocpaccess.download.get_data(
token = "kasthuri11", zoom = 1,
x_start = 5000, x_stop = 6000,
y_start = 5000, y_stop = 6000,
z_start = 1, z_stop = 185,
location = "sample_data"
)
"""
<commit_msg>Add script to download by 'set'<commit_after>import ocpaccess.download
"""
Two sets of tokens are supplied below -- `common` and `full`.
common The most common data download.)
full LARGE FILE WARNING
For the compressed data, visit [our website].
"""
# Common
common = ['kasthuri11',
'kat11vesicles',
'kat11segments',
'kat11synapses',
'kat11mito']
# Full
full = ['kasthuri11',
'kasthuri11cc',
'kasthuri14Maine',
'kasthuri14s1colEM',
'kasthuri14s1colANNO',
'kat11mojocylinder',
'kat11redcylinder',
'kat11greencylinder',
'ac3',
'ac4',
'kat11vesicles',
'kat11segments',
'kat11synapses',
'kat11mito']
download_tokens = # put `common` or `full`
# download_tokens = common
for t in download_tokens:
ocpaccess.download.get_data(
token = t, zoom = 1,
start = 0, x_stop = 10752,
y_start = 0, y_stop = 13312,
z_start = 1, z_stop = 1850,
location = t
)
# You may want to run the code below (a much smaller sample) first,
# in order to be sure that your environment is set up correctly and
# the server is responding correctly.
"""
ocpaccess.download.get_data(
token = "kasthuri11", zoom = 1,
x_start = 5000, x_stop = 6000,
y_start = 5000, y_stop = 6000,
z_start = 1, z_stop = 185,
location = "sample_data"
)
"""
|
01b25dd0df59ba7a309a25433abc09f86d5d5096
|
app/main/messaging.py
|
app/main/messaging.py
|
from flask import request, session
from . import main
import twilio.twiml
@main.route("/report_incident", methods=['GET', 'POST'])
def handle_message():
step = session.get('step', 0)
message = request.values.get('Body') # noqa
resp = twilio.twiml.Response()
if step is 0:
resp.message("Welcome to step 0")
elif step is 1:
resp.message("Welcome to step 1")
elif step is 2:
resp.message("Welcome to step 2")
else:
resp.message("Welcome to the final step")
session['step'] = step + 1
return str(resp)
|
from flask import request, session
from . import main
import twilio.twiml
@main.route("/report_incident", methods=['GET', 'POST'])
def handle_message():
message = request.values.get('Body') # noqa
resp = twilio.twiml.Response()
if message.lower().contains('report'):
step = session.get('step', 0)
if step is 0:
resp.message("Which Agency Owns the Vehicle? A)SEPTA Bus, B)SEPTA CCT, C)SEPTA, D)PWD, E)PECO, F)Streets, G)Others") # noqa
elif step is 1:
resp.message("What is the License Plate Number? (eg.MG-1234E)")
elif step is 2:
resp.message("What is the Vehicle ID? (eg.105014)")
elif step is 3:
resp.message("How many minutes has it been Idling for? (eg. 10)")
elif step is 4:
resp.message("Please describe the situation (eg. The driver is sleeping)") # noqa
else:
resp.message("Thanks!")
session['step'] = step + 1
return str(resp)
main.secret_key = '7c\xf9\r\xa7\xea\xdc\xef\x96\xf7\x8c\xaf\xdeW!\x81jp\xf7[}%\xda2' # noqa
|
Add secret key for sessioning
|
Add secret key for sessioning
|
Python
|
mit
|
hack4impact/clean-air-council,hack4impact/clean-air-council,hack4impact/clean-air-council
|
from flask import request, session
from . import main
import twilio.twiml


@main.route("/report_incident", methods=['GET', 'POST'])
def handle_message():
    """Walk an SMS sender through a numbered sequence of steps.

    The sender's progress is kept in the Flask session under 'step';
    each incoming message advances it by one.
    """
    step = session.get('step', 0)
    message = request.values.get('Body')  # noqa
    resp = twilio.twiml.Response()
    # Compare with == rather than `is`: identity comparison of ints is a
    # CPython small-int-cache artifact, not a correctness guarantee.
    if step == 0:
        resp.message("Welcome to step 0")
    elif step == 1:
        resp.message("Welcome to step 1")
    elif step == 2:
        resp.message("Welcome to step 2")
    else:
        resp.message("Welcome to the final step")
    session['step'] = step + 1
    return str(resp)
Add scret key for sessioning
|
from flask import request, session
from . import main
import twilio.twiml
@main.route("/report_incident", methods=['GET', 'POST'])
def handle_message():
message = request.values.get('Body') # noqa
resp = twilio.twiml.Response()
if message.lower().contains('report'):
step = session.get('step', 0)
if step is 0:
resp.message("Which Agency Owns the Vehicle? A)SEPTA Bus, B)SEPTA CCT, C)SEPTA, D)PWD, E)PECO, F)Streets, G)Others") # noqa
elif step is 1:
resp.message("What is the License Plate Number? (eg.MG-1234E)")
elif step is 2:
resp.message("What is the Vehicle ID? (eg.105014)")
elif step is 3:
resp.message("How many minutes has it been Idling for? (eg. 10)")
elif step is 4:
resp.message("Please describe the situation (eg. The driver is sleeping)") # noqa
else:
resp.message("Thanks!")
session['step'] = step + 1
return str(resp)
main.secret_key = '7c\xf9\r\xa7\xea\xdc\xef\x96\xf7\x8c\xaf\xdeW!\x81jp\xf7[}%\xda2' # noqa
|
<commit_before>from flask import request, session
from . import main
import twilio.twiml
@main.route("/report_incident", methods=['GET', 'POST'])
def handle_message():
step = session.get('step', 0)
message = request.values.get('Body') # noqa
resp = twilio.twiml.Response()
if step is 0:
resp.message("Welcome to step 0")
elif step is 1:
resp.message("Welcome to step 1")
elif step is 2:
resp.message("Welcome to step 2")
else:
resp.message("Welcome to the final step")
session['step'] = step + 1
return str(resp)
<commit_msg>Add scret key for sessioning<commit_after>
|
from flask import request, session
from . import main
import twilio.twiml


@main.route("/report_incident", methods=['GET', 'POST'])
def handle_message():
    """Drive the SMS incident-report conversation, one question per step.

    Progress is tracked in the Flask session; a message containing
    'report' advances the sender one question at a time.
    """
    message = request.values.get('Body')  # noqa
    resp = twilio.twiml.Response()
    # str has no .contains() method -- membership is tested with `in`;
    # the original line raised AttributeError on every request.
    if 'report' in message.lower():
        step = session.get('step', 0)
        # == rather than `is`: int identity is a CPython implementation
        # detail and fails outside the small-int cache.
        if step == 0:
            resp.message("Which Agency Owns the Vehicle? A)SEPTA Bus, B)SEPTA CCT, C)SEPTA, D)PWD, E)PECO, F)Streets, G)Others")  # noqa
        elif step == 1:
            resp.message("What is the License Plate Number? (eg.MG-1234E)")
        elif step == 2:
            resp.message("What is the Vehicle ID? (eg.105014)")
        elif step == 3:
            resp.message("How many minutes has it been Idling for? (eg. 10)")
        elif step == 4:
            resp.message("Please describe the situation (eg. The driver is sleeping)")  # noqa
        else:
            resp.message("Thanks!")
        session['step'] = step + 1
    return str(resp)

main.secret_key = '7c\xf9\r\xa7\xea\xdc\xef\x96\xf7\x8c\xaf\xdeW!\x81jp\xf7[}%\xda2'  # noqa
|
from flask import request, session
from . import main
import twilio.twiml
@main.route("/report_incident", methods=['GET', 'POST'])
def handle_message():
step = session.get('step', 0)
message = request.values.get('Body') # noqa
resp = twilio.twiml.Response()
if step is 0:
resp.message("Welcome to step 0")
elif step is 1:
resp.message("Welcome to step 1")
elif step is 2:
resp.message("Welcome to step 2")
else:
resp.message("Welcome to the final step")
session['step'] = step + 1
return str(resp)
Add scret key for sessioningfrom flask import request, session
from . import main
import twilio.twiml
@main.route("/report_incident", methods=['GET', 'POST'])
def handle_message():
message = request.values.get('Body') # noqa
resp = twilio.twiml.Response()
if message.lower().contains('report'):
step = session.get('step', 0)
if step is 0:
resp.message("Which Agency Owns the Vehicle? A)SEPTA Bus, B)SEPTA CCT, C)SEPTA, D)PWD, E)PECO, F)Streets, G)Others") # noqa
elif step is 1:
resp.message("What is the License Plate Number? (eg.MG-1234E)")
elif step is 2:
resp.message("What is the Vehicle ID? (eg.105014)")
elif step is 3:
resp.message("How many minutes has it been Idling for? (eg. 10)")
elif step is 4:
resp.message("Please describe the situation (eg. The driver is sleeping)") # noqa
else:
resp.message("Thanks!")
session['step'] = step + 1
return str(resp)
main.secret_key = '7c\xf9\r\xa7\xea\xdc\xef\x96\xf7\x8c\xaf\xdeW!\x81jp\xf7[}%\xda2' # noqa
|
<commit_before>from flask import request, session
from . import main
import twilio.twiml
@main.route("/report_incident", methods=['GET', 'POST'])
def handle_message():
step = session.get('step', 0)
message = request.values.get('Body') # noqa
resp = twilio.twiml.Response()
if step is 0:
resp.message("Welcome to step 0")
elif step is 1:
resp.message("Welcome to step 1")
elif step is 2:
resp.message("Welcome to step 2")
else:
resp.message("Welcome to the final step")
session['step'] = step + 1
return str(resp)
<commit_msg>Add scret key for sessioning<commit_after>from flask import request, session
from . import main
import twilio.twiml
@main.route("/report_incident", methods=['GET', 'POST'])
def handle_message():
message = request.values.get('Body') # noqa
resp = twilio.twiml.Response()
if message.lower().contains('report'):
step = session.get('step', 0)
if step is 0:
resp.message("Which Agency Owns the Vehicle? A)SEPTA Bus, B)SEPTA CCT, C)SEPTA, D)PWD, E)PECO, F)Streets, G)Others") # noqa
elif step is 1:
resp.message("What is the License Plate Number? (eg.MG-1234E)")
elif step is 2:
resp.message("What is the Vehicle ID? (eg.105014)")
elif step is 3:
resp.message("How many minutes has it been Idling for? (eg. 10)")
elif step is 4:
resp.message("Please describe the situation (eg. The driver is sleeping)") # noqa
else:
resp.message("Thanks!")
session['step'] = step + 1
return str(resp)
main.secret_key = '7c\xf9\r\xa7\xea\xdc\xef\x96\xf7\x8c\xaf\xdeW!\x81jp\xf7[}%\xda2' # noqa
|
db5f4d9325d1f1c67160c925b83e8a4574d4cb9a
|
portal/factories/celery.py
|
portal/factories/celery.py
|
from __future__ import absolute_import
from celery import Celery
__celery = None
def create_celery(app):
global __celery
if __celery:
return __celery
celery = Celery(
app.import_name,
broker=app.config['CELERY_BROKER_URL']
)
celery.conf.update(app.config)
TaskBase = celery.Task
class ContextTask(TaskBase):
abstract = True
def __call__(self, *args, **kwargs):
with app.app_context():
return TaskBase.__call__(self, *args, **kwargs)
celery.Task = ContextTask
__celery = celery
return __celery
|
from __future__ import absolute_import
from celery import Celery
from ..extensions import db
__celery = None
def create_celery(app):
global __celery
if __celery:
return __celery
celery = Celery(
app.import_name,
broker=app.config['CELERY_BROKER_URL']
)
celery.conf.update(app.config)
TaskBase = celery.Task
class ContextTask(TaskBase):
abstract = True
def __call__(self, *args, **kwargs):
with app.app_context():
db.session = db.create_scoped_session()
try:
response = TaskBase.__call__(self, *args, **kwargs)
finally:
db.session.remove()
return response
celery.Task = ContextTask
__celery = celery
return __celery
|
Remove DB session after task
|
Remove DB session after task
|
Python
|
bsd-3-clause
|
uwcirg/true_nth_usa_portal,uwcirg/true_nth_usa_portal,uwcirg/true_nth_usa_portal,uwcirg/true_nth_usa_portal
|
from __future__ import absolute_import

from celery import Celery

# Module-level singleton: the Celery app is built once and reused on
# every later call to create_celery().
__celery = None


def create_celery(app):
    """Create (or return the cached) Celery app bound to the Flask app.

    Tasks are wrapped in a custom base class so each task body executes
    inside the Flask application context.
    """
    global __celery
    if __celery:
        return __celery
    celery = Celery(
        app.import_name,
        broker=app.config['CELERY_BROKER_URL']
    )
    celery.conf.update(app.config)
    TaskBase = celery.Task

    class ContextTask(TaskBase):
        # Abstract so Celery does not register this helper class as a task.
        abstract = True

        def __call__(self, *args, **kwargs):
            # Run the task inside the Flask app context so extensions
            # (config, db, etc.) are available.
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask
    __celery = celery
    return __celery
Remove DB session after task
|
from __future__ import absolute_import
from celery import Celery
from ..extensions import db
__celery = None
def create_celery(app):
global __celery
if __celery:
return __celery
celery = Celery(
app.import_name,
broker=app.config['CELERY_BROKER_URL']
)
celery.conf.update(app.config)
TaskBase = celery.Task
class ContextTask(TaskBase):
abstract = True
def __call__(self, *args, **kwargs):
with app.app_context():
db.session = db.create_scoped_session()
try:
response = TaskBase.__call__(self, *args, **kwargs)
finally:
db.session.remove()
return response
celery.Task = ContextTask
__celery = celery
return __celery
|
<commit_before>from __future__ import absolute_import
from celery import Celery
__celery = None
def create_celery(app):
global __celery
if __celery:
return __celery
celery = Celery(
app.import_name,
broker=app.config['CELERY_BROKER_URL']
)
celery.conf.update(app.config)
TaskBase = celery.Task
class ContextTask(TaskBase):
abstract = True
def __call__(self, *args, **kwargs):
with app.app_context():
return TaskBase.__call__(self, *args, **kwargs)
celery.Task = ContextTask
__celery = celery
return __celery
<commit_msg>Remove DB session after task<commit_after>
|
from __future__ import absolute_import

from celery import Celery

from ..extensions import db

# Module-level singleton: the Celery app is built once and reused on
# every later call to create_celery().
__celery = None


def create_celery(app):
    """Create (or return the cached) Celery app bound to the Flask app.

    Tasks run inside the Flask application context with a fresh scoped
    DB session that is removed when the task finishes.
    """
    global __celery
    if __celery:
        return __celery
    celery = Celery(
        app.import_name,
        broker=app.config['CELERY_BROKER_URL']
    )
    celery.conf.update(app.config)
    TaskBase = celery.Task

    class ContextTask(TaskBase):
        # Abstract so Celery does not register this helper class as a task.
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                # Give this task its own scoped session, and remove it in
                # finally so the session is cleaned up even when the task
                # raises -- prevents state leaking between tasks on a worker.
                db.session = db.create_scoped_session()
                try:
                    response = TaskBase.__call__(self, *args, **kwargs)
                finally:
                    db.session.remove()
                return response

    celery.Task = ContextTask
    __celery = celery
    return __celery
|
from __future__ import absolute_import
from celery import Celery
__celery = None
def create_celery(app):
global __celery
if __celery:
return __celery
celery = Celery(
app.import_name,
broker=app.config['CELERY_BROKER_URL']
)
celery.conf.update(app.config)
TaskBase = celery.Task
class ContextTask(TaskBase):
abstract = True
def __call__(self, *args, **kwargs):
with app.app_context():
return TaskBase.__call__(self, *args, **kwargs)
celery.Task = ContextTask
__celery = celery
return __celery
Remove DB session after taskfrom __future__ import absolute_import
from celery import Celery
from ..extensions import db
__celery = None
def create_celery(app):
global __celery
if __celery:
return __celery
celery = Celery(
app.import_name,
broker=app.config['CELERY_BROKER_URL']
)
celery.conf.update(app.config)
TaskBase = celery.Task
class ContextTask(TaskBase):
abstract = True
def __call__(self, *args, **kwargs):
with app.app_context():
db.session = db.create_scoped_session()
try:
response = TaskBase.__call__(self, *args, **kwargs)
finally:
db.session.remove()
return response
celery.Task = ContextTask
__celery = celery
return __celery
|
<commit_before>from __future__ import absolute_import
from celery import Celery
__celery = None
def create_celery(app):
global __celery
if __celery:
return __celery
celery = Celery(
app.import_name,
broker=app.config['CELERY_BROKER_URL']
)
celery.conf.update(app.config)
TaskBase = celery.Task
class ContextTask(TaskBase):
abstract = True
def __call__(self, *args, **kwargs):
with app.app_context():
return TaskBase.__call__(self, *args, **kwargs)
celery.Task = ContextTask
__celery = celery
return __celery
<commit_msg>Remove DB session after task<commit_after>from __future__ import absolute_import
from celery import Celery
from ..extensions import db
__celery = None
def create_celery(app):
global __celery
if __celery:
return __celery
celery = Celery(
app.import_name,
broker=app.config['CELERY_BROKER_URL']
)
celery.conf.update(app.config)
TaskBase = celery.Task
class ContextTask(TaskBase):
abstract = True
def __call__(self, *args, **kwargs):
with app.app_context():
db.session = db.create_scoped_session()
try:
response = TaskBase.__call__(self, *args, **kwargs)
finally:
db.session.remove()
return response
celery.Task = ContextTask
__celery = celery
return __celery
|
eb796cc0473ee1c3805e172e5f8035ef16f89c76
|
micromanager/resources/base.py
|
micromanager/resources/base.py
|
from abc import ABCMeta, abstractmethod
from googleapiclienthelpers.discovery import build_subresource
class ResourceBase(metaclass=ABCMeta):
@abstractmethod
def get(self):
pass
@abstractmethod
def update(self):
pass
class GoogleAPIResourceBase(ResourceBase, metaclass=ABCMeta):
# Names of the get and update methods. Most are the same but override in
# the Resource if necessary
get_method = "get"
update_method = "update"
def __init__(self, resource_data, **kwargs):
full_resource_path = "{}.{}".format(
self.service_name,
self.resource_path
)
self.service = build_subresource(
full_resource_path,
self.version,
**kwargs
)
self.resource_data = resource_data
def get(self):
method = getattr(self.service, self.get_method)
return method(**self._get_request_args()).execute()
def update(self, body):
method = getattr(self.service, self.update_method)
return method(**self._update_request_args(body)).execute()
|
from abc import ABCMeta, abstractmethod
from googleapiclienthelpers.discovery import build_subresource
class ResourceBase(metaclass=ABCMeta):
@abstractmethod
def get(self):
pass
@abstractmethod
def update(self):
pass
class GoogleAPIResourceBase(ResourceBase, metaclass=ABCMeta):
# Names of the get and update methods. Most are the same but override in
# the Resource if necessary
get_method = "get"
update_method = "update"
def __init__(self, resource_data, **kwargs):
full_resource_path = "{}.{}".format(
self.service_name,
self.resource_path
)
self.service = build_subresource(
full_resource_path,
self.version,
**kwargs
)
self.resource_data = resource_data
def type(self):
return ".".join(["gcp", self.service_name, self.resource_path])
def get(self):
method = getattr(self.service, self.get_method)
return method(**self._get_request_args()).execute()
def update(self, body):
method = getattr(self.service, self.update_method)
return method(**self._update_request_args(body)).execute()
|
Add a 'type()' function to resources for policy engines to lookup policies
|
Add a 'type()' function to resources for policy engines to lookup policies
|
Python
|
apache-2.0
|
forseti-security/resource-policy-evaluation-library
|
from abc import ABCMeta, abstractmethod
from googleapiclienthelpers.discovery import build_subresource
class ResourceBase(metaclass=ABCMeta):
@abstractmethod
def get(self):
pass
@abstractmethod
def update(self):
pass
class GoogleAPIResourceBase(ResourceBase, metaclass=ABCMeta):
# Names of the get and update methods. Most are the same but override in
# the Resource if necessary
get_method = "get"
update_method = "update"
def __init__(self, resource_data, **kwargs):
full_resource_path = "{}.{}".format(
self.service_name,
self.resource_path
)
self.service = build_subresource(
full_resource_path,
self.version,
**kwargs
)
self.resource_data = resource_data
def get(self):
method = getattr(self.service, self.get_method)
return method(**self._get_request_args()).execute()
def update(self, body):
method = getattr(self.service, self.update_method)
return method(**self._update_request_args(body)).execute()
Add a 'type()' function to resources for policy engines to lookup policies
|
from abc import ABCMeta, abstractmethod
from googleapiclienthelpers.discovery import build_subresource
class ResourceBase(metaclass=ABCMeta):
@abstractmethod
def get(self):
pass
@abstractmethod
def update(self):
pass
class GoogleAPIResourceBase(ResourceBase, metaclass=ABCMeta):
# Names of the get and update methods. Most are the same but override in
# the Resource if necessary
get_method = "get"
update_method = "update"
def __init__(self, resource_data, **kwargs):
full_resource_path = "{}.{}".format(
self.service_name,
self.resource_path
)
self.service = build_subresource(
full_resource_path,
self.version,
**kwargs
)
self.resource_data = resource_data
def type(self):
return ".".join(["gcp", self.service_name, self.resource_path])
def get(self):
method = getattr(self.service, self.get_method)
return method(**self._get_request_args()).execute()
def update(self, body):
method = getattr(self.service, self.update_method)
return method(**self._update_request_args(body)).execute()
|
<commit_before>from abc import ABCMeta, abstractmethod
from googleapiclienthelpers.discovery import build_subresource
class ResourceBase(metaclass=ABCMeta):
@abstractmethod
def get(self):
pass
@abstractmethod
def update(self):
pass
class GoogleAPIResourceBase(ResourceBase, metaclass=ABCMeta):
# Names of the get and update methods. Most are the same but override in
# the Resource if necessary
get_method = "get"
update_method = "update"
def __init__(self, resource_data, **kwargs):
full_resource_path = "{}.{}".format(
self.service_name,
self.resource_path
)
self.service = build_subresource(
full_resource_path,
self.version,
**kwargs
)
self.resource_data = resource_data
def get(self):
method = getattr(self.service, self.get_method)
return method(**self._get_request_args()).execute()
def update(self, body):
method = getattr(self.service, self.update_method)
return method(**self._update_request_args(body)).execute()
<commit_msg>Add a 'type()' function to resources for policy engines to lookup policies<commit_after>
|
from abc import ABCMeta, abstractmethod

from googleapiclienthelpers.discovery import build_subresource


class ResourceBase(metaclass=ABCMeta):
    """Abstract interface: a resource that can be fetched and updated."""

    @abstractmethod
    def get(self):
        pass

    @abstractmethod
    def update(self):
        pass


class GoogleAPIResourceBase(ResourceBase, metaclass=ABCMeta):
    """Base class for resources backed by a Google API client subresource.

    Subclasses are expected to provide `service_name`, `resource_path`
    and `version` class attributes, plus `_get_request_args()` and
    `_update_request_args(body)` -- TODO confirm against concrete
    subclasses, which are outside this view.
    """

    # Names of the get and update methods. Most are the same but override in
    # the Resource if necessary
    get_method = "get"
    update_method = "update"

    def __init__(self, resource_data, **kwargs):
        # Dotted path such as "<service>.<resource>" used to build the
        # API client subresource.
        full_resource_path = "{}.{}".format(
            self.service_name,
            self.resource_path
        )
        self.service = build_subresource(
            full_resource_path,
            self.version,
            **kwargs
        )
        self.resource_data = resource_data

    def type(self):
        # Dotted type identifier ("gcp.<service>.<resource>") for policy
        # engines to look up applicable policies.
        return ".".join(["gcp", self.service_name, self.resource_path])

    def get(self):
        method = getattr(self.service, self.get_method)
        return method(**self._get_request_args()).execute()

    def update(self, body):
        method = getattr(self.service, self.update_method)
        return method(**self._update_request_args(body)).execute()
|
from abc import ABCMeta, abstractmethod
from googleapiclienthelpers.discovery import build_subresource
class ResourceBase(metaclass=ABCMeta):
@abstractmethod
def get(self):
pass
@abstractmethod
def update(self):
pass
class GoogleAPIResourceBase(ResourceBase, metaclass=ABCMeta):
# Names of the get and update methods. Most are the same but override in
# the Resource if necessary
get_method = "get"
update_method = "update"
def __init__(self, resource_data, **kwargs):
full_resource_path = "{}.{}".format(
self.service_name,
self.resource_path
)
self.service = build_subresource(
full_resource_path,
self.version,
**kwargs
)
self.resource_data = resource_data
def get(self):
method = getattr(self.service, self.get_method)
return method(**self._get_request_args()).execute()
def update(self, body):
method = getattr(self.service, self.update_method)
return method(**self._update_request_args(body)).execute()
Add a 'type()' function to resources for policy engines to lookup policiesfrom abc import ABCMeta, abstractmethod
from googleapiclienthelpers.discovery import build_subresource
class ResourceBase(metaclass=ABCMeta):
@abstractmethod
def get(self):
pass
@abstractmethod
def update(self):
pass
class GoogleAPIResourceBase(ResourceBase, metaclass=ABCMeta):
# Names of the get and update methods. Most are the same but override in
# the Resource if necessary
get_method = "get"
update_method = "update"
def __init__(self, resource_data, **kwargs):
full_resource_path = "{}.{}".format(
self.service_name,
self.resource_path
)
self.service = build_subresource(
full_resource_path,
self.version,
**kwargs
)
self.resource_data = resource_data
def type(self):
return ".".join(["gcp", self.service_name, self.resource_path])
def get(self):
method = getattr(self.service, self.get_method)
return method(**self._get_request_args()).execute()
def update(self, body):
method = getattr(self.service, self.update_method)
return method(**self._update_request_args(body)).execute()
|
<commit_before>from abc import ABCMeta, abstractmethod
from googleapiclienthelpers.discovery import build_subresource
class ResourceBase(metaclass=ABCMeta):
@abstractmethod
def get(self):
pass
@abstractmethod
def update(self):
pass
class GoogleAPIResourceBase(ResourceBase, metaclass=ABCMeta):
# Names of the get and update methods. Most are the same but override in
# the Resource if necessary
get_method = "get"
update_method = "update"
def __init__(self, resource_data, **kwargs):
full_resource_path = "{}.{}".format(
self.service_name,
self.resource_path
)
self.service = build_subresource(
full_resource_path,
self.version,
**kwargs
)
self.resource_data = resource_data
def get(self):
method = getattr(self.service, self.get_method)
return method(**self._get_request_args()).execute()
def update(self, body):
method = getattr(self.service, self.update_method)
return method(**self._update_request_args(body)).execute()
<commit_msg>Add a 'type()' function to resources for policy engines to lookup policies<commit_after>from abc import ABCMeta, abstractmethod
from googleapiclienthelpers.discovery import build_subresource
class ResourceBase(metaclass=ABCMeta):
@abstractmethod
def get(self):
pass
@abstractmethod
def update(self):
pass
class GoogleAPIResourceBase(ResourceBase, metaclass=ABCMeta):
# Names of the get and update methods. Most are the same but override in
# the Resource if necessary
get_method = "get"
update_method = "update"
def __init__(self, resource_data, **kwargs):
full_resource_path = "{}.{}".format(
self.service_name,
self.resource_path
)
self.service = build_subresource(
full_resource_path,
self.version,
**kwargs
)
self.resource_data = resource_data
def type(self):
return ".".join(["gcp", self.service_name, self.resource_path])
def get(self):
method = getattr(self.service, self.get_method)
return method(**self._get_request_args()).execute()
def update(self, body):
method = getattr(self.service, self.update_method)
return method(**self._update_request_args(body)).execute()
|
4de06bbbda30adb1b91ba0e35986761de4a0ed46
|
cluster/management/commands/update_jobs.py
|
cluster/management/commands/update_jobs.py
|
import logging
from django.core.management.base import BaseCommand
from django.contrib.auth import get_user_model
from cluster.utils import get_jobs
logger = logging.getLogger(__name__)
class Command(BaseCommand):
args = ''
help = 'Update Job data from clusters'
def handle(self, *args, **options):
logger.debug("Updating all the jobs")
for user in get_user_model().objects.all():
creds = get_user_model().credentials.all()
get_jobs(creds)
|
import logging
from django.core.management.base import BaseCommand
from django.contrib.auth import get_user_model
from cluster.utils import get_jobs
logger = logging.getLogger(__name__)
class Command(BaseCommand):
args = ''
help = 'Update Job data from clusters'
def handle(self, *args, **options):
logger.debug("Updating all the jobs")
for user in get_user_model().objects.all():
creds = user.credentials.all()
get_jobs(creds)
|
Fix typo where get_user_model() was used rather than the user
|
Fix typo where get_user_model() was used rather than the user
|
Python
|
mit
|
crcollins/chemtools-webapp,crcollins/chemtools-webapp,crcollins/chemtools-webapp,crcollins/chemtools-webapp,crcollins/chemtools-webapp
|
import logging

from django.core.management.base import BaseCommand
from django.contrib.auth import get_user_model

from cluster.utils import get_jobs

logger = logging.getLogger(__name__)


class Command(BaseCommand):
    """Management command that refreshes job data from every cluster."""

    args = ''
    help = 'Update Job data from clusters'

    def handle(self, *args, **options):
        logger.debug("Updating all the jobs")
        for user in get_user_model().objects.all():
            # Use this user's credentials. The original called
            # .credentials on the model class returned by
            # get_user_model(), which raises AttributeError -- related
            # managers live on instances, not on the class.
            creds = user.credentials.all()
            get_jobs(creds)
Fix typo where get_user_model() was used rather than the user
|
import logging
from django.core.management.base import BaseCommand
from django.contrib.auth import get_user_model
from cluster.utils import get_jobs
logger = logging.getLogger(__name__)
class Command(BaseCommand):
args = ''
help = 'Update Job data from clusters'
def handle(self, *args, **options):
logger.debug("Updating all the jobs")
for user in get_user_model().objects.all():
creds = user.credentials.all()
get_jobs(creds)
|
<commit_before>import logging
from django.core.management.base import BaseCommand
from django.contrib.auth import get_user_model
from cluster.utils import get_jobs
logger = logging.getLogger(__name__)
class Command(BaseCommand):
args = ''
help = 'Update Job data from clusters'
def handle(self, *args, **options):
logger.debug("Updating all the jobs")
for user in get_user_model().objects.all():
creds = get_user_model().credentials.all()
get_jobs(creds)
<commit_msg>Fix typo where get_user_model() was used rather than the user<commit_after>
|
import logging
from django.core.management.base import BaseCommand
from django.contrib.auth import get_user_model
from cluster.utils import get_jobs
logger = logging.getLogger(__name__)
class Command(BaseCommand):
args = ''
help = 'Update Job data from clusters'
def handle(self, *args, **options):
logger.debug("Updating all the jobs")
for user in get_user_model().objects.all():
creds = user.credentials.all()
get_jobs(creds)
|
import logging
from django.core.management.base import BaseCommand
from django.contrib.auth import get_user_model
from cluster.utils import get_jobs
logger = logging.getLogger(__name__)
class Command(BaseCommand):
args = ''
help = 'Update Job data from clusters'
def handle(self, *args, **options):
logger.debug("Updating all the jobs")
for user in get_user_model().objects.all():
creds = get_user_model().credentials.all()
get_jobs(creds)
Fix typo where get_user_model() was used rather than the userimport logging
from django.core.management.base import BaseCommand
from django.contrib.auth import get_user_model
from cluster.utils import get_jobs
logger = logging.getLogger(__name__)
class Command(BaseCommand):
args = ''
help = 'Update Job data from clusters'
def handle(self, *args, **options):
logger.debug("Updating all the jobs")
for user in get_user_model().objects.all():
creds = user.credentials.all()
get_jobs(creds)
|
<commit_before>import logging
from django.core.management.base import BaseCommand
from django.contrib.auth import get_user_model
from cluster.utils import get_jobs
logger = logging.getLogger(__name__)
class Command(BaseCommand):
args = ''
help = 'Update Job data from clusters'
def handle(self, *args, **options):
logger.debug("Updating all the jobs")
for user in get_user_model().objects.all():
creds = get_user_model().credentials.all()
get_jobs(creds)
<commit_msg>Fix typo where get_user_model() was used rather than the user<commit_after>import logging
from django.core.management.base import BaseCommand
from django.contrib.auth import get_user_model
from cluster.utils import get_jobs
logger = logging.getLogger(__name__)
class Command(BaseCommand):
args = ''
help = 'Update Job data from clusters'
def handle(self, *args, **options):
logger.debug("Updating all the jobs")
for user in get_user_model().objects.all():
creds = user.credentials.all()
get_jobs(creds)
|
df3ccf1b848ab3829f16ff0486c677763e5b383b
|
lc034_find_first_and_last_position_of_element_in_sorted_array.py
|
lc034_find_first_and_last_position_of_element_in_sorted_array.py
|
"""Leetcode 34. Find First and Last Position of Element in Sorted Array
Medium
Given an array of integers nums sorted in ascending order,
find the starting and ending position of a given target value.
Your algorithm's runtime complexity must be in the order of O(log n).
If the target is not found in the array, return [-1, -1].
Example 1:
Input: nums = [5,7,7,8,8,10], target = 8
Output: [3,4]
Example 2:
Input: nums = [5,7,7,8,8,10], target = 6
Output: [-1,-1]
"""
class Solution(object):
def searchRange(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
"""
pass
def main():
pass
if __name__ == '__main__':
main()
|
"""Leetcode 34. Find First and Last Position of Element in Sorted Array
Medium
Given an array of integers nums sorted in ascending order,
find the starting and ending position of a given target value.
Your algorithm's runtime complexity must be in the order of O(log n).
If the target is not found in the array, return [-1, -1].
Example 1:
Input: nums = [5,7,7,8,8,10], target = 8
Output: [3,4]
Example 2:
Input: nums = [5,7,7,8,8,10], target = 6
Output: [-1,-1]
"""
class Solution(object):
def searchRange(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
"""
# Apply to 2 binary searches to update result [-1, -1].
res = [-1, -1]
if not nums:
return res
# Apply the 1st binary search to search target's left position.
first, last = 0, len(nums) - 1
while first < last:
mid = first + (last - first) // 2
if nums[mid] < target:
first = mid + 1
else:
last = mid
if nums[first] != target:
return res
else:
res[0] = first
# Apply the 2nd binary search to search target's right position.
last = len(nums) - 1
while first < last:
# Make mid biased to the right.
mid = first + (last - first) // 2 + 1
if nums[mid] > target:
last = mid - 1
else:
first = mid
res[1] = last
return res
def main():
# Ans: [3,4]
nums = [5,7,7,8,8,10]
target = 8
print Solution().searchRange(nums, target)
# Ans: [-1,-1]
nums = [5,7,7,8,8,10]
target = 6
print Solution().searchRange(nums, target)
if __name__ == '__main__':
main()
|
Complete sol by 2 binary searches
|
Complete sol by 2 binary searches
|
Python
|
bsd-2-clause
|
bowen0701/algorithms_data_structures
|
"""Leetcode 34. Find First and Last Position of Element in Sorted Array
Medium
Given an array of integers nums sorted in ascending order,
find the starting and ending position of a given target value.
Your algorithm's runtime complexity must be in the order of O(log n).
If the target is not found in the array, return [-1, -1].
Example 1:
Input: nums = [5,7,7,8,8,10], target = 8
Output: [3,4]
Example 2:
Input: nums = [5,7,7,8,8,10], target = 6
Output: [-1,-1]
"""
class Solution(object):
def searchRange(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
"""
pass
def main():
pass
if __name__ == '__main__':
main()
Complete sol by 2 binary searches
|
"""Leetcode 34. Find First and Last Position of Element in Sorted Array
Medium
Given an array of integers nums sorted in ascending order,
find the starting and ending position of a given target value.
Your algorithm's runtime complexity must be in the order of O(log n).
If the target is not found in the array, return [-1, -1].
Example 1:
Input: nums = [5,7,7,8,8,10], target = 8
Output: [3,4]
Example 2:
Input: nums = [5,7,7,8,8,10], target = 6
Output: [-1,-1]
"""
class Solution(object):
def searchRange(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
"""
# Apply to 2 binary searches to update result [-1, -1].
res = [-1, -1]
if not nums:
return res
# Apply the 1st binary search to search target's left position.
first, last = 0, len(nums) - 1
while first < last:
mid = first + (last - first) // 2
if nums[mid] < target:
first = mid + 1
else:
last = mid
if nums[first] != target:
return res
else:
res[0] = first
# Apply the 2nd binary search to search target's right position.
last = len(nums) - 1
while first < last:
# Make mid biased to the right.
mid = first + (last - first) // 2 + 1
if nums[mid] > target:
last = mid - 1
else:
first = mid
res[1] = last
return res
def main():
# Ans: [3,4]
nums = [5,7,7,8,8,10]
target = 8
print Solution().searchRange(nums, target)
# Ans: [-1,-1]
nums = [5,7,7,8,8,10]
target = 6
print Solution().searchRange(nums, target)
if __name__ == '__main__':
main()
|
<commit_before>"""Leetcode 34. Find First and Last Position of Element in Sorted Array
Medium
Given an array of integers nums sorted in ascending order,
find the starting and ending position of a given target value.
Your algorithm's runtime complexity must be in the order of O(log n).
If the target is not found in the array, return [-1, -1].
Example 1:
Input: nums = [5,7,7,8,8,10], target = 8
Output: [3,4]
Example 2:
Input: nums = [5,7,7,8,8,10], target = 6
Output: [-1,-1]
"""
class Solution(object):
def searchRange(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
"""
pass
def main():
pass
if __name__ == '__main__':
main()
<commit_msg>Complete sol by 2 binary searches<commit_after>
|
"""Leetcode 34. Find First and Last Position of Element in Sorted Array
Medium
Given an array of integers nums sorted in ascending order,
find the starting and ending position of a given target value.
Your algorithm's runtime complexity must be in the order of O(log n).
If the target is not found in the array, return [-1, -1].
Example 1:
Input: nums = [5,7,7,8,8,10], target = 8
Output: [3,4]
Example 2:
Input: nums = [5,7,7,8,8,10], target = 6
Output: [-1,-1]
"""
class Solution(object):
def searchRange(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
"""
# Apply to 2 binary searches to update result [-1, -1].
res = [-1, -1]
if not nums:
return res
# Apply the 1st binary search to search target's left position.
first, last = 0, len(nums) - 1
while first < last:
mid = first + (last - first) // 2
if nums[mid] < target:
first = mid + 1
else:
last = mid
if nums[first] != target:
return res
else:
res[0] = first
# Apply the 2nd binary search to search target's right position.
last = len(nums) - 1
while first < last:
# Make mid biased to the right.
mid = first + (last - first) // 2 + 1
if nums[mid] > target:
last = mid - 1
else:
first = mid
res[1] = last
return res
def main():
# Ans: [3,4]
nums = [5,7,7,8,8,10]
target = 8
print Solution().searchRange(nums, target)
# Ans: [-1,-1]
nums = [5,7,7,8,8,10]
target = 6
print Solution().searchRange(nums, target)
if __name__ == '__main__':
main()
|
"""Leetcode 34. Find First and Last Position of Element in Sorted Array
Medium
Given an array of integers nums sorted in ascending order,
find the starting and ending position of a given target value.
Your algorithm's runtime complexity must be in the order of O(log n).
If the target is not found in the array, return [-1, -1].
Example 1:
Input: nums = [5,7,7,8,8,10], target = 8
Output: [3,4]
Example 2:
Input: nums = [5,7,7,8,8,10], target = 6
Output: [-1,-1]
"""
class Solution(object):
def searchRange(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
"""
pass
def main():
pass
if __name__ == '__main__':
main()
Complete sol by 2 binary searches"""Leetcode 34. Find First and Last Position of Element in Sorted Array
Medium
Given an array of integers nums sorted in ascending order,
find the starting and ending position of a given target value.
Your algorithm's runtime complexity must be in the order of O(log n).
If the target is not found in the array, return [-1, -1].
Example 1:
Input: nums = [5,7,7,8,8,10], target = 8
Output: [3,4]
Example 2:
Input: nums = [5,7,7,8,8,10], target = 6
Output: [-1,-1]
"""
class Solution(object):
def searchRange(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
"""
# Apply to 2 binary searches to update result [-1, -1].
res = [-1, -1]
if not nums:
return res
# Apply the 1st binary search to search target's left position.
first, last = 0, len(nums) - 1
while first < last:
mid = first + (last - first) // 2
if nums[mid] < target:
first = mid + 1
else:
last = mid
if nums[first] != target:
return res
else:
res[0] = first
# Apply the 2nd binary search to search target's right position.
last = len(nums) - 1
while first < last:
# Make mid biased to the right.
mid = first + (last - first) // 2 + 1
if nums[mid] > target:
last = mid - 1
else:
first = mid
res[1] = last
return res
def main():
# Ans: [3,4]
nums = [5,7,7,8,8,10]
target = 8
print Solution().searchRange(nums, target)
# Ans: [-1,-1]
nums = [5,7,7,8,8,10]
target = 6
print Solution().searchRange(nums, target)
if __name__ == '__main__':
main()
|
<commit_before>"""Leetcode 34. Find First and Last Position of Element in Sorted Array
Medium
Given an array of integers nums sorted in ascending order,
find the starting and ending position of a given target value.
Your algorithm's runtime complexity must be in the order of O(log n).
If the target is not found in the array, return [-1, -1].
Example 1:
Input: nums = [5,7,7,8,8,10], target = 8
Output: [3,4]
Example 2:
Input: nums = [5,7,7,8,8,10], target = 6
Output: [-1,-1]
"""
class Solution(object):
def searchRange(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
"""
pass
def main():
pass
if __name__ == '__main__':
main()
<commit_msg>Complete sol by 2 binary searches<commit_after>"""Leetcode 34. Find First and Last Position of Element in Sorted Array
Medium
Given an array of integers nums sorted in ascending order,
find the starting and ending position of a given target value.
Your algorithm's runtime complexity must be in the order of O(log n).
If the target is not found in the array, return [-1, -1].
Example 1:
Input: nums = [5,7,7,8,8,10], target = 8
Output: [3,4]
Example 2:
Input: nums = [5,7,7,8,8,10], target = 6
Output: [-1,-1]
"""
class Solution(object):
def searchRange(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
"""
# Apply to 2 binary searches to update result [-1, -1].
res = [-1, -1]
if not nums:
return res
# Apply the 1st binary search to search target's left position.
first, last = 0, len(nums) - 1
while first < last:
mid = first + (last - first) // 2
if nums[mid] < target:
first = mid + 1
else:
last = mid
if nums[first] != target:
return res
else:
res[0] = first
# Apply the 2nd binary search to search target's right position.
last = len(nums) - 1
while first < last:
# Make mid biased to the right.
mid = first + (last - first) // 2 + 1
if nums[mid] > target:
last = mid - 1
else:
first = mid
res[1] = last
return res
def main():
# Ans: [3,4]
nums = [5,7,7,8,8,10]
target = 8
print Solution().searchRange(nums, target)
# Ans: [-1,-1]
nums = [5,7,7,8,8,10]
target = 6
print Solution().searchRange(nums, target)
if __name__ == '__main__':
main()
|
b98aff1d853b5a92f25dad55571d81623f524d95
|
cozyfuse/interface/app_modified.py
|
cozyfuse/interface/app_modified.py
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
import wx
import gettext
from CozyFrame import CozyFrame, TaskBarIcon
if __name__ == "__main__":
gettext.install("app") # replace with the appropriate catalog name
app = wx.PySimpleApp(0)
wx.InitAllImageHandlers()
cozy_frame = CozyFrame(None, wx.ID_ANY, "")
app.SetTopWindow(cozy_frame)
cozy_frame.Show()
taskBarIcon()
app.MainLoop()
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
import wx
import gettext
from CozyFrame import CozyFrame, TaskBarIcon
if __name__ == "__main__":
gettext.install("app") # replace with the appropriate catalog name
app = wx.PySimpleApp(0)
wx.InitAllImageHandlers()
cozy_frame = CozyFrame(None, wx.ID_ANY, "")
app.SetTopWindow(cozy_frame)
cozy_frame.Show()
TaskBarIcon()
app.MainLoop()
|
Integrate tray icon in the app
|
[ref] Integrate tray icon in the app
|
Python
|
bsd-3-clause
|
cozy-labs/cozy-fuse
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
import wx
import gettext
from CozyFrame import CozyFrame, TaskBarIcon
if __name__ == "__main__":
gettext.install("app") # replace with the appropriate catalog name
app = wx.PySimpleApp(0)
wx.InitAllImageHandlers()
cozy_frame = CozyFrame(None, wx.ID_ANY, "")
app.SetTopWindow(cozy_frame)
cozy_frame.Show()
taskBarIcon()
app.MainLoop()
[ref] Integrate tray icon in the app
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
import wx
import gettext
from CozyFrame import CozyFrame, TaskBarIcon
if __name__ == "__main__":
gettext.install("app") # replace with the appropriate catalog name
app = wx.PySimpleApp(0)
wx.InitAllImageHandlers()
cozy_frame = CozyFrame(None, wx.ID_ANY, "")
app.SetTopWindow(cozy_frame)
cozy_frame.Show()
TaskBarIcon()
app.MainLoop()
|
<commit_before>#!/usr/bin/env python
# -*- coding: UTF-8 -*-
import wx
import gettext
from CozyFrame import CozyFrame, TaskBarIcon
if __name__ == "__main__":
gettext.install("app") # replace with the appropriate catalog name
app = wx.PySimpleApp(0)
wx.InitAllImageHandlers()
cozy_frame = CozyFrame(None, wx.ID_ANY, "")
app.SetTopWindow(cozy_frame)
cozy_frame.Show()
taskBarIcon()
app.MainLoop()
<commit_msg>[ref] Integrate tray icon in the app<commit_after>
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
import wx
import gettext
from CozyFrame import CozyFrame, TaskBarIcon
if __name__ == "__main__":
gettext.install("app") # replace with the appropriate catalog name
app = wx.PySimpleApp(0)
wx.InitAllImageHandlers()
cozy_frame = CozyFrame(None, wx.ID_ANY, "")
app.SetTopWindow(cozy_frame)
cozy_frame.Show()
TaskBarIcon()
app.MainLoop()
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
import wx
import gettext
from CozyFrame import CozyFrame, TaskBarIcon
if __name__ == "__main__":
gettext.install("app") # replace with the appropriate catalog name
app = wx.PySimpleApp(0)
wx.InitAllImageHandlers()
cozy_frame = CozyFrame(None, wx.ID_ANY, "")
app.SetTopWindow(cozy_frame)
cozy_frame.Show()
taskBarIcon()
app.MainLoop()
[ref] Integrate tray icon in the app#!/usr/bin/env python
# -*- coding: UTF-8 -*-
import wx
import gettext
from CozyFrame import CozyFrame, TaskBarIcon
if __name__ == "__main__":
gettext.install("app") # replace with the appropriate catalog name
app = wx.PySimpleApp(0)
wx.InitAllImageHandlers()
cozy_frame = CozyFrame(None, wx.ID_ANY, "")
app.SetTopWindow(cozy_frame)
cozy_frame.Show()
TaskBarIcon()
app.MainLoop()
|
<commit_before>#!/usr/bin/env python
# -*- coding: UTF-8 -*-
import wx
import gettext
from CozyFrame import CozyFrame, TaskBarIcon
if __name__ == "__main__":
gettext.install("app") # replace with the appropriate catalog name
app = wx.PySimpleApp(0)
wx.InitAllImageHandlers()
cozy_frame = CozyFrame(None, wx.ID_ANY, "")
app.SetTopWindow(cozy_frame)
cozy_frame.Show()
taskBarIcon()
app.MainLoop()
<commit_msg>[ref] Integrate tray icon in the app<commit_after>#!/usr/bin/env python
# -*- coding: UTF-8 -*-
import wx
import gettext
from CozyFrame import CozyFrame, TaskBarIcon
if __name__ == "__main__":
gettext.install("app") # replace with the appropriate catalog name
app = wx.PySimpleApp(0)
wx.InitAllImageHandlers()
cozy_frame = CozyFrame(None, wx.ID_ANY, "")
app.SetTopWindow(cozy_frame)
cozy_frame.Show()
TaskBarIcon()
app.MainLoop()
|
8c9f1c7722f1e35c440ba3540153e2cfe8ad9187
|
scikits/learn/tests/test_base.py
|
scikits/learn/tests/test_base.py
|
from nose.tools import assert_true, assert_false, assert_equal
from ..base import BaseEstimator
class MyEstimator(BaseEstimator):
def __init__(self, l1=0):
self.l1 = l1
def test_renew():
"""Tests that BaseEstimator._new() creates a correct deep copy.
We create an estimator, make a copy of its original state
(which, in this case, is the current state of the setimator),
and check that the obtained copy is a correct deep copy.
"""
from scikits.learn.feature_selection import SelectFpr, f_classif
selector = SelectFpr(f_classif, alpha=0.1)
new_selector = selector._reinit()
assert_true(selector is not new_selector)
assert_equal(selector._get_params(), new_selector._get_params())
def test_renew_2():
"""Tests that BaseEstimator._new() doesn't copy everything.
We first create an estimator, give it an own attribute, and
make a copy of its original state. Then we check that the copy doesn't have
the specific attribute we manually added to the initial estimator.
"""
from scikits.learn.feature_selection import SelectFpr, f_classif
selector = SelectFpr(f_classif, alpha=0.1)
selector.own_attribute = "test"
new_selector = selector._reinit()
assert_false(hasattr(new_selector, "own_attribute"))
def test_repr():
""" Smoke test the repr of the
"""
my_estimator = MyEstimator()
repr(my_estimator)
|
from nose.tools import assert_true, assert_false, assert_equal
from ..base import BaseEstimator
class MyEstimator(BaseEstimator):
def __init__(self, l1=0):
self.l1 = l1
def test_reinit():
"""Tests that BaseEstimator._new() creates a correct deep copy.
We create an estimator, make a copy of its original state
(which, in this case, is the current state of the setimator),
and check that the obtained copy is a correct deep copy.
"""
from scikits.learn.feature_selection import SelectFpr, f_classif
selector = SelectFpr(f_classif, alpha=0.1)
new_selector = selector._reinit()
assert_true(selector is not new_selector)
assert_equal(selector._get_params(), new_selector._get_params())
def test_reinit_2():
"""Tests that BaseEstimator._new() doesn't copy everything.
We first create an estimator, give it an own attribute, and
make a copy of its original state. Then we check that the copy doesn't have
the specific attribute we manually added to the initial estimator.
"""
from scikits.learn.feature_selection import SelectFpr, f_classif
selector = SelectFpr(f_classif, alpha=0.1)
selector.own_attribute = "test"
new_selector = selector._reinit()
assert_false(hasattr(new_selector, "own_attribute"))
def test_repr():
""" Smoke test the repr of the
"""
my_estimator = MyEstimator()
repr(my_estimator)
|
Change test name for the _reinit() method.
|
Change test name for the _reinit() method.
|
Python
|
bsd-3-clause
|
massmutual/scikit-learn,tosolveit/scikit-learn,idlead/scikit-learn,russel1237/scikit-learn,manhhomienbienthuy/scikit-learn,ChanChiChoi/scikit-learn,Barmaley-exe/scikit-learn,equialgo/scikit-learn,maheshakya/scikit-learn,PatrickChrist/scikit-learn,liyu1990/sklearn,ahoyosid/scikit-learn,huobaowangxi/scikit-learn,kashif/scikit-learn,sinhrks/scikit-learn,bigdataelephants/scikit-learn,Fireblend/scikit-learn,AIML/scikit-learn,murali-munna/scikit-learn,billy-inn/scikit-learn,0asa/scikit-learn,phdowling/scikit-learn,btabibian/scikit-learn,kylerbrown/scikit-learn,Barmaley-exe/scikit-learn,ahoyosid/scikit-learn,gotomypc/scikit-learn,petosegan/scikit-learn,liangz0707/scikit-learn,jorik041/scikit-learn,HolgerPeters/scikit-learn,ephes/scikit-learn,idlead/scikit-learn,bhargav/scikit-learn,zaxtax/scikit-learn,RomainBrault/scikit-learn,mwv/scikit-learn,wazeerzulfikar/scikit-learn,jorge2703/scikit-learn,Achuth17/scikit-learn,nikitasingh981/scikit-learn,raghavrv/scikit-learn,harshaneelhg/scikit-learn,yunfeilu/scikit-learn,YinongLong/scikit-learn,aflaxman/scikit-learn,ycaihua/scikit-learn,anurag313/scikit-learn,IndraVikas/scikit-learn,cdegroc/scikit-learn,yanlend/scikit-learn,ZENGXH/scikit-learn,Akshay0724/scikit-learn,shyamalschandra/scikit-learn,rishikksh20/scikit-learn,abimannans/scikit-learn,CforED/Machine-Learning,yanlend/scikit-learn,xuewei4d/scikit-learn,mwv/scikit-learn,henrykironde/scikit-learn,wanggang3333/scikit-learn,sanketloke/scikit-learn,Achuth17/scikit-learn,mikebenfield/scikit-learn,h2educ/scikit-learn,toastedcornflakes/scikit-learn,wanggang3333/scikit-learn,etkirsch/scikit-learn,arahuja/scikit-learn,zaxtax/scikit-learn,vortex-ape/scikit-learn,OshynSong/scikit-learn,larsmans/scikit-learn,Titan-C/scikit-learn,florian-f/sklearn,mxjl620/scikit-learn,anirudhjayaraman/scikit-learn,jm-begon/scikit-learn,khkaminska/scikit-learn,shangwuhencc/scikit-learn,bthirion/scikit-learn,quheng/scikit-learn,nesterione/scikit-learn,lbishal/scikit-learn,jseabold/scikit-learn,alexsavio/scik
it-learn,cdegroc/scikit-learn,alvarofierroclavero/scikit-learn,costypetrisor/scikit-learn,sumspr/scikit-learn,wzbozon/scikit-learn,themrmax/scikit-learn,bthirion/scikit-learn,lin-credible/scikit-learn,vivekmishra1991/scikit-learn,herilalaina/scikit-learn,thientu/scikit-learn,tosolveit/scikit-learn,madjelan/scikit-learn,depet/scikit-learn,jereze/scikit-learn,spallavolu/scikit-learn,davidgbe/scikit-learn,MohammedWasim/scikit-learn,ilo10/scikit-learn,3manuek/scikit-learn,cauchycui/scikit-learn,kylerbrown/scikit-learn,jayflo/scikit-learn,shahankhatch/scikit-learn,beepee14/scikit-learn,yask123/scikit-learn,BiaDarkia/scikit-learn,devanshdalal/scikit-learn,RayMick/scikit-learn,chrsrds/scikit-learn,xiaoxiamii/scikit-learn,wanggang3333/scikit-learn,ephes/scikit-learn,mojoboss/scikit-learn,mxjl620/scikit-learn,shusenl/scikit-learn,evgchz/scikit-learn,jjx02230808/project0223,abhishekgahlot/scikit-learn,akionakamura/scikit-learn,vigilv/scikit-learn,fredhusser/scikit-learn,mblondel/scikit-learn,tmhm/scikit-learn,Akshay0724/scikit-learn,raghavrv/scikit-learn,cwu2011/scikit-learn,mattgiguere/scikit-learn,madjelan/scikit-learn,glennq/scikit-learn,saiwing-yeung/scikit-learn,jakobworldpeace/scikit-learn,hrjn/scikit-learn,aabadie/scikit-learn,tdhopper/scikit-learn,siutanwong/scikit-learn,PatrickChrist/scikit-learn,Achuth17/scikit-learn,heli522/scikit-learn,RomainBrault/scikit-learn,shangwuhencc/scikit-learn,mrshu/scikit-learn,jjx02230808/project0223,rahuldhote/scikit-learn,mattgiguere/scikit-learn,nvoron23/scikit-learn,jmschrei/scikit-learn,ominux/scikit-learn,tosolveit/scikit-learn,belltailjp/scikit-learn,poryfly/scikit-learn,yunfeilu/scikit-learn,terkkila/scikit-learn,vortex-ape/scikit-learn,eickenberg/scikit-learn,shenzebang/scikit-learn,Garrett-R/scikit-learn,zuku1985/scikit-learn,MechCoder/scikit-learn,iismd17/scikit-learn,B3AU/waveTree,zihua/scikit-learn,xubenben/scikit-learn,Barmaley-exe/scikit-learn,pv/scikit-learn,Djabbz/scikit-learn,appapantula/scikit-learn,andaag/scikit-lea
rn,lin-credible/scikit-learn,smartscheduling/scikit-learn-categorical-tree,petosegan/scikit-learn,MechCoder/scikit-learn,jkarnows/scikit-learn,IssamLaradji/scikit-learn,yask123/scikit-learn,mattgiguere/scikit-learn,eg-zhang/scikit-learn,voxlol/scikit-learn,arjoly/scikit-learn,tomlof/scikit-learn,rajat1994/scikit-learn,jakirkham/scikit-learn,mjudsp/Tsallis,RPGOne/scikit-learn,untom/scikit-learn,dhruv13J/scikit-learn,clemkoa/scikit-learn,kmike/scikit-learn,andaag/scikit-learn,bhargav/scikit-learn,rrohan/scikit-learn,sonnyhu/scikit-learn,xiaoxiamii/scikit-learn,liberatorqjw/scikit-learn,mhdella/scikit-learn,dhruv13J/scikit-learn,bhargav/scikit-learn,AlexanderFabisch/scikit-learn,frank-tancf/scikit-learn,potash/scikit-learn,AnasGhrab/scikit-learn,jpautom/scikit-learn,loli/sklearn-ensembletrees,harshaneelhg/scikit-learn,espg/scikit-learn,kmike/scikit-learn,mikebenfield/scikit-learn,fzalkow/scikit-learn,hdmetor/scikit-learn,shyamalschandra/scikit-learn,sonnyhu/scikit-learn,mjgrav2001/scikit-learn,kagayakidan/scikit-learn,shikhardb/scikit-learn,LohithBlaze/scikit-learn,xwolf12/scikit-learn,pythonvietnam/scikit-learn,jzt5132/scikit-learn,hitszxp/scikit-learn,Garrett-R/scikit-learn,pratapvardhan/scikit-learn,abhishekgahlot/scikit-learn,Djabbz/scikit-learn,rahul-c1/scikit-learn,schets/scikit-learn,evgchz/scikit-learn,aabadie/scikit-learn,jseabold/scikit-learn,trankmichael/scikit-learn,mojoboss/scikit-learn,JosmanPS/scikit-learn,vinayak-mehta/scikit-learn,wlamond/scikit-learn,evgchz/scikit-learn,espg/scikit-learn,dsquareindia/scikit-learn,aabadie/scikit-learn,cdegroc/scikit-learn,krez13/scikit-learn,lin-credible/scikit-learn,jblackburne/scikit-learn,ElDeveloper/scikit-learn,AlexRobson/scikit-learn,kevin-intel/scikit-learn,wazeerzulfikar/scikit-learn,huzq/scikit-learn,xyguo/scikit-learn,arabenjamin/scikit-learn,arahuja/scikit-learn,glennq/scikit-learn,aetilley/scikit-learn,trungnt13/scikit-learn,nrhine1/scikit-learn,bthirion/scikit-learn,kashif/scikit-learn,hitszxp/scikit-learn
,DonBeo/scikit-learn,ogrisel/scikit-learn,cwu2011/scikit-learn,ChanChiChoi/scikit-learn,hlin117/scikit-learn,depet/scikit-learn,nmayorov/scikit-learn,Titan-C/scikit-learn,xzh86/scikit-learn,sarahgrogan/scikit-learn,untom/scikit-learn,PatrickOReilly/scikit-learn,vinayak-mehta/scikit-learn,wazeerzulfikar/scikit-learn,Aasmi/scikit-learn,Jimmy-Morzaria/scikit-learn,DSLituiev/scikit-learn,herilalaina/scikit-learn,hsuantien/scikit-learn,jaidevd/scikit-learn,sumspr/scikit-learn,rexshihaoren/scikit-learn,simon-pepin/scikit-learn,saiwing-yeung/scikit-learn,ephes/scikit-learn,theoryno3/scikit-learn,mjudsp/Tsallis,massmutual/scikit-learn,ssaeger/scikit-learn,shusenl/scikit-learn,loli/semisupervisedforests,abhishekkrthakur/scikit-learn,lenovor/scikit-learn,btabibian/scikit-learn,macks22/scikit-learn,Obus/scikit-learn,Lawrence-Liu/scikit-learn,rrohan/scikit-learn,lucidfrontier45/scikit-learn,q1ang/scikit-learn,joernhees/scikit-learn,ivannz/scikit-learn,CVML/scikit-learn,0x0all/scikit-learn,fengzhyuan/scikit-learn,ashhher3/scikit-learn,walterreade/scikit-learn,heli522/scikit-learn,Adai0808/scikit-learn,rohanp/scikit-learn,ldirer/scikit-learn,3manuek/scikit-learn,zorojean/scikit-learn,terkkila/scikit-learn,wzbozon/scikit-learn,murali-munna/scikit-learn,ElDeveloper/scikit-learn,russel1237/scikit-learn,anirudhjayaraman/scikit-learn,qifeigit/scikit-learn,billy-inn/scikit-learn,mfjb/scikit-learn,mjudsp/Tsallis,yyjiang/scikit-learn,treycausey/scikit-learn,espg/scikit-learn,MartinSavc/scikit-learn,mattilyra/scikit-learn,moutai/scikit-learn,cainiaocome/scikit-learn,nikitasingh981/scikit-learn,Srisai85/scikit-learn,dhruv13J/scikit-learn,mjgrav2001/scikit-learn,JsNoNo/scikit-learn,equialgo/scikit-learn,vortex-ape/scikit-learn,cl4rke/scikit-learn,idlead/scikit-learn,0x0all/scikit-learn,giorgiop/scikit-learn,kagayakidan/scikit-learn,Garrett-R/scikit-learn,fabianp/scikit-learn,Myasuka/scikit-learn,Clyde-fare/scikit-learn,xavierwu/scikit-learn,sergeyf/scikit-learn,ky822/scikit-learn,fzalkow/sc
ikit-learn,saiwing-yeung/scikit-learn,xavierwu/scikit-learn,ChanChiChoi/scikit-learn,cl4rke/scikit-learn,MartinSavc/scikit-learn,mlyundin/scikit-learn,jakobworldpeace/scikit-learn,xyguo/scikit-learn,phdowling/scikit-learn,andrewnc/scikit-learn,mrshu/scikit-learn,zorroblue/scikit-learn,jakirkham/scikit-learn,shahankhatch/scikit-learn,sergeyf/scikit-learn,nelson-liu/scikit-learn,henrykironde/scikit-learn,ElDeveloper/scikit-learn,JosmanPS/scikit-learn,kevin-intel/scikit-learn,DonBeo/scikit-learn,jakobworldpeace/scikit-learn,AlexanderFabisch/scikit-learn,manashmndl/scikit-learn,jjx02230808/project0223,PatrickOReilly/scikit-learn,samzhang111/scikit-learn,petosegan/scikit-learn,aminert/scikit-learn,pompiduskus/scikit-learn,mehdidc/scikit-learn,waterponey/scikit-learn,hlin117/scikit-learn,rahuldhote/scikit-learn,betatim/scikit-learn,manashmndl/scikit-learn,fabianp/scikit-learn,joshloyal/scikit-learn,eg-zhang/scikit-learn,cybernet14/scikit-learn,lazywei/scikit-learn,trungnt13/scikit-learn,elkingtonmcb/scikit-learn,pompiduskus/scikit-learn,tdhopper/scikit-learn,clemkoa/scikit-learn,alexsavio/scikit-learn,ngoix/OCRF,lbishal/scikit-learn,trankmichael/scikit-learn,fredhusser/scikit-learn,ElDeveloper/scikit-learn,BiaDarkia/scikit-learn,anurag313/scikit-learn,hlin117/scikit-learn,thilbern/scikit-learn,shikhardb/scikit-learn,massmutual/scikit-learn,robin-lai/scikit-learn,ZENGXH/scikit-learn,herilalaina/scikit-learn,mfjb/scikit-learn,ningchi/scikit-learn,mxjl620/scikit-learn,zorojean/scikit-learn,appapantula/scikit-learn,costypetrisor/scikit-learn,abhishekkrthakur/scikit-learn,AlexanderFabisch/scikit-learn,zorroblue/scikit-learn,hainm/scikit-learn,hainm/scikit-learn,rvraghav93/scikit-learn,samzhang111/scikit-learn,russel1237/scikit-learn,adamgreenhall/scikit-learn,billy-inn/scikit-learn,luo66/scikit-learn,chrsrds/scikit-learn,vybstat/scikit-learn,toastedcornflakes/scikit-learn,lucidfrontier45/scikit-learn,plissonf/scikit-learn,murali-munna/scikit-learn,pnedunuri/scikit-learn,mrshu/
scikit-learn,robbymeals/scikit-learn,shahankhatch/scikit-learn,ndingwall/scikit-learn,fyffyt/scikit-learn,lesteve/scikit-learn,bikong2/scikit-learn,cybernet14/scikit-learn,nesterione/scikit-learn,siutanwong/scikit-learn,mhue/scikit-learn,jzt5132/scikit-learn,Adai0808/scikit-learn,chrsrds/scikit-learn,ssaeger/scikit-learn,anntzer/scikit-learn,cauchycui/scikit-learn,giorgiop/scikit-learn,glemaitre/scikit-learn,hdmetor/scikit-learn,r-mart/scikit-learn,meduz/scikit-learn,LiaoPan/scikit-learn,zorojean/scikit-learn,dingocuster/scikit-learn,anurag313/scikit-learn,scikit-learn/scikit-learn,roxyboy/scikit-learn,Clyde-fare/scikit-learn,samzhang111/scikit-learn,scikit-learn/scikit-learn,CforED/Machine-Learning,cainiaocome/scikit-learn,chrisburr/scikit-learn,dsquareindia/scikit-learn,wzbozon/scikit-learn,kmike/scikit-learn,mhue/scikit-learn,tosolveit/scikit-learn,Clyde-fare/scikit-learn,alexeyum/scikit-learn,vortex-ape/scikit-learn,kaichogami/scikit-learn,rvraghav93/scikit-learn,rahul-c1/scikit-learn,shangwuhencc/scikit-learn,joernhees/scikit-learn,nvoron23/scikit-learn,stylianos-kampakis/scikit-learn,jlegendary/scikit-learn,voxlol/scikit-learn,betatim/scikit-learn,ldirer/scikit-learn,btabibian/scikit-learn,chrisburr/scikit-learn,AIML/scikit-learn,UNR-AERIAL/scikit-learn,florian-f/sklearn,scikit-learn/scikit-learn,ashhher3/scikit-learn,hsiaoyi0504/scikit-learn,gotomypc/scikit-learn,mrshu/scikit-learn,ogrisel/scikit-learn,MartinSavc/scikit-learn,Akshay0724/scikit-learn,lesteve/scikit-learn,thientu/scikit-learn,lenovor/scikit-learn,zorroblue/scikit-learn,sonnyhu/scikit-learn,imaculate/scikit-learn,siutanwong/scikit-learn,TomDLT/scikit-learn,PrashntS/scikit-learn,pkruskal/scikit-learn,glennq/scikit-learn,ankurankan/scikit-learn,treycausey/scikit-learn,ilyes14/scikit-learn,olologin/scikit-learn,ankurankan/scikit-learn,kagayakidan/scikit-learn,glouppe/scikit-learn,CVML/scikit-learn,ashhher3/scikit-learn,ngoix/OCRF,yyjiang/scikit-learn,vybstat/scikit-learn,murali-munna/scikit-learn,s
martscheduling/scikit-learn-categorical-tree,jlegendary/scikit-learn,stylianos-kampakis/scikit-learn,shenzebang/scikit-learn,Aasmi/scikit-learn,mattilyra/scikit-learn,huobaowangxi/scikit-learn,hrjn/scikit-learn,glouppe/scikit-learn,depet/scikit-learn,plissonf/scikit-learn,theoryno3/scikit-learn,poryfly/scikit-learn,maheshakya/scikit-learn,belltailjp/scikit-learn,altairpearl/scikit-learn,kashif/scikit-learn,466152112/scikit-learn,hdmetor/scikit-learn,ky822/scikit-learn,florian-f/sklearn,tdhopper/scikit-learn,fabioticconi/scikit-learn,rohanp/scikit-learn,sonnyhu/scikit-learn,NunoEdgarGub1/scikit-learn,NunoEdgarGub1/scikit-learn,joernhees/scikit-learn,carrillo/scikit-learn,vybstat/scikit-learn,devanshdalal/scikit-learn,hugobowne/scikit-learn,kaichogami/scikit-learn,ishanic/scikit-learn,sarahgrogan/scikit-learn,nelson-liu/scikit-learn,fabioticconi/scikit-learn,aetilley/scikit-learn,liberatorqjw/scikit-learn,imaculate/scikit-learn,scikit-learn/scikit-learn,Windy-Ground/scikit-learn,Fireblend/scikit-learn,alexeyum/scikit-learn,vermouthmjl/scikit-learn,sgenoud/scikit-learn,alexsavio/scikit-learn,kashif/scikit-learn,anirudhjayaraman/scikit-learn,larsmans/scikit-learn,manashmndl/scikit-learn,raghavrv/scikit-learn,kaichogami/scikit-learn,jereze/scikit-learn,ZenDevelopmentSystems/scikit-learn,justincassidy/scikit-learn,MatthieuBizien/scikit-learn,mjgrav2001/scikit-learn,mugizico/scikit-learn,MatthieuBizien/scikit-learn,aflaxman/scikit-learn,jm-begon/scikit-learn,Aasmi/scikit-learn,AIML/scikit-learn,ndingwall/scikit-learn,arahuja/scikit-learn,B3AU/waveTree,waterponey/scikit-learn,pompiduskus/scikit-learn,mblondel/scikit-learn,schets/scikit-learn,pythonvietnam/scikit-learn,AlexandreAbraham/scikit-learn,elkingtonmcb/scikit-learn,quheng/scikit-learn,aewhatley/scikit-learn,hitszxp/scikit-learn,wazeerzulfikar/scikit-learn,3manuek/scikit-learn,aetilley/scikit-learn,ilyes14/scikit-learn,jpautom/scikit-learn,imaculate/scikit-learn,fredhusser/scikit-learn,trankmichael/scikit-learn,DSLit
uiev/scikit-learn,rsivapr/scikit-learn,xavierwu/scikit-learn,ycaihua/scikit-learn,voxlol/scikit-learn,zhenv5/scikit-learn,vivekmishra1991/scikit-learn,joernhees/scikit-learn,nomadcube/scikit-learn,idlead/scikit-learn,IndraVikas/scikit-learn,shahankhatch/scikit-learn,plissonf/scikit-learn,ldirer/scikit-learn,RachitKansal/scikit-learn,ankurankan/scikit-learn,zaxtax/scikit-learn,Djabbz/scikit-learn,UNR-AERIAL/scikit-learn,mugizico/scikit-learn,bhargav/scikit-learn,Windy-Ground/scikit-learn,DSLituiev/scikit-learn,andaag/scikit-learn,RayMick/scikit-learn,vermouthmjl/scikit-learn,robbymeals/scikit-learn,MartinDelzant/scikit-learn,bnaul/scikit-learn,procoder317/scikit-learn,marcocaccin/scikit-learn,MartinDelzant/scikit-learn,abhishekkrthakur/scikit-learn,vybstat/scikit-learn,IssamLaradji/scikit-learn,LiaoPan/scikit-learn,meduz/scikit-learn,chrsrds/scikit-learn,jkarnows/scikit-learn,dsquareindia/scikit-learn,sinhrks/scikit-learn,aminert/scikit-learn,betatim/scikit-learn,shusenl/scikit-learn,fzalkow/scikit-learn,liangz0707/scikit-learn,thilbern/scikit-learn,madjelan/scikit-learn,jmetzen/scikit-learn,h2educ/scikit-learn,rohanp/scikit-learn,abimannans/scikit-learn,fyffyt/scikit-learn,AnasGhrab/scikit-learn,luo66/scikit-learn,roxyboy/scikit-learn,cdegroc/scikit-learn,f3r/scikit-learn,Vimos/scikit-learn,jorge2703/scikit-learn,heli522/scikit-learn,krez13/scikit-learn,vigilv/scikit-learn,xwolf12/scikit-learn,marcocaccin/scikit-learn,kylerbrown/scikit-learn,zhenv5/scikit-learn,yyjiang/scikit-learn,NelisVerhoef/scikit-learn,pv/scikit-learn,ankurankan/scikit-learn,xyguo/scikit-learn,fengzhyuan/scikit-learn,cl4rke/scikit-learn,jmschrei/scikit-learn,liyu1990/sklearn,pythonvietnam/scikit-learn,466152112/scikit-learn,rishikksh20/scikit-learn,adamgreenhall/scikit-learn,Vimos/scikit-learn,andrewnc/scikit-learn,Srisai85/scikit-learn,eg-zhang/scikit-learn,larsmans/scikit-learn,mugizico/scikit-learn,nelson-liu/scikit-learn,zhenv5/scikit-learn,smartscheduling/scikit-learn-categorical-tree,trun
gnt13/scikit-learn,sgenoud/scikit-learn,stylianos-kampakis/scikit-learn,carrillo/scikit-learn,xiaoxiamii/scikit-learn,belltailjp/scikit-learn,ogrisel/scikit-learn,rsivapr/scikit-learn,xzh86/scikit-learn,alexsavio/scikit-learn,henrykironde/scikit-learn,AlexandreAbraham/scikit-learn,fbagirov/scikit-learn,loli/sklearn-ensembletrees,quheng/scikit-learn,aewhatley/scikit-learn,andrewnc/scikit-learn,jm-begon/scikit-learn,RPGOne/scikit-learn,glemaitre/scikit-learn,theoryno3/scikit-learn,yyjiang/scikit-learn,JPFrancoia/scikit-learn,hsuantien/scikit-learn,arabenjamin/scikit-learn,equialgo/scikit-learn,JosmanPS/scikit-learn,mayblue9/scikit-learn,AlexRobson/scikit-learn,hsiaoyi0504/scikit-learn,eg-zhang/scikit-learn,3manuek/scikit-learn,mattgiguere/scikit-learn,giorgiop/scikit-learn,TomDLT/scikit-learn,untom/scikit-learn,IshankGulati/scikit-learn,JeanKossaifi/scikit-learn,fbagirov/scikit-learn,yonglehou/scikit-learn,ivannz/scikit-learn,0asa/scikit-learn,anntzer/scikit-learn,shyamalschandra/scikit-learn,djgagne/scikit-learn,ChanderG/scikit-learn,dsullivan7/scikit-learn,h2educ/scikit-learn,mlyundin/scikit-learn,Djabbz/scikit-learn,liberatorqjw/scikit-learn,0asa/scikit-learn,vshtanko/scikit-learn,NunoEdgarGub1/scikit-learn,YinongLong/scikit-learn,russel1237/scikit-learn,ngoix/OCRF,DSLituiev/scikit-learn,henridwyer/scikit-learn,nomadcube/scikit-learn,pkruskal/scikit-learn,jayflo/scikit-learn,akionakamura/scikit-learn,walterreade/scikit-learn,simon-pepin/scikit-learn,trankmichael/scikit-learn,meduz/scikit-learn,NelisVerhoef/scikit-learn,jorge2703/scikit-learn,eickenberg/scikit-learn,kaichogami/scikit-learn,RachitKansal/scikit-learn,PrashntS/scikit-learn,adamgreenhall/scikit-learn,RachitKansal/scikit-learn,hitszxp/scikit-learn,anntzer/scikit-learn,hrjn/scikit-learn,poryfly/scikit-learn,0x0all/scikit-learn,nmayorov/scikit-learn,jmetzen/scikit-learn,f3r/scikit-learn,jmschrei/scikit-learn,elkingtonmcb/scikit-learn,ningchi/scikit-learn,MechCoder/scikit-learn,betatim/scikit-learn,justinca
ssidy/scikit-learn,bigdataelephants/scikit-learn,elkingtonmcb/scikit-learn,djgagne/scikit-learn,Nyker510/scikit-learn,hugobowne/scikit-learn,nhejazi/scikit-learn,jlegendary/scikit-learn,schets/scikit-learn,sanketloke/scikit-learn,pianomania/scikit-learn,larsmans/scikit-learn,pianomania/scikit-learn,sgenoud/scikit-learn,Fireblend/scikit-learn,h2educ/scikit-learn,dsullivan7/scikit-learn,dingocuster/scikit-learn,yunfeilu/scikit-learn,sanketloke/scikit-learn,sergeyf/scikit-learn,ominux/scikit-learn,etkirsch/scikit-learn,voxlol/scikit-learn,gclenaghan/scikit-learn,vibhorag/scikit-learn,JeanKossaifi/scikit-learn,loli/semisupervisedforests,ky822/scikit-learn,hainm/scikit-learn,harshaneelhg/scikit-learn,cauchycui/scikit-learn,moutai/scikit-learn,abhishekgahlot/scikit-learn,rohanp/scikit-learn,pypot/scikit-learn,chrisburr/scikit-learn,altairpearl/scikit-learn,Nyker510/scikit-learn,mwv/scikit-learn,mayblue9/scikit-learn,equialgo/scikit-learn,tomlof/scikit-learn,JPFrancoia/scikit-learn,kylerbrown/scikit-learn,rahul-c1/scikit-learn,sergeyf/scikit-learn,r-mart/scikit-learn,pypot/scikit-learn,davidgbe/scikit-learn,LohithBlaze/scikit-learn,djgagne/scikit-learn,xuewei4d/scikit-learn,vshtanko/scikit-learn,PrashntS/scikit-learn,thilbern/scikit-learn,NunoEdgarGub1/scikit-learn,JeanKossaifi/scikit-learn,jmetzen/scikit-learn,xubenben/scikit-learn,khkaminska/scikit-learn,qifeigit/scikit-learn,lbishal/scikit-learn,mattilyra/scikit-learn,hsuantien/scikit-learn,wlamond/scikit-learn,vibhorag/scikit-learn,aewhatley/scikit-learn,roxyboy/scikit-learn,zorojean/scikit-learn,fabianp/scikit-learn,dsullivan7/scikit-learn,altairpearl/scikit-learn,yask123/scikit-learn,cybernet14/scikit-learn,mugizico/scikit-learn,macks22/scikit-learn,liyu1990/sklearn,ClimbsRocks/scikit-learn,dsquareindia/scikit-learn,vinayak-mehta/scikit-learn,Barmaley-exe/scikit-learn,ilyes14/scikit-learn,lesteve/scikit-learn,jereze/scikit-learn,florian-f/sklearn,samzhang111/scikit-learn,jlegendary/scikit-learn,andrewnc/scikit-learn,
jaidevd/scikit-learn,nhejazi/scikit-learn,samuel1208/scikit-learn,aminert/scikit-learn,CVML/scikit-learn,roxyboy/scikit-learn,lenovor/scikit-learn,bigdataelephants/scikit-learn,sinhrks/scikit-learn,hitszxp/scikit-learn,hlin117/scikit-learn,B3AU/waveTree,gclenaghan/scikit-learn,fredhusser/scikit-learn,pv/scikit-learn,CVML/scikit-learn,ningchi/scikit-learn,MartinDelzant/scikit-learn,maheshakya/scikit-learn,PatrickChrist/scikit-learn,ishanic/scikit-learn,NelisVerhoef/scikit-learn,pnedunuri/scikit-learn,michigraber/scikit-learn,JPFrancoia/scikit-learn,rahuldhote/scikit-learn,ilo10/scikit-learn,mayblue9/scikit-learn,chrisburr/scikit-learn,PrashntS/scikit-learn,kjung/scikit-learn,loli/sklearn-ensembletrees,waterponey/scikit-learn,ChanderG/scikit-learn,CforED/Machine-Learning,olologin/scikit-learn,ltiao/scikit-learn,xzh86/scikit-learn,deepesch/scikit-learn,jorik041/scikit-learn,theoryno3/scikit-learn,cwu2011/scikit-learn,jblackburne/scikit-learn,IshankGulati/scikit-learn,vshtanko/scikit-learn,gclenaghan/scikit-learn,florian-f/sklearn,jorik041/scikit-learn,ssaeger/scikit-learn,nesterione/scikit-learn,jaidevd/scikit-learn,simon-pepin/scikit-learn,simon-pepin/scikit-learn,MartinDelzant/scikit-learn,AlexandreAbraham/scikit-learn,maheshakya/scikit-learn,f3r/scikit-learn,ycaihua/scikit-learn,zuku1985/scikit-learn,fbagirov/scikit-learn,victorbergelin/scikit-learn,huobaowangxi/scikit-learn,terkkila/scikit-learn,Myasuka/scikit-learn,ZENGXH/scikit-learn,saiwing-yeung/scikit-learn,deepesch/scikit-learn,Adai0808/scikit-learn,LiaoPan/scikit-learn,larsmans/scikit-learn,manashmndl/scikit-learn,eickenberg/scikit-learn,MatthieuBizien/scikit-learn,IssamLaradji/scikit-learn,pratapvardhan/scikit-learn,jm-begon/scikit-learn,stylianos-kampakis/scikit-learn,sarahgrogan/scikit-learn,rexshihaoren/scikit-learn,gotomypc/scikit-learn,samuel1208/scikit-learn,ssaeger/scikit-learn,themrmax/scikit-learn,michigraber/scikit-learn,toastedcornflakes/scikit-learn,marcocaccin/scikit-learn,huzq/scikit-learn,vib
horag/scikit-learn,anirudhjayaraman/scikit-learn,YinongLong/scikit-learn,depet/scikit-learn,DonBeo/scikit-learn,vermouthmjl/scikit-learn,dhruv13J/scikit-learn,q1ang/scikit-learn,fbagirov/scikit-learn,lucidfrontier45/scikit-learn,justincassidy/scikit-learn,djgagne/scikit-learn,akionakamura/scikit-learn,icdishb/scikit-learn,amueller/scikit-learn,clemkoa/scikit-learn,fabioticconi/scikit-learn,fabioticconi/scikit-learn,smartscheduling/scikit-learn-categorical-tree,Achuth17/scikit-learn,xubenben/scikit-learn,Adai0808/scikit-learn,fengzhyuan/scikit-learn,ivannz/scikit-learn,davidgbe/scikit-learn,icdishb/scikit-learn,DonBeo/scikit-learn,petosegan/scikit-learn,jayflo/scikit-learn,B3AU/waveTree,schets/scikit-learn,terkkila/scikit-learn,loli/semisupervisedforests,jakirkham/scikit-learn,qifeigit/scikit-learn,arabenjamin/scikit-learn,eickenberg/scikit-learn,meduz/scikit-learn,liangz0707/scikit-learn,HolgerPeters/scikit-learn,treycausey/scikit-learn,gotomypc/scikit-learn,BiaDarkia/scikit-learn,mlyundin/scikit-learn,sumspr/scikit-learn,pypot/scikit-learn,rvraghav93/scikit-learn,moutai/scikit-learn,rishikksh20/scikit-learn,alvarofierroclavero/scikit-learn,q1ang/scikit-learn,lesteve/scikit-learn,jjx02230808/project0223,ominux/scikit-learn,vermouthmjl/scikit-learn,aflaxman/scikit-learn,tawsifkhan/scikit-learn,heli522/scikit-learn,jkarnows/scikit-learn,procoder317/scikit-learn,robin-lai/scikit-learn,macks22/scikit-learn,mhdella/scikit-learn,beepee14/scikit-learn,hsiaoyi0504/scikit-learn,yask123/scikit-learn,waterponey/scikit-learn,andaag/scikit-learn,robbymeals/scikit-learn,lin-credible/scikit-learn,mfjb/scikit-learn,Sentient07/scikit-learn,joshloyal/scikit-learn,liangz0707/scikit-learn,phdowling/scikit-learn,wlamond/scikit-learn,jorik041/scikit-learn,arabenjamin/scikit-learn,RayMick/scikit-learn,mrshu/scikit-learn,nrhine1/scikit-learn,deepesch/scikit-learn,vibhorag/scikit-learn,ominux/scikit-learn,pnedunuri/scikit-learn,shikhardb/scikit-learn,hugobowne/scikit-learn,fengzhyuan/scikit
-learn,PatrickOReilly/scikit-learn,AnasGhrab/scikit-learn,jaidevd/scikit-learn,mjgrav2001/scikit-learn,cainiaocome/scikit-learn,hsuantien/scikit-learn,aminert/scikit-learn,LiaoPan/scikit-learn,jzt5132/scikit-learn,arjoly/scikit-learn,loli/sklearn-ensembletrees,IshankGulati/scikit-learn,JPFrancoia/scikit-learn,walterreade/scikit-learn,pythonvietnam/scikit-learn,beepee14/scikit-learn,OshynSong/scikit-learn,xiaoxiamii/scikit-learn,bnaul/scikit-learn,henrykironde/scikit-learn,mhdella/scikit-learn,ycaihua/scikit-learn,vivekmishra1991/scikit-learn,icdishb/scikit-learn,Obus/scikit-learn,aabadie/scikit-learn,nikitasingh981/scikit-learn,rajat1994/scikit-learn,nmayorov/scikit-learn,aflaxman/scikit-learn,sinhrks/scikit-learn,pompiduskus/scikit-learn,spallavolu/scikit-learn,YinongLong/scikit-learn,treycausey/scikit-learn,fabianp/scikit-learn,MohammedWasim/scikit-learn,bikong2/scikit-learn,jakobworldpeace/scikit-learn,macks22/scikit-learn,mattilyra/scikit-learn,PatrickChrist/scikit-learn,Sentient07/scikit-learn,shangwuhencc/scikit-learn,Lawrence-Liu/scikit-learn,UNR-AERIAL/scikit-learn,procoder317/scikit-learn,Srisai85/scikit-learn,TomDLT/scikit-learn,ilo10/scikit-learn,vigilv/scikit-learn,Windy-Ground/scikit-learn,etkirsch/scikit-learn,Titan-C/scikit-learn,ChanderG/scikit-learn,glemaitre/scikit-learn,jayflo/scikit-learn,liberatorqjw/scikit-learn,tawsifkhan/scikit-learn,cauchycui/scikit-learn,jpautom/scikit-learn,liyu1990/sklearn,Obus/scikit-learn,sumspr/scikit-learn,AnasGhrab/scikit-learn,lazywei/scikit-learn,themrmax/scikit-learn,robin-lai/scikit-learn,mfjb/scikit-learn,466152112/scikit-learn,ycaihua/scikit-learn,cybernet14/scikit-learn,IssamLaradji/scikit-learn,shusenl/scikit-learn,xwolf12/scikit-learn,arjoly/scikit-learn,ZENGXH/scikit-learn,thilbern/scikit-learn,huzq/scikit-learn,robbymeals/scikit-learn,khkaminska/scikit-learn,nhejazi/scikit-learn,dingocuster/scikit-learn,ZenDevelopmentSystems/scikit-learn,belltailjp/scikit-learn,zhenv5/scikit-learn,ishanic/scikit-learn,BiaD
arkia/scikit-learn,xzh86/scikit-learn,glennq/scikit-learn,RachitKansal/scikit-learn,tawsifkhan/scikit-learn,rishikksh20/scikit-learn,kmike/scikit-learn,HolgerPeters/scikit-learn,abimannans/scikit-learn,thientu/scikit-learn,ltiao/scikit-learn,raghavrv/scikit-learn,depet/scikit-learn,nrhine1/scikit-learn,jblackburne/scikit-learn,sgenoud/scikit-learn,rajat1994/scikit-learn,r-mart/scikit-learn,jmschrei/scikit-learn,Sentient07/scikit-learn,Clyde-fare/scikit-learn,yonglehou/scikit-learn,jakirkham/scikit-learn,pnedunuri/scikit-learn,trungnt13/scikit-learn,Myasuka/scikit-learn,jorge2703/scikit-learn,zuku1985/scikit-learn,NelisVerhoef/scikit-learn,luo66/scikit-learn,ogrisel/scikit-learn,tawsifkhan/scikit-learn,mayblue9/scikit-learn,btabibian/scikit-learn,IndraVikas/scikit-learn,shenzebang/scikit-learn,samuel1208/scikit-learn,ChanChiChoi/scikit-learn,tomlof/scikit-learn,yonglehou/scikit-learn,JeanKossaifi/scikit-learn,B3AU/waveTree,rexshihaoren/scikit-learn,ilyes14/scikit-learn,ngoix/OCRF,victorbergelin/scikit-learn,pianomania/scikit-learn,hrjn/scikit-learn,loli/sklearn-ensembletrees,michigraber/scikit-learn,toastedcornflakes/scikit-learn,anntzer/scikit-learn,jmetzen/scikit-learn,ClimbsRocks/scikit-learn,frank-tancf/scikit-learn,devanshdalal/scikit-learn,Titan-C/scikit-learn,UNR-AERIAL/scikit-learn,AIML/scikit-learn,aetilley/scikit-learn,themrmax/scikit-learn,IshankGulati/scikit-learn,arahuja/scikit-learn,cwu2011/scikit-learn,0asa/scikit-learn,wlamond/scikit-learn,hsiaoyi0504/scikit-learn,akionakamura/scikit-learn,0x0all/scikit-learn,joshloyal/scikit-learn,dingocuster/scikit-learn,jseabold/scikit-learn,appapantula/scikit-learn,quheng/scikit-learn,RomainBrault/scikit-learn,sarahgrogan/scikit-learn,carrillo/scikit-learn,mattilyra/scikit-learn,khkaminska/scikit-learn,xyguo/scikit-learn,huobaowangxi/scikit-learn,bikong2/scikit-learn,alexeyum/scikit-learn,mikebenfield/scikit-learn,ngoix/OCRF,xwolf12/scikit-learn,Obus/scikit-learn,HolgerPeters/scikit-learn,samuel1208/scikit-learn,R
PGOne/scikit-learn,lbishal/scikit-learn,krez13/scikit-learn,vinayak-mehta/scikit-learn,jblackburne/scikit-learn,wzbozon/scikit-learn,jkarnows/scikit-learn,PatrickOReilly/scikit-learn,aewhatley/scikit-learn,herilalaina/scikit-learn,maheshakya/scikit-learn,JsNoNo/scikit-learn,abimannans/scikit-learn,hdmetor/scikit-learn,mjudsp/Tsallis,bnaul/scikit-learn,appapantula/scikit-learn,dsullivan7/scikit-learn,potash/scikit-learn,kjung/scikit-learn,billy-inn/scikit-learn,pv/scikit-learn,kjung/scikit-learn,LohithBlaze/scikit-learn,nhejazi/scikit-learn,ningchi/scikit-learn,Myasuka/scikit-learn,pianomania/scikit-learn,mikebenfield/scikit-learn,AlexRobson/scikit-learn,sgenoud/scikit-learn,henridwyer/scikit-learn,AlexRobson/scikit-learn,ahoyosid/scikit-learn,yanlend/scikit-learn,olologin/scikit-learn,ChanderG/scikit-learn,amueller/scikit-learn,evgchz/scikit-learn,shikhardb/scikit-learn,OshynSong/scikit-learn,tdhopper/scikit-learn,kevin-intel/scikit-learn,zorroblue/scikit-learn,jseabold/scikit-learn,alexeyum/scikit-learn,vshtanko/scikit-learn,pratapvardhan/scikit-learn,evgchz/scikit-learn,Nyker510/scikit-learn,MohammedWasim/scikit-learn,alvarofierroclavero/scikit-learn,icdishb/scikit-learn,henridwyer/scikit-learn,abhishekgahlot/scikit-learn,Garrett-R/scikit-learn,amueller/scikit-learn,ldirer/scikit-learn,lucidfrontier45/scikit-learn,hugobowne/scikit-learn,etkirsch/scikit-learn,fzalkow/scikit-learn,manhhomienbienthuy/scikit-learn,mwv/scikit-learn,nikitasingh981/scikit-learn,mojoboss/scikit-learn,ltiao/scikit-learn,rvraghav93/scikit-learn,CforED/Machine-Learning,ndingwall/scikit-learn,Jimmy-Morzaria/scikit-learn,ivannz/scikit-learn,wanggang3333/scikit-learn,phdowling/scikit-learn,kagayakidan/scikit-learn,fyffyt/scikit-learn,eickenberg/scikit-learn,bthirion/scikit-learn,RomainBrault/scikit-learn,davidgbe/scikit-learn,procoder317/scikit-learn,zaxtax/scikit-learn,ZenDevelopmentSystems/scikit-learn,mehdidc/scikit-learn,f3r/scikit-learn,carrillo/scikit-learn,MohammedWasim/scikit-learn,rahu
l-c1/scikit-learn,nrhine1/scikit-learn,deepesch/scikit-learn,luo66/scikit-learn,beepee14/scikit-learn,nmayorov/scikit-learn,Nyker510/scikit-learn,shenzebang/scikit-learn,OshynSong/scikit-learn,mlyundin/scikit-learn,rahuldhote/scikit-learn,hainm/scikit-learn,RayMick/scikit-learn,pratapvardhan/scikit-learn,jzt5132/scikit-learn,mhdella/scikit-learn,abhishekkrthakur/scikit-learn,mhue/scikit-learn,mhue/scikit-learn,tmhm/scikit-learn,ClimbsRocks/scikit-learn,thientu/scikit-learn,clemkoa/scikit-learn,ndingwall/scikit-learn,nelson-liu/scikit-learn,tomlof/scikit-learn,zihua/scikit-learn,nomadcube/scikit-learn,zihua/scikit-learn,henridwyer/scikit-learn,ngoix/OCRF,xubenben/scikit-learn,JsNoNo/scikit-learn,giorgiop/scikit-learn,tmhm/scikit-learn,nvoron23/scikit-learn,Vimos/scikit-learn,Garrett-R/scikit-learn,xuewei4d/scikit-learn,MatthieuBizien/scikit-learn,RPGOne/scikit-learn,vigilv/scikit-learn,joshloyal/scikit-learn,manhhomienbienthuy/scikit-learn,altairpearl/scikit-learn,nvoron23/scikit-learn,ky822/scikit-learn,spallavolu/scikit-learn,mblondel/scikit-learn,zihua/scikit-learn,Jimmy-Morzaria/scikit-learn,potash/scikit-learn,loli/semisupervisedforests,devanshdalal/scikit-learn,manhhomienbienthuy/scikit-learn,kmike/scikit-learn,pkruskal/scikit-learn,pkruskal/scikit-learn,yunfeilu/scikit-learn,MechCoder/scikit-learn,mblondel/scikit-learn,victorbergelin/scikit-learn,mojoboss/scikit-learn,0asa/scikit-learn,Lawrence-Liu/scikit-learn,ashhher3/scikit-learn,sanketloke/scikit-learn,amueller/scikit-learn,xuewei4d/scikit-learn,ishanic/scikit-learn,lucidfrontier45/scikit-learn,pypot/scikit-learn,qifeigit/scikit-learn,cainiaocome/scikit-learn,Aasmi/scikit-learn,kevin-intel/scikit-learn,alvarofierroclavero/scikit-learn,IndraVikas/scikit-learn,plissonf/scikit-learn,jereze/scikit-learn,harshaneelhg/scikit-learn,bigdataelephants/scikit-learn,frank-tancf/scikit-learn,AlexanderFabisch/scikit-learn,potash/scikit-learn,q1ang/scikit-learn,Srisai85/scikit-learn,rajat1994/scikit-learn,0x0all/scikit-l
earn,krez13/scikit-learn,JsNoNo/scikit-learn,TomDLT/scikit-learn,arjoly/scikit-learn,untom/scikit-learn,imaculate/scikit-learn,adamgreenhall/scikit-learn,ahoyosid/scikit-learn,rrohan/scikit-learn,LohithBlaze/scikit-learn,frank-tancf/scikit-learn,ephes/scikit-learn,olologin/scikit-learn,kjung/scikit-learn,ankurankan/scikit-learn,AlexandreAbraham/scikit-learn,yonglehou/scikit-learn,costypetrisor/scikit-learn,MartinSavc/scikit-learn,Fireblend/scikit-learn,lazywei/scikit-learn,iismd17/scikit-learn,espg/scikit-learn,zuku1985/scikit-learn,justincassidy/scikit-learn,massmutual/scikit-learn,mjudsp/Tsallis,yanlend/scikit-learn,mehdidc/scikit-learn,spallavolu/scikit-learn,mxjl620/scikit-learn,siutanwong/scikit-learn,fyffyt/scikit-learn,nomadcube/scikit-learn,shyamalschandra/scikit-learn,michigraber/scikit-learn,cl4rke/scikit-learn,rsivapr/scikit-learn,rsivapr/scikit-learn,ClimbsRocks/scikit-learn,Windy-Ground/scikit-learn,anurag313/scikit-learn,lenovor/scikit-learn,glouppe/scikit-learn,bikong2/scikit-learn,JosmanPS/scikit-learn,nesterione/scikit-learn,jpautom/scikit-learn,xavierwu/scikit-learn,Jimmy-Morzaria/scikit-learn,ilo10/scikit-learn,466152112/scikit-learn,mehdidc/scikit-learn,huzq/scikit-learn,r-mart/scikit-learn,walterreade/scikit-learn,bnaul/scikit-learn,poryfly/scikit-learn,iismd17/scikit-learn,robin-lai/scikit-learn,glouppe/scikit-learn,Lawrence-Liu/scikit-learn,gclenaghan/scikit-learn,marcocaccin/scikit-learn,ltiao/scikit-learn,madjelan/scikit-learn,treycausey/scikit-learn,vivekmishra1991/scikit-learn,glemaitre/scikit-learn,costypetrisor/scikit-learn,lazywei/scikit-learn,ZenDevelopmentSystems/scikit-learn,Akshay0724/scikit-learn,abhishekgahlot/scikit-learn,Vimos/scikit-learn,rsivapr/scikit-learn,rexshihaoren/scikit-learn,victorbergelin/scikit-learn,tmhm/scikit-learn,iismd17/scikit-learn,Sentient07/scikit-learn,moutai/scikit-learn,rrohan/scikit-learn
|
# Unit tests for BaseEstimator._reinit() (estimator cloning) and repr().
from nose.tools import assert_true, assert_false, assert_equal
from ..base import BaseEstimator


class MyEstimator(BaseEstimator):
    """Minimal concrete estimator used by the repr smoke test below."""

    def __init__(self, l1=0):
        # One dummy hyper-parameter so the base class has something
        # to introspect and display.
        self.l1 = l1


def test_renew():
    """Tests that BaseEstimator._reinit() creates a correct deep copy.

    We create an estimator, make a copy of its original state
    (which, in this case, is the current state of the estimator),
    and check that the obtained copy is a correct deep copy.
    """
    # Imported inside the test so the module can still be collected
    # when the feature-selection subpackage is unavailable.
    from scikits.learn.feature_selection import SelectFpr, f_classif
    selector = SelectFpr(f_classif, alpha=0.1)
    new_selector = selector._reinit()
    # The clone must be a distinct object ...
    assert_true(selector is not new_selector)
    # ... carrying identical constructor parameters.
    assert_equal(selector._get_params(), new_selector._get_params())


def test_renew_2():
    """Tests that BaseEstimator._reinit() doesn't copy everything.

    We first create an estimator, give it an own attribute, and make a
    copy of its original state. Then we check that the copy doesn't have
    the specific attribute we manually added to the initial estimator.
    """
    from scikits.learn.feature_selection import SelectFpr, f_classif
    selector = SelectFpr(f_classif, alpha=0.1)
    selector.own_attribute = "test"
    new_selector = selector._reinit()
    # Attributes set after construction must not survive re-initialisation.
    assert_false(hasattr(new_selector, "own_attribute"))


def test_repr():
    """Smoke test the repr of the estimator."""
    my_estimator = MyEstimator()
    repr(my_estimator)
Change test name for the _reinit() method.
|
# Unit tests for BaseEstimator._reinit() (estimator cloning) and repr().
from nose.tools import assert_true, assert_false, assert_equal
from ..base import BaseEstimator


class MyEstimator(BaseEstimator):
    """Minimal concrete estimator used by the repr smoke test below."""

    def __init__(self, l1=0):
        # One dummy hyper-parameter so the base class has something
        # to introspect and display.
        self.l1 = l1


def test_reinit():
    """Tests that BaseEstimator._reinit() creates a correct deep copy.

    We create an estimator, make a copy of its original state
    (which, in this case, is the current state of the estimator),
    and check that the obtained copy is a correct deep copy.
    """
    # Imported inside the test so the module can still be collected
    # when the feature-selection subpackage is unavailable.
    from scikits.learn.feature_selection import SelectFpr, f_classif
    selector = SelectFpr(f_classif, alpha=0.1)
    new_selector = selector._reinit()
    # The clone must be a distinct object ...
    assert_true(selector is not new_selector)
    # ... carrying identical constructor parameters.
    assert_equal(selector._get_params(), new_selector._get_params())


def test_reinit_2():
    """Tests that BaseEstimator._reinit() doesn't copy everything.

    We first create an estimator, give it an own attribute, and make a
    copy of its original state. Then we check that the copy doesn't have
    the specific attribute we manually added to the initial estimator.
    """
    from scikits.learn.feature_selection import SelectFpr, f_classif
    selector = SelectFpr(f_classif, alpha=0.1)
    selector.own_attribute = "test"
    new_selector = selector._reinit()
    # Attributes set after construction must not survive re-initialisation.
    assert_false(hasattr(new_selector, "own_attribute"))


def test_repr():
    """Smoke test the repr of the estimator."""
    my_estimator = MyEstimator()
    repr(my_estimator)
|
<commit_before>from nose.tools import assert_true, assert_false, assert_equal
from ..base import BaseEstimator
class MyEstimator(BaseEstimator):
def __init__(self, l1=0):
self.l1 = l1
def test_renew():
"""Tests that BaseEstimator._new() creates a correct deep copy.
We create an estimator, make a copy of its original state
(which, in this case, is the current state of the setimator),
and check that the obtained copy is a correct deep copy.
"""
from scikits.learn.feature_selection import SelectFpr, f_classif
selector = SelectFpr(f_classif, alpha=0.1)
new_selector = selector._reinit()
assert_true(selector is not new_selector)
assert_equal(selector._get_params(), new_selector._get_params())
def test_renew_2():
"""Tests that BaseEstimator._new() doesn't copy everything.
We first create an estimator, give it an own attribute, and
make a copy of its original state. Then we check that the copy doesn't have
the specific attribute we manually added to the initial estimator.
"""
from scikits.learn.feature_selection import SelectFpr, f_classif
selector = SelectFpr(f_classif, alpha=0.1)
selector.own_attribute = "test"
new_selector = selector._reinit()
assert_false(hasattr(new_selector, "own_attribute"))
def test_repr():
""" Smoke test the repr of the
"""
my_estimator = MyEstimator()
repr(my_estimator)
<commit_msg>Change test name for the _reinit() method.<commit_after>
|
# Unit tests for BaseEstimator._reinit() (estimator cloning) and repr().
from nose.tools import assert_true, assert_false, assert_equal
from ..base import BaseEstimator


class MyEstimator(BaseEstimator):
    """Minimal concrete estimator used by the repr smoke test below."""

    def __init__(self, l1=0):
        # One dummy hyper-parameter so the base class has something
        # to introspect and display.
        self.l1 = l1


def test_reinit():
    """Tests that BaseEstimator._reinit() creates a correct deep copy.

    We create an estimator, make a copy of its original state
    (which, in this case, is the current state of the estimator),
    and check that the obtained copy is a correct deep copy.
    """
    # Imported inside the test so the module can still be collected
    # when the feature-selection subpackage is unavailable.
    from scikits.learn.feature_selection import SelectFpr, f_classif
    selector = SelectFpr(f_classif, alpha=0.1)
    new_selector = selector._reinit()
    # The clone must be a distinct object ...
    assert_true(selector is not new_selector)
    # ... carrying identical constructor parameters.
    assert_equal(selector._get_params(), new_selector._get_params())


def test_reinit_2():
    """Tests that BaseEstimator._reinit() doesn't copy everything.

    We first create an estimator, give it an own attribute, and make a
    copy of its original state. Then we check that the copy doesn't have
    the specific attribute we manually added to the initial estimator.
    """
    from scikits.learn.feature_selection import SelectFpr, f_classif
    selector = SelectFpr(f_classif, alpha=0.1)
    selector.own_attribute = "test"
    new_selector = selector._reinit()
    # Attributes set after construction must not survive re-initialisation.
    assert_false(hasattr(new_selector, "own_attribute"))


def test_repr():
    """Smoke test the repr of the estimator."""
    my_estimator = MyEstimator()
    repr(my_estimator)
|
# Unit tests for BaseEstimator._reinit() (estimator cloning) and repr().
from nose.tools import assert_true, assert_false, assert_equal
from ..base import BaseEstimator


class MyEstimator(BaseEstimator):
    """Minimal concrete estimator used by the repr smoke test below."""

    def __init__(self, l1=0):
        # One dummy hyper-parameter so the base class has something
        # to introspect and display.
        self.l1 = l1


def test_renew():
    """Tests that BaseEstimator._reinit() creates a correct deep copy.

    We create an estimator, make a copy of its original state
    (which, in this case, is the current state of the estimator),
    and check that the obtained copy is a correct deep copy.
    """
    # Imported inside the test so the module can still be collected
    # when the feature-selection subpackage is unavailable.
    from scikits.learn.feature_selection import SelectFpr, f_classif
    selector = SelectFpr(f_classif, alpha=0.1)
    new_selector = selector._reinit()
    # The clone must be a distinct object ...
    assert_true(selector is not new_selector)
    # ... carrying identical constructor parameters.
    assert_equal(selector._get_params(), new_selector._get_params())


def test_renew_2():
    """Tests that BaseEstimator._reinit() doesn't copy everything.

    We first create an estimator, give it an own attribute, and make a
    copy of its original state. Then we check that the copy doesn't have
    the specific attribute we manually added to the initial estimator.
    """
    from scikits.learn.feature_selection import SelectFpr, f_classif
    selector = SelectFpr(f_classif, alpha=0.1)
    selector.own_attribute = "test"
    new_selector = selector._reinit()
    # Attributes set after construction must not survive re-initialisation.
    assert_false(hasattr(new_selector, "own_attribute"))


def test_repr():
    """Smoke test the repr of the estimator."""
    my_estimator = MyEstimator()
    repr(my_estimator)
Change test name for the _reinit() method.from nose.tools import assert_true, assert_false, assert_equal
from ..base import BaseEstimator
class MyEstimator(BaseEstimator):
def __init__(self, l1=0):
self.l1 = l1
def test_reinit():
"""Tests that BaseEstimator._new() creates a correct deep copy.
We create an estimator, make a copy of its original state
(which, in this case, is the current state of the setimator),
and check that the obtained copy is a correct deep copy.
"""
from scikits.learn.feature_selection import SelectFpr, f_classif
selector = SelectFpr(f_classif, alpha=0.1)
new_selector = selector._reinit()
assert_true(selector is not new_selector)
assert_equal(selector._get_params(), new_selector._get_params())
def test_reinit_2():
"""Tests that BaseEstimator._new() doesn't copy everything.
We first create an estimator, give it an own attribute, and
make a copy of its original state. Then we check that the copy doesn't have
the specific attribute we manually added to the initial estimator.
"""
from scikits.learn.feature_selection import SelectFpr, f_classif
selector = SelectFpr(f_classif, alpha=0.1)
selector.own_attribute = "test"
new_selector = selector._reinit()
assert_false(hasattr(new_selector, "own_attribute"))
def test_repr():
""" Smoke test the repr of the
"""
my_estimator = MyEstimator()
repr(my_estimator)
|
<commit_before>from nose.tools import assert_true, assert_false, assert_equal
from ..base import BaseEstimator
class MyEstimator(BaseEstimator):
def __init__(self, l1=0):
self.l1 = l1
def test_renew():
"""Tests that BaseEstimator._new() creates a correct deep copy.
We create an estimator, make a copy of its original state
(which, in this case, is the current state of the setimator),
and check that the obtained copy is a correct deep copy.
"""
from scikits.learn.feature_selection import SelectFpr, f_classif
selector = SelectFpr(f_classif, alpha=0.1)
new_selector = selector._reinit()
assert_true(selector is not new_selector)
assert_equal(selector._get_params(), new_selector._get_params())
def test_renew_2():
"""Tests that BaseEstimator._new() doesn't copy everything.
We first create an estimator, give it an own attribute, and
make a copy of its original state. Then we check that the copy doesn't have
the specific attribute we manually added to the initial estimator.
"""
from scikits.learn.feature_selection import SelectFpr, f_classif
selector = SelectFpr(f_classif, alpha=0.1)
selector.own_attribute = "test"
new_selector = selector._reinit()
assert_false(hasattr(new_selector, "own_attribute"))
def test_repr():
""" Smoke test the repr of the
"""
my_estimator = MyEstimator()
repr(my_estimator)
<commit_msg>Change test name for the _reinit() method.<commit_after>from nose.tools import assert_true, assert_false, assert_equal
from ..base import BaseEstimator
class MyEstimator(BaseEstimator):
def __init__(self, l1=0):
self.l1 = l1
def test_reinit():
"""Tests that BaseEstimator._new() creates a correct deep copy.
We create an estimator, make a copy of its original state
(which, in this case, is the current state of the setimator),
and check that the obtained copy is a correct deep copy.
"""
from scikits.learn.feature_selection import SelectFpr, f_classif
selector = SelectFpr(f_classif, alpha=0.1)
new_selector = selector._reinit()
assert_true(selector is not new_selector)
assert_equal(selector._get_params(), new_selector._get_params())
def test_reinit_2():
    """Tests that BaseEstimator._reinit() doesn't copy everything.
    We first create an estimator, give it an own attribute, and make a
    copy of its original state.  Then we check that the copy doesn't have
    the specific attribute we manually added to the initial estimator.
    """
    from scikits.learn.feature_selection import SelectFpr, f_classif
    selector = SelectFpr(f_classif, alpha=0.1)
    # Attach an attribute that is not a constructor parameter ...
    selector.own_attribute = "test"
    new_selector = selector._reinit()
    # ... and verify it is not carried over to the re-initialised copy.
    assert_false(hasattr(new_selector, "own_attribute"))
def test_repr():
""" Smoke test the repr of the
"""
my_estimator = MyEstimator()
repr(my_estimator)
|
bcec4724dc434218f7b2bce0aaabf391f86847b6
|
ocradmin/core/decorators.py
|
ocradmin/core/decorators.py
|
# Miscellaneos functions relating the projects app
import os
from datetime import datetime
from django.http import HttpResponseRedirect
from django.utils.http import urlquote
from django.conf import settings
def project_required(func):
    """Decorator: redirect to the project list when no project is open.

    If the session has no "project" entry the user is sent to
    /projects/list/ with the current (URL-quoted) path as the ``next``
    parameter; otherwise the wrapped view runs unchanged.
    """
    def wrapper(request, *args, **kwargs):
        if request.session.get("project"):
            return func(request, *args, **kwargs)
        next_url = urlquote(request.get_full_path())
        return HttpResponseRedirect("/projects/list/?next=%s" % next_url)
    return wrapper
def saves_files(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
temp = request.path.startswith("/ocr/")
project = request.session.get("project")
output_path = None
if project is None:
temp = True
if temp:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.TEMP_PATH,
request.user.username,
datetime.now().strftime("%Y%m%d%H%M%S")
)
else:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.USER_FILES_PATH,
project.slug
)
request.__class__.output_path = output_path
return func(request, *args, **kwargs)
return wrapper
|
# Miscellaneous functions relating to the projects app
import os
from datetime import datetime
from django.http import HttpResponseRedirect
from django.utils.http import urlquote
from django.conf import settings
def project_required(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
path = urlquote(request.get_full_path())
if not request.session.get("project"):
return HttpResponseRedirect("/projects/list/?next=%s" % path)
return func(request, *args, **kwargs)
return wrapper
def saves_files(func):
    """Decorator: attach an output directory to the request class.

    Requests whose path starts with /ocr/ or /plugins/, or requests with
    no project in the session, get a timestamped per-user directory under
    the temp area; all other requests get the project's own user-files
    directory.  The chosen path is stored as ``output_path`` on the
    request class before the wrapped view is called.
    """
    def wrapper(request, *args, **kwargs):
        project = request.session.get("project")
        use_temp = project is None or request.path.startswith(
                ("/ocr/", "/plugins/"))
        if use_temp:
            outdir = os.path.join(
                settings.MEDIA_ROOT,
                settings.TEMP_PATH,
                request.user.username,
                datetime.now().strftime("%Y%m%d%H%M%S")
            )
        else:
            outdir = os.path.join(
                settings.MEDIA_ROOT,
                settings.USER_FILES_PATH,
                project.slug
            )
        request.__class__.output_path = outdir
        return func(request, *args, **kwargs)
    return wrapper
|
Add plugins to the domains which handle temp files
|
Add plugins to the domains which handle temp files
|
Python
|
apache-2.0
|
vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium
|
# Miscellaneos functions relating the projects app
import os
from datetime import datetime
from django.http import HttpResponseRedirect
from django.utils.http import urlquote
from django.conf import settings
def project_required(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
path = urlquote(request.get_full_path())
if not request.session.get("project"):
return HttpResponseRedirect("/projects/list/?next=%s" % path)
return func(request, *args, **kwargs)
return wrapper
def saves_files(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
temp = request.path.startswith("/ocr/")
project = request.session.get("project")
output_path = None
if project is None:
temp = True
if temp:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.TEMP_PATH,
request.user.username,
datetime.now().strftime("%Y%m%d%H%M%S")
)
else:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.USER_FILES_PATH,
project.slug
)
request.__class__.output_path = output_path
return func(request, *args, **kwargs)
return wrapper
Add plugins to the domains which handle temp files
|
# Miscellaneos functions relating the projects app
import os
from datetime import datetime
from django.http import HttpResponseRedirect
from django.utils.http import urlquote
from django.conf import settings
def project_required(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
path = urlquote(request.get_full_path())
if not request.session.get("project"):
return HttpResponseRedirect("/projects/list/?next=%s" % path)
return func(request, *args, **kwargs)
return wrapper
def saves_files(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
temp = request.path.startswith(("/ocr/", "/plugins/"))
project = request.session.get("project")
output_path = None
if project is None:
temp = True
if temp:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.TEMP_PATH,
request.user.username,
datetime.now().strftime("%Y%m%d%H%M%S")
)
else:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.USER_FILES_PATH,
project.slug
)
request.__class__.output_path = output_path
return func(request, *args, **kwargs)
return wrapper
|
<commit_before># Miscellaneos functions relating the projects app
import os
from datetime import datetime
from django.http import HttpResponseRedirect
from django.utils.http import urlquote
from django.conf import settings
def project_required(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
path = urlquote(request.get_full_path())
if not request.session.get("project"):
return HttpResponseRedirect("/projects/list/?next=%s" % path)
return func(request, *args, **kwargs)
return wrapper
def saves_files(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
temp = request.path.startswith("/ocr/")
project = request.session.get("project")
output_path = None
if project is None:
temp = True
if temp:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.TEMP_PATH,
request.user.username,
datetime.now().strftime("%Y%m%d%H%M%S")
)
else:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.USER_FILES_PATH,
project.slug
)
request.__class__.output_path = output_path
return func(request, *args, **kwargs)
return wrapper
<commit_msg>Add plugins to the domains which handle temp files<commit_after>
|
# Miscellaneos functions relating the projects app
import os
from datetime import datetime
from django.http import HttpResponseRedirect
from django.utils.http import urlquote
from django.conf import settings
def project_required(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
path = urlquote(request.get_full_path())
if not request.session.get("project"):
return HttpResponseRedirect("/projects/list/?next=%s" % path)
return func(request, *args, **kwargs)
return wrapper
def saves_files(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
temp = request.path.startswith(("/ocr/", "/plugins/"))
project = request.session.get("project")
output_path = None
if project is None:
temp = True
if temp:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.TEMP_PATH,
request.user.username,
datetime.now().strftime("%Y%m%d%H%M%S")
)
else:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.USER_FILES_PATH,
project.slug
)
request.__class__.output_path = output_path
return func(request, *args, **kwargs)
return wrapper
|
# Miscellaneos functions relating the projects app
import os
from datetime import datetime
from django.http import HttpResponseRedirect
from django.utils.http import urlquote
from django.conf import settings
def project_required(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
path = urlquote(request.get_full_path())
if not request.session.get("project"):
return HttpResponseRedirect("/projects/list/?next=%s" % path)
return func(request, *args, **kwargs)
return wrapper
def saves_files(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
temp = request.path.startswith("/ocr/")
project = request.session.get("project")
output_path = None
if project is None:
temp = True
if temp:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.TEMP_PATH,
request.user.username,
datetime.now().strftime("%Y%m%d%H%M%S")
)
else:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.USER_FILES_PATH,
project.slug
)
request.__class__.output_path = output_path
return func(request, *args, **kwargs)
return wrapper
Add plugins to the domains which handle temp files# Miscellaneos functions relating the projects app
import os
from datetime import datetime
from django.http import HttpResponseRedirect
from django.utils.http import urlquote
from django.conf import settings
def project_required(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
path = urlquote(request.get_full_path())
if not request.session.get("project"):
return HttpResponseRedirect("/projects/list/?next=%s" % path)
return func(request, *args, **kwargs)
return wrapper
def saves_files(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
temp = request.path.startswith(("/ocr/", "/plugins/"))
project = request.session.get("project")
output_path = None
if project is None:
temp = True
if temp:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.TEMP_PATH,
request.user.username,
datetime.now().strftime("%Y%m%d%H%M%S")
)
else:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.USER_FILES_PATH,
project.slug
)
request.__class__.output_path = output_path
return func(request, *args, **kwargs)
return wrapper
|
<commit_before># Miscellaneos functions relating the projects app
import os
from datetime import datetime
from django.http import HttpResponseRedirect
from django.utils.http import urlquote
from django.conf import settings
def project_required(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
path = urlquote(request.get_full_path())
if not request.session.get("project"):
return HttpResponseRedirect("/projects/list/?next=%s" % path)
return func(request, *args, **kwargs)
return wrapper
def saves_files(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
temp = request.path.startswith("/ocr/")
project = request.session.get("project")
output_path = None
if project is None:
temp = True
if temp:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.TEMP_PATH,
request.user.username,
datetime.now().strftime("%Y%m%d%H%M%S")
)
else:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.USER_FILES_PATH,
project.slug
)
request.__class__.output_path = output_path
return func(request, *args, **kwargs)
return wrapper
<commit_msg>Add plugins to the domains which handle temp files<commit_after># Miscellaneos functions relating the projects app
import os
from datetime import datetime
from django.http import HttpResponseRedirect
from django.utils.http import urlquote
from django.conf import settings
def project_required(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
path = urlquote(request.get_full_path())
if not request.session.get("project"):
return HttpResponseRedirect("/projects/list/?next=%s" % path)
return func(request, *args, **kwargs)
return wrapper
def saves_files(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
temp = request.path.startswith(("/ocr/", "/plugins/"))
project = request.session.get("project")
output_path = None
if project is None:
temp = True
if temp:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.TEMP_PATH,
request.user.username,
datetime.now().strftime("%Y%m%d%H%M%S")
)
else:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.USER_FILES_PATH,
project.slug
)
request.__class__.output_path = output_path
return func(request, *args, **kwargs)
return wrapper
|
dde17a556103120ffbdf3dc08b822da2a781ff7e
|
myproject/myproject/project_settings.py
|
myproject/myproject/project_settings.py
|
# Project Settings - Settings that don't exist in settings.py that you want to
# add (e.g. USE_THOUSAND_SEPARATOR, GRAPPELLI_ADMIN_TITLE, CELERYBEAT_SCHEDULER,
# CELERYD_PREFETCH_MULTIPLIER, etc.)
#USE_THOUSAND_SEPARATOR = True
#GRAPPELLI_ADMIN_TITLE = ''
#import djcelery
#djcelery.setup_loader()
#CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
#CELERYD_PREFETCH_MULTIPLIER = 1
import os
import sys
if 'PRODUCTION' in os.environ and os.environ['PRODUCTION'].lower() in [True, 'y', 'yes', '1',]:
from production_settings import *
elif 'runserver' in sys.argv:
from local_settings import *
else:
from production_settings import *
|
# Project Settings - Settings that don't exist in settings.py that you want to
# add (e.g. USE_THOUSAND_SEPARATOR, GRAPPELLI_ADMIN_TITLE, CELERYBEAT_SCHEDULER,
# CELERYD_PREFETCH_MULTIPLIER, etc.)
#USE_THOUSAND_SEPARATOR = True
#GRAPPELLI_ADMIN_TITLE = ''
#import djcelery
#djcelery.setup_loader()
#CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
#CELERYD_PREFETCH_MULTIPLIER = 1
import os
import sys
# Default to the production banner; overridden below when run via `runserver`.
version = 'PRODUCTION'
color = '[1;92m' # Bold High Intensity Green + Underline
# NOTE(review): .lower() always returns a str, so the literal True in this
# list can never match -- presumably only 'y'/'yes'/'1' are intended; confirm.
# NOTE(review): all three branches import local_settings; if a separate
# production_settings module exists, verify this is deliberate.
if 'PRODUCTION' in os.environ and os.environ['PRODUCTION'].lower() in [True, 'y', 'yes', '1',]:
    from local_settings import *
elif 'runserver' in sys.argv:
    version = 'DEVELOPMENT'
    color = '[1;93m' # Bold High Intensity Yellow + Underline
    from local_settings import *
else:
    from local_settings import *
# Print a coloured "* PRODUCTION *" / "* DEVELOPMENT *" banner at import time
# (\xE2\x98\x85 is a UTF-8-encoded black star; Python 2 print statement).
print '\n{star} \x1b{color}{version}\x1b[0m {star}\n'.format(color=color,
                                                            star='\xE2\x98\x85',
                                                            version=version)
|
Use production settings by default; Display settings version in use
|
Use production settings by default; Display settings version in use
|
Python
|
unlicense
|
django-settings/django-settings
|
# Project Settings - Settings that don't exist in settings.py that you want to
# add (e.g. USE_THOUSAND_SEPARATOR, GRAPPELLI_ADMIN_TITLE, CELERYBEAT_SCHEDULER,
# CELERYD_PREFETCH_MULTIPLIER, etc.)
#USE_THOUSAND_SEPARATOR = True
#GRAPPELLI_ADMIN_TITLE = ''
#import djcelery
#djcelery.setup_loader()
#CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
#CELERYD_PREFETCH_MULTIPLIER = 1
import os
import sys
if 'PRODUCTION' in os.environ and os.environ['PRODUCTION'].lower() in [True, 'y', 'yes', '1',]:
from production_settings import *
elif 'runserver' in sys.argv:
from local_settings import *
else:
from production_settings import *
Use production settings by default; Display settings version in use
|
# Project Settings - Settings that don't exist in settings.py that you want to
# add (e.g. USE_THOUSAND_SEPARATOR, GRAPPELLI_ADMIN_TITLE, CELERYBEAT_SCHEDULER,
# CELERYD_PREFETCH_MULTIPLIER, etc.)
#USE_THOUSAND_SEPARATOR = True
#GRAPPELLI_ADMIN_TITLE = ''
#import djcelery
#djcelery.setup_loader()
#CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
#CELERYD_PREFETCH_MULTIPLIER = 1
import os
import sys
version = 'PRODUCTION'
color = '[1;92m' # Bold High Intensity Green + Underline
if 'PRODUCTION' in os.environ and os.environ['PRODUCTION'].lower() in [True, 'y', 'yes', '1',]:
from local_settings import *
elif 'runserver' in sys.argv:
version = 'DEVELOPMENT'
color = '[1;93m' # Bold High Intensity Yellow + Underline
from local_settings import *
else:
from local_settings import *
print '\n{star} \x1b{color}{version}\x1b[0m {star}\n'.format(color=color,
star='\xE2\x98\x85',
version=version)
|
<commit_before># Project Settings - Settings that don't exist in settings.py that you want to
# add (e.g. USE_THOUSAND_SEPARATOR, GRAPPELLI_ADMIN_TITLE, CELERYBEAT_SCHEDULER,
# CELERYD_PREFETCH_MULTIPLIER, etc.)
#USE_THOUSAND_SEPARATOR = True
#GRAPPELLI_ADMIN_TITLE = ''
#import djcelery
#djcelery.setup_loader()
#CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
#CELERYD_PREFETCH_MULTIPLIER = 1
import os
import sys
if 'PRODUCTION' in os.environ and os.environ['PRODUCTION'].lower() in [True, 'y', 'yes', '1',]:
from production_settings import *
elif 'runserver' in sys.argv:
from local_settings import *
else:
from production_settings import *
<commit_msg>Use production settings by default; Display settings version in use<commit_after>
|
# Project Settings - Settings that don't exist in settings.py that you want to
# add (e.g. USE_THOUSAND_SEPARATOR, GRAPPELLI_ADMIN_TITLE, CELERYBEAT_SCHEDULER,
# CELERYD_PREFETCH_MULTIPLIER, etc.)
#USE_THOUSAND_SEPARATOR = True
#GRAPPELLI_ADMIN_TITLE = ''
#import djcelery
#djcelery.setup_loader()
#CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
#CELERYD_PREFETCH_MULTIPLIER = 1
import os
import sys
version = 'PRODUCTION'
color = '[1;92m' # Bold High Intensity Green + Underline
if 'PRODUCTION' in os.environ and os.environ['PRODUCTION'].lower() in [True, 'y', 'yes', '1',]:
from local_settings import *
elif 'runserver' in sys.argv:
version = 'DEVELOPMENT'
color = '[1;93m' # Bold High Intensity Yellow + Underline
from local_settings import *
else:
from local_settings import *
print '\n{star} \x1b{color}{version}\x1b[0m {star}\n'.format(color=color,
star='\xE2\x98\x85',
version=version)
|
# Project Settings - Settings that don't exist in settings.py that you want to
# add (e.g. USE_THOUSAND_SEPARATOR, GRAPPELLI_ADMIN_TITLE, CELERYBEAT_SCHEDULER,
# CELERYD_PREFETCH_MULTIPLIER, etc.)
#USE_THOUSAND_SEPARATOR = True
#GRAPPELLI_ADMIN_TITLE = ''
#import djcelery
#djcelery.setup_loader()
#CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
#CELERYD_PREFETCH_MULTIPLIER = 1
import os
import sys
if 'PRODUCTION' in os.environ and os.environ['PRODUCTION'].lower() in [True, 'y', 'yes', '1',]:
from production_settings import *
elif 'runserver' in sys.argv:
from local_settings import *
else:
from production_settings import *
Use production settings by default; Display settings version in use# Project Settings - Settings that don't exist in settings.py that you want to
# add (e.g. USE_THOUSAND_SEPARATOR, GRAPPELLI_ADMIN_TITLE, CELERYBEAT_SCHEDULER,
# CELERYD_PREFETCH_MULTIPLIER, etc.)
#USE_THOUSAND_SEPARATOR = True
#GRAPPELLI_ADMIN_TITLE = ''
#import djcelery
#djcelery.setup_loader()
#CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
#CELERYD_PREFETCH_MULTIPLIER = 1
import os
import sys
version = 'PRODUCTION'
color = '[1;92m' # Bold High Intensity Green + Underline
if 'PRODUCTION' in os.environ and os.environ['PRODUCTION'].lower() in [True, 'y', 'yes', '1',]:
from local_settings import *
elif 'runserver' in sys.argv:
version = 'DEVELOPMENT'
color = '[1;93m' # Bold High Intensity Yellow + Underline
from local_settings import *
else:
from local_settings import *
print '\n{star} \x1b{color}{version}\x1b[0m {star}\n'.format(color=color,
star='\xE2\x98\x85',
version=version)
|
<commit_before># Project Settings - Settings that don't exist in settings.py that you want to
# add (e.g. USE_THOUSAND_SEPARATOR, GRAPPELLI_ADMIN_TITLE, CELERYBEAT_SCHEDULER,
# CELERYD_PREFETCH_MULTIPLIER, etc.)
#USE_THOUSAND_SEPARATOR = True
#GRAPPELLI_ADMIN_TITLE = ''
#import djcelery
#djcelery.setup_loader()
#CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
#CELERYD_PREFETCH_MULTIPLIER = 1
import os
import sys
if 'PRODUCTION' in os.environ and os.environ['PRODUCTION'].lower() in [True, 'y', 'yes', '1',]:
from production_settings import *
elif 'runserver' in sys.argv:
from local_settings import *
else:
from production_settings import *
<commit_msg>Use production settings by default; Display settings version in use<commit_after># Project Settings - Settings that don't exist in settings.py that you want to
# add (e.g. USE_THOUSAND_SEPARATOR, GRAPPELLI_ADMIN_TITLE, CELERYBEAT_SCHEDULER,
# CELERYD_PREFETCH_MULTIPLIER, etc.)
#USE_THOUSAND_SEPARATOR = True
#GRAPPELLI_ADMIN_TITLE = ''
#import djcelery
#djcelery.setup_loader()
#CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
#CELERYD_PREFETCH_MULTIPLIER = 1
import os
import sys
version = 'PRODUCTION'
color = '[1;92m' # Bold High Intensity Green + Underline
if 'PRODUCTION' in os.environ and os.environ['PRODUCTION'].lower() in [True, 'y', 'yes', '1',]:
from local_settings import *
elif 'runserver' in sys.argv:
version = 'DEVELOPMENT'
color = '[1;93m' # Bold High Intensity Yellow + Underline
from local_settings import *
else:
from local_settings import *
print '\n{star} \x1b{color}{version}\x1b[0m {star}\n'.format(color=color,
star='\xE2\x98\x85',
version=version)
|
c321a8fea477608172ac9f0421b8b3318ff6d388
|
carbonitex/carbonitex.py
|
carbonitex/carbonitex.py
|
import asyncio
from urllib.request import urlopen
from jshbot import configurations
from jshbot.exceptions import BotException
from jshbot.utilities import future
__version__ = '0.1.0'
EXCEPTION = 'Carbonitex Data Pusher'
uses_configuration = True
def get_commands():
return []
async def bot_on_ready_boot(bot):
"""Periodically sends a POST request to Carbonitex."""
carbonitex_key = configurations.get(bot, __name__, key='key')
use_loop = configurations.get(bot, __name__, key='enabled')
while use_loop:
print("In Carbonitex loop")
await asyncio.sleep(60*60*2) # 2 hour delay
try:
await future(
urlopen, 'https://www.carbonitex.net/discord/data/botdata.php',
data={'key': carbonitex_key, 'servercount': len(bot.servers)})
except Exception as e:
raise BotException(
EXCEPTION, "Failed to update Carbonitex data:", e)
|
import asyncio
from urllib.request import urlopen
from jshbot import configurations
from jshbot.exceptions import BotException
from jshbot.utilities import future
__version__ = '0.1.1'
EXCEPTION = 'Carbonitex Data Pusher'
uses_configuration = True
def get_commands():
    """Return the commands this plugin registers (none)."""
    return []
async def bot_on_ready_boot(bot):
    """Periodically pushes the current server count to Carbonitex.

    Runs forever (one POST every 2 hours) while the plugin is enabled in
    the configuration.  The POST body carries the Carbonitex API key and
    the total number of servers across all bot instances.

    Raises:
        BotException: if the POST request fails; note that the raise
            also terminates the update loop.
    """
    from urllib.parse import urlencode
    carbonitex_key = configurations.get(bot, __name__, key='key')
    use_loop = configurations.get(bot, __name__, key='enabled')
    while use_loop:
        print("In Carbonitex loop")
        await asyncio.sleep(60*60*2)  # 2 hour delay
        # Aggregate the server count over every running instance of the bot.
        servercount = sum(len(it.servers) for it in bot.all_instances)
        # urlopen() requires the POST payload as URL-encoded bytes; passing
        # a plain dict raises TypeError before the request is ever sent.
        payload = urlencode(
            {'key': carbonitex_key, 'servercount': servercount}).encode('utf-8')
        try:
            await future(
                urlopen, 'https://www.carbonitex.net/discord/data/botdata.php',
                data=payload)
        except Exception as e:
            raise BotException(
                EXCEPTION, "Failed to update Carbonitex data:", e)
|
Adjust for multiple bot instances
|
Adjust for multiple bot instances
|
Python
|
mit
|
jkchen2/JshBot-plugins
|
import asyncio
from urllib.request import urlopen
from jshbot import configurations
from jshbot.exceptions import BotException
from jshbot.utilities import future
__version__ = '0.1.0'
EXCEPTION = 'Carbonitex Data Pusher'
uses_configuration = True
def get_commands():
return []
async def bot_on_ready_boot(bot):
"""Periodically sends a POST request to Carbonitex."""
carbonitex_key = configurations.get(bot, __name__, key='key')
use_loop = configurations.get(bot, __name__, key='enabled')
while use_loop:
print("In Carbonitex loop")
await asyncio.sleep(60*60*2) # 2 hour delay
try:
await future(
urlopen, 'https://www.carbonitex.net/discord/data/botdata.php',
data={'key': carbonitex_key, 'servercount': len(bot.servers)})
except Exception as e:
raise BotException(
EXCEPTION, "Failed to update Carbonitex data:", e)
Adjust for multiple bot instances
|
import asyncio
from urllib.request import urlopen
from jshbot import configurations
from jshbot.exceptions import BotException
from jshbot.utilities import future
__version__ = '0.1.1'
EXCEPTION = 'Carbonitex Data Pusher'
uses_configuration = True
def get_commands():
return []
async def bot_on_ready_boot(bot):
"""Periodically sends a POST request to Carbonitex."""
carbonitex_key = configurations.get(bot, __name__, key='key')
use_loop = configurations.get(bot, __name__, key='enabled')
while use_loop:
print("In Carbonitex loop")
await asyncio.sleep(60*60*2) # 2 hour delay
servercount = sum(len(it.servers) for it in bot.all_instances)
try:
await future(
urlopen, 'https://www.carbonitex.net/discord/data/botdata.php',
data={'key': carbonitex_key, 'servercount': servercount})
except Exception as e:
raise BotException(
EXCEPTION, "Failed to update Carbonitex data:", e)
|
<commit_before>import asyncio
from urllib.request import urlopen
from jshbot import configurations
from jshbot.exceptions import BotException
from jshbot.utilities import future
__version__ = '0.1.0'
EXCEPTION = 'Carbonitex Data Pusher'
uses_configuration = True
def get_commands():
return []
async def bot_on_ready_boot(bot):
"""Periodically sends a POST request to Carbonitex."""
carbonitex_key = configurations.get(bot, __name__, key='key')
use_loop = configurations.get(bot, __name__, key='enabled')
while use_loop:
print("In Carbonitex loop")
await asyncio.sleep(60*60*2) # 2 hour delay
try:
await future(
urlopen, 'https://www.carbonitex.net/discord/data/botdata.php',
data={'key': carbonitex_key, 'servercount': len(bot.servers)})
except Exception as e:
raise BotException(
EXCEPTION, "Failed to update Carbonitex data:", e)
<commit_msg>Adjust for multiple bot instances<commit_after>
|
import asyncio
from urllib.request import urlopen
from jshbot import configurations
from jshbot.exceptions import BotException
from jshbot.utilities import future
__version__ = '0.1.1'
EXCEPTION = 'Carbonitex Data Pusher'
uses_configuration = True
def get_commands():
return []
async def bot_on_ready_boot(bot):
"""Periodically sends a POST request to Carbonitex."""
carbonitex_key = configurations.get(bot, __name__, key='key')
use_loop = configurations.get(bot, __name__, key='enabled')
while use_loop:
print("In Carbonitex loop")
await asyncio.sleep(60*60*2) # 2 hour delay
servercount = sum(len(it.servers) for it in bot.all_instances)
try:
await future(
urlopen, 'https://www.carbonitex.net/discord/data/botdata.php',
data={'key': carbonitex_key, 'servercount': servercount})
except Exception as e:
raise BotException(
EXCEPTION, "Failed to update Carbonitex data:", e)
|
import asyncio
from urllib.request import urlopen
from jshbot import configurations
from jshbot.exceptions import BotException
from jshbot.utilities import future
__version__ = '0.1.0'
EXCEPTION = 'Carbonitex Data Pusher'
uses_configuration = True
def get_commands():
return []
async def bot_on_ready_boot(bot):
"""Periodically sends a POST request to Carbonitex."""
carbonitex_key = configurations.get(bot, __name__, key='key')
use_loop = configurations.get(bot, __name__, key='enabled')
while use_loop:
print("In Carbonitex loop")
await asyncio.sleep(60*60*2) # 2 hour delay
try:
await future(
urlopen, 'https://www.carbonitex.net/discord/data/botdata.php',
data={'key': carbonitex_key, 'servercount': len(bot.servers)})
except Exception as e:
raise BotException(
EXCEPTION, "Failed to update Carbonitex data:", e)
Adjust for multiple bot instancesimport asyncio
from urllib.request import urlopen
from jshbot import configurations
from jshbot.exceptions import BotException
from jshbot.utilities import future
__version__ = '0.1.1'
EXCEPTION = 'Carbonitex Data Pusher'
uses_configuration = True
def get_commands():
return []
async def bot_on_ready_boot(bot):
"""Periodically sends a POST request to Carbonitex."""
carbonitex_key = configurations.get(bot, __name__, key='key')
use_loop = configurations.get(bot, __name__, key='enabled')
while use_loop:
print("In Carbonitex loop")
await asyncio.sleep(60*60*2) # 2 hour delay
servercount = sum(len(it.servers) for it in bot.all_instances)
try:
await future(
urlopen, 'https://www.carbonitex.net/discord/data/botdata.php',
data={'key': carbonitex_key, 'servercount': servercount})
except Exception as e:
raise BotException(
EXCEPTION, "Failed to update Carbonitex data:", e)
|
<commit_before>import asyncio
from urllib.request import urlopen
from jshbot import configurations
from jshbot.exceptions import BotException
from jshbot.utilities import future
__version__ = '0.1.0'
EXCEPTION = 'Carbonitex Data Pusher'
uses_configuration = True
def get_commands():
return []
async def bot_on_ready_boot(bot):
"""Periodically sends a POST request to Carbonitex."""
carbonitex_key = configurations.get(bot, __name__, key='key')
use_loop = configurations.get(bot, __name__, key='enabled')
while use_loop:
print("In Carbonitex loop")
await asyncio.sleep(60*60*2) # 2 hour delay
try:
await future(
urlopen, 'https://www.carbonitex.net/discord/data/botdata.php',
data={'key': carbonitex_key, 'servercount': len(bot.servers)})
except Exception as e:
raise BotException(
EXCEPTION, "Failed to update Carbonitex data:", e)
<commit_msg>Adjust for multiple bot instances<commit_after>import asyncio
from urllib.request import urlopen
from jshbot import configurations
from jshbot.exceptions import BotException
from jshbot.utilities import future
__version__ = '0.1.1'
EXCEPTION = 'Carbonitex Data Pusher'
uses_configuration = True
def get_commands():
return []
async def bot_on_ready_boot(bot):
"""Periodically sends a POST request to Carbonitex."""
carbonitex_key = configurations.get(bot, __name__, key='key')
use_loop = configurations.get(bot, __name__, key='enabled')
while use_loop:
print("In Carbonitex loop")
await asyncio.sleep(60*60*2) # 2 hour delay
servercount = sum(len(it.servers) for it in bot.all_instances)
try:
await future(
urlopen, 'https://www.carbonitex.net/discord/data/botdata.php',
data={'key': carbonitex_key, 'servercount': servercount})
except Exception as e:
raise BotException(
EXCEPTION, "Failed to update Carbonitex data:", e)
|
8994c346bcd319e97e93b9eb66707df1016d28e9
|
nilmtk/__init__.py
|
nilmtk/__init__.py
|
# re-enable deprecation warnings
import warnings
warnings.simplefilter('default')
from nilmtk import *
from nilmtk.version import version as __version__
from nilmtk.timeframe import TimeFrame
from nilmtk.elecmeter import ElecMeter
from nilmtk.datastore import DataStore, HDFDataStore, CSVDataStore, Key
from nilmtk.metergroup import MeterGroup
from nilmtk.appliance import Appliance
from nilmtk.building import Building
from nilmtk.dataset import DataSet
global_meter_group = MeterGroup()
def teardown_package():
"""Nosetests package teardown function (run when tests are done).
See http://nose.readthedocs.org/en/latest/writing_tests.html#test-packages
Uses git to reset data_dir after tests have run.
"""
from nilmtk.tests.testingtools import data_dir
import subprocess
cmd = "git checkout -- {}".format(data_dir())
try:
subprocess.check_output(cmd, shell=True, cwd=data_dir())
except Exception:
print("Failed to run '{}'".format(cmd))
raise
else:
print("Succeeded in running '{}'".format(cmd))
|
# re-enable deprecation warnings
import warnings
warnings.simplefilter('default')
from nilmtk import *
from nilmtk.version import version as __version__
from nilmtk.timeframe import TimeFrame
from nilmtk.elecmeter import ElecMeter
from nilmtk.datastore import DataStore, HDFDataStore, CSVDataStore, Key
from nilmtk.metergroup import MeterGroup
from nilmtk.appliance import Appliance
from nilmtk.building import Building
from nilmtk.dataset import DataSet
global_meter_group = MeterGroup()
def teardown_package():
"""Nosetests package teardown function (run when tests are done).
See http://nose.readthedocs.org/en/latest/writing_tests.html#test-packages
Uses git to reset data_dir after tests have run.
"""
from nilmtk.tests.testingtools import data_dir
import subprocess
#Workaround for open .h5 files on Windows
from tables.file import _open_files
_open_files.close_all()
cmd = "git checkout -- {}".format(data_dir())
try:
subprocess.check_output(cmd, shell=True, cwd=data_dir())
except Exception:
print("Failed to run '{}'".format(cmd))
raise
else:
print("Succeeded in running '{}'".format(cmd))
|
Make sure all .h5 files are closed before trying to remove them while testing
|
Make sure all .h5 files are closed before trying to remove them while testing
|
Python
|
apache-2.0
|
nilmtk/nilmtk,nilmtk/nilmtk
|
# re-enable deprecation warnings
import warnings
warnings.simplefilter('default')
from nilmtk import *
from nilmtk.version import version as __version__
from nilmtk.timeframe import TimeFrame
from nilmtk.elecmeter import ElecMeter
from nilmtk.datastore import DataStore, HDFDataStore, CSVDataStore, Key
from nilmtk.metergroup import MeterGroup
from nilmtk.appliance import Appliance
from nilmtk.building import Building
from nilmtk.dataset import DataSet
global_meter_group = MeterGroup()
def teardown_package():
"""Nosetests package teardown function (run when tests are done).
See http://nose.readthedocs.org/en/latest/writing_tests.html#test-packages
Uses git to reset data_dir after tests have run.
"""
from nilmtk.tests.testingtools import data_dir
import subprocess
cmd = "git checkout -- {}".format(data_dir())
try:
subprocess.check_output(cmd, shell=True, cwd=data_dir())
except Exception:
print("Failed to run '{}'".format(cmd))
raise
else:
print("Succeeded in running '{}'".format(cmd))
Make sure all .h5 files are closed before trying to remove them while testing
|
# re-enable deprecation warnings
import warnings
warnings.simplefilter('default')
from nilmtk import *
from nilmtk.version import version as __version__
from nilmtk.timeframe import TimeFrame
from nilmtk.elecmeter import ElecMeter
from nilmtk.datastore import DataStore, HDFDataStore, CSVDataStore, Key
from nilmtk.metergroup import MeterGroup
from nilmtk.appliance import Appliance
from nilmtk.building import Building
from nilmtk.dataset import DataSet
global_meter_group = MeterGroup()
def teardown_package():
"""Nosetests package teardown function (run when tests are done).
See http://nose.readthedocs.org/en/latest/writing_tests.html#test-packages
Uses git to reset data_dir after tests have run.
"""
from nilmtk.tests.testingtools import data_dir
import subprocess
#Workaround for open .h5 files on Windows
from tables.file import _open_files
_open_files.close_all()
cmd = "git checkout -- {}".format(data_dir())
try:
subprocess.check_output(cmd, shell=True, cwd=data_dir())
except Exception:
print("Failed to run '{}'".format(cmd))
raise
else:
print("Succeeded in running '{}'".format(cmd))
|
<commit_before># re-enable deprecation warnings
import warnings
warnings.simplefilter('default')
from nilmtk import *
from nilmtk.version import version as __version__
from nilmtk.timeframe import TimeFrame
from nilmtk.elecmeter import ElecMeter
from nilmtk.datastore import DataStore, HDFDataStore, CSVDataStore, Key
from nilmtk.metergroup import MeterGroup
from nilmtk.appliance import Appliance
from nilmtk.building import Building
from nilmtk.dataset import DataSet
global_meter_group = MeterGroup()
def teardown_package():
"""Nosetests package teardown function (run when tests are done).
See http://nose.readthedocs.org/en/latest/writing_tests.html#test-packages
Uses git to reset data_dir after tests have run.
"""
from nilmtk.tests.testingtools import data_dir
import subprocess
cmd = "git checkout -- {}".format(data_dir())
try:
subprocess.check_output(cmd, shell=True, cwd=data_dir())
except Exception:
print("Failed to run '{}'".format(cmd))
raise
else:
print("Succeeded in running '{}'".format(cmd))
<commit_msg>Make sure all .h5 files are closed before trying to remove them while testing<commit_after>
|
# re-enable deprecation warnings
import warnings
warnings.simplefilter('default')
from nilmtk import *
from nilmtk.version import version as __version__
from nilmtk.timeframe import TimeFrame
from nilmtk.elecmeter import ElecMeter
from nilmtk.datastore import DataStore, HDFDataStore, CSVDataStore, Key
from nilmtk.metergroup import MeterGroup
from nilmtk.appliance import Appliance
from nilmtk.building import Building
from nilmtk.dataset import DataSet
global_meter_group = MeterGroup()
def teardown_package():
"""Nosetests package teardown function (run when tests are done).
See http://nose.readthedocs.org/en/latest/writing_tests.html#test-packages
Uses git to reset data_dir after tests have run.
"""
from nilmtk.tests.testingtools import data_dir
import subprocess
#Workaround for open .h5 files on Windows
from tables.file import _open_files
_open_files.close_all()
cmd = "git checkout -- {}".format(data_dir())
try:
subprocess.check_output(cmd, shell=True, cwd=data_dir())
except Exception:
print("Failed to run '{}'".format(cmd))
raise
else:
print("Succeeded in running '{}'".format(cmd))
|
# re-enable deprecation warnings
import warnings
warnings.simplefilter('default')
from nilmtk import *
from nilmtk.version import version as __version__
from nilmtk.timeframe import TimeFrame
from nilmtk.elecmeter import ElecMeter
from nilmtk.datastore import DataStore, HDFDataStore, CSVDataStore, Key
from nilmtk.metergroup import MeterGroup
from nilmtk.appliance import Appliance
from nilmtk.building import Building
from nilmtk.dataset import DataSet
global_meter_group = MeterGroup()
def teardown_package():
"""Nosetests package teardown function (run when tests are done).
See http://nose.readthedocs.org/en/latest/writing_tests.html#test-packages
Uses git to reset data_dir after tests have run.
"""
from nilmtk.tests.testingtools import data_dir
import subprocess
cmd = "git checkout -- {}".format(data_dir())
try:
subprocess.check_output(cmd, shell=True, cwd=data_dir())
except Exception:
print("Failed to run '{}'".format(cmd))
raise
else:
print("Succeeded in running '{}'".format(cmd))
Make sure all .h5 files are closed before trying to remove them while testing# re-enable deprecation warnings
import warnings
warnings.simplefilter('default')
from nilmtk import *
from nilmtk.version import version as __version__
from nilmtk.timeframe import TimeFrame
from nilmtk.elecmeter import ElecMeter
from nilmtk.datastore import DataStore, HDFDataStore, CSVDataStore, Key
from nilmtk.metergroup import MeterGroup
from nilmtk.appliance import Appliance
from nilmtk.building import Building
from nilmtk.dataset import DataSet
global_meter_group = MeterGroup()
def teardown_package():
"""Nosetests package teardown function (run when tests are done).
See http://nose.readthedocs.org/en/latest/writing_tests.html#test-packages
Uses git to reset data_dir after tests have run.
"""
from nilmtk.tests.testingtools import data_dir
import subprocess
#Workaround for open .h5 files on Windows
from tables.file import _open_files
_open_files.close_all()
cmd = "git checkout -- {}".format(data_dir())
try:
subprocess.check_output(cmd, shell=True, cwd=data_dir())
except Exception:
print("Failed to run '{}'".format(cmd))
raise
else:
print("Succeeded in running '{}'".format(cmd))
|
<commit_before># re-enable deprecation warnings
import warnings
warnings.simplefilter('default')
from nilmtk import *
from nilmtk.version import version as __version__
from nilmtk.timeframe import TimeFrame
from nilmtk.elecmeter import ElecMeter
from nilmtk.datastore import DataStore, HDFDataStore, CSVDataStore, Key
from nilmtk.metergroup import MeterGroup
from nilmtk.appliance import Appliance
from nilmtk.building import Building
from nilmtk.dataset import DataSet
global_meter_group = MeterGroup()
def teardown_package():
"""Nosetests package teardown function (run when tests are done).
See http://nose.readthedocs.org/en/latest/writing_tests.html#test-packages
Uses git to reset data_dir after tests have run.
"""
from nilmtk.tests.testingtools import data_dir
import subprocess
cmd = "git checkout -- {}".format(data_dir())
try:
subprocess.check_output(cmd, shell=True, cwd=data_dir())
except Exception:
print("Failed to run '{}'".format(cmd))
raise
else:
print("Succeeded in running '{}'".format(cmd))
<commit_msg>Make sure all .h5 files are closed before trying to remove them while testing<commit_after># re-enable deprecation warnings
import warnings
warnings.simplefilter('default')
from nilmtk import *
from nilmtk.version import version as __version__
from nilmtk.timeframe import TimeFrame
from nilmtk.elecmeter import ElecMeter
from nilmtk.datastore import DataStore, HDFDataStore, CSVDataStore, Key
from nilmtk.metergroup import MeterGroup
from nilmtk.appliance import Appliance
from nilmtk.building import Building
from nilmtk.dataset import DataSet
global_meter_group = MeterGroup()
def teardown_package():
"""Nosetests package teardown function (run when tests are done).
See http://nose.readthedocs.org/en/latest/writing_tests.html#test-packages
Uses git to reset data_dir after tests have run.
"""
from nilmtk.tests.testingtools import data_dir
import subprocess
#Workaround for open .h5 files on Windows
from tables.file import _open_files
_open_files.close_all()
cmd = "git checkout -- {}".format(data_dir())
try:
subprocess.check_output(cmd, shell=True, cwd=data_dir())
except Exception:
print("Failed to run '{}'".format(cmd))
raise
else:
print("Succeeded in running '{}'".format(cmd))
|
9509389f871a20465740494dd32a8d581572dd63
|
grammpy/Rule.py
|
grammpy/Rule.py
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from .Constants import EPSILON
class Rule:
#TODO rules -> rule -> left/right -> rules
right = [EPSILON]
left = [EPSILON]
rule = ([EPSILON], [EPSILON])
rules = [([EPSILON], [EPSILON])]
__active = True
@staticmethod
def is_regular():
return False
@staticmethod
def is_contextfree():
return False
@staticmethod
def is_context():
return False
@staticmethod
def is_unrestricted():
return False
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from .Constants import EPSILON
class Rule:
#TODO rules -> rule -> left/right -> fromSymbol/toSymbol -> rules
fromSymbol = EPSILON
toSymbol = EPSILON
right = [EPSILON]
left = [EPSILON]
rule = ([EPSILON], [EPSILON])
rules = [([EPSILON], [EPSILON])]
__active = True
@staticmethod
def is_regular():
return False
@staticmethod
def is_contextfree():
return False
@staticmethod
def is_context():
return False
@staticmethod
def is_unrestricted():
return False
|
Add fromSymbol and toSymbol to rule
|
Add fromSymbol and toSymbol to rule
|
Python
|
mit
|
PatrikValkovic/grammpy
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from .Constants import EPSILON
class Rule:
#TODO rules -> rule -> left/right -> rules
right = [EPSILON]
left = [EPSILON]
rule = ([EPSILON], [EPSILON])
rules = [([EPSILON], [EPSILON])]
__active = True
@staticmethod
def is_regular():
return False
@staticmethod
def is_contextfree():
return False
@staticmethod
def is_context():
return False
@staticmethod
def is_unrestricted():
return False
Add fromSymbol and toSymbol to rule
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from .Constants import EPSILON
class Rule:
#TODO rules -> rule -> left/right -> fromSymbol/toSymbol -> rules
fromSymbol = EPSILON
toSymbol = EPSILON
right = [EPSILON]
left = [EPSILON]
rule = ([EPSILON], [EPSILON])
rules = [([EPSILON], [EPSILON])]
__active = True
@staticmethod
def is_regular():
return False
@staticmethod
def is_contextfree():
return False
@staticmethod
def is_context():
return False
@staticmethod
def is_unrestricted():
return False
|
<commit_before>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from .Constants import EPSILON
class Rule:
#TODO rules -> rule -> left/right -> rules
right = [EPSILON]
left = [EPSILON]
rule = ([EPSILON], [EPSILON])
rules = [([EPSILON], [EPSILON])]
__active = True
@staticmethod
def is_regular():
return False
@staticmethod
def is_contextfree():
return False
@staticmethod
def is_context():
return False
@staticmethod
def is_unrestricted():
return False
<commit_msg>Add fromSymbol and toSymbol to rule<commit_after>
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from .Constants import EPSILON
class Rule:
#TODO rules -> rule -> left/right -> fromSymbol/toSymbol -> rules
fromSymbol = EPSILON
toSymbol = EPSILON
right = [EPSILON]
left = [EPSILON]
rule = ([EPSILON], [EPSILON])
rules = [([EPSILON], [EPSILON])]
__active = True
@staticmethod
def is_regular():
return False
@staticmethod
def is_contextfree():
return False
@staticmethod
def is_context():
return False
@staticmethod
def is_unrestricted():
return False
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from .Constants import EPSILON
class Rule:
#TODO rules -> rule -> left/right -> rules
right = [EPSILON]
left = [EPSILON]
rule = ([EPSILON], [EPSILON])
rules = [([EPSILON], [EPSILON])]
__active = True
@staticmethod
def is_regular():
return False
@staticmethod
def is_contextfree():
return False
@staticmethod
def is_context():
return False
@staticmethod
def is_unrestricted():
return False
Add fromSymbol and toSymbol to rule#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from .Constants import EPSILON
class Rule:
#TODO rules -> rule -> left/right -> fromSymbol/toSymbol -> rules
fromSymbol = EPSILON
toSymbol = EPSILON
right = [EPSILON]
left = [EPSILON]
rule = ([EPSILON], [EPSILON])
rules = [([EPSILON], [EPSILON])]
__active = True
@staticmethod
def is_regular():
return False
@staticmethod
def is_contextfree():
return False
@staticmethod
def is_context():
return False
@staticmethod
def is_unrestricted():
return False
|
<commit_before>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from .Constants import EPSILON
class Rule:
#TODO rules -> rule -> left/right -> rules
right = [EPSILON]
left = [EPSILON]
rule = ([EPSILON], [EPSILON])
rules = [([EPSILON], [EPSILON])]
__active = True
@staticmethod
def is_regular():
return False
@staticmethod
def is_contextfree():
return False
@staticmethod
def is_context():
return False
@staticmethod
def is_unrestricted():
return False
<commit_msg>Add fromSymbol and toSymbol to rule<commit_after>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from .Constants import EPSILON
class Rule:
#TODO rules -> rule -> left/right -> fromSymbol/toSymbol -> rules
fromSymbol = EPSILON
toSymbol = EPSILON
right = [EPSILON]
left = [EPSILON]
rule = ([EPSILON], [EPSILON])
rules = [([EPSILON], [EPSILON])]
__active = True
@staticmethod
def is_regular():
return False
@staticmethod
def is_contextfree():
return False
@staticmethod
def is_context():
return False
@staticmethod
def is_unrestricted():
return False
|
1cc892fd521ae33b1d492004411db3f1392295c4
|
enhydris/telemetry/tasks.py
|
enhydris/telemetry/tasks.py
|
from django.core.cache import cache
from celery.utils.log import get_task_logger
from enhydris.celery import app
from enhydris.telemetry.models import Telemetry
FETCH_TIMEOUT = 300
LOCK_TIMEOUT = FETCH_TIMEOUT + 60
logger = get_task_logger(__name__)
@app.task
def fetch_all_telemetry_data():
for telemetry in Telemetry.objects.all():
if True:
fetch_telemetry_data.delay(telemetry.id)
@app.task(bind=True, soft_time_limit=FETCH_TIMEOUT, time_limit=FETCH_TIMEOUT + 10)
def fetch_telemetry_data(self, telemetry_id):
telemetry = Telemetry.objects.get(id=telemetry_id)
lock_id = f"telemetry-{telemetry_id}"
acquired_lock = cache.add(lock_id, self.app.oid, LOCK_TIMEOUT)
if acquired_lock:
telemetry.fetch()
cache.delete(lock_id)
else:
lock_owner = cache.get(lock_id)
logger.error(
f"Cannot acquire lock for fetching telemetry with id={telemetry.id}; "
f"apparently the lock is owned by {lock_owner}."
)
|
from django.core.cache import cache
from celery.utils.log import get_task_logger
from enhydris.celery import app
from enhydris.telemetry.models import Telemetry
FETCH_TIMEOUT = 300
LOCK_TIMEOUT = FETCH_TIMEOUT + 60
logger = get_task_logger(__name__)
@app.task
def fetch_all_telemetry_data():
for telemetry in Telemetry.objects.all():
if telemetry.is_due:
fetch_telemetry_data.delay(telemetry.id)
@app.task(bind=True, soft_time_limit=FETCH_TIMEOUT, time_limit=FETCH_TIMEOUT + 10)
def fetch_telemetry_data(self, telemetry_id):
telemetry = Telemetry.objects.get(id=telemetry_id)
lock_id = f"telemetry-{telemetry_id}"
acquired_lock = cache.add(lock_id, self.app.oid, LOCK_TIMEOUT)
if acquired_lock:
telemetry.fetch()
cache.delete(lock_id)
else:
lock_owner = cache.get(lock_id)
logger.error(
f"Cannot acquire lock for fetching telemetry with id={telemetry.id}; "
f"apparently the lock is owned by {lock_owner}."
)
|
Fix error in telemetry task
|
Fix error in telemetry task
A condition had been changed to always match for debugging purposes, and
was accidentally committed that way.
|
Python
|
agpl-3.0
|
openmeteo/enhydris,openmeteo/enhydris,openmeteo/enhydris,aptiko/enhydris,aptiko/enhydris,aptiko/enhydris
|
from django.core.cache import cache
from celery.utils.log import get_task_logger
from enhydris.celery import app
from enhydris.telemetry.models import Telemetry
FETCH_TIMEOUT = 300
LOCK_TIMEOUT = FETCH_TIMEOUT + 60
logger = get_task_logger(__name__)
@app.task
def fetch_all_telemetry_data():
for telemetry in Telemetry.objects.all():
if True:
fetch_telemetry_data.delay(telemetry.id)
@app.task(bind=True, soft_time_limit=FETCH_TIMEOUT, time_limit=FETCH_TIMEOUT + 10)
def fetch_telemetry_data(self, telemetry_id):
telemetry = Telemetry.objects.get(id=telemetry_id)
lock_id = f"telemetry-{telemetry_id}"
acquired_lock = cache.add(lock_id, self.app.oid, LOCK_TIMEOUT)
if acquired_lock:
telemetry.fetch()
cache.delete(lock_id)
else:
lock_owner = cache.get(lock_id)
logger.error(
f"Cannot acquire lock for fetching telemetry with id={telemetry.id}; "
f"apparently the lock is owned by {lock_owner}."
)
Fix error in telemetry task
A condition had been changed to always match for debugging purposes, and
was accidentally committed that way.
|
from django.core.cache import cache
from celery.utils.log import get_task_logger
from enhydris.celery import app
from enhydris.telemetry.models import Telemetry
FETCH_TIMEOUT = 300
LOCK_TIMEOUT = FETCH_TIMEOUT + 60
logger = get_task_logger(__name__)
@app.task
def fetch_all_telemetry_data():
for telemetry in Telemetry.objects.all():
if telemetry.is_due:
fetch_telemetry_data.delay(telemetry.id)
@app.task(bind=True, soft_time_limit=FETCH_TIMEOUT, time_limit=FETCH_TIMEOUT + 10)
def fetch_telemetry_data(self, telemetry_id):
telemetry = Telemetry.objects.get(id=telemetry_id)
lock_id = f"telemetry-{telemetry_id}"
acquired_lock = cache.add(lock_id, self.app.oid, LOCK_TIMEOUT)
if acquired_lock:
telemetry.fetch()
cache.delete(lock_id)
else:
lock_owner = cache.get(lock_id)
logger.error(
f"Cannot acquire lock for fetching telemetry with id={telemetry.id}; "
f"apparently the lock is owned by {lock_owner}."
)
|
<commit_before>from django.core.cache import cache
from celery.utils.log import get_task_logger
from enhydris.celery import app
from enhydris.telemetry.models import Telemetry
FETCH_TIMEOUT = 300
LOCK_TIMEOUT = FETCH_TIMEOUT + 60
logger = get_task_logger(__name__)
@app.task
def fetch_all_telemetry_data():
for telemetry in Telemetry.objects.all():
if True:
fetch_telemetry_data.delay(telemetry.id)
@app.task(bind=True, soft_time_limit=FETCH_TIMEOUT, time_limit=FETCH_TIMEOUT + 10)
def fetch_telemetry_data(self, telemetry_id):
telemetry = Telemetry.objects.get(id=telemetry_id)
lock_id = f"telemetry-{telemetry_id}"
acquired_lock = cache.add(lock_id, self.app.oid, LOCK_TIMEOUT)
if acquired_lock:
telemetry.fetch()
cache.delete(lock_id)
else:
lock_owner = cache.get(lock_id)
logger.error(
f"Cannot acquire lock for fetching telemetry with id={telemetry.id}; "
f"apparently the lock is owned by {lock_owner}."
)
<commit_msg>Fix error in telemetry task
A condition had been changed to always match for debugging purposes, and
was accidentally committed that way.<commit_after>
|
from django.core.cache import cache
from celery.utils.log import get_task_logger
from enhydris.celery import app
from enhydris.telemetry.models import Telemetry
FETCH_TIMEOUT = 300
LOCK_TIMEOUT = FETCH_TIMEOUT + 60
logger = get_task_logger(__name__)
@app.task
def fetch_all_telemetry_data():
for telemetry in Telemetry.objects.all():
if telemetry.is_due:
fetch_telemetry_data.delay(telemetry.id)
@app.task(bind=True, soft_time_limit=FETCH_TIMEOUT, time_limit=FETCH_TIMEOUT + 10)
def fetch_telemetry_data(self, telemetry_id):
telemetry = Telemetry.objects.get(id=telemetry_id)
lock_id = f"telemetry-{telemetry_id}"
acquired_lock = cache.add(lock_id, self.app.oid, LOCK_TIMEOUT)
if acquired_lock:
telemetry.fetch()
cache.delete(lock_id)
else:
lock_owner = cache.get(lock_id)
logger.error(
f"Cannot acquire lock for fetching telemetry with id={telemetry.id}; "
f"apparently the lock is owned by {lock_owner}."
)
|
from django.core.cache import cache
from celery.utils.log import get_task_logger
from enhydris.celery import app
from enhydris.telemetry.models import Telemetry
FETCH_TIMEOUT = 300
LOCK_TIMEOUT = FETCH_TIMEOUT + 60
logger = get_task_logger(__name__)
@app.task
def fetch_all_telemetry_data():
for telemetry in Telemetry.objects.all():
if True:
fetch_telemetry_data.delay(telemetry.id)
@app.task(bind=True, soft_time_limit=FETCH_TIMEOUT, time_limit=FETCH_TIMEOUT + 10)
def fetch_telemetry_data(self, telemetry_id):
telemetry = Telemetry.objects.get(id=telemetry_id)
lock_id = f"telemetry-{telemetry_id}"
acquired_lock = cache.add(lock_id, self.app.oid, LOCK_TIMEOUT)
if acquired_lock:
telemetry.fetch()
cache.delete(lock_id)
else:
lock_owner = cache.get(lock_id)
logger.error(
f"Cannot acquire lock for fetching telemetry with id={telemetry.id}; "
f"apparently the lock is owned by {lock_owner}."
)
Fix error in telemetry task
A condition had been changed to always match for debugging purposes, and
was accidentally committed that way.from django.core.cache import cache
from celery.utils.log import get_task_logger
from enhydris.celery import app
from enhydris.telemetry.models import Telemetry
FETCH_TIMEOUT = 300
LOCK_TIMEOUT = FETCH_TIMEOUT + 60
logger = get_task_logger(__name__)
@app.task
def fetch_all_telemetry_data():
for telemetry in Telemetry.objects.all():
if telemetry.is_due:
fetch_telemetry_data.delay(telemetry.id)
@app.task(bind=True, soft_time_limit=FETCH_TIMEOUT, time_limit=FETCH_TIMEOUT + 10)
def fetch_telemetry_data(self, telemetry_id):
telemetry = Telemetry.objects.get(id=telemetry_id)
lock_id = f"telemetry-{telemetry_id}"
acquired_lock = cache.add(lock_id, self.app.oid, LOCK_TIMEOUT)
if acquired_lock:
telemetry.fetch()
cache.delete(lock_id)
else:
lock_owner = cache.get(lock_id)
logger.error(
f"Cannot acquire lock for fetching telemetry with id={telemetry.id}; "
f"apparently the lock is owned by {lock_owner}."
)
|
<commit_before>from django.core.cache import cache
from celery.utils.log import get_task_logger
from enhydris.celery import app
from enhydris.telemetry.models import Telemetry
FETCH_TIMEOUT = 300
LOCK_TIMEOUT = FETCH_TIMEOUT + 60
logger = get_task_logger(__name__)
@app.task
def fetch_all_telemetry_data():
for telemetry in Telemetry.objects.all():
if True:
fetch_telemetry_data.delay(telemetry.id)
@app.task(bind=True, soft_time_limit=FETCH_TIMEOUT, time_limit=FETCH_TIMEOUT + 10)
def fetch_telemetry_data(self, telemetry_id):
telemetry = Telemetry.objects.get(id=telemetry_id)
lock_id = f"telemetry-{telemetry_id}"
acquired_lock = cache.add(lock_id, self.app.oid, LOCK_TIMEOUT)
if acquired_lock:
telemetry.fetch()
cache.delete(lock_id)
else:
lock_owner = cache.get(lock_id)
logger.error(
f"Cannot acquire lock for fetching telemetry with id={telemetry.id}; "
f"apparently the lock is owned by {lock_owner}."
)
<commit_msg>Fix error in telemetry task
A condition had been changed to always match for debugging purposes, and
was accidentally committed that way.<commit_after>from django.core.cache import cache
from celery.utils.log import get_task_logger
from enhydris.celery import app
from enhydris.telemetry.models import Telemetry
FETCH_TIMEOUT = 300
LOCK_TIMEOUT = FETCH_TIMEOUT + 60
logger = get_task_logger(__name__)
@app.task
def fetch_all_telemetry_data():
for telemetry in Telemetry.objects.all():
if telemetry.is_due:
fetch_telemetry_data.delay(telemetry.id)
@app.task(bind=True, soft_time_limit=FETCH_TIMEOUT, time_limit=FETCH_TIMEOUT + 10)
def fetch_telemetry_data(self, telemetry_id):
telemetry = Telemetry.objects.get(id=telemetry_id)
lock_id = f"telemetry-{telemetry_id}"
acquired_lock = cache.add(lock_id, self.app.oid, LOCK_TIMEOUT)
if acquired_lock:
telemetry.fetch()
cache.delete(lock_id)
else:
lock_owner = cache.get(lock_id)
logger.error(
f"Cannot acquire lock for fetching telemetry with id={telemetry.id}; "
f"apparently the lock is owned by {lock_owner}."
)
|
b8764f6045bbc1067806405ca2fba9c1622f997b
|
gweetr/utils.py
|
gweetr/utils.py
|
"""utils.py"""
import random
from pyechonest import config as echonest_config
from pyechonest import song as echonest_song
import rfc3987
from gweetr import app
from gweetr.exceptions import GweetrError
echonest_config.ECHO_NEST_API_KEY = app.config['ECHO_NEST_API_KEY']
def fetch_track(track_params):
"""
Fetch a track from 7digital via the Echo Nest API.
Available track parameters are listed at
http://developer.echonest.com/docs/v4/song.html#search
"""
try:
search_results = echonest_song.search(
buckets=['id:7digital-US', 'tracks'],
limit=True,
results=app.config['ECHO_NEST_SONG_RESULTS'],
**track_params
)
except TypeError as exc:
raise GweetrError("Received unknown track parameter: %s" % str(exc))
if search_results:
song_obj = random.choice(search_results)
tracks = song_obj.get_tracks('7digital-US')
track_data = tracks[0]
track = {
'title': song_obj.title,
'artist': song_obj.artist_name,
'url': track_data['preview_url']
}
return track
def is_valid_url(a_string):
"""Check if a string is a valid URL."""
match_obj = rfc3987.match(a_string, 'URI')
if match_obj:
return True
else:
return False
|
"""utils.py"""
import random
from pyechonest import config as echonest_config
from pyechonest import song as echonest_song
import rfc3987
from gweetr import app
from gweetr.exceptions import GweetrError
echonest_config.ECHO_NEST_API_KEY = app.config['ECHO_NEST_API_KEY']
def fetch_track(track_params):
"""
Fetch a track from 7digital via the Echo Nest API.
Available track parameters are listed at
http://developer.echonest.com/docs/v4/song.html#search
"""
try:
search_results = echonest_song.search(
buckets=['id:7digital-US', 'tracks'],
limit=True,
results=app.config['ECHO_NEST_SONG_RESULTS'],
**track_params
)
except TypeError as exc:
raise GweetrError("Received unknown track parameter: %s" % str(exc))
if search_results:
song_obj = random.choice(search_results)
tracks = song_obj.get_tracks('7digital-US')
track_data = tracks[0]
track = {
'title': song_obj.title,
'artist': song_obj.artist_name,
'url': track_data['preview_url']
}
return track
def is_valid_url(a_string):
"""Check if a string is a valid URL."""
return rfc3987.match(a_string, 'URI')
|
Remove unnecessary if true/else false
|
Remove unnecessary if true/else false
|
Python
|
mit
|
jbarbuto/gweetr
|
"""utils.py"""
import random
from pyechonest import config as echonest_config
from pyechonest import song as echonest_song
import rfc3987
from gweetr import app
from gweetr.exceptions import GweetrError
echonest_config.ECHO_NEST_API_KEY = app.config['ECHO_NEST_API_KEY']
def fetch_track(track_params):
"""
Fetch a track from 7digital via the Echo Nest API.
Available track parameters are listed at
http://developer.echonest.com/docs/v4/song.html#search
"""
try:
search_results = echonest_song.search(
buckets=['id:7digital-US', 'tracks'],
limit=True,
results=app.config['ECHO_NEST_SONG_RESULTS'],
**track_params
)
except TypeError as exc:
raise GweetrError("Received unknown track parameter: %s" % str(exc))
if search_results:
song_obj = random.choice(search_results)
tracks = song_obj.get_tracks('7digital-US')
track_data = tracks[0]
track = {
'title': song_obj.title,
'artist': song_obj.artist_name,
'url': track_data['preview_url']
}
return track
def is_valid_url(a_string):
"""Check if a string is a valid URL."""
match_obj = rfc3987.match(a_string, 'URI')
if match_obj:
return True
else:
return False
Remove unnecessary if true/else false
|
"""utils.py"""
import random
from pyechonest import config as echonest_config
from pyechonest import song as echonest_song
import rfc3987
from gweetr import app
from gweetr.exceptions import GweetrError
echonest_config.ECHO_NEST_API_KEY = app.config['ECHO_NEST_API_KEY']
def fetch_track(track_params):
"""
Fetch a track from 7digital via the Echo Nest API.
Available track parameters are listed at
http://developer.echonest.com/docs/v4/song.html#search
"""
try:
search_results = echonest_song.search(
buckets=['id:7digital-US', 'tracks'],
limit=True,
results=app.config['ECHO_NEST_SONG_RESULTS'],
**track_params
)
except TypeError as exc:
raise GweetrError("Received unknown track parameter: %s" % str(exc))
if search_results:
song_obj = random.choice(search_results)
tracks = song_obj.get_tracks('7digital-US')
track_data = tracks[0]
track = {
'title': song_obj.title,
'artist': song_obj.artist_name,
'url': track_data['preview_url']
}
return track
def is_valid_url(a_string):
"""Check if a string is a valid URL."""
return rfc3987.match(a_string, 'URI')
|
<commit_before>"""utils.py"""
import random
from pyechonest import config as echonest_config
from pyechonest import song as echonest_song
import rfc3987
from gweetr import app
from gweetr.exceptions import GweetrError
echonest_config.ECHO_NEST_API_KEY = app.config['ECHO_NEST_API_KEY']
def fetch_track(track_params):
"""
Fetch a track from 7digital via the Echo Nest API.
Available track parameters are listed at
http://developer.echonest.com/docs/v4/song.html#search
"""
try:
search_results = echonest_song.search(
buckets=['id:7digital-US', 'tracks'],
limit=True,
results=app.config['ECHO_NEST_SONG_RESULTS'],
**track_params
)
except TypeError as exc:
raise GweetrError("Received unknown track parameter: %s" % str(exc))
if search_results:
song_obj = random.choice(search_results)
tracks = song_obj.get_tracks('7digital-US')
track_data = tracks[0]
track = {
'title': song_obj.title,
'artist': song_obj.artist_name,
'url': track_data['preview_url']
}
return track
def is_valid_url(a_string):
"""Check if a string is a valid URL."""
match_obj = rfc3987.match(a_string, 'URI')
if match_obj:
return True
else:
return False
<commit_msg>Remove unnecessary if true/else false<commit_after>
|
"""utils.py"""
import random
from pyechonest import config as echonest_config
from pyechonest import song as echonest_song
import rfc3987
from gweetr import app
from gweetr.exceptions import GweetrError
echonest_config.ECHO_NEST_API_KEY = app.config['ECHO_NEST_API_KEY']
def fetch_track(track_params):
"""
Fetch a track from 7digital via the Echo Nest API.
Available track parameters are listed at
http://developer.echonest.com/docs/v4/song.html#search
"""
try:
search_results = echonest_song.search(
buckets=['id:7digital-US', 'tracks'],
limit=True,
results=app.config['ECHO_NEST_SONG_RESULTS'],
**track_params
)
except TypeError as exc:
raise GweetrError("Received unknown track parameter: %s" % str(exc))
if search_results:
song_obj = random.choice(search_results)
tracks = song_obj.get_tracks('7digital-US')
track_data = tracks[0]
track = {
'title': song_obj.title,
'artist': song_obj.artist_name,
'url': track_data['preview_url']
}
return track
def is_valid_url(a_string):
"""Check if a string is a valid URL."""
return rfc3987.match(a_string, 'URI')
|
"""utils.py"""
import random
from pyechonest import config as echonest_config
from pyechonest import song as echonest_song
import rfc3987
from gweetr import app
from gweetr.exceptions import GweetrError
echonest_config.ECHO_NEST_API_KEY = app.config['ECHO_NEST_API_KEY']
def fetch_track(track_params):
"""
Fetch a track from 7digital via the Echo Nest API.
Available track parameters are listed at
http://developer.echonest.com/docs/v4/song.html#search
"""
try:
search_results = echonest_song.search(
buckets=['id:7digital-US', 'tracks'],
limit=True,
results=app.config['ECHO_NEST_SONG_RESULTS'],
**track_params
)
except TypeError as exc:
raise GweetrError("Received unknown track parameter: %s" % str(exc))
if search_results:
song_obj = random.choice(search_results)
tracks = song_obj.get_tracks('7digital-US')
track_data = tracks[0]
track = {
'title': song_obj.title,
'artist': song_obj.artist_name,
'url': track_data['preview_url']
}
return track
def is_valid_url(a_string):
"""Check if a string is a valid URL."""
match_obj = rfc3987.match(a_string, 'URI')
if match_obj:
return True
else:
return False
Remove unnecessary if true/else false"""utils.py"""
import random
from pyechonest import config as echonest_config
from pyechonest import song as echonest_song
import rfc3987
from gweetr import app
from gweetr.exceptions import GweetrError
echonest_config.ECHO_NEST_API_KEY = app.config['ECHO_NEST_API_KEY']
def fetch_track(track_params):
"""
Fetch a track from 7digital via the Echo Nest API.
Available track parameters are listed at
http://developer.echonest.com/docs/v4/song.html#search
"""
try:
search_results = echonest_song.search(
buckets=['id:7digital-US', 'tracks'],
limit=True,
results=app.config['ECHO_NEST_SONG_RESULTS'],
**track_params
)
except TypeError as exc:
raise GweetrError("Received unknown track parameter: %s" % str(exc))
if search_results:
song_obj = random.choice(search_results)
tracks = song_obj.get_tracks('7digital-US')
track_data = tracks[0]
track = {
'title': song_obj.title,
'artist': song_obj.artist_name,
'url': track_data['preview_url']
}
return track
def is_valid_url(a_string):
"""Check if a string is a valid URL."""
return rfc3987.match(a_string, 'URI')
|
<commit_before>"""utils.py"""
import random
from pyechonest import config as echonest_config
from pyechonest import song as echonest_song
import rfc3987
from gweetr import app
from gweetr.exceptions import GweetrError
echonest_config.ECHO_NEST_API_KEY = app.config['ECHO_NEST_API_KEY']
def fetch_track(track_params):
"""
Fetch a track from 7digital via the Echo Nest API.
Available track parameters are listed at
http://developer.echonest.com/docs/v4/song.html#search
"""
try:
search_results = echonest_song.search(
buckets=['id:7digital-US', 'tracks'],
limit=True,
results=app.config['ECHO_NEST_SONG_RESULTS'],
**track_params
)
except TypeError as exc:
raise GweetrError("Received unknown track parameter: %s" % str(exc))
if search_results:
song_obj = random.choice(search_results)
tracks = song_obj.get_tracks('7digital-US')
track_data = tracks[0]
track = {
'title': song_obj.title,
'artist': song_obj.artist_name,
'url': track_data['preview_url']
}
return track
def is_valid_url(a_string):
"""Check if a string is a valid URL."""
match_obj = rfc3987.match(a_string, 'URI')
if match_obj:
return True
else:
return False
<commit_msg>Remove unnecessary if true/else false<commit_after>"""utils.py"""
import random
from pyechonest import config as echonest_config
from pyechonest import song as echonest_song
import rfc3987
from gweetr import app
from gweetr.exceptions import GweetrError
echonest_config.ECHO_NEST_API_KEY = app.config['ECHO_NEST_API_KEY']
def fetch_track(track_params):
"""
Fetch a track from 7digital via the Echo Nest API.
Available track parameters are listed at
http://developer.echonest.com/docs/v4/song.html#search
"""
try:
search_results = echonest_song.search(
buckets=['id:7digital-US', 'tracks'],
limit=True,
results=app.config['ECHO_NEST_SONG_RESULTS'],
**track_params
)
except TypeError as exc:
raise GweetrError("Received unknown track parameter: %s" % str(exc))
if search_results:
song_obj = random.choice(search_results)
tracks = song_obj.get_tracks('7digital-US')
track_data = tracks[0]
track = {
'title': song_obj.title,
'artist': song_obj.artist_name,
'url': track_data['preview_url']
}
return track
def is_valid_url(a_string):
"""Check if a string is a valid URL."""
return rfc3987.match(a_string, 'URI')
|
284cfbb4297d1d91c8c82e0f9a159a1614510ace
|
example.py
|
example.py
|
#!/usr/bin/env python
from confman import ConfigSource
options = \
{
'tags': ['desktop'],
'hostname': 'test',
}
from sys import argv
from os import path
samples_path = path.join(path.dirname(argv[0]), 'samples')
c = ConfigSource(samples_path, "/tmp/dotfiles-test", None, options)
c.analyze()
c.check()
c.sync()
print
from pprint import pprint
pprint(c)
|
#!/usr/bin/env python
from confman import ConfigSource
options = \
{
'tags': ['desktop'],
'hostname': 'test',
}
from os import path
samples_path = path.join(path.dirname(__file__), 'samples')
c = ConfigSource(samples_path, "/tmp/dotfiles-test", None, options)
c.analyze()
c.check()
c.sync()
print
from pprint import pprint
pprint(c)
|
Use __file__ instead of argv[0]
|
Use __file__ instead of argv[0]
|
Python
|
mit
|
laurentb/confman
|
#!/usr/bin/env python
from confman import ConfigSource
options = \
{
'tags': ['desktop'],
'hostname': 'test',
}
from sys import argv
from os import path
samples_path = path.join(path.dirname(argv[0]), 'samples')
c = ConfigSource(samples_path, "/tmp/dotfiles-test", None, options)
c.analyze()
c.check()
c.sync()
print
from pprint import pprint
pprint(c)
Use __file__ instead of argv[0]
|
#!/usr/bin/env python
from confman import ConfigSource
options = \
{
'tags': ['desktop'],
'hostname': 'test',
}
from os import path
samples_path = path.join(path.dirname(__file__), 'samples')
c = ConfigSource(samples_path, "/tmp/dotfiles-test", None, options)
c.analyze()
c.check()
c.sync()
print
from pprint import pprint
pprint(c)
|
<commit_before>#!/usr/bin/env python
from confman import ConfigSource
options = \
{
'tags': ['desktop'],
'hostname': 'test',
}
from sys import argv
from os import path
samples_path = path.join(path.dirname(argv[0]), 'samples')
c = ConfigSource(samples_path, "/tmp/dotfiles-test", None, options)
c.analyze()
c.check()
c.sync()
print
from pprint import pprint
pprint(c)
<commit_msg>Use __file__ instead of argv[0]<commit_after>
|
#!/usr/bin/env python
from confman import ConfigSource
options = \
{
'tags': ['desktop'],
'hostname': 'test',
}
from os import path
samples_path = path.join(path.dirname(__file__), 'samples')
c = ConfigSource(samples_path, "/tmp/dotfiles-test", None, options)
c.analyze()
c.check()
c.sync()
print
from pprint import pprint
pprint(c)
|
#!/usr/bin/env python
from confman import ConfigSource
options = \
{
'tags': ['desktop'],
'hostname': 'test',
}
from sys import argv
from os import path
samples_path = path.join(path.dirname(argv[0]), 'samples')
c = ConfigSource(samples_path, "/tmp/dotfiles-test", None, options)
c.analyze()
c.check()
c.sync()
print
from pprint import pprint
pprint(c)
Use __file__ instead of argv[0]#!/usr/bin/env python
from confman import ConfigSource
options = \
{
'tags': ['desktop'],
'hostname': 'test',
}
from os import path
samples_path = path.join(path.dirname(__file__), 'samples')
c = ConfigSource(samples_path, "/tmp/dotfiles-test", None, options)
c.analyze()
c.check()
c.sync()
print
from pprint import pprint
pprint(c)
|
<commit_before>#!/usr/bin/env python
from confman import ConfigSource
options = \
{
'tags': ['desktop'],
'hostname': 'test',
}
from sys import argv
from os import path
samples_path = path.join(path.dirname(argv[0]), 'samples')
c = ConfigSource(samples_path, "/tmp/dotfiles-test", None, options)
c.analyze()
c.check()
c.sync()
print
from pprint import pprint
pprint(c)
<commit_msg>Use __file__ instead of argv[0]<commit_after>#!/usr/bin/env python
from confman import ConfigSource
options = \
{
'tags': ['desktop'],
'hostname': 'test',
}
from os import path
samples_path = path.join(path.dirname(__file__), 'samples')
c = ConfigSource(samples_path, "/tmp/dotfiles-test", None, options)
c.analyze()
c.check()
c.sync()
print
from pprint import pprint
pprint(c)
|
4ade33843cb53362ef3eeea7bf7762d3e3edfa9f
|
sandbox/sandbox/urls.py
|
sandbox/sandbox/urls.py
|
from django.contrib import admin
from django.conf import settings
from django.conf.urls import patterns, include, url
from oscar.app import shop
from oscar_mws.dashboard.app import application as mws_app
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^admin/', include(admin.site.urls)),
url(r'^dashboard/', include(mws_app.urls)),
url(r'', include(shop.urls)),
)
if settings.DEBUG:
urlpatterns += patterns(
'',
url(
r'^media/(?P<path>.*)$',
'django.views.static.serve', {
'document_root': settings.MEDIA_ROOT,
}
),
)
|
from django.contrib import admin
from django.conf import settings
from django.conf.urls import patterns, include, url
from oscar.app import shop
from oscar_mws.dashboard.app import application as mws_app
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^admin/', include(admin.site.urls)),
# i18n URLS need to live outside of i18n_patterns scope of the shop
url(r'^i18n/', include('django.conf.urls.i18n')),
url(r'^dashboard/', include(mws_app.urls)),
url(r'', include(shop.urls)),
)
if settings.DEBUG:
urlpatterns += patterns(
'',
url(
r'^media/(?P<path>.*)$',
'django.views.static.serve', {
'document_root': settings.MEDIA_ROOT,
}
),
)
|
Add translation URLs as required by Oscar
|
Add translation URLs as required by Oscar
Fixes #5.
|
Python
|
bsd-3-clause
|
django-oscar/django-oscar-mws,django-oscar/django-oscar-mws
|
from django.contrib import admin
from django.conf import settings
from django.conf.urls import patterns, include, url
from oscar.app import shop
from oscar_mws.dashboard.app import application as mws_app
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^admin/', include(admin.site.urls)),
url(r'^dashboard/', include(mws_app.urls)),
url(r'', include(shop.urls)),
)
if settings.DEBUG:
urlpatterns += patterns(
'',
url(
r'^media/(?P<path>.*)$',
'django.views.static.serve', {
'document_root': settings.MEDIA_ROOT,
}
),
)
Add translation URLs as required by Oscar
Fixes #5.
|
from django.contrib import admin
from django.conf import settings
from django.conf.urls import patterns, include, url
from oscar.app import shop
from oscar_mws.dashboard.app import application as mws_app
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^admin/', include(admin.site.urls)),
# i18n URLS need to live outside of i18n_patterns scope of the shop
url(r'^i18n/', include('django.conf.urls.i18n')),
url(r'^dashboard/', include(mws_app.urls)),
url(r'', include(shop.urls)),
)
if settings.DEBUG:
urlpatterns += patterns(
'',
url(
r'^media/(?P<path>.*)$',
'django.views.static.serve', {
'document_root': settings.MEDIA_ROOT,
}
),
)
|
<commit_before>from django.contrib import admin
from django.conf import settings
from django.conf.urls import patterns, include, url
from oscar.app import shop
from oscar_mws.dashboard.app import application as mws_app
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^admin/', include(admin.site.urls)),
url(r'^dashboard/', include(mws_app.urls)),
url(r'', include(shop.urls)),
)
if settings.DEBUG:
urlpatterns += patterns(
'',
url(
r'^media/(?P<path>.*)$',
'django.views.static.serve', {
'document_root': settings.MEDIA_ROOT,
}
),
)
<commit_msg>Add translation URLs as required by Oscar
Fixes #5.<commit_after>
|
from django.contrib import admin
from django.conf import settings
from django.conf.urls import patterns, include, url
from oscar.app import shop
from oscar_mws.dashboard.app import application as mws_app
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^admin/', include(admin.site.urls)),
# i18n URLS need to live outside of i18n_patterns scope of the shop
url(r'^i18n/', include('django.conf.urls.i18n')),
url(r'^dashboard/', include(mws_app.urls)),
url(r'', include(shop.urls)),
)
if settings.DEBUG:
urlpatterns += patterns(
'',
url(
r'^media/(?P<path>.*)$',
'django.views.static.serve', {
'document_root': settings.MEDIA_ROOT,
}
),
)
|
from django.contrib import admin
from django.conf import settings
from django.conf.urls import patterns, include, url
from oscar.app import shop
from oscar_mws.dashboard.app import application as mws_app
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^admin/', include(admin.site.urls)),
url(r'^dashboard/', include(mws_app.urls)),
url(r'', include(shop.urls)),
)
if settings.DEBUG:
urlpatterns += patterns(
'',
url(
r'^media/(?P<path>.*)$',
'django.views.static.serve', {
'document_root': settings.MEDIA_ROOT,
}
),
)
Add translation URLs as required by Oscar
Fixes #5.from django.contrib import admin
from django.conf import settings
from django.conf.urls import patterns, include, url
from oscar.app import shop
from oscar_mws.dashboard.app import application as mws_app
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^admin/', include(admin.site.urls)),
# i18n URLS need to live outside of i18n_patterns scope of the shop
url(r'^i18n/', include('django.conf.urls.i18n')),
url(r'^dashboard/', include(mws_app.urls)),
url(r'', include(shop.urls)),
)
if settings.DEBUG:
urlpatterns += patterns(
'',
url(
r'^media/(?P<path>.*)$',
'django.views.static.serve', {
'document_root': settings.MEDIA_ROOT,
}
),
)
|
<commit_before>from django.contrib import admin
from django.conf import settings
from django.conf.urls import patterns, include, url
from oscar.app import shop
from oscar_mws.dashboard.app import application as mws_app
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^admin/', include(admin.site.urls)),
url(r'^dashboard/', include(mws_app.urls)),
url(r'', include(shop.urls)),
)
if settings.DEBUG:
urlpatterns += patterns(
'',
url(
r'^media/(?P<path>.*)$',
'django.views.static.serve', {
'document_root': settings.MEDIA_ROOT,
}
),
)
<commit_msg>Add translation URLs as required by Oscar
Fixes #5.<commit_after>from django.contrib import admin
from django.conf import settings
from django.conf.urls import patterns, include, url
from oscar.app import shop
from oscar_mws.dashboard.app import application as mws_app
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^admin/', include(admin.site.urls)),
# i18n URLS need to live outside of i18n_patterns scope of the shop
url(r'^i18n/', include('django.conf.urls.i18n')),
url(r'^dashboard/', include(mws_app.urls)),
url(r'', include(shop.urls)),
)
if settings.DEBUG:
urlpatterns += patterns(
'',
url(
r'^media/(?P<path>.*)$',
'django.views.static.serve', {
'document_root': settings.MEDIA_ROOT,
}
),
)
|
7f51f153f0fd1fd1dde06808879911897686f819
|
cities/Sample_City.py
|
cities/Sample_City.py
|
from bs4 import BeautifulSoup
import json
import datetime
import pytz
# The URL for the page where the parking lots are listed
data_url = "http://example.com"
# Name of the city, just in case it contains umlauts which this filename shouldn't
city_name = "Sample City"
# Name of this file (without '.py'), sorry for needing this, but it makes things easier
file_name = "Sample_City"
def parse_html(html):
soup = BeautifulSoup(html)
# Do everything necessary to scrape the contents of the html
# into a dictionary of the format specified by the schema.
def get_geodata_for_lot(lot_name):
geofile = open("./cities/" + file_name + ".geojson")
geodata = geofile.read()
geofile.close()
geodata = json.loads(geodata)
for feature in geodata["features"]:
if feature["properties"]["name"] == lot_name:
return {
"lon": feature["geometry"]["coordinates"][0],
"lat": feature["geometry"]["coordinates"][1]
}
return []
if __name__ == "__main__":
file = open("../tests/sample_city.html")
html_data = file.read()
file.close()
parse_html(html_data)
|
from bs4 import BeautifulSoup
import datetime
import pytz
from geodata import GeoData
# The URL for the page where the parking lots are listed
data_url = "http://example.com"
# Name of the city, just in case it contains umlauts which this filename shouldn't
city_name = "Sample City"
# Name of this file (without '.py'), sorry for needing this, but it makes things easier
file_name = "Sample_City"
# Uncomment the following line if there's geodata in the format of Sample_City.geodata in this directory
# geodata = GeoData(city_name)
def parse_html(html):
soup = BeautifulSoup(html)
# Do everything necessary to scrape the contents of the html
# into a dictionary of the format specified by the schema.
data = {
"last_updated": "",
"lots": []
}
print(data)
return data
# the following is for testing this out, just delete it all when done
if __name__ == "__main__":
with open("../tests/sample_city.html") as f:
parse_html(f.read())
|
Clean up sample city file
|
Clean up sample city file
|
Python
|
mit
|
offenesdresden/ParkAPI,Mic92/ParkAPI,offenesdresden/ParkAPI,Mic92/ParkAPI
|
from bs4 import BeautifulSoup
import json
import datetime
import pytz
# The URL for the page where the parking lots are listed
data_url = "http://example.com"
# Name of the city, just in case it contains umlauts which this filename shouldn't
city_name = "Sample City"
# Name of this file (without '.py'), sorry for needing this, but it makes things easier
file_name = "Sample_City"
def parse_html(html):
soup = BeautifulSoup(html)
# Do everything necessary to scrape the contents of the html
# into a dictionary of the format specified by the schema.
def get_geodata_for_lot(lot_name):
geofile = open("./cities/" + file_name + ".geojson")
geodata = geofile.read()
geofile.close()
geodata = json.loads(geodata)
for feature in geodata["features"]:
if feature["properties"]["name"] == lot_name:
return {
"lon": feature["geometry"]["coordinates"][0],
"lat": feature["geometry"]["coordinates"][1]
}
return []
if __name__ == "__main__":
file = open("../tests/sample_city.html")
html_data = file.read()
file.close()
parse_html(html_data)
Clean up sample city file
|
from bs4 import BeautifulSoup
import datetime
import pytz
from geodata import GeoData
# The URL for the page where the parking lots are listed
data_url = "http://example.com"
# Name of the city, just in case it contains umlauts which this filename shouldn't
city_name = "Sample City"
# Name of this file (without '.py'), sorry for needing this, but it makes things easier
file_name = "Sample_City"
# Uncomment the following line if there's geodata in the format of Sample_City.geodata in this directory
# geodata = GeoData(city_name)
def parse_html(html):
soup = BeautifulSoup(html)
# Do everything necessary to scrape the contents of the html
# into a dictionary of the format specified by the schema.
data = {
"last_updated": "",
"lots": []
}
print(data)
return data
# the following is for testing this out, just delete it all when done
if __name__ == "__main__":
with open("../tests/sample_city.html") as f:
parse_html(f.read())
|
<commit_before>from bs4 import BeautifulSoup
import json
import datetime
import pytz
# The URL for the page where the parking lots are listed
data_url = "http://example.com"
# Name of the city, just in case it contains umlauts which this filename shouldn't
city_name = "Sample City"
# Name of this file (without '.py'), sorry for needing this, but it makes things easier
file_name = "Sample_City"
def parse_html(html):
soup = BeautifulSoup(html)
# Do everything necessary to scrape the contents of the html
# into a dictionary of the format specified by the schema.
def get_geodata_for_lot(lot_name):
geofile = open("./cities/" + file_name + ".geojson")
geodata = geofile.read()
geofile.close()
geodata = json.loads(geodata)
for feature in geodata["features"]:
if feature["properties"]["name"] == lot_name:
return {
"lon": feature["geometry"]["coordinates"][0],
"lat": feature["geometry"]["coordinates"][1]
}
return []
if __name__ == "__main__":
file = open("../tests/sample_city.html")
html_data = file.read()
file.close()
parse_html(html_data)
<commit_msg>Clean up sample city file<commit_after>
|
from bs4 import BeautifulSoup
import datetime
import pytz
from geodata import GeoData
# The URL for the page where the parking lots are listed
data_url = "http://example.com"
# Name of the city, just in case it contains umlauts which this filename shouldn't
city_name = "Sample City"
# Name of this file (without '.py'), sorry for needing this, but it makes things easier
file_name = "Sample_City"
# Uncomment the following line if there's geodata in the format of Sample_City.geodata in this directory
# geodata = GeoData(city_name)
def parse_html(html):
soup = BeautifulSoup(html)
# Do everything necessary to scrape the contents of the html
# into a dictionary of the format specified by the schema.
data = {
"last_updated": "",
"lots": []
}
print(data)
return data
# the following is for testing this out, just delete it all when done
if __name__ == "__main__":
with open("../tests/sample_city.html") as f:
parse_html(f.read())
|
from bs4 import BeautifulSoup
import json
import datetime
import pytz
# The URL for the page where the parking lots are listed
data_url = "http://example.com"
# Name of the city, just in case it contains umlauts which this filename shouldn't
city_name = "Sample City"
# Name of this file (without '.py'), sorry for needing this, but it makes things easier
file_name = "Sample_City"
def parse_html(html):
soup = BeautifulSoup(html)
# Do everything necessary to scrape the contents of the html
# into a dictionary of the format specified by the schema.
def get_geodata_for_lot(lot_name):
geofile = open("./cities/" + file_name + ".geojson")
geodata = geofile.read()
geofile.close()
geodata = json.loads(geodata)
for feature in geodata["features"]:
if feature["properties"]["name"] == lot_name:
return {
"lon": feature["geometry"]["coordinates"][0],
"lat": feature["geometry"]["coordinates"][1]
}
return []
if __name__ == "__main__":
file = open("../tests/sample_city.html")
html_data = file.read()
file.close()
parse_html(html_data)
Clean up sample city filefrom bs4 import BeautifulSoup
import datetime
import pytz
from geodata import GeoData
# The URL for the page where the parking lots are listed
data_url = "http://example.com"
# Name of the city, just in case it contains umlauts which this filename shouldn't
city_name = "Sample City"
# Name of this file (without '.py'), sorry for needing this, but it makes things easier
file_name = "Sample_City"
# Uncomment the following line if there's geodata in the format of Sample_City.geodata in this directory
# geodata = GeoData(city_name)
def parse_html(html):
soup = BeautifulSoup(html)
# Do everything necessary to scrape the contents of the html
# into a dictionary of the format specified by the schema.
data = {
"last_updated": "",
"lots": []
}
print(data)
return data
# the following is for testing this out, just delete it all when done
if __name__ == "__main__":
with open("../tests/sample_city.html") as f:
parse_html(f.read())
|
<commit_before>from bs4 import BeautifulSoup
import json
import datetime
import pytz
# The URL for the page where the parking lots are listed
data_url = "http://example.com"
# Name of the city, just in case it contains umlauts which this filename shouldn't
city_name = "Sample City"
# Name of this file (without '.py'), sorry for needing this, but it makes things easier
file_name = "Sample_City"
def parse_html(html):
soup = BeautifulSoup(html)
# Do everything necessary to scrape the contents of the html
# into a dictionary of the format specified by the schema.
def get_geodata_for_lot(lot_name):
geofile = open("./cities/" + file_name + ".geojson")
geodata = geofile.read()
geofile.close()
geodata = json.loads(geodata)
for feature in geodata["features"]:
if feature["properties"]["name"] == lot_name:
return {
"lon": feature["geometry"]["coordinates"][0],
"lat": feature["geometry"]["coordinates"][1]
}
return []
if __name__ == "__main__":
file = open("../tests/sample_city.html")
html_data = file.read()
file.close()
parse_html(html_data)
<commit_msg>Clean up sample city file<commit_after>from bs4 import BeautifulSoup
import datetime
import pytz
from geodata import GeoData
# The URL for the page where the parking lots are listed
data_url = "http://example.com"
# Name of the city, just in case it contains umlauts which this filename shouldn't
city_name = "Sample City"
# Name of this file (without '.py'), sorry for needing this, but it makes things easier
file_name = "Sample_City"
# Uncomment the following line if there's geodata in the format of Sample_City.geodata in this directory
# geodata = GeoData(city_name)
def parse_html(html):
soup = BeautifulSoup(html)
# Do everything necessary to scrape the contents of the html
# into a dictionary of the format specified by the schema.
data = {
"last_updated": "",
"lots": []
}
print(data)
return data
# the following is for testing this out, just delete it all when done
if __name__ == "__main__":
with open("../tests/sample_city.html") as f:
parse_html(f.read())
|
07e9b55784f856c0175b4fbfeeceeb387abf7ad5
|
red_green_bar2.py
|
red_green_bar2.py
|
#!/usr/bin/env python2
'''
Given:
1. status code: (0 - OK, other value - BAD)
2. terminal window width
shows red/green bar to visualize return code of previous command
'''
import sys
if len(sys.argv) >= 2:
value = int(sys.argv[1])
cols_limit = int(sys.argv[2])
esc = chr(27)
if value:
col_char = '1'
else:
col_char = '2'
print (''.join((
esc,
'[4',
col_char,
'm',
' ' * (cols_limit - 2),
esc,
'[0m',
)))
else:
print ('''
Usage: %(prog_name)s status_code number_of_columns
1. status code: 0 - OK (green color), other values - BAD (red color)
2. number of columns: the width of text console
''' % dict(
prog_name=sys.argv[0],
))
|
#!/usr/bin/env python2
'''
Given:
1. status code: (0 - OK, other value - BAD)
2. terminal window width
shows red/green bar to visualize return code of previous command
'''
import sys
if len(sys.argv) >= 2:
value = int(sys.argv[1])
if value:
col_char = '1'
else:
col_char = '2'
cols_limit = int(sys.argv[2])
esc = chr(27)
print (''.join((
esc,
'[4',
col_char,
'm',
' ' * (cols_limit - 2),
esc,
'[0m',
)))
else:
print ('''
Usage: %(prog_name)s status_code number_of_columns
1. status code: 0 - OK (green color), other values - BAD (red color)
2. number of columns: the width of text console
''' % dict(
prog_name=sys.argv[0],
))
|
Prepare for y (yellow) first argument
|
Prepare for y (yellow) first argument
|
Python
|
mit
|
kwadrat/rgb_tdd
|
#!/usr/bin/env python2
'''
Given:
1. status code: (0 - OK, other value - BAD)
2. terminal window width
shows red/green bar to visualize return code of previous command
'''
import sys
if len(sys.argv) >= 2:
value = int(sys.argv[1])
cols_limit = int(sys.argv[2])
esc = chr(27)
if value:
col_char = '1'
else:
col_char = '2'
print (''.join((
esc,
'[4',
col_char,
'm',
' ' * (cols_limit - 2),
esc,
'[0m',
)))
else:
print ('''
Usage: %(prog_name)s status_code number_of_columns
1. status code: 0 - OK (green color), other values - BAD (red color)
2. number of columns: the width of text console
''' % dict(
prog_name=sys.argv[0],
))
Prepare for y (yellow) first argument
|
#!/usr/bin/env python2
'''
Given:
1. status code: (0 - OK, other value - BAD)
2. terminal window width
shows red/green bar to visualize return code of previous command
'''
import sys
if len(sys.argv) >= 2:
value = int(sys.argv[1])
if value:
col_char = '1'
else:
col_char = '2'
cols_limit = int(sys.argv[2])
esc = chr(27)
print (''.join((
esc,
'[4',
col_char,
'm',
' ' * (cols_limit - 2),
esc,
'[0m',
)))
else:
print ('''
Usage: %(prog_name)s status_code number_of_columns
1. status code: 0 - OK (green color), other values - BAD (red color)
2. number of columns: the width of text console
''' % dict(
prog_name=sys.argv[0],
))
|
<commit_before>#!/usr/bin/env python2
'''
Given:
1. status code: (0 - OK, other value - BAD)
2. terminal window width
shows red/green bar to visualize return code of previous command
'''
import sys
if len(sys.argv) >= 2:
value = int(sys.argv[1])
cols_limit = int(sys.argv[2])
esc = chr(27)
if value:
col_char = '1'
else:
col_char = '2'
print (''.join((
esc,
'[4',
col_char,
'm',
' ' * (cols_limit - 2),
esc,
'[0m',
)))
else:
print ('''
Usage: %(prog_name)s status_code number_of_columns
1. status code: 0 - OK (green color), other values - BAD (red color)
2. number of columns: the width of text console
''' % dict(
prog_name=sys.argv[0],
))
<commit_msg>Prepare for y (yellow) first argument<commit_after>
|
#!/usr/bin/env python2
'''
Given:
1. status code: (0 - OK, other value - BAD)
2. terminal window width
shows red/green bar to visualize return code of previous command
'''
import sys
if len(sys.argv) >= 2:
value = int(sys.argv[1])
if value:
col_char = '1'
else:
col_char = '2'
cols_limit = int(sys.argv[2])
esc = chr(27)
print (''.join((
esc,
'[4',
col_char,
'm',
' ' * (cols_limit - 2),
esc,
'[0m',
)))
else:
print ('''
Usage: %(prog_name)s status_code number_of_columns
1. status code: 0 - OK (green color), other values - BAD (red color)
2. number of columns: the width of text console
''' % dict(
prog_name=sys.argv[0],
))
|
#!/usr/bin/env python2
'''
Given:
1. status code: (0 - OK, other value - BAD)
2. terminal window width
shows red/green bar to visualize return code of previous command
'''
import sys
if len(sys.argv) >= 2:
value = int(sys.argv[1])
cols_limit = int(sys.argv[2])
esc = chr(27)
if value:
col_char = '1'
else:
col_char = '2'
print (''.join((
esc,
'[4',
col_char,
'm',
' ' * (cols_limit - 2),
esc,
'[0m',
)))
else:
print ('''
Usage: %(prog_name)s status_code number_of_columns
1. status code: 0 - OK (green color), other values - BAD (red color)
2. number of columns: the width of text console
''' % dict(
prog_name=sys.argv[0],
))
Prepare for y (yellow) first argument#!/usr/bin/env python2
'''
Given:
1. status code: (0 - OK, other value - BAD)
2. terminal window width
shows red/green bar to visualize return code of previous command
'''
import sys
if len(sys.argv) >= 2:
value = int(sys.argv[1])
if value:
col_char = '1'
else:
col_char = '2'
cols_limit = int(sys.argv[2])
esc = chr(27)
print (''.join((
esc,
'[4',
col_char,
'm',
' ' * (cols_limit - 2),
esc,
'[0m',
)))
else:
print ('''
Usage: %(prog_name)s status_code number_of_columns
1. status code: 0 - OK (green color), other values - BAD (red color)
2. number of columns: the width of text console
''' % dict(
prog_name=sys.argv[0],
))
|
<commit_before>#!/usr/bin/env python2
'''
Given:
1. status code: (0 - OK, other value - BAD)
2. terminal window width
shows red/green bar to visualize return code of previous command
'''
import sys
if len(sys.argv) >= 2:
value = int(sys.argv[1])
cols_limit = int(sys.argv[2])
esc = chr(27)
if value:
col_char = '1'
else:
col_char = '2'
print (''.join((
esc,
'[4',
col_char,
'm',
' ' * (cols_limit - 2),
esc,
'[0m',
)))
else:
print ('''
Usage: %(prog_name)s status_code number_of_columns
1. status code: 0 - OK (green color), other values - BAD (red color)
2. number of columns: the width of text console
''' % dict(
prog_name=sys.argv[0],
))
<commit_msg>Prepare for y (yellow) first argument<commit_after>#!/usr/bin/env python2
'''
Given:
1. status code: (0 - OK, other value - BAD)
2. terminal window width
shows red/green bar to visualize return code of previous command
'''
import sys
if len(sys.argv) >= 2:
value = int(sys.argv[1])
if value:
col_char = '1'
else:
col_char = '2'
cols_limit = int(sys.argv[2])
esc = chr(27)
print (''.join((
esc,
'[4',
col_char,
'm',
' ' * (cols_limit - 2),
esc,
'[0m',
)))
else:
print ('''
Usage: %(prog_name)s status_code number_of_columns
1. status code: 0 - OK (green color), other values - BAD (red color)
2. number of columns: the width of text console
''' % dict(
prog_name=sys.argv[0],
))
|
084eb32734731ee23e33e7360ec9f92e1e533f01
|
__init__.py
|
__init__.py
|
from spinsys import constructors
from spinsys import exceptions
from spinsys import half
from spinsys import hamiltonians
from spinsys import quantities
from spinsys import state_generators
from spinsys import tests
from spinsys import utils
__all__ = [
"constructors",
"exceptions",
"half",
"hamiltonians",
"quantities",
"state_generators",
"tests",
"utils"
]
|
from spinsys import constructors
from spinsys import exceptions
from spinsys import half
from spinsys import hamiltonians
from spinsys import quantities
from spinsys import state_generators
from spinsys import tests
from spinsys import utils
import shutil
import numpy
__all__ = [
"constructors",
"exceptions",
"half",
"hamiltonians",
"quantities",
"state_generators",
"tests",
"utils"
]
# set default print options for better display of data on screen
term_width = tuple(shutil.get_terminal_size())[0]
numpy.set_printoptions(precision=5, suppress=True, linewidth=term_width)
|
Set better defaults for numpy's print function
|
Set better defaults for numpy's print function
|
Python
|
bsd-3-clause
|
macthecadillac/spinsys
|
from spinsys import constructors
from spinsys import exceptions
from spinsys import half
from spinsys import hamiltonians
from spinsys import quantities
from spinsys import state_generators
from spinsys import tests
from spinsys import utils
__all__ = [
"constructors",
"exceptions",
"half",
"hamiltonians",
"quantities",
"state_generators",
"tests",
"utils"
]
Set better defaults for numpy's print function
|
from spinsys import constructors
from spinsys import exceptions
from spinsys import half
from spinsys import hamiltonians
from spinsys import quantities
from spinsys import state_generators
from spinsys import tests
from spinsys import utils
import shutil
import numpy
__all__ = [
"constructors",
"exceptions",
"half",
"hamiltonians",
"quantities",
"state_generators",
"tests",
"utils"
]
# set default print options for better display of data on screen
term_width = tuple(shutil.get_terminal_size())[0]
numpy.set_printoptions(precision=5, suppress=True, linewidth=term_width)
|
<commit_before>from spinsys import constructors
from spinsys import exceptions
from spinsys import half
from spinsys import hamiltonians
from spinsys import quantities
from spinsys import state_generators
from spinsys import tests
from spinsys import utils
__all__ = [
"constructors",
"exceptions",
"half",
"hamiltonians",
"quantities",
"state_generators",
"tests",
"utils"
]
<commit_msg>Set better defaults for numpy's print function<commit_after>
|
from spinsys import constructors
from spinsys import exceptions
from spinsys import half
from spinsys import hamiltonians
from spinsys import quantities
from spinsys import state_generators
from spinsys import tests
from spinsys import utils
import shutil
import numpy
__all__ = [
"constructors",
"exceptions",
"half",
"hamiltonians",
"quantities",
"state_generators",
"tests",
"utils"
]
# set default print options for better display of data on screen
term_width = tuple(shutil.get_terminal_size())[0]
numpy.set_printoptions(precision=5, suppress=True, linewidth=term_width)
|
from spinsys import constructors
from spinsys import exceptions
from spinsys import half
from spinsys import hamiltonians
from spinsys import quantities
from spinsys import state_generators
from spinsys import tests
from spinsys import utils
__all__ = [
"constructors",
"exceptions",
"half",
"hamiltonians",
"quantities",
"state_generators",
"tests",
"utils"
]
Set better defaults for numpy's print functionfrom spinsys import constructors
from spinsys import exceptions
from spinsys import half
from spinsys import hamiltonians
from spinsys import quantities
from spinsys import state_generators
from spinsys import tests
from spinsys import utils
import shutil
import numpy
__all__ = [
"constructors",
"exceptions",
"half",
"hamiltonians",
"quantities",
"state_generators",
"tests",
"utils"
]
# set default print options for better display of data on screen
term_width = tuple(shutil.get_terminal_size())[0]
numpy.set_printoptions(precision=5, suppress=True, linewidth=term_width)
|
<commit_before>from spinsys import constructors
from spinsys import exceptions
from spinsys import half
from spinsys import hamiltonians
from spinsys import quantities
from spinsys import state_generators
from spinsys import tests
from spinsys import utils
__all__ = [
"constructors",
"exceptions",
"half",
"hamiltonians",
"quantities",
"state_generators",
"tests",
"utils"
]
<commit_msg>Set better defaults for numpy's print function<commit_after>from spinsys import constructors
from spinsys import exceptions
from spinsys import half
from spinsys import hamiltonians
from spinsys import quantities
from spinsys import state_generators
from spinsys import tests
from spinsys import utils
import shutil
import numpy
__all__ = [
"constructors",
"exceptions",
"half",
"hamiltonians",
"quantities",
"state_generators",
"tests",
"utils"
]
# set default print options for better display of data on screen
term_width = tuple(shutil.get_terminal_size())[0]
numpy.set_printoptions(precision=5, suppress=True, linewidth=term_width)
|
98506d6ba1d1e7c8d3cf62d97f7c3f2f23bc4841
|
chainer/training/extensions/value_observation.py
|
chainer/training/extensions/value_observation.py
|
from chainer.training import extension
def observe_value(observation_key, target_func):
"""Returns a trainer extension to continuously record a value.
Args:
observation_key (str): Key of observation to record.
target_func (function): Function that returns the value to record.
It must take one argument: :class:~chainer.training.Trainer object.
Returns:
The extension function.
"""
@extension.make_extension(
trigger=(1, 'epoch'), priority=extension.PRIORITY_WRITER)
def _observe_value(trainer):
trainer.observation[observation_key] = target_func(trainer)
return _observe_value
def observe_lr(optimizer_name='main', observation_key='lr'):
"""Returns a trainer extension to record the learning rate.
Args:
optimizer_name (str): Name of optimizer whose learning rate is
recorded.
observation_key (str): Key of observation to record.
Returns:
The extension function.
"""
return observe_value(
observation_key,
lambda trainer: trainer.updater.get_optimizer(optimizer_name).lr)
|
from chainer.training import extension
def observe_value(observation_key, target_func):
"""Returns a trainer extension to continuously record a value.
Args:
observation_key (str): Key of observation to record.
target_func (function): Function that returns the value to record.
It must take one argument: :class:~chainer.training.Trainer object.
Returns:
The extension function.
This extension is triggered every 1 epoch by default.
To change this, specify ``trigger`` argument to
:meth:`Trainer.extend() <chainer.training.Trainer.extend>` method.
"""
@extension.make_extension(
trigger=(1, 'epoch'), priority=extension.PRIORITY_WRITER)
def _observe_value(trainer):
trainer.observation[observation_key] = target_func(trainer)
return _observe_value
def observe_lr(optimizer_name='main', observation_key='lr'):
"""Returns a trainer extension to record the learning rate.
Args:
optimizer_name (str): Name of optimizer whose learning rate is
recorded.
observation_key (str): Key of observation to record.
Returns:
The extension function.
This extension is triggered every 1 epoch by default.
To change this, specify ``trigger`` argument to
:meth:`Trainer.extend() <chainer.training.Trainer.extend>` method.
"""
return observe_value(
observation_key,
lambda trainer: trainer.updater.get_optimizer(optimizer_name).lr)
|
Document observe_value and observe_lr trigger interval
|
Document observe_value and observe_lr trigger interval
|
Python
|
mit
|
hvy/chainer,niboshi/chainer,okuta/chainer,hvy/chainer,keisuke-umezawa/chainer,chainer/chainer,chainer/chainer,niboshi/chainer,niboshi/chainer,wkentaro/chainer,keisuke-umezawa/chainer,pfnet/chainer,chainer/chainer,okuta/chainer,hvy/chainer,niboshi/chainer,wkentaro/chainer,wkentaro/chainer,okuta/chainer,wkentaro/chainer,chainer/chainer,keisuke-umezawa/chainer,keisuke-umezawa/chainer,hvy/chainer,okuta/chainer
|
from chainer.training import extension
def observe_value(observation_key, target_func):
"""Returns a trainer extension to continuously record a value.
Args:
observation_key (str): Key of observation to record.
target_func (function): Function that returns the value to record.
It must take one argument: :class:~chainer.training.Trainer object.
Returns:
The extension function.
"""
@extension.make_extension(
trigger=(1, 'epoch'), priority=extension.PRIORITY_WRITER)
def _observe_value(trainer):
trainer.observation[observation_key] = target_func(trainer)
return _observe_value
def observe_lr(optimizer_name='main', observation_key='lr'):
"""Returns a trainer extension to record the learning rate.
Args:
optimizer_name (str): Name of optimizer whose learning rate is
recorded.
observation_key (str): Key of observation to record.
Returns:
The extension function.
"""
return observe_value(
observation_key,
lambda trainer: trainer.updater.get_optimizer(optimizer_name).lr)
Document observe_value and observe_lr trigger interval
|
from chainer.training import extension
def observe_value(observation_key, target_func):
"""Returns a trainer extension to continuously record a value.
Args:
observation_key (str): Key of observation to record.
target_func (function): Function that returns the value to record.
It must take one argument: :class:~chainer.training.Trainer object.
Returns:
The extension function.
This extension is triggered every 1 epoch by default.
To change this, specify ``trigger`` argument to
:meth:`Trainer.extend() <chainer.training.Trainer.extend>` method.
"""
@extension.make_extension(
trigger=(1, 'epoch'), priority=extension.PRIORITY_WRITER)
def _observe_value(trainer):
trainer.observation[observation_key] = target_func(trainer)
return _observe_value
def observe_lr(optimizer_name='main', observation_key='lr'):
"""Returns a trainer extension to record the learning rate.
Args:
optimizer_name (str): Name of optimizer whose learning rate is
recorded.
observation_key (str): Key of observation to record.
Returns:
The extension function.
This extension is triggered every 1 epoch by default.
To change this, specify ``trigger`` argument to
:meth:`Trainer.extend() <chainer.training.Trainer.extend>` method.
"""
return observe_value(
observation_key,
lambda trainer: trainer.updater.get_optimizer(optimizer_name).lr)
|
<commit_before>from chainer.training import extension
def observe_value(observation_key, target_func):
"""Returns a trainer extension to continuously record a value.
Args:
observation_key (str): Key of observation to record.
target_func (function): Function that returns the value to record.
It must take one argument: :class:~chainer.training.Trainer object.
Returns:
The extension function.
"""
@extension.make_extension(
trigger=(1, 'epoch'), priority=extension.PRIORITY_WRITER)
def _observe_value(trainer):
trainer.observation[observation_key] = target_func(trainer)
return _observe_value
def observe_lr(optimizer_name='main', observation_key='lr'):
"""Returns a trainer extension to record the learning rate.
Args:
optimizer_name (str): Name of optimizer whose learning rate is
recorded.
observation_key (str): Key of observation to record.
Returns:
The extension function.
"""
return observe_value(
observation_key,
lambda trainer: trainer.updater.get_optimizer(optimizer_name).lr)
<commit_msg>Document observe_value and observe_lr trigger interval<commit_after>
|
from chainer.training import extension
def observe_value(observation_key, target_func):
"""Returns a trainer extension to continuously record a value.
Args:
observation_key (str): Key of observation to record.
target_func (function): Function that returns the value to record.
It must take one argument: :class:~chainer.training.Trainer object.
Returns:
The extension function.
This extension is triggered every 1 epoch by default.
To change this, specify ``trigger`` argument to
:meth:`Trainer.extend() <chainer.training.Trainer.extend>` method.
"""
@extension.make_extension(
trigger=(1, 'epoch'), priority=extension.PRIORITY_WRITER)
def _observe_value(trainer):
trainer.observation[observation_key] = target_func(trainer)
return _observe_value
def observe_lr(optimizer_name='main', observation_key='lr'):
"""Returns a trainer extension to record the learning rate.
Args:
optimizer_name (str): Name of optimizer whose learning rate is
recorded.
observation_key (str): Key of observation to record.
Returns:
The extension function.
This extension is triggered every 1 epoch by default.
To change this, specify ``trigger`` argument to
:meth:`Trainer.extend() <chainer.training.Trainer.extend>` method.
"""
return observe_value(
observation_key,
lambda trainer: trainer.updater.get_optimizer(optimizer_name).lr)
|
from chainer.training import extension
def observe_value(observation_key, target_func):
"""Returns a trainer extension to continuously record a value.
Args:
observation_key (str): Key of observation to record.
target_func (function): Function that returns the value to record.
It must take one argument: :class:~chainer.training.Trainer object.
Returns:
The extension function.
"""
@extension.make_extension(
trigger=(1, 'epoch'), priority=extension.PRIORITY_WRITER)
def _observe_value(trainer):
trainer.observation[observation_key] = target_func(trainer)
return _observe_value
def observe_lr(optimizer_name='main', observation_key='lr'):
"""Returns a trainer extension to record the learning rate.
Args:
optimizer_name (str): Name of optimizer whose learning rate is
recorded.
observation_key (str): Key of observation to record.
Returns:
The extension function.
"""
return observe_value(
observation_key,
lambda trainer: trainer.updater.get_optimizer(optimizer_name).lr)
Document observe_value and observe_lr trigger intervalfrom chainer.training import extension
def observe_value(observation_key, target_func):
"""Returns a trainer extension to continuously record a value.
Args:
observation_key (str): Key of observation to record.
target_func (function): Function that returns the value to record.
It must take one argument: :class:~chainer.training.Trainer object.
Returns:
The extension function.
This extension is triggered every 1 epoch by default.
To change this, specify ``trigger`` argument to
:meth:`Trainer.extend() <chainer.training.Trainer.extend>` method.
"""
@extension.make_extension(
trigger=(1, 'epoch'), priority=extension.PRIORITY_WRITER)
def _observe_value(trainer):
trainer.observation[observation_key] = target_func(trainer)
return _observe_value
def observe_lr(optimizer_name='main', observation_key='lr'):
"""Returns a trainer extension to record the learning rate.
Args:
optimizer_name (str): Name of optimizer whose learning rate is
recorded.
observation_key (str): Key of observation to record.
Returns:
The extension function.
This extension is triggered every 1 epoch by default.
To change this, specify ``trigger`` argument to
:meth:`Trainer.extend() <chainer.training.Trainer.extend>` method.
"""
return observe_value(
observation_key,
lambda trainer: trainer.updater.get_optimizer(optimizer_name).lr)
|
<commit_before>from chainer.training import extension
def observe_value(observation_key, target_func):
"""Returns a trainer extension to continuously record a value.
Args:
observation_key (str): Key of observation to record.
target_func (function): Function that returns the value to record.
It must take one argument: :class:~chainer.training.Trainer object.
Returns:
The extension function.
"""
@extension.make_extension(
trigger=(1, 'epoch'), priority=extension.PRIORITY_WRITER)
def _observe_value(trainer):
trainer.observation[observation_key] = target_func(trainer)
return _observe_value
def observe_lr(optimizer_name='main', observation_key='lr'):
"""Returns a trainer extension to record the learning rate.
Args:
optimizer_name (str): Name of optimizer whose learning rate is
recorded.
observation_key (str): Key of observation to record.
Returns:
The extension function.
"""
return observe_value(
observation_key,
lambda trainer: trainer.updater.get_optimizer(optimizer_name).lr)
<commit_msg>Document observe_value and observe_lr trigger interval<commit_after>from chainer.training import extension
def observe_value(observation_key, target_func):
"""Returns a trainer extension to continuously record a value.
Args:
observation_key (str): Key of observation to record.
target_func (function): Function that returns the value to record.
It must take one argument: :class:~chainer.training.Trainer object.
Returns:
The extension function.
This extension is triggered every 1 epoch by default.
To change this, specify ``trigger`` argument to
:meth:`Trainer.extend() <chainer.training.Trainer.extend>` method.
"""
@extension.make_extension(
trigger=(1, 'epoch'), priority=extension.PRIORITY_WRITER)
def _observe_value(trainer):
trainer.observation[observation_key] = target_func(trainer)
return _observe_value
def observe_lr(optimizer_name='main', observation_key='lr'):
"""Returns a trainer extension to record the learning rate.
Args:
optimizer_name (str): Name of optimizer whose learning rate is
recorded.
observation_key (str): Key of observation to record.
Returns:
The extension function.
This extension is triggered every 1 epoch by default.
To change this, specify ``trigger`` argument to
:meth:`Trainer.extend() <chainer.training.Trainer.extend>` method.
"""
return observe_value(
observation_key,
lambda trainer: trainer.updater.get_optimizer(optimizer_name).lr)
|
3a5a6db3b869841cf5c55eed2f5ec877a443a571
|
chrome/test/functional/chromeos_html_terminal.py
|
chrome/test/functional/chromeos_html_terminal.py
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import pyauto_functional # must be imported before pyauto
import pyauto
class ChromeosHTMLTerminalTest(pyauto.PyUITest):
"""Basic tests for ChromeOS HTML Terminal.
Requires ChromeOS to be logged in.
"""
def _GetExtensionInfoById(self, extensions, id):
for x in extensions:
if x['id'] == id:
return x
return None
def testInstallHTMLTerminal(self):
"""Basic installation test for HTML Terminal on ChromeOS."""
crx_file_path = os.path.abspath(
os.path.join(self.DataDir(), 'pyauto_private', 'apps',
'SecureShell-dev-0.7.9.3.crx'))
ext_id = self.InstallExtension(crx_file_path)
self.assertTrue(ext_id, 'Failed to install extension.')
extension = self._GetExtensionInfoById(self.GetExtensionsInfo(), ext_id)
self.assertTrue(extension['is_enabled'],
msg='Extension was not enabled on installation.')
self.assertFalse(extension['allowed_in_incognito'],
msg='Extension was allowed in incognito on installation.')
if __name__ == '__main__':
pyauto_functional.Main()
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import pyauto_functional # must be imported before pyauto
import pyauto
class ChromeosHTMLTerminalTest(pyauto.PyUITest):
"""Basic tests for ChromeOS HTML Terminal.
Requires ChromeOS to be logged in.
"""
def _GetExtensionInfoById(self, extensions, id):
for x in extensions:
if x['id'] == id:
return x
return None
def testInstallAndUninstallSecureShellExt(self):
"""Basic installation test for HTML Terminal on ChromeOS."""
crx_file_path = os.path.abspath(
os.path.join(self.DataDir(), 'pyauto_private', 'apps',
'SecureShell-dev-0.7.9.3.crx'))
ext_id = self.InstallExtension(crx_file_path)
self.assertTrue(ext_id, 'Failed to install extension.')
extension = self._GetExtensionInfoById(self.GetExtensionsInfo(), ext_id)
self.assertTrue(extension['is_enabled'],
msg='Extension was not enabled on installation.')
self.assertFalse(extension['allowed_in_incognito'],
msg='Extension was allowed in incognito on installation.')
# Uninstall HTML Terminal extension
self.assertTrue(self.UninstallExtensionById(ext_id),
msg='Failed to uninstall extension.')
if __name__ == '__main__':
pyauto_functional.Main()
|
Add uninstall HTML Terminal extension
|
Add uninstall HTML Terminal extension
BUG=
TEST=This is a test to uninstall HTML terminal extension
Review URL: https://chromiumcodereview.appspot.com/10332227
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@137790 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
hgl888/chromium-crosswalk,markYoungH/chromium.src,hujiajie/pa-chromium,hujiajie/pa-chromium,TheTypoMaster/chromium-crosswalk,zcbenz/cefode-chromium,Chilledheart/chromium,timopulkkinen/BubbleFish,dushu1203/chromium.src,junmin-zhu/chromium-rivertrail,Pluto-tv/chromium-crosswalk,zcbenz/cefode-chromium,crosswalk-project/chromium-crosswalk-efl,junmin-zhu/chromium-rivertrail,Chilledheart/chromium,mohamed--abdel-maksoud/chromium.src,pozdnyakov/chromium-crosswalk,bright-sparks/chromium-spacewalk,ChromiumWebApps/chromium,junmin-zhu/chromium-rivertrail,M4sse/chromium.src,patrickm/chromium.src,TheTypoMaster/chromium-crosswalk,ondra-novak/chromium.src,patrickm/chromium.src,dushu1203/chromium.src,timopulkkinen/BubbleFish,Pluto-tv/chromium-crosswalk,jaruba/chromium.src,fujunwei/chromium-crosswalk,littlstar/chromium.src,littlstar/chromium.src,Pluto-tv/chromium-crosswalk,junmin-zhu/chromium-rivertrail,M4sse/chromium.src,mogoweb/chromium-crosswalk,axinging/chromium-crosswalk,Just-D/chromium-1,ltilve/chromium,hujiajie/pa-chromium,chuan9/chromium-crosswalk,anirudhSK/chromium,markYoungH/chromium.src,markYoungH/chromium.src,markYoungH/chromium.src,dednal/chromium.src,Fireblend/chromium-crosswalk,patrickm/chromium.src,pozdnyakov/chromium-crosswalk,chuan9/chromium-crosswalk,Just-D/chromium-1,jaruba/chromium.src,anirudhSK/chromium,keishi/chromium,ChromiumWebApps/chromium,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk-efl,jaruba/chromium.src,jaruba/chromium.src,dednal/chromium.src,dednal/chromium.src,ChromiumWebApps/chromium,mohamed--abdel-maksoud/chromium.src,Chilledheart/chromium,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,anirudhSK/chromium,Chilledheart/chromium,keishi/chromium,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,hgl888/chromium-crosswalk-efl,ondra-novak/chromium.src,axinging/chromium-crosswalk,hujiajie/pa-chromium,pozdnyakov/chromium-crosswalk,anirudhSK/chromium,hgl888/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,mogoweb/chromium-cro
sswalk,littlstar/chromium.src,keishi/chromium,littlstar/chromium.src,TheTypoMaster/chromium-crosswalk,hujiajie/pa-chromium,krieger-od/nwjs_chromium.src,markYoungH/chromium.src,fujunwei/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,junmin-zhu/chromium-rivertrail,bright-sparks/chromium-spacewalk,bright-sparks/chromium-spacewalk,dednal/chromium.src,hujiajie/pa-chromium,ltilve/chromium,ChromiumWebApps/chromium,hujiajie/pa-chromium,M4sse/chromium.src,pozdnyakov/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,crosswalk-project/chromium-crosswalk-efl,zcbenz/cefode-chromium,pozdnyakov/chromium-crosswalk,Jonekee/chromium.src,fujunwei/chromium-crosswalk,fujunwei/chromium-crosswalk,dednal/chromium.src,littlstar/chromium.src,PeterWangIntel/chromium-crosswalk,mogoweb/chromium-crosswalk,mogoweb/chromium-crosswalk,littlstar/chromium.src,timopulkkinen/BubbleFish,nacl-webkit/chrome_deps,ltilve/chromium,patrickm/chromium.src,mohamed--abdel-maksoud/chromium.src,fujunwei/chromium-crosswalk,nacl-webkit/chrome_deps,dednal/chromium.src,Jonekee/chromium.src,hujiajie/pa-chromium,crosswalk-project/chromium-crosswalk-efl,timopulkkinen/BubbleFish,patrickm/chromium.src,hgl888/chromium-crosswalk-efl,markYoungH/chromium.src,anirudhSK/chromium,crosswalk-project/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk,Jonekee/chromium.src,M4sse/chromium.src,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk-efl,ondra-novak/chromium.src,nacl-webkit/chrome_deps,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,crosswalk-project/chromium-crosswalk-efl,pozdnyakov/chromium-crosswalk,ChromiumWebApps/chromium,dushu1203/chromium.src,krieger-od/nwjs_chromium.src,bright-sparks/chromium-spacewalk,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,ltilve/chromium,Jonekee/chromium.src,krieger-od/nwjs_chromium.src,crosswalk-project/chromium-crosswalk-efl,Chilledheart/chromium,mohamed--abdel-maksoud/chromium.src,hgl888/chr
omium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,Chilledheart/chromium,pozdnyakov/chromium-crosswalk,junmin-zhu/chromium-rivertrail,axinging/chromium-crosswalk,ondra-novak/chromium.src,zcbenz/cefode-chromium,Chilledheart/chromium,Fireblend/chromium-crosswalk,jaruba/chromium.src,ChromiumWebApps/chromium,mogoweb/chromium-crosswalk,ondra-novak/chromium.src,nacl-webkit/chrome_deps,axinging/chromium-crosswalk,timopulkkinen/BubbleFish,zcbenz/cefode-chromium,M4sse/chromium.src,Jonekee/chromium.src,zcbenz/cefode-chromium,Pluto-tv/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Fireblend/chromium-crosswalk,bright-sparks/chromium-spacewalk,dushu1203/chromium.src,mogoweb/chromium-crosswalk,dushu1203/chromium.src,junmin-zhu/chromium-rivertrail,ondra-novak/chromium.src,hgl888/chromium-crosswalk,ChromiumWebApps/chromium,jaruba/chromium.src,M4sse/chromium.src,PeterWangIntel/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,timopulkkinen/BubbleFish,TheTypoMaster/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,markYoungH/chromium.src,Jonekee/chromium.src,anirudhSK/chromium,dednal/chromium.src,patrickm/chromium.src,chuan9/chromium-crosswalk,zcbenz/cefode-chromium,littlstar/chromium.src,Fireblend/chromium-crosswalk,nacl-webkit/chrome_deps,ondra-novak/chromium.src,Chilledheart/chromium,nacl-webkit/chrome_deps,nacl-webkit/chrome_deps,dushu1203/chromium.src,Just-D/chromium-1,jaruba/chromium.src,jaruba/chromium.src,hgl888/chromium-crosswalk-efl,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,markYoungH/chromium.src,keishi/chromium,Fireblend/chromium-crosswalk,axinging/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,bright-sparks/chromium-spacewalk,Just-D/chromium-1,ltilve/chromium,dushu1203/chromium.src,chuan9/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,keishi/chromium,PeterWangIntel/chromium-crosswalk,keishi/chromium,jaruba/chromium.src,patrickm/chromium.src,krieger-od/nwjs_chromium.src,anirudhSK/chromium,zcbenz/cefode-chromium,hgl
888/chromium-crosswalk-efl,hgl888/chromium-crosswalk-efl,junmin-zhu/chromium-rivertrail,markYoungH/chromium.src,patrickm/chromium.src,ChromiumWebApps/chromium,Just-D/chromium-1,M4sse/chromium.src,axinging/chromium-crosswalk,timopulkkinen/BubbleFish,ondra-novak/chromium.src,pozdnyakov/chromium-crosswalk,anirudhSK/chromium,hgl888/chromium-crosswalk,Just-D/chromium-1,Just-D/chromium-1,Just-D/chromium-1,dednal/chromium.src,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,bright-sparks/chromium-spacewalk,hgl888/chromium-crosswalk-efl,pozdnyakov/chromium-crosswalk,anirudhSK/chromium,jaruba/chromium.src,mohamed--abdel-maksoud/chromium.src,ChromiumWebApps/chromium,ChromiumWebApps/chromium,dushu1203/chromium.src,M4sse/chromium.src,timopulkkinen/BubbleFish,Fireblend/chromium-crosswalk,mogoweb/chromium-crosswalk,ondra-novak/chromium.src,M4sse/chromium.src,timopulkkinen/BubbleFish,ChromiumWebApps/chromium,keishi/chromium,mogoweb/chromium-crosswalk,hgl888/chromium-crosswalk,ltilve/chromium,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,Jonekee/chromium.src,axinging/chromium-crosswalk,nacl-webkit/chrome_deps,M4sse/chromium.src,junmin-zhu/chromium-rivertrail,Jonekee/chromium.src,chuan9/chromium-crosswalk,Jonekee/chromium.src,krieger-od/nwjs_chromium.src,junmin-zhu/chromium-rivertrail,zcbenz/cefode-chromium,dednal/chromium.src,mogoweb/chromium-crosswalk,ChromiumWebApps/chromium,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,anirudhSK/chromium,TheTypoMaster/chromium-crosswalk,ltilve/chromium,patrickm/chromium.src,bright-sparks/chromium-spacewalk,PeterWangIntel/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,ltilve/chromium,fujunwei/chromium-crosswalk,keishi/chromium,Fireblend/chromium-crosswalk,chuan9/chromium-crosswalk,jaruba/chromium.src,dushu1203/chromium.src,Jonekee/chromium.src,mogoweb/chromium-crosswalk,axinging/chromium-crosswalk,markYoungH/chromium.src,bright-sparks/chromium-spacewalk,hgl888/chromium-crosswalk,chuan9
/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,krieger-od/nwjs_chromium.src,zcbenz/cefode-chromium,hujiajie/pa-chromium,ltilve/chromium,Fireblend/chromium-crosswalk,timopulkkinen/BubbleFish,keishi/chromium,littlstar/chromium.src,pozdnyakov/chromium-crosswalk,nacl-webkit/chrome_deps,Fireblend/chromium-crosswalk,krieger-od/nwjs_chromium.src,chuan9/chromium-crosswalk,junmin-zhu/chromium-rivertrail,pozdnyakov/chromium-crosswalk,hgl888/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,dednal/chromium.src,markYoungH/chromium.src,M4sse/chromium.src,crosswalk-project/chromium-crosswalk-efl,anirudhSK/chromium,keishi/chromium,anirudhSK/chromium,fujunwei/chromium-crosswalk,hujiajie/pa-chromium,zcbenz/cefode-chromium,timopulkkinen/BubbleFish,hujiajie/pa-chromium,nacl-webkit/chrome_deps,fujunwei/chromium-crosswalk,Chilledheart/chromium,nacl-webkit/chrome_deps,keishi/chromium,Jonekee/chromium.src,PeterWangIntel/chromium-crosswalk,Pluto-tv/chromium-crosswalk,PeterWangIntel/chromium-crosswalk
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import pyauto_functional # must be imported before pyauto
import pyauto
class ChromeosHTMLTerminalTest(pyauto.PyUITest):
"""Basic tests for ChromeOS HTML Terminal.
Requires ChromeOS to be logged in.
"""
def _GetExtensionInfoById(self, extensions, id):
for x in extensions:
if x['id'] == id:
return x
return None
def testInstallHTMLTerminal(self):
"""Basic installation test for HTML Terminal on ChromeOS."""
crx_file_path = os.path.abspath(
os.path.join(self.DataDir(), 'pyauto_private', 'apps',
'SecureShell-dev-0.7.9.3.crx'))
ext_id = self.InstallExtension(crx_file_path)
self.assertTrue(ext_id, 'Failed to install extension.')
extension = self._GetExtensionInfoById(self.GetExtensionsInfo(), ext_id)
self.assertTrue(extension['is_enabled'],
msg='Extension was not enabled on installation.')
self.assertFalse(extension['allowed_in_incognito'],
msg='Extension was allowed in incognito on installation.')
if __name__ == '__main__':
pyauto_functional.Main()
Add uninstall HTML Terminal extension
BUG=
TEST=This is a test to uninstall HTML terminal extension
Review URL: https://chromiumcodereview.appspot.com/10332227
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@137790 0039d316-1c4b-4281-b951-d872f2087c98
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import pyauto_functional # must be imported before pyauto
import pyauto
class ChromeosHTMLTerminalTest(pyauto.PyUITest):
"""Basic tests for ChromeOS HTML Terminal.
Requires ChromeOS to be logged in.
"""
def _GetExtensionInfoById(self, extensions, id):
for x in extensions:
if x['id'] == id:
return x
return None
def testInstallAndUninstallSecureShellExt(self):
"""Basic installation test for HTML Terminal on ChromeOS."""
crx_file_path = os.path.abspath(
os.path.join(self.DataDir(), 'pyauto_private', 'apps',
'SecureShell-dev-0.7.9.3.crx'))
ext_id = self.InstallExtension(crx_file_path)
self.assertTrue(ext_id, 'Failed to install extension.')
extension = self._GetExtensionInfoById(self.GetExtensionsInfo(), ext_id)
self.assertTrue(extension['is_enabled'],
msg='Extension was not enabled on installation.')
self.assertFalse(extension['allowed_in_incognito'],
msg='Extension was allowed in incognito on installation.')
# Uninstall HTML Terminal extension
self.assertTrue(self.UninstallExtensionById(ext_id),
msg='Failed to uninstall extension.')
if __name__ == '__main__':
pyauto_functional.Main()
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import pyauto_functional # must be imported before pyauto
import pyauto
class ChromeosHTMLTerminalTest(pyauto.PyUITest):
"""Basic tests for ChromeOS HTML Terminal.
Requires ChromeOS to be logged in.
"""
def _GetExtensionInfoById(self, extensions, id):
for x in extensions:
if x['id'] == id:
return x
return None
def testInstallHTMLTerminal(self):
"""Basic installation test for HTML Terminal on ChromeOS."""
crx_file_path = os.path.abspath(
os.path.join(self.DataDir(), 'pyauto_private', 'apps',
'SecureShell-dev-0.7.9.3.crx'))
ext_id = self.InstallExtension(crx_file_path)
self.assertTrue(ext_id, 'Failed to install extension.')
extension = self._GetExtensionInfoById(self.GetExtensionsInfo(), ext_id)
self.assertTrue(extension['is_enabled'],
msg='Extension was not enabled on installation.')
self.assertFalse(extension['allowed_in_incognito'],
msg='Extension was allowed in incognito on installation.')
if __name__ == '__main__':
pyauto_functional.Main()
<commit_msg>Add uninstall HTML Terminal extension
BUG=
TEST=This is a test to uninstall HTML terminal extension
Review URL: https://chromiumcodereview.appspot.com/10332227
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@137790 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import pyauto_functional # must be imported before pyauto
import pyauto
class ChromeosHTMLTerminalTest(pyauto.PyUITest):
"""Basic tests for ChromeOS HTML Terminal.
Requires ChromeOS to be logged in.
"""
def _GetExtensionInfoById(self, extensions, id):
for x in extensions:
if x['id'] == id:
return x
return None
def testInstallAndUninstallSecureShellExt(self):
"""Basic installation test for HTML Terminal on ChromeOS."""
crx_file_path = os.path.abspath(
os.path.join(self.DataDir(), 'pyauto_private', 'apps',
'SecureShell-dev-0.7.9.3.crx'))
ext_id = self.InstallExtension(crx_file_path)
self.assertTrue(ext_id, 'Failed to install extension.')
extension = self._GetExtensionInfoById(self.GetExtensionsInfo(), ext_id)
self.assertTrue(extension['is_enabled'],
msg='Extension was not enabled on installation.')
self.assertFalse(extension['allowed_in_incognito'],
msg='Extension was allowed in incognito on installation.')
# Uninstall HTML Terminal extension
self.assertTrue(self.UninstallExtensionById(ext_id),
msg='Failed to uninstall extension.')
if __name__ == '__main__':
pyauto_functional.Main()
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import os

import pyauto_functional  # must be imported before pyauto
import pyauto


class ChromeosHTMLTerminalTest(pyauto.PyUITest):
  """Basic tests for ChromeOS HTML Terminal.

  Requires ChromeOS to be logged in.
  """

  def _GetExtensionInfoById(self, extensions, id):
    """Return the extension-info dict whose 'id' matches, or None."""
    for x in extensions:
      if x['id'] == id:
        return x
    return None

  def testInstallHTMLTerminal(self):
    """Basic installation test for HTML Terminal on ChromeOS."""
    # The .crx fixture lives under the private (non-public) test-data tree.
    crx_file_path = os.path.abspath(
        os.path.join(self.DataDir(), 'pyauto_private', 'apps',
                     'SecureShell-dev-0.7.9.3.crx'))
    ext_id = self.InstallExtension(crx_file_path)
    self.assertTrue(ext_id, 'Failed to install extension.')
    extension = self._GetExtensionInfoById(self.GetExtensionsInfo(), ext_id)
    self.assertTrue(extension['is_enabled'],
                    msg='Extension was not enabled on installation.')
    self.assertFalse(extension['allowed_in_incognito'],
                     msg='Extension was allowed in incognito on installation.')


if __name__ == '__main__':
  pyauto_functional.Main()
Add uninstall HTML Terminal extension
BUG=
TEST=This is a test to uninstall HTML terminal extension
Review URL: https://chromiumcodereview.appspot.com/10332227
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@137790 0039d316-1c4b-4281-b951-d872f2087c98#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import pyauto_functional # must be imported before pyauto
import pyauto
class ChromeosHTMLTerminalTest(pyauto.PyUITest):
"""Basic tests for ChromeOS HTML Terminal.
Requires ChromeOS to be logged in.
"""
def _GetExtensionInfoById(self, extensions, id):
for x in extensions:
if x['id'] == id:
return x
return None
def testInstallAndUninstallSecureShellExt(self):
"""Basic installation test for HTML Terminal on ChromeOS."""
crx_file_path = os.path.abspath(
os.path.join(self.DataDir(), 'pyauto_private', 'apps',
'SecureShell-dev-0.7.9.3.crx'))
ext_id = self.InstallExtension(crx_file_path)
self.assertTrue(ext_id, 'Failed to install extension.')
extension = self._GetExtensionInfoById(self.GetExtensionsInfo(), ext_id)
self.assertTrue(extension['is_enabled'],
msg='Extension was not enabled on installation.')
self.assertFalse(extension['allowed_in_incognito'],
msg='Extension was allowed in incognito on installation.')
# Uninstall HTML Terminal extension
self.assertTrue(self.UninstallExtensionById(ext_id),
msg='Failed to uninstall extension.')
if __name__ == '__main__':
pyauto_functional.Main()
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import pyauto_functional # must be imported before pyauto
import pyauto
class ChromeosHTMLTerminalTest(pyauto.PyUITest):
"""Basic tests for ChromeOS HTML Terminal.
Requires ChromeOS to be logged in.
"""
def _GetExtensionInfoById(self, extensions, id):
for x in extensions:
if x['id'] == id:
return x
return None
def testInstallHTMLTerminal(self):
"""Basic installation test for HTML Terminal on ChromeOS."""
crx_file_path = os.path.abspath(
os.path.join(self.DataDir(), 'pyauto_private', 'apps',
'SecureShell-dev-0.7.9.3.crx'))
ext_id = self.InstallExtension(crx_file_path)
self.assertTrue(ext_id, 'Failed to install extension.')
extension = self._GetExtensionInfoById(self.GetExtensionsInfo(), ext_id)
self.assertTrue(extension['is_enabled'],
msg='Extension was not enabled on installation.')
self.assertFalse(extension['allowed_in_incognito'],
msg='Extension was allowed in incognito on installation.')
if __name__ == '__main__':
pyauto_functional.Main()
<commit_msg>Add uninstall HTML Terminal extension
BUG=
TEST=This is a test to uninstall HTML terminal extension
Review URL: https://chromiumcodereview.appspot.com/10332227
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@137790 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import pyauto_functional # must be imported before pyauto
import pyauto
class ChromeosHTMLTerminalTest(pyauto.PyUITest):
"""Basic tests for ChromeOS HTML Terminal.
Requires ChromeOS to be logged in.
"""
def _GetExtensionInfoById(self, extensions, id):
for x in extensions:
if x['id'] == id:
return x
return None
def testInstallAndUninstallSecureShellExt(self):
"""Basic installation test for HTML Terminal on ChromeOS."""
crx_file_path = os.path.abspath(
os.path.join(self.DataDir(), 'pyauto_private', 'apps',
'SecureShell-dev-0.7.9.3.crx'))
ext_id = self.InstallExtension(crx_file_path)
self.assertTrue(ext_id, 'Failed to install extension.')
extension = self._GetExtensionInfoById(self.GetExtensionsInfo(), ext_id)
self.assertTrue(extension['is_enabled'],
msg='Extension was not enabled on installation.')
self.assertFalse(extension['allowed_in_incognito'],
msg='Extension was allowed in incognito on installation.')
# Uninstall HTML Terminal extension
self.assertTrue(self.UninstallExtensionById(ext_id),
msg='Failed to uninstall extension.')
if __name__ == '__main__':
pyauto_functional.Main()
|
e1703021a467b38d61e59da5aff5e7280b021ade
|
TutsPy/tut.py
|
TutsPy/tut.py
|
import re
import requests
from bs4 import BeautifulSoup
from utils import download_file
import os

# Tutorialspoint subject slug whose chapter PDFs will be fetched.
SUBJECT = 'seo'
INDEX_ENDPOINT = 'http://www.tutorialspoint.com/%s/index.htm'
DOWNLOAD_ENDPOINT = 'http://www.tutorialspoint.com/%s/pdf/%s.pdf'


def get_all_chapters():
    """Scrape the subject's index page and download every chapter PDF.

    NOTE(review): os.makedirs(SUBJECT) raises OSError when the directory
    already exists, so re-running the script fails -- confirm intended.
    """
    r = requests.get(INDEX_ENDPOINT%SUBJECT)
    soup = BeautifulSoup(r.text)
    # Chapter links on the index page open in the top frame.
    links = soup.find_all("a",{"target":"_top"})
    os.makedirs(SUBJECT)
    for link in links:
        # Only links under /<SUBJECT>/ are chapter pages worth downloading.
        if(re.match(r'^/'+SUBJECT,link['href'])):
            filename = link['href'].split('/')[-1]
            # Strip the .htm suffix; the PDF endpoint expects the bare slug.
            download_file(DOWNLOAD_ENDPOINT%(SUBJECT,filename.split('.')[0]),SUBJECT+'/'+filename.split('.')[0])


get_all_chapters()
|
import re
import requests
from bs4 import BeautifulSoup
from utils import download_file
import os

# Tutorialspoint subject slug whose chapter PDFs will be fetched.
SUBJECT = 'seo'
INDEX_ENDPOINT = 'http://www.tutorialspoint.com/%s/index.htm'
DOWNLOAD_ENDPOINT = 'http://www.tutorialspoint.com/%s/pdf/%s.pdf'


def get_all_chapters():
    """Scrape the subject's index page and download every chapter PDF.

    NOTE(review): os.makedirs(SUBJECT) raises OSError when the directory
    already exists, so re-running the script fails -- confirm intended.
    """
    r = requests.get(INDEX_ENDPOINT%SUBJECT)
    soup = BeautifulSoup(r.text)
    # Chapter links on the index page open in the top frame.
    links = soup.find_all("a",{"target":"_top"})
    os.makedirs(SUBJECT)
    for link in links:
        # Only links under /<SUBJECT>/ are chapter pages worth downloading.
        if(re.match(r'^/'+SUBJECT,link['href'])):
            filename = link['href'].split('/')[-1]
            # Strip the .htm suffix; the PDF endpoint expects the bare slug.
            download_file(DOWNLOAD_ENDPOINT%(SUBJECT,filename.split('.')[0]),SUBJECT+'/'+filename.split('.')[0])


if __name__ == '__main__':
    get_all_chapters()
|
Add check of command line program execution
|
Add check of command line program execution
|
Python
|
mit
|
voidabhi/python-scripts,voidabhi/python-scripts,voidabhi/python-scripts,voidabhi/python-scripts,voidabhi/python-scripts
|
import re
import requests
from bs4 import BeautifulSoup
from utils import download_file
import os
SUBJECT = 'seo'
INDEX_ENDPOINT = 'http://www.tutorialspoint.com/%s/index.htm'
DOWNLOAD_ENDPOINT = 'http://www.tutorialspoint.com/%s/pdf/%s.pdf'
def get_all_chapters():
r = requests.get(INDEX_ENDPOINT%SUBJECT)
soup = BeautifulSoup(r.text)
links = soup.find_all("a",{"target":"_top"})
os.makedirs(SUBJECT)
for link in links:
if(re.match(r'^/'+SUBJECT,link['href'])):
filename = link['href'].split('/')[-1]
download_file(DOWNLOAD_ENDPOINT%(SUBJECT,filename.split('.')[0]),SUBJECT+'/'+filename.split('.')[0])
get_all_chapters()
Add check of command line program execution
|
import re
import requests
from bs4 import BeautifulSoup
from utils import download_file
import os
SUBJECT = 'seo'
INDEX_ENDPOINT = 'http://www.tutorialspoint.com/%s/index.htm'
DOWNLOAD_ENDPOINT = 'http://www.tutorialspoint.com/%s/pdf/%s.pdf'
def get_all_chapters():
r = requests.get(INDEX_ENDPOINT%SUBJECT)
soup = BeautifulSoup(r.text)
links = soup.find_all("a",{"target":"_top"})
os.makedirs(SUBJECT)
for link in links:
if(re.match(r'^/'+SUBJECT,link['href'])):
filename = link['href'].split('/')[-1]
download_file(DOWNLOAD_ENDPOINT%(SUBJECT,filename.split('.')[0]),SUBJECT+'/'+filename.split('.')[0])
if __name__ == '__main__':
get_all_chapters()
|
<commit_before>
import re
import requests
from bs4 import BeautifulSoup
from utils import download_file
import os
SUBJECT = 'seo'
INDEX_ENDPOINT = 'http://www.tutorialspoint.com/%s/index.htm'
DOWNLOAD_ENDPOINT = 'http://www.tutorialspoint.com/%s/pdf/%s.pdf'
def get_all_chapters():
r = requests.get(INDEX_ENDPOINT%SUBJECT)
soup = BeautifulSoup(r.text)
links = soup.find_all("a",{"target":"_top"})
os.makedirs(SUBJECT)
for link in links:
if(re.match(r'^/'+SUBJECT,link['href'])):
filename = link['href'].split('/')[-1]
download_file(DOWNLOAD_ENDPOINT%(SUBJECT,filename.split('.')[0]),SUBJECT+'/'+filename.split('.')[0])
get_all_chapters()
<commit_msg>Add check of command line program execution<commit_after>
|
import re
import requests
from bs4 import BeautifulSoup
from utils import download_file
import os
SUBJECT = 'seo'
INDEX_ENDPOINT = 'http://www.tutorialspoint.com/%s/index.htm'
DOWNLOAD_ENDPOINT = 'http://www.tutorialspoint.com/%s/pdf/%s.pdf'
def get_all_chapters():
r = requests.get(INDEX_ENDPOINT%SUBJECT)
soup = BeautifulSoup(r.text)
links = soup.find_all("a",{"target":"_top"})
os.makedirs(SUBJECT)
for link in links:
if(re.match(r'^/'+SUBJECT,link['href'])):
filename = link['href'].split('/')[-1]
download_file(DOWNLOAD_ENDPOINT%(SUBJECT,filename.split('.')[0]),SUBJECT+'/'+filename.split('.')[0])
if __name__ == '__main__':
get_all_chapters()
|
import re
import requests
from bs4 import BeautifulSoup
from utils import download_file
import os
SUBJECT = 'seo'
INDEX_ENDPOINT = 'http://www.tutorialspoint.com/%s/index.htm'
DOWNLOAD_ENDPOINT = 'http://www.tutorialspoint.com/%s/pdf/%s.pdf'
def get_all_chapters():
r = requests.get(INDEX_ENDPOINT%SUBJECT)
soup = BeautifulSoup(r.text)
links = soup.find_all("a",{"target":"_top"})
os.makedirs(SUBJECT)
for link in links:
if(re.match(r'^/'+SUBJECT,link['href'])):
filename = link['href'].split('/')[-1]
download_file(DOWNLOAD_ENDPOINT%(SUBJECT,filename.split('.')[0]),SUBJECT+'/'+filename.split('.')[0])
get_all_chapters()
Add check of command line program execution
import re
import requests
from bs4 import BeautifulSoup
from utils import download_file
import os
SUBJECT = 'seo'
INDEX_ENDPOINT = 'http://www.tutorialspoint.com/%s/index.htm'
DOWNLOAD_ENDPOINT = 'http://www.tutorialspoint.com/%s/pdf/%s.pdf'
def get_all_chapters():
r = requests.get(INDEX_ENDPOINT%SUBJECT)
soup = BeautifulSoup(r.text)
links = soup.find_all("a",{"target":"_top"})
os.makedirs(SUBJECT)
for link in links:
if(re.match(r'^/'+SUBJECT,link['href'])):
filename = link['href'].split('/')[-1]
download_file(DOWNLOAD_ENDPOINT%(SUBJECT,filename.split('.')[0]),SUBJECT+'/'+filename.split('.')[0])
if __name__ == '__main__':
get_all_chapters()
|
<commit_before>
import re
import requests
from bs4 import BeautifulSoup
from utils import download_file
import os
SUBJECT = 'seo'
INDEX_ENDPOINT = 'http://www.tutorialspoint.com/%s/index.htm'
DOWNLOAD_ENDPOINT = 'http://www.tutorialspoint.com/%s/pdf/%s.pdf'
def get_all_chapters():
r = requests.get(INDEX_ENDPOINT%SUBJECT)
soup = BeautifulSoup(r.text)
links = soup.find_all("a",{"target":"_top"})
os.makedirs(SUBJECT)
for link in links:
if(re.match(r'^/'+SUBJECT,link['href'])):
filename = link['href'].split('/')[-1]
download_file(DOWNLOAD_ENDPOINT%(SUBJECT,filename.split('.')[0]),SUBJECT+'/'+filename.split('.')[0])
get_all_chapters()
<commit_msg>Add check of command line program execution<commit_after>
import re
import requests
from bs4 import BeautifulSoup
from utils import download_file
import os
SUBJECT = 'seo'
INDEX_ENDPOINT = 'http://www.tutorialspoint.com/%s/index.htm'
DOWNLOAD_ENDPOINT = 'http://www.tutorialspoint.com/%s/pdf/%s.pdf'
def get_all_chapters():
r = requests.get(INDEX_ENDPOINT%SUBJECT)
soup = BeautifulSoup(r.text)
links = soup.find_all("a",{"target":"_top"})
os.makedirs(SUBJECT)
for link in links:
if(re.match(r'^/'+SUBJECT,link['href'])):
filename = link['href'].split('/')[-1]
download_file(DOWNLOAD_ENDPOINT%(SUBJECT,filename.split('.')[0]),SUBJECT+'/'+filename.split('.')[0])
if __name__ == '__main__':
get_all_chapters()
|
feab28c495c8ade1ef6b9f658df9a7cde1d63936
|
httpobs/website/monitoring.py
|
httpobs/website/monitoring.py
|
from flask import abort, Blueprint, jsonify

from httpobs import SOURCE_URL, VERSION
from httpobs.conf import BROKER_URL  # NOTE(review): appears unused in this module
from httpobs.database import get_cursor

import kombu  # NOTE(review): appears unused in this module

monitoring_api = Blueprint('monitoring-api', __name__)


@monitoring_api.route('/__heartbeat__')
def heartbeat():
    """Deep health check: respond 200 only if the database is reachable."""
    # TODO: check celery status
    try:
        # Check the database
        with get_cursor() as _:  # noqa
            pass
    except:
        # NOTE(review): bare except masks the failure cause; any error at
        # all becomes an opaque 500 -- confirm that is acceptable here.
        abort(500)

    return jsonify({'database': 'OK'})


@monitoring_api.route('/__lbheartbeat__')
def lbheartbeat():
    """Shallow load-balancer heartbeat: always return an empty 200 body."""
    return ''


@monitoring_api.route('/__version__')
def version():
    """Report the deployed source URL and version string as JSON."""
    return jsonify({'source': SOURCE_URL,
                    'version': VERSION})
from flask import abort, Blueprint, jsonify

from httpobs import SOURCE_URL, VERSION
from httpobs.database import get_cursor

monitoring_api = Blueprint('monitoring-api', __name__)


@monitoring_api.route('/__heartbeat__')
def heartbeat():
    """Deep health check: respond 200 only if the database is reachable."""
    # TODO: check celery status
    try:
        # Check the database
        with get_cursor() as _:  # noqa
            pass
    except:
        # NOTE(review): bare except masks the failure cause; any error at
        # all becomes an opaque 500 -- confirm that is acceptable here.
        abort(500)

    return jsonify({'database': 'OK'})


@monitoring_api.route('/__lbheartbeat__')
def lbheartbeat():
    """Shallow load-balancer heartbeat: always return an empty 200 body."""
    return ''


@monitoring_api.route('/__version__')
def version():
    """Report the deployed source URL and version string as JSON."""
    return jsonify({'source': SOURCE_URL,
                    'version': VERSION})
Fix flake8 error in travis
|
Fix flake8 error in travis
|
Python
|
mpl-2.0
|
mozilla/http-observatory,mozilla/http-observatory,mozilla/http-observatory,april/http-observatory,april/http-observatory,april/http-observatory
|
from flask import abort, Blueprint, jsonify
from httpobs import SOURCE_URL, VERSION
from httpobs.conf import BROKER_URL
from httpobs.database import get_cursor
import kombu
monitoring_api = Blueprint('monitoring-api', __name__)
@monitoring_api.route('/__heartbeat__')
def heartbeat():
# TODO: check celery status
try:
# Check the database
with get_cursor() as _: # noqa
pass
except:
abort(500)
return jsonify({'database': 'OK'})
@monitoring_api.route('/__lbheartbeat__')
def lbheartbeat():
return ''
@monitoring_api.route('/__version__')
def version():
return jsonify({'source': SOURCE_URL,
'version': VERSION})
Fix flake8 error in travis
|
from flask import abort, Blueprint, jsonify
from httpobs import SOURCE_URL, VERSION
from httpobs.database import get_cursor
monitoring_api = Blueprint('monitoring-api', __name__)
@monitoring_api.route('/__heartbeat__')
def heartbeat():
# TODO: check celery status
try:
# Check the database
with get_cursor() as _: # noqa
pass
except:
abort(500)
return jsonify({'database': 'OK'})
@monitoring_api.route('/__lbheartbeat__')
def lbheartbeat():
return ''
@monitoring_api.route('/__version__')
def version():
return jsonify({'source': SOURCE_URL,
'version': VERSION})
|
<commit_before>from flask import abort, Blueprint, jsonify
from httpobs import SOURCE_URL, VERSION
from httpobs.conf import BROKER_URL
from httpobs.database import get_cursor
import kombu
monitoring_api = Blueprint('monitoring-api', __name__)
@monitoring_api.route('/__heartbeat__')
def heartbeat():
# TODO: check celery status
try:
# Check the database
with get_cursor() as _: # noqa
pass
except:
abort(500)
return jsonify({'database': 'OK'})
@monitoring_api.route('/__lbheartbeat__')
def lbheartbeat():
return ''
@monitoring_api.route('/__version__')
def version():
return jsonify({'source': SOURCE_URL,
'version': VERSION})
<commit_msg>Fix flake8 error in travis<commit_after>
|
from flask import abort, Blueprint, jsonify
from httpobs import SOURCE_URL, VERSION
from httpobs.database import get_cursor
monitoring_api = Blueprint('monitoring-api', __name__)
@monitoring_api.route('/__heartbeat__')
def heartbeat():
# TODO: check celery status
try:
# Check the database
with get_cursor() as _: # noqa
pass
except:
abort(500)
return jsonify({'database': 'OK'})
@monitoring_api.route('/__lbheartbeat__')
def lbheartbeat():
return ''
@monitoring_api.route('/__version__')
def version():
return jsonify({'source': SOURCE_URL,
'version': VERSION})
|
from flask import abort, Blueprint, jsonify
from httpobs import SOURCE_URL, VERSION
from httpobs.conf import BROKER_URL
from httpobs.database import get_cursor
import kombu
monitoring_api = Blueprint('monitoring-api', __name__)
@monitoring_api.route('/__heartbeat__')
def heartbeat():
# TODO: check celery status
try:
# Check the database
with get_cursor() as _: # noqa
pass
except:
abort(500)
return jsonify({'database': 'OK'})
@monitoring_api.route('/__lbheartbeat__')
def lbheartbeat():
return ''
@monitoring_api.route('/__version__')
def version():
return jsonify({'source': SOURCE_URL,
'version': VERSION})
Fix flake8 error in travisfrom flask import abort, Blueprint, jsonify
from httpobs import SOURCE_URL, VERSION
from httpobs.database import get_cursor
monitoring_api = Blueprint('monitoring-api', __name__)
@monitoring_api.route('/__heartbeat__')
def heartbeat():
# TODO: check celery status
try:
# Check the database
with get_cursor() as _: # noqa
pass
except:
abort(500)
return jsonify({'database': 'OK'})
@monitoring_api.route('/__lbheartbeat__')
def lbheartbeat():
return ''
@monitoring_api.route('/__version__')
def version():
return jsonify({'source': SOURCE_URL,
'version': VERSION})
|
<commit_before>from flask import abort, Blueprint, jsonify
from httpobs import SOURCE_URL, VERSION
from httpobs.conf import BROKER_URL
from httpobs.database import get_cursor
import kombu
monitoring_api = Blueprint('monitoring-api', __name__)
@monitoring_api.route('/__heartbeat__')
def heartbeat():
# TODO: check celery status
try:
# Check the database
with get_cursor() as _: # noqa
pass
except:
abort(500)
return jsonify({'database': 'OK'})
@monitoring_api.route('/__lbheartbeat__')
def lbheartbeat():
return ''
@monitoring_api.route('/__version__')
def version():
return jsonify({'source': SOURCE_URL,
'version': VERSION})
<commit_msg>Fix flake8 error in travis<commit_after>from flask import abort, Blueprint, jsonify
from httpobs import SOURCE_URL, VERSION
from httpobs.database import get_cursor
monitoring_api = Blueprint('monitoring-api', __name__)
@monitoring_api.route('/__heartbeat__')
def heartbeat():
# TODO: check celery status
try:
# Check the database
with get_cursor() as _: # noqa
pass
except:
abort(500)
return jsonify({'database': 'OK'})
@monitoring_api.route('/__lbheartbeat__')
def lbheartbeat():
return ''
@monitoring_api.route('/__version__')
def version():
return jsonify({'source': SOURCE_URL,
'version': VERSION})
|
4026b8e352229c6f640d428640cd08919ba440e6
|
dodo_commands/extra/webdev_commands/django-manage.py
|
dodo_commands/extra/webdev_commands/django-manage.py
|
"""Run a django-manage command."""
import argparse
from dodo_commands.extra.standard_commands import DodoCommand
from dodo_commands.util import remove_trailing_dashes
class Command(DodoCommand): # noqa
decorators = ['docker']
def add_arguments_imp(self, parser): # noqa
parser.add_argument(
'manage_args',
nargs=argparse.REMAINDER
)
def handle_imp( # noqa
self, manage_args, *args, **kwargs
):
self.runcmd(
[
self.get_config("/DJANGO/python"),
"manage.py",
] + remove_trailing_dashes(manage_args),
cwd=self.get_config("/DJANGO/src_dir")
)
|
"""Run a django-manage command."""
import argparse
from dodo_commands.extra.standard_commands import DodoCommand
from dodo_commands.framework.util import remove_trailing_dashes
class Command(DodoCommand): # noqa
decorators = ['docker']
def add_arguments_imp(self, parser): # noqa
parser.add_argument(
'manage_args',
nargs=argparse.REMAINDER
)
def handle_imp( # noqa
self, manage_args, *args, **kwargs
):
self.runcmd(
[
self.get_config("/DJANGO/python"),
"manage.py",
] + remove_trailing_dashes(manage_args),
cwd=self.get_config("/DJANGO/src_dir")
)
|
Fix remaining broken import of remove_trailing_dashes
|
Fix remaining broken import of remove_trailing_dashes
|
Python
|
mit
|
mnieber/dodo_commands
|
"""Run a django-manage command."""
import argparse
from dodo_commands.extra.standard_commands import DodoCommand
from dodo_commands.util import remove_trailing_dashes
class Command(DodoCommand): # noqa
decorators = ['docker']
def add_arguments_imp(self, parser): # noqa
parser.add_argument(
'manage_args',
nargs=argparse.REMAINDER
)
def handle_imp( # noqa
self, manage_args, *args, **kwargs
):
self.runcmd(
[
self.get_config("/DJANGO/python"),
"manage.py",
] + remove_trailing_dashes(manage_args),
cwd=self.get_config("/DJANGO/src_dir")
)
Fix remaining broken import of remove_trailing_dashes
|
"""Run a django-manage command."""
import argparse
from dodo_commands.extra.standard_commands import DodoCommand
from dodo_commands.framework.util import remove_trailing_dashes
class Command(DodoCommand): # noqa
decorators = ['docker']
def add_arguments_imp(self, parser): # noqa
parser.add_argument(
'manage_args',
nargs=argparse.REMAINDER
)
def handle_imp( # noqa
self, manage_args, *args, **kwargs
):
self.runcmd(
[
self.get_config("/DJANGO/python"),
"manage.py",
] + remove_trailing_dashes(manage_args),
cwd=self.get_config("/DJANGO/src_dir")
)
|
<commit_before>"""Run a django-manage command."""
import argparse
from dodo_commands.extra.standard_commands import DodoCommand
from dodo_commands.util import remove_trailing_dashes
class Command(DodoCommand): # noqa
decorators = ['docker']
def add_arguments_imp(self, parser): # noqa
parser.add_argument(
'manage_args',
nargs=argparse.REMAINDER
)
def handle_imp( # noqa
self, manage_args, *args, **kwargs
):
self.runcmd(
[
self.get_config("/DJANGO/python"),
"manage.py",
] + remove_trailing_dashes(manage_args),
cwd=self.get_config("/DJANGO/src_dir")
)
<commit_msg>Fix remaining broken import of remove_trailing_dashes<commit_after>
|
"""Run a django-manage command."""
import argparse
from dodo_commands.extra.standard_commands import DodoCommand
from dodo_commands.framework.util import remove_trailing_dashes
class Command(DodoCommand): # noqa
decorators = ['docker']
def add_arguments_imp(self, parser): # noqa
parser.add_argument(
'manage_args',
nargs=argparse.REMAINDER
)
def handle_imp( # noqa
self, manage_args, *args, **kwargs
):
self.runcmd(
[
self.get_config("/DJANGO/python"),
"manage.py",
] + remove_trailing_dashes(manage_args),
cwd=self.get_config("/DJANGO/src_dir")
)
|
"""Run a django-manage command."""
import argparse
from dodo_commands.extra.standard_commands import DodoCommand
from dodo_commands.util import remove_trailing_dashes
class Command(DodoCommand): # noqa
decorators = ['docker']
def add_arguments_imp(self, parser): # noqa
parser.add_argument(
'manage_args',
nargs=argparse.REMAINDER
)
def handle_imp( # noqa
self, manage_args, *args, **kwargs
):
self.runcmd(
[
self.get_config("/DJANGO/python"),
"manage.py",
] + remove_trailing_dashes(manage_args),
cwd=self.get_config("/DJANGO/src_dir")
)
Fix remaining broken import of remove_trailing_dashes"""Run a django-manage command."""
import argparse
from dodo_commands.extra.standard_commands import DodoCommand
from dodo_commands.framework.util import remove_trailing_dashes
class Command(DodoCommand): # noqa
decorators = ['docker']
def add_arguments_imp(self, parser): # noqa
parser.add_argument(
'manage_args',
nargs=argparse.REMAINDER
)
def handle_imp( # noqa
self, manage_args, *args, **kwargs
):
self.runcmd(
[
self.get_config("/DJANGO/python"),
"manage.py",
] + remove_trailing_dashes(manage_args),
cwd=self.get_config("/DJANGO/src_dir")
)
|
<commit_before>"""Run a django-manage command."""
import argparse
from dodo_commands.extra.standard_commands import DodoCommand
from dodo_commands.util import remove_trailing_dashes
class Command(DodoCommand): # noqa
decorators = ['docker']
def add_arguments_imp(self, parser): # noqa
parser.add_argument(
'manage_args',
nargs=argparse.REMAINDER
)
def handle_imp( # noqa
self, manage_args, *args, **kwargs
):
self.runcmd(
[
self.get_config("/DJANGO/python"),
"manage.py",
] + remove_trailing_dashes(manage_args),
cwd=self.get_config("/DJANGO/src_dir")
)
<commit_msg>Fix remaining broken import of remove_trailing_dashes<commit_after>"""Run a django-manage command."""
import argparse
from dodo_commands.extra.standard_commands import DodoCommand
from dodo_commands.framework.util import remove_trailing_dashes
class Command(DodoCommand): # noqa
decorators = ['docker']
def add_arguments_imp(self, parser): # noqa
parser.add_argument(
'manage_args',
nargs=argparse.REMAINDER
)
def handle_imp( # noqa
self, manage_args, *args, **kwargs
):
self.runcmd(
[
self.get_config("/DJANGO/python"),
"manage.py",
] + remove_trailing_dashes(manage_args),
cwd=self.get_config("/DJANGO/src_dir")
)
|
3c68bae8da0767b01cddec34d88012ca0ea1d6ba
|
booksforcha/booksforcha.py
|
booksforcha/booksforcha.py
|
# -*- coding: utf-8 -*-
import schedule
from feed import load_feed
from twitter import send_queued_tweet
import time
import os
RSS_FEED_LIST = os.environ['RSS_FEED_LIST']
def parse_feed_list(s):
    """Split a comma-separated feed string into a list of entries.

    An empty input string yields [] rather than [''].
    """
    items = s.split(',')
    return [] if items == [''] else items
def main():
    """Wire up the scheduler and run its loop forever.

    Feeds are polled every 10 seconds; queued tweets are flushed every
    5 minutes.
    """
    rsslist = parse_feed_list(RSS_FEED_LIST)
    schedule.every(10).seconds.do(load_feed, rsslist)
    schedule.every(5).minutes.do(send_queued_tweet)
    while True:
        schedule.run_pending()
        # Sleep briefly so the busy loop does not spin at full speed.
        time.sleep(1)


def __main__():
    main()


if __name__ == "__main__":
    try:
        __main__()
    except (KeyboardInterrupt):
        # NOTE(review): the built-in exit() takes a single code/message
        # argument; the extra ``1`` here looks unintended -- confirm.
        exit('Received Ctrl+C. Stopping application.', 1)
|
# -*- coding: utf-8 -*-
import schedule
from feed import load_feed
from twitter import send_queued_tweet
import time
import os
RSS_FEED_LIST = os.environ['RSS_FEED_LIST']
def parse_feed_list(s):
    """Turn a comma-delimited feed string into a list of entries.

    Returns [] for the empty string (plain str.split would give ['']).
    """
    if not s:
        return []
    return s.split(',')
def main():
    """Wire up the scheduler and run its loop forever.

    Feeds are polled once an hour; queued tweets are flushed every
    5 minutes.
    """
    rsslist = parse_feed_list(RSS_FEED_LIST)
    schedule.every().hour.do(load_feed, rsslist)
    schedule.every(5).minutes.do(send_queued_tweet)
    while True:
        schedule.run_pending()
        # Sleep briefly so the busy loop does not spin at full speed.
        time.sleep(1)


def __main__():
    main()


if __name__ == "__main__":
    try:
        __main__()
    except (KeyboardInterrupt):
        # NOTE(review): the built-in exit() takes a single code/message
        # argument; the extra ``1`` here looks unintended -- confirm.
        exit('Received Ctrl+C. Stopping application.', 1)
|
Adjust when to load data.
|
Adjust when to load data.
|
Python
|
mit
|
ChattanoogaPublicLibrary/booksforcha
|
# -*- coding: utf-8 -*-
import schedule
from feed import load_feed
from twitter import send_queued_tweet
import time
import os
RSS_FEED_LIST = os.environ['RSS_FEED_LIST']
def parse_feed_list(s):
parsed = s.split(',')
if parsed == ['']:
return []
else:
return parsed
def main():
rsslist = parse_feed_list(RSS_FEED_LIST)
schedule.every(10).seconds.do(load_feed, rsslist)
schedule.every(5).minutes.do(send_queued_tweet)
while True:
schedule.run_pending()
time.sleep(1)
def __main__():
main()
if __name__ == "__main__":
try:
__main__()
except (KeyboardInterrupt):
exit('Received Ctrl+C. Stopping application.', 1)
Adjust when to load data.
|
# -*- coding: utf-8 -*-
import schedule
from feed import load_feed
from twitter import send_queued_tweet
import time
import os
RSS_FEED_LIST = os.environ['RSS_FEED_LIST']
def parse_feed_list(s):
parsed = s.split(',')
if parsed == ['']:
return []
else:
return parsed
def main():
rsslist = parse_feed_list(RSS_FEED_LIST)
schedule.every().hour.do(load_feed, rsslist)
schedule.every(5).minutes.do(send_queued_tweet)
while True:
schedule.run_pending()
time.sleep(1)
def __main__():
main()
if __name__ == "__main__":
try:
__main__()
except (KeyboardInterrupt):
exit('Received Ctrl+C. Stopping application.', 1)
|
<commit_before># -*- coding: utf-8 -*-
import schedule
from feed import load_feed
from twitter import send_queued_tweet
import time
import os
RSS_FEED_LIST = os.environ['RSS_FEED_LIST']
def parse_feed_list(s):
parsed = s.split(',')
if parsed == ['']:
return []
else:
return parsed
def main():
rsslist = parse_feed_list(RSS_FEED_LIST)
schedule.every(10).seconds.do(load_feed, rsslist)
schedule.every(5).minutes.do(send_queued_tweet)
while True:
schedule.run_pending()
time.sleep(1)
def __main__():
main()
if __name__ == "__main__":
try:
__main__()
except (KeyboardInterrupt):
exit('Received Ctrl+C. Stopping application.', 1)
<commit_msg>Adjust when to load data.<commit_after>
|
# -*- coding: utf-8 -*-
import schedule
from feed import load_feed
from twitter import send_queued_tweet
import time
import os
RSS_FEED_LIST = os.environ['RSS_FEED_LIST']
def parse_feed_list(s):
parsed = s.split(',')
if parsed == ['']:
return []
else:
return parsed
def main():
rsslist = parse_feed_list(RSS_FEED_LIST)
schedule.every().hour.do(load_feed, rsslist)
schedule.every(5).minutes.do(send_queued_tweet)
while True:
schedule.run_pending()
time.sleep(1)
def __main__():
main()
if __name__ == "__main__":
try:
__main__()
except (KeyboardInterrupt):
exit('Received Ctrl+C. Stopping application.', 1)
|
# -*- coding: utf-8 -*-
import schedule
from feed import load_feed
from twitter import send_queued_tweet
import time
import os
RSS_FEED_LIST = os.environ['RSS_FEED_LIST']
def parse_feed_list(s):
parsed = s.split(',')
if parsed == ['']:
return []
else:
return parsed
def main():
rsslist = parse_feed_list(RSS_FEED_LIST)
schedule.every(10).seconds.do(load_feed, rsslist)
schedule.every(5).minutes.do(send_queued_tweet)
while True:
schedule.run_pending()
time.sleep(1)
def __main__():
main()
if __name__ == "__main__":
try:
__main__()
except (KeyboardInterrupt):
exit('Received Ctrl+C. Stopping application.', 1)
Adjust when to load data.# -*- coding: utf-8 -*-
import schedule
from feed import load_feed
from twitter import send_queued_tweet
import time
import os
RSS_FEED_LIST = os.environ['RSS_FEED_LIST']
def parse_feed_list(s):
parsed = s.split(',')
if parsed == ['']:
return []
else:
return parsed
def main():
rsslist = parse_feed_list(RSS_FEED_LIST)
schedule.every().hour.do(load_feed, rsslist)
schedule.every(5).minutes.do(send_queued_tweet)
while True:
schedule.run_pending()
time.sleep(1)
def __main__():
main()
if __name__ == "__main__":
try:
__main__()
except (KeyboardInterrupt):
exit('Received Ctrl+C. Stopping application.', 1)
|
<commit_before># -*- coding: utf-8 -*-
import schedule
from feed import load_feed
from twitter import send_queued_tweet
import time
import os
RSS_FEED_LIST = os.environ['RSS_FEED_LIST']
def parse_feed_list(s):
parsed = s.split(',')
if parsed == ['']:
return []
else:
return parsed
def main():
rsslist = parse_feed_list(RSS_FEED_LIST)
schedule.every(10).seconds.do(load_feed, rsslist)
schedule.every(5).minutes.do(send_queued_tweet)
while True:
schedule.run_pending()
time.sleep(1)
def __main__():
main()
if __name__ == "__main__":
try:
__main__()
except (KeyboardInterrupt):
exit('Received Ctrl+C. Stopping application.', 1)
<commit_msg>Adjust when to load data.<commit_after># -*- coding: utf-8 -*-
import schedule
from feed import load_feed
from twitter import send_queued_tweet
import time
import os
RSS_FEED_LIST = os.environ['RSS_FEED_LIST']
def parse_feed_list(s):
parsed = s.split(',')
if parsed == ['']:
return []
else:
return parsed
def main():
rsslist = parse_feed_list(RSS_FEED_LIST)
schedule.every().hour.do(load_feed, rsslist)
schedule.every(5).minutes.do(send_queued_tweet)
while True:
schedule.run_pending()
time.sleep(1)
def __main__():
main()
if __name__ == "__main__":
try:
__main__()
except (KeyboardInterrupt):
exit('Received Ctrl+C. Stopping application.', 1)
|
81d6119f452afa0f69db4a7ab1f37906469d3b64
|
annotator/model/__init__.py
|
annotator/model/__init__.py
|
from .annotation import Annotation
from .consumer import Consumer
from .user import User
|
__all__ = ['Annotation', 'Consumer', 'User']
from .annotation import Annotation
from .consumer import Consumer
from .user import User
|
Make annotator.model 'import *' friendly
|
Make annotator.model 'import *' friendly
|
Python
|
mit
|
ningyifan/annotator-store,nobita-isc/annotator-store,happybelly/annotator-store,nobita-isc/annotator-store,nobita-isc/annotator-store,nobita-isc/annotator-store,openannotation/annotator-store
|
from .annotation import Annotation
from .consumer import Consumer
from .user import User
Make annotator.model 'import *' friendly
|
__all__ = ['Annotation', 'Consumer', 'User']
from .annotation import Annotation
from .consumer import Consumer
from .user import User
|
<commit_before>from .annotation import Annotation
from .consumer import Consumer
from .user import User
<commit_msg>Make annotator.model 'import *' friendly<commit_after>
|
__all__ = ['Annotation', 'Consumer', 'User']
from .annotation import Annotation
from .consumer import Consumer
from .user import User
|
from .annotation import Annotation
from .consumer import Consumer
from .user import User
Make annotator.model 'import *' friendly__all__ = ['Annotation', 'Consumer', 'User']
from .annotation import Annotation
from .consumer import Consumer
from .user import User
|
<commit_before>from .annotation import Annotation
from .consumer import Consumer
from .user import User
<commit_msg>Make annotator.model 'import *' friendly<commit_after>__all__ = ['Annotation', 'Consumer', 'User']
from .annotation import Annotation
from .consumer import Consumer
from .user import User
|
a925c19b85fcd3a2b6d08d253d3c8d1ef3c7b02f
|
core/migrations/0008_auto_20151029_0953.py
|
core/migrations/0008_auto_20151029_0953.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.contrib.contenttypes.management import update_all_contenttypes
def add_impersonate_permission(apps, schema_editor):
update_all_contenttypes() # Fixes tests
ContentType = apps.get_model('contenttypes.ContentType')
Permission = apps.get_model('auth.Permission')
content_type = ContentType.objects.get(app_label='auth', model='user')
Permission.objects.create(content_type=content_type,
codename='impersonate',
name='Can impersonate other user')
def delete_impersonate_permission(apps, schema_editor):
apps.get_model('auth.Permission').objects.get(
codename='impersonate').delete()
class Migration(migrations.Migration):
dependencies = [
('core', '0007_auto_20150319_0929'),
]
operations = [
migrations.RunPython(add_impersonate_permission,
delete_impersonate_permission),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.contrib.contenttypes.management import update_all_contenttypes
def add_impersonate_permission(apps, schema_editor):
update_all_contenttypes() # Fixes tests
ContentType = apps.get_model('contenttypes.ContentType')
Permission = apps.get_model('auth.Permission')
content_type = ContentType.objects.using(
schema_editor.connection.alias).get(app_label='auth', model='user')
Permission.objects.using(schema_editor.connection.alias).create(
content_type=content_type,
codename='impersonate',
name='Can impersonate other user'
)
def delete_impersonate_permission(apps, schema_editor):
perm = apps.get_model('auth.Permission').objects.using(
schema_editor.connection.alias).get(codename='impersonate')
perm.delete(using=schema_editor.connection.alias)
class Migration(migrations.Migration):
dependencies = [
('core', '0007_auto_20150319_0929'),
]
operations = [
migrations.RunPython(add_impersonate_permission,
delete_impersonate_permission),
]
|
Update latest migration to use the database provided to the migrate management command
|
Update latest migration to use the database provided to the migrate management command
|
Python
|
agpl-3.0
|
tic-ull/portal-del-investigador,tic-ull/portal-del-investigador,tic-ull/portal-del-investigador,tic-ull/portal-del-investigador
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.contrib.contenttypes.management import update_all_contenttypes
def add_impersonate_permission(apps, schema_editor):
update_all_contenttypes() # Fixes tests
ContentType = apps.get_model('contenttypes.ContentType')
Permission = apps.get_model('auth.Permission')
content_type = ContentType.objects.get(app_label='auth', model='user')
Permission.objects.create(content_type=content_type,
codename='impersonate',
name='Can impersonate other user')
def delete_impersonate_permission(apps, schema_editor):
apps.get_model('auth.Permission').objects.get(
codename='impersonate').delete()
class Migration(migrations.Migration):
dependencies = [
('core', '0007_auto_20150319_0929'),
]
operations = [
migrations.RunPython(add_impersonate_permission,
delete_impersonate_permission),
]Update latest migration to use the database provided to the migrate management command
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.contrib.contenttypes.management import update_all_contenttypes
def add_impersonate_permission(apps, schema_editor):
update_all_contenttypes() # Fixes tests
ContentType = apps.get_model('contenttypes.ContentType')
Permission = apps.get_model('auth.Permission')
content_type = ContentType.objects.using(
schema_editor.connection.alias).get(app_label='auth', model='user')
Permission.objects.using(schema_editor.connection.alias).create(
content_type=content_type,
codename='impersonate',
name='Can impersonate other user'
)
def delete_impersonate_permission(apps, schema_editor):
perm = apps.get_model('auth.Permission').objects.using(
schema_editor.connection.alias).get(codename='impersonate')
perm.delete(using=schema_editor.connection.alias)
class Migration(migrations.Migration):
dependencies = [
('core', '0007_auto_20150319_0929'),
]
operations = [
migrations.RunPython(add_impersonate_permission,
delete_impersonate_permission),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.contrib.contenttypes.management import update_all_contenttypes
def add_impersonate_permission(apps, schema_editor):
update_all_contenttypes() # Fixes tests
ContentType = apps.get_model('contenttypes.ContentType')
Permission = apps.get_model('auth.Permission')
content_type = ContentType.objects.get(app_label='auth', model='user')
Permission.objects.create(content_type=content_type,
codename='impersonate',
name='Can impersonate other user')
def delete_impersonate_permission(apps, schema_editor):
apps.get_model('auth.Permission').objects.get(
codename='impersonate').delete()
class Migration(migrations.Migration):
dependencies = [
('core', '0007_auto_20150319_0929'),
]
operations = [
migrations.RunPython(add_impersonate_permission,
delete_impersonate_permission),
]<commit_msg>Update latest migration to use the database provided to the migrate management command<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.contrib.contenttypes.management import update_all_contenttypes
def add_impersonate_permission(apps, schema_editor):
update_all_contenttypes() # Fixes tests
ContentType = apps.get_model('contenttypes.ContentType')
Permission = apps.get_model('auth.Permission')
content_type = ContentType.objects.using(
schema_editor.connection.alias).get(app_label='auth', model='user')
Permission.objects.using(schema_editor.connection.alias).create(
content_type=content_type,
codename='impersonate',
name='Can impersonate other user'
)
def delete_impersonate_permission(apps, schema_editor):
perm = apps.get_model('auth.Permission').objects.using(
schema_editor.connection.alias).get(codename='impersonate')
perm.delete(using=schema_editor.connection.alias)
class Migration(migrations.Migration):
dependencies = [
('core', '0007_auto_20150319_0929'),
]
operations = [
migrations.RunPython(add_impersonate_permission,
delete_impersonate_permission),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.contrib.contenttypes.management import update_all_contenttypes
def add_impersonate_permission(apps, schema_editor):
update_all_contenttypes() # Fixes tests
ContentType = apps.get_model('contenttypes.ContentType')
Permission = apps.get_model('auth.Permission')
content_type = ContentType.objects.get(app_label='auth', model='user')
Permission.objects.create(content_type=content_type,
codename='impersonate',
name='Can impersonate other user')
def delete_impersonate_permission(apps, schema_editor):
apps.get_model('auth.Permission').objects.get(
codename='impersonate').delete()
class Migration(migrations.Migration):
dependencies = [
('core', '0007_auto_20150319_0929'),
]
operations = [
migrations.RunPython(add_impersonate_permission,
delete_impersonate_permission),
]Update latest migration to use the database provided to the migrate management command# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.contrib.contenttypes.management import update_all_contenttypes
def add_impersonate_permission(apps, schema_editor):
update_all_contenttypes() # Fixes tests
ContentType = apps.get_model('contenttypes.ContentType')
Permission = apps.get_model('auth.Permission')
content_type = ContentType.objects.using(
schema_editor.connection.alias).get(app_label='auth', model='user')
Permission.objects.using(schema_editor.connection.alias).create(
content_type=content_type,
codename='impersonate',
name='Can impersonate other user'
)
def delete_impersonate_permission(apps, schema_editor):
perm = apps.get_model('auth.Permission').objects.using(
schema_editor.connection.alias).get(codename='impersonate')
perm.delete(using=schema_editor.connection.alias)
class Migration(migrations.Migration):
dependencies = [
('core', '0007_auto_20150319_0929'),
]
operations = [
migrations.RunPython(add_impersonate_permission,
delete_impersonate_permission),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.contrib.contenttypes.management import update_all_contenttypes
def add_impersonate_permission(apps, schema_editor):
update_all_contenttypes() # Fixes tests
ContentType = apps.get_model('contenttypes.ContentType')
Permission = apps.get_model('auth.Permission')
content_type = ContentType.objects.get(app_label='auth', model='user')
Permission.objects.create(content_type=content_type,
codename='impersonate',
name='Can impersonate other user')
def delete_impersonate_permission(apps, schema_editor):
apps.get_model('auth.Permission').objects.get(
codename='impersonate').delete()
class Migration(migrations.Migration):
dependencies = [
('core', '0007_auto_20150319_0929'),
]
operations = [
migrations.RunPython(add_impersonate_permission,
delete_impersonate_permission),
]<commit_msg>Update latest migration to use the database provided to the migrate management command<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.contrib.contenttypes.management import update_all_contenttypes
def add_impersonate_permission(apps, schema_editor):
update_all_contenttypes() # Fixes tests
ContentType = apps.get_model('contenttypes.ContentType')
Permission = apps.get_model('auth.Permission')
content_type = ContentType.objects.using(
schema_editor.connection.alias).get(app_label='auth', model='user')
Permission.objects.using(schema_editor.connection.alias).create(
content_type=content_type,
codename='impersonate',
name='Can impersonate other user'
)
def delete_impersonate_permission(apps, schema_editor):
perm = apps.get_model('auth.Permission').objects.using(
schema_editor.connection.alias).get(codename='impersonate')
perm.delete(using=schema_editor.connection.alias)
class Migration(migrations.Migration):
dependencies = [
('core', '0007_auto_20150319_0929'),
]
operations = [
migrations.RunPython(add_impersonate_permission,
delete_impersonate_permission),
]
|
d74f7d2384d48115ea58737332e4636ba9fdd4aa
|
ini_tools/get_ini_fields.py
|
ini_tools/get_ini_fields.py
|
"""
collect information about fields and values in ini file
usage run script with file name in directory with unpacked stats.
Script will collect data from all files with name.
You can specify path as second argument.
python get_ini_fields.py body.ini
python get_ini_fields.py body.ini "C:/games/warzone2100"
"""
import os
import sys
from config_parser import WZConfigParser
def get_ini_fields(fields, path):
cp = WZConfigParser()
cp.load(path)
for section in cp.sections():
for key, value in cp.items(section):
fields.setdefault(key, set()).add(value)
if __name__ == "__main__":
name = sys.argv[1]
path = sys.argv[2]
fields = {}
for base, dirs, files in os.walk(path):
if name in files:
file_path = os.path.join(base, name)
get_ini_fields(fields, file_path)
print "collectiong data from", file_path
for field, values in fields.items():
print field, ' '.join(sorted(values))
|
"""
collect information about fields and values in ini file
usage run script with file name in directory with unpacked stats.
Script will collect data from all files with name.
You can specify path as second argument.
python get_ini_fields.py body.ini
python get_ini_fields.py body.ini "C:/games/warzone2100"
"""
import os
import sys
from config_parser import WZConfigParser
def get_ini_fields(fields, path):
cp = WZConfigParser()
cp.load(path)
for section in cp.sections():
for key, value in cp.items(section):
fields.setdefault(key, []).append(value)
if __name__ == "__main__":
name = sys.argv[1]
path = sys.argv[2]
fields = {}
for base, dirs, files in os.walk(path):
if name in files:
file_path = os.path.join(base, name)
get_ini_fields(fields, file_path)
print "collectiong data from", file_path
max_size = max(map(len, fields.values()))
for field, values in fields.items():
print field, "requires=%s" % (len(values) == max_size), "values:", ', '.join('"%s"' % x for x in sorted(set(values)))
|
Update collect ini script. Now it shows if field is required.
|
Update collect ini script. Now it shows if field is required.
|
Python
|
cc0-1.0
|
haoNoQ/wztools2100,haoNoQ/wztools2100,haoNoQ/wztools2100
|
"""
collect information about fields and values in ini file
usage run script with file name in directory with unpacked stats.
Script will collect data from all files with name.
You can specify path as second argument.
python get_ini_fields.py body.ini
python get_ini_fields.py body.ini "C:/games/warzone2100"
"""
import os
import sys
from config_parser import WZConfigParser
def get_ini_fields(fields, path):
cp = WZConfigParser()
cp.load(path)
for section in cp.sections():
for key, value in cp.items(section):
fields.setdefault(key, set()).add(value)
if __name__ == "__main__":
name = sys.argv[1]
path = sys.argv[2]
fields = {}
for base, dirs, files in os.walk(path):
if name in files:
file_path = os.path.join(base, name)
get_ini_fields(fields, file_path)
print "collectiong data from", file_path
for field, values in fields.items():
print field, ' '.join(sorted(values))
Update collect ini script. Now it shows if field is required.
|
"""
collect information about fields and values in ini file
usage run script with file name in directory with unpacked stats.
Script will collect data from all files with name.
You can specify path as second argument.
python get_ini_fields.py body.ini
python get_ini_fields.py body.ini "C:/games/warzone2100"
"""
import os
import sys
from config_parser import WZConfigParser
def get_ini_fields(fields, path):
cp = WZConfigParser()
cp.load(path)
for section in cp.sections():
for key, value in cp.items(section):
fields.setdefault(key, []).append(value)
if __name__ == "__main__":
name = sys.argv[1]
path = sys.argv[2]
fields = {}
for base, dirs, files in os.walk(path):
if name in files:
file_path = os.path.join(base, name)
get_ini_fields(fields, file_path)
print "collectiong data from", file_path
max_size = max(map(len, fields.values()))
for field, values in fields.items():
print field, "requires=%s" % (len(values) == max_size), "values:", ', '.join('"%s"' % x for x in sorted(set(values)))
|
<commit_before>"""
collect information about fields and values in ini file
usage run script with file name in directory with unpacked stats.
Script will collect data from all files with name.
You can specify path as second argument.
python get_ini_fields.py body.ini
python get_ini_fields.py body.ini "C:/games/warzone2100"
"""
import os
import sys
from config_parser import WZConfigParser
def get_ini_fields(fields, path):
cp = WZConfigParser()
cp.load(path)
for section in cp.sections():
for key, value in cp.items(section):
fields.setdefault(key, set()).add(value)
if __name__ == "__main__":
name = sys.argv[1]
path = sys.argv[2]
fields = {}
for base, dirs, files in os.walk(path):
if name in files:
file_path = os.path.join(base, name)
get_ini_fields(fields, file_path)
print "collectiong data from", file_path
for field, values in fields.items():
print field, ' '.join(sorted(values))
<commit_msg>Update collect ini script. Now it shows if field is required.<commit_after>
|
"""
collect information about fields and values in ini file
usage run script with file name in directory with unpacked stats.
Script will collect data from all files with name.
You can specify path as second argument.
python get_ini_fields.py body.ini
python get_ini_fields.py body.ini "C:/games/warzone2100"
"""
import os
import sys
from config_parser import WZConfigParser
def get_ini_fields(fields, path):
cp = WZConfigParser()
cp.load(path)
for section in cp.sections():
for key, value in cp.items(section):
fields.setdefault(key, []).append(value)
if __name__ == "__main__":
name = sys.argv[1]
path = sys.argv[2]
fields = {}
for base, dirs, files in os.walk(path):
if name in files:
file_path = os.path.join(base, name)
get_ini_fields(fields, file_path)
print "collectiong data from", file_path
max_size = max(map(len, fields.values()))
for field, values in fields.items():
print field, "requires=%s" % (len(values) == max_size), "values:", ', '.join('"%s"' % x for x in sorted(set(values)))
|
"""
collect information about fields and values in ini file
usage run script with file name in directory with unpacked stats.
Script will collect data from all files with name.
You can specify path as second argument.
python get_ini_fields.py body.ini
python get_ini_fields.py body.ini "C:/games/warzone2100"
"""
import os
import sys
from config_parser import WZConfigParser
def get_ini_fields(fields, path):
cp = WZConfigParser()
cp.load(path)
for section in cp.sections():
for key, value in cp.items(section):
fields.setdefault(key, set()).add(value)
if __name__ == "__main__":
name = sys.argv[1]
path = sys.argv[2]
fields = {}
for base, dirs, files in os.walk(path):
if name in files:
file_path = os.path.join(base, name)
get_ini_fields(fields, file_path)
print "collectiong data from", file_path
for field, values in fields.items():
print field, ' '.join(sorted(values))
Update collect ini script. Now it shows if field is required."""
collect information about fields and values in ini file
usage run script with file name in directory with unpacked stats.
Script will collect data from all files with name.
You can specify path as second argument.
python get_ini_fields.py body.ini
python get_ini_fields.py body.ini "C:/games/warzone2100"
"""
import os
import sys
from config_parser import WZConfigParser
def get_ini_fields(fields, path):
cp = WZConfigParser()
cp.load(path)
for section in cp.sections():
for key, value in cp.items(section):
fields.setdefault(key, []).append(value)
if __name__ == "__main__":
name = sys.argv[1]
path = sys.argv[2]
fields = {}
for base, dirs, files in os.walk(path):
if name in files:
file_path = os.path.join(base, name)
get_ini_fields(fields, file_path)
print "collectiong data from", file_path
max_size = max(map(len, fields.values()))
for field, values in fields.items():
print field, "requires=%s" % (len(values) == max_size), "values:", ', '.join('"%s"' % x for x in sorted(set(values)))
|
<commit_before>"""
collect information about fields and values in ini file
usage run script with file name in directory with unpacked stats.
Script will collect data from all files with name.
You can specify path as second argument.
python get_ini_fields.py body.ini
python get_ini_fields.py body.ini "C:/games/warzone2100"
"""
import os
import sys
from config_parser import WZConfigParser
def get_ini_fields(fields, path):
cp = WZConfigParser()
cp.load(path)
for section in cp.sections():
for key, value in cp.items(section):
fields.setdefault(key, set()).add(value)
if __name__ == "__main__":
name = sys.argv[1]
path = sys.argv[2]
fields = {}
for base, dirs, files in os.walk(path):
if name in files:
file_path = os.path.join(base, name)
get_ini_fields(fields, file_path)
print "collectiong data from", file_path
for field, values in fields.items():
print field, ' '.join(sorted(values))
<commit_msg>Update collect ini script. Now it shows if field is required.<commit_after>"""
collect information about fields and values in ini file
usage run script with file name in directory with unpacked stats.
Script will collect data from all files with name.
You can specify path as second argument.
python get_ini_fields.py body.ini
python get_ini_fields.py body.ini "C:/games/warzone2100"
"""
import os
import sys
from config_parser import WZConfigParser
def get_ini_fields(fields, path):
cp = WZConfigParser()
cp.load(path)
for section in cp.sections():
for key, value in cp.items(section):
fields.setdefault(key, []).append(value)
if __name__ == "__main__":
name = sys.argv[1]
path = sys.argv[2]
fields = {}
for base, dirs, files in os.walk(path):
if name in files:
file_path = os.path.join(base, name)
get_ini_fields(fields, file_path)
print "collectiong data from", file_path
max_size = max(map(len, fields.values()))
for field, values in fields.items():
print field, "requires=%s" % (len(values) == max_size), "values:", ', '.join('"%s"' % x for x in sorted(set(values)))
|
733ea52d5374c4c5d5c10f1a04e3300fd6f4695c
|
features/steps/interactive.py
|
features/steps/interactive.py
|
import time, pexpect, re
import nose.tools as nt
import subprocess as spr
PROMPT = "root@\w+:[^\r]+"
ENTER = "\n"
def type(process, input_):
process.send(input_.encode())
process.expect(PROMPT)
# Remove the typed input from the returned standard out
return re.sub(re.escape(input_.strip()), '', process.before).strip()
@when(u'I run the interactive command')
def step_impl(context):
process = pexpect.spawn(context.text)
time.sleep(0.5)
type(process, ENTER)
class Output(object):
pass
context.output = Output()
context.output.stderr = ""
context.output.stdout = ""
context.process = process
@when(u'I type')
def step_impl(context):
cmd = context.text.strip() + "\n"
context.output.stdout = type(context.process, cmd)
@when(u'I exit the shell')
def step_impl(context):
context.process.send("exit\n")
|
import time, pexpect, re
PROMPT = "root@\w+:[^\r]+"
UP_ARROW = "\x1b[A"
def type(process, input_):
process.send(input_.encode())
process.expect(PROMPT)
# Remove the typed input from the returned standard out
return re.sub(re.escape(input_.strip()), '', process.before).strip()
@when(u'I run the interactive command')
def step_impl(context):
process = pexpect.spawn(context.text)
time.sleep(3)
type(process, UP_ARROW)
class Output(object):
pass
context.output = Output()
context.output.stderr = ""
context.output.stdout = ""
context.process = process
@when(u'I type')
def step_impl(context):
cmd = context.text.strip() + "\n"
context.output.stdout = type(context.process, cmd)
@when(u'I exit the shell')
def step_impl(context):
context.process.send("exit\n")
|
Allow tty process longer time to spawn in feature tests
|
Allow tty process longer time to spawn in feature tests
|
Python
|
mit
|
michaelbarton/command-line-interface,bioboxes/command-line-interface,michaelbarton/command-line-interface,bioboxes/command-line-interface,pbelmann/command-line-interface,pbelmann/command-line-interface
|
import time, pexpect, re
import nose.tools as nt
import subprocess as spr
PROMPT = "root@\w+:[^\r]+"
ENTER = "\n"
def type(process, input_):
process.send(input_.encode())
process.expect(PROMPT)
# Remove the typed input from the returned standard out
return re.sub(re.escape(input_.strip()), '', process.before).strip()
@when(u'I run the interactive command')
def step_impl(context):
process = pexpect.spawn(context.text)
time.sleep(0.5)
type(process, ENTER)
class Output(object):
pass
context.output = Output()
context.output.stderr = ""
context.output.stdout = ""
context.process = process
@when(u'I type')
def step_impl(context):
cmd = context.text.strip() + "\n"
context.output.stdout = type(context.process, cmd)
@when(u'I exit the shell')
def step_impl(context):
context.process.send("exit\n")
Allow tty process longer time to spawn in feature tests
|
import time, pexpect, re
PROMPT = "root@\w+:[^\r]+"
UP_ARROW = "\x1b[A"
def type(process, input_):
process.send(input_.encode())
process.expect(PROMPT)
# Remove the typed input from the returned standard out
return re.sub(re.escape(input_.strip()), '', process.before).strip()
@when(u'I run the interactive command')
def step_impl(context):
process = pexpect.spawn(context.text)
time.sleep(3)
type(process, UP_ARROW)
class Output(object):
pass
context.output = Output()
context.output.stderr = ""
context.output.stdout = ""
context.process = process
@when(u'I type')
def step_impl(context):
cmd = context.text.strip() + "\n"
context.output.stdout = type(context.process, cmd)
@when(u'I exit the shell')
def step_impl(context):
context.process.send("exit\n")
|
<commit_before>import time, pexpect, re
import nose.tools as nt
import subprocess as spr
PROMPT = "root@\w+:[^\r]+"
ENTER = "\n"
def type(process, input_):
process.send(input_.encode())
process.expect(PROMPT)
# Remove the typed input from the returned standard out
return re.sub(re.escape(input_.strip()), '', process.before).strip()
@when(u'I run the interactive command')
def step_impl(context):
process = pexpect.spawn(context.text)
time.sleep(0.5)
type(process, ENTER)
class Output(object):
pass
context.output = Output()
context.output.stderr = ""
context.output.stdout = ""
context.process = process
@when(u'I type')
def step_impl(context):
cmd = context.text.strip() + "\n"
context.output.stdout = type(context.process, cmd)
@when(u'I exit the shell')
def step_impl(context):
context.process.send("exit\n")
<commit_msg>Allow tty process longer time to spawn in feature tests<commit_after>
|
import time, pexpect, re
PROMPT = "root@\w+:[^\r]+"
UP_ARROW = "\x1b[A"
def type(process, input_):
process.send(input_.encode())
process.expect(PROMPT)
# Remove the typed input from the returned standard out
return re.sub(re.escape(input_.strip()), '', process.before).strip()
@when(u'I run the interactive command')
def step_impl(context):
process = pexpect.spawn(context.text)
time.sleep(3)
type(process, UP_ARROW)
class Output(object):
pass
context.output = Output()
context.output.stderr = ""
context.output.stdout = ""
context.process = process
@when(u'I type')
def step_impl(context):
cmd = context.text.strip() + "\n"
context.output.stdout = type(context.process, cmd)
@when(u'I exit the shell')
def step_impl(context):
context.process.send("exit\n")
|
import time, pexpect, re
import nose.tools as nt
import subprocess as spr
PROMPT = "root@\w+:[^\r]+"
ENTER = "\n"
def type(process, input_):
process.send(input_.encode())
process.expect(PROMPT)
# Remove the typed input from the returned standard out
return re.sub(re.escape(input_.strip()), '', process.before).strip()
@when(u'I run the interactive command')
def step_impl(context):
process = pexpect.spawn(context.text)
time.sleep(0.5)
type(process, ENTER)
class Output(object):
pass
context.output = Output()
context.output.stderr = ""
context.output.stdout = ""
context.process = process
@when(u'I type')
def step_impl(context):
cmd = context.text.strip() + "\n"
context.output.stdout = type(context.process, cmd)
@when(u'I exit the shell')
def step_impl(context):
context.process.send("exit\n")
Allow tty process longer time to spawn in feature testsimport time, pexpect, re
PROMPT = "root@\w+:[^\r]+"
UP_ARROW = "\x1b[A"
def type(process, input_):
process.send(input_.encode())
process.expect(PROMPT)
# Remove the typed input from the returned standard out
return re.sub(re.escape(input_.strip()), '', process.before).strip()
@when(u'I run the interactive command')
def step_impl(context):
process = pexpect.spawn(context.text)
time.sleep(3)
type(process, UP_ARROW)
class Output(object):
pass
context.output = Output()
context.output.stderr = ""
context.output.stdout = ""
context.process = process
@when(u'I type')
def step_impl(context):
cmd = context.text.strip() + "\n"
context.output.stdout = type(context.process, cmd)
@when(u'I exit the shell')
def step_impl(context):
context.process.send("exit\n")
|
<commit_before>import time, pexpect, re
import nose.tools as nt
import subprocess as spr
PROMPT = "root@\w+:[^\r]+"
ENTER = "\n"
def type(process, input_):
process.send(input_.encode())
process.expect(PROMPT)
# Remove the typed input from the returned standard out
return re.sub(re.escape(input_.strip()), '', process.before).strip()
@when(u'I run the interactive command')
def step_impl(context):
process = pexpect.spawn(context.text)
time.sleep(0.5)
type(process, ENTER)
class Output(object):
pass
context.output = Output()
context.output.stderr = ""
context.output.stdout = ""
context.process = process
@when(u'I type')
def step_impl(context):
cmd = context.text.strip() + "\n"
context.output.stdout = type(context.process, cmd)
@when(u'I exit the shell')
def step_impl(context):
context.process.send("exit\n")
<commit_msg>Allow tty process longer time to spawn in feature tests<commit_after>import time, pexpect, re
PROMPT = "root@\w+:[^\r]+"
UP_ARROW = "\x1b[A"
def type(process, input_):
process.send(input_.encode())
process.expect(PROMPT)
# Remove the typed input from the returned standard out
return re.sub(re.escape(input_.strip()), '', process.before).strip()
@when(u'I run the interactive command')
def step_impl(context):
process = pexpect.spawn(context.text)
time.sleep(3)
type(process, UP_ARROW)
class Output(object):
pass
context.output = Output()
context.output.stderr = ""
context.output.stdout = ""
context.process = process
@when(u'I type')
def step_impl(context):
cmd = context.text.strip() + "\n"
context.output.stdout = type(context.process, cmd)
@when(u'I exit the shell')
def step_impl(context):
context.process.send("exit\n")
|
e5799aae4ea73509b183cd40e8f9c629e2e445a1
|
lektor_embed_x.py
|
lektor_embed_x.py
|
# -*- coding: utf-8 -*-
"""
lektor-embed-x
~~~~~~~~~~~~~~
Simply embed rich contents from popular sites in Lektor-generated pages
:copyright: (c) 2016 by Khaled Monsoor
:license: The MIT License
"""
from embedx import OnlineContent
from lektor.pluginsystem import Plugin
from markupsafe import Markup
__version__ = '0.1.2'
__author__ = 'Khaled Monsoor <k@kmonsoor.com>'
class EmbedXMixin(object):
def autolink(self, link, is_email):
if is_email:
return super(EmbedXMixin, self).autolink(link, True)
else:
try:
content = OnlineContent(link)
# print content.get_embed_code()
return Markup(content.get_embed_code())
except (NotImplementedError, ValueError):
print('This host or this specific content is not supported yet. link: {0}'.format(link))
return super(EmbedXMixin, self).autolink(link, False)
class EmbedXPlugin(Plugin):
name = u'lektor-embed-x'
description = u'Simply embed rich content from popular sites in Lektor-generated pages'
def on_markdown_config(self, config, **extra):
config.renderer_mixins.append(EmbedXMixin)
|
# -*- coding: utf-8 -*-
"""
lektor-embed-x
~~~~~~~~~~~~~~
Simply embed rich contents from popular sites in Lektor-generated pages
:copyright: (c) 2016 by Khaled Monsoor
:license: The MIT License
"""
from embedx import OnlineContent
from lektor.pluginsystem import Plugin
from markupsafe import Markup
__version__ = '0.1.2'
__author__ = 'Khaled Monsoor <k@kmonsoor.com>'
class EmbedXMixin(object):
def autolink(self, link, is_email):
if is_email:
return super(EmbedXMixin, self).autolink(link, True)
else:
try:
content = OnlineContent(link)
# print content.get_embed_code()
return Markup(content.get_embed_code())
except (NotImplementedError, ValueError):
print('This host or this specific content is not supported yet. link: {0}'.format(link))
return Markup(super(EmbedXMixin, self).autolink(link, False))
class EmbedXPlugin(Plugin):
name = u'lektor-embed-x'
description = u'Simply embed rich content from popular sites in Lektor-generated pages'
def on_markdown_config(self, config, **extra):
config.renderer_mixins.append(EmbedXMixin)
|
Return a Markup object when the host is not supported
|
Return a Markup object when the host is not supported
This might be an issue with the Markdown parser used by lektor (mistune),
but when a Markup() object is returned, then regular links must also
be marked as Markup() or they won't be escaped.
|
Python
|
mit
|
kmonsoor/lektor-embed-x
|
# -*- coding: utf-8 -*-
"""
lektor-embed-x
~~~~~~~~~~~~~~
Simply embed rich contents from popular sites in Lektor-generated pages
:copyright: (c) 2016 by Khaled Monsoor
:license: The MIT License
"""
from embedx import OnlineContent
from lektor.pluginsystem import Plugin
from markupsafe import Markup
__version__ = '0.1.2'
__author__ = 'Khaled Monsoor <k@kmonsoor.com>'
class EmbedXMixin(object):
def autolink(self, link, is_email):
if is_email:
return super(EmbedXMixin, self).autolink(link, True)
else:
try:
content = OnlineContent(link)
# print content.get_embed_code()
return Markup(content.get_embed_code())
except (NotImplementedError, ValueError):
print('This host or this specific content is not supported yet. link: {0}'.format(link))
return super(EmbedXMixin, self).autolink(link, False)
class EmbedXPlugin(Plugin):
name = u'lektor-embed-x'
description = u'Simply embed rich content from popular sites in Lektor-generated pages'
def on_markdown_config(self, config, **extra):
config.renderer_mixins.append(EmbedXMixin)
Return a Markup object when the host is not supported
This might be an issue with the Markdown parser used by lektor (mistune),
but when a Markup() object is returned, then regular links must also
be marked as Markup() or they won't be escaped.
|
# -*- coding: utf-8 -*-
"""
lektor-embed-x
~~~~~~~~~~~~~~
Simply embed rich contents from popular sites in Lektor-generated pages
:copyright: (c) 2016 by Khaled Monsoor
:license: The MIT License
"""
from embedx import OnlineContent
from lektor.pluginsystem import Plugin
from markupsafe import Markup
__version__ = '0.1.2'
__author__ = 'Khaled Monsoor <k@kmonsoor.com>'
class EmbedXMixin(object):
def autolink(self, link, is_email):
if is_email:
return super(EmbedXMixin, self).autolink(link, True)
else:
try:
content = OnlineContent(link)
# print content.get_embed_code()
return Markup(content.get_embed_code())
except (NotImplementedError, ValueError):
print('This host or this specific content is not supported yet. link: {0}'.format(link))
return Markup(super(EmbedXMixin, self).autolink(link, False))
class EmbedXPlugin(Plugin):
name = u'lektor-embed-x'
description = u'Simply embed rich content from popular sites in Lektor-generated pages'
def on_markdown_config(self, config, **extra):
config.renderer_mixins.append(EmbedXMixin)
|
<commit_before># -*- coding: utf-8 -*-
"""
lektor-embed-x
~~~~~~~~~~~~~~
Simply embed rich contents from popular sites in Lektor-generated pages
:copyright: (c) 2016 by Khaled Monsoor
:license: The MIT License
"""
from embedx import OnlineContent
from lektor.pluginsystem import Plugin
from markupsafe import Markup
__version__ = '0.1.2'
__author__ = 'Khaled Monsoor <k@kmonsoor.com>'
class EmbedXMixin(object):
def autolink(self, link, is_email):
if is_email:
return super(EmbedXMixin, self).autolink(link, True)
else:
try:
content = OnlineContent(link)
# print content.get_embed_code()
return Markup(content.get_embed_code())
except (NotImplementedError, ValueError):
print('This host or this specific content is not supported yet. link: {0}'.format(link))
return super(EmbedXMixin, self).autolink(link, False)
class EmbedXPlugin(Plugin):
name = u'lektor-embed-x'
description = u'Simply embed rich content from popular sites in Lektor-generated pages'
def on_markdown_config(self, config, **extra):
config.renderer_mixins.append(EmbedXMixin)
<commit_msg>Return a Markup object when the host is not supported
This might be an issue with the Markdown parser used by lektor (mistune),
but when a Markup() object is returned, then regular links must also
be marked as Markup() or they won't be escaped.<commit_after>
|
# -*- coding: utf-8 -*-
"""
lektor-embed-x
~~~~~~~~~~~~~~
Simply embed rich contents from popular sites in Lektor-generated pages
:copyright: (c) 2016 by Khaled Monsoor
:license: The MIT License
"""
from embedx import OnlineContent
from lektor.pluginsystem import Plugin
from markupsafe import Markup
__version__ = '0.1.2'
__author__ = 'Khaled Monsoor <k@kmonsoor.com>'
class EmbedXMixin(object):
def autolink(self, link, is_email):
if is_email:
return super(EmbedXMixin, self).autolink(link, True)
else:
try:
content = OnlineContent(link)
# print content.get_embed_code()
return Markup(content.get_embed_code())
except (NotImplementedError, ValueError):
print('This host or this specific content is not supported yet. link: {0}'.format(link))
return Markup(super(EmbedXMixin, self).autolink(link, False))
class EmbedXPlugin(Plugin):
name = u'lektor-embed-x'
description = u'Simply embed rich content from popular sites in Lektor-generated pages'
def on_markdown_config(self, config, **extra):
config.renderer_mixins.append(EmbedXMixin)
|
# -*- coding: utf-8 -*-
"""
lektor-embed-x
~~~~~~~~~~~~~~
Simply embed rich contents from popular sites in Lektor-generated pages
:copyright: (c) 2016 by Khaled Monsoor
:license: The MIT License
"""
from embedx import OnlineContent
from lektor.pluginsystem import Plugin
from markupsafe import Markup
__version__ = '0.1.2'
__author__ = 'Khaled Monsoor <k@kmonsoor.com>'
class EmbedXMixin(object):
def autolink(self, link, is_email):
if is_email:
return super(EmbedXMixin, self).autolink(link, True)
else:
try:
content = OnlineContent(link)
# print content.get_embed_code()
return Markup(content.get_embed_code())
except (NotImplementedError, ValueError):
print('This host or this specific content is not supported yet. link: {0}'.format(link))
return super(EmbedXMixin, self).autolink(link, False)
class EmbedXPlugin(Plugin):
name = u'lektor-embed-x'
description = u'Simply embed rich content from popular sites in Lektor-generated pages'
def on_markdown_config(self, config, **extra):
config.renderer_mixins.append(EmbedXMixin)
Return a Markup object when the host is not supported
This might be an issue with the Markdown parser used by lektor (mistune),
but when a Markup() object is returned, then regular links must also
be marked as Markup() or they won't be escaped.# -*- coding: utf-8 -*-
"""
lektor-embed-x
~~~~~~~~~~~~~~
Simply embed rich contents from popular sites in Lektor-generated pages
:copyright: (c) 2016 by Khaled Monsoor
:license: The MIT License
"""
from embedx import OnlineContent
from lektor.pluginsystem import Plugin
from markupsafe import Markup
__version__ = '0.1.2'
__author__ = 'Khaled Monsoor <k@kmonsoor.com>'
class EmbedXMixin(object):
def autolink(self, link, is_email):
if is_email:
return super(EmbedXMixin, self).autolink(link, True)
else:
try:
content = OnlineContent(link)
# print content.get_embed_code()
return Markup(content.get_embed_code())
except (NotImplementedError, ValueError):
print('This host or this specific content is not supported yet. link: {0}'.format(link))
return Markup(super(EmbedXMixin, self).autolink(link, False))
class EmbedXPlugin(Plugin):
name = u'lektor-embed-x'
description = u'Simply embed rich content from popular sites in Lektor-generated pages'
def on_markdown_config(self, config, **extra):
config.renderer_mixins.append(EmbedXMixin)
|
<commit_before># -*- coding: utf-8 -*-
"""
lektor-embed-x
~~~~~~~~~~~~~~
Simply embed rich contents from popular sites in Lektor-generated pages
:copyright: (c) 2016 by Khaled Monsoor
:license: The MIT License
"""
from embedx import OnlineContent
from lektor.pluginsystem import Plugin
from markupsafe import Markup
__version__ = '0.1.2'
__author__ = 'Khaled Monsoor <k@kmonsoor.com>'
class EmbedXMixin(object):
def autolink(self, link, is_email):
if is_email:
return super(EmbedXMixin, self).autolink(link, True)
else:
try:
content = OnlineContent(link)
# print content.get_embed_code()
return Markup(content.get_embed_code())
except (NotImplementedError, ValueError):
print('This host or this specific content is not supported yet. link: {0}'.format(link))
return super(EmbedXMixin, self).autolink(link, False)
class EmbedXPlugin(Plugin):
name = u'lektor-embed-x'
description = u'Simply embed rich content from popular sites in Lektor-generated pages'
def on_markdown_config(self, config, **extra):
config.renderer_mixins.append(EmbedXMixin)
<commit_msg>Return a Markup object when the host is not supported
This might be an issue with the Markdown parser used by lektor (mistune),
but when a Markup() object is returned, then regular links must also
be marked as Markup() or they won't be escaped.<commit_after># -*- coding: utf-8 -*-
"""
lektor-embed-x
~~~~~~~~~~~~~~
Simply embed rich contents from popular sites in Lektor-generated pages
:copyright: (c) 2016 by Khaled Monsoor
:license: The MIT License
"""
from embedx import OnlineContent
from lektor.pluginsystem import Plugin
from markupsafe import Markup
__version__ = '0.1.2'
__author__ = 'Khaled Monsoor <k@kmonsoor.com>'
class EmbedXMixin(object):
def autolink(self, link, is_email):
if is_email:
return super(EmbedXMixin, self).autolink(link, True)
else:
try:
content = OnlineContent(link)
# print content.get_embed_code()
return Markup(content.get_embed_code())
except (NotImplementedError, ValueError):
print('This host or this specific content is not supported yet. link: {0}'.format(link))
return Markup(super(EmbedXMixin, self).autolink(link, False))
class EmbedXPlugin(Plugin):
name = u'lektor-embed-x'
description = u'Simply embed rich content from popular sites in Lektor-generated pages'
def on_markdown_config(self, config, **extra):
config.renderer_mixins.append(EmbedXMixin)
|
b71adef99d0facef9572b3a7fc60a34bc3036888
|
blanc_basic_news/news/templatetags/news_tags.py
|
blanc_basic_news/news/templatetags/news_tags.py
|
from django import template
from django.utils import timezone
from blanc_basic_news.news import get_post_model
from blanc_basic_news.news.models import Category
register = template.Library()
@register.assignment_tag
def get_news_categories():
return Category.objects.all()
@register.assignment_tag
def get_news_months():
return get_post_model().objects.filter(
published=True, date__lte=timezone.now()).dates('date', 'month')
@register.assignment_tag
def get_latest_news(count, category=None):
post_list = get_post_model().objects.select_related().filter(
published=True, date__lte=timezone.now())
# Optional filter by category
if category is not None:
post_list = post_list.filter(category__slug=category)
return post_list[:count]
|
from django import template
from django.utils import timezone
from blanc_basic_news.news import get_post_model
from blanc_basic_news.news.models import Category
register = template.Library()
@register.assignment_tag
def get_news_categories():
return Category.objects.all()
@register.assignment_tag
def get_news_months():
return get_post_model().objects.filter(
published=True, date__lte=timezone.now()).dates('date_url', 'month')
@register.assignment_tag
def get_latest_news(count, category=None):
post_list = get_post_model().objects.select_related().filter(
published=True, date__lte=timezone.now())
# Optional filter by category
if category is not None:
post_list = post_list.filter(category__slug=category)
return post_list[:count]
|
Use date_url for .dates(), as Django 1.6 doesn't like DateTimeField here
|
Use date_url for .dates(), as Django 1.6 doesn't like DateTimeField here
|
Python
|
bsd-3-clause
|
blancltd/blanc-basic-news
|
from django import template
from django.utils import timezone
from blanc_basic_news.news import get_post_model
from blanc_basic_news.news.models import Category
register = template.Library()
@register.assignment_tag
def get_news_categories():
return Category.objects.all()
@register.assignment_tag
def get_news_months():
return get_post_model().objects.filter(
published=True, date__lte=timezone.now()).dates('date', 'month')
@register.assignment_tag
def get_latest_news(count, category=None):
post_list = get_post_model().objects.select_related().filter(
published=True, date__lte=timezone.now())
# Optional filter by category
if category is not None:
post_list = post_list.filter(category__slug=category)
return post_list[:count]
Use date_url for .dates(), as Django 1.6 doesn't like DateTimeField here
|
from django import template
from django.utils import timezone
from blanc_basic_news.news import get_post_model
from blanc_basic_news.news.models import Category
register = template.Library()
@register.assignment_tag
def get_news_categories():
return Category.objects.all()
@register.assignment_tag
def get_news_months():
return get_post_model().objects.filter(
published=True, date__lte=timezone.now()).dates('date_url', 'month')
@register.assignment_tag
def get_latest_news(count, category=None):
post_list = get_post_model().objects.select_related().filter(
published=True, date__lte=timezone.now())
# Optional filter by category
if category is not None:
post_list = post_list.filter(category__slug=category)
return post_list[:count]
|
<commit_before>from django import template
from django.utils import timezone
from blanc_basic_news.news import get_post_model
from blanc_basic_news.news.models import Category
register = template.Library()
@register.assignment_tag
def get_news_categories():
return Category.objects.all()
@register.assignment_tag
def get_news_months():
return get_post_model().objects.filter(
published=True, date__lte=timezone.now()).dates('date', 'month')
@register.assignment_tag
def get_latest_news(count, category=None):
post_list = get_post_model().objects.select_related().filter(
published=True, date__lte=timezone.now())
# Optional filter by category
if category is not None:
post_list = post_list.filter(category__slug=category)
return post_list[:count]
<commit_msg>Use date_url for .dates(), as Django 1.6 doesn't like DateTimeField here<commit_after>
|
from django import template
from django.utils import timezone
from blanc_basic_news.news import get_post_model
from blanc_basic_news.news.models import Category
register = template.Library()
@register.assignment_tag
def get_news_categories():
return Category.objects.all()
@register.assignment_tag
def get_news_months():
return get_post_model().objects.filter(
published=True, date__lte=timezone.now()).dates('date_url', 'month')
@register.assignment_tag
def get_latest_news(count, category=None):
post_list = get_post_model().objects.select_related().filter(
published=True, date__lte=timezone.now())
# Optional filter by category
if category is not None:
post_list = post_list.filter(category__slug=category)
return post_list[:count]
|
from django import template
from django.utils import timezone
from blanc_basic_news.news import get_post_model
from blanc_basic_news.news.models import Category
register = template.Library()
@register.assignment_tag
def get_news_categories():
return Category.objects.all()
@register.assignment_tag
def get_news_months():
return get_post_model().objects.filter(
published=True, date__lte=timezone.now()).dates('date', 'month')
@register.assignment_tag
def get_latest_news(count, category=None):
post_list = get_post_model().objects.select_related().filter(
published=True, date__lte=timezone.now())
# Optional filter by category
if category is not None:
post_list = post_list.filter(category__slug=category)
return post_list[:count]
Use date_url for .dates(), as Django 1.6 doesn't like DateTimeField herefrom django import template
from django.utils import timezone
from blanc_basic_news.news import get_post_model
from blanc_basic_news.news.models import Category
register = template.Library()
@register.assignment_tag
def get_news_categories():
return Category.objects.all()
@register.assignment_tag
def get_news_months():
return get_post_model().objects.filter(
published=True, date__lte=timezone.now()).dates('date_url', 'month')
@register.assignment_tag
def get_latest_news(count, category=None):
post_list = get_post_model().objects.select_related().filter(
published=True, date__lte=timezone.now())
# Optional filter by category
if category is not None:
post_list = post_list.filter(category__slug=category)
return post_list[:count]
|
<commit_before>from django import template
from django.utils import timezone
from blanc_basic_news.news import get_post_model
from blanc_basic_news.news.models import Category
register = template.Library()
@register.assignment_tag
def get_news_categories():
return Category.objects.all()
@register.assignment_tag
def get_news_months():
return get_post_model().objects.filter(
published=True, date__lte=timezone.now()).dates('date', 'month')
@register.assignment_tag
def get_latest_news(count, category=None):
post_list = get_post_model().objects.select_related().filter(
published=True, date__lte=timezone.now())
# Optional filter by category
if category is not None:
post_list = post_list.filter(category__slug=category)
return post_list[:count]
<commit_msg>Use date_url for .dates(), as Django 1.6 doesn't like DateTimeField here<commit_after>from django import template
from django.utils import timezone
from blanc_basic_news.news import get_post_model
from blanc_basic_news.news.models import Category
register = template.Library()
@register.assignment_tag
def get_news_categories():
return Category.objects.all()
@register.assignment_tag
def get_news_months():
return get_post_model().objects.filter(
published=True, date__lte=timezone.now()).dates('date_url', 'month')
@register.assignment_tag
def get_latest_news(count, category=None):
post_list = get_post_model().objects.select_related().filter(
published=True, date__lte=timezone.now())
# Optional filter by category
if category is not None:
post_list = post_list.filter(category__slug=category)
return post_list[:count]
|
ba8560e4fc51afc6985d78cc131d46f07fe3260c
|
pyximport/test/test_reload.py
|
pyximport/test/test_reload.py
|
from __future__ import absolute_import, print_function
import time, os, sys
from . import test_pyximport
if 1:
from distutils import sysconfig
try:
sysconfig.set_python_build()
except AttributeError:
pass
import pyxbuild
print(pyxbuild.distutils.sysconfig == sysconfig)
def test():
tempdir = test_pyximport.make_tempdir()
sys.path.append(tempdir)
hello_file = os.path.join(tempdir, "hello.pyx")
open(hello_file, "w").write("x = 1; print x; before = 'before'\n")
import hello
assert hello.x == 1
time.sleep(1) # sleep to make sure that new "hello.pyx" has later
# timestamp than object file.
open(hello_file, "w").write("x = 2; print x; after = 'after'\n")
reload(hello)
assert hello.x == 2, "Reload should work on Python 2.3 but not 2.2"
test_pyximport.remove_tempdir(tempdir)
if __name__=="__main__":
test()
|
from __future__ import absolute_import, print_function
import time, os, sys
from . import test_pyximport
if 1:
from distutils import sysconfig
try:
sysconfig.set_python_build()
except AttributeError:
pass
import pyxbuild
print(pyxbuild.distutils.sysconfig == sysconfig)
def test():
tempdir = test_pyximport.make_tempdir()
sys.path.append(tempdir)
hello_file = os.path.join(tempdir, "hello.pyx")
open(hello_file, "w").write("x = 1; print x; before = 'before'\n")
import hello
assert hello.x == 1
time.sleep(1) # sleep to make sure that new "hello.pyx" has later
# timestamp than object file.
open(hello_file, "w").write("x = 2; print x; after = 'after'\n")
reload(hello)
assert hello.x == 2, "Reload should work on Python 2.3 but not 2.2"
test_pyximport.remove_tempdir(tempdir)
if __name__=="__main__":
test()
|
Fix bad indentation in a pyximport test
|
Fix bad indentation in a pyximport test
The indentation was inadvertently broken when expanding tabs in
e908c0b9262008014d0698732acb5de48dbbf950.
Fixes:
$ python pyximport/test/test_reload.py
File "pyximport/test/test_reload.py", line 23
assert hello.x == 1
^
IndentationError: unexpected indent
|
Python
|
apache-2.0
|
scoder/cython,ChristopherHogan/cython,cython/cython,scoder/cython,ChristopherHogan/cython,cython/cython,scoder/cython,da-woods/cython,da-woods/cython,da-woods/cython,scoder/cython,cython/cython,da-woods/cython,ChristopherHogan/cython,cython/cython
|
from __future__ import absolute_import, print_function
import time, os, sys
from . import test_pyximport
if 1:
from distutils import sysconfig
try:
sysconfig.set_python_build()
except AttributeError:
pass
import pyxbuild
print(pyxbuild.distutils.sysconfig == sysconfig)
def test():
tempdir = test_pyximport.make_tempdir()
sys.path.append(tempdir)
hello_file = os.path.join(tempdir, "hello.pyx")
open(hello_file, "w").write("x = 1; print x; before = 'before'\n")
import hello
assert hello.x == 1
time.sleep(1) # sleep to make sure that new "hello.pyx" has later
# timestamp than object file.
open(hello_file, "w").write("x = 2; print x; after = 'after'\n")
reload(hello)
assert hello.x == 2, "Reload should work on Python 2.3 but not 2.2"
test_pyximport.remove_tempdir(tempdir)
if __name__=="__main__":
test()
Fix bad indentation in a pyximport test
The indentation was inadvertently broken when expanding tabs in
e908c0b9262008014d0698732acb5de48dbbf950.
Fixes:
$ python pyximport/test/test_reload.py
File "pyximport/test/test_reload.py", line 23
assert hello.x == 1
^
IndentationError: unexpected indent
|
from __future__ import absolute_import, print_function
import time, os, sys
from . import test_pyximport
if 1:
from distutils import sysconfig
try:
sysconfig.set_python_build()
except AttributeError:
pass
import pyxbuild
print(pyxbuild.distutils.sysconfig == sysconfig)
def test():
tempdir = test_pyximport.make_tempdir()
sys.path.append(tempdir)
hello_file = os.path.join(tempdir, "hello.pyx")
open(hello_file, "w").write("x = 1; print x; before = 'before'\n")
import hello
assert hello.x == 1
time.sleep(1) # sleep to make sure that new "hello.pyx" has later
# timestamp than object file.
open(hello_file, "w").write("x = 2; print x; after = 'after'\n")
reload(hello)
assert hello.x == 2, "Reload should work on Python 2.3 but not 2.2"
test_pyximport.remove_tempdir(tempdir)
if __name__=="__main__":
test()
|
<commit_before>from __future__ import absolute_import, print_function
import time, os, sys
from . import test_pyximport
if 1:
from distutils import sysconfig
try:
sysconfig.set_python_build()
except AttributeError:
pass
import pyxbuild
print(pyxbuild.distutils.sysconfig == sysconfig)
def test():
tempdir = test_pyximport.make_tempdir()
sys.path.append(tempdir)
hello_file = os.path.join(tempdir, "hello.pyx")
open(hello_file, "w").write("x = 1; print x; before = 'before'\n")
import hello
assert hello.x == 1
time.sleep(1) # sleep to make sure that new "hello.pyx" has later
# timestamp than object file.
open(hello_file, "w").write("x = 2; print x; after = 'after'\n")
reload(hello)
assert hello.x == 2, "Reload should work on Python 2.3 but not 2.2"
test_pyximport.remove_tempdir(tempdir)
if __name__=="__main__":
test()
<commit_msg>Fix bad indentation in a pyximport test
The indentation was inadvertently broken when expanding tabs in
e908c0b9262008014d0698732acb5de48dbbf950.
Fixes:
$ python pyximport/test/test_reload.py
File "pyximport/test/test_reload.py", line 23
assert hello.x == 1
^
IndentationError: unexpected indent<commit_after>
|
from __future__ import absolute_import, print_function
import time, os, sys
from . import test_pyximport
if 1:
from distutils import sysconfig
try:
sysconfig.set_python_build()
except AttributeError:
pass
import pyxbuild
print(pyxbuild.distutils.sysconfig == sysconfig)
def test():
tempdir = test_pyximport.make_tempdir()
sys.path.append(tempdir)
hello_file = os.path.join(tempdir, "hello.pyx")
open(hello_file, "w").write("x = 1; print x; before = 'before'\n")
import hello
assert hello.x == 1
time.sleep(1) # sleep to make sure that new "hello.pyx" has later
# timestamp than object file.
open(hello_file, "w").write("x = 2; print x; after = 'after'\n")
reload(hello)
assert hello.x == 2, "Reload should work on Python 2.3 but not 2.2"
test_pyximport.remove_tempdir(tempdir)
if __name__=="__main__":
test()
|
from __future__ import absolute_import, print_function
import time, os, sys
from . import test_pyximport
if 1:
from distutils import sysconfig
try:
sysconfig.set_python_build()
except AttributeError:
pass
import pyxbuild
print(pyxbuild.distutils.sysconfig == sysconfig)
def test():
tempdir = test_pyximport.make_tempdir()
sys.path.append(tempdir)
hello_file = os.path.join(tempdir, "hello.pyx")
open(hello_file, "w").write("x = 1; print x; before = 'before'\n")
import hello
assert hello.x == 1
time.sleep(1) # sleep to make sure that new "hello.pyx" has later
# timestamp than object file.
open(hello_file, "w").write("x = 2; print x; after = 'after'\n")
reload(hello)
assert hello.x == 2, "Reload should work on Python 2.3 but not 2.2"
test_pyximport.remove_tempdir(tempdir)
if __name__=="__main__":
test()
Fix bad indentation in a pyximport test
The indentation was inadvertently broken when expanding tabs in
e908c0b9262008014d0698732acb5de48dbbf950.
Fixes:
$ python pyximport/test/test_reload.py
File "pyximport/test/test_reload.py", line 23
assert hello.x == 1
^
IndentationError: unexpected indentfrom __future__ import absolute_import, print_function
import time, os, sys
from . import test_pyximport
if 1:
from distutils import sysconfig
try:
sysconfig.set_python_build()
except AttributeError:
pass
import pyxbuild
print(pyxbuild.distutils.sysconfig == sysconfig)
def test():
tempdir = test_pyximport.make_tempdir()
sys.path.append(tempdir)
hello_file = os.path.join(tempdir, "hello.pyx")
open(hello_file, "w").write("x = 1; print x; before = 'before'\n")
import hello
assert hello.x == 1
time.sleep(1) # sleep to make sure that new "hello.pyx" has later
# timestamp than object file.
open(hello_file, "w").write("x = 2; print x; after = 'after'\n")
reload(hello)
assert hello.x == 2, "Reload should work on Python 2.3 but not 2.2"
test_pyximport.remove_tempdir(tempdir)
if __name__=="__main__":
test()
|
<commit_before>from __future__ import absolute_import, print_function
import time, os, sys
from . import test_pyximport
if 1:
from distutils import sysconfig
try:
sysconfig.set_python_build()
except AttributeError:
pass
import pyxbuild
print(pyxbuild.distutils.sysconfig == sysconfig)
def test():
tempdir = test_pyximport.make_tempdir()
sys.path.append(tempdir)
hello_file = os.path.join(tempdir, "hello.pyx")
open(hello_file, "w").write("x = 1; print x; before = 'before'\n")
import hello
assert hello.x == 1
time.sleep(1) # sleep to make sure that new "hello.pyx" has later
# timestamp than object file.
open(hello_file, "w").write("x = 2; print x; after = 'after'\n")
reload(hello)
assert hello.x == 2, "Reload should work on Python 2.3 but not 2.2"
test_pyximport.remove_tempdir(tempdir)
if __name__=="__main__":
test()
<commit_msg>Fix bad indentation in a pyximport test
The indentation was inadvertently broken when expanding tabs in
e908c0b9262008014d0698732acb5de48dbbf950.
Fixes:
$ python pyximport/test/test_reload.py
File "pyximport/test/test_reload.py", line 23
assert hello.x == 1
^
IndentationError: unexpected indent<commit_after>from __future__ import absolute_import, print_function
import time, os, sys
from . import test_pyximport
if 1:
from distutils import sysconfig
try:
sysconfig.set_python_build()
except AttributeError:
pass
import pyxbuild
print(pyxbuild.distutils.sysconfig == sysconfig)
def test():
tempdir = test_pyximport.make_tempdir()
sys.path.append(tempdir)
hello_file = os.path.join(tempdir, "hello.pyx")
open(hello_file, "w").write("x = 1; print x; before = 'before'\n")
import hello
assert hello.x == 1
time.sleep(1) # sleep to make sure that new "hello.pyx" has later
# timestamp than object file.
open(hello_file, "w").write("x = 2; print x; after = 'after'\n")
reload(hello)
assert hello.x == 2, "Reload should work on Python 2.3 but not 2.2"
test_pyximport.remove_tempdir(tempdir)
if __name__=="__main__":
test()
|
5a79acaccab59e50788cd2da31a93f2f1b69ca53
|
codeforces/div3/579/C/C.py
|
codeforces/div3/579/C/C.py
|
from math import gcd, sqrt, ceil
n = int(input())
a = list(map(int, input().split()))
a = list(set(a))
divisor = a[0]
for ai in a[1:]:
divisor = gcd(divisor, ai)
result = 1 # 1 is always a divisor
limit = ceil(sqrt(divisor)) + 1
primes = [2] + list(range(3, limit, 2))
for prime_factor in primes:
power = 0
while divisor % prime_factor == 0:
divisor /= prime_factor
power += 1
if power > 0:
result *= 1 + power
print(result)
|
from math import gcd, sqrt, ceil
n = int(input())
a = list(map(int, input().split()))
a = list(set(a))
divisor = a[0]
for ai in a[1:]:
divisor = gcd(divisor, ai)
result = 1 # 1 is always a divisor
limit = divisor // 2 + 1
primes = [2] + list(range(3, limit, 2))
for prime_factor in primes:
if prime_factor > divisor:
break
power = 0
while divisor % prime_factor == 0:
divisor /= prime_factor
power += 1
result *= 1 + power
print(result)
|
Fix WA 10: remove limit heuristics
|
Fix WA 10: remove limit heuristics
- in CF Div 3 579C
Signed-off-by: Karel Ha <70f8965fdfb04f1fc0e708a55d9e822c449f57d3@gmail.com>
|
Python
|
mit
|
mathemage/CompetitiveProgramming,mathemage/CompetitiveProgramming,mathemage/CompetitiveProgramming,mathemage/CompetitiveProgramming,mathemage/CompetitiveProgramming,mathemage/CompetitiveProgramming
|
from math import gcd, sqrt, ceil
n = int(input())
a = list(map(int, input().split()))
a = list(set(a))
divisor = a[0]
for ai in a[1:]:
divisor = gcd(divisor, ai)
result = 1 # 1 is always a divisor
limit = ceil(sqrt(divisor)) + 1
primes = [2] + list(range(3, limit, 2))
for prime_factor in primes:
power = 0
while divisor % prime_factor == 0:
divisor /= prime_factor
power += 1
if power > 0:
result *= 1 + power
print(result)
Fix WA 10: remove limit heuristics
- in CF Div 3 579C
Signed-off-by: Karel Ha <70f8965fdfb04f1fc0e708a55d9e822c449f57d3@gmail.com>
|
from math import gcd, sqrt, ceil
n = int(input())
a = list(map(int, input().split()))
a = list(set(a))
divisor = a[0]
for ai in a[1:]:
divisor = gcd(divisor, ai)
result = 1 # 1 is always a divisor
limit = divisor // 2 + 1
primes = [2] + list(range(3, limit, 2))
for prime_factor in primes:
if prime_factor > divisor:
break
power = 0
while divisor % prime_factor == 0:
divisor /= prime_factor
power += 1
result *= 1 + power
print(result)
|
<commit_before>from math import gcd, sqrt, ceil
n = int(input())
a = list(map(int, input().split()))
a = list(set(a))
divisor = a[0]
for ai in a[1:]:
divisor = gcd(divisor, ai)
result = 1 # 1 is always a divisor
limit = ceil(sqrt(divisor)) + 1
primes = [2] + list(range(3, limit, 2))
for prime_factor in primes:
power = 0
while divisor % prime_factor == 0:
divisor /= prime_factor
power += 1
if power > 0:
result *= 1 + power
print(result)
<commit_msg>Fix WA 10: remove limit heuristics
- in CF Div 3 579C
Signed-off-by: Karel Ha <70f8965fdfb04f1fc0e708a55d9e822c449f57d3@gmail.com><commit_after>
|
from math import gcd, sqrt, ceil
n = int(input())
a = list(map(int, input().split()))
a = list(set(a))
divisor = a[0]
for ai in a[1:]:
divisor = gcd(divisor, ai)
result = 1 # 1 is always a divisor
limit = divisor // 2 + 1
primes = [2] + list(range(3, limit, 2))
for prime_factor in primes:
if prime_factor > divisor:
break
power = 0
while divisor % prime_factor == 0:
divisor /= prime_factor
power += 1
result *= 1 + power
print(result)
|
from math import gcd, sqrt, ceil
n = int(input())
a = list(map(int, input().split()))
a = list(set(a))
divisor = a[0]
for ai in a[1:]:
divisor = gcd(divisor, ai)
result = 1 # 1 is always a divisor
limit = ceil(sqrt(divisor)) + 1
primes = [2] + list(range(3, limit, 2))
for prime_factor in primes:
power = 0
while divisor % prime_factor == 0:
divisor /= prime_factor
power += 1
if power > 0:
result *= 1 + power
print(result)
Fix WA 10: remove limit heuristics
- in CF Div 3 579C
Signed-off-by: Karel Ha <70f8965fdfb04f1fc0e708a55d9e822c449f57d3@gmail.com>from math import gcd, sqrt, ceil
n = int(input())
a = list(map(int, input().split()))
a = list(set(a))
divisor = a[0]
for ai in a[1:]:
divisor = gcd(divisor, ai)
result = 1 # 1 is always a divisor
limit = divisor // 2 + 1
primes = [2] + list(range(3, limit, 2))
for prime_factor in primes:
if prime_factor > divisor:
break
power = 0
while divisor % prime_factor == 0:
divisor /= prime_factor
power += 1
result *= 1 + power
print(result)
|
<commit_before>from math import gcd, sqrt, ceil
n = int(input())
a = list(map(int, input().split()))
a = list(set(a))
divisor = a[0]
for ai in a[1:]:
divisor = gcd(divisor, ai)
result = 1 # 1 is always a divisor
limit = ceil(sqrt(divisor)) + 1
primes = [2] + list(range(3, limit, 2))
for prime_factor in primes:
power = 0
while divisor % prime_factor == 0:
divisor /= prime_factor
power += 1
if power > 0:
result *= 1 + power
print(result)
<commit_msg>Fix WA 10: remove limit heuristics
- in CF Div 3 579C
Signed-off-by: Karel Ha <70f8965fdfb04f1fc0e708a55d9e822c449f57d3@gmail.com><commit_after>from math import gcd, sqrt, ceil
n = int(input())
a = list(map(int, input().split()))
a = list(set(a))
divisor = a[0]
for ai in a[1:]:
divisor = gcd(divisor, ai)
result = 1 # 1 is always a divisor
limit = divisor // 2 + 1
primes = [2] + list(range(3, limit, 2))
for prime_factor in primes:
if prime_factor > divisor:
break
power = 0
while divisor % prime_factor == 0:
divisor /= prime_factor
power += 1
result *= 1 + power
print(result)
|
3dd22e9c88a0b02655481ef3ca0f5376b8aae1b5
|
spacy/tests/regression/test_issue834.py
|
spacy/tests/regression/test_issue834.py
|
# coding: utf-8
from __future__ import unicode_literals
from io import StringIO
word2vec_str = """, -0.046107 -0.035951 -0.560418
de -0.648927 -0.400976 -0.527124
. 0.113685 0.439990 -0.634510
-1.499184 -0.184280 -0.598371"""
def test_issue834(en_vocab):
f = StringIO(word2vec_str)
vector_length = en_vocab.load_vectors(f)
assert vector_length == 3
|
# coding: utf-8
from __future__ import unicode_literals
from io import StringIO
import pytest
word2vec_str = """, -0.046107 -0.035951 -0.560418
de -0.648927 -0.400976 -0.527124
. 0.113685 0.439990 -0.634510
-1.499184 -0.184280 -0.598371"""
@pytest.mark.xfail
def test_issue834(en_vocab):
f = StringIO(word2vec_str)
vector_length = en_vocab.load_vectors(f)
assert vector_length == 3
|
Mark vectors test as xfail (temporary)
|
Mark vectors test as xfail (temporary)
|
Python
|
mit
|
oroszgy/spaCy.hu,oroszgy/spaCy.hu,recognai/spaCy,Gregory-Howard/spaCy,aikramer2/spaCy,spacy-io/spaCy,Gregory-Howard/spaCy,oroszgy/spaCy.hu,raphael0202/spaCy,raphael0202/spaCy,aikramer2/spaCy,spacy-io/spaCy,honnibal/spaCy,raphael0202/spaCy,aikramer2/spaCy,aikramer2/spaCy,banglakit/spaCy,recognai/spaCy,honnibal/spaCy,Gregory-Howard/spaCy,explosion/spaCy,banglakit/spaCy,spacy-io/spaCy,banglakit/spaCy,spacy-io/spaCy,oroszgy/spaCy.hu,honnibal/spaCy,aikramer2/spaCy,raphael0202/spaCy,raphael0202/spaCy,raphael0202/spaCy,recognai/spaCy,explosion/spaCy,explosion/spaCy,recognai/spaCy,explosion/spaCy,honnibal/spaCy,oroszgy/spaCy.hu,Gregory-Howard/spaCy,recognai/spaCy,Gregory-Howard/spaCy,banglakit/spaCy,recognai/spaCy,banglakit/spaCy,oroszgy/spaCy.hu,explosion/spaCy,explosion/spaCy,aikramer2/spaCy,Gregory-Howard/spaCy,spacy-io/spaCy,spacy-io/spaCy,banglakit/spaCy
|
# coding: utf-8
from __future__ import unicode_literals
from io import StringIO
word2vec_str = """, -0.046107 -0.035951 -0.560418
de -0.648927 -0.400976 -0.527124
. 0.113685 0.439990 -0.634510
-1.499184 -0.184280 -0.598371"""
def test_issue834(en_vocab):
f = StringIO(word2vec_str)
vector_length = en_vocab.load_vectors(f)
assert vector_length == 3
Mark vectors test as xfail (temporary)
|
# coding: utf-8
from __future__ import unicode_literals
from io import StringIO
import pytest
word2vec_str = """, -0.046107 -0.035951 -0.560418
de -0.648927 -0.400976 -0.527124
. 0.113685 0.439990 -0.634510
-1.499184 -0.184280 -0.598371"""
@pytest.mark.xfail
def test_issue834(en_vocab):
f = StringIO(word2vec_str)
vector_length = en_vocab.load_vectors(f)
assert vector_length == 3
|
<commit_before># coding: utf-8
from __future__ import unicode_literals
from io import StringIO
word2vec_str = """, -0.046107 -0.035951 -0.560418
de -0.648927 -0.400976 -0.527124
. 0.113685 0.439990 -0.634510
-1.499184 -0.184280 -0.598371"""
def test_issue834(en_vocab):
f = StringIO(word2vec_str)
vector_length = en_vocab.load_vectors(f)
assert vector_length == 3
<commit_msg>Mark vectors test as xfail (temporary)<commit_after>
|
# coding: utf-8
from __future__ import unicode_literals
from io import StringIO
import pytest
word2vec_str = """, -0.046107 -0.035951 -0.560418
de -0.648927 -0.400976 -0.527124
. 0.113685 0.439990 -0.634510
-1.499184 -0.184280 -0.598371"""
@pytest.mark.xfail
def test_issue834(en_vocab):
f = StringIO(word2vec_str)
vector_length = en_vocab.load_vectors(f)
assert vector_length == 3
|
# coding: utf-8
from __future__ import unicode_literals
from io import StringIO
word2vec_str = """, -0.046107 -0.035951 -0.560418
de -0.648927 -0.400976 -0.527124
. 0.113685 0.439990 -0.634510
-1.499184 -0.184280 -0.598371"""
def test_issue834(en_vocab):
f = StringIO(word2vec_str)
vector_length = en_vocab.load_vectors(f)
assert vector_length == 3
Mark vectors test as xfail (temporary)# coding: utf-8
from __future__ import unicode_literals
from io import StringIO
import pytest
word2vec_str = """, -0.046107 -0.035951 -0.560418
de -0.648927 -0.400976 -0.527124
. 0.113685 0.439990 -0.634510
-1.499184 -0.184280 -0.598371"""
@pytest.mark.xfail
def test_issue834(en_vocab):
f = StringIO(word2vec_str)
vector_length = en_vocab.load_vectors(f)
assert vector_length == 3
|
<commit_before># coding: utf-8
from __future__ import unicode_literals
from io import StringIO
word2vec_str = """, -0.046107 -0.035951 -0.560418
de -0.648927 -0.400976 -0.527124
. 0.113685 0.439990 -0.634510
-1.499184 -0.184280 -0.598371"""
def test_issue834(en_vocab):
f = StringIO(word2vec_str)
vector_length = en_vocab.load_vectors(f)
assert vector_length == 3
<commit_msg>Mark vectors test as xfail (temporary)<commit_after># coding: utf-8
from __future__ import unicode_literals
from io import StringIO
import pytest
word2vec_str = """, -0.046107 -0.035951 -0.560418
de -0.648927 -0.400976 -0.527124
. 0.113685 0.439990 -0.634510
-1.499184 -0.184280 -0.598371"""
@pytest.mark.xfail
def test_issue834(en_vocab):
f = StringIO(word2vec_str)
vector_length = en_vocab.load_vectors(f)
assert vector_length == 3
|
7a4f4d2456c5ed0609efe7777d2b9e22854ac449
|
social_django/compat.py
|
social_django/compat.py
|
# coding=utf-8
import six
import django
from django.db import models
if django.VERSION >= (2, 0):
from django.urls import reverse
else:
from django.core.urlresolvers import reverse
if django.VERSION >= (1, 10):
from django.utils.deprecation import MiddlewareMixin
else:
MiddlewareMixin = object
def get_rel_model(field):
if django.VERSION >= (2, 0):
return field.model
user_model = field.rel.to
if isinstance(user_model, six.string_types):
app_label, model_name = user_model.split('.')
user_model = models.get_model(app_label, model_name)
return user_model
|
# coding=utf-8
import six
import django
from django.db import models
if django.VERSION >= (2, 0):
from django.urls import reverse
else:
from django.core.urlresolvers import reverse
if django.VERSION >= (1, 10):
from django.utils.deprecation import MiddlewareMixin
else:
MiddlewareMixin = object
def get_rel_model(field):
if django.VERSION >= (2, 0):
return field.remote_field.model
user_model = field.rel.to
if isinstance(user_model, six.string_types):
app_label, model_name = user_model.split('.')
user_model = models.get_model(app_label, model_name)
return user_model
|
Fix getting model of foreign key field.
|
Fix getting model of foreign key field.
|
Python
|
bsd-3-clause
|
python-social-auth/social-app-django,python-social-auth/social-app-django,python-social-auth/social-app-django
|
# coding=utf-8
import six
import django
from django.db import models
if django.VERSION >= (2, 0):
from django.urls import reverse
else:
from django.core.urlresolvers import reverse
if django.VERSION >= (1, 10):
from django.utils.deprecation import MiddlewareMixin
else:
MiddlewareMixin = object
def get_rel_model(field):
if django.VERSION >= (2, 0):
return field.model
user_model = field.rel.to
if isinstance(user_model, six.string_types):
app_label, model_name = user_model.split('.')
user_model = models.get_model(app_label, model_name)
return user_model
Fix getting model of foreign key field.
|
# coding=utf-8
import six
import django
from django.db import models
if django.VERSION >= (2, 0):
from django.urls import reverse
else:
from django.core.urlresolvers import reverse
if django.VERSION >= (1, 10):
from django.utils.deprecation import MiddlewareMixin
else:
MiddlewareMixin = object
def get_rel_model(field):
if django.VERSION >= (2, 0):
return field.remote_field.model
user_model = field.rel.to
if isinstance(user_model, six.string_types):
app_label, model_name = user_model.split('.')
user_model = models.get_model(app_label, model_name)
return user_model
|
<commit_before># coding=utf-8
import six
import django
from django.db import models
if django.VERSION >= (2, 0):
from django.urls import reverse
else:
from django.core.urlresolvers import reverse
if django.VERSION >= (1, 10):
from django.utils.deprecation import MiddlewareMixin
else:
MiddlewareMixin = object
def get_rel_model(field):
if django.VERSION >= (2, 0):
return field.model
user_model = field.rel.to
if isinstance(user_model, six.string_types):
app_label, model_name = user_model.split('.')
user_model = models.get_model(app_label, model_name)
return user_model
<commit_msg>Fix getting model of foreign key field.<commit_after>
|
# coding=utf-8
import six
import django
from django.db import models
if django.VERSION >= (2, 0):
from django.urls import reverse
else:
from django.core.urlresolvers import reverse
if django.VERSION >= (1, 10):
from django.utils.deprecation import MiddlewareMixin
else:
MiddlewareMixin = object
def get_rel_model(field):
if django.VERSION >= (2, 0):
return field.remote_field.model
user_model = field.rel.to
if isinstance(user_model, six.string_types):
app_label, model_name = user_model.split('.')
user_model = models.get_model(app_label, model_name)
return user_model
|
# coding=utf-8
import six
import django
from django.db import models
if django.VERSION >= (2, 0):
from django.urls import reverse
else:
from django.core.urlresolvers import reverse
if django.VERSION >= (1, 10):
from django.utils.deprecation import MiddlewareMixin
else:
MiddlewareMixin = object
def get_rel_model(field):
if django.VERSION >= (2, 0):
return field.model
user_model = field.rel.to
if isinstance(user_model, six.string_types):
app_label, model_name = user_model.split('.')
user_model = models.get_model(app_label, model_name)
return user_model
Fix getting model of foreign key field.# coding=utf-8
import six
import django
from django.db import models
if django.VERSION >= (2, 0):
from django.urls import reverse
else:
from django.core.urlresolvers import reverse
if django.VERSION >= (1, 10):
from django.utils.deprecation import MiddlewareMixin
else:
MiddlewareMixin = object
def get_rel_model(field):
if django.VERSION >= (2, 0):
return field.remote_field.model
user_model = field.rel.to
if isinstance(user_model, six.string_types):
app_label, model_name = user_model.split('.')
user_model = models.get_model(app_label, model_name)
return user_model
|
<commit_before># coding=utf-8
import six
import django
from django.db import models
if django.VERSION >= (2, 0):
from django.urls import reverse
else:
from django.core.urlresolvers import reverse
if django.VERSION >= (1, 10):
from django.utils.deprecation import MiddlewareMixin
else:
MiddlewareMixin = object
def get_rel_model(field):
if django.VERSION >= (2, 0):
return field.model
user_model = field.rel.to
if isinstance(user_model, six.string_types):
app_label, model_name = user_model.split('.')
user_model = models.get_model(app_label, model_name)
return user_model
<commit_msg>Fix getting model of foreign key field.<commit_after># coding=utf-8
import six
import django
from django.db import models
if django.VERSION >= (2, 0):
from django.urls import reverse
else:
from django.core.urlresolvers import reverse
if django.VERSION >= (1, 10):
from django.utils.deprecation import MiddlewareMixin
else:
MiddlewareMixin = object
def get_rel_model(field):
if django.VERSION >= (2, 0):
return field.remote_field.model
user_model = field.rel.to
if isinstance(user_model, six.string_types):
app_label, model_name = user_model.split('.')
user_model = models.get_model(app_label, model_name)
return user_model
|
54a6031c54c8b64eeeed7a28f7836f886022bdd0
|
main.py
|
main.py
|
from evaluate_user import evaluate_user
def main():
user_id = ""
while user_id != 'exit':
user_id = raw_input("Enter user id: ")
if user_id != 'exit' and evaluate_user(user_id) == 1:
print("Cannot evaluate user.\n")
if __name__ == "__main__":
main()
|
from evaluate_user import evaluate_user
from Tkinter import *
from ScrolledText import ScrolledText
from ttk import Frame, Button, Label, Style
import re
class EvaluatorWindow(Frame):
def __init__(self, parent):
Frame.__init__(self, parent)
self.parent = parent
self.parent.title("Twitter Judge")
self.style = Style()
self.style.theme_use("default")
output_frame = Frame(self, relief = RIDGE, borderwidth = 1)
output_frame.pack(anchor = N, fill = BOTH, expand = True)
output_text = ScrolledText(output_frame)
self.output_text = output_text
output_text.pack(fill = BOTH, expand = True)
input_frame = Frame(self, height = 32)
input_frame.pack(anchor = S, fill = X, expand = False)
user_label = Label(input_frame, text = "Enter username:")
user_label.pack(side = LEFT)
judge_button = Button(input_frame, text = "Judge!", command = lambda: judge(user_entry.get(), self))
judge_button.pack(side = RIGHT)
user_entry = Entry(input_frame)
user_entry.pack(fill = X, padx = 5, pady = 5, expand = True)
self.pack(fill = BOTH, expand = True)
# Write results to the output as if this is an open file
def write(self, output):
self.output_text.insert(INSERT, output)
self.output_text.see('insert')
self.output_text.update()
return len(output)
def judge(user_id, output_file):
# strip away the '@' if the user included it
user_id = re.sub('@','', user_id)
# Notify the user if the attempt failed for any reason
if user_id != 'exit' and evaluate_user(user_id, output_file) == 1:
print("An error occured.\n")
def main():
window = Tk()
window.geometry("450x600")
app = EvaluatorWindow(window)
window.mainloop()
if __name__ == "__main__":
main()
|
Replace line endings with whitespace instead of deleting them.
|
Replace line endings with whitespace instead of deleting them.
|
Python
|
apache-2.0
|
ngrudzinski/sentiment_analysis_437
|
from evaluate_user import evaluate_user
def main():
user_id = ""
while user_id != 'exit':
user_id = raw_input("Enter user id: ")
if user_id != 'exit' and evaluate_user(user_id) == 1:
print("Cannot evaluate user.\n")
if __name__ == "__main__":
main()Replace line endings with whitespace instead of deleting them.
|
from evaluate_user import evaluate_user
from Tkinter import *
from ScrolledText import ScrolledText
from ttk import Frame, Button, Label, Style
import re
class EvaluatorWindow(Frame):
def __init__(self, parent):
Frame.__init__(self, parent)
self.parent = parent
self.parent.title("Twitter Judge")
self.style = Style()
self.style.theme_use("default")
output_frame = Frame(self, relief = RIDGE, borderwidth = 1)
output_frame.pack(anchor = N, fill = BOTH, expand = True)
output_text = ScrolledText(output_frame)
self.output_text = output_text
output_text.pack(fill = BOTH, expand = True)
input_frame = Frame(self, height = 32)
input_frame.pack(anchor = S, fill = X, expand = False)
user_label = Label(input_frame, text = "Enter username:")
user_label.pack(side = LEFT)
judge_button = Button(input_frame, text = "Judge!", command = lambda: judge(user_entry.get(), self))
judge_button.pack(side = RIGHT)
user_entry = Entry(input_frame)
user_entry.pack(fill = X, padx = 5, pady = 5, expand = True)
self.pack(fill = BOTH, expand = True)
# Write results to the output as if this is an open file
def write(self, output):
self.output_text.insert(INSERT, output)
self.output_text.see('insert')
self.output_text.update()
return len(output)
def judge(user_id, output_file):
# strip away the '@' if the user included it
user_id = re.sub('@','', user_id)
# Notify the user if the attempt failed for any reason
if user_id != 'exit' and evaluate_user(user_id, output_file) == 1:
print("An error occured.\n")
def main():
window = Tk()
window.geometry("450x600")
app = EvaluatorWindow(window)
window.mainloop()
if __name__ == "__main__":
main()
|
<commit_before>from evaluate_user import evaluate_user
def main():
user_id = ""
while user_id != 'exit':
user_id = raw_input("Enter user id: ")
if user_id != 'exit' and evaluate_user(user_id) == 1:
print("Cannot evaluate user.\n")
if __name__ == "__main__":
main()<commit_msg>Replace line endings with whitespace instead of deleting them.<commit_after>
|
from evaluate_user import evaluate_user
from Tkinter import *
from ScrolledText import ScrolledText
from ttk import Frame, Button, Label, Style
import re
class EvaluatorWindow(Frame):
def __init__(self, parent):
Frame.__init__(self, parent)
self.parent = parent
self.parent.title("Twitter Judge")
self.style = Style()
self.style.theme_use("default")
output_frame = Frame(self, relief = RIDGE, borderwidth = 1)
output_frame.pack(anchor = N, fill = BOTH, expand = True)
output_text = ScrolledText(output_frame)
self.output_text = output_text
output_text.pack(fill = BOTH, expand = True)
input_frame = Frame(self, height = 32)
input_frame.pack(anchor = S, fill = X, expand = False)
user_label = Label(input_frame, text = "Enter username:")
user_label.pack(side = LEFT)
judge_button = Button(input_frame, text = "Judge!", command = lambda: judge(user_entry.get(), self))
judge_button.pack(side = RIGHT)
user_entry = Entry(input_frame)
user_entry.pack(fill = X, padx = 5, pady = 5, expand = True)
self.pack(fill = BOTH, expand = True)
# Write results to the output as if this is an open file
def write(self, output):
self.output_text.insert(INSERT, output)
self.output_text.see('insert')
self.output_text.update()
return len(output)
def judge(user_id, output_file):
# strip away the '@' if the user included it
user_id = re.sub('@','', user_id)
# Notify the user if the attempt failed for any reason
if user_id != 'exit' and evaluate_user(user_id, output_file) == 1:
print("An error occured.\n")
def main():
window = Tk()
window.geometry("450x600")
app = EvaluatorWindow(window)
window.mainloop()
if __name__ == "__main__":
main()
|
from evaluate_user import evaluate_user
def main():
user_id = ""
while user_id != 'exit':
user_id = raw_input("Enter user id: ")
if user_id != 'exit' and evaluate_user(user_id) == 1:
print("Cannot evaluate user.\n")
if __name__ == "__main__":
main()Replace line endings with whitespace instead of deleting them.from evaluate_user import evaluate_user
from Tkinter import *
from ScrolledText import ScrolledText
from ttk import Frame, Button, Label, Style
import re
class EvaluatorWindow(Frame):
def __init__(self, parent):
Frame.__init__(self, parent)
self.parent = parent
self.parent.title("Twitter Judge")
self.style = Style()
self.style.theme_use("default")
output_frame = Frame(self, relief = RIDGE, borderwidth = 1)
output_frame.pack(anchor = N, fill = BOTH, expand = True)
output_text = ScrolledText(output_frame)
self.output_text = output_text
output_text.pack(fill = BOTH, expand = True)
input_frame = Frame(self, height = 32)
input_frame.pack(anchor = S, fill = X, expand = False)
user_label = Label(input_frame, text = "Enter username:")
user_label.pack(side = LEFT)
judge_button = Button(input_frame, text = "Judge!", command = lambda: judge(user_entry.get(), self))
judge_button.pack(side = RIGHT)
user_entry = Entry(input_frame)
user_entry.pack(fill = X, padx = 5, pady = 5, expand = True)
self.pack(fill = BOTH, expand = True)
# Write results to the output as if this is an open file
def write(self, output):
self.output_text.insert(INSERT, output)
self.output_text.see('insert')
self.output_text.update()
return len(output)
def judge(user_id, output_file):
# strip away the '@' if the user included it
user_id = re.sub('@','', user_id)
# Notify the user if the attempt failed for any reason
if user_id != 'exit' and evaluate_user(user_id, output_file) == 1:
print("An error occured.\n")
def main():
window = Tk()
window.geometry("450x600")
app = EvaluatorWindow(window)
window.mainloop()
if __name__ == "__main__":
main()
|
<commit_before>from evaluate_user import evaluate_user
def main():
user_id = ""
while user_id != 'exit':
user_id = raw_input("Enter user id: ")
if user_id != 'exit' and evaluate_user(user_id) == 1:
print("Cannot evaluate user.\n")
if __name__ == "__main__":
main()<commit_msg>Replace line endings with whitespace instead of deleting them.<commit_after>from evaluate_user import evaluate_user
from Tkinter import *
from ScrolledText import ScrolledText
from ttk import Frame, Button, Label, Style
import re
class EvaluatorWindow(Frame):
def __init__(self, parent):
Frame.__init__(self, parent)
self.parent = parent
self.parent.title("Twitter Judge")
self.style = Style()
self.style.theme_use("default")
output_frame = Frame(self, relief = RIDGE, borderwidth = 1)
output_frame.pack(anchor = N, fill = BOTH, expand = True)
output_text = ScrolledText(output_frame)
self.output_text = output_text
output_text.pack(fill = BOTH, expand = True)
input_frame = Frame(self, height = 32)
input_frame.pack(anchor = S, fill = X, expand = False)
user_label = Label(input_frame, text = "Enter username:")
user_label.pack(side = LEFT)
judge_button = Button(input_frame, text = "Judge!", command = lambda: judge(user_entry.get(), self))
judge_button.pack(side = RIGHT)
user_entry = Entry(input_frame)
user_entry.pack(fill = X, padx = 5, pady = 5, expand = True)
self.pack(fill = BOTH, expand = True)
# Write results to the output as if this is an open file
def write(self, output):
self.output_text.insert(INSERT, output)
self.output_text.see('insert')
self.output_text.update()
return len(output)
def judge(user_id, output_file):
# strip away the '@' if the user included it
user_id = re.sub('@','', user_id)
# Notify the user if the attempt failed for any reason
if user_id != 'exit' and evaluate_user(user_id, output_file) == 1:
print("An error occured.\n")
def main():
window = Tk()
window.geometry("450x600")
app = EvaluatorWindow(window)
window.mainloop()
if __name__ == "__main__":
main()
|
1b5e68192302a3a234820e4c8908a689ece7c3ae
|
sphinx_epytext/__init__.py
|
sphinx_epytext/__init__.py
|
# -*- coding: utf-8 -*-
# Copyright 2014 John Vandenberg
# Licensed under the MIT License, see LICENSE file for details.
"""Sphinx epytext support."""
from sphinx_epytext.process_docstring import process_docstring
def setup(app):
"""Sphinx extension setup function.
When the extension is loaded, Sphinx imports this module and executes
the ``setup()`` function, which in turn notifies Sphinx of everything
the extension offers.
"""
from sphinx.application import Sphinx
if not isinstance(app, Sphinx):
return # probably called by tests
app.connect('autodoc-process-docstring', process_docstring)
|
# -*- coding: utf-8 -*-
# Copyright 2014-5 John Vandenberg
# Licensed under the MIT License, see LICENSE file for details.
"""Sphinx epytext support."""
from sphinx.application import Sphinx
from sphinx_epytext.process_docstring import process_docstring
def setup(app):
"""Sphinx extension setup function.
When the extension is loaded, Sphinx imports this module and executes
the ``setup()`` function, which in turn notifies Sphinx of everything
the extension offers.
"""
if not isinstance(app, Sphinx):
return # probably called by tests
app.connect('autodoc-process-docstring', process_docstring)
|
Move imports to top of module
|
Move imports to top of module
|
Python
|
mit
|
jayvdb/sphinx-epytext
|
# -*- coding: utf-8 -*-
# Copyright 2014 John Vandenberg
# Licensed under the MIT License, see LICENSE file for details.
"""Sphinx epytext support."""
from sphinx_epytext.process_docstring import process_docstring
def setup(app):
"""Sphinx extension setup function.
When the extension is loaded, Sphinx imports this module and executes
the ``setup()`` function, which in turn notifies Sphinx of everything
the extension offers.
"""
from sphinx.application import Sphinx
if not isinstance(app, Sphinx):
return # probably called by tests
app.connect('autodoc-process-docstring', process_docstring)
Move imports to top of module
|
# -*- coding: utf-8 -*-
# Copyright 2014-5 John Vandenberg
# Licensed under the MIT License, see LICENSE file for details.
"""Sphinx epytext support."""
from sphinx.application import Sphinx
from sphinx_epytext.process_docstring import process_docstring
def setup(app):
"""Sphinx extension setup function.
When the extension is loaded, Sphinx imports this module and executes
the ``setup()`` function, which in turn notifies Sphinx of everything
the extension offers.
"""
if not isinstance(app, Sphinx):
return # probably called by tests
app.connect('autodoc-process-docstring', process_docstring)
|
<commit_before># -*- coding: utf-8 -*-
# Copyright 2014 John Vandenberg
# Licensed under the MIT License, see LICENSE file for details.
"""Sphinx epytext support."""
from sphinx_epytext.process_docstring import process_docstring
def setup(app):
"""Sphinx extension setup function.
When the extension is loaded, Sphinx imports this module and executes
the ``setup()`` function, which in turn notifies Sphinx of everything
the extension offers.
"""
from sphinx.application import Sphinx
if not isinstance(app, Sphinx):
return # probably called by tests
app.connect('autodoc-process-docstring', process_docstring)
<commit_msg>Move imports to top of module<commit_after>
|
# -*- coding: utf-8 -*-
# Copyright 2014-5 John Vandenberg
# Licensed under the MIT License, see LICENSE file for details.
"""Sphinx epytext support."""
from sphinx.application import Sphinx
from sphinx_epytext.process_docstring import process_docstring
def setup(app):
"""Sphinx extension setup function.
When the extension is loaded, Sphinx imports this module and executes
the ``setup()`` function, which in turn notifies Sphinx of everything
the extension offers.
"""
if not isinstance(app, Sphinx):
return # probably called by tests
app.connect('autodoc-process-docstring', process_docstring)
|
# -*- coding: utf-8 -*-
# Copyright 2014 John Vandenberg
# Licensed under the MIT License, see LICENSE file for details.
"""Sphinx epytext support."""
from sphinx_epytext.process_docstring import process_docstring
def setup(app):
"""Sphinx extension setup function.
When the extension is loaded, Sphinx imports this module and executes
the ``setup()`` function, which in turn notifies Sphinx of everything
the extension offers.
"""
from sphinx.application import Sphinx
if not isinstance(app, Sphinx):
return # probably called by tests
app.connect('autodoc-process-docstring', process_docstring)
Move imports to top of module# -*- coding: utf-8 -*-
# Copyright 2014-5 John Vandenberg
# Licensed under the MIT License, see LICENSE file for details.
"""Sphinx epytext support."""
from sphinx.application import Sphinx
from sphinx_epytext.process_docstring import process_docstring
def setup(app):
"""Sphinx extension setup function.
When the extension is loaded, Sphinx imports this module and executes
the ``setup()`` function, which in turn notifies Sphinx of everything
the extension offers.
"""
if not isinstance(app, Sphinx):
return # probably called by tests
app.connect('autodoc-process-docstring', process_docstring)
|
<commit_before># -*- coding: utf-8 -*-
# Copyright 2014 John Vandenberg
# Licensed under the MIT License, see LICENSE file for details.
"""Sphinx epytext support."""
from sphinx_epytext.process_docstring import process_docstring
def setup(app):
"""Sphinx extension setup function.
When the extension is loaded, Sphinx imports this module and executes
the ``setup()`` function, which in turn notifies Sphinx of everything
the extension offers.
"""
from sphinx.application import Sphinx
if not isinstance(app, Sphinx):
return # probably called by tests
app.connect('autodoc-process-docstring', process_docstring)
<commit_msg>Move imports to top of module<commit_after># -*- coding: utf-8 -*-
# Copyright 2014-5 John Vandenberg
# Licensed under the MIT License, see LICENSE file for details.
"""Sphinx epytext support."""
from sphinx.application import Sphinx
from sphinx_epytext.process_docstring import process_docstring
def setup(app):
"""Sphinx extension setup function.
When the extension is loaded, Sphinx imports this module and executes
the ``setup()`` function, which in turn notifies Sphinx of everything
the extension offers.
"""
if not isinstance(app, Sphinx):
return # probably called by tests
app.connect('autodoc-process-docstring', process_docstring)
|
fdc1c82533ca541d6e666f2834b86fa9d7cb9969
|
bin/update/deploy_dev_base.py
|
bin/update/deploy_dev_base.py
|
import logging
from commander.deploy import task
from deploy_base import * # noqa
log = logging.getLogger(__name__)
@task
def database(ctx):
with ctx.lcd(settings.SRC_DIR):
# only ever run this one on demo and dev.
ctx.local("python2.6 manage.py bedrock_truncate_database --yes-i-am-sure")
ctx.local("python2.6 manage.py syncdb --migrate --noinput")
|
import logging
from commander.deploy import task
from deploy_base import * # noqa
log = logging.getLogger(__name__)
@task
def database(ctx):
with ctx.lcd(settings.SRC_DIR):
# only ever run this one on demo and dev.
ctx.local("python2.6 manage.py bedrock_truncate_database --yes-i-am-sure")
ctx.local("python2.6 manage.py syncdb --migrate --noinput")
ctx.local("python2.6 manage.py rnasync")
|
Call rnasync after truncating db on dev deploy
|
Call rnasync after truncating db on dev deploy
|
Python
|
mpl-2.0
|
ckprice/bedrock,gerv/bedrock,CSCI-462-01-2017/bedrock,ericawright/bedrock,pmclanahan/bedrock,bensternthal/bedrock,pascalchevrel/bedrock,ckprice/bedrock,kyoshino/bedrock,schalkneethling/bedrock,CSCI-462-01-2017/bedrock,pmclanahan/bedrock,amjadm61/bedrock,Jobava/bedrock,mermi/bedrock,alexgibson/bedrock,ckprice/bedrock,glogiotatidis/bedrock,mahinthjoe/bedrock,chirilo/bedrock,TheJJ100100/bedrock,yglazko/bedrock,amjadm61/bedrock,CSCI-462-01-2017/bedrock,mermi/bedrock,schalkneethling/bedrock,l-hedgehog/bedrock,petabyte/bedrock,gauthierm/bedrock,mkmelin/bedrock,l-hedgehog/bedrock,marcoscaceres/bedrock,jpetto/bedrock,mozilla/bedrock,analytics-pros/mozilla-bedrock,petabyte/bedrock,gerv/bedrock,dudepare/bedrock,dudepare/bedrock,l-hedgehog/bedrock,hoosteeno/bedrock,jacshfr/mozilla-bedrock,mkmelin/bedrock,andreadelrio/bedrock,flodolo/bedrock,pascalchevrel/bedrock,MichaelKohler/bedrock,hoosteeno/bedrock,Sancus/bedrock,gauthierm/bedrock,glogiotatidis/bedrock,jacshfr/mozilla-bedrock,Sancus/bedrock,petabyte/bedrock,jacshfr/mozilla-bedrock,mozilla/bedrock,mkmelin/bedrock,andreadelrio/bedrock,marcoscaceres/bedrock,TheJJ100100/bedrock,sgarrity/bedrock,sylvestre/bedrock,ericawright/bedrock,TheoChevalier/bedrock,davehunt/bedrock,bensternthal/bedrock,mozilla/bedrock,mermi/bedrock,SujaySKumar/bedrock,TheoChevalier/bedrock,sgarrity/bedrock,amjadm61/bedrock,jgmize/bedrock,amjadm61/bedrock,mermi/bedrock,mozilla/bedrock,malena/bedrock,sgarrity/bedrock,sylvestre/bedrock,flodolo/bedrock,chirilo/bedrock,SujaySKumar/bedrock,marcoscaceres/bedrock,pmclanahan/bedrock,andreadelrio/bedrock,Jobava/bedrock,alexgibson/bedrock,pascalchevrel/bedrock,ericawright/bedrock,sylvestre/bedrock,analytics-pros/mozilla-bedrock,rishiloyola/bedrock,jpetto/bedrock,davehunt/bedrock,glogiotatidis/bedrock,marcoscaceres/bedrock,davehunt/bedrock,MichaelKohler/bedrock,yglazko/bedrock,amjadm61/bedrock,craigcook/bedrock,dudepare/bedrock,SujaySKumar/bedrock,jpetto/bedrock,glogiotatidis/bedrock,jacshfr/mozilla-bedrock,flodolo/be
drock,chirilo/bedrock,hoosteeno/bedrock,ericawright/bedrock,mahinthjoe/bedrock,SujaySKumar/bedrock,sylvestre/bedrock,davehunt/bedrock,pmclanahan/bedrock,jacshfr/mozilla-bedrock,mahinthjoe/bedrock,mkmelin/bedrock,kyoshino/bedrock,Sancus/bedrock,MichaelKohler/bedrock,analytics-pros/mozilla-bedrock,andreadelrio/bedrock,craigcook/bedrock,TheJJ100100/bedrock,jgmize/bedrock,craigcook/bedrock,Jobava/bedrock,kyoshino/bedrock,craigcook/bedrock,sgarrity/bedrock,TheoChevalier/bedrock,dudepare/bedrock,pascalchevrel/bedrock,hoosteeno/bedrock,l-hedgehog/bedrock,rishiloyola/bedrock,kyoshino/bedrock,yglazko/bedrock,gerv/bedrock,jgmize/bedrock,alexgibson/bedrock,alexgibson/bedrock,yglazko/bedrock,jgmize/bedrock,rishiloyola/bedrock,gauthierm/bedrock,bensternthal/bedrock,CSCI-462-01-2017/bedrock,gerv/bedrock,TheJJ100100/bedrock,jpetto/bedrock,mahinthjoe/bedrock,TheoChevalier/bedrock,schalkneethling/bedrock,chirilo/bedrock,malena/bedrock,Sancus/bedrock,flodolo/bedrock,malena/bedrock,malena/bedrock,petabyte/bedrock,Jobava/bedrock,analytics-pros/mozilla-bedrock,MichaelKohler/bedrock,schalkneethling/bedrock,rishiloyola/bedrock,gauthierm/bedrock,bensternthal/bedrock,ckprice/bedrock
|
import logging
from commander.deploy import task
from deploy_base import * # noqa
log = logging.getLogger(__name__)
@task
def database(ctx):
with ctx.lcd(settings.SRC_DIR):
# only ever run this one on demo and dev.
ctx.local("python2.6 manage.py bedrock_truncate_database --yes-i-am-sure")
ctx.local("python2.6 manage.py syncdb --migrate --noinput")
Call rnasync after truncating db on dev deploy
|
import logging
from commander.deploy import task
from deploy_base import * # noqa
log = logging.getLogger(__name__)
@task
def database(ctx):
with ctx.lcd(settings.SRC_DIR):
# only ever run this one on demo and dev.
ctx.local("python2.6 manage.py bedrock_truncate_database --yes-i-am-sure")
ctx.local("python2.6 manage.py syncdb --migrate --noinput")
ctx.local("python2.6 manage.py rnasync")
|
<commit_before>import logging
from commander.deploy import task
from deploy_base import * # noqa
log = logging.getLogger(__name__)
@task
def database(ctx):
with ctx.lcd(settings.SRC_DIR):
# only ever run this one on demo and dev.
ctx.local("python2.6 manage.py bedrock_truncate_database --yes-i-am-sure")
ctx.local("python2.6 manage.py syncdb --migrate --noinput")
<commit_msg>Call rnasync after truncating db on dev deploy<commit_after>
|
import logging
from commander.deploy import task
from deploy_base import * # noqa
log = logging.getLogger(__name__)
@task
def database(ctx):
with ctx.lcd(settings.SRC_DIR):
# only ever run this one on demo and dev.
ctx.local("python2.6 manage.py bedrock_truncate_database --yes-i-am-sure")
ctx.local("python2.6 manage.py syncdb --migrate --noinput")
ctx.local("python2.6 manage.py rnasync")
|
import logging
from commander.deploy import task
from deploy_base import * # noqa
log = logging.getLogger(__name__)
@task
def database(ctx):
with ctx.lcd(settings.SRC_DIR):
# only ever run this one on demo and dev.
ctx.local("python2.6 manage.py bedrock_truncate_database --yes-i-am-sure")
ctx.local("python2.6 manage.py syncdb --migrate --noinput")
Call rnasync after truncating db on dev deployimport logging
from commander.deploy import task
from deploy_base import * # noqa
log = logging.getLogger(__name__)
@task
def database(ctx):
with ctx.lcd(settings.SRC_DIR):
# only ever run this one on demo and dev.
ctx.local("python2.6 manage.py bedrock_truncate_database --yes-i-am-sure")
ctx.local("python2.6 manage.py syncdb --migrate --noinput")
ctx.local("python2.6 manage.py rnasync")
|
<commit_before>import logging
from commander.deploy import task
from deploy_base import * # noqa
log = logging.getLogger(__name__)
@task
def database(ctx):
with ctx.lcd(settings.SRC_DIR):
# only ever run this one on demo and dev.
ctx.local("python2.6 manage.py bedrock_truncate_database --yes-i-am-sure")
ctx.local("python2.6 manage.py syncdb --migrate --noinput")
<commit_msg>Call rnasync after truncating db on dev deploy<commit_after>import logging
from commander.deploy import task
from deploy_base import * # noqa
log = logging.getLogger(__name__)
@task
def database(ctx):
with ctx.lcd(settings.SRC_DIR):
# only ever run this one on demo and dev.
ctx.local("python2.6 manage.py bedrock_truncate_database --yes-i-am-sure")
ctx.local("python2.6 manage.py syncdb --migrate --noinput")
ctx.local("python2.6 manage.py rnasync")
|
cba07745953e4b5c2c66c1698841b5f081e5da9d
|
greenmine/settings/__init__.py
|
greenmine/settings/__init__.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
try:
print "Trying import local.py settings..."
from .local import *
except ImportError:
print "Trying import development.py settings..."
from .development import *
|
# -*- coding: utf-8 -*-
from __future__ import (
absolute_import,
print_function
)
import os, sys
try:
print("Trying import local.py settings...", file=sys.stderr)
from .local import *
except ImportError:
print("Trying import development.py settings...", file=sys.stderr)
from .development import *
|
Send more print message to sys.stderr
|
Smallfix: Send more print message to sys.stderr
|
Python
|
agpl-3.0
|
joshisa/taiga-back,WALR/taiga-back,jeffdwyatt/taiga-back,bdang2012/taiga-back-casting,CMLL/taiga-back,astronaut1712/taiga-back,bdang2012/taiga-back-casting,joshisa/taiga-back,dycodedev/taiga-back,Zaneh-/bearded-tribble-back,CoolCloud/taiga-back,Rademade/taiga-back,gam-phon/taiga-back,bdang2012/taiga-back-casting,19kestier/taiga-back,forging2012/taiga-back,obimod/taiga-back,taigaio/taiga-back,gam-phon/taiga-back,WALR/taiga-back,CoolCloud/taiga-back,bdang2012/taiga-back-casting,jeffdwyatt/taiga-back,Rademade/taiga-back,CoolCloud/taiga-back,xdevelsistemas/taiga-back-community,WALR/taiga-back,CMLL/taiga-back,gam-phon/taiga-back,seanchen/taiga-back,astronaut1712/taiga-back,Rademade/taiga-back,astronaut1712/taiga-back,Zaneh-/bearded-tribble-back,Tigerwhit4/taiga-back,gauravjns/taiga-back,joshisa/taiga-back,dayatz/taiga-back,astagi/taiga-back,coopsource/taiga-back,joshisa/taiga-back,EvgeneOskin/taiga-back,forging2012/taiga-back,astronaut1712/taiga-back,rajiteh/taiga-back,gauravjns/taiga-back,coopsource/taiga-back,dycodedev/taiga-back,dycodedev/taiga-back,astagi/taiga-back,forging2012/taiga-back,gauravjns/taiga-back,Rademade/taiga-back,frt-arch/taiga-back,rajiteh/taiga-back,WALR/taiga-back,obimod/taiga-back,Tigerwhit4/taiga-back,gauravjns/taiga-back,crr0004/taiga-back,dycodedev/taiga-back,xdevelsistemas/taiga-back-community,19kestier/taiga-back,EvgeneOskin/taiga-back,coopsource/taiga-back,frt-arch/taiga-back,taigaio/taiga-back,crr0004/taiga-back,taigaio/taiga-back,astagi/taiga-back,19kestier/taiga-back,Zaneh-/bearded-tribble-back,dayatz/taiga-back,CoolCloud/taiga-back,seanchen/taiga-back,obimod/taiga-back,obimod/taiga-back,forging2012/taiga-back,Tigerwhit4/taiga-back,jeffdwyatt/taiga-back,coopsource/taiga-back,Rademade/taiga-back,dayatz/taiga-back,xdevelsistemas/taiga-back-community,CMLL/taiga-back,Tigerwhit4/taiga-back,CMLL/taiga-back,jeffdwyatt/taiga-back,crr0004/taiga-back,seanchen/taiga-back,rajiteh/taiga-back,EvgeneOskin/taiga-back,rajiteh/taiga-back,gam-phon/taiga-bac
k,astagi/taiga-back,seanchen/taiga-back,crr0004/taiga-back,frt-arch/taiga-back,EvgeneOskin/taiga-back
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
try:
print "Trying import local.py settings..."
from .local import *
except ImportError:
print "Trying import development.py settings..."
from .development import *
Smallfix: Send more print message to sys.stderr
|
# -*- coding: utf-8 -*-
from __future__ import (
absolute_import,
print_function
)
import os, sys
try:
print("Trying import local.py settings...", file=sys.stderr)
from .local import *
except ImportError:
print("Trying import development.py settings...", file=sys.stderr)
from .development import *
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
try:
print "Trying import local.py settings..."
from .local import *
except ImportError:
print "Trying import development.py settings..."
from .development import *
<commit_msg>Smallfix: Send more print message to sys.stderr<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import (
absolute_import,
print_function
)
import os, sys
try:
print("Trying import local.py settings...", file=sys.stderr)
from .local import *
except ImportError:
print("Trying import development.py settings...", file=sys.stderr)
from .development import *
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
try:
print "Trying import local.py settings..."
from .local import *
except ImportError:
print "Trying import development.py settings..."
from .development import *
Smallfix: Send more print message to sys.stderr# -*- coding: utf-8 -*-
from __future__ import (
absolute_import,
print_function
)
import os, sys
try:
print("Trying import local.py settings...", file=sys.stderr)
from .local import *
except ImportError:
print("Trying import development.py settings...", file=sys.stderr)
from .development import *
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
try:
print "Trying import local.py settings..."
from .local import *
except ImportError:
print "Trying import development.py settings..."
from .development import *
<commit_msg>Smallfix: Send more print message to sys.stderr<commit_after># -*- coding: utf-8 -*-
from __future__ import (
absolute_import,
print_function
)
import os, sys
try:
print("Trying import local.py settings...", file=sys.stderr)
from .local import *
except ImportError:
print("Trying import development.py settings...", file=sys.stderr)
from .development import *
|
d28cb95d7033b34e34661700dc8a1aafd6a61bc8
|
src/db/schema.py
|
src/db/schema.py
|
import logging
from datetime import datetime
import sqlalchemy as sql
from sqlalchemy import Table, Column, Integer, String, DateTime, Text, ForeignKey
from sqlalchemy import orm
from sqlalchemy.ext.declarative import declarative_base
import db
Base = declarative_base()
Base.metadata.bind = db.engine
class Round(Base):
__tablename__ = 'round'
id = Column(Integer, primary_key=True)
start_time = Column(DateTime, default=datetime.now)
end_time = Column(DateTime, nullable=False)
duration_secs = Column(Integer, nullable=False)
submissions = orm.relationship('Submission', backref='round',
lazy='dynamic')
class Perspective(Base):
__tablename__ = 'perspective'
id = Column(Integer, primary_key=True)
gender = Column(String(6), nullable=False)
text = Column(Text, nullable=False)
created_at = Column(DateTime, default=datetime.now)
submissions = orm.relationship('Submission', backref='perspective',
lazy='dynamic')
class Submission(Base):
__tablename__ = 'submission'
id = Column(Integer, primary_key=True)
perspective_id = Column(Integer, ForeignKey('perspective.id'), nullable=False)
round_id = Column(Integer, ForeignKey('round.id'), nullable=False)
text = Column(Text, nullable=False)
likes = Column(Integer, default=0)
Base.metadata.create_all()
logging.info("Table information loaded")
|
import logging
from datetime import datetime
import sqlalchemy as sql
from sqlalchemy import Table, Column, Integer, String, DateTime, Text, ForeignKey
from sqlalchemy import orm
from sqlalchemy.ext.declarative import declarative_base
import db
Base = declarative_base()
Base.metadata.bind = db.engine
class Round(Base):
__tablename__ = 'round'
id = Column(Integer, primary_key=True)
start_time = Column(DateTime, default=datetime.now)
end_time = Column(DateTime, nullable=False)
duration_secs = Column(Integer, nullable=False)
class Perspective(Base):
__tablename__ = 'perspective'
id = Column(Integer, primary_key=True)
gender = Column(String(6), nullable=False)
text = Column(Text, nullable=False)
created_at = Column(DateTime, default=datetime.now)
submissions = orm.relationship('Submission', backref='perspective',
lazy='dynamic')
class Submission(Base):
__tablename__ = 'submission'
id = Column(Integer, primary_key=True)
perspective_id = Column(Integer, ForeignKey('perspective.id'), nullable=False)
text = Column(Text, nullable=False)
likes = Column(Integer, default=0)
Base.metadata.create_all()
logging.info("Table information loaded")
|
Allow round_id for submissions to be calculated dynamically
|
Allow round_id for submissions to be calculated dynamically
|
Python
|
apache-2.0
|
pascalc/narrative-roulette,pascalc/narrative-roulette
|
import logging
from datetime import datetime
import sqlalchemy as sql
from sqlalchemy import Table, Column, Integer, String, DateTime, Text, ForeignKey
from sqlalchemy import orm
from sqlalchemy.ext.declarative import declarative_base
import db
Base = declarative_base()
Base.metadata.bind = db.engine
class Round(Base):
__tablename__ = 'round'
id = Column(Integer, primary_key=True)
start_time = Column(DateTime, default=datetime.now)
end_time = Column(DateTime, nullable=False)
duration_secs = Column(Integer, nullable=False)
submissions = orm.relationship('Submission', backref='round',
lazy='dynamic')
class Perspective(Base):
__tablename__ = 'perspective'
id = Column(Integer, primary_key=True)
gender = Column(String(6), nullable=False)
text = Column(Text, nullable=False)
created_at = Column(DateTime, default=datetime.now)
submissions = orm.relationship('Submission', backref='perspective',
lazy='dynamic')
class Submission(Base):
__tablename__ = 'submission'
id = Column(Integer, primary_key=True)
perspective_id = Column(Integer, ForeignKey('perspective.id'), nullable=False)
round_id = Column(Integer, ForeignKey('round.id'), nullable=False)
text = Column(Text, nullable=False)
likes = Column(Integer, default=0)
Base.metadata.create_all()
logging.info("Table information loaded")
Allow round_id for submissions to be calculated dynamically
|
import logging
from datetime import datetime
import sqlalchemy as sql
from sqlalchemy import Table, Column, Integer, String, DateTime, Text, ForeignKey
from sqlalchemy import orm
from sqlalchemy.ext.declarative import declarative_base
import db
Base = declarative_base()
Base.metadata.bind = db.engine
class Round(Base):
__tablename__ = 'round'
id = Column(Integer, primary_key=True)
start_time = Column(DateTime, default=datetime.now)
end_time = Column(DateTime, nullable=False)
duration_secs = Column(Integer, nullable=False)
class Perspective(Base):
__tablename__ = 'perspective'
id = Column(Integer, primary_key=True)
gender = Column(String(6), nullable=False)
text = Column(Text, nullable=False)
created_at = Column(DateTime, default=datetime.now)
submissions = orm.relationship('Submission', backref='perspective',
lazy='dynamic')
class Submission(Base):
__tablename__ = 'submission'
id = Column(Integer, primary_key=True)
perspective_id = Column(Integer, ForeignKey('perspective.id'), nullable=False)
text = Column(Text, nullable=False)
likes = Column(Integer, default=0)
Base.metadata.create_all()
logging.info("Table information loaded")
|
<commit_before>import logging
from datetime import datetime
import sqlalchemy as sql
from sqlalchemy import Table, Column, Integer, String, DateTime, Text, ForeignKey
from sqlalchemy import orm
from sqlalchemy.ext.declarative import declarative_base
import db
Base = declarative_base()
Base.metadata.bind = db.engine
class Round(Base):
__tablename__ = 'round'
id = Column(Integer, primary_key=True)
start_time = Column(DateTime, default=datetime.now)
end_time = Column(DateTime, nullable=False)
duration_secs = Column(Integer, nullable=False)
submissions = orm.relationship('Submission', backref='round',
lazy='dynamic')
class Perspective(Base):
__tablename__ = 'perspective'
id = Column(Integer, primary_key=True)
gender = Column(String(6), nullable=False)
text = Column(Text, nullable=False)
created_at = Column(DateTime, default=datetime.now)
submissions = orm.relationship('Submission', backref='perspective',
lazy='dynamic')
class Submission(Base):
__tablename__ = 'submission'
id = Column(Integer, primary_key=True)
perspective_id = Column(Integer, ForeignKey('perspective.id'), nullable=False)
round_id = Column(Integer, ForeignKey('round.id'), nullable=False)
text = Column(Text, nullable=False)
likes = Column(Integer, default=0)
Base.metadata.create_all()
logging.info("Table information loaded")
<commit_msg>Allow round_id for submissions to be calculated dynamically<commit_after>
|
import logging
from datetime import datetime
import sqlalchemy as sql
from sqlalchemy import Table, Column, Integer, String, DateTime, Text, ForeignKey
from sqlalchemy import orm
from sqlalchemy.ext.declarative import declarative_base
import db
Base = declarative_base()
Base.metadata.bind = db.engine
class Round(Base):
__tablename__ = 'round'
id = Column(Integer, primary_key=True)
start_time = Column(DateTime, default=datetime.now)
end_time = Column(DateTime, nullable=False)
duration_secs = Column(Integer, nullable=False)
class Perspective(Base):
__tablename__ = 'perspective'
id = Column(Integer, primary_key=True)
gender = Column(String(6), nullable=False)
text = Column(Text, nullable=False)
created_at = Column(DateTime, default=datetime.now)
submissions = orm.relationship('Submission', backref='perspective',
lazy='dynamic')
class Submission(Base):
__tablename__ = 'submission'
id = Column(Integer, primary_key=True)
perspective_id = Column(Integer, ForeignKey('perspective.id'), nullable=False)
text = Column(Text, nullable=False)
likes = Column(Integer, default=0)
Base.metadata.create_all()
logging.info("Table information loaded")
|
import logging
from datetime import datetime
import sqlalchemy as sql
from sqlalchemy import Table, Column, Integer, String, DateTime, Text, ForeignKey
from sqlalchemy import orm
from sqlalchemy.ext.declarative import declarative_base
import db
Base = declarative_base()
Base.metadata.bind = db.engine
class Round(Base):
__tablename__ = 'round'
id = Column(Integer, primary_key=True)
start_time = Column(DateTime, default=datetime.now)
end_time = Column(DateTime, nullable=False)
duration_secs = Column(Integer, nullable=False)
submissions = orm.relationship('Submission', backref='round',
lazy='dynamic')
class Perspective(Base):
__tablename__ = 'perspective'
id = Column(Integer, primary_key=True)
gender = Column(String(6), nullable=False)
text = Column(Text, nullable=False)
created_at = Column(DateTime, default=datetime.now)
submissions = orm.relationship('Submission', backref='perspective',
lazy='dynamic')
class Submission(Base):
__tablename__ = 'submission'
id = Column(Integer, primary_key=True)
perspective_id = Column(Integer, ForeignKey('perspective.id'), nullable=False)
round_id = Column(Integer, ForeignKey('round.id'), nullable=False)
text = Column(Text, nullable=False)
likes = Column(Integer, default=0)
Base.metadata.create_all()
logging.info("Table information loaded")
Allow round_id for submissions to be calculated dynamicallyimport logging
from datetime import datetime
import sqlalchemy as sql
from sqlalchemy import Table, Column, Integer, String, DateTime, Text, ForeignKey
from sqlalchemy import orm
from sqlalchemy.ext.declarative import declarative_base
import db
Base = declarative_base()
Base.metadata.bind = db.engine
class Round(Base):
__tablename__ = 'round'
id = Column(Integer, primary_key=True)
start_time = Column(DateTime, default=datetime.now)
end_time = Column(DateTime, nullable=False)
duration_secs = Column(Integer, nullable=False)
class Perspective(Base):
__tablename__ = 'perspective'
id = Column(Integer, primary_key=True)
gender = Column(String(6), nullable=False)
text = Column(Text, nullable=False)
created_at = Column(DateTime, default=datetime.now)
submissions = orm.relationship('Submission', backref='perspective',
lazy='dynamic')
class Submission(Base):
__tablename__ = 'submission'
id = Column(Integer, primary_key=True)
perspective_id = Column(Integer, ForeignKey('perspective.id'), nullable=False)
text = Column(Text, nullable=False)
likes = Column(Integer, default=0)
Base.metadata.create_all()
logging.info("Table information loaded")
|
<commit_before>import logging
from datetime import datetime
import sqlalchemy as sql
from sqlalchemy import Table, Column, Integer, String, DateTime, Text, ForeignKey
from sqlalchemy import orm
from sqlalchemy.ext.declarative import declarative_base
import db
Base = declarative_base()
Base.metadata.bind = db.engine
class Round(Base):
__tablename__ = 'round'
id = Column(Integer, primary_key=True)
start_time = Column(DateTime, default=datetime.now)
end_time = Column(DateTime, nullable=False)
duration_secs = Column(Integer, nullable=False)
submissions = orm.relationship('Submission', backref='round',
lazy='dynamic')
class Perspective(Base):
__tablename__ = 'perspective'
id = Column(Integer, primary_key=True)
gender = Column(String(6), nullable=False)
text = Column(Text, nullable=False)
created_at = Column(DateTime, default=datetime.now)
submissions = orm.relationship('Submission', backref='perspective',
lazy='dynamic')
class Submission(Base):
__tablename__ = 'submission'
id = Column(Integer, primary_key=True)
perspective_id = Column(Integer, ForeignKey('perspective.id'), nullable=False)
round_id = Column(Integer, ForeignKey('round.id'), nullable=False)
text = Column(Text, nullable=False)
likes = Column(Integer, default=0)
Base.metadata.create_all()
logging.info("Table information loaded")
<commit_msg>Allow round_id for submissions to be calculated dynamically<commit_after>import logging
from datetime import datetime
import sqlalchemy as sql
from sqlalchemy import Table, Column, Integer, String, DateTime, Text, ForeignKey
from sqlalchemy import orm
from sqlalchemy.ext.declarative import declarative_base
import db
Base = declarative_base()
Base.metadata.bind = db.engine
class Round(Base):
__tablename__ = 'round'
id = Column(Integer, primary_key=True)
start_time = Column(DateTime, default=datetime.now)
end_time = Column(DateTime, nullable=False)
duration_secs = Column(Integer, nullable=False)
class Perspective(Base):
__tablename__ = 'perspective'
id = Column(Integer, primary_key=True)
gender = Column(String(6), nullable=False)
text = Column(Text, nullable=False)
created_at = Column(DateTime, default=datetime.now)
submissions = orm.relationship('Submission', backref='perspective',
lazy='dynamic')
class Submission(Base):
__tablename__ = 'submission'
id = Column(Integer, primary_key=True)
perspective_id = Column(Integer, ForeignKey('perspective.id'), nullable=False)
text = Column(Text, nullable=False)
likes = Column(Integer, default=0)
Base.metadata.create_all()
logging.info("Table information loaded")
|
028cf52b2d09c6cd1ca8c0e1e779cd5d8ff3ca3a
|
tests/test_ubuntupkg.py
|
tests/test_ubuntupkg.py
|
# MIT licensed
# Copyright (c) 2017 Felix Yan <felixonmars@archlinux.org>, et al.
import pytest
pytestmark = pytest.mark.asyncio
async def test_ubuntupkg(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None}) == "0.1.3-1"
async def test_ubuntupkg_strip_release(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None, "strip-release": 1}) == "0.1.3"
async def test_ubuntupkg_suite(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None, "suite": "xenial"}) == "0.1.2-1"
async def test_ubuntupkg_suite_with_paging(get_version):
assert await get_version("ffmpeg", {"ubuntupkg": None, "suite": "vivid"}) == "7:2.5.10-0ubuntu0.15.04.1"
|
# MIT licensed
# Copyright (c) 2017 Felix Yan <felixonmars@archlinux.org>, et al.
from flaky import flaky
import pytest
pytestmark = pytest.mark.asyncio
@flaky
async def test_ubuntupkg(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None}) == "0.1.3-1"
@flaky
async def test_ubuntupkg_strip_release(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None, "strip-release": 1}) == "0.1.3"
@flaky
async def test_ubuntupkg_suite(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None, "suite": "xenial"}) == "0.1.2-1"
@flaky
async def test_ubuntupkg_suite_with_paging(get_version):
assert await get_version("ffmpeg", {"ubuntupkg": None, "suite": "vivid"}) == "7:2.5.10-0ubuntu0.15.04.1"
|
Mark ubuntupkg tests as flaky
|
Mark ubuntupkg tests as flaky
|
Python
|
mit
|
lilydjwg/nvchecker
|
# MIT licensed
# Copyright (c) 2017 Felix Yan <felixonmars@archlinux.org>, et al.
import pytest
pytestmark = pytest.mark.asyncio
async def test_ubuntupkg(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None}) == "0.1.3-1"
async def test_ubuntupkg_strip_release(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None, "strip-release": 1}) == "0.1.3"
async def test_ubuntupkg_suite(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None, "suite": "xenial"}) == "0.1.2-1"
async def test_ubuntupkg_suite_with_paging(get_version):
assert await get_version("ffmpeg", {"ubuntupkg": None, "suite": "vivid"}) == "7:2.5.10-0ubuntu0.15.04.1"
Mark ubuntupkg tests as flaky
|
# MIT licensed
# Copyright (c) 2017 Felix Yan <felixonmars@archlinux.org>, et al.
from flaky import flaky
import pytest
pytestmark = pytest.mark.asyncio
@flaky
async def test_ubuntupkg(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None}) == "0.1.3-1"
@flaky
async def test_ubuntupkg_strip_release(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None, "strip-release": 1}) == "0.1.3"
@flaky
async def test_ubuntupkg_suite(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None, "suite": "xenial"}) == "0.1.2-1"
@flaky
async def test_ubuntupkg_suite_with_paging(get_version):
assert await get_version("ffmpeg", {"ubuntupkg": None, "suite": "vivid"}) == "7:2.5.10-0ubuntu0.15.04.1"
|
<commit_before># MIT licensed
# Copyright (c) 2017 Felix Yan <felixonmars@archlinux.org>, et al.
import pytest
pytestmark = pytest.mark.asyncio
async def test_ubuntupkg(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None}) == "0.1.3-1"
async def test_ubuntupkg_strip_release(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None, "strip-release": 1}) == "0.1.3"
async def test_ubuntupkg_suite(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None, "suite": "xenial"}) == "0.1.2-1"
async def test_ubuntupkg_suite_with_paging(get_version):
assert await get_version("ffmpeg", {"ubuntupkg": None, "suite": "vivid"}) == "7:2.5.10-0ubuntu0.15.04.1"
<commit_msg>Mark ubuntupkg tests as flaky<commit_after>
|
# MIT licensed
# Copyright (c) 2017 Felix Yan <felixonmars@archlinux.org>, et al.
from flaky import flaky
import pytest
pytestmark = pytest.mark.asyncio
@flaky
async def test_ubuntupkg(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None}) == "0.1.3-1"
@flaky
async def test_ubuntupkg_strip_release(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None, "strip-release": 1}) == "0.1.3"
@flaky
async def test_ubuntupkg_suite(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None, "suite": "xenial"}) == "0.1.2-1"
@flaky
async def test_ubuntupkg_suite_with_paging(get_version):
assert await get_version("ffmpeg", {"ubuntupkg": None, "suite": "vivid"}) == "7:2.5.10-0ubuntu0.15.04.1"
|
# MIT licensed
# Copyright (c) 2017 Felix Yan <felixonmars@archlinux.org>, et al.
import pytest
pytestmark = pytest.mark.asyncio
async def test_ubuntupkg(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None}) == "0.1.3-1"
async def test_ubuntupkg_strip_release(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None, "strip-release": 1}) == "0.1.3"
async def test_ubuntupkg_suite(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None, "suite": "xenial"}) == "0.1.2-1"
async def test_ubuntupkg_suite_with_paging(get_version):
assert await get_version("ffmpeg", {"ubuntupkg": None, "suite": "vivid"}) == "7:2.5.10-0ubuntu0.15.04.1"
Mark ubuntupkg tests as flaky# MIT licensed
# Copyright (c) 2017 Felix Yan <felixonmars@archlinux.org>, et al.
from flaky import flaky
import pytest
pytestmark = pytest.mark.asyncio
@flaky
async def test_ubuntupkg(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None}) == "0.1.3-1"
@flaky
async def test_ubuntupkg_strip_release(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None, "strip-release": 1}) == "0.1.3"
@flaky
async def test_ubuntupkg_suite(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None, "suite": "xenial"}) == "0.1.2-1"
@flaky
async def test_ubuntupkg_suite_with_paging(get_version):
assert await get_version("ffmpeg", {"ubuntupkg": None, "suite": "vivid"}) == "7:2.5.10-0ubuntu0.15.04.1"
|
<commit_before># MIT licensed
# Copyright (c) 2017 Felix Yan <felixonmars@archlinux.org>, et al.
import pytest
pytestmark = pytest.mark.asyncio
async def test_ubuntupkg(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None}) == "0.1.3-1"
async def test_ubuntupkg_strip_release(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None, "strip-release": 1}) == "0.1.3"
async def test_ubuntupkg_suite(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None, "suite": "xenial"}) == "0.1.2-1"
async def test_ubuntupkg_suite_with_paging(get_version):
assert await get_version("ffmpeg", {"ubuntupkg": None, "suite": "vivid"}) == "7:2.5.10-0ubuntu0.15.04.1"
<commit_msg>Mark ubuntupkg tests as flaky<commit_after># MIT licensed
# Copyright (c) 2017 Felix Yan <felixonmars@archlinux.org>, et al.
from flaky import flaky
import pytest
pytestmark = pytest.mark.asyncio
@flaky
async def test_ubuntupkg(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None}) == "0.1.3-1"
@flaky
async def test_ubuntupkg_strip_release(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None, "strip-release": 1}) == "0.1.3"
@flaky
async def test_ubuntupkg_suite(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None, "suite": "xenial"}) == "0.1.2-1"
@flaky
async def test_ubuntupkg_suite_with_paging(get_version):
assert await get_version("ffmpeg", {"ubuntupkg": None, "suite": "vivid"}) == "7:2.5.10-0ubuntu0.15.04.1"
|
3401b0147f19839323b06b0838e6fdbd5c125649
|
opps/core/models/published.py
|
opps/core/models/published.py
|
#!/usr/bin/env python
from django.db import models
from django.utils.translation import ugettext_lazy as _
from datetime import datetime
class PublisherMnager(models.Manager):
def all_published(self):
return super(PublisherMnager, self).get_query_set().filter(
date_available__lte=datetime.now(), published=True)
class Publisher(models.Model):
date_available = models.DateTimeField(_(u"Date available"),
default=datetime.now, null=True)
published = models.BooleanField(_(u"Published"), default=False)
objects = PublisherMnager()
class Meta:
abstract = True
def is_published(self):
return self.published and \
self.date_available.replace(tzinfo=None) <= datetime.now()
|
#!/usr/bin/env python
from django.db import models
from django.utils.translation import ugettext_lazy as _
from datetime import datetime
class PublisherMnager(models.Manager):
def all_published(self):
return super(PublisherMnager, self).get_query_set().filter(
date_available__lte=datetime.now(), published=True)
class Published(models.Model):
date_available = models.DateTimeField(_(u"Date available"),
default=datetime.now, null=True)
published = models.BooleanField(_(u"Published"), default=False)
objects = PublisherMnager()
class Meta:
abstract = True
def is_published(self):
return self.published and \
self.date_available.replace(tzinfo=None) <= datetime.now()
|
Change class name Publisher to Published on models
|
Change class name Publisher to Published on models
|
Python
|
mit
|
jeanmask/opps,jeanmask/opps,opps/opps,YACOWS/opps,jeanmask/opps,jeanmask/opps,opps/opps,opps/opps,YACOWS/opps,williamroot/opps,williamroot/opps,williamroot/opps,YACOWS/opps,williamroot/opps,opps/opps,YACOWS/opps
|
#!/usr/bin/env python
from django.db import models
from django.utils.translation import ugettext_lazy as _
from datetime import datetime
class PublisherMnager(models.Manager):
def all_published(self):
return super(PublisherMnager, self).get_query_set().filter(
date_available__lte=datetime.now(), published=True)
class Publisher(models.Model):
date_available = models.DateTimeField(_(u"Date available"),
default=datetime.now, null=True)
published = models.BooleanField(_(u"Published"), default=False)
objects = PublisherMnager()
class Meta:
abstract = True
def is_published(self):
return self.published and \
self.date_available.replace(tzinfo=None) <= datetime.now()
Change class name Publisher to Published on models
|
#!/usr/bin/env python
from django.db import models
from django.utils.translation import ugettext_lazy as _
from datetime import datetime
class PublisherMnager(models.Manager):
def all_published(self):
return super(PublisherMnager, self).get_query_set().filter(
date_available__lte=datetime.now(), published=True)
class Published(models.Model):
date_available = models.DateTimeField(_(u"Date available"),
default=datetime.now, null=True)
published = models.BooleanField(_(u"Published"), default=False)
objects = PublisherMnager()
class Meta:
abstract = True
def is_published(self):
return self.published and \
self.date_available.replace(tzinfo=None) <= datetime.now()
|
<commit_before>#!/usr/bin/env python
from django.db import models
from django.utils.translation import ugettext_lazy as _
from datetime import datetime
class PublisherMnager(models.Manager):
def all_published(self):
return super(PublisherMnager, self).get_query_set().filter(
date_available__lte=datetime.now(), published=True)
class Publisher(models.Model):
date_available = models.DateTimeField(_(u"Date available"),
default=datetime.now, null=True)
published = models.BooleanField(_(u"Published"), default=False)
objects = PublisherMnager()
class Meta:
abstract = True
def is_published(self):
return self.published and \
self.date_available.replace(tzinfo=None) <= datetime.now()
<commit_msg>Change class name Publisher to Published on models<commit_after>
|
#!/usr/bin/env python
from django.db import models
from django.utils.translation import ugettext_lazy as _
from datetime import datetime
class PublisherMnager(models.Manager):
def all_published(self):
return super(PublisherMnager, self).get_query_set().filter(
date_available__lte=datetime.now(), published=True)
class Published(models.Model):
date_available = models.DateTimeField(_(u"Date available"),
default=datetime.now, null=True)
published = models.BooleanField(_(u"Published"), default=False)
objects = PublisherMnager()
class Meta:
abstract = True
def is_published(self):
return self.published and \
self.date_available.replace(tzinfo=None) <= datetime.now()
|
#!/usr/bin/env python
from django.db import models
from django.utils.translation import ugettext_lazy as _
from datetime import datetime
class PublisherMnager(models.Manager):
def all_published(self):
return super(PublisherMnager, self).get_query_set().filter(
date_available__lte=datetime.now(), published=True)
class Publisher(models.Model):
date_available = models.DateTimeField(_(u"Date available"),
default=datetime.now, null=True)
published = models.BooleanField(_(u"Published"), default=False)
objects = PublisherMnager()
class Meta:
abstract = True
def is_published(self):
return self.published and \
self.date_available.replace(tzinfo=None) <= datetime.now()
Change class name Publisher to Published on models#!/usr/bin/env python
from django.db import models
from django.utils.translation import ugettext_lazy as _
from datetime import datetime
class PublisherMnager(models.Manager):
def all_published(self):
return super(PublisherMnager, self).get_query_set().filter(
date_available__lte=datetime.now(), published=True)
class Published(models.Model):
date_available = models.DateTimeField(_(u"Date available"),
default=datetime.now, null=True)
published = models.BooleanField(_(u"Published"), default=False)
objects = PublisherMnager()
class Meta:
abstract = True
def is_published(self):
return self.published and \
self.date_available.replace(tzinfo=None) <= datetime.now()
|
<commit_before>#!/usr/bin/env python
from django.db import models
from django.utils.translation import ugettext_lazy as _
from datetime import datetime
class PublisherMnager(models.Manager):
def all_published(self):
return super(PublisherMnager, self).get_query_set().filter(
date_available__lte=datetime.now(), published=True)
class Publisher(models.Model):
date_available = models.DateTimeField(_(u"Date available"),
default=datetime.now, null=True)
published = models.BooleanField(_(u"Published"), default=False)
objects = PublisherMnager()
class Meta:
abstract = True
def is_published(self):
return self.published and \
self.date_available.replace(tzinfo=None) <= datetime.now()
<commit_msg>Change class name Publisher to Published on models<commit_after>#!/usr/bin/env python
from django.db import models
from django.utils.translation import ugettext_lazy as _
from datetime import datetime
class PublisherMnager(models.Manager):
def all_published(self):
return super(PublisherMnager, self).get_query_set().filter(
date_available__lte=datetime.now(), published=True)
class Published(models.Model):
date_available = models.DateTimeField(_(u"Date available"),
default=datetime.now, null=True)
published = models.BooleanField(_(u"Published"), default=False)
objects = PublisherMnager()
class Meta:
abstract = True
def is_published(self):
return self.published and \
self.date_available.replace(tzinfo=None) <= datetime.now()
|
67f39d1b51014b06877b93ba32a18a1f53cd231c
|
mama_cas/urls.py
|
mama_cas/urls.py
|
"""
URLconf for CAS server URIs as described in the CAS protocol.
"""
from django.conf.urls import patterns
from django.conf.urls import url
from django.views.decorators.cache import never_cache
from mama_cas.views import login
from mama_cas.views import logout
from mama_cas.views import validate
from mama_cas.views import service_validate
from mama_cas.views import proxy_validate
from mama_cas.views import proxy
urlpatterns = patterns('',
url(r'^cas/login/$',
never_cache(login),
{'template_name': 'mama_cas/login.html'},
name='cas_login'),
url(r'^cas/logout/$',
never_cache(logout),
{'template_name': 'mama_cas/logout.html'},
name='cas_logout'),
url(r'^cas/validate/$',
never_cache(validate),
name='cas_validate'),
url(r'^cas/serviceValidate/$',
never_cache(service_validate),
name='cas_service_validate'),
url(r'^cas/proxyValidate/$',
never_cache(proxy_validate),
name='cas_proxy_validate'),
url(r'^cas/proxy/$',
never_cache(proxy),
name='cas_proxy'),
)
|
"""
URLconf for CAS server URIs as described in the CAS protocol.
"""
from django.conf.urls import patterns
from django.conf.urls import url
from django.views.decorators.cache import never_cache
from mama_cas.views import login
from mama_cas.views import logout
from mama_cas.views import validate
from mama_cas.views import service_validate
from mama_cas.views import proxy_validate
from mama_cas.views import proxy
urlpatterns = patterns('',
url(r'^login/$',
never_cache(login),
{'template_name': 'mama_cas/login.html'},
name='cas_login'),
url(r'^logout/$',
never_cache(logout),
{'template_name': 'mama_cas/logout.html'},
name='cas_logout'),
url(r'^validate/$',
never_cache(validate),
name='cas_validate'),
url(r'^serviceValidate/$',
never_cache(service_validate),
name='cas_service_validate'),
url(r'^proxyValidate/$',
never_cache(proxy_validate),
name='cas_proxy_validate'),
url(r'^proxy/$',
never_cache(proxy),
name='cas_proxy'),
)
|
Remove prefix from URL paths
|
Remove prefix from URL paths
This path information should be set by the including project.
|
Python
|
bsd-3-clause
|
harlov/django-mama-cas,harlov/django-mama-cas,forcityplatform/django-mama-cas,orbitvu/django-mama-cas,forcityplatform/django-mama-cas,jbittel/django-mama-cas,orbitvu/django-mama-cas,jbittel/django-mama-cas
|
"""
URLconf for CAS server URIs as described in the CAS protocol.
"""
from django.conf.urls import patterns
from django.conf.urls import url
from django.views.decorators.cache import never_cache
from mama_cas.views import login
from mama_cas.views import logout
from mama_cas.views import validate
from mama_cas.views import service_validate
from mama_cas.views import proxy_validate
from mama_cas.views import proxy
urlpatterns = patterns('',
url(r'^cas/login/$',
never_cache(login),
{'template_name': 'mama_cas/login.html'},
name='cas_login'),
url(r'^cas/logout/$',
never_cache(logout),
{'template_name': 'mama_cas/logout.html'},
name='cas_logout'),
url(r'^cas/validate/$',
never_cache(validate),
name='cas_validate'),
url(r'^cas/serviceValidate/$',
never_cache(service_validate),
name='cas_service_validate'),
url(r'^cas/proxyValidate/$',
never_cache(proxy_validate),
name='cas_proxy_validate'),
url(r'^cas/proxy/$',
never_cache(proxy),
name='cas_proxy'),
)
Remove prefix from URL paths
This path information should be set by the including project.
|
"""
URLconf for CAS server URIs as described in the CAS protocol.
"""
from django.conf.urls import patterns
from django.conf.urls import url
from django.views.decorators.cache import never_cache
from mama_cas.views import login
from mama_cas.views import logout
from mama_cas.views import validate
from mama_cas.views import service_validate
from mama_cas.views import proxy_validate
from mama_cas.views import proxy
urlpatterns = patterns('',
url(r'^login/$',
never_cache(login),
{'template_name': 'mama_cas/login.html'},
name='cas_login'),
url(r'^logout/$',
never_cache(logout),
{'template_name': 'mama_cas/logout.html'},
name='cas_logout'),
url(r'^validate/$',
never_cache(validate),
name='cas_validate'),
url(r'^serviceValidate/$',
never_cache(service_validate),
name='cas_service_validate'),
url(r'^proxyValidate/$',
never_cache(proxy_validate),
name='cas_proxy_validate'),
url(r'^proxy/$',
never_cache(proxy),
name='cas_proxy'),
)
|
<commit_before>"""
URLconf for CAS server URIs as described in the CAS protocol.
"""
from django.conf.urls import patterns
from django.conf.urls import url
from django.views.decorators.cache import never_cache
from mama_cas.views import login
from mama_cas.views import logout
from mama_cas.views import validate
from mama_cas.views import service_validate
from mama_cas.views import proxy_validate
from mama_cas.views import proxy
urlpatterns = patterns('',
url(r'^cas/login/$',
never_cache(login),
{'template_name': 'mama_cas/login.html'},
name='cas_login'),
url(r'^cas/logout/$',
never_cache(logout),
{'template_name': 'mama_cas/logout.html'},
name='cas_logout'),
url(r'^cas/validate/$',
never_cache(validate),
name='cas_validate'),
url(r'^cas/serviceValidate/$',
never_cache(service_validate),
name='cas_service_validate'),
url(r'^cas/proxyValidate/$',
never_cache(proxy_validate),
name='cas_proxy_validate'),
url(r'^cas/proxy/$',
never_cache(proxy),
name='cas_proxy'),
)
<commit_msg>Remove prefix from URL paths
This path information should be set by the including project.<commit_after>
|
"""
URLconf for CAS server URIs as described in the CAS protocol.
"""
from django.conf.urls import patterns
from django.conf.urls import url
from django.views.decorators.cache import never_cache
from mama_cas.views import login
from mama_cas.views import logout
from mama_cas.views import validate
from mama_cas.views import service_validate
from mama_cas.views import proxy_validate
from mama_cas.views import proxy
urlpatterns = patterns('',
url(r'^login/$',
never_cache(login),
{'template_name': 'mama_cas/login.html'},
name='cas_login'),
url(r'^logout/$',
never_cache(logout),
{'template_name': 'mama_cas/logout.html'},
name='cas_logout'),
url(r'^validate/$',
never_cache(validate),
name='cas_validate'),
url(r'^serviceValidate/$',
never_cache(service_validate),
name='cas_service_validate'),
url(r'^proxyValidate/$',
never_cache(proxy_validate),
name='cas_proxy_validate'),
url(r'^proxy/$',
never_cache(proxy),
name='cas_proxy'),
)
|
"""
URLconf for CAS server URIs as described in the CAS protocol.
"""
from django.conf.urls import patterns
from django.conf.urls import url
from django.views.decorators.cache import never_cache
from mama_cas.views import login
from mama_cas.views import logout
from mama_cas.views import validate
from mama_cas.views import service_validate
from mama_cas.views import proxy_validate
from mama_cas.views import proxy
urlpatterns = patterns('',
url(r'^cas/login/$',
never_cache(login),
{'template_name': 'mama_cas/login.html'},
name='cas_login'),
url(r'^cas/logout/$',
never_cache(logout),
{'template_name': 'mama_cas/logout.html'},
name='cas_logout'),
url(r'^cas/validate/$',
never_cache(validate),
name='cas_validate'),
url(r'^cas/serviceValidate/$',
never_cache(service_validate),
name='cas_service_validate'),
url(r'^cas/proxyValidate/$',
never_cache(proxy_validate),
name='cas_proxy_validate'),
url(r'^cas/proxy/$',
never_cache(proxy),
name='cas_proxy'),
)
Remove prefix from URL paths
This path information should be set by the including project."""
URLconf for CAS server URIs as described in the CAS protocol.
"""
from django.conf.urls import patterns
from django.conf.urls import url
from django.views.decorators.cache import never_cache
from mama_cas.views import login
from mama_cas.views import logout
from mama_cas.views import validate
from mama_cas.views import service_validate
from mama_cas.views import proxy_validate
from mama_cas.views import proxy
urlpatterns = patterns('',
url(r'^login/$',
never_cache(login),
{'template_name': 'mama_cas/login.html'},
name='cas_login'),
url(r'^logout/$',
never_cache(logout),
{'template_name': 'mama_cas/logout.html'},
name='cas_logout'),
url(r'^validate/$',
never_cache(validate),
name='cas_validate'),
url(r'^serviceValidate/$',
never_cache(service_validate),
name='cas_service_validate'),
url(r'^proxyValidate/$',
never_cache(proxy_validate),
name='cas_proxy_validate'),
url(r'^proxy/$',
never_cache(proxy),
name='cas_proxy'),
)
|
<commit_before>"""
URLconf for CAS server URIs as described in the CAS protocol.
"""
from django.conf.urls import patterns
from django.conf.urls import url
from django.views.decorators.cache import never_cache
from mama_cas.views import login
from mama_cas.views import logout
from mama_cas.views import validate
from mama_cas.views import service_validate
from mama_cas.views import proxy_validate
from mama_cas.views import proxy
urlpatterns = patterns('',
url(r'^cas/login/$',
never_cache(login),
{'template_name': 'mama_cas/login.html'},
name='cas_login'),
url(r'^cas/logout/$',
never_cache(logout),
{'template_name': 'mama_cas/logout.html'},
name='cas_logout'),
url(r'^cas/validate/$',
never_cache(validate),
name='cas_validate'),
url(r'^cas/serviceValidate/$',
never_cache(service_validate),
name='cas_service_validate'),
url(r'^cas/proxyValidate/$',
never_cache(proxy_validate),
name='cas_proxy_validate'),
url(r'^cas/proxy/$',
never_cache(proxy),
name='cas_proxy'),
)
<commit_msg>Remove prefix from URL paths
This path information should be set by the including project.<commit_after>"""
URLconf for CAS server URIs as described in the CAS protocol.
"""
from django.conf.urls import patterns
from django.conf.urls import url
from django.views.decorators.cache import never_cache
from mama_cas.views import login
from mama_cas.views import logout
from mama_cas.views import validate
from mama_cas.views import service_validate
from mama_cas.views import proxy_validate
from mama_cas.views import proxy
urlpatterns = patterns('',
url(r'^login/$',
never_cache(login),
{'template_name': 'mama_cas/login.html'},
name='cas_login'),
url(r'^logout/$',
never_cache(logout),
{'template_name': 'mama_cas/logout.html'},
name='cas_logout'),
url(r'^validate/$',
never_cache(validate),
name='cas_validate'),
url(r'^serviceValidate/$',
never_cache(service_validate),
name='cas_service_validate'),
url(r'^proxyValidate/$',
never_cache(proxy_validate),
name='cas_proxy_validate'),
url(r'^proxy/$',
never_cache(proxy),
name='cas_proxy'),
)
|
b379c60e59584c931cc441fd1d64a9049d1c2b55
|
src/formatter.py
|
src/formatter.py
|
import json
from collections import OrderedDict
from .command import Command
from .settings import FormatterSettings
class Formatter():
def __init__(self, name, command='', args=''):
self.__name = name
self.__command = command.split(' ') if command else []
self.__args = args.split(' ') if args else []
self.__settings = FormatterSettings(name.lower())
@property
def name(self):
return self.__name
@property
def sources(self):
return self.__settings.sources
@property
def options(self):
return self.__settings.options
@property
def format_on_save(self):
return self.__settings.format_on_save
@format_on_save.setter
def format_on_save(self, value):
self.__settings.format_on_save = value
def format(self, input):
command = self.__command
options = self.options
args = self.__args
return Command(command + options + args).run(input)
class JsonFormatter(Formatter):
def __init__(self):
super().__init__(name='JSON')
def format(self, input):
try:
data = json.loads(input, object_pairs_hook=OrderedDict)
return json.dumps(data, indent=4), None
except ValueError:
return None, 'Invalid JSON'
|
import json
from collections import OrderedDict
from .command import Command
from .settings import FormatterSettings
class Formatter():
def __init__(self, name, command=None, args=None, formatter=None):
self.__name = name
self.__format = formatter
self.__settings = FormatterSettings(name.lower())
if not formatter:
command = command.split(' ') if command else []
options = self.__settings.options
args = args.split(' ') if args else []
shell_command = Command(command + options + args)
def external_format(input):
return shell_command.run(input)
self.__format = external_format
@property
def name(self):
return self.__name
@property
def sources(self):
return self.__settings.sources
@property
def format_on_save(self):
return self.__settings.format_on_save
@format_on_save.setter
def format_on_save(self, value):
self.__settings.format_on_save = value
def format(self, input):
return self.__format(input)
class JsonFormatter(Formatter):
def __init__(self):
def format_json(input):
try:
data = json.loads(input, object_pairs_hook=OrderedDict)
return json.dumps(data, indent=4), None
except ValueError:
return None, 'Invalid JSON'
super().__init__(name='JSON', formatter=format_json)
|
Allow format function to be provided to Formatter constructor
|
Allow format function to be provided to Formatter constructor
|
Python
|
mit
|
Rypac/sublime-format
|
import json
from collections import OrderedDict
from .command import Command
from .settings import FormatterSettings
class Formatter():
def __init__(self, name, command='', args=''):
self.__name = name
self.__command = command.split(' ') if command else []
self.__args = args.split(' ') if args else []
self.__settings = FormatterSettings(name.lower())
@property
def name(self):
return self.__name
@property
def sources(self):
return self.__settings.sources
@property
def options(self):
return self.__settings.options
@property
def format_on_save(self):
return self.__settings.format_on_save
@format_on_save.setter
def format_on_save(self, value):
self.__settings.format_on_save = value
def format(self, input):
command = self.__command
options = self.options
args = self.__args
return Command(command + options + args).run(input)
class JsonFormatter(Formatter):
def __init__(self):
super().__init__(name='JSON')
def format(self, input):
try:
data = json.loads(input, object_pairs_hook=OrderedDict)
return json.dumps(data, indent=4), None
except ValueError:
return None, 'Invalid JSON'
Allow format function to be provided to Formatter constructor
|
import json
from collections import OrderedDict
from .command import Command
from .settings import FormatterSettings
class Formatter():
def __init__(self, name, command=None, args=None, formatter=None):
self.__name = name
self.__format = formatter
self.__settings = FormatterSettings(name.lower())
if not formatter:
command = command.split(' ') if command else []
options = self.__settings.options
args = args.split(' ') if args else []
shell_command = Command(command + options + args)
def external_format(input):
return shell_command.run(input)
self.__format = external_format
@property
def name(self):
return self.__name
@property
def sources(self):
return self.__settings.sources
@property
def format_on_save(self):
return self.__settings.format_on_save
@format_on_save.setter
def format_on_save(self, value):
self.__settings.format_on_save = value
def format(self, input):
return self.__format(input)
class JsonFormatter(Formatter):
def __init__(self):
def format_json(input):
try:
data = json.loads(input, object_pairs_hook=OrderedDict)
return json.dumps(data, indent=4), None
except ValueError:
return None, 'Invalid JSON'
super().__init__(name='JSON', formatter=format_json)
|
<commit_before>import json
from collections import OrderedDict
from .command import Command
from .settings import FormatterSettings
class Formatter():
def __init__(self, name, command='', args=''):
self.__name = name
self.__command = command.split(' ') if command else []
self.__args = args.split(' ') if args else []
self.__settings = FormatterSettings(name.lower())
@property
def name(self):
return self.__name
@property
def sources(self):
return self.__settings.sources
@property
def options(self):
return self.__settings.options
@property
def format_on_save(self):
return self.__settings.format_on_save
@format_on_save.setter
def format_on_save(self, value):
self.__settings.format_on_save = value
def format(self, input):
command = self.__command
options = self.options
args = self.__args
return Command(command + options + args).run(input)
class JsonFormatter(Formatter):
def __init__(self):
super().__init__(name='JSON')
def format(self, input):
try:
data = json.loads(input, object_pairs_hook=OrderedDict)
return json.dumps(data, indent=4), None
except ValueError:
return None, 'Invalid JSON'
<commit_msg>Allow format function to be provided to Formatter constructor<commit_after>
|
import json
from collections import OrderedDict
from .command import Command
from .settings import FormatterSettings
class Formatter():
def __init__(self, name, command=None, args=None, formatter=None):
self.__name = name
self.__format = formatter
self.__settings = FormatterSettings(name.lower())
if not formatter:
command = command.split(' ') if command else []
options = self.__settings.options
args = args.split(' ') if args else []
shell_command = Command(command + options + args)
def external_format(input):
return shell_command.run(input)
self.__format = external_format
@property
def name(self):
return self.__name
@property
def sources(self):
return self.__settings.sources
@property
def format_on_save(self):
return self.__settings.format_on_save
@format_on_save.setter
def format_on_save(self, value):
self.__settings.format_on_save = value
def format(self, input):
return self.__format(input)
class JsonFormatter(Formatter):
def __init__(self):
def format_json(input):
try:
data = json.loads(input, object_pairs_hook=OrderedDict)
return json.dumps(data, indent=4), None
except ValueError:
return None, 'Invalid JSON'
super().__init__(name='JSON', formatter=format_json)
|
import json
from collections import OrderedDict
from .command import Command
from .settings import FormatterSettings
class Formatter():
def __init__(self, name, command='', args=''):
self.__name = name
self.__command = command.split(' ') if command else []
self.__args = args.split(' ') if args else []
self.__settings = FormatterSettings(name.lower())
@property
def name(self):
return self.__name
@property
def sources(self):
return self.__settings.sources
@property
def options(self):
return self.__settings.options
@property
def format_on_save(self):
return self.__settings.format_on_save
@format_on_save.setter
def format_on_save(self, value):
self.__settings.format_on_save = value
def format(self, input):
command = self.__command
options = self.options
args = self.__args
return Command(command + options + args).run(input)
class JsonFormatter(Formatter):
def __init__(self):
super().__init__(name='JSON')
def format(self, input):
try:
data = json.loads(input, object_pairs_hook=OrderedDict)
return json.dumps(data, indent=4), None
except ValueError:
return None, 'Invalid JSON'
Allow format function to be provided to Formatter constructorimport json
from collections import OrderedDict
from .command import Command
from .settings import FormatterSettings
class Formatter():
def __init__(self, name, command=None, args=None, formatter=None):
self.__name = name
self.__format = formatter
self.__settings = FormatterSettings(name.lower())
if not formatter:
command = command.split(' ') if command else []
options = self.__settings.options
args = args.split(' ') if args else []
shell_command = Command(command + options + args)
def external_format(input):
return shell_command.run(input)
self.__format = external_format
@property
def name(self):
return self.__name
@property
def sources(self):
return self.__settings.sources
@property
def format_on_save(self):
return self.__settings.format_on_save
@format_on_save.setter
def format_on_save(self, value):
self.__settings.format_on_save = value
def format(self, input):
return self.__format(input)
class JsonFormatter(Formatter):
def __init__(self):
def format_json(input):
try:
data = json.loads(input, object_pairs_hook=OrderedDict)
return json.dumps(data, indent=4), None
except ValueError:
return None, 'Invalid JSON'
super().__init__(name='JSON', formatter=format_json)
|
<commit_before>import json
from collections import OrderedDict
from .command import Command
from .settings import FormatterSettings
class Formatter():
def __init__(self, name, command='', args=''):
self.__name = name
self.__command = command.split(' ') if command else []
self.__args = args.split(' ') if args else []
self.__settings = FormatterSettings(name.lower())
@property
def name(self):
return self.__name
@property
def sources(self):
return self.__settings.sources
@property
def options(self):
return self.__settings.options
@property
def format_on_save(self):
return self.__settings.format_on_save
@format_on_save.setter
def format_on_save(self, value):
self.__settings.format_on_save = value
def format(self, input):
command = self.__command
options = self.options
args = self.__args
return Command(command + options + args).run(input)
class JsonFormatter(Formatter):
def __init__(self):
super().__init__(name='JSON')
def format(self, input):
try:
data = json.loads(input, object_pairs_hook=OrderedDict)
return json.dumps(data, indent=4), None
except ValueError:
return None, 'Invalid JSON'
<commit_msg>Allow format function to be provided to Formatter constructor<commit_after>import json
from collections import OrderedDict
from .command import Command
from .settings import FormatterSettings
class Formatter():
def __init__(self, name, command=None, args=None, formatter=None):
self.__name = name
self.__format = formatter
self.__settings = FormatterSettings(name.lower())
if not formatter:
command = command.split(' ') if command else []
options = self.__settings.options
args = args.split(' ') if args else []
shell_command = Command(command + options + args)
def external_format(input):
return shell_command.run(input)
self.__format = external_format
@property
def name(self):
return self.__name
@property
def sources(self):
return self.__settings.sources
@property
def format_on_save(self):
return self.__settings.format_on_save
@format_on_save.setter
def format_on_save(self, value):
self.__settings.format_on_save = value
def format(self, input):
return self.__format(input)
class JsonFormatter(Formatter):
def __init__(self):
def format_json(input):
try:
data = json.loads(input, object_pairs_hook=OrderedDict)
return json.dumps(data, indent=4), None
except ValueError:
return None, 'Invalid JSON'
super().__init__(name='JSON', formatter=format_json)
|
bd577d23c5cdee1ae2d5c76a712a0519265ee13d
|
src/event_manager/views.py
|
src/event_manager/views.py
|
from django.shortcuts import render
from event_manager.models import Suggestion, Event
from django.contrib.auth.decorators import login_required
def home(request):
return render(request, 'login2.html', {})
#FIXME: Remove comment when login works
#@login_required
def my_suggestions(request):
#FIXME: Need to only select so many, also only yours
suggestions = Suggestion.objects.values()
return render(request, 'suggestions.html', suggestions)
#FIXME: Remove comment when login works
#@login_required
def my_events(request):
#FIXME: Need to only select so many, also only yours
events = Event.objects.values()
return render(request, 'events.html', events)
|
from django.shortcuts import render
from event_manager.models import Suggestion, Event
from django.contrib.auth.decorators import login_required
def home(request):
return render(request, 'login2.html', {})
#FIXME: Remove comment when login works
#@login_required
def my_suggestions(request):
#FIXME: Need to only select so many, also only yours
suggestions = Suggestion.objects.values()
return render(request, 'suggestions.html', {'suggestions': suggestions})
#FIXME: Remove comment when login works
#@login_required
def my_events(request):
#FIXME: Need to only select so many, also only yours
events = Event.objects.values()
return render(request, 'events.html', {'events': events})
|
Switch from raw dict to context dict
|
Switch from raw dict to context dict
|
Python
|
agpl-3.0
|
DavidJFelix/hatchit,DavidJFelix/hatchit,DavidJFelix/hatchit
|
from django.shortcuts import render
from event_manager.models import Suggestion, Event
from django.contrib.auth.decorators import login_required
def home(request):
return render(request, 'login2.html', {})
#FIXME: Remove comment when login works
#@login_required
def my_suggestions(request):
#FIXME: Need to only select so many, also only yours
suggestions = Suggestion.objects.values()
return render(request, 'suggestions.html', suggestions)
#FIXME: Remove comment when login works
#@login_required
def my_events(request):
#FIXME: Need to only select so many, also only yours
events = Event.objects.values()
return render(request, 'events.html', events)
Switch from raw dict to context dict
|
from django.shortcuts import render
from event_manager.models import Suggestion, Event
from django.contrib.auth.decorators import login_required
def home(request):
return render(request, 'login2.html', {})
#FIXME: Remove comment when login works
#@login_required
def my_suggestions(request):
#FIXME: Need to only select so many, also only yours
suggestions = Suggestion.objects.values()
return render(request, 'suggestions.html', {'suggestions': suggestions})
#FIXME: Remove comment when login works
#@login_required
def my_events(request):
#FIXME: Need to only select so many, also only yours
events = Event.objects.values()
return render(request, 'events.html', {'events': events})
|
<commit_before>from django.shortcuts import render
from event_manager.models import Suggestion, Event
from django.contrib.auth.decorators import login_required
def home(request):
return render(request, 'login2.html', {})
#FIXME: Remove comment when login works
#@login_required
def my_suggestions(request):
#FIXME: Need to only select so many, also only yours
suggestions = Suggestion.objects.values()
return render(request, 'suggestions.html', suggestions)
#FIXME: Remove comment when login works
#@login_required
def my_events(request):
#FIXME: Need to only select so many, also only yours
events = Event.objects.values()
return render(request, 'events.html', events)
<commit_msg>Switch from raw dict to context dict<commit_after>
|
from django.shortcuts import render
from event_manager.models import Suggestion, Event
from django.contrib.auth.decorators import login_required
def home(request):
return render(request, 'login2.html', {})
#FIXME: Remove comment when login works
#@login_required
def my_suggestions(request):
#FIXME: Need to only select so many, also only yours
suggestions = Suggestion.objects.values()
return render(request, 'suggestions.html', {'suggestions': suggestions})
#FIXME: Remove comment when login works
#@login_required
def my_events(request):
#FIXME: Need to only select so many, also only yours
events = Event.objects.values()
return render(request, 'events.html', {'events': events})
|
from django.shortcuts import render
from event_manager.models import Suggestion, Event
from django.contrib.auth.decorators import login_required
def home(request):
return render(request, 'login2.html', {})
#FIXME: Remove comment when login works
#@login_required
def my_suggestions(request):
#FIXME: Need to only select so many, also only yours
suggestions = Suggestion.objects.values()
return render(request, 'suggestions.html', suggestions)
#FIXME: Remove comment when login works
#@login_required
def my_events(request):
#FIXME: Need to only select so many, also only yours
events = Event.objects.values()
return render(request, 'events.html', events)
Switch from raw dict to context dictfrom django.shortcuts import render
from event_manager.models import Suggestion, Event
from django.contrib.auth.decorators import login_required
def home(request):
return render(request, 'login2.html', {})
#FIXME: Remove comment when login works
#@login_required
def my_suggestions(request):
#FIXME: Need to only select so many, also only yours
suggestions = Suggestion.objects.values()
return render(request, 'suggestions.html', {'suggestions': suggestions})
#FIXME: Remove comment when login works
#@login_required
def my_events(request):
#FIXME: Need to only select so many, also only yours
events = Event.objects.values()
return render(request, 'events.html', {'events': events})
|
<commit_before>from django.shortcuts import render
from event_manager.models import Suggestion, Event
from django.contrib.auth.decorators import login_required
def home(request):
return render(request, 'login2.html', {})
#FIXME: Remove comment when login works
#@login_required
def my_suggestions(request):
#FIXME: Need to only select so many, also only yours
suggestions = Suggestion.objects.values()
return render(request, 'suggestions.html', suggestions)
#FIXME: Remove comment when login works
#@login_required
def my_events(request):
#FIXME: Need to only select so many, also only yours
events = Event.objects.values()
return render(request, 'events.html', events)
<commit_msg>Switch from raw dict to context dict<commit_after>from django.shortcuts import render
from event_manager.models import Suggestion, Event
from django.contrib.auth.decorators import login_required
def home(request):
return render(request, 'login2.html', {})
#FIXME: Remove comment when login works
#@login_required
def my_suggestions(request):
#FIXME: Need to only select so many, also only yours
suggestions = Suggestion.objects.values()
return render(request, 'suggestions.html', {'suggestions': suggestions})
#FIXME: Remove comment when login works
#@login_required
def my_events(request):
#FIXME: Need to only select so many, also only yours
events = Event.objects.values()
return render(request, 'events.html', {'events': events})
|
1ae797e18286fd781797689a567f9d23ab3179d1
|
modules/tools.py
|
modules/tools.py
|
inf = infinity = float('inf')
class Timer:
def __init__(self, duration, *callbacks):
self.duration = duration
self.callbacks = list(callbacks)
self.elapsed = 0
self.expired = False
self.paused = False
def register(self, callback):
self.callbacks.append(callback)
def unregister(self, callback):
self.callbacks.remove(callback)
def pause(self):
self.paused = True
def unpause(self):
self.pause = False
def update(self, time):
if self.expired:
return
if self.elapsed > self.duration:
return
self.elapsed += time
if self.elapsed > self.duration:
self.expired = True
for callback in self.callbacks:
callback()
def has_expired(self):
return self.expired
|
inf = infinity = float('inf')
class Timer:
def __init__(self, duration, *callbacks):
self.duration = duration
self.callbacks = list(callbacks)
self.elapsed = 0
self.expired = False
self.paused = False
def update(self, time):
if self.expired:
return
if self.elapsed > self.duration:
return
self.elapsed += time
if self.elapsed > self.duration:
self.expired = True
for callback in self.callbacks:
callback()
def restart(self):
self.expired = False
self.elapsed = 0
def pause(self):
self.paused = True
def unpause(self):
self.pause = False
def register(self, callback):
self.callbacks.append(callback)
def unregister(self, callback):
self.callbacks.remove(callback)
def has_expired(self):
return self.expired
|
Add a restart() method to Timer.
|
Add a restart() method to Timer.
|
Python
|
mit
|
kxgames/kxg
|
inf = infinity = float('inf')
class Timer:
def __init__(self, duration, *callbacks):
self.duration = duration
self.callbacks = list(callbacks)
self.elapsed = 0
self.expired = False
self.paused = False
def register(self, callback):
self.callbacks.append(callback)
def unregister(self, callback):
self.callbacks.remove(callback)
def pause(self):
self.paused = True
def unpause(self):
self.pause = False
def update(self, time):
if self.expired:
return
if self.elapsed > self.duration:
return
self.elapsed += time
if self.elapsed > self.duration:
self.expired = True
for callback in self.callbacks:
callback()
def has_expired(self):
return self.expired
Add a restart() method to Timer.
|
inf = infinity = float('inf')
class Timer:
def __init__(self, duration, *callbacks):
self.duration = duration
self.callbacks = list(callbacks)
self.elapsed = 0
self.expired = False
self.paused = False
def update(self, time):
if self.expired:
return
if self.elapsed > self.duration:
return
self.elapsed += time
if self.elapsed > self.duration:
self.expired = True
for callback in self.callbacks:
callback()
def restart(self):
self.expired = False
self.elapsed = 0
def pause(self):
self.paused = True
def unpause(self):
self.pause = False
def register(self, callback):
self.callbacks.append(callback)
def unregister(self, callback):
self.callbacks.remove(callback)
def has_expired(self):
return self.expired
|
<commit_before>inf = infinity = float('inf')
class Timer:
def __init__(self, duration, *callbacks):
self.duration = duration
self.callbacks = list(callbacks)
self.elapsed = 0
self.expired = False
self.paused = False
def register(self, callback):
self.callbacks.append(callback)
def unregister(self, callback):
self.callbacks.remove(callback)
def pause(self):
self.paused = True
def unpause(self):
self.pause = False
def update(self, time):
if self.expired:
return
if self.elapsed > self.duration:
return
self.elapsed += time
if self.elapsed > self.duration:
self.expired = True
for callback in self.callbacks:
callback()
def has_expired(self):
return self.expired
<commit_msg>Add a restart() method to Timer.<commit_after>
|
inf = infinity = float('inf')
class Timer:
def __init__(self, duration, *callbacks):
self.duration = duration
self.callbacks = list(callbacks)
self.elapsed = 0
self.expired = False
self.paused = False
def update(self, time):
if self.expired:
return
if self.elapsed > self.duration:
return
self.elapsed += time
if self.elapsed > self.duration:
self.expired = True
for callback in self.callbacks:
callback()
def restart(self):
self.expired = False
self.elapsed = 0
def pause(self):
self.paused = True
def unpause(self):
self.pause = False
def register(self, callback):
self.callbacks.append(callback)
def unregister(self, callback):
self.callbacks.remove(callback)
def has_expired(self):
return self.expired
|
inf = infinity = float('inf')
class Timer:
def __init__(self, duration, *callbacks):
self.duration = duration
self.callbacks = list(callbacks)
self.elapsed = 0
self.expired = False
self.paused = False
def register(self, callback):
self.callbacks.append(callback)
def unregister(self, callback):
self.callbacks.remove(callback)
def pause(self):
self.paused = True
def unpause(self):
self.pause = False
def update(self, time):
if self.expired:
return
if self.elapsed > self.duration:
return
self.elapsed += time
if self.elapsed > self.duration:
self.expired = True
for callback in self.callbacks:
callback()
def has_expired(self):
return self.expired
Add a restart() method to Timer.inf = infinity = float('inf')
class Timer:
def __init__(self, duration, *callbacks):
self.duration = duration
self.callbacks = list(callbacks)
self.elapsed = 0
self.expired = False
self.paused = False
def update(self, time):
if self.expired:
return
if self.elapsed > self.duration:
return
self.elapsed += time
if self.elapsed > self.duration:
self.expired = True
for callback in self.callbacks:
callback()
def restart(self):
self.expired = False
self.elapsed = 0
def pause(self):
self.paused = True
def unpause(self):
self.pause = False
def register(self, callback):
self.callbacks.append(callback)
def unregister(self, callback):
self.callbacks.remove(callback)
def has_expired(self):
return self.expired
|
<commit_before>inf = infinity = float('inf')
class Timer:
def __init__(self, duration, *callbacks):
self.duration = duration
self.callbacks = list(callbacks)
self.elapsed = 0
self.expired = False
self.paused = False
def register(self, callback):
self.callbacks.append(callback)
def unregister(self, callback):
self.callbacks.remove(callback)
def pause(self):
self.paused = True
def unpause(self):
self.pause = False
def update(self, time):
if self.expired:
return
if self.elapsed > self.duration:
return
self.elapsed += time
if self.elapsed > self.duration:
self.expired = True
for callback in self.callbacks:
callback()
def has_expired(self):
return self.expired
<commit_msg>Add a restart() method to Timer.<commit_after>inf = infinity = float('inf')
class Timer:
def __init__(self, duration, *callbacks):
self.duration = duration
self.callbacks = list(callbacks)
self.elapsed = 0
self.expired = False
self.paused = False
def update(self, time):
if self.expired:
return
if self.elapsed > self.duration:
return
self.elapsed += time
if self.elapsed > self.duration:
self.expired = True
for callback in self.callbacks:
callback()
def restart(self):
self.expired = False
self.elapsed = 0
def pause(self):
self.paused = True
def unpause(self):
self.pause = False
def register(self, callback):
self.callbacks.append(callback)
def unregister(self, callback):
self.callbacks.remove(callback)
def has_expired(self):
return self.expired
|
5cdc5755b1a687c9b34bfd575163ac367816f12a
|
migrations/versions/3961ccb5d884_increase_artifact_name_length.py
|
migrations/versions/3961ccb5d884_increase_artifact_name_length.py
|
"""increase artifact name length
Revision ID: 3961ccb5d884
Revises: 1b229c83511d
Create Date: 2015-11-05 15:34:28.189700
"""
# revision identifiers, used by Alembic.
revision = '3961ccb5d884'
down_revision = '1b229c83511d'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.alter_column('artifact', 'name', sa.VARCHAR(1024))
def downgrade():
op.alter_column('artifact', 'name', sa.VARCHAR(128))
|
"""increase artifact name length
Revision ID: 3961ccb5d884
Revises: 1b229c83511d
Create Date: 2015-11-05 15:34:28.189700
"""
# revision identifiers, used by Alembic.
revision = '3961ccb5d884'
down_revision = '1b229c83511d'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.alter_column('artifact', 'name', type_=sa.VARCHAR(1024))
def downgrade():
op.alter_column('artifact', 'name', type_=sa.VARCHAR(128))
|
Fix extend artifact name migration script.
|
Fix extend artifact name migration script.
Test Plan: ran migration locally and checked table schema
Reviewers: anupc, kylec
Reviewed By: kylec
Subscribers: changesbot
Differential Revision: https://tails.corp.dropbox.com/D151824
|
Python
|
apache-2.0
|
dropbox/changes,dropbox/changes,dropbox/changes,dropbox/changes
|
"""increase artifact name length
Revision ID: 3961ccb5d884
Revises: 1b229c83511d
Create Date: 2015-11-05 15:34:28.189700
"""
# revision identifiers, used by Alembic.
revision = '3961ccb5d884'
down_revision = '1b229c83511d'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.alter_column('artifact', 'name', sa.VARCHAR(1024))
def downgrade():
op.alter_column('artifact', 'name', sa.VARCHAR(128))
Fix extend artifact name migration script.
Test Plan: ran migration locally and checked table schema
Reviewers: anupc, kylec
Reviewed By: kylec
Subscribers: changesbot
Differential Revision: https://tails.corp.dropbox.com/D151824
|
"""increase artifact name length
Revision ID: 3961ccb5d884
Revises: 1b229c83511d
Create Date: 2015-11-05 15:34:28.189700
"""
# revision identifiers, used by Alembic.
revision = '3961ccb5d884'
down_revision = '1b229c83511d'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.alter_column('artifact', 'name', type_=sa.VARCHAR(1024))
def downgrade():
op.alter_column('artifact', 'name', type_=sa.VARCHAR(128))
|
<commit_before>"""increase artifact name length
Revision ID: 3961ccb5d884
Revises: 1b229c83511d
Create Date: 2015-11-05 15:34:28.189700
"""
# revision identifiers, used by Alembic.
revision = '3961ccb5d884'
down_revision = '1b229c83511d'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.alter_column('artifact', 'name', sa.VARCHAR(1024))
def downgrade():
op.alter_column('artifact', 'name', sa.VARCHAR(128))
<commit_msg>Fix extend artifact name migration script.
Test Plan: ran migration locally and checked table schema
Reviewers: anupc, kylec
Reviewed By: kylec
Subscribers: changesbot
Differential Revision: https://tails.corp.dropbox.com/D151824<commit_after>
|
"""increase artifact name length
Revision ID: 3961ccb5d884
Revises: 1b229c83511d
Create Date: 2015-11-05 15:34:28.189700
"""
# revision identifiers, used by Alembic.
revision = '3961ccb5d884'
down_revision = '1b229c83511d'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.alter_column('artifact', 'name', type_=sa.VARCHAR(1024))
def downgrade():
op.alter_column('artifact', 'name', type_=sa.VARCHAR(128))
|
"""increase artifact name length
Revision ID: 3961ccb5d884
Revises: 1b229c83511d
Create Date: 2015-11-05 15:34:28.189700
"""
# revision identifiers, used by Alembic.
revision = '3961ccb5d884'
down_revision = '1b229c83511d'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.alter_column('artifact', 'name', sa.VARCHAR(1024))
def downgrade():
op.alter_column('artifact', 'name', sa.VARCHAR(128))
Fix extend artifact name migration script.
Test Plan: ran migration locally and checked table schema
Reviewers: anupc, kylec
Reviewed By: kylec
Subscribers: changesbot
Differential Revision: https://tails.corp.dropbox.com/D151824"""increase artifact name length
Revision ID: 3961ccb5d884
Revises: 1b229c83511d
Create Date: 2015-11-05 15:34:28.189700
"""
# revision identifiers, used by Alembic.
revision = '3961ccb5d884'
down_revision = '1b229c83511d'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.alter_column('artifact', 'name', type_=sa.VARCHAR(1024))
def downgrade():
op.alter_column('artifact', 'name', type_=sa.VARCHAR(128))
|
<commit_before>"""increase artifact name length
Revision ID: 3961ccb5d884
Revises: 1b229c83511d
Create Date: 2015-11-05 15:34:28.189700
"""
# revision identifiers, used by Alembic.
revision = '3961ccb5d884'
down_revision = '1b229c83511d'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.alter_column('artifact', 'name', sa.VARCHAR(1024))
def downgrade():
op.alter_column('artifact', 'name', sa.VARCHAR(128))
<commit_msg>Fix extend artifact name migration script.
Test Plan: ran migration locally and checked table schema
Reviewers: anupc, kylec
Reviewed By: kylec
Subscribers: changesbot
Differential Revision: https://tails.corp.dropbox.com/D151824<commit_after>"""increase artifact name length
Revision ID: 3961ccb5d884
Revises: 1b229c83511d
Create Date: 2015-11-05 15:34:28.189700
"""
# revision identifiers, used by Alembic.
revision = '3961ccb5d884'
down_revision = '1b229c83511d'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.alter_column('artifact', 'name', type_=sa.VARCHAR(1024))
def downgrade():
op.alter_column('artifact', 'name', type_=sa.VARCHAR(128))
|
223f248a1d1791b1a098876317905f4930330487
|
salesforce/backend/__init__.py
|
salesforce/backend/__init__.py
|
# django-salesforce
#
# by Phil Christensen
# (c) 2012-2013 Freelancers Union (http://www.freelancersunion.org)
# See LICENSE.md for details
#
"""
Database backend for the Salesforce API.
"""
import socket
from django.conf import settings
import logging
log = logging.getLogger(__name__)
sf_alias = getattr(settings, 'SALESFORCE_DB_ALIAS', 'salesforce')
# The maximal number of retries for requests to SF API.
MAX_RETRIES = getattr(settings, 'REQUESTS_MAX_RETRIES', 1)
def getaddrinfo_wrapper(host, port, family=socket.AF_INET, socktype=0, proto=0, flags=0):
return orig_getaddrinfo(host, port, family, socktype, proto, flags)
# patch to IPv4 if required and not patched by anything other yet
if getattr(settings, 'IPV4_ONLY', False) and socket.getaddrinfo.__module__ in ('socket', '_socket'):
log.info("Patched socket to IPv4 only")
orig_getaddrinfo = socket.getaddrinfo
# replace the original socket.getaddrinfo by our version
socket.getaddrinfo = getaddrinfo_wrapper
|
# django-salesforce
#
# by Phil Christensen
# (c) 2012-2013 Freelancers Union (http://www.freelancersunion.org)
# See LICENSE.md for details
#
"""
Database backend for the Salesforce API.
No code in this directory is used with standard databases, even if a standard
database is used for running some application tests on objects defined by
SalesforceModel. All code for SF models that can be used with non SF databases
should be located directly in the 'salesforce' directory in files 'models.py',
'fields.py', 'manager.py', 'router.py', 'admin.py'.
Incorrectly located files: (It is better not to change it now.)
backend/manager.py => manager.py
auth.py => backend/auth.py
"""
import socket
from django.conf import settings
import logging
log = logging.getLogger(__name__)
sf_alias = getattr(settings, 'SALESFORCE_DB_ALIAS', 'salesforce')
# The maximal number of retries for requests to SF API.
MAX_RETRIES = getattr(settings, 'REQUESTS_MAX_RETRIES', 1)
def getaddrinfo_wrapper(host, port, family=socket.AF_INET, socktype=0, proto=0, flags=0):
return orig_getaddrinfo(host, port, family, socktype, proto, flags)
# patch to IPv4 if required and not patched by anything other yet
if getattr(settings, 'IPV4_ONLY', False) and socket.getaddrinfo.__module__ in ('socket', '_socket'):
log.info("Patched socket to IPv4 only")
orig_getaddrinfo = socket.getaddrinfo
# replace the original socket.getaddrinfo by our version
socket.getaddrinfo = getaddrinfo_wrapper
|
Comment about the directory structure
|
Comment about the directory structure
|
Python
|
mit
|
django-salesforce/django-salesforce,chromakey/django-salesforce,django-salesforce/django-salesforce,philchristensen/django-salesforce,chromakey/django-salesforce,philchristensen/django-salesforce,chromakey/django-salesforce,hynekcer/django-salesforce,philchristensen/django-salesforce,hynekcer/django-salesforce,hynekcer/django-salesforce,django-salesforce/django-salesforce
|
# django-salesforce
#
# by Phil Christensen
# (c) 2012-2013 Freelancers Union (http://www.freelancersunion.org)
# See LICENSE.md for details
#
"""
Database backend for the Salesforce API.
"""
import socket
from django.conf import settings
import logging
log = logging.getLogger(__name__)
sf_alias = getattr(settings, 'SALESFORCE_DB_ALIAS', 'salesforce')
# The maximal number of retries for requests to SF API.
MAX_RETRIES = getattr(settings, 'REQUESTS_MAX_RETRIES', 1)
def getaddrinfo_wrapper(host, port, family=socket.AF_INET, socktype=0, proto=0, flags=0):
return orig_getaddrinfo(host, port, family, socktype, proto, flags)
# patch to IPv4 if required and not patched by anything other yet
if getattr(settings, 'IPV4_ONLY', False) and socket.getaddrinfo.__module__ in ('socket', '_socket'):
log.info("Patched socket to IPv4 only")
orig_getaddrinfo = socket.getaddrinfo
# replace the original socket.getaddrinfo by our version
socket.getaddrinfo = getaddrinfo_wrapper
Comment about the directory structure
|
# django-salesforce
#
# by Phil Christensen
# (c) 2012-2013 Freelancers Union (http://www.freelancersunion.org)
# See LICENSE.md for details
#
"""
Database backend for the Salesforce API.
No code in this directory is used with standard databases, even if a standard
database is used for running some application tests on objects defined by
SalesforceModel. All code for SF models that can be used with non SF databases
should be located directly in the 'salesforce' directory in files 'models.py',
'fields.py', 'manager.py', 'router.py', 'admin.py'.
Incorrectly located files: (It is better not to change it now.)
backend/manager.py => manager.py
auth.py => backend/auth.py
"""
import socket
from django.conf import settings
import logging
log = logging.getLogger(__name__)
sf_alias = getattr(settings, 'SALESFORCE_DB_ALIAS', 'salesforce')
# The maximal number of retries for requests to SF API.
MAX_RETRIES = getattr(settings, 'REQUESTS_MAX_RETRIES', 1)
def getaddrinfo_wrapper(host, port, family=socket.AF_INET, socktype=0, proto=0, flags=0):
return orig_getaddrinfo(host, port, family, socktype, proto, flags)
# patch to IPv4 if required and not patched by anything other yet
if getattr(settings, 'IPV4_ONLY', False) and socket.getaddrinfo.__module__ in ('socket', '_socket'):
log.info("Patched socket to IPv4 only")
orig_getaddrinfo = socket.getaddrinfo
# replace the original socket.getaddrinfo by our version
socket.getaddrinfo = getaddrinfo_wrapper
|
<commit_before># django-salesforce
#
# by Phil Christensen
# (c) 2012-2013 Freelancers Union (http://www.freelancersunion.org)
# See LICENSE.md for details
#
"""
Database backend for the Salesforce API.
"""
import socket
from django.conf import settings
import logging
log = logging.getLogger(__name__)
sf_alias = getattr(settings, 'SALESFORCE_DB_ALIAS', 'salesforce')
# The maximal number of retries for requests to SF API.
MAX_RETRIES = getattr(settings, 'REQUESTS_MAX_RETRIES', 1)
def getaddrinfo_wrapper(host, port, family=socket.AF_INET, socktype=0, proto=0, flags=0):
return orig_getaddrinfo(host, port, family, socktype, proto, flags)
# patch to IPv4 if required and not patched by anything other yet
if getattr(settings, 'IPV4_ONLY', False) and socket.getaddrinfo.__module__ in ('socket', '_socket'):
log.info("Patched socket to IPv4 only")
orig_getaddrinfo = socket.getaddrinfo
# replace the original socket.getaddrinfo by our version
socket.getaddrinfo = getaddrinfo_wrapper
<commit_msg>Comment about the directory structure<commit_after>
|
# django-salesforce
#
# by Phil Christensen
# (c) 2012-2013 Freelancers Union (http://www.freelancersunion.org)
# See LICENSE.md for details
#
"""
Database backend for the Salesforce API.
No code in this directory is used with standard databases, even if a standard
database is used for running some application tests on objects defined by
SalesforceModel. All code for SF models that can be used with non SF databases
should be located directly in the 'salesforce' directory in files 'models.py',
'fields.py', 'manager.py', 'router.py', 'admin.py'.
Incorrectly located files: (It is better not to change it now.)
backend/manager.py => manager.py
auth.py => backend/auth.py
"""
import socket
from django.conf import settings
import logging
log = logging.getLogger(__name__)
sf_alias = getattr(settings, 'SALESFORCE_DB_ALIAS', 'salesforce')
# The maximal number of retries for requests to SF API.
MAX_RETRIES = getattr(settings, 'REQUESTS_MAX_RETRIES', 1)
def getaddrinfo_wrapper(host, port, family=socket.AF_INET, socktype=0, proto=0, flags=0):
return orig_getaddrinfo(host, port, family, socktype, proto, flags)
# patch to IPv4 if required and not patched by anything other yet
if getattr(settings, 'IPV4_ONLY', False) and socket.getaddrinfo.__module__ in ('socket', '_socket'):
log.info("Patched socket to IPv4 only")
orig_getaddrinfo = socket.getaddrinfo
# replace the original socket.getaddrinfo by our version
socket.getaddrinfo = getaddrinfo_wrapper
|
# django-salesforce
#
# by Phil Christensen
# (c) 2012-2013 Freelancers Union (http://www.freelancersunion.org)
# See LICENSE.md for details
#
"""
Database backend for the Salesforce API.
"""
import socket
from django.conf import settings
import logging
log = logging.getLogger(__name__)
sf_alias = getattr(settings, 'SALESFORCE_DB_ALIAS', 'salesforce')
# The maximal number of retries for requests to SF API.
MAX_RETRIES = getattr(settings, 'REQUESTS_MAX_RETRIES', 1)
def getaddrinfo_wrapper(host, port, family=socket.AF_INET, socktype=0, proto=0, flags=0):
return orig_getaddrinfo(host, port, family, socktype, proto, flags)
# patch to IPv4 if required and not patched by anything other yet
if getattr(settings, 'IPV4_ONLY', False) and socket.getaddrinfo.__module__ in ('socket', '_socket'):
log.info("Patched socket to IPv4 only")
orig_getaddrinfo = socket.getaddrinfo
# replace the original socket.getaddrinfo by our version
socket.getaddrinfo = getaddrinfo_wrapper
Comment about the directory structure# django-salesforce
#
# by Phil Christensen
# (c) 2012-2013 Freelancers Union (http://www.freelancersunion.org)
# See LICENSE.md for details
#
"""
Database backend for the Salesforce API.
No code in this directory is used with standard databases, even if a standard
database is used for running some application tests on objects defined by
SalesforceModel. All code for SF models that can be used with non SF databases
should be located directly in the 'salesforce' directory in files 'models.py',
'fields.py', 'manager.py', 'router.py', 'admin.py'.
Incorrectly located files: (It is better not to change it now.)
backend/manager.py => manager.py
auth.py => backend/auth.py
"""
import socket
from django.conf import settings
import logging
log = logging.getLogger(__name__)
sf_alias = getattr(settings, 'SALESFORCE_DB_ALIAS', 'salesforce')
# The maximal number of retries for requests to SF API.
MAX_RETRIES = getattr(settings, 'REQUESTS_MAX_RETRIES', 1)
def getaddrinfo_wrapper(host, port, family=socket.AF_INET, socktype=0, proto=0, flags=0):
return orig_getaddrinfo(host, port, family, socktype, proto, flags)
# patch to IPv4 if required and not patched by anything other yet
if getattr(settings, 'IPV4_ONLY', False) and socket.getaddrinfo.__module__ in ('socket', '_socket'):
log.info("Patched socket to IPv4 only")
orig_getaddrinfo = socket.getaddrinfo
# replace the original socket.getaddrinfo by our version
socket.getaddrinfo = getaddrinfo_wrapper
|
<commit_before># django-salesforce
#
# by Phil Christensen
# (c) 2012-2013 Freelancers Union (http://www.freelancersunion.org)
# See LICENSE.md for details
#
"""
Database backend for the Salesforce API.
"""
import socket
from django.conf import settings
import logging
log = logging.getLogger(__name__)
sf_alias = getattr(settings, 'SALESFORCE_DB_ALIAS', 'salesforce')
# The maximal number of retries for requests to SF API.
MAX_RETRIES = getattr(settings, 'REQUESTS_MAX_RETRIES', 1)
def getaddrinfo_wrapper(host, port, family=socket.AF_INET, socktype=0, proto=0, flags=0):
return orig_getaddrinfo(host, port, family, socktype, proto, flags)
# patch to IPv4 if required and not patched by anything other yet
if getattr(settings, 'IPV4_ONLY', False) and socket.getaddrinfo.__module__ in ('socket', '_socket'):
log.info("Patched socket to IPv4 only")
orig_getaddrinfo = socket.getaddrinfo
# replace the original socket.getaddrinfo by our version
socket.getaddrinfo = getaddrinfo_wrapper
<commit_msg>Comment about the directory structure<commit_after># django-salesforce
#
# by Phil Christensen
# (c) 2012-2013 Freelancers Union (http://www.freelancersunion.org)
# See LICENSE.md for details
#
"""
Database backend for the Salesforce API.
No code in this directory is used with standard databases, even if a standard
database is used for running some application tests on objects defined by
SalesforceModel. All code for SF models that can be used with non SF databases
should be located directly in the 'salesforce' directory in files 'models.py',
'fields.py', 'manager.py', 'router.py', 'admin.py'.
Incorrectly located files: (It is better not to change it now.)
backend/manager.py => manager.py
auth.py => backend/auth.py
"""
import socket
from django.conf import settings
import logging
log = logging.getLogger(__name__)
sf_alias = getattr(settings, 'SALESFORCE_DB_ALIAS', 'salesforce')
# The maximal number of retries for requests to SF API.
MAX_RETRIES = getattr(settings, 'REQUESTS_MAX_RETRIES', 1)
def getaddrinfo_wrapper(host, port, family=socket.AF_INET, socktype=0, proto=0, flags=0):
return orig_getaddrinfo(host, port, family, socktype, proto, flags)
# patch to IPv4 if required and not patched by anything other yet
if getattr(settings, 'IPV4_ONLY', False) and socket.getaddrinfo.__module__ in ('socket', '_socket'):
log.info("Patched socket to IPv4 only")
orig_getaddrinfo = socket.getaddrinfo
# replace the original socket.getaddrinfo by our version
socket.getaddrinfo = getaddrinfo_wrapper
|
b23ed2d6d74c4604e9bb7b55faf121661ee9f785
|
statePointsGen.py
|
statePointsGen.py
|
# Thermo State Solver
# Solves for state parameters at various points in a simple thermodynamic model
# Developed by Neal DeBuhr
import csv
import argparse
import itertools
|
# Thermo State Solver
# Solves for state parameters at various points in a simple thermodynamic model
# Developed by Neal DeBuhr
import csv
import argparse
import itertools
import string
numPoints=int(input('Number of points in analysis:'))
num2alpha = dict(zip(range(1, 27), string.ascii_uppercase))
outRow=['']
outRows=[]
paramsPoints=['mdot','h','T','P','s']
parser = argparse.ArgumentParser()
parser.add_argument('-o','--output',required=True)
args = parser.parse_args()
for point in range(1,numPoints+1):
outRow.append(num2alpha[point])
outRows.append(outRow)
for param in paramsPoints:
outRows.append([param])
with open(args.output, 'wt') as csvfile:
fileEqns = csv.writer(csvfile, delimiter=',',
quotechar="'", quoting=csv.QUOTE_MINIMAL)
for row in outRows:
fileEqns.writerow(row)
print("Output file: %s" % args.output)
|
Develop program for points file generation
|
Develop program for points file generation
|
Python
|
mit
|
ndebuhr/thermo-state-solver,ndebuhr/thermo-state-solver
|
# Thermo State Solver
# Solves for state parameters at various points in a simple thermodynamic model
# Developed by Neal DeBuhr
import csv
import argparse
import itertools
Develop program for points file generation
|
# Thermo State Solver
# Solves for state parameters at various points in a simple thermodynamic model
# Developed by Neal DeBuhr
import csv
import argparse
import itertools
import string
numPoints=int(input('Number of points in analysis:'))
num2alpha = dict(zip(range(1, 27), string.ascii_uppercase))
outRow=['']
outRows=[]
paramsPoints=['mdot','h','T','P','s']
parser = argparse.ArgumentParser()
parser.add_argument('-o','--output',required=True)
args = parser.parse_args()
for point in range(1,numPoints+1):
outRow.append(num2alpha[point])
outRows.append(outRow)
for param in paramsPoints:
outRows.append([param])
with open(args.output, 'wt') as csvfile:
fileEqns = csv.writer(csvfile, delimiter=',',
quotechar="'", quoting=csv.QUOTE_MINIMAL)
for row in outRows:
fileEqns.writerow(row)
print("Output file: %s" % args.output)
|
<commit_before># Thermo State Solver
# Solves for state parameters at various points in a simple thermodynamic model
# Developed by Neal DeBuhr
import csv
import argparse
import itertools
<commit_msg>Develop program for points file generation<commit_after>
|
# Thermo State Solver
# Solves for state parameters at various points in a simple thermodynamic model
# Developed by Neal DeBuhr
import csv
import argparse
import itertools
import string
numPoints=int(input('Number of points in analysis:'))
num2alpha = dict(zip(range(1, 27), string.ascii_uppercase))
outRow=['']
outRows=[]
paramsPoints=['mdot','h','T','P','s']
parser = argparse.ArgumentParser()
parser.add_argument('-o','--output',required=True)
args = parser.parse_args()
for point in range(1,numPoints+1):
outRow.append(num2alpha[point])
outRows.append(outRow)
for param in paramsPoints:
outRows.append([param])
with open(args.output, 'wt') as csvfile:
fileEqns = csv.writer(csvfile, delimiter=',',
quotechar="'", quoting=csv.QUOTE_MINIMAL)
for row in outRows:
fileEqns.writerow(row)
print("Output file: %s" % args.output)
|
# Thermo State Solver
# Solves for state parameters at various points in a simple thermodynamic model
# Developed by Neal DeBuhr
import csv
import argparse
import itertools
Develop program for points file generation# Thermo State Solver
# Solves for state parameters at various points in a simple thermodynamic model
# Developed by Neal DeBuhr
import csv
import argparse
import itertools
import string
numPoints=int(input('Number of points in analysis:'))
num2alpha = dict(zip(range(1, 27), string.ascii_uppercase))
outRow=['']
outRows=[]
paramsPoints=['mdot','h','T','P','s']
parser = argparse.ArgumentParser()
parser.add_argument('-o','--output',required=True)
args = parser.parse_args()
for point in range(1,numPoints+1):
outRow.append(num2alpha[point])
outRows.append(outRow)
for param in paramsPoints:
outRows.append([param])
with open(args.output, 'wt') as csvfile:
fileEqns = csv.writer(csvfile, delimiter=',',
quotechar="'", quoting=csv.QUOTE_MINIMAL)
for row in outRows:
fileEqns.writerow(row)
print("Output file: %s" % args.output)
|
<commit_before># Thermo State Solver
# Solves for state parameters at various points in a simple thermodynamic model
# Developed by Neal DeBuhr
import csv
import argparse
import itertools
<commit_msg>Develop program for points file generation<commit_after># Thermo State Solver
# Solves for state parameters at various points in a simple thermodynamic model
# Developed by Neal DeBuhr
import csv
import argparse
import itertools
import string
numPoints=int(input('Number of points in analysis:'))
num2alpha = dict(zip(range(1, 27), string.ascii_uppercase))
outRow=['']
outRows=[]
paramsPoints=['mdot','h','T','P','s']
parser = argparse.ArgumentParser()
parser.add_argument('-o','--output',required=True)
args = parser.parse_args()
for point in range(1,numPoints+1):
outRow.append(num2alpha[point])
outRows.append(outRow)
for param in paramsPoints:
outRows.append([param])
with open(args.output, 'wt') as csvfile:
fileEqns = csv.writer(csvfile, delimiter=',',
quotechar="'", quoting=csv.QUOTE_MINIMAL)
for row in outRows:
fileEqns.writerow(row)
print("Output file: %s" % args.output)
|
cccdb3b914b1466a34a4b3d0a1b47b880e21168b
|
pml/__init__.py
|
pml/__init__.py
|
SP = 'setpoint'
RB = 'readback'
ENG = 'machine'
PHY = 'physics'
|
SP = 'setpoint'
RB = 'readback'
ENG = 'engineering'
PHY = 'physics'
|
Change variable name for consistency
|
Change variable name for consistency
|
Python
|
apache-2.0
|
willrogers/pml,willrogers/pml
|
SP = 'setpoint'
RB = 'readback'
ENG = 'machine'
PHY = 'physics'
Change variable name for consistency
|
SP = 'setpoint'
RB = 'readback'
ENG = 'engineering'
PHY = 'physics'
|
<commit_before>SP = 'setpoint'
RB = 'readback'
ENG = 'machine'
PHY = 'physics'
<commit_msg>Change variable name for consistency<commit_after>
|
SP = 'setpoint'
RB = 'readback'
ENG = 'engineering'
PHY = 'physics'
|
SP = 'setpoint'
RB = 'readback'
ENG = 'machine'
PHY = 'physics'
Change variable name for consistencySP = 'setpoint'
RB = 'readback'
ENG = 'engineering'
PHY = 'physics'
|
<commit_before>SP = 'setpoint'
RB = 'readback'
ENG = 'machine'
PHY = 'physics'
<commit_msg>Change variable name for consistency<commit_after>SP = 'setpoint'
RB = 'readback'
ENG = 'engineering'
PHY = 'physics'
|
f3b9cca8571acd1815534c5eb409f2ef166f897c
|
crispy/main.py
|
crispy/main.py
|
# coding: utf-8
###################################################################
# Copyright (c) 2016-2020 European Synchrotron Radiation Facility #
# #
# Author: Marius Retegan #
# #
# This work is licensed under the terms of the MIT license. #
# For further information, see https://github.com/mretegan/crispy #
###################################################################
"""This module is the entry point to the application."""
import logging
import sys
import warnings
from PyQt5.QtCore import QLocale
from PyQt5.QtWidgets import QApplication
from crispy.config import Config
from crispy.gui.main import MainWindow
from crispy.loggers import setUpLoggers
logger = logging.getLogger("crispy.main")
warnings.filterwarnings("ignore", category=UserWarning)
def main():
setUpLoggers()
app = QApplication([])
# This must be done after the application is instantiated.
locale = QLocale(QLocale.C)
locale.setNumberOptions(QLocale.OmitGroupSeparator)
QLocale.setDefault(locale)
config = Config()
config.removeOldFiles()
settings = config.read()
# Set default values if the config file is empty or was not created.
if not settings.allKeys():
logger.debug("Loading default settings.")
config.loadDefaults()
logger.info("Starting the application.")
window = MainWindow()
window.show()
logger.info("Ready.")
sys.exit(app.exec_())
if __name__ == "__main__":
main()
|
# coding: utf-8
###################################################################
# Copyright (c) 2016-2020 European Synchrotron Radiation Facility #
# #
# Author: Marius Retegan #
# #
# This work is licensed under the terms of the MIT license. #
# For further information, see https://github.com/mretegan/crispy #
###################################################################
"""This module is the entry point to the application."""
import logging
import sys
import warnings
from PyQt5.QtCore import QLocale
from PyQt5.QtWidgets import QApplication
from crispy.config import Config
from crispy.gui.main import MainWindow
from crispy.loggers import setUpLoggers
logger = logging.getLogger("crispy.main")
warnings.filterwarnings("ignore", category=UserWarning)
def main():
app = QApplication([])
# This must be done after the application is instantiated.
locale = QLocale(QLocale.C)
locale.setNumberOptions(QLocale.OmitGroupSeparator)
QLocale.setDefault(locale)
config = Config()
config.removeOldFiles()
settings = config.read()
# Set default values if the config file is empty or was not created.
if not settings.allKeys():
logger.debug("Loading default settings.")
config.loadDefaults()
setUpLoggers()
logger.info("Starting the application.")
window = MainWindow()
window.show()
logger.info("Ready.")
sys.exit(app.exec_())
if __name__ == "__main__":
main()
|
Set up loggers after the configuration file is loaded
|
Set up loggers after the configuration file is loaded
|
Python
|
mit
|
mretegan/crispy,mretegan/crispy
|
# coding: utf-8
###################################################################
# Copyright (c) 2016-2020 European Synchrotron Radiation Facility #
# #
# Author: Marius Retegan #
# #
# This work is licensed under the terms of the MIT license. #
# For further information, see https://github.com/mretegan/crispy #
###################################################################
"""This module is the entry point to the application."""
import logging
import sys
import warnings
from PyQt5.QtCore import QLocale
from PyQt5.QtWidgets import QApplication
from crispy.config import Config
from crispy.gui.main import MainWindow
from crispy.loggers import setUpLoggers
logger = logging.getLogger("crispy.main")
warnings.filterwarnings("ignore", category=UserWarning)
def main():
setUpLoggers()
app = QApplication([])
# This must be done after the application is instantiated.
locale = QLocale(QLocale.C)
locale.setNumberOptions(QLocale.OmitGroupSeparator)
QLocale.setDefault(locale)
config = Config()
config.removeOldFiles()
settings = config.read()
# Set default values if the config file is empty or was not created.
if not settings.allKeys():
logger.debug("Loading default settings.")
config.loadDefaults()
logger.info("Starting the application.")
window = MainWindow()
window.show()
logger.info("Ready.")
sys.exit(app.exec_())
if __name__ == "__main__":
main()
Set up loggers after the configuration file is loaded
|
# coding: utf-8
###################################################################
# Copyright (c) 2016-2020 European Synchrotron Radiation Facility #
# #
# Author: Marius Retegan #
# #
# This work is licensed under the terms of the MIT license. #
# For further information, see https://github.com/mretegan/crispy #
###################################################################
"""This module is the entry point to the application."""
import logging
import sys
import warnings
from PyQt5.QtCore import QLocale
from PyQt5.QtWidgets import QApplication
from crispy.config import Config
from crispy.gui.main import MainWindow
from crispy.loggers import setUpLoggers
logger = logging.getLogger("crispy.main")
warnings.filterwarnings("ignore", category=UserWarning)
def main():
app = QApplication([])
# This must be done after the application is instantiated.
locale = QLocale(QLocale.C)
locale.setNumberOptions(QLocale.OmitGroupSeparator)
QLocale.setDefault(locale)
config = Config()
config.removeOldFiles()
settings = config.read()
# Set default values if the config file is empty or was not created.
if not settings.allKeys():
logger.debug("Loading default settings.")
config.loadDefaults()
setUpLoggers()
logger.info("Starting the application.")
window = MainWindow()
window.show()
logger.info("Ready.")
sys.exit(app.exec_())
if __name__ == "__main__":
main()
|
<commit_before># coding: utf-8
###################################################################
# Copyright (c) 2016-2020 European Synchrotron Radiation Facility #
# #
# Author: Marius Retegan #
# #
# This work is licensed under the terms of the MIT license. #
# For further information, see https://github.com/mretegan/crispy #
###################################################################
"""This module is the entry point to the application."""
import logging
import sys
import warnings
from PyQt5.QtCore import QLocale
from PyQt5.QtWidgets import QApplication
from crispy.config import Config
from crispy.gui.main import MainWindow
from crispy.loggers import setUpLoggers
logger = logging.getLogger("crispy.main")
warnings.filterwarnings("ignore", category=UserWarning)
def main():
setUpLoggers()
app = QApplication([])
# This must be done after the application is instantiated.
locale = QLocale(QLocale.C)
locale.setNumberOptions(QLocale.OmitGroupSeparator)
QLocale.setDefault(locale)
config = Config()
config.removeOldFiles()
settings = config.read()
# Set default values if the config file is empty or was not created.
if not settings.allKeys():
logger.debug("Loading default settings.")
config.loadDefaults()
logger.info("Starting the application.")
window = MainWindow()
window.show()
logger.info("Ready.")
sys.exit(app.exec_())
if __name__ == "__main__":
main()
<commit_msg>Set up loggers after the configuration file is loaded<commit_after>
|
# coding: utf-8
###################################################################
# Copyright (c) 2016-2020 European Synchrotron Radiation Facility #
# #
# Author: Marius Retegan #
# #
# This work is licensed under the terms of the MIT license. #
# For further information, see https://github.com/mretegan/crispy #
###################################################################
"""This module is the entry point to the application."""
import logging
import sys
import warnings
from PyQt5.QtCore import QLocale
from PyQt5.QtWidgets import QApplication
from crispy.config import Config
from crispy.gui.main import MainWindow
from crispy.loggers import setUpLoggers
logger = logging.getLogger("crispy.main")
warnings.filterwarnings("ignore", category=UserWarning)
def main():
app = QApplication([])
# This must be done after the application is instantiated.
locale = QLocale(QLocale.C)
locale.setNumberOptions(QLocale.OmitGroupSeparator)
QLocale.setDefault(locale)
config = Config()
config.removeOldFiles()
settings = config.read()
# Set default values if the config file is empty or was not created.
if not settings.allKeys():
logger.debug("Loading default settings.")
config.loadDefaults()
setUpLoggers()
logger.info("Starting the application.")
window = MainWindow()
window.show()
logger.info("Ready.")
sys.exit(app.exec_())
if __name__ == "__main__":
main()
|
# coding: utf-8
###################################################################
# Copyright (c) 2016-2020 European Synchrotron Radiation Facility #
# #
# Author: Marius Retegan #
# #
# This work is licensed under the terms of the MIT license. #
# For further information, see https://github.com/mretegan/crispy #
###################################################################
"""This module is the entry point to the application."""
import logging
import sys
import warnings
from PyQt5.QtCore import QLocale
from PyQt5.QtWidgets import QApplication
from crispy.config import Config
from crispy.gui.main import MainWindow
from crispy.loggers import setUpLoggers
logger = logging.getLogger("crispy.main")
warnings.filterwarnings("ignore", category=UserWarning)
def main():
setUpLoggers()
app = QApplication([])
# This must be done after the application is instantiated.
locale = QLocale(QLocale.C)
locale.setNumberOptions(QLocale.OmitGroupSeparator)
QLocale.setDefault(locale)
config = Config()
config.removeOldFiles()
settings = config.read()
# Set default values if the config file is empty or was not created.
if not settings.allKeys():
logger.debug("Loading default settings.")
config.loadDefaults()
logger.info("Starting the application.")
window = MainWindow()
window.show()
logger.info("Ready.")
sys.exit(app.exec_())
if __name__ == "__main__":
main()
Set up loggers after the configuration file is loaded# coding: utf-8
###################################################################
# Copyright (c) 2016-2020 European Synchrotron Radiation Facility #
# #
# Author: Marius Retegan #
# #
# This work is licensed under the terms of the MIT license. #
# For further information, see https://github.com/mretegan/crispy #
###################################################################
"""This module is the entry point to the application."""
import logging
import sys
import warnings
from PyQt5.QtCore import QLocale
from PyQt5.QtWidgets import QApplication
from crispy.config import Config
from crispy.gui.main import MainWindow
from crispy.loggers import setUpLoggers
logger = logging.getLogger("crispy.main")
warnings.filterwarnings("ignore", category=UserWarning)
def main():
app = QApplication([])
# This must be done after the application is instantiated.
locale = QLocale(QLocale.C)
locale.setNumberOptions(QLocale.OmitGroupSeparator)
QLocale.setDefault(locale)
config = Config()
config.removeOldFiles()
settings = config.read()
# Set default values if the config file is empty or was not created.
if not settings.allKeys():
logger.debug("Loading default settings.")
config.loadDefaults()
setUpLoggers()
logger.info("Starting the application.")
window = MainWindow()
window.show()
logger.info("Ready.")
sys.exit(app.exec_())
if __name__ == "__main__":
main()
|
<commit_before># coding: utf-8
###################################################################
# Copyright (c) 2016-2020 European Synchrotron Radiation Facility #
# #
# Author: Marius Retegan #
# #
# This work is licensed under the terms of the MIT license. #
# For further information, see https://github.com/mretegan/crispy #
###################################################################
"""This module is the entry point to the application."""
import logging
import sys
import warnings
from PyQt5.QtCore import QLocale
from PyQt5.QtWidgets import QApplication
from crispy.config import Config
from crispy.gui.main import MainWindow
from crispy.loggers import setUpLoggers
logger = logging.getLogger("crispy.main")
warnings.filterwarnings("ignore", category=UserWarning)
def main():
setUpLoggers()
app = QApplication([])
# This must be done after the application is instantiated.
locale = QLocale(QLocale.C)
locale.setNumberOptions(QLocale.OmitGroupSeparator)
QLocale.setDefault(locale)
config = Config()
config.removeOldFiles()
settings = config.read()
# Set default values if the config file is empty or was not created.
if not settings.allKeys():
logger.debug("Loading default settings.")
config.loadDefaults()
logger.info("Starting the application.")
window = MainWindow()
window.show()
logger.info("Ready.")
sys.exit(app.exec_())
if __name__ == "__main__":
main()
<commit_msg>Set up loggers after the configuration file is loaded<commit_after># coding: utf-8
###################################################################
# Copyright (c) 2016-2020 European Synchrotron Radiation Facility #
# #
# Author: Marius Retegan #
# #
# This work is licensed under the terms of the MIT license. #
# For further information, see https://github.com/mretegan/crispy #
###################################################################
"""This module is the entry point to the application."""
import logging
import sys
import warnings
from PyQt5.QtCore import QLocale
from PyQt5.QtWidgets import QApplication
from crispy.config import Config
from crispy.gui.main import MainWindow
from crispy.loggers import setUpLoggers
logger = logging.getLogger("crispy.main")
warnings.filterwarnings("ignore", category=UserWarning)
def main():
app = QApplication([])
# This must be done after the application is instantiated.
locale = QLocale(QLocale.C)
locale.setNumberOptions(QLocale.OmitGroupSeparator)
QLocale.setDefault(locale)
config = Config()
config.removeOldFiles()
settings = config.read()
# Set default values if the config file is empty or was not created.
if not settings.allKeys():
logger.debug("Loading default settings.")
config.loadDefaults()
setUpLoggers()
logger.info("Starting the application.")
window = MainWindow()
window.show()
logger.info("Ready.")
sys.exit(app.exec_())
if __name__ == "__main__":
main()
|
ef003a3ebf14545927d055a0deda7e1982e90e53
|
scripts/capnp_test_pycapnp.py
|
scripts/capnp_test_pycapnp.py
|
#!/usr/bin/env python
import capnp
import os
capnp.add_import_hook([os.getcwd(), "/usr/local/include/"]) # change this to be auto-detected?
import test_capnp
import sys
def decode(name):
print getattr(test_capnp, name)._short_str()
def encode(name):
val = getattr(test_capnp, name)
class_name = name[0].upper() + name[1:]
message = getattr(test_capnp, class_name).from_dict(val.to_dict())
print message.to_bytes()
if sys.argv[1] == 'decode':
decode(sys.argv[2])
else:
encode(sys.argv[2])
|
#!/usr/bin/env python
from __future__ import print_function
import capnp
import os
capnp.add_import_hook([os.getcwd(), "/usr/local/include/"]) # change this to be auto-detected?
import test_capnp
import sys
def decode(name):
class_name = name[0].upper() + name[1:]
print(getattr(test_capnp, class_name).from_bytes(sys.stdin.read())._short_str())
def encode(name):
val = getattr(test_capnp, name)
class_name = name[0].upper() + name[1:]
message = getattr(test_capnp, class_name).from_dict(val.to_dict())
print(message.to_bytes())
if sys.argv[1] == 'decode':
decode(sys.argv[2])
else:
encode(sys.argv[2])
|
Fix decode test to actually decode message from stdin
|
Fix decode test to actually decode message from stdin
|
Python
|
bsd-2-clause
|
tempbottle/pycapnp,tempbottle/pycapnp,SymbiFlow/pycapnp,jparyani/pycapnp,SymbiFlow/pycapnp,SymbiFlow/pycapnp,rcrowder/pycapnp,jparyani/pycapnp,jparyani/pycapnp,rcrowder/pycapnp,SymbiFlow/pycapnp,jparyani/pycapnp,tempbottle/pycapnp,rcrowder/pycapnp,rcrowder/pycapnp,tempbottle/pycapnp
|
#!/usr/bin/env python
import capnp
import os
capnp.add_import_hook([os.getcwd(), "/usr/local/include/"]) # change this to be auto-detected?
import test_capnp
import sys
def decode(name):
print getattr(test_capnp, name)._short_str()
def encode(name):
val = getattr(test_capnp, name)
class_name = name[0].upper() + name[1:]
message = getattr(test_capnp, class_name).from_dict(val.to_dict())
print message.to_bytes()
if sys.argv[1] == 'decode':
decode(sys.argv[2])
else:
encode(sys.argv[2])
Fix decode test to actually decode message from stdin
|
#!/usr/bin/env python
from __future__ import print_function
import capnp
import os
capnp.add_import_hook([os.getcwd(), "/usr/local/include/"]) # change this to be auto-detected?
import test_capnp
import sys
def decode(name):
class_name = name[0].upper() + name[1:]
print(getattr(test_capnp, class_name).from_bytes(sys.stdin.read())._short_str())
def encode(name):
val = getattr(test_capnp, name)
class_name = name[0].upper() + name[1:]
message = getattr(test_capnp, class_name).from_dict(val.to_dict())
print(message.to_bytes())
if sys.argv[1] == 'decode':
decode(sys.argv[2])
else:
encode(sys.argv[2])
|
<commit_before>#!/usr/bin/env python
import capnp
import os
capnp.add_import_hook([os.getcwd(), "/usr/local/include/"]) # change this to be auto-detected?
import test_capnp
import sys
def decode(name):
print getattr(test_capnp, name)._short_str()
def encode(name):
val = getattr(test_capnp, name)
class_name = name[0].upper() + name[1:]
message = getattr(test_capnp, class_name).from_dict(val.to_dict())
print message.to_bytes()
if sys.argv[1] == 'decode':
decode(sys.argv[2])
else:
encode(sys.argv[2])
<commit_msg>Fix decode test to actually decode message from stdin<commit_after>
|
#!/usr/bin/env python
from __future__ import print_function
import capnp
import os
capnp.add_import_hook([os.getcwd(), "/usr/local/include/"]) # change this to be auto-detected?
import test_capnp
import sys
def decode(name):
class_name = name[0].upper() + name[1:]
print(getattr(test_capnp, class_name).from_bytes(sys.stdin.read())._short_str())
def encode(name):
val = getattr(test_capnp, name)
class_name = name[0].upper() + name[1:]
message = getattr(test_capnp, class_name).from_dict(val.to_dict())
print(message.to_bytes())
if sys.argv[1] == 'decode':
decode(sys.argv[2])
else:
encode(sys.argv[2])
|
#!/usr/bin/env python
import capnp
import os
capnp.add_import_hook([os.getcwd(), "/usr/local/include/"]) # change this to be auto-detected?
import test_capnp
import sys
def decode(name):
print getattr(test_capnp, name)._short_str()
def encode(name):
val = getattr(test_capnp, name)
class_name = name[0].upper() + name[1:]
message = getattr(test_capnp, class_name).from_dict(val.to_dict())
print message.to_bytes()
if sys.argv[1] == 'decode':
decode(sys.argv[2])
else:
encode(sys.argv[2])
Fix decode test to actually decode message from stdin#!/usr/bin/env python
from __future__ import print_function
import capnp
import os
capnp.add_import_hook([os.getcwd(), "/usr/local/include/"]) # change this to be auto-detected?
import test_capnp
import sys
def decode(name):
class_name = name[0].upper() + name[1:]
print(getattr(test_capnp, class_name).from_bytes(sys.stdin.read())._short_str())
def encode(name):
val = getattr(test_capnp, name)
class_name = name[0].upper() + name[1:]
message = getattr(test_capnp, class_name).from_dict(val.to_dict())
print(message.to_bytes())
if sys.argv[1] == 'decode':
decode(sys.argv[2])
else:
encode(sys.argv[2])
|
<commit_before>#!/usr/bin/env python
import capnp
import os
capnp.add_import_hook([os.getcwd(), "/usr/local/include/"]) # change this to be auto-detected?
import test_capnp
import sys
def decode(name):
print getattr(test_capnp, name)._short_str()
def encode(name):
val = getattr(test_capnp, name)
class_name = name[0].upper() + name[1:]
message = getattr(test_capnp, class_name).from_dict(val.to_dict())
print message.to_bytes()
if sys.argv[1] == 'decode':
decode(sys.argv[2])
else:
encode(sys.argv[2])
<commit_msg>Fix decode test to actually decode message from stdin<commit_after>#!/usr/bin/env python
from __future__ import print_function
import capnp
import os
capnp.add_import_hook([os.getcwd(), "/usr/local/include/"]) # change this to be auto-detected?
import test_capnp
import sys
def decode(name):
class_name = name[0].upper() + name[1:]
print(getattr(test_capnp, class_name).from_bytes(sys.stdin.read())._short_str())
def encode(name):
val = getattr(test_capnp, name)
class_name = name[0].upper() + name[1:]
message = getattr(test_capnp, class_name).from_dict(val.to_dict())
print(message.to_bytes())
if sys.argv[1] == 'decode':
decode(sys.argv[2])
else:
encode(sys.argv[2])
|
52e9390d88062e9442b18a7793e6696a36f5b9c3
|
testinfra/functional/test_tor_interfaces.py
|
testinfra/functional/test_tor_interfaces.py
|
import os
import re
import pytest
sdvars = pytest.securedrop_test_vars
@pytest.mark.xfail
@pytest.mark.parametrize('site', sdvars.tor_url_files)
@pytest.mark.skipif(os.environ.get('FPF_CI', 'false') == "false",
reason="Can only assure Tor is configured in CI atm")
def test_www(Command, site):
"""
Ensure tor interface is reachable and returns expected content.
"""
# Extract Onion URL from saved onion file, fetched back from app-staging.
onion_url_filepath = os.path.join(
os.path.dirname(__file__),
"../../install_files/ansible-base/{}".format(site['file'])
)
onion_url_raw = open(onion_url_filepath, 'ro').read()
onion_url = re.search("\w+\.onion", onion_url_raw).group()
# Fetch Onion URL via curl to confirm interface is rendered correctly.
curl_tor = 'curl -s --socks5-hostname "${{TOR_PROXY}}":9050 {}'.format(
onion_url)
curl_tor_status = '{} -o /dev/null -w "%{{http_code}}"'.format(curl_tor)
site_scrape = Command.check_output(curl_tor)
assert Command.check_output(curl_tor_status) == "200"
assert site['check_string'] in site_scrape
assert site['error_string'] not in site_scrape
|
import os
import re
import pytest
sdvars = pytest.securedrop_test_vars
@pytest.mark.parametrize('site', sdvars.tor_url_files)
@pytest.mark.skipif(os.environ.get('FPF_CI', 'false') == "false",
reason="Can only assure Tor is configured in CI atm")
def test_www(Command, site):
"""
Ensure tor interface is reachable and returns expected content.
"""
# Extract Onion URL from saved onion file, fetched back from app-staging.
onion_url_filepath = os.path.join(
os.path.dirname(__file__),
"../../install_files/ansible-base/{}".format(site['file'])
)
onion_url_raw = open(onion_url_filepath, 'ro').read()
onion_url = re.search("\w+\.onion", onion_url_raw).group()
# Fetch Onion URL via curl to confirm interface is rendered correctly.
curl_tor = 'curl -s --socks5-hostname "${{TOR_PROXY}}":9050 {}'.format(
onion_url)
curl_tor_status = '{} -o /dev/null -w "%{{http_code}}"'.format(curl_tor)
site_scrape = Command.check_output(curl_tor)
assert Command.check_output(curl_tor_status) == "200"
assert site['check_string'] in site_scrape
assert site['error_string'] not in site_scrape
|
Remove XFAIL on functional tor test
|
Remove XFAIL on functional tor test
|
Python
|
agpl-3.0
|
conorsch/securedrop,ehartsuyker/securedrop,garrettr/securedrop,ehartsuyker/securedrop,conorsch/securedrop,heartsucker/securedrop,garrettr/securedrop,ehartsuyker/securedrop,ehartsuyker/securedrop,conorsch/securedrop,ehartsuyker/securedrop,heartsucker/securedrop,conorsch/securedrop,heartsucker/securedrop,ehartsuyker/securedrop,heartsucker/securedrop,garrettr/securedrop,conorsch/securedrop,garrettr/securedrop,heartsucker/securedrop
|
import os
import re
import pytest
sdvars = pytest.securedrop_test_vars
@pytest.mark.xfail
@pytest.mark.parametrize('site', sdvars.tor_url_files)
@pytest.mark.skipif(os.environ.get('FPF_CI', 'false') == "false",
reason="Can only assure Tor is configured in CI atm")
def test_www(Command, site):
"""
Ensure tor interface is reachable and returns expected content.
"""
# Extract Onion URL from saved onion file, fetched back from app-staging.
onion_url_filepath = os.path.join(
os.path.dirname(__file__),
"../../install_files/ansible-base/{}".format(site['file'])
)
onion_url_raw = open(onion_url_filepath, 'ro').read()
onion_url = re.search("\w+\.onion", onion_url_raw).group()
# Fetch Onion URL via curl to confirm interface is rendered correctly.
curl_tor = 'curl -s --socks5-hostname "${{TOR_PROXY}}":9050 {}'.format(
onion_url)
curl_tor_status = '{} -o /dev/null -w "%{{http_code}}"'.format(curl_tor)
site_scrape = Command.check_output(curl_tor)
assert Command.check_output(curl_tor_status) == "200"
assert site['check_string'] in site_scrape
assert site['error_string'] not in site_scrape
Remove XFAIL on functional tor test
|
import os
import re
import pytest
sdvars = pytest.securedrop_test_vars
@pytest.mark.parametrize('site', sdvars.tor_url_files)
@pytest.mark.skipif(os.environ.get('FPF_CI', 'false') == "false",
reason="Can only assure Tor is configured in CI atm")
def test_www(Command, site):
"""
Ensure tor interface is reachable and returns expected content.
"""
# Extract Onion URL from saved onion file, fetched back from app-staging.
onion_url_filepath = os.path.join(
os.path.dirname(__file__),
"../../install_files/ansible-base/{}".format(site['file'])
)
onion_url_raw = open(onion_url_filepath, 'ro').read()
onion_url = re.search("\w+\.onion", onion_url_raw).group()
# Fetch Onion URL via curl to confirm interface is rendered correctly.
curl_tor = 'curl -s --socks5-hostname "${{TOR_PROXY}}":9050 {}'.format(
onion_url)
curl_tor_status = '{} -o /dev/null -w "%{{http_code}}"'.format(curl_tor)
site_scrape = Command.check_output(curl_tor)
assert Command.check_output(curl_tor_status) == "200"
assert site['check_string'] in site_scrape
assert site['error_string'] not in site_scrape
|
<commit_before>import os
import re
import pytest
sdvars = pytest.securedrop_test_vars
@pytest.mark.xfail
@pytest.mark.parametrize('site', sdvars.tor_url_files)
@pytest.mark.skipif(os.environ.get('FPF_CI', 'false') == "false",
reason="Can only assure Tor is configured in CI atm")
def test_www(Command, site):
"""
Ensure tor interface is reachable and returns expected content.
"""
# Extract Onion URL from saved onion file, fetched back from app-staging.
onion_url_filepath = os.path.join(
os.path.dirname(__file__),
"../../install_files/ansible-base/{}".format(site['file'])
)
onion_url_raw = open(onion_url_filepath, 'ro').read()
onion_url = re.search("\w+\.onion", onion_url_raw).group()
# Fetch Onion URL via curl to confirm interface is rendered correctly.
curl_tor = 'curl -s --socks5-hostname "${{TOR_PROXY}}":9050 {}'.format(
onion_url)
curl_tor_status = '{} -o /dev/null -w "%{{http_code}}"'.format(curl_tor)
site_scrape = Command.check_output(curl_tor)
assert Command.check_output(curl_tor_status) == "200"
assert site['check_string'] in site_scrape
assert site['error_string'] not in site_scrape
<commit_msg>Remove XFAIL on functional tor test<commit_after>
|
import os
import re
import pytest
sdvars = pytest.securedrop_test_vars
@pytest.mark.parametrize('site', sdvars.tor_url_files)
@pytest.mark.skipif(os.environ.get('FPF_CI', 'false') == "false",
reason="Can only assure Tor is configured in CI atm")
def test_www(Command, site):
"""
Ensure tor interface is reachable and returns expected content.
"""
# Extract Onion URL from saved onion file, fetched back from app-staging.
onion_url_filepath = os.path.join(
os.path.dirname(__file__),
"../../install_files/ansible-base/{}".format(site['file'])
)
onion_url_raw = open(onion_url_filepath, 'ro').read()
onion_url = re.search("\w+\.onion", onion_url_raw).group()
# Fetch Onion URL via curl to confirm interface is rendered correctly.
curl_tor = 'curl -s --socks5-hostname "${{TOR_PROXY}}":9050 {}'.format(
onion_url)
curl_tor_status = '{} -o /dev/null -w "%{{http_code}}"'.format(curl_tor)
site_scrape = Command.check_output(curl_tor)
assert Command.check_output(curl_tor_status) == "200"
assert site['check_string'] in site_scrape
assert site['error_string'] not in site_scrape
|
import os
import re
import pytest
sdvars = pytest.securedrop_test_vars
@pytest.mark.xfail
@pytest.mark.parametrize('site', sdvars.tor_url_files)
@pytest.mark.skipif(os.environ.get('FPF_CI', 'false') == "false",
reason="Can only assure Tor is configured in CI atm")
def test_www(Command, site):
"""
Ensure tor interface is reachable and returns expected content.
"""
# Extract Onion URL from saved onion file, fetched back from app-staging.
onion_url_filepath = os.path.join(
os.path.dirname(__file__),
"../../install_files/ansible-base/{}".format(site['file'])
)
onion_url_raw = open(onion_url_filepath, 'ro').read()
onion_url = re.search("\w+\.onion", onion_url_raw).group()
# Fetch Onion URL via curl to confirm interface is rendered correctly.
curl_tor = 'curl -s --socks5-hostname "${{TOR_PROXY}}":9050 {}'.format(
onion_url)
curl_tor_status = '{} -o /dev/null -w "%{{http_code}}"'.format(curl_tor)
site_scrape = Command.check_output(curl_tor)
assert Command.check_output(curl_tor_status) == "200"
assert site['check_string'] in site_scrape
assert site['error_string'] not in site_scrape
Remove XFAIL on functional tor testimport os
import re
import pytest
sdvars = pytest.securedrop_test_vars
@pytest.mark.parametrize('site', sdvars.tor_url_files)
@pytest.mark.skipif(os.environ.get('FPF_CI', 'false') == "false",
reason="Can only assure Tor is configured in CI atm")
def test_www(Command, site):
"""
Ensure tor interface is reachable and returns expected content.
"""
# Extract Onion URL from saved onion file, fetched back from app-staging.
onion_url_filepath = os.path.join(
os.path.dirname(__file__),
"../../install_files/ansible-base/{}".format(site['file'])
)
onion_url_raw = open(onion_url_filepath, 'ro').read()
onion_url = re.search("\w+\.onion", onion_url_raw).group()
# Fetch Onion URL via curl to confirm interface is rendered correctly.
curl_tor = 'curl -s --socks5-hostname "${{TOR_PROXY}}":9050 {}'.format(
onion_url)
curl_tor_status = '{} -o /dev/null -w "%{{http_code}}"'.format(curl_tor)
site_scrape = Command.check_output(curl_tor)
assert Command.check_output(curl_tor_status) == "200"
assert site['check_string'] in site_scrape
assert site['error_string'] not in site_scrape
|
<commit_before>import os
import re
import pytest
sdvars = pytest.securedrop_test_vars
@pytest.mark.xfail
@pytest.mark.parametrize('site', sdvars.tor_url_files)
@pytest.mark.skipif(os.environ.get('FPF_CI', 'false') == "false",
reason="Can only assure Tor is configured in CI atm")
def test_www(Command, site):
"""
Ensure tor interface is reachable and returns expected content.
"""
# Extract Onion URL from saved onion file, fetched back from app-staging.
onion_url_filepath = os.path.join(
os.path.dirname(__file__),
"../../install_files/ansible-base/{}".format(site['file'])
)
onion_url_raw = open(onion_url_filepath, 'ro').read()
onion_url = re.search("\w+\.onion", onion_url_raw).group()
# Fetch Onion URL via curl to confirm interface is rendered correctly.
curl_tor = 'curl -s --socks5-hostname "${{TOR_PROXY}}":9050 {}'.format(
onion_url)
curl_tor_status = '{} -o /dev/null -w "%{{http_code}}"'.format(curl_tor)
site_scrape = Command.check_output(curl_tor)
assert Command.check_output(curl_tor_status) == "200"
assert site['check_string'] in site_scrape
assert site['error_string'] not in site_scrape
<commit_msg>Remove XFAIL on functional tor test<commit_after>import os
import re
import pytest
sdvars = pytest.securedrop_test_vars
@pytest.mark.parametrize('site', sdvars.tor_url_files)
@pytest.mark.skipif(os.environ.get('FPF_CI', 'false') == "false",
reason="Can only assure Tor is configured in CI atm")
def test_www(Command, site):
"""
Ensure tor interface is reachable and returns expected content.
"""
# Extract Onion URL from saved onion file, fetched back from app-staging.
onion_url_filepath = os.path.join(
os.path.dirname(__file__),
"../../install_files/ansible-base/{}".format(site['file'])
)
onion_url_raw = open(onion_url_filepath, 'ro').read()
onion_url = re.search("\w+\.onion", onion_url_raw).group()
# Fetch Onion URL via curl to confirm interface is rendered correctly.
curl_tor = 'curl -s --socks5-hostname "${{TOR_PROXY}}":9050 {}'.format(
onion_url)
curl_tor_status = '{} -o /dev/null -w "%{{http_code}}"'.format(curl_tor)
site_scrape = Command.check_output(curl_tor)
assert Command.check_output(curl_tor_status) == "200"
assert site['check_string'] in site_scrape
assert site['error_string'] not in site_scrape
|
aee49d59b76400389ffa768950b479094059e385
|
linguist/tests/translations.py
|
linguist/tests/translations.py
|
# -*- coding: utf-8 -*_
from django.db import models
from ..base import ModelTranslationBase
from ..mixins import ModelMixin, ManagerMixin
class FooManager(ManagerMixin, models.Manager):
pass
class BarManager(ManagerMixin, models.Manager):
pass
class FooModel(ModelMixin, models.Model):
title = models.CharField(max_length=255, null=True, blank=True)
excerpt = models.TextField(null=True, blank=True)
body = models.TextField(null=True, blank=True)
created_at = models.DateTimeField(auto_now_add=True)
objects = FooManager()
class FooTranslation(ModelTranslationBase):
model = FooModel
identifier = 'foo'
fields = ('title', 'excerpt', 'body')
class BarModel(ModelMixin, models.Model):
title = models.CharField(max_length=255, null=True, blank=True)
objects = BarManager()
class BarTranslation(ModelTranslationBase):
model = BarModel
identifier = 'bar'
fields = ('title', )
class BadTranslation(object):
pass
class BadModel(object):
pass
|
# -*- coding: utf-8 -*_
from django.db import models
from ..base import ModelTranslationBase
from ..mixins import ModelMixin, ManagerMixin
class FooManager(ManagerMixin, models.Manager):
pass
class BarManager(ManagerMixin, models.Manager):
pass
class FooModel(ModelMixin, models.Model):
title = models.CharField(max_length=255)
excerpt = models.TextField(null=True, blank=True)
body = models.TextField(null=True, blank=True)
created_at = models.DateTimeField(auto_now_add=True)
objects = FooManager()
class Meta:
linguist = {
'identifier': 'foo',
'fields': ('title', 'excerpt', 'body'),
}
class BarModel(ModelMixin, models.Model):
title = models.CharField(max_length=255, null=True, blank=True)
objects = BarManager()
class Meta:
linguist = {
'identifier': 'bar',
'fields': ('title', ),
}
class BadTranslation(object):
pass
class BadModel(object):
pass
|
Update test models for new metaclass support.
|
Update test models for new metaclass support.
|
Python
|
mit
|
ulule/django-linguist
|
# -*- coding: utf-8 -*_
from django.db import models
from ..base import ModelTranslationBase
from ..mixins import ModelMixin, ManagerMixin
class FooManager(ManagerMixin, models.Manager):
pass
class BarManager(ManagerMixin, models.Manager):
pass
class FooModel(ModelMixin, models.Model):
title = models.CharField(max_length=255, null=True, blank=True)
excerpt = models.TextField(null=True, blank=True)
body = models.TextField(null=True, blank=True)
created_at = models.DateTimeField(auto_now_add=True)
objects = FooManager()
class FooTranslation(ModelTranslationBase):
model = FooModel
identifier = 'foo'
fields = ('title', 'excerpt', 'body')
class BarModel(ModelMixin, models.Model):
title = models.CharField(max_length=255, null=True, blank=True)
objects = BarManager()
class BarTranslation(ModelTranslationBase):
model = BarModel
identifier = 'bar'
fields = ('title', )
class BadTranslation(object):
pass
class BadModel(object):
pass
Update test models for new metaclass support.
|
# -*- coding: utf-8 -*_
from django.db import models
from ..base import ModelTranslationBase
from ..mixins import ModelMixin, ManagerMixin
class FooManager(ManagerMixin, models.Manager):
pass
class BarManager(ManagerMixin, models.Manager):
pass
class FooModel(ModelMixin, models.Model):
title = models.CharField(max_length=255)
excerpt = models.TextField(null=True, blank=True)
body = models.TextField(null=True, blank=True)
created_at = models.DateTimeField(auto_now_add=True)
objects = FooManager()
class Meta:
linguist = {
'identifier': 'foo',
'fields': ('title', 'excerpt', 'body'),
}
class BarModel(ModelMixin, models.Model):
title = models.CharField(max_length=255, null=True, blank=True)
objects = BarManager()
class Meta:
linguist = {
'identifier': 'bar',
'fields': ('title', ),
}
class BadTranslation(object):
pass
class BadModel(object):
pass
|
<commit_before># -*- coding: utf-8 -*_
from django.db import models
from ..base import ModelTranslationBase
from ..mixins import ModelMixin, ManagerMixin
class FooManager(ManagerMixin, models.Manager):
pass
class BarManager(ManagerMixin, models.Manager):
pass
class FooModel(ModelMixin, models.Model):
title = models.CharField(max_length=255, null=True, blank=True)
excerpt = models.TextField(null=True, blank=True)
body = models.TextField(null=True, blank=True)
created_at = models.DateTimeField(auto_now_add=True)
objects = FooManager()
class FooTranslation(ModelTranslationBase):
model = FooModel
identifier = 'foo'
fields = ('title', 'excerpt', 'body')
class BarModel(ModelMixin, models.Model):
title = models.CharField(max_length=255, null=True, blank=True)
objects = BarManager()
class BarTranslation(ModelTranslationBase):
model = BarModel
identifier = 'bar'
fields = ('title', )
class BadTranslation(object):
pass
class BadModel(object):
pass
<commit_msg>Update test models for new metaclass support.<commit_after>
|
# -*- coding: utf-8 -*_
from django.db import models
from ..base import ModelTranslationBase
from ..mixins import ModelMixin, ManagerMixin
class FooManager(ManagerMixin, models.Manager):
pass
class BarManager(ManagerMixin, models.Manager):
pass
class FooModel(ModelMixin, models.Model):
title = models.CharField(max_length=255)
excerpt = models.TextField(null=True, blank=True)
body = models.TextField(null=True, blank=True)
created_at = models.DateTimeField(auto_now_add=True)
objects = FooManager()
class Meta:
linguist = {
'identifier': 'foo',
'fields': ('title', 'excerpt', 'body'),
}
class BarModel(ModelMixin, models.Model):
title = models.CharField(max_length=255, null=True, blank=True)
objects = BarManager()
class Meta:
linguist = {
'identifier': 'bar',
'fields': ('title', ),
}
class BadTranslation(object):
pass
class BadModel(object):
pass
|
# -*- coding: utf-8 -*_
from django.db import models
from ..base import ModelTranslationBase
from ..mixins import ModelMixin, ManagerMixin
class FooManager(ManagerMixin, models.Manager):
pass
class BarManager(ManagerMixin, models.Manager):
pass
class FooModel(ModelMixin, models.Model):
title = models.CharField(max_length=255, null=True, blank=True)
excerpt = models.TextField(null=True, blank=True)
body = models.TextField(null=True, blank=True)
created_at = models.DateTimeField(auto_now_add=True)
objects = FooManager()
class FooTranslation(ModelTranslationBase):
model = FooModel
identifier = 'foo'
fields = ('title', 'excerpt', 'body')
class BarModel(ModelMixin, models.Model):
title = models.CharField(max_length=255, null=True, blank=True)
objects = BarManager()
class BarTranslation(ModelTranslationBase):
model = BarModel
identifier = 'bar'
fields = ('title', )
class BadTranslation(object):
pass
class BadModel(object):
pass
Update test models for new metaclass support.# -*- coding: utf-8 -*_
from django.db import models
from ..base import ModelTranslationBase
from ..mixins import ModelMixin, ManagerMixin
class FooManager(ManagerMixin, models.Manager):
pass
class BarManager(ManagerMixin, models.Manager):
pass
class FooModel(ModelMixin, models.Model):
title = models.CharField(max_length=255)
excerpt = models.TextField(null=True, blank=True)
body = models.TextField(null=True, blank=True)
created_at = models.DateTimeField(auto_now_add=True)
objects = FooManager()
class Meta:
linguist = {
'identifier': 'foo',
'fields': ('title', 'excerpt', 'body'),
}
class BarModel(ModelMixin, models.Model):
title = models.CharField(max_length=255, null=True, blank=True)
objects = BarManager()
class Meta:
linguist = {
'identifier': 'bar',
'fields': ('title', ),
}
class BadTranslation(object):
pass
class BadModel(object):
pass
|
<commit_before># -*- coding: utf-8 -*_
from django.db import models
from ..base import ModelTranslationBase
from ..mixins import ModelMixin, ManagerMixin
class FooManager(ManagerMixin, models.Manager):
pass
class BarManager(ManagerMixin, models.Manager):
pass
class FooModel(ModelMixin, models.Model):
title = models.CharField(max_length=255, null=True, blank=True)
excerpt = models.TextField(null=True, blank=True)
body = models.TextField(null=True, blank=True)
created_at = models.DateTimeField(auto_now_add=True)
objects = FooManager()
class FooTranslation(ModelTranslationBase):
model = FooModel
identifier = 'foo'
fields = ('title', 'excerpt', 'body')
class BarModel(ModelMixin, models.Model):
title = models.CharField(max_length=255, null=True, blank=True)
objects = BarManager()
class BarTranslation(ModelTranslationBase):
model = BarModel
identifier = 'bar'
fields = ('title', )
class BadTranslation(object):
pass
class BadModel(object):
pass
<commit_msg>Update test models for new metaclass support.<commit_after># -*- coding: utf-8 -*_
from django.db import models
from ..base import ModelTranslationBase
from ..mixins import ModelMixin, ManagerMixin
class FooManager(ManagerMixin, models.Manager):
pass
class BarManager(ManagerMixin, models.Manager):
pass
class FooModel(ModelMixin, models.Model):
title = models.CharField(max_length=255)
excerpt = models.TextField(null=True, blank=True)
body = models.TextField(null=True, blank=True)
created_at = models.DateTimeField(auto_now_add=True)
objects = FooManager()
class Meta:
linguist = {
'identifier': 'foo',
'fields': ('title', 'excerpt', 'body'),
}
class BarModel(ModelMixin, models.Model):
title = models.CharField(max_length=255, null=True, blank=True)
objects = BarManager()
class Meta:
linguist = {
'identifier': 'bar',
'fields': ('title', ),
}
class BadTranslation(object):
pass
class BadModel(object):
pass
|
786bc416ca00c7021f5881e459d2634e8fcd8458
|
src/vdb/src/_vdb/common.py
|
src/vdb/src/_vdb/common.py
|
# Copyright (c) 2005-2016 Stefanos Harhalakis <v13@v13.gr>
# Copyright (c) 2016-2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Collection, Mapping, Union
import ipaddress
SupportedTypes = Union[str, int, float, bool, ipaddress.IPv4Interface, ipaddress.IPv6Interface, dict, list, None]
# Convenience types
ResultDict = dict[str, SupportedTypes] # A result in dict form
ResultsDict = list[ResultDict] # A list of results
ValueParam = Mapping[str, SupportedTypes] # A parameter suitable for passing db values
WhereParam = Mapping[str, SupportedTypes] # A parameter suitable for WHERE
ParamDict = dict[str, SupportedTypes] # A concrete dict for values
OrderParam = Collection[str] # A parameter suitable for ORDER BY
class VDBError(Exception):
pass
|
# Copyright (c) 2005-2016 Stefanos Harhalakis <v13@v13.gr>
# Copyright (c) 2016-2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Collection, Mapping, Union
import ipaddress
SupportedTypes = Union[str, int, float, bool, ipaddress.IPv4Interface, ipaddress.IPv6Interface,
ipaddress.IPv4Network, ipaddress.IPv6Network, dict, list, None]
# Convenience types
ResultDict = dict[str, SupportedTypes] # A result in dict form
ResultsDict = list[ResultDict] # A list of results
ValueParam = Mapping[str, SupportedTypes] # A parameter suitable for passing db values
WhereParam = Mapping[str, SupportedTypes] # A parameter suitable for WHERE
ParamDict = dict[str, SupportedTypes] # A concrete dict for values
OrderParam = Collection[str] # A parameter suitable for ORDER BY
class VDBError(Exception):
pass
|
Add ipaddress.IPv[46]Network to the supported types
|
Add ipaddress.IPv[46]Network to the supported types
|
Python
|
apache-2.0
|
sharhalakis/vdns
|
# Copyright (c) 2005-2016 Stefanos Harhalakis <v13@v13.gr>
# Copyright (c) 2016-2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Collection, Mapping, Union
import ipaddress
SupportedTypes = Union[str, int, float, bool, ipaddress.IPv4Interface, ipaddress.IPv6Interface, dict, list, None]
# Convenience types
ResultDict = dict[str, SupportedTypes] # A result in dict form
ResultsDict = list[ResultDict] # A list of results
ValueParam = Mapping[str, SupportedTypes] # A parameter suitable for passing db values
WhereParam = Mapping[str, SupportedTypes] # A parameter suitable for WHERE
ParamDict = dict[str, SupportedTypes] # A concrete dict for values
OrderParam = Collection[str] # A parameter suitable for ORDER BY
class VDBError(Exception):
pass
Add ipaddress.IPv[46]Network to the supported types
|
# Copyright (c) 2005-2016 Stefanos Harhalakis <v13@v13.gr>
# Copyright (c) 2016-2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Collection, Mapping, Union
import ipaddress
SupportedTypes = Union[str, int, float, bool, ipaddress.IPv4Interface, ipaddress.IPv6Interface,
ipaddress.IPv4Network, ipaddress.IPv6Network, dict, list, None]
# Convenience types
ResultDict = dict[str, SupportedTypes] # A result in dict form
ResultsDict = list[ResultDict] # A list of results
ValueParam = Mapping[str, SupportedTypes] # A parameter suitable for passing db values
WhereParam = Mapping[str, SupportedTypes] # A parameter suitable for WHERE
ParamDict = dict[str, SupportedTypes] # A concrete dict for values
OrderParam = Collection[str] # A parameter suitable for ORDER BY
class VDBError(Exception):
pass
|
<commit_before># Copyright (c) 2005-2016 Stefanos Harhalakis <v13@v13.gr>
# Copyright (c) 2016-2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Collection, Mapping, Union
import ipaddress
SupportedTypes = Union[str, int, float, bool, ipaddress.IPv4Interface, ipaddress.IPv6Interface, dict, list, None]
# Convenience types
ResultDict = dict[str, SupportedTypes] # A result in dict form
ResultsDict = list[ResultDict] # A list of results
ValueParam = Mapping[str, SupportedTypes] # A parameter suitable for passing db values
WhereParam = Mapping[str, SupportedTypes] # A parameter suitable for WHERE
ParamDict = dict[str, SupportedTypes] # A concrete dict for values
OrderParam = Collection[str] # A parameter suitable for ORDER BY
class VDBError(Exception):
pass
<commit_msg>Add ipaddress.IPv[46]Network to the supported types<commit_after>
|
# Copyright (c) 2005-2016 Stefanos Harhalakis <v13@v13.gr>
# Copyright (c) 2016-2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Collection, Mapping, Union
import ipaddress
SupportedTypes = Union[str, int, float, bool, ipaddress.IPv4Interface, ipaddress.IPv6Interface,
ipaddress.IPv4Network, ipaddress.IPv6Network, dict, list, None]
# Convenience types
ResultDict = dict[str, SupportedTypes] # A result in dict form
ResultsDict = list[ResultDict] # A list of results
ValueParam = Mapping[str, SupportedTypes] # A parameter suitable for passing db values
WhereParam = Mapping[str, SupportedTypes] # A parameter suitable for WHERE
ParamDict = dict[str, SupportedTypes] # A concrete dict for values
OrderParam = Collection[str] # A parameter suitable for ORDER BY
class VDBError(Exception):
pass
|
# Copyright (c) 2005-2016 Stefanos Harhalakis <v13@v13.gr>
# Copyright (c) 2016-2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Collection, Mapping, Union
import ipaddress
SupportedTypes = Union[str, int, float, bool, ipaddress.IPv4Interface, ipaddress.IPv6Interface, dict, list, None]
# Convenience types
ResultDict = dict[str, SupportedTypes] # A result in dict form
ResultsDict = list[ResultDict] # A list of results
ValueParam = Mapping[str, SupportedTypes] # A parameter suitable for passing db values
WhereParam = Mapping[str, SupportedTypes] # A parameter suitable for WHERE
ParamDict = dict[str, SupportedTypes] # A concrete dict for values
OrderParam = Collection[str] # A parameter suitable for ORDER BY
class VDBError(Exception):
pass
Add ipaddress.IPv[46]Network to the supported types# Copyright (c) 2005-2016 Stefanos Harhalakis <v13@v13.gr>
# Copyright (c) 2016-2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Collection, Mapping, Union
import ipaddress
SupportedTypes = Union[str, int, float, bool, ipaddress.IPv4Interface, ipaddress.IPv6Interface,
ipaddress.IPv4Network, ipaddress.IPv6Network, dict, list, None]
# Convenience types
ResultDict = dict[str, SupportedTypes] # A result in dict form
ResultsDict = list[ResultDict] # A list of results
ValueParam = Mapping[str, SupportedTypes] # A parameter suitable for passing db values
WhereParam = Mapping[str, SupportedTypes] # A parameter suitable for WHERE
ParamDict = dict[str, SupportedTypes] # A concrete dict for values
OrderParam = Collection[str] # A parameter suitable for ORDER BY
class VDBError(Exception):
pass
|
<commit_before># Copyright (c) 2005-2016 Stefanos Harhalakis <v13@v13.gr>
# Copyright (c) 2016-2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Collection, Mapping, Union
import ipaddress
SupportedTypes = Union[str, int, float, bool, ipaddress.IPv4Interface, ipaddress.IPv6Interface, dict, list, None]
# Convenience types
ResultDict = dict[str, SupportedTypes] # A result in dict form
ResultsDict = list[ResultDict] # A list of results
ValueParam = Mapping[str, SupportedTypes] # A parameter suitable for passing db values
WhereParam = Mapping[str, SupportedTypes] # A parameter suitable for WHERE
ParamDict = dict[str, SupportedTypes] # A concrete dict for values
OrderParam = Collection[str] # A parameter suitable for ORDER BY
class VDBError(Exception):
pass
<commit_msg>Add ipaddress.IPv[46]Network to the supported types<commit_after># Copyright (c) 2005-2016 Stefanos Harhalakis <v13@v13.gr>
# Copyright (c) 2016-2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Collection, Mapping, Union
import ipaddress
SupportedTypes = Union[str, int, float, bool, ipaddress.IPv4Interface, ipaddress.IPv6Interface,
ipaddress.IPv4Network, ipaddress.IPv6Network, dict, list, None]
# Convenience types
ResultDict = dict[str, SupportedTypes] # A result in dict form
ResultsDict = list[ResultDict] # A list of results
ValueParam = Mapping[str, SupportedTypes] # A parameter suitable for passing db values
WhereParam = Mapping[str, SupportedTypes] # A parameter suitable for WHERE
ParamDict = dict[str, SupportedTypes] # A concrete dict for values
OrderParam = Collection[str] # A parameter suitable for ORDER BY
class VDBError(Exception):
pass
|
decc454dfb50258eaab4635379b1c18470246f62
|
indico/modules/events/views.py
|
indico/modules/events/views.py
|
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from MaKaC.webinterface.pages.admins import WPAdminsBase
from MaKaC.webinterface.pages.base import WPJinjaMixin
from MaKaC.webinterface.pages.conferences import WPConferenceDefaultDisplayBase
class WPReferenceTypes(WPJinjaMixin, WPAdminsBase):
template_prefix = 'events/'
class WPEventDisplay(WPJinjaMixin, WPConferenceDefaultDisplayBase):
template_prefix = 'events/'
def _getBody(self, params):
return WPJinjaMixin._getPageContent(self, params)
def getCSSFiles(self):
return WPConferenceDefaultDisplayBase.getCSSFiles(self) + self._asset_env['event_display_sass'].urls()
|
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from MaKaC.webinterface.pages.admins import WPAdminsBase
from MaKaC.webinterface.pages.base import WPJinjaMixin
from MaKaC.webinterface.pages.conferences import WPConferenceDefaultDisplayBase
class WPReferenceTypes(WPJinjaMixin, WPAdminsBase):
template_prefix = 'events/'
sidemenu_option = 'reference_types'
class WPEventDisplay(WPJinjaMixin, WPConferenceDefaultDisplayBase):
template_prefix = 'events/'
def _getBody(self, params):
return WPJinjaMixin._getPageContent(self, params)
def getCSSFiles(self):
return WPConferenceDefaultDisplayBase.getCSSFiles(self) + self._asset_env['event_display_sass'].urls()
|
Fix highlighting of "External ID Types" menu entry
|
Fix highlighting of "External ID Types" menu entry
|
Python
|
mit
|
ThiefMaster/indico,pferreir/indico,mic4ael/indico,ThiefMaster/indico,DirkHoffmann/indico,ThiefMaster/indico,OmeGak/indico,mic4ael/indico,indico/indico,DirkHoffmann/indico,mvidalgarcia/indico,mvidalgarcia/indico,pferreir/indico,OmeGak/indico,OmeGak/indico,mic4ael/indico,mic4ael/indico,indico/indico,DirkHoffmann/indico,indico/indico,ThiefMaster/indico,DirkHoffmann/indico,OmeGak/indico,mvidalgarcia/indico,indico/indico,pferreir/indico,pferreir/indico,mvidalgarcia/indico
|
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from MaKaC.webinterface.pages.admins import WPAdminsBase
from MaKaC.webinterface.pages.base import WPJinjaMixin
from MaKaC.webinterface.pages.conferences import WPConferenceDefaultDisplayBase
class WPReferenceTypes(WPJinjaMixin, WPAdminsBase):
template_prefix = 'events/'
class WPEventDisplay(WPJinjaMixin, WPConferenceDefaultDisplayBase):
template_prefix = 'events/'
def _getBody(self, params):
return WPJinjaMixin._getPageContent(self, params)
def getCSSFiles(self):
return WPConferenceDefaultDisplayBase.getCSSFiles(self) + self._asset_env['event_display_sass'].urls()
Fix highlighting of "External ID Types" menu entry
|
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from MaKaC.webinterface.pages.admins import WPAdminsBase
from MaKaC.webinterface.pages.base import WPJinjaMixin
from MaKaC.webinterface.pages.conferences import WPConferenceDefaultDisplayBase
class WPReferenceTypes(WPJinjaMixin, WPAdminsBase):
template_prefix = 'events/'
sidemenu_option = 'reference_types'
class WPEventDisplay(WPJinjaMixin, WPConferenceDefaultDisplayBase):
template_prefix = 'events/'
def _getBody(self, params):
return WPJinjaMixin._getPageContent(self, params)
def getCSSFiles(self):
return WPConferenceDefaultDisplayBase.getCSSFiles(self) + self._asset_env['event_display_sass'].urls()
|
<commit_before># This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from MaKaC.webinterface.pages.admins import WPAdminsBase
from MaKaC.webinterface.pages.base import WPJinjaMixin
from MaKaC.webinterface.pages.conferences import WPConferenceDefaultDisplayBase
class WPReferenceTypes(WPJinjaMixin, WPAdminsBase):
template_prefix = 'events/'
class WPEventDisplay(WPJinjaMixin, WPConferenceDefaultDisplayBase):
template_prefix = 'events/'
def _getBody(self, params):
return WPJinjaMixin._getPageContent(self, params)
def getCSSFiles(self):
return WPConferenceDefaultDisplayBase.getCSSFiles(self) + self._asset_env['event_display_sass'].urls()
<commit_msg>Fix highlighting of "External ID Types" menu entry<commit_after>
|
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from MaKaC.webinterface.pages.admins import WPAdminsBase
from MaKaC.webinterface.pages.base import WPJinjaMixin
from MaKaC.webinterface.pages.conferences import WPConferenceDefaultDisplayBase
class WPReferenceTypes(WPJinjaMixin, WPAdminsBase):
template_prefix = 'events/'
sidemenu_option = 'reference_types'
class WPEventDisplay(WPJinjaMixin, WPConferenceDefaultDisplayBase):
template_prefix = 'events/'
def _getBody(self, params):
return WPJinjaMixin._getPageContent(self, params)
def getCSSFiles(self):
return WPConferenceDefaultDisplayBase.getCSSFiles(self) + self._asset_env['event_display_sass'].urls()
|
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from MaKaC.webinterface.pages.admins import WPAdminsBase
from MaKaC.webinterface.pages.base import WPJinjaMixin
from MaKaC.webinterface.pages.conferences import WPConferenceDefaultDisplayBase
class WPReferenceTypes(WPJinjaMixin, WPAdminsBase):
template_prefix = 'events/'
class WPEventDisplay(WPJinjaMixin, WPConferenceDefaultDisplayBase):
template_prefix = 'events/'
def _getBody(self, params):
return WPJinjaMixin._getPageContent(self, params)
def getCSSFiles(self):
return WPConferenceDefaultDisplayBase.getCSSFiles(self) + self._asset_env['event_display_sass'].urls()
Fix highlighting of "External ID Types" menu entry# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from MaKaC.webinterface.pages.admins import WPAdminsBase
from MaKaC.webinterface.pages.base import WPJinjaMixin
from MaKaC.webinterface.pages.conferences import WPConferenceDefaultDisplayBase
class WPReferenceTypes(WPJinjaMixin, WPAdminsBase):
template_prefix = 'events/'
sidemenu_option = 'reference_types'
class WPEventDisplay(WPJinjaMixin, WPConferenceDefaultDisplayBase):
template_prefix = 'events/'
def _getBody(self, params):
return WPJinjaMixin._getPageContent(self, params)
def getCSSFiles(self):
return WPConferenceDefaultDisplayBase.getCSSFiles(self) + self._asset_env['event_display_sass'].urls()
|
<commit_before># This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from MaKaC.webinterface.pages.admins import WPAdminsBase
from MaKaC.webinterface.pages.base import WPJinjaMixin
from MaKaC.webinterface.pages.conferences import WPConferenceDefaultDisplayBase
class WPReferenceTypes(WPJinjaMixin, WPAdminsBase):
template_prefix = 'events/'
class WPEventDisplay(WPJinjaMixin, WPConferenceDefaultDisplayBase):
template_prefix = 'events/'
def _getBody(self, params):
return WPJinjaMixin._getPageContent(self, params)
def getCSSFiles(self):
return WPConferenceDefaultDisplayBase.getCSSFiles(self) + self._asset_env['event_display_sass'].urls()
<commit_msg>Fix highlighting of "External ID Types" menu entry<commit_after># This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from MaKaC.webinterface.pages.admins import WPAdminsBase
from MaKaC.webinterface.pages.base import WPJinjaMixin
from MaKaC.webinterface.pages.conferences import WPConferenceDefaultDisplayBase
class WPReferenceTypes(WPJinjaMixin, WPAdminsBase):
template_prefix = 'events/'
sidemenu_option = 'reference_types'
class WPEventDisplay(WPJinjaMixin, WPConferenceDefaultDisplayBase):
template_prefix = 'events/'
def _getBody(self, params):
return WPJinjaMixin._getPageContent(self, params)
def getCSSFiles(self):
return WPConferenceDefaultDisplayBase.getCSSFiles(self) + self._asset_env['event_display_sass'].urls()
|
e579b04beb2f3c4fbe3e27d386919f3c8af888e5
|
retrieveData.py
|
retrieveData.py
|
#!/usr/bin/env python
import json, os, requests
from awsauth import S3Auth
key = os.environ.get('UWOPENDATA_APIKEY')
ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY_ID')
SECRET_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
def getData(service):
payload = {'key': key, 'service': service}
r = requests.get('http://api.uwaterloo.ca/public/v1/', params=payload)
return r
foodMenu = getData('FoodMenu').text
requests.put('http://s3.amazonaws.com/uwfoodmenu/foodMenu.txt', data=foodMenu, auth=S3Auth(ACCESS_KEY, SECRET_KEY))
serviceInfo = getData('FoodServices').text
requests.put('http://s3.amazonaws.com/uwfoodmenu/serviceInfo.txt', data=serviceInfo, auth=S3Auth(ACCESS_KEY, SECRET_KEY))
|
#!/usr/bin/env python
import json, os, requests
from awsauth import S3Auth
key = os.environ.get('UWOPENDATA_APIKEY')
ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY_ID')
SECRET_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
def getData(service):
payload = {'key': key, 'service': service}
r = requests.get('http://api.uwaterloo.ca/public/v1/', params=payload)
return r
foodMenu = getData('FoodMenu').text
requests.put('http://s3.amazonaws.com/uwfoodmenu/foodMenu.txt', data=foodMenu, auth=S3Auth(ACCESS_KEY, SECRET_KEY))
# serviceInfo = getData('FoodServices').text
# requests.put('http://s3.amazonaws.com/uwfoodmenu/serviceInfo.txt', data=serviceInfo, auth=S3Auth(ACCESS_KEY, SECRET_KEY))
|
Disable updating serviceInfo when retrieving daily data.
|
Disable updating serviceInfo when retrieving daily data.
|
Python
|
mit
|
alykhank/FoodMenu,alykhank/FoodMenu,alykhank/FoodMenu
|
#!/usr/bin/env python
import json, os, requests
from awsauth import S3Auth
key = os.environ.get('UWOPENDATA_APIKEY')
ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY_ID')
SECRET_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
def getData(service):
payload = {'key': key, 'service': service}
r = requests.get('http://api.uwaterloo.ca/public/v1/', params=payload)
return r
foodMenu = getData('FoodMenu').text
requests.put('http://s3.amazonaws.com/uwfoodmenu/foodMenu.txt', data=foodMenu, auth=S3Auth(ACCESS_KEY, SECRET_KEY))
serviceInfo = getData('FoodServices').text
requests.put('http://s3.amazonaws.com/uwfoodmenu/serviceInfo.txt', data=serviceInfo, auth=S3Auth(ACCESS_KEY, SECRET_KEY))
Disable updating serviceInfo when retrieving daily data.
|
#!/usr/bin/env python
import json, os, requests
from awsauth import S3Auth
key = os.environ.get('UWOPENDATA_APIKEY')
ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY_ID')
SECRET_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
def getData(service):
payload = {'key': key, 'service': service}
r = requests.get('http://api.uwaterloo.ca/public/v1/', params=payload)
return r
foodMenu = getData('FoodMenu').text
requests.put('http://s3.amazonaws.com/uwfoodmenu/foodMenu.txt', data=foodMenu, auth=S3Auth(ACCESS_KEY, SECRET_KEY))
# serviceInfo = getData('FoodServices').text
# requests.put('http://s3.amazonaws.com/uwfoodmenu/serviceInfo.txt', data=serviceInfo, auth=S3Auth(ACCESS_KEY, SECRET_KEY))
|
<commit_before>#!/usr/bin/env python
import json, os, requests
from awsauth import S3Auth
key = os.environ.get('UWOPENDATA_APIKEY')
ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY_ID')
SECRET_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
def getData(service):
payload = {'key': key, 'service': service}
r = requests.get('http://api.uwaterloo.ca/public/v1/', params=payload)
return r
foodMenu = getData('FoodMenu').text
requests.put('http://s3.amazonaws.com/uwfoodmenu/foodMenu.txt', data=foodMenu, auth=S3Auth(ACCESS_KEY, SECRET_KEY))
serviceInfo = getData('FoodServices').text
requests.put('http://s3.amazonaws.com/uwfoodmenu/serviceInfo.txt', data=serviceInfo, auth=S3Auth(ACCESS_KEY, SECRET_KEY))
<commit_msg>Disable updating serviceInfo when retrieving daily data.<commit_after>
|
#!/usr/bin/env python
import json, os, requests
from awsauth import S3Auth
key = os.environ.get('UWOPENDATA_APIKEY')
ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY_ID')
SECRET_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
def getData(service):
payload = {'key': key, 'service': service}
r = requests.get('http://api.uwaterloo.ca/public/v1/', params=payload)
return r
foodMenu = getData('FoodMenu').text
requests.put('http://s3.amazonaws.com/uwfoodmenu/foodMenu.txt', data=foodMenu, auth=S3Auth(ACCESS_KEY, SECRET_KEY))
# serviceInfo = getData('FoodServices').text
# requests.put('http://s3.amazonaws.com/uwfoodmenu/serviceInfo.txt', data=serviceInfo, auth=S3Auth(ACCESS_KEY, SECRET_KEY))
|
#!/usr/bin/env python
import json, os, requests
from awsauth import S3Auth
key = os.environ.get('UWOPENDATA_APIKEY')
ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY_ID')
SECRET_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
def getData(service):
payload = {'key': key, 'service': service}
r = requests.get('http://api.uwaterloo.ca/public/v1/', params=payload)
return r
foodMenu = getData('FoodMenu').text
requests.put('http://s3.amazonaws.com/uwfoodmenu/foodMenu.txt', data=foodMenu, auth=S3Auth(ACCESS_KEY, SECRET_KEY))
serviceInfo = getData('FoodServices').text
requests.put('http://s3.amazonaws.com/uwfoodmenu/serviceInfo.txt', data=serviceInfo, auth=S3Auth(ACCESS_KEY, SECRET_KEY))
Disable updating serviceInfo when retrieving daily data.#!/usr/bin/env python
import json, os, requests
from awsauth import S3Auth
key = os.environ.get('UWOPENDATA_APIKEY')
ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY_ID')
SECRET_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
def getData(service):
payload = {'key': key, 'service': service}
r = requests.get('http://api.uwaterloo.ca/public/v1/', params=payload)
return r
foodMenu = getData('FoodMenu').text
requests.put('http://s3.amazonaws.com/uwfoodmenu/foodMenu.txt', data=foodMenu, auth=S3Auth(ACCESS_KEY, SECRET_KEY))
# serviceInfo = getData('FoodServices').text
# requests.put('http://s3.amazonaws.com/uwfoodmenu/serviceInfo.txt', data=serviceInfo, auth=S3Auth(ACCESS_KEY, SECRET_KEY))
|
<commit_before>#!/usr/bin/env python
import json, os, requests
from awsauth import S3Auth
key = os.environ.get('UWOPENDATA_APIKEY')
ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY_ID')
SECRET_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
def getData(service):
payload = {'key': key, 'service': service}
r = requests.get('http://api.uwaterloo.ca/public/v1/', params=payload)
return r
foodMenu = getData('FoodMenu').text
requests.put('http://s3.amazonaws.com/uwfoodmenu/foodMenu.txt', data=foodMenu, auth=S3Auth(ACCESS_KEY, SECRET_KEY))
serviceInfo = getData('FoodServices').text
requests.put('http://s3.amazonaws.com/uwfoodmenu/serviceInfo.txt', data=serviceInfo, auth=S3Auth(ACCESS_KEY, SECRET_KEY))
<commit_msg>Disable updating serviceInfo when retrieving daily data.<commit_after>#!/usr/bin/env python
import json, os, requests
from awsauth import S3Auth
key = os.environ.get('UWOPENDATA_APIKEY')
ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY_ID')
SECRET_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
def getData(service):
payload = {'key': key, 'service': service}
r = requests.get('http://api.uwaterloo.ca/public/v1/', params=payload)
return r
foodMenu = getData('FoodMenu').text
requests.put('http://s3.amazonaws.com/uwfoodmenu/foodMenu.txt', data=foodMenu, auth=S3Auth(ACCESS_KEY, SECRET_KEY))
# serviceInfo = getData('FoodServices').text
# requests.put('http://s3.amazonaws.com/uwfoodmenu/serviceInfo.txt', data=serviceInfo, auth=S3Auth(ACCESS_KEY, SECRET_KEY))
|
6d3180ffd84e126ee4441a367a48a750d270892e
|
sumy/document/_sentence.py
|
sumy/document/_sentence.py
|
# -*- coding: utf8 -*-
from __future__ import absolute_import
from __future__ import division, print_function, unicode_literals
from itertools import chain
from .._compat import to_unicode, to_string, unicode_compatible
@unicode_compatible
class Sentence(object):
__slots__ = ("_words", "_is_heading",)
def __init__(self, words, is_heading=False):
self._words = tuple(map(to_unicode, words))
self._is_heading = bool(is_heading)
@property
def words(self):
return self._words
@property
def is_heading(self):
return self._is_heading
def __unicode__(self):
return " ".join(self._words)
def __repr__(self):
return to_string("<Sentence: %s>") % self.__str__()
|
# -*- coding: utf8 -*-
from __future__ import absolute_import
from __future__ import division, print_function, unicode_literals
import re
from itertools import chain
from .._compat import to_unicode, to_string, unicode_compatible
_WORD_PATTERN = re.compile(r"^[^\W_]+$", re.UNICODE)
@unicode_compatible
class Sentence(object):
__slots__ = ("_words", "_is_heading",)
def __init__(self, words, is_heading=False):
self._words = tuple(map(to_unicode, words))
self._is_heading = bool(is_heading)
@property
def words(self):
return tuple(filter(self._is_word, self._words))
@property
def is_heading(self):
return self._is_heading
def _is_word(self, word):
return bool(_WORD_PATTERN.search(word))
def __unicode__(self):
return " ".join(self._words)
def __repr__(self):
return to_string("<Sentence: %s>") % self.__str__()
|
Return only alphabetic words from sentence
|
Return only alphabetic words from sentence
|
Python
|
apache-2.0
|
miso-belica/sumy,miso-belica/sumy
|
# -*- coding: utf8 -*-
from __future__ import absolute_import
from __future__ import division, print_function, unicode_literals
from itertools import chain
from .._compat import to_unicode, to_string, unicode_compatible
@unicode_compatible
class Sentence(object):
__slots__ = ("_words", "_is_heading",)
def __init__(self, words, is_heading=False):
self._words = tuple(map(to_unicode, words))
self._is_heading = bool(is_heading)
@property
def words(self):
return self._words
@property
def is_heading(self):
return self._is_heading
def __unicode__(self):
return " ".join(self._words)
def __repr__(self):
return to_string("<Sentence: %s>") % self.__str__()
Return only alphabetic words from sentence
|
# -*- coding: utf8 -*-
from __future__ import absolute_import
from __future__ import division, print_function, unicode_literals
import re
from itertools import chain
from .._compat import to_unicode, to_string, unicode_compatible
_WORD_PATTERN = re.compile(r"^[^\W_]+$", re.UNICODE)
@unicode_compatible
class Sentence(object):
__slots__ = ("_words", "_is_heading",)
def __init__(self, words, is_heading=False):
self._words = tuple(map(to_unicode, words))
self._is_heading = bool(is_heading)
@property
def words(self):
return tuple(filter(self._is_word, self._words))
@property
def is_heading(self):
return self._is_heading
def _is_word(self, word):
return bool(_WORD_PATTERN.search(word))
def __unicode__(self):
return " ".join(self._words)
def __repr__(self):
return to_string("<Sentence: %s>") % self.__str__()
|
<commit_before># -*- coding: utf8 -*-
from __future__ import absolute_import
from __future__ import division, print_function, unicode_literals
from itertools import chain
from .._compat import to_unicode, to_string, unicode_compatible
@unicode_compatible
class Sentence(object):
__slots__ = ("_words", "_is_heading",)
def __init__(self, words, is_heading=False):
self._words = tuple(map(to_unicode, words))
self._is_heading = bool(is_heading)
@property
def words(self):
return self._words
@property
def is_heading(self):
return self._is_heading
def __unicode__(self):
return " ".join(self._words)
def __repr__(self):
return to_string("<Sentence: %s>") % self.__str__()
<commit_msg>Return only alphabetic words from sentence<commit_after>
|
# -*- coding: utf8 -*-
from __future__ import absolute_import
from __future__ import division, print_function, unicode_literals
import re
from itertools import chain
from .._compat import to_unicode, to_string, unicode_compatible
_WORD_PATTERN = re.compile(r"^[^\W_]+$", re.UNICODE)
@unicode_compatible
class Sentence(object):
__slots__ = ("_words", "_is_heading",)
def __init__(self, words, is_heading=False):
self._words = tuple(map(to_unicode, words))
self._is_heading = bool(is_heading)
@property
def words(self):
return tuple(filter(self._is_word, self._words))
@property
def is_heading(self):
return self._is_heading
def _is_word(self, word):
return bool(_WORD_PATTERN.search(word))
def __unicode__(self):
return " ".join(self._words)
def __repr__(self):
return to_string("<Sentence: %s>") % self.__str__()
|
# -*- coding: utf8 -*-
from __future__ import absolute_import
from __future__ import division, print_function, unicode_literals
from itertools import chain
from .._compat import to_unicode, to_string, unicode_compatible
@unicode_compatible
class Sentence(object):
__slots__ = ("_words", "_is_heading",)
def __init__(self, words, is_heading=False):
self._words = tuple(map(to_unicode, words))
self._is_heading = bool(is_heading)
@property
def words(self):
return self._words
@property
def is_heading(self):
return self._is_heading
def __unicode__(self):
return " ".join(self._words)
def __repr__(self):
return to_string("<Sentence: %s>") % self.__str__()
Return only alphabetic words from sentence# -*- coding: utf8 -*-
from __future__ import absolute_import
from __future__ import division, print_function, unicode_literals
import re
from itertools import chain
from .._compat import to_unicode, to_string, unicode_compatible
_WORD_PATTERN = re.compile(r"^[^\W_]+$", re.UNICODE)
@unicode_compatible
class Sentence(object):
__slots__ = ("_words", "_is_heading",)
def __init__(self, words, is_heading=False):
self._words = tuple(map(to_unicode, words))
self._is_heading = bool(is_heading)
@property
def words(self):
return tuple(filter(self._is_word, self._words))
@property
def is_heading(self):
return self._is_heading
def _is_word(self, word):
return bool(_WORD_PATTERN.search(word))
def __unicode__(self):
return " ".join(self._words)
def __repr__(self):
return to_string("<Sentence: %s>") % self.__str__()
|
<commit_before># -*- coding: utf8 -*-
from __future__ import absolute_import
from __future__ import division, print_function, unicode_literals
from itertools import chain
from .._compat import to_unicode, to_string, unicode_compatible
@unicode_compatible
class Sentence(object):
__slots__ = ("_words", "_is_heading",)
def __init__(self, words, is_heading=False):
self._words = tuple(map(to_unicode, words))
self._is_heading = bool(is_heading)
@property
def words(self):
return self._words
@property
def is_heading(self):
return self._is_heading
def __unicode__(self):
return " ".join(self._words)
def __repr__(self):
return to_string("<Sentence: %s>") % self.__str__()
<commit_msg>Return only alphabetic words from sentence<commit_after># -*- coding: utf8 -*-
from __future__ import absolute_import
from __future__ import division, print_function, unicode_literals
import re
from itertools import chain
from .._compat import to_unicode, to_string, unicode_compatible
_WORD_PATTERN = re.compile(r"^[^\W_]+$", re.UNICODE)
@unicode_compatible
class Sentence(object):
__slots__ = ("_words", "_is_heading",)
def __init__(self, words, is_heading=False):
self._words = tuple(map(to_unicode, words))
self._is_heading = bool(is_heading)
@property
def words(self):
return tuple(filter(self._is_word, self._words))
@property
def is_heading(self):
return self._is_heading
def _is_word(self, word):
return bool(_WORD_PATTERN.search(word))
def __unicode__(self):
return " ".join(self._words)
def __repr__(self):
return to_string("<Sentence: %s>") % self.__str__()
|
503924f054f6f81eb08eda9884e5e4adc4df1609
|
cupcake/smush/plot.py
|
cupcake/smush/plot.py
|
"""
User-facing interface for plotting all dimensionality reduction algorithms
"""
def smushplot(data, smusher='PCA', x=1, y=2, n_components=2, marker='o',
marker_order=None, text=False, text_order=None, linewidth=1,
linewidth_order=None, edgecolor='k', edgecolor_order=None,
smusher_kws=None, plot_kws=None):
"""Plot high dimensional data in 2d space
Parameters
----------
data : pandas.DataFrame or numpy.array
A (n_samples, m_features) wide matrix of observations. The samples
(rows) will be plotted relative to the reduced representation of the
features (columns)
smusher : str or object
Either a string specifying a valid dimensionality reduction algorithm
in ``sklearn.decomposition`` or ``sklearn.manifold``, or any object
with ``fit_transform()`` methods.
Notes
-----
"""
if isinstance(smusher, str):
# Need to get appropriate smusher from sklearn given the string
pass
else:
# Assume this is already an initialized sklearn object with the
# ``fit_transform()`` method
pass
|
"""
User-facing interface for plotting all dimensionality reduction algorithms
"""
def smushplot(data, smusher='PCA', x=1, y=2, n_components=2, marker='o',
marker_order=None, text=False, text_order=None, linewidth=1,
linewidth_order=None, edgecolor='k', edgecolor_order=None,
smusher_kws=None, plot_kws=None):
"""Plot high dimensional data in 2d space
Parameters
----------
data : pandas.DataFrame or numpy.array
A (n_samples, m_features) wide matrix of observations. The samples
(rows) will be plotted relative to the reduced representation of the
features (columns)
smusher : str or object
Either a string specifying a valid dimensionality reduction algorithm
in ``sklearn.decomposition`` or ``sklearn.manifold``, or any object
with ``fit_transform()`` methods.
x, y : int
1-based counting of which components to plot as the x- and y-axes. For
example, to plot component 4 on the x-axis and component 10 on the y,
do ``x=4, y=10``.
n_components : int
Number of components to use when reducing dimensionality
Notes
-----
"""
if isinstance(smusher, str):
# Need to get appropriate smusher from sklearn given the string
pass
else:
# Assume this is already an initialized sklearn object with the
# ``fit_transform()`` method
pass
|
Add x, y and n_components to docstring
|
Add x, y and n_components to docstring
|
Python
|
bsd-3-clause
|
olgabot/cupcake
|
"""
User-facing interface for plotting all dimensionality reduction algorithms
"""
def smushplot(data, smusher='PCA', x=1, y=2, n_components=2, marker='o',
marker_order=None, text=False, text_order=None, linewidth=1,
linewidth_order=None, edgecolor='k', edgecolor_order=None,
smusher_kws=None, plot_kws=None):
"""Plot high dimensional data in 2d space
Parameters
----------
data : pandas.DataFrame or numpy.array
A (n_samples, m_features) wide matrix of observations. The samples
(rows) will be plotted relative to the reduced representation of the
features (columns)
smusher : str or object
Either a string specifying a valid dimensionality reduction algorithm
in ``sklearn.decomposition`` or ``sklearn.manifold``, or any object
with ``fit_transform()`` methods.
Notes
-----
"""
if isinstance(smusher, str):
# Need to get appropriate smusher from sklearn given the string
pass
else:
# Assume this is already an initialized sklearn object with the
# ``fit_transform()`` method
pass
Add x, y and n_components to docstring
|
"""
User-facing interface for plotting all dimensionality reduction algorithms
"""
def smushplot(data, smusher='PCA', x=1, y=2, n_components=2, marker='o',
marker_order=None, text=False, text_order=None, linewidth=1,
linewidth_order=None, edgecolor='k', edgecolor_order=None,
smusher_kws=None, plot_kws=None):
"""Plot high dimensional data in 2d space
Parameters
----------
data : pandas.DataFrame or numpy.array
A (n_samples, m_features) wide matrix of observations. The samples
(rows) will be plotted relative to the reduced representation of the
features (columns)
smusher : str or object
Either a string specifying a valid dimensionality reduction algorithm
in ``sklearn.decomposition`` or ``sklearn.manifold``, or any object
with ``fit_transform()`` methods.
x, y : int
1-based counting of which components to plot as the x- and y-axes. For
example, to plot component 4 on the x-axis and component 10 on the y,
do ``x=4, y=10``.
n_components : int
Number of components to use when reducing dimensionality
Notes
-----
"""
if isinstance(smusher, str):
# Need to get appropriate smusher from sklearn given the string
pass
else:
# Assume this is already an initialized sklearn object with the
# ``fit_transform()`` method
pass
|
<commit_before>"""
User-facing interface for plotting all dimensionality reduction algorithms
"""
def smushplot(data, smusher='PCA', x=1, y=2, n_components=2, marker='o',
marker_order=None, text=False, text_order=None, linewidth=1,
linewidth_order=None, edgecolor='k', edgecolor_order=None,
smusher_kws=None, plot_kws=None):
"""Plot high dimensional data in 2d space
Parameters
----------
data : pandas.DataFrame or numpy.array
A (n_samples, m_features) wide matrix of observations. The samples
(rows) will be plotted relative to the reduced representation of the
features (columns)
smusher : str or object
Either a string specifying a valid dimensionality reduction algorithm
in ``sklearn.decomposition`` or ``sklearn.manifold``, or any object
with ``fit_transform()`` methods.
Notes
-----
"""
if isinstance(smusher, str):
# Need to get appropriate smusher from sklearn given the string
pass
else:
# Assume this is already an initialized sklearn object with the
# ``fit_transform()`` method
pass
<commit_msg>Add x, y and n_components to docstring<commit_after>
|
"""
User-facing interface for plotting all dimensionality reduction algorithms
"""
def smushplot(data, smusher='PCA', x=1, y=2, n_components=2, marker='o',
marker_order=None, text=False, text_order=None, linewidth=1,
linewidth_order=None, edgecolor='k', edgecolor_order=None,
smusher_kws=None, plot_kws=None):
"""Plot high dimensional data in 2d space
Parameters
----------
data : pandas.DataFrame or numpy.array
A (n_samples, m_features) wide matrix of observations. The samples
(rows) will be plotted relative to the reduced representation of the
features (columns)
smusher : str or object
Either a string specifying a valid dimensionality reduction algorithm
in ``sklearn.decomposition`` or ``sklearn.manifold``, or any object
with ``fit_transform()`` methods.
x, y : int
1-based counting of which components to plot as the x- and y-axes. For
example, to plot component 4 on the x-axis and component 10 on the y,
do ``x=4, y=10``.
n_components : int
Number of components to use when reducing dimensionality
Notes
-----
"""
if isinstance(smusher, str):
# Need to get appropriate smusher from sklearn given the string
pass
else:
# Assume this is already an initialized sklearn object with the
# ``fit_transform()`` method
pass
|
"""
User-facing interface for plotting all dimensionality reduction algorithms
"""
def smushplot(data, smusher='PCA', x=1, y=2, n_components=2, marker='o',
marker_order=None, text=False, text_order=None, linewidth=1,
linewidth_order=None, edgecolor='k', edgecolor_order=None,
smusher_kws=None, plot_kws=None):
"""Plot high dimensional data in 2d space
Parameters
----------
data : pandas.DataFrame or numpy.array
A (n_samples, m_features) wide matrix of observations. The samples
(rows) will be plotted relative to the reduced representation of the
features (columns)
smusher : str or object
Either a string specifying a valid dimensionality reduction algorithm
in ``sklearn.decomposition`` or ``sklearn.manifold``, or any object
with ``fit_transform()`` methods.
Notes
-----
"""
if isinstance(smusher, str):
# Need to get appropriate smusher from sklearn given the string
pass
else:
# Assume this is already an initialized sklearn object with the
# ``fit_transform()`` method
pass
Add x, y and n_components to docstring"""
User-facing interface for plotting all dimensionality reduction algorithms
"""
def smushplot(data, smusher='PCA', x=1, y=2, n_components=2, marker='o',
marker_order=None, text=False, text_order=None, linewidth=1,
linewidth_order=None, edgecolor='k', edgecolor_order=None,
smusher_kws=None, plot_kws=None):
"""Plot high dimensional data in 2d space
Parameters
----------
data : pandas.DataFrame or numpy.array
A (n_samples, m_features) wide matrix of observations. The samples
(rows) will be plotted relative to the reduced representation of the
features (columns)
smusher : str or object
Either a string specifying a valid dimensionality reduction algorithm
in ``sklearn.decomposition`` or ``sklearn.manifold``, or any object
with ``fit_transform()`` methods.
x, y : int
1-based counting of which components to plot as the x- and y-axes. For
example, to plot component 4 on the x-axis and component 10 on the y,
do ``x=4, y=10``.
n_components : int
Number of components to use when reducing dimensionality
Notes
-----
"""
if isinstance(smusher, str):
# Need to get appropriate smusher from sklearn given the string
pass
else:
# Assume this is already an initialized sklearn object with the
# ``fit_transform()`` method
pass
|
<commit_before>"""
User-facing interface for plotting all dimensionality reduction algorithms
"""
def smushplot(data, smusher='PCA', x=1, y=2, n_components=2, marker='o',
marker_order=None, text=False, text_order=None, linewidth=1,
linewidth_order=None, edgecolor='k', edgecolor_order=None,
smusher_kws=None, plot_kws=None):
"""Plot high dimensional data in 2d space
Parameters
----------
data : pandas.DataFrame or numpy.array
A (n_samples, m_features) wide matrix of observations. The samples
(rows) will be plotted relative to the reduced representation of the
features (columns)
smusher : str or object
Either a string specifying a valid dimensionality reduction algorithm
in ``sklearn.decomposition`` or ``sklearn.manifold``, or any object
with ``fit_transform()`` methods.
Notes
-----
"""
if isinstance(smusher, str):
# Need to get appropriate smusher from sklearn given the string
pass
else:
# Assume this is already an initialized sklearn object with the
# ``fit_transform()`` method
pass
<commit_msg>Add x, y and n_components to docstring<commit_after>"""
User-facing interface for plotting all dimensionality reduction algorithms
"""
def smushplot(data, smusher='PCA', x=1, y=2, n_components=2, marker='o',
marker_order=None, text=False, text_order=None, linewidth=1,
linewidth_order=None, edgecolor='k', edgecolor_order=None,
smusher_kws=None, plot_kws=None):
"""Plot high dimensional data in 2d space
Parameters
----------
data : pandas.DataFrame or numpy.array
A (n_samples, m_features) wide matrix of observations. The samples
(rows) will be plotted relative to the reduced representation of the
features (columns)
smusher : str or object
Either a string specifying a valid dimensionality reduction algorithm
in ``sklearn.decomposition`` or ``sklearn.manifold``, or any object
with ``fit_transform()`` methods.
x, y : int
1-based counting of which components to plot as the x- and y-axes. For
example, to plot component 4 on the x-axis and component 10 on the y,
do ``x=4, y=10``.
n_components : int
Number of components to use when reducing dimensionality
Notes
-----
"""
if isinstance(smusher, str):
# Need to get appropriate smusher from sklearn given the string
pass
else:
# Assume this is already an initialized sklearn object with the
# ``fit_transform()`` method
pass
|
a81c5bf24ab0271c60ed1db97d93c7d2e5ec6234
|
cutplanner/planner.py
|
cutplanner/planner.py
|
import collections
# simple structure to keep track of a specific piece
Piece = collections.namedtuple('Piece', 'id, length')
class Planner(object):
def __init__(self, sizes, needed, loss=0.25):
self.stock = []
self.stock_sizes = sorted(sizes)
self.pieces_needed = needed
self.cut_loss = loss
self.cur_stock = None
def get_largest_stock(self):
return self.stock_sizes[-1]
def cut_piece(self, piece):
""" Record the cut for the given piece """
cur_stock.cut(piece, self.cut_loss)
|
import collections
# simple structure to keep track of a specific piece
Piece = collections.namedtuple('Piece', 'id, length')
class Planner(object):
def __init__(self, sizes, needed, loss=0.25):
self.stock = []
self.stock_sizes = sorted(sizes)
self.pieces_needed = needed
self.cut_loss = loss
self.cur_stock = None
@property
def largest_stock(self):
return self.stock_sizes[-1]
def cut_piece(self, piece):
""" Record the cut for the given piece """
cur_stock.cut(piece, self.cut_loss)
|
Make largest stock a property
|
Make largest stock a property
|
Python
|
mit
|
alanc10n/py-cutplanner
|
import collections
# simple structure to keep track of a specific piece
Piece = collections.namedtuple('Piece', 'id, length')
class Planner(object):
def __init__(self, sizes, needed, loss=0.25):
self.stock = []
self.stock_sizes = sorted(sizes)
self.pieces_needed = needed
self.cut_loss = loss
self.cur_stock = None
def get_largest_stock(self):
return self.stock_sizes[-1]
def cut_piece(self, piece):
""" Record the cut for the given piece """
cur_stock.cut(piece, self.cut_loss)
Make largest stock a property
|
import collections
# simple structure to keep track of a specific piece
Piece = collections.namedtuple('Piece', 'id, length')
class Planner(object):
def __init__(self, sizes, needed, loss=0.25):
self.stock = []
self.stock_sizes = sorted(sizes)
self.pieces_needed = needed
self.cut_loss = loss
self.cur_stock = None
@property
def largest_stock(self):
return self.stock_sizes[-1]
def cut_piece(self, piece):
""" Record the cut for the given piece """
cur_stock.cut(piece, self.cut_loss)
|
<commit_before>import collections
# simple structure to keep track of a specific piece
Piece = collections.namedtuple('Piece', 'id, length')
class Planner(object):
def __init__(self, sizes, needed, loss=0.25):
self.stock = []
self.stock_sizes = sorted(sizes)
self.pieces_needed = needed
self.cut_loss = loss
self.cur_stock = None
def get_largest_stock(self):
return self.stock_sizes[-1]
def cut_piece(self, piece):
""" Record the cut for the given piece """
cur_stock.cut(piece, self.cut_loss)
<commit_msg>Make largest stock a property<commit_after>
|
import collections
# simple structure to keep track of a specific piece
Piece = collections.namedtuple('Piece', 'id, length')
class Planner(object):
def __init__(self, sizes, needed, loss=0.25):
self.stock = []
self.stock_sizes = sorted(sizes)
self.pieces_needed = needed
self.cut_loss = loss
self.cur_stock = None
@property
def largest_stock(self):
return self.stock_sizes[-1]
def cut_piece(self, piece):
""" Record the cut for the given piece """
cur_stock.cut(piece, self.cut_loss)
|
import collections
# simple structure to keep track of a specific piece
Piece = collections.namedtuple('Piece', 'id, length')
class Planner(object):
def __init__(self, sizes, needed, loss=0.25):
self.stock = []
self.stock_sizes = sorted(sizes)
self.pieces_needed = needed
self.cut_loss = loss
self.cur_stock = None
def get_largest_stock(self):
return self.stock_sizes[-1]
def cut_piece(self, piece):
""" Record the cut for the given piece """
cur_stock.cut(piece, self.cut_loss)
Make largest stock a propertyimport collections
# simple structure to keep track of a specific piece
Piece = collections.namedtuple('Piece', 'id, length')
class Planner(object):
def __init__(self, sizes, needed, loss=0.25):
self.stock = []
self.stock_sizes = sorted(sizes)
self.pieces_needed = needed
self.cut_loss = loss
self.cur_stock = None
@property
def largest_stock(self):
return self.stock_sizes[-1]
def cut_piece(self, piece):
""" Record the cut for the given piece """
cur_stock.cut(piece, self.cut_loss)
|
<commit_before>import collections
# simple structure to keep track of a specific piece
Piece = collections.namedtuple('Piece', 'id, length')
class Planner(object):
def __init__(self, sizes, needed, loss=0.25):
self.stock = []
self.stock_sizes = sorted(sizes)
self.pieces_needed = needed
self.cut_loss = loss
self.cur_stock = None
def get_largest_stock(self):
return self.stock_sizes[-1]
def cut_piece(self, piece):
""" Record the cut for the given piece """
cur_stock.cut(piece, self.cut_loss)
<commit_msg>Make largest stock a property<commit_after>import collections
# simple structure to keep track of a specific piece
Piece = collections.namedtuple('Piece', 'id, length')
class Planner(object):
def __init__(self, sizes, needed, loss=0.25):
self.stock = []
self.stock_sizes = sorted(sizes)
self.pieces_needed = needed
self.cut_loss = loss
self.cur_stock = None
@property
def largest_stock(self):
return self.stock_sizes[-1]
def cut_piece(self, piece):
""" Record the cut for the given piece """
cur_stock.cut(piece, self.cut_loss)
|
75d7441f90e077eeeb955e4eb0c514a1736a88fb
|
tohu/v3/utils.py
|
tohu/v3/utils.py
|
__all__ = ['identity', 'print_generated_sequence']
def identity(x):
"""
Helper function which returns its argument unchanged.
That is, `identity(x)` returns `x` for any input `x`.
"""
return x
def print_generated_sequence(gen, num, *, sep=", ", seed=None):
"""
Helper function which prints a sequence of `num` items
produced by the random generator `gen`.
"""
if seed:
gen.reset(seed)
elems = [str(next(gen)) for _ in range(num)]
sep_initial = "\n\n" if '\n' in sep else " "
print("Generated sequence:{}{}".format(sep_initial, sep.join(elems)))
|
from collections import namedtuple
__all__ = ['identity', 'print_generated_sequence']
def identity(x):
"""
Helper function which returns its argument unchanged.
That is, `identity(x)` returns `x` for any input `x`.
"""
return x
def print_generated_sequence(gen, num, *, sep=", ", seed=None):
"""
Helper function which prints a sequence of `num` items
produced by the random generator `gen`.
"""
if seed:
gen.reset(seed)
elems = [str(next(gen)) for _ in range(num)]
sep_initial = "\n\n" if '\n' in sep else " "
print("Generated sequence:{}{}".format(sep_initial, sep.join(elems)))
def make_dummy_tuples(chars='abcde'):
Quux = namedtuple('Quux', ['x', 'y'])
some_tuples = [Quux((c*2).upper(), c*2) for c in chars]
return some_tuples
|
Add helper function to produce some dummy tuples (for testing and debugging)
|
Add helper function to produce some dummy tuples (for testing and debugging)
|
Python
|
mit
|
maxalbert/tohu
|
__all__ = ['identity', 'print_generated_sequence']
def identity(x):
"""
Helper function which returns its argument unchanged.
That is, `identity(x)` returns `x` for any input `x`.
"""
return x
def print_generated_sequence(gen, num, *, sep=", ", seed=None):
"""
Helper function which prints a sequence of `num` items
produced by the random generator `gen`.
"""
if seed:
gen.reset(seed)
elems = [str(next(gen)) for _ in range(num)]
sep_initial = "\n\n" if '\n' in sep else " "
print("Generated sequence:{}{}".format(sep_initial, sep.join(elems)))Add helper function to produce some dummy tuples (for testing and debugging)
|
from collections import namedtuple
__all__ = ['identity', 'print_generated_sequence']
def identity(x):
"""
Helper function which returns its argument unchanged.
That is, `identity(x)` returns `x` for any input `x`.
"""
return x
def print_generated_sequence(gen, num, *, sep=", ", seed=None):
"""
Helper function which prints a sequence of `num` items
produced by the random generator `gen`.
"""
if seed:
gen.reset(seed)
elems = [str(next(gen)) for _ in range(num)]
sep_initial = "\n\n" if '\n' in sep else " "
print("Generated sequence:{}{}".format(sep_initial, sep.join(elems)))
def make_dummy_tuples(chars='abcde'):
Quux = namedtuple('Quux', ['x', 'y'])
some_tuples = [Quux((c*2).upper(), c*2) for c in chars]
return some_tuples
|
<commit_before>__all__ = ['identity', 'print_generated_sequence']
def identity(x):
"""
Helper function which returns its argument unchanged.
That is, `identity(x)` returns `x` for any input `x`.
"""
return x
def print_generated_sequence(gen, num, *, sep=", ", seed=None):
"""
Helper function which prints a sequence of `num` items
produced by the random generator `gen`.
"""
if seed:
gen.reset(seed)
elems = [str(next(gen)) for _ in range(num)]
sep_initial = "\n\n" if '\n' in sep else " "
print("Generated sequence:{}{}".format(sep_initial, sep.join(elems)))<commit_msg>Add helper function to produce some dummy tuples (for testing and debugging)<commit_after>
|
from collections import namedtuple
__all__ = ['identity', 'print_generated_sequence']
def identity(x):
"""
Helper function which returns its argument unchanged.
That is, `identity(x)` returns `x` for any input `x`.
"""
return x
def print_generated_sequence(gen, num, *, sep=", ", seed=None):
"""
Helper function which prints a sequence of `num` items
produced by the random generator `gen`.
"""
if seed:
gen.reset(seed)
elems = [str(next(gen)) for _ in range(num)]
sep_initial = "\n\n" if '\n' in sep else " "
print("Generated sequence:{}{}".format(sep_initial, sep.join(elems)))
def make_dummy_tuples(chars='abcde'):
Quux = namedtuple('Quux', ['x', 'y'])
some_tuples = [Quux((c*2).upper(), c*2) for c in chars]
return some_tuples
|
__all__ = ['identity', 'print_generated_sequence']
def identity(x):
"""
Helper function which returns its argument unchanged.
That is, `identity(x)` returns `x` for any input `x`.
"""
return x
def print_generated_sequence(gen, num, *, sep=", ", seed=None):
"""
Helper function which prints a sequence of `num` items
produced by the random generator `gen`.
"""
if seed:
gen.reset(seed)
elems = [str(next(gen)) for _ in range(num)]
sep_initial = "\n\n" if '\n' in sep else " "
print("Generated sequence:{}{}".format(sep_initial, sep.join(elems)))Add helper function to produce some dummy tuples (for testing and debugging)from collections import namedtuple
__all__ = ['identity', 'print_generated_sequence']
def identity(x):
"""
Helper function which returns its argument unchanged.
That is, `identity(x)` returns `x` for any input `x`.
"""
return x
def print_generated_sequence(gen, num, *, sep=", ", seed=None):
"""
Helper function which prints a sequence of `num` items
produced by the random generator `gen`.
"""
if seed:
gen.reset(seed)
elems = [str(next(gen)) for _ in range(num)]
sep_initial = "\n\n" if '\n' in sep else " "
print("Generated sequence:{}{}".format(sep_initial, sep.join(elems)))
def make_dummy_tuples(chars='abcde'):
Quux = namedtuple('Quux', ['x', 'y'])
some_tuples = [Quux((c*2).upper(), c*2) for c in chars]
return some_tuples
|
<commit_before>__all__ = ['identity', 'print_generated_sequence']
def identity(x):
"""
Helper function which returns its argument unchanged.
That is, `identity(x)` returns `x` for any input `x`.
"""
return x
def print_generated_sequence(gen, num, *, sep=", ", seed=None):
"""
Helper function which prints a sequence of `num` items
produced by the random generator `gen`.
"""
if seed:
gen.reset(seed)
elems = [str(next(gen)) for _ in range(num)]
sep_initial = "\n\n" if '\n' in sep else " "
print("Generated sequence:{}{}".format(sep_initial, sep.join(elems)))<commit_msg>Add helper function to produce some dummy tuples (for testing and debugging)<commit_after>from collections import namedtuple
__all__ = ['identity', 'print_generated_sequence']
def identity(x):
"""
Helper function which returns its argument unchanged.
That is, `identity(x)` returns `x` for any input `x`.
"""
return x
def print_generated_sequence(gen, num, *, sep=", ", seed=None):
"""
Helper function which prints a sequence of `num` items
produced by the random generator `gen`.
"""
if seed:
gen.reset(seed)
elems = [str(next(gen)) for _ in range(num)]
sep_initial = "\n\n" if '\n' in sep else " "
print("Generated sequence:{}{}".format(sep_initial, sep.join(elems)))
def make_dummy_tuples(chars='abcde'):
Quux = namedtuple('Quux', ['x', 'y'])
some_tuples = [Quux((c*2).upper(), c*2) for c in chars]
return some_tuples
|
1b252224b67ea7e9a9f9ac3e1a66b4170bdedef4
|
django_lightweight_queue/management/commands/queue_runner.py
|
django_lightweight_queue/management/commands/queue_runner.py
|
import logging
from optparse import make_option
from django.core.management.base import NoArgsCommand
from ...utils import get_backend
class Command(NoArgsCommand):
option_list = NoArgsCommand.option_list + (
make_option('--pidfile', action='store', dest='pidfile', default=None,
help="Fork and write pidfile to this file."),
)
def handle_noargs(self, **options):
level = {
'0': logging.WARNING,
'1': logging.INFO,
'2': logging.DEBUG,
}[options['verbosity']]
logging.basicConfig(level=level, format='%(levelname).1s: %(message)s')
logging.info("Starting queue runner")
backend = get_backend()
logging.info("Started backend %s", backend)
while True:
try:
logging.debug("Checking backend for items")
job = backend.dequeue(1)
except KeyboardInterrupt:
return
if job is not None:
job.run()
|
import logging
from optparse import make_option
from django.core.management.base import NoArgsCommand
from ...utils import get_backend, get_middleware
class Command(NoArgsCommand):
option_list = NoArgsCommand.option_list + (
make_option('--pidfile', action='store', dest='pidfile', default=None,
help="Fork and write pidfile to this file."),
)
def handle_noargs(self, **options):
level = {
'0': logging.WARNING,
'1': logging.INFO,
'2': logging.DEBUG,
}[options['verbosity']]
logging.basicConfig(level=level, format='%(levelname).1s: %(message)s')
logging.info("Starting queue runner")
backend = get_backend()
logging.info("Started backend %s", backend)
get_middleware()
logging.info("Loaded middleware")
while True:
try:
logging.debug("Checking backend for items")
job = backend.dequeue(1)
except KeyboardInterrupt:
return
if job is not None:
job.run()
|
Load middleware before we fork.
|
Load middleware before we fork.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@thread.com>
|
Python
|
bsd-3-clause
|
lamby/django-lightweight-queue,thread/django-lightweight-queue,thread/django-lightweight-queue,prophile/django-lightweight-queue,prophile/django-lightweight-queue
|
import logging
from optparse import make_option
from django.core.management.base import NoArgsCommand
from ...utils import get_backend
class Command(NoArgsCommand):
option_list = NoArgsCommand.option_list + (
make_option('--pidfile', action='store', dest='pidfile', default=None,
help="Fork and write pidfile to this file."),
)
def handle_noargs(self, **options):
level = {
'0': logging.WARNING,
'1': logging.INFO,
'2': logging.DEBUG,
}[options['verbosity']]
logging.basicConfig(level=level, format='%(levelname).1s: %(message)s')
logging.info("Starting queue runner")
backend = get_backend()
logging.info("Started backend %s", backend)
while True:
try:
logging.debug("Checking backend for items")
job = backend.dequeue(1)
except KeyboardInterrupt:
return
if job is not None:
job.run()
Load middleware before we fork.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@thread.com>
|
import logging
from optparse import make_option
from django.core.management.base import NoArgsCommand
from ...utils import get_backend, get_middleware
class Command(NoArgsCommand):
option_list = NoArgsCommand.option_list + (
make_option('--pidfile', action='store', dest='pidfile', default=None,
help="Fork and write pidfile to this file."),
)
def handle_noargs(self, **options):
level = {
'0': logging.WARNING,
'1': logging.INFO,
'2': logging.DEBUG,
}[options['verbosity']]
logging.basicConfig(level=level, format='%(levelname).1s: %(message)s')
logging.info("Starting queue runner")
backend = get_backend()
logging.info("Started backend %s", backend)
get_middleware()
logging.info("Loaded middleware")
while True:
try:
logging.debug("Checking backend for items")
job = backend.dequeue(1)
except KeyboardInterrupt:
return
if job is not None:
job.run()
|
<commit_before>import logging
from optparse import make_option
from django.core.management.base import NoArgsCommand
from ...utils import get_backend
class Command(NoArgsCommand):
option_list = NoArgsCommand.option_list + (
make_option('--pidfile', action='store', dest='pidfile', default=None,
help="Fork and write pidfile to this file."),
)
def handle_noargs(self, **options):
level = {
'0': logging.WARNING,
'1': logging.INFO,
'2': logging.DEBUG,
}[options['verbosity']]
logging.basicConfig(level=level, format='%(levelname).1s: %(message)s')
logging.info("Starting queue runner")
backend = get_backend()
logging.info("Started backend %s", backend)
while True:
try:
logging.debug("Checking backend for items")
job = backend.dequeue(1)
except KeyboardInterrupt:
return
if job is not None:
job.run()
<commit_msg>Load middleware before we fork.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@thread.com><commit_after>
|
import logging
from optparse import make_option
from django.core.management.base import NoArgsCommand
from ...utils import get_backend, get_middleware
class Command(NoArgsCommand):
option_list = NoArgsCommand.option_list + (
make_option('--pidfile', action='store', dest='pidfile', default=None,
help="Fork and write pidfile to this file."),
)
def handle_noargs(self, **options):
level = {
'0': logging.WARNING,
'1': logging.INFO,
'2': logging.DEBUG,
}[options['verbosity']]
logging.basicConfig(level=level, format='%(levelname).1s: %(message)s')
logging.info("Starting queue runner")
backend = get_backend()
logging.info("Started backend %s", backend)
get_middleware()
logging.info("Loaded middleware")
while True:
try:
logging.debug("Checking backend for items")
job = backend.dequeue(1)
except KeyboardInterrupt:
return
if job is not None:
job.run()
|
import logging
from optparse import make_option
from django.core.management.base import NoArgsCommand
from ...utils import get_backend
class Command(NoArgsCommand):
option_list = NoArgsCommand.option_list + (
make_option('--pidfile', action='store', dest='pidfile', default=None,
help="Fork and write pidfile to this file."),
)
def handle_noargs(self, **options):
level = {
'0': logging.WARNING,
'1': logging.INFO,
'2': logging.DEBUG,
}[options['verbosity']]
logging.basicConfig(level=level, format='%(levelname).1s: %(message)s')
logging.info("Starting queue runner")
backend = get_backend()
logging.info("Started backend %s", backend)
while True:
try:
logging.debug("Checking backend for items")
job = backend.dequeue(1)
except KeyboardInterrupt:
return
if job is not None:
job.run()
Load middleware before we fork.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@thread.com>import logging
from optparse import make_option
from django.core.management.base import NoArgsCommand
from ...utils import get_backend, get_middleware
class Command(NoArgsCommand):
option_list = NoArgsCommand.option_list + (
make_option('--pidfile', action='store', dest='pidfile', default=None,
help="Fork and write pidfile to this file."),
)
def handle_noargs(self, **options):
level = {
'0': logging.WARNING,
'1': logging.INFO,
'2': logging.DEBUG,
}[options['verbosity']]
logging.basicConfig(level=level, format='%(levelname).1s: %(message)s')
logging.info("Starting queue runner")
backend = get_backend()
logging.info("Started backend %s", backend)
get_middleware()
logging.info("Loaded middleware")
while True:
try:
logging.debug("Checking backend for items")
job = backend.dequeue(1)
except KeyboardInterrupt:
return
if job is not None:
job.run()
|
<commit_before>import logging
from optparse import make_option
from django.core.management.base import NoArgsCommand
from ...utils import get_backend
class Command(NoArgsCommand):
option_list = NoArgsCommand.option_list + (
make_option('--pidfile', action='store', dest='pidfile', default=None,
help="Fork and write pidfile to this file."),
)
def handle_noargs(self, **options):
level = {
'0': logging.WARNING,
'1': logging.INFO,
'2': logging.DEBUG,
}[options['verbosity']]
logging.basicConfig(level=level, format='%(levelname).1s: %(message)s')
logging.info("Starting queue runner")
backend = get_backend()
logging.info("Started backend %s", backend)
while True:
try:
logging.debug("Checking backend for items")
job = backend.dequeue(1)
except KeyboardInterrupt:
return
if job is not None:
job.run()
<commit_msg>Load middleware before we fork.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@thread.com><commit_after>import logging
from optparse import make_option
from django.core.management.base import NoArgsCommand
from ...utils import get_backend, get_middleware
class Command(NoArgsCommand):
option_list = NoArgsCommand.option_list + (
make_option('--pidfile', action='store', dest='pidfile', default=None,
help="Fork and write pidfile to this file."),
)
def handle_noargs(self, **options):
level = {
'0': logging.WARNING,
'1': logging.INFO,
'2': logging.DEBUG,
}[options['verbosity']]
logging.basicConfig(level=level, format='%(levelname).1s: %(message)s')
logging.info("Starting queue runner")
backend = get_backend()
logging.info("Started backend %s", backend)
get_middleware()
logging.info("Loaded middleware")
while True:
try:
logging.debug("Checking backend for items")
job = backend.dequeue(1)
except KeyboardInterrupt:
return
if job is not None:
job.run()
|
9e0ab4bfcd9e22447e66bb87f2ed849ffcd0c57a
|
shutter/service.py
|
shutter/service.py
|
class Shutter(object):
def setup_database(self, pool):
self.__dbpool = pool
def snapshots(self, url):
def _get_snapshot_urls(txn, url):
txn.execute("""SELECT s.file_path
FROM shutter.urls u
,shutter.snapshots s
WHERE u.url = %s
AND u.id = s.url_id
ORDER BY s.created_at DESC
""", [url])
return txn.fetchall()
return self.__dbpool.runInteraction(_get_snapshot_urls, url)
|
class Shutter(object):
def setup_database(self, pool):
self.__dbpool = pool
def snapshots(self, url):
def _get_snapshot_urls(txn, url):
txn.execute("""SELECT s.file_path
FROM shutter.urls u
,shutter.snapshots s
WHERE u.url = %s
AND u.id = s.url_id
ORDER BY s.created_at DESC
""", [url])
return [column[0] for column in txn.fetchall()]
return self.__dbpool.runInteraction(_get_snapshot_urls, url)
|
Remove additional nesting from Shutter.snapshots results
|
Remove additional nesting from Shutter.snapshots results
In order to remove the bug we flatten the return value of txn.fetchall().
After the change rerunning the tests confirms that the service is behaving accordingly to our expectations.
|
Python
|
bsd-3-clause
|
mulander/shutter
|
class Shutter(object):
def setup_database(self, pool):
self.__dbpool = pool
def snapshots(self, url):
def _get_snapshot_urls(txn, url):
txn.execute("""SELECT s.file_path
FROM shutter.urls u
,shutter.snapshots s
WHERE u.url = %s
AND u.id = s.url_id
ORDER BY s.created_at DESC
""", [url])
return txn.fetchall()
return self.__dbpool.runInteraction(_get_snapshot_urls, url)Remove additional nesting from Shutter.snapshots results
In order to remove the bug we flatten the return value of txn.fetchall().
After the change rerunning the tests confirms that the service is behaving accordingly to our expectations.
|
class Shutter(object):
def setup_database(self, pool):
self.__dbpool = pool
def snapshots(self, url):
def _get_snapshot_urls(txn, url):
txn.execute("""SELECT s.file_path
FROM shutter.urls u
,shutter.snapshots s
WHERE u.url = %s
AND u.id = s.url_id
ORDER BY s.created_at DESC
""", [url])
return [column[0] for column in txn.fetchall()]
return self.__dbpool.runInteraction(_get_snapshot_urls, url)
|
<commit_before>class Shutter(object):
def setup_database(self, pool):
self.__dbpool = pool
def snapshots(self, url):
def _get_snapshot_urls(txn, url):
txn.execute("""SELECT s.file_path
FROM shutter.urls u
,shutter.snapshots s
WHERE u.url = %s
AND u.id = s.url_id
ORDER BY s.created_at DESC
""", [url])
return txn.fetchall()
return self.__dbpool.runInteraction(_get_snapshot_urls, url)<commit_msg>Remove additional nesting from Shutter.snapshots results
In order to remove the bug we flatten the return value of txn.fetchall().
After the change rerunning the tests confirms that the service is behaving accordingly to our expectations.<commit_after>
|
class Shutter(object):
def setup_database(self, pool):
self.__dbpool = pool
def snapshots(self, url):
def _get_snapshot_urls(txn, url):
txn.execute("""SELECT s.file_path
FROM shutter.urls u
,shutter.snapshots s
WHERE u.url = %s
AND u.id = s.url_id
ORDER BY s.created_at DESC
""", [url])
return [column[0] for column in txn.fetchall()]
return self.__dbpool.runInteraction(_get_snapshot_urls, url)
|
class Shutter(object):
def setup_database(self, pool):
self.__dbpool = pool
def snapshots(self, url):
def _get_snapshot_urls(txn, url):
txn.execute("""SELECT s.file_path
FROM shutter.urls u
,shutter.snapshots s
WHERE u.url = %s
AND u.id = s.url_id
ORDER BY s.created_at DESC
""", [url])
return txn.fetchall()
return self.__dbpool.runInteraction(_get_snapshot_urls, url)Remove additional nesting from Shutter.snapshots results
In order to remove the bug we flatten the return value of txn.fetchall().
After the change rerunning the tests confirms that the service is behaving accordingly to our expectations.class Shutter(object):
def setup_database(self, pool):
self.__dbpool = pool
def snapshots(self, url):
def _get_snapshot_urls(txn, url):
txn.execute("""SELECT s.file_path
FROM shutter.urls u
,shutter.snapshots s
WHERE u.url = %s
AND u.id = s.url_id
ORDER BY s.created_at DESC
""", [url])
return [column[0] for column in txn.fetchall()]
return self.__dbpool.runInteraction(_get_snapshot_urls, url)
|
<commit_before>class Shutter(object):
def setup_database(self, pool):
self.__dbpool = pool
def snapshots(self, url):
def _get_snapshot_urls(txn, url):
txn.execute("""SELECT s.file_path
FROM shutter.urls u
,shutter.snapshots s
WHERE u.url = %s
AND u.id = s.url_id
ORDER BY s.created_at DESC
""", [url])
return txn.fetchall()
return self.__dbpool.runInteraction(_get_snapshot_urls, url)<commit_msg>Remove additional nesting from Shutter.snapshots results
In order to remove the bug we flatten the return value of txn.fetchall().
After the change rerunning the tests confirms that the service is behaving accordingly to our expectations.<commit_after>class Shutter(object):
def setup_database(self, pool):
self.__dbpool = pool
def snapshots(self, url):
def _get_snapshot_urls(txn, url):
txn.execute("""SELECT s.file_path
FROM shutter.urls u
,shutter.snapshots s
WHERE u.url = %s
AND u.id = s.url_id
ORDER BY s.created_at DESC
""", [url])
return [column[0] for column in txn.fetchall()]
return self.__dbpool.runInteraction(_get_snapshot_urls, url)
|
2a8a39ef8ca1e8bca7c1e36783d1e0bc0a43df26
|
todo/__init__.py
|
todo/__init__.py
|
"""django todo"""
__version__ = '1.4.dev'
__author__ = 'Scot Hacker'
__email__ = 'shacker@birdhouse.org'
__url__ = 'https://github.com/shacker/django-todo'
__license__ = 'BSD License'
|
"""django todo"""
__version__ = '1.4'
__author__ = 'Scot Hacker'
__email__ = 'shacker@birdhouse.org'
__url__ = 'https://github.com/shacker/django-todo'
__license__ = 'BSD License'
|
Fix version number to upload to PyPI
|
Fix version number to upload to PyPI
|
Python
|
bsd-3-clause
|
carlosedb/django-todo,shacker/django-todo,jwiltshire/django-todo,carlosedb/django-todo,shacker/django-todo,carlosedb/django-todo,jwiltshire/django-todo,jwiltshire/django-todo,shacker/django-todo
|
"""django todo"""
__version__ = '1.4.dev'
__author__ = 'Scot Hacker'
__email__ = 'shacker@birdhouse.org'
__url__ = 'https://github.com/shacker/django-todo'
__license__ = 'BSD License'
Fix version number to upload to PyPI
|
"""django todo"""
__version__ = '1.4'
__author__ = 'Scot Hacker'
__email__ = 'shacker@birdhouse.org'
__url__ = 'https://github.com/shacker/django-todo'
__license__ = 'BSD License'
|
<commit_before>"""django todo"""
__version__ = '1.4.dev'
__author__ = 'Scot Hacker'
__email__ = 'shacker@birdhouse.org'
__url__ = 'https://github.com/shacker/django-todo'
__license__ = 'BSD License'
<commit_msg>Fix version number to upload to PyPI<commit_after>
|
"""django todo"""
__version__ = '1.4'
__author__ = 'Scot Hacker'
__email__ = 'shacker@birdhouse.org'
__url__ = 'https://github.com/shacker/django-todo'
__license__ = 'BSD License'
|
"""django todo"""
__version__ = '1.4.dev'
__author__ = 'Scot Hacker'
__email__ = 'shacker@birdhouse.org'
__url__ = 'https://github.com/shacker/django-todo'
__license__ = 'BSD License'
Fix version number to upload to PyPI"""django todo"""
__version__ = '1.4'
__author__ = 'Scot Hacker'
__email__ = 'shacker@birdhouse.org'
__url__ = 'https://github.com/shacker/django-todo'
__license__ = 'BSD License'
|
<commit_before>"""django todo"""
__version__ = '1.4.dev'
__author__ = 'Scot Hacker'
__email__ = 'shacker@birdhouse.org'
__url__ = 'https://github.com/shacker/django-todo'
__license__ = 'BSD License'
<commit_msg>Fix version number to upload to PyPI<commit_after>"""django todo"""
__version__ = '1.4'
__author__ = 'Scot Hacker'
__email__ = 'shacker@birdhouse.org'
__url__ = 'https://github.com/shacker/django-todo'
__license__ = 'BSD License'
|
a1cd2c326f9dba608ee3c6ce82dd425e93e8265d
|
python/xchainer/__init__.py
|
python/xchainer/__init__.py
|
from xchainer._core import * # NOQA
_global_context = Context()
_global_context.get_backend('native')
set_global_default_context(_global_context)
|
from xchainer._core import * # NOQA
_global_context = Context()
set_global_default_context(_global_context)
set_default_device('native')
|
Set the default device to native in Python binding
|
Set the default device to native in Python binding
|
Python
|
mit
|
ktnyt/chainer,hvy/chainer,hvy/chainer,pfnet/chainer,chainer/chainer,ktnyt/chainer,jnishi/chainer,okuta/chainer,hvy/chainer,ktnyt/chainer,keisuke-umezawa/chainer,keisuke-umezawa/chainer,wkentaro/chainer,ktnyt/chainer,niboshi/chainer,keisuke-umezawa/chainer,okuta/chainer,okuta/chainer,hvy/chainer,tkerola/chainer,chainer/chainer,niboshi/chainer,chainer/chainer,niboshi/chainer,chainer/chainer,wkentaro/chainer,niboshi/chainer,jnishi/chainer,jnishi/chainer,keisuke-umezawa/chainer,jnishi/chainer,wkentaro/chainer,wkentaro/chainer,okuta/chainer
|
from xchainer._core import * # NOQA
_global_context = Context()
_global_context.get_backend('native')
set_global_default_context(_global_context)
Set the default device to native in Python binding
|
from xchainer._core import * # NOQA
_global_context = Context()
set_global_default_context(_global_context)
set_default_device('native')
|
<commit_before>from xchainer._core import * # NOQA
_global_context = Context()
_global_context.get_backend('native')
set_global_default_context(_global_context)
<commit_msg>Set the default device to native in Python binding<commit_after>
|
from xchainer._core import *  # NOQA
# Create the process-wide context and register it as the default so that
# subsequent xchainer calls do not need an explicit context argument.
_global_context = Context()
set_global_default_context(_global_context)
# Make the CPU ("native") device the default device for operations.
set_default_device('native')
|
from xchainer._core import * # NOQA
_global_context = Context()
_global_context.get_backend('native')
set_global_default_context(_global_context)
Set the default device to native in Python bindingfrom xchainer._core import * # NOQA
_global_context = Context()
set_global_default_context(_global_context)
set_default_device('native')
|
<commit_before>from xchainer._core import * # NOQA
_global_context = Context()
_global_context.get_backend('native')
set_global_default_context(_global_context)
<commit_msg>Set the default device to native in Python binding<commit_after>from xchainer._core import * # NOQA
_global_context = Context()
set_global_default_context(_global_context)
set_default_device('native')
|
99dd45582cba9f54a5cc9042812d255fe57b1222
|
oauthclientbridge/__init__.py
|
oauthclientbridge/__init__.py
|
# flake8: noqa
from flask import Flask
from werkzeug.contrib.fixers import ProxyFix
__version__ = '1.0.1'
app = Flask(__name__)
app.config.from_object('oauthclientbridge.default_settings')
app.config.from_envvar('OAUTH_SETTINGS', silent=True)
if app.config['OAUTH_NUM_PROXIES']:
wrapper = ProxyFix(app.wsgi_app, app.config['OAUTH_NUM_PROXIES'])
app.wsgi_app = wrapper # type: ignore
try:
import sentry_sdk
from sentry_sdk.integrations.flask import FlaskIntegration
if app.config['OAUTH_SENTRY_DSN']:
sentry_sdk.init(
dsn=app.config['OAUTH_SENTRY_DSN'],
integrations=[FlaskIntegration()],
)
except ImportError as e:
app.logger.info('Failed to import sentry: %s', e)
import oauthclientbridge.cli
import oauthclientbridge.logging
import oauthclientbridge.views
|
# flake8: noqa
from flask import Flask
try:
from werkzeug.middleware.proxy_fix import ProxyFix
except ImportError:
from werkzeug.contrib.fixers import ProxyFix
__version__ = '1.0.1'
app = Flask(__name__)
app.config.from_object('oauthclientbridge.default_settings')
app.config.from_envvar('OAUTH_SETTINGS', silent=True)
if app.config['OAUTH_NUM_PROXIES']:
wrapper = ProxyFix(app.wsgi_app, app.config['OAUTH_NUM_PROXIES'])
app.wsgi_app = wrapper # type: ignore
try:
import sentry_sdk
from sentry_sdk.integrations.flask import FlaskIntegration
if app.config['OAUTH_SENTRY_DSN']:
sentry_sdk.init(
dsn=app.config['OAUTH_SENTRY_DSN'],
integrations=[FlaskIntegration()],
)
except ImportError as e:
app.logger.info('Failed to import sentry: %s', e)
import oauthclientbridge.cli
import oauthclientbridge.logging
import oauthclientbridge.views
|
Support new location of ProxyFix helper
|
Support new location of ProxyFix helper
|
Python
|
apache-2.0
|
adamcik/oauthclientbridge
|
# flake8: noqa
from flask import Flask
from werkzeug.contrib.fixers import ProxyFix
__version__ = '1.0.1'
app = Flask(__name__)
app.config.from_object('oauthclientbridge.default_settings')
app.config.from_envvar('OAUTH_SETTINGS', silent=True)
if app.config['OAUTH_NUM_PROXIES']:
wrapper = ProxyFix(app.wsgi_app, app.config['OAUTH_NUM_PROXIES'])
app.wsgi_app = wrapper # type: ignore
try:
import sentry_sdk
from sentry_sdk.integrations.flask import FlaskIntegration
if app.config['OAUTH_SENTRY_DSN']:
sentry_sdk.init(
dsn=app.config['OAUTH_SENTRY_DSN'],
integrations=[FlaskIntegration()],
)
except ImportError as e:
app.logger.info('Failed to import sentry: %s', e)
import oauthclientbridge.cli
import oauthclientbridge.logging
import oauthclientbridge.views
Support new location of ProxyFix helper
|
# flake8: noqa
from flask import Flask
try:
from werkzeug.middleware.proxy_fix import ProxyFix
except ImportError:
from werkzeug.contrib.fixers import ProxyFix
__version__ = '1.0.1'
app = Flask(__name__)
app.config.from_object('oauthclientbridge.default_settings')
app.config.from_envvar('OAUTH_SETTINGS', silent=True)
if app.config['OAUTH_NUM_PROXIES']:
wrapper = ProxyFix(app.wsgi_app, app.config['OAUTH_NUM_PROXIES'])
app.wsgi_app = wrapper # type: ignore
try:
import sentry_sdk
from sentry_sdk.integrations.flask import FlaskIntegration
if app.config['OAUTH_SENTRY_DSN']:
sentry_sdk.init(
dsn=app.config['OAUTH_SENTRY_DSN'],
integrations=[FlaskIntegration()],
)
except ImportError as e:
app.logger.info('Failed to import sentry: %s', e)
import oauthclientbridge.cli
import oauthclientbridge.logging
import oauthclientbridge.views
|
<commit_before># flake8: noqa
from flask import Flask
from werkzeug.contrib.fixers import ProxyFix
__version__ = '1.0.1'
app = Flask(__name__)
app.config.from_object('oauthclientbridge.default_settings')
app.config.from_envvar('OAUTH_SETTINGS', silent=True)
if app.config['OAUTH_NUM_PROXIES']:
wrapper = ProxyFix(app.wsgi_app, app.config['OAUTH_NUM_PROXIES'])
app.wsgi_app = wrapper # type: ignore
try:
import sentry_sdk
from sentry_sdk.integrations.flask import FlaskIntegration
if app.config['OAUTH_SENTRY_DSN']:
sentry_sdk.init(
dsn=app.config['OAUTH_SENTRY_DSN'],
integrations=[FlaskIntegration()],
)
except ImportError as e:
app.logger.info('Failed to import sentry: %s', e)
import oauthclientbridge.cli
import oauthclientbridge.logging
import oauthclientbridge.views
<commit_msg>Support new location of ProxyFix helper<commit_after>
|
# flake8: noqa
from flask import Flask
# ProxyFix moved to werkzeug.middleware in werkzeug 0.15; fall back to the
# old import path on older werkzeug releases.
try:
    from werkzeug.middleware.proxy_fix import ProxyFix
except ImportError:
    from werkzeug.contrib.fixers import ProxyFix
__version__ = '1.0.1'
# Application setup: load package defaults first, then optional overrides
# from the settings file named by the OAUTH_SETTINGS environment variable.
app = Flask(__name__)
app.config.from_object('oauthclientbridge.default_settings')
app.config.from_envvar('OAUTH_SETTINGS', silent=True)
# When running behind reverse proxies, trust X-Forwarded-* headers from
# the configured number of proxy hops.
if app.config['OAUTH_NUM_PROXIES']:
    wrapper = ProxyFix(app.wsgi_app, app.config['OAUTH_NUM_PROXIES'])
    app.wsgi_app = wrapper  # type: ignore
# Sentry error reporting is optional: enabled only when the SDK is
# installed and a DSN is configured; a missing SDK is merely logged.
try:
    import sentry_sdk
    from sentry_sdk.integrations.flask import FlaskIntegration
    if app.config['OAUTH_SENTRY_DSN']:
        sentry_sdk.init(
            dsn=app.config['OAUTH_SENTRY_DSN'],
            integrations=[FlaskIntegration()],
        )
except ImportError as e:
    app.logger.info('Failed to import sentry: %s', e)
# Imported for their side effects: register CLI commands, logging
# configuration, and the HTTP views on `app`.
import oauthclientbridge.cli
import oauthclientbridge.logging
import oauthclientbridge.views
|
# flake8: noqa
from flask import Flask
from werkzeug.contrib.fixers import ProxyFix
__version__ = '1.0.1'
app = Flask(__name__)
app.config.from_object('oauthclientbridge.default_settings')
app.config.from_envvar('OAUTH_SETTINGS', silent=True)
if app.config['OAUTH_NUM_PROXIES']:
wrapper = ProxyFix(app.wsgi_app, app.config['OAUTH_NUM_PROXIES'])
app.wsgi_app = wrapper # type: ignore
try:
import sentry_sdk
from sentry_sdk.integrations.flask import FlaskIntegration
if app.config['OAUTH_SENTRY_DSN']:
sentry_sdk.init(
dsn=app.config['OAUTH_SENTRY_DSN'],
integrations=[FlaskIntegration()],
)
except ImportError as e:
app.logger.info('Failed to import sentry: %s', e)
import oauthclientbridge.cli
import oauthclientbridge.logging
import oauthclientbridge.views
Support new location of ProxyFix helper# flake8: noqa
from flask import Flask
try:
from werkzeug.middleware.proxy_fix import ProxyFix
except ImportError:
from werkzeug.contrib.fixers import ProxyFix
__version__ = '1.0.1'
app = Flask(__name__)
app.config.from_object('oauthclientbridge.default_settings')
app.config.from_envvar('OAUTH_SETTINGS', silent=True)
if app.config['OAUTH_NUM_PROXIES']:
wrapper = ProxyFix(app.wsgi_app, app.config['OAUTH_NUM_PROXIES'])
app.wsgi_app = wrapper # type: ignore
try:
import sentry_sdk
from sentry_sdk.integrations.flask import FlaskIntegration
if app.config['OAUTH_SENTRY_DSN']:
sentry_sdk.init(
dsn=app.config['OAUTH_SENTRY_DSN'],
integrations=[FlaskIntegration()],
)
except ImportError as e:
app.logger.info('Failed to import sentry: %s', e)
import oauthclientbridge.cli
import oauthclientbridge.logging
import oauthclientbridge.views
|
<commit_before># flake8: noqa
from flask import Flask
from werkzeug.contrib.fixers import ProxyFix
__version__ = '1.0.1'
app = Flask(__name__)
app.config.from_object('oauthclientbridge.default_settings')
app.config.from_envvar('OAUTH_SETTINGS', silent=True)
if app.config['OAUTH_NUM_PROXIES']:
wrapper = ProxyFix(app.wsgi_app, app.config['OAUTH_NUM_PROXIES'])
app.wsgi_app = wrapper # type: ignore
try:
import sentry_sdk
from sentry_sdk.integrations.flask import FlaskIntegration
if app.config['OAUTH_SENTRY_DSN']:
sentry_sdk.init(
dsn=app.config['OAUTH_SENTRY_DSN'],
integrations=[FlaskIntegration()],
)
except ImportError as e:
app.logger.info('Failed to import sentry: %s', e)
import oauthclientbridge.cli
import oauthclientbridge.logging
import oauthclientbridge.views
<commit_msg>Support new location of ProxyFix helper<commit_after># flake8: noqa
from flask import Flask
try:
from werkzeug.middleware.proxy_fix import ProxyFix
except ImportError:
from werkzeug.contrib.fixers import ProxyFix
__version__ = '1.0.1'
app = Flask(__name__)
app.config.from_object('oauthclientbridge.default_settings')
app.config.from_envvar('OAUTH_SETTINGS', silent=True)
if app.config['OAUTH_NUM_PROXIES']:
wrapper = ProxyFix(app.wsgi_app, app.config['OAUTH_NUM_PROXIES'])
app.wsgi_app = wrapper # type: ignore
try:
import sentry_sdk
from sentry_sdk.integrations.flask import FlaskIntegration
if app.config['OAUTH_SENTRY_DSN']:
sentry_sdk.init(
dsn=app.config['OAUTH_SENTRY_DSN'],
integrations=[FlaskIntegration()],
)
except ImportError as e:
app.logger.info('Failed to import sentry: %s', e)
import oauthclientbridge.cli
import oauthclientbridge.logging
import oauthclientbridge.views
|
d48e59f4b1174529a4d2eca8731472a5bf371621
|
simpleseo/templatetags/seo.py
|
simpleseo/templatetags/seo.py
|
from django.template import Library
from django.utils.translation import get_language
from simpleseo import settings
from simpleseo.models import SeoMetadata
register = Library()
@register.filter
def single_quotes(description):
return description.replace('\"', '\'')
@register.inclusion_tag('simpleseo/metadata.html', takes_context=True)
def get_seo(context):
lang_code = get_language()[:2]
path = context['request'].path
try:
metadata = SeoMetadata.objects.get(path=path, lang_code=lang_code)
except SeoMetadata.DoesNotExist:
metadata = None
if metadata is None:
return {'title': settings.FALLBACK_TITLE,
'description': settings.FALLBACK_DESCRIPTION}
return {'title': metadata.title, 'description': metadata.description}
@register.simple_tag(takes_context=True)
def get_seo_title(context):
return get_seo(context)['title']
@register.simple_tag(takes_context=True)
def get_seo_description(context):
return get_seo(context)['description']
|
from django.forms.models import model_to_dict
from django.template import Library
from django.utils.translation import get_language
from simpleseo import settings
from simpleseo.models import SeoMetadata
register = Library()
@register.filter
def single_quotes(description):
return description.replace('\"', '\'')
@register.inclusion_tag('simpleseo/metadata.html', takes_context=True)
def get_seo(context, **kwargs):
path = context['request'].path
lang_code = get_language()[:2]
try:
metadata = model_to_dict(SeoMetadata.objects.get(path=path,
lang_code=lang_code))
except SeoMetadata.DoesNotExist:
metadata = {}
result = {}
for item in ['title', 'description']:
result[item] = (metadata.get(item) or
kwargs.get(item) or
getattr(settings, 'FALLBACK_{0}'.format(item.upper())))
return result
@register.simple_tag(takes_context=True)
def get_seo_title(context, default=''):
return get_seo(context, title=default).get('title')
@register.simple_tag(takes_context=True)
def get_seo_description(context, default=''):
return get_seo(context, description=default).get('description')
|
Allow to set default value in template
|
Allow to set default value in template
|
Python
|
bsd-3-clause
|
Glamping-Hub/django-painless-seo,Glamping-Hub/django-painless-seo,AMongeMoreno/django-painless-seo,AMongeMoreno/django-painless-seo
|
from django.template import Library
from django.utils.translation import get_language
from simpleseo import settings
from simpleseo.models import SeoMetadata
register = Library()
@register.filter
def single_quotes(description):
return description.replace('\"', '\'')
@register.inclusion_tag('simpleseo/metadata.html', takes_context=True)
def get_seo(context):
lang_code = get_language()[:2]
path = context['request'].path
try:
metadata = SeoMetadata.objects.get(path=path, lang_code=lang_code)
except SeoMetadata.DoesNotExist:
metadata = None
if metadata is None:
return {'title': settings.FALLBACK_TITLE,
'description': settings.FALLBACK_DESCRIPTION}
return {'title': metadata.title, 'description': metadata.description}
@register.simple_tag(takes_context=True)
def get_seo_title(context):
return get_seo(context)['title']
@register.simple_tag(takes_context=True)
def get_seo_description(context):
return get_seo(context)['description']
Allow to set default value in template
|
from django.forms.models import model_to_dict
from django.template import Library
from django.utils.translation import get_language
from simpleseo import settings
from simpleseo.models import SeoMetadata
register = Library()
@register.filter
def single_quotes(description):
return description.replace('\"', '\'')
@register.inclusion_tag('simpleseo/metadata.html', takes_context=True)
def get_seo(context, **kwargs):
path = context['request'].path
lang_code = get_language()[:2]
try:
metadata = model_to_dict(SeoMetadata.objects.get(path=path,
lang_code=lang_code))
except SeoMetadata.DoesNotExist:
metadata = {}
result = {}
for item in ['title', 'description']:
result[item] = (metadata.get(item) or
kwargs.get(item) or
getattr(settings, 'FALLBACK_{0}'.format(item.upper())))
return result
@register.simple_tag(takes_context=True)
def get_seo_title(context, default=''):
return get_seo(context, title=default).get('title')
@register.simple_tag(takes_context=True)
def get_seo_description(context, default=''):
return get_seo(context, description=default).get('description')
|
<commit_before>from django.template import Library
from django.utils.translation import get_language
from simpleseo import settings
from simpleseo.models import SeoMetadata
register = Library()
@register.filter
def single_quotes(description):
return description.replace('\"', '\'')
@register.inclusion_tag('simpleseo/metadata.html', takes_context=True)
def get_seo(context):
lang_code = get_language()[:2]
path = context['request'].path
try:
metadata = SeoMetadata.objects.get(path=path, lang_code=lang_code)
except SeoMetadata.DoesNotExist:
metadata = None
if metadata is None:
return {'title': settings.FALLBACK_TITLE,
'description': settings.FALLBACK_DESCRIPTION}
return {'title': metadata.title, 'description': metadata.description}
@register.simple_tag(takes_context=True)
def get_seo_title(context):
return get_seo(context)['title']
@register.simple_tag(takes_context=True)
def get_seo_description(context):
return get_seo(context)['description']
<commit_msg>Allow to set default value in template<commit_after>
|
from django.forms.models import model_to_dict
from django.template import Library
from django.utils.translation import get_language
from simpleseo import settings
from simpleseo.models import SeoMetadata
register = Library()
@register.filter
def single_quotes(description):
    """Template filter: swap double quotes for single quotes."""
    cleaned = description.replace('\"', '\'')
    return cleaned
@register.inclusion_tag('simpleseo/metadata.html', takes_context=True)
def get_seo(context, **kwargs):
    """Resolve the SEO title/description for the current request path.

    Lookup order per field: stored SeoMetadata row for (path, language),
    then a caller-supplied default from ``kwargs``, then the FALLBACK_*
    value from settings.
    """
    current_path = context['request'].path
    language = get_language()[:2]
    try:
        stored = model_to_dict(
            SeoMetadata.objects.get(path=current_path, lang_code=language))
    except SeoMetadata.DoesNotExist:
        stored = {}

    def resolve(field):
        fallback = getattr(settings, 'FALLBACK_{0}'.format(field.upper()))
        return stored.get(field) or kwargs.get(field) or fallback

    return {field: resolve(field) for field in ('title', 'description')}
@register.simple_tag(takes_context=True)
def get_seo_title(context, default=''):
    """Render only the SEO title, with an optional template-supplied default."""
    seo = get_seo(context, title=default)
    return seo.get('title')
@register.simple_tag(takes_context=True)
def get_seo_description(context, default=''):
    """Render only the SEO description, with an optional template-supplied default."""
    seo = get_seo(context, description=default)
    return seo.get('description')
|
from django.template import Library
from django.utils.translation import get_language
from simpleseo import settings
from simpleseo.models import SeoMetadata
register = Library()
@register.filter
def single_quotes(description):
return description.replace('\"', '\'')
@register.inclusion_tag('simpleseo/metadata.html', takes_context=True)
def get_seo(context):
lang_code = get_language()[:2]
path = context['request'].path
try:
metadata = SeoMetadata.objects.get(path=path, lang_code=lang_code)
except SeoMetadata.DoesNotExist:
metadata = None
if metadata is None:
return {'title': settings.FALLBACK_TITLE,
'description': settings.FALLBACK_DESCRIPTION}
return {'title': metadata.title, 'description': metadata.description}
@register.simple_tag(takes_context=True)
def get_seo_title(context):
return get_seo(context)['title']
@register.simple_tag(takes_context=True)
def get_seo_description(context):
return get_seo(context)['description']
Allow to set default value in templatefrom django.forms.models import model_to_dict
from django.template import Library
from django.utils.translation import get_language
from simpleseo import settings
from simpleseo.models import SeoMetadata
register = Library()
@register.filter
def single_quotes(description):
return description.replace('\"', '\'')
@register.inclusion_tag('simpleseo/metadata.html', takes_context=True)
def get_seo(context, **kwargs):
path = context['request'].path
lang_code = get_language()[:2]
try:
metadata = model_to_dict(SeoMetadata.objects.get(path=path,
lang_code=lang_code))
except SeoMetadata.DoesNotExist:
metadata = {}
result = {}
for item in ['title', 'description']:
result[item] = (metadata.get(item) or
kwargs.get(item) or
getattr(settings, 'FALLBACK_{0}'.format(item.upper())))
return result
@register.simple_tag(takes_context=True)
def get_seo_title(context, default=''):
return get_seo(context, title=default).get('title')
@register.simple_tag(takes_context=True)
def get_seo_description(context, default=''):
return get_seo(context, description=default).get('description')
|
<commit_before>from django.template import Library
from django.utils.translation import get_language
from simpleseo import settings
from simpleseo.models import SeoMetadata
register = Library()
@register.filter
def single_quotes(description):
return description.replace('\"', '\'')
@register.inclusion_tag('simpleseo/metadata.html', takes_context=True)
def get_seo(context):
lang_code = get_language()[:2]
path = context['request'].path
try:
metadata = SeoMetadata.objects.get(path=path, lang_code=lang_code)
except SeoMetadata.DoesNotExist:
metadata = None
if metadata is None:
return {'title': settings.FALLBACK_TITLE,
'description': settings.FALLBACK_DESCRIPTION}
return {'title': metadata.title, 'description': metadata.description}
@register.simple_tag(takes_context=True)
def get_seo_title(context):
return get_seo(context)['title']
@register.simple_tag(takes_context=True)
def get_seo_description(context):
return get_seo(context)['description']
<commit_msg>Allow to set default value in template<commit_after>from django.forms.models import model_to_dict
from django.template import Library
from django.utils.translation import get_language
from simpleseo import settings
from simpleseo.models import SeoMetadata
register = Library()
@register.filter
def single_quotes(description):
return description.replace('\"', '\'')
@register.inclusion_tag('simpleseo/metadata.html', takes_context=True)
def get_seo(context, **kwargs):
path = context['request'].path
lang_code = get_language()[:2]
try:
metadata = model_to_dict(SeoMetadata.objects.get(path=path,
lang_code=lang_code))
except SeoMetadata.DoesNotExist:
metadata = {}
result = {}
for item in ['title', 'description']:
result[item] = (metadata.get(item) or
kwargs.get(item) or
getattr(settings, 'FALLBACK_{0}'.format(item.upper())))
return result
@register.simple_tag(takes_context=True)
def get_seo_title(context, default=''):
return get_seo(context, title=default).get('title')
@register.simple_tag(takes_context=True)
def get_seo_description(context, default=''):
return get_seo(context, description=default).get('description')
|
91122afafa9fb5872f905dde391ce5b587d5d70a
|
frappe/patches/v8_0/install_new_build_system_requirements.py
|
frappe/patches/v8_0/install_new_build_system_requirements.py
|
import subprocess
def execute():
subprocess.call([
'npm', 'install',
'babel-core',
'chokidar',
'babel-preset-es2015',
'babel-preset-es2016',
'babel-preset-es2017',
'babel-preset-babili'
])
|
from subprocess import Popen, call, PIPE
def execute():
# update nodejs version if brew exists
p = Popen(['which', 'brew'], stdout=PIPE, stderr=PIPE)
output, err = p.communicate()
if output:
subprocess.call(['brew', 'upgrade', 'node'])
else:
print 'Please update your NodeJS version'
subprocess.call([
'npm', 'install',
'babel-core',
'less',
'chokidar',
'babel-preset-es2015',
'babel-preset-es2016',
'babel-preset-es2017',
'babel-preset-babili'
])
|
Update build system requirements patch
|
Update build system requirements patch
|
Python
|
mit
|
mhbu50/frappe,ESS-LLP/frappe,tmimori/frappe,frappe/frappe,mhbu50/frappe,ESS-LLP/frappe,mbauskar/frappe,bcornwellmott/frappe,bohlian/frappe,adityahase/frappe,tmimori/frappe,paurosello/frappe,almeidapaulopt/frappe,RicardoJohann/frappe,paurosello/frappe,ESS-LLP/frappe,mhbu50/frappe,bohlian/frappe,chdecultot/frappe,frappe/frappe,almeidapaulopt/frappe,StrellaGroup/frappe,saurabh6790/frappe,bcornwellmott/frappe,mbauskar/frappe,paurosello/frappe,RicardoJohann/frappe,chdecultot/frappe,maxtorete/frappe,rmehta/frappe,neilLasrado/frappe,maxtorete/frappe,tmimori/frappe,maxtorete/frappe,ESS-LLP/frappe,rmehta/frappe,saurabh6790/frappe,adityahase/frappe,mbauskar/frappe,mhbu50/frappe,bohlian/frappe,tundebabzy/frappe,saurabh6790/frappe,neilLasrado/frappe,tundebabzy/frappe,paurosello/frappe,almeidapaulopt/frappe,maxtorete/frappe,chdecultot/frappe,manassolanki/frappe,vjFaLk/frappe,tundebabzy/frappe,adityahase/frappe,adityahase/frappe,bohlian/frappe,neilLasrado/frappe,rmehta/frappe,bcornwellmott/frappe,bcornwellmott/frappe,StrellaGroup/frappe,rmehta/frappe,frappe/frappe,yashodhank/frappe,yashodhank/frappe,yashodhank/frappe,almeidapaulopt/frappe,manassolanki/frappe,tmimori/frappe,StrellaGroup/frappe,vjFaLk/frappe,RicardoJohann/frappe,yashodhank/frappe,mbauskar/frappe,tundebabzy/frappe,chdecultot/frappe,manassolanki/frappe,manassolanki/frappe,vjFaLk/frappe,neilLasrado/frappe,vjFaLk/frappe,RicardoJohann/frappe,saurabh6790/frappe
|
import subprocess
def execute():
subprocess.call([
'npm', 'install',
'babel-core',
'chokidar',
'babel-preset-es2015',
'babel-preset-es2016',
'babel-preset-es2017',
'babel-preset-babili'
])Update build system requirements patch
|
from subprocess import Popen, call, PIPE

def execute():
    """Install the npm packages required by the new build system.

    If Homebrew is available, also upgrade Node.js through it; otherwise
    ask the user to update Node.js manually.
    """
    # Update the Node.js version if brew exists on PATH.
    p = Popen(['which', 'brew'], stdout=PIPE, stderr=PIPE)
    output, _ = p.communicate()
    if output:
        # Bug fix: the original called subprocess.call(), but only `call`
        # is imported here, which raised NameError at runtime.
        call(['brew', 'upgrade', 'node'])
    else:
        # print() with a single argument is valid in both Python 2 and 3,
        # unlike the original `print '...'` statement.
        print('Please update your NodeJS version')
    call([
        'npm', 'install',
        'babel-core',
        'less',
        'chokidar',
        'babel-preset-es2015',
        'babel-preset-es2016',
        'babel-preset-es2017',
        'babel-preset-babili'
    ])
|
<commit_before>import subprocess
def execute():
subprocess.call([
'npm', 'install',
'babel-core',
'chokidar',
'babel-preset-es2015',
'babel-preset-es2016',
'babel-preset-es2017',
'babel-preset-babili'
])<commit_msg>Update build system requirements patch<commit_after>
|
from subprocess import Popen, call, PIPE
def execute():
# update nodejs version if brew exists
p = Popen(['which', 'brew'], stdout=PIPE, stderr=PIPE)
output, err = p.communicate()
if output:
subprocess.call(['brew', 'upgrade', 'node'])
else:
print 'Please update your NodeJS version'
subprocess.call([
'npm', 'install',
'babel-core',
'less',
'chokidar',
'babel-preset-es2015',
'babel-preset-es2016',
'babel-preset-es2017',
'babel-preset-babili'
])
|
import subprocess
def execute():
subprocess.call([
'npm', 'install',
'babel-core',
'chokidar',
'babel-preset-es2015',
'babel-preset-es2016',
'babel-preset-es2017',
'babel-preset-babili'
])Update build system requirements patchfrom subprocess import Popen, call, PIPE
def execute():
# update nodejs version if brew exists
p = Popen(['which', 'brew'], stdout=PIPE, stderr=PIPE)
output, err = p.communicate()
if output:
subprocess.call(['brew', 'upgrade', 'node'])
else:
print 'Please update your NodeJS version'
subprocess.call([
'npm', 'install',
'babel-core',
'less',
'chokidar',
'babel-preset-es2015',
'babel-preset-es2016',
'babel-preset-es2017',
'babel-preset-babili'
])
|
<commit_before>import subprocess
def execute():
subprocess.call([
'npm', 'install',
'babel-core',
'chokidar',
'babel-preset-es2015',
'babel-preset-es2016',
'babel-preset-es2017',
'babel-preset-babili'
])<commit_msg>Update build system requirements patch<commit_after>from subprocess import Popen, call, PIPE
def execute():
# update nodejs version if brew exists
p = Popen(['which', 'brew'], stdout=PIPE, stderr=PIPE)
output, err = p.communicate()
if output:
subprocess.call(['brew', 'upgrade', 'node'])
else:
print 'Please update your NodeJS version'
subprocess.call([
'npm', 'install',
'babel-core',
'less',
'chokidar',
'babel-preset-es2015',
'babel-preset-es2016',
'babel-preset-es2017',
'babel-preset-babili'
])
|
bd540e3a0bcc13c6c50c1d72f1982084ab5cb87e
|
django_enumfield/fields.py
|
django_enumfield/fields.py
|
from django.db import models
class EnumField(models.Field):
__metaclass__ = models.SubfieldBase
def __init__(self, enumeration, *args, **kwargs):
self.enumeration = enumeration
kwargs.setdefault('choices', enumeration.get_choices())
super(EnumField, self).__init__(*args, **kwargs)
def get_internal_type(self):
return 'IntegerField'
def to_python(self, value):
return self.enumeration.to_item(value)
def get_db_prep_save(self, value, connection=None):
if hasattr(value, 'value'):
return value.value
return value
def get_db_prep_lookup(self, lookup_type, value, connection=None, prepared=False):
def prepare(value):
v = self.to_python(value)
return self.get_db_prep_save(v, connection=connection)
if lookup_type == 'exact':
return [prepare(value)]
elif lookup_type == 'in':
return [prepare(v) for v in value]
elif lookup_type == 'isnull':
return []
raise TypeError("Lookup type %r not supported." % lookup_type)
|
from django.db import models
class EnumField(models.Field):
    """Model field that stores an enumeration item as an integer column."""

    # SubfieldBase makes Django run to_python() on attribute assignment,
    # so model instances hold enumeration items rather than raw values.
    __metaclass__ = models.SubfieldBase

    def __init__(self, enumeration, *args, **kwargs):
        # `enumeration` supplies both the value coercion (to_item) and the
        # default choices shown in forms/admin.
        self.enumeration = enumeration
        kwargs.setdefault('choices', enumeration.get_choices())
        super(EnumField, self).__init__(*args, **kwargs)

    def get_internal_type(self):
        # Persisted in the database as a plain integer column.
        return 'IntegerField'

    def to_python(self, value):
        # Delegate to the enumeration to coerce the raw value (int, slug
        # string, or item — see enumeration.to_item) into an item.
        return self.enumeration.to_item(value)

    def get_db_prep_save(self, value, connection=None):
        # Normalise through to_python() first so string slugs are saved as
        # their integer value instead of reaching the database as text.
        return self.to_python(value).value

    def get_db_prep_lookup(self, lookup_type, value, connection=None, prepared=False):
        # Only exact/in/isnull lookups are meaningful for enum columns.
        def prepare(value):
            v = self.to_python(value)
            return self.get_db_prep_save(v, connection=connection)
        if lookup_type == 'exact':
            return [prepare(value)]
        elif lookup_type == 'in':
            return [prepare(v) for v in value]
        elif lookup_type == 'isnull':
            # isnull needs no parameters; the SQL is generated by Django.
            return []
        raise TypeError("Lookup type %r not supported." % lookup_type)
|
Allow string arguments (as slugs) when saving/updating EnumFields
|
Allow string arguments (as slugs) when saving/updating EnumFields
This fixes issues where:
MyModel.objects.update(my_enum_field='slug')
would result in SQL like:
UPDATE app_mymodel SET my_enum_field = 'slug'
.. instead of what that's slug's integer value is.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@playfire.com>
|
Python
|
bsd-3-clause
|
playfire/django-enumfield
|
from django.db import models
class EnumField(models.Field):
__metaclass__ = models.SubfieldBase
def __init__(self, enumeration, *args, **kwargs):
self.enumeration = enumeration
kwargs.setdefault('choices', enumeration.get_choices())
super(EnumField, self).__init__(*args, **kwargs)
def get_internal_type(self):
return 'IntegerField'
def to_python(self, value):
return self.enumeration.to_item(value)
def get_db_prep_save(self, value, connection=None):
if hasattr(value, 'value'):
return value.value
return value
def get_db_prep_lookup(self, lookup_type, value, connection=None, prepared=False):
def prepare(value):
v = self.to_python(value)
return self.get_db_prep_save(v, connection=connection)
if lookup_type == 'exact':
return [prepare(value)]
elif lookup_type == 'in':
return [prepare(v) for v in value]
elif lookup_type == 'isnull':
return []
raise TypeError("Lookup type %r not supported." % lookup_type)
Allow string arguments (as slugs) when saving/updating EnumFields
This fixes issues where:
MyModel.objects.update(my_enum_field='slug')
would result in SQL like:
UPDATE app_mymodel SET my_enum_field = 'slug'
.. instead of what that's slug's integer value is.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@playfire.com>
|
from django.db import models
class EnumField(models.Field):
__metaclass__ = models.SubfieldBase
def __init__(self, enumeration, *args, **kwargs):
self.enumeration = enumeration
kwargs.setdefault('choices', enumeration.get_choices())
super(EnumField, self).__init__(*args, **kwargs)
def get_internal_type(self):
return 'IntegerField'
def to_python(self, value):
return self.enumeration.to_item(value)
def get_db_prep_save(self, value, connection=None):
return self.to_python(value).value
def get_db_prep_lookup(self, lookup_type, value, connection=None, prepared=False):
def prepare(value):
v = self.to_python(value)
return self.get_db_prep_save(v, connection=connection)
if lookup_type == 'exact':
return [prepare(value)]
elif lookup_type == 'in':
return [prepare(v) for v in value]
elif lookup_type == 'isnull':
return []
raise TypeError("Lookup type %r not supported." % lookup_type)
|
<commit_before>from django.db import models
class EnumField(models.Field):
__metaclass__ = models.SubfieldBase
def __init__(self, enumeration, *args, **kwargs):
self.enumeration = enumeration
kwargs.setdefault('choices', enumeration.get_choices())
super(EnumField, self).__init__(*args, **kwargs)
def get_internal_type(self):
return 'IntegerField'
def to_python(self, value):
return self.enumeration.to_item(value)
def get_db_prep_save(self, value, connection=None):
if hasattr(value, 'value'):
return value.value
return value
def get_db_prep_lookup(self, lookup_type, value, connection=None, prepared=False):
def prepare(value):
v = self.to_python(value)
return self.get_db_prep_save(v, connection=connection)
if lookup_type == 'exact':
return [prepare(value)]
elif lookup_type == 'in':
return [prepare(v) for v in value]
elif lookup_type == 'isnull':
return []
raise TypeError("Lookup type %r not supported." % lookup_type)
<commit_msg>Allow string arguments (as slugs) when saving/updating EnumFields
This fixes issues where:
MyModel.objects.update(my_enum_field='slug')
would result in SQL like:
UPDATE app_mymodel SET my_enum_field = 'slug'
.. instead of what that's slug's integer value is.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@playfire.com><commit_after>
|
from django.db import models
class EnumField(models.Field):
__metaclass__ = models.SubfieldBase
def __init__(self, enumeration, *args, **kwargs):
self.enumeration = enumeration
kwargs.setdefault('choices', enumeration.get_choices())
super(EnumField, self).__init__(*args, **kwargs)
def get_internal_type(self):
return 'IntegerField'
def to_python(self, value):
return self.enumeration.to_item(value)
def get_db_prep_save(self, value, connection=None):
return self.to_python(value).value
def get_db_prep_lookup(self, lookup_type, value, connection=None, prepared=False):
def prepare(value):
v = self.to_python(value)
return self.get_db_prep_save(v, connection=connection)
if lookup_type == 'exact':
return [prepare(value)]
elif lookup_type == 'in':
return [prepare(v) for v in value]
elif lookup_type == 'isnull':
return []
raise TypeError("Lookup type %r not supported." % lookup_type)
|
from django.db import models
class EnumField(models.Field):
__metaclass__ = models.SubfieldBase
def __init__(self, enumeration, *args, **kwargs):
self.enumeration = enumeration
kwargs.setdefault('choices', enumeration.get_choices())
super(EnumField, self).__init__(*args, **kwargs)
def get_internal_type(self):
return 'IntegerField'
def to_python(self, value):
return self.enumeration.to_item(value)
def get_db_prep_save(self, value, connection=None):
if hasattr(value, 'value'):
return value.value
return value
def get_db_prep_lookup(self, lookup_type, value, connection=None, prepared=False):
def prepare(value):
v = self.to_python(value)
return self.get_db_prep_save(v, connection=connection)
if lookup_type == 'exact':
return [prepare(value)]
elif lookup_type == 'in':
return [prepare(v) for v in value]
elif lookup_type == 'isnull':
return []
raise TypeError("Lookup type %r not supported." % lookup_type)
Allow string arguments (as slugs) when saving/updating EnumFields
This fixes issues where:
MyModel.objects.update(my_enum_field='slug')
would result in SQL like:
UPDATE app_mymodel SET my_enum_field = 'slug'
.. instead of what that's slug's integer value is.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@playfire.com>from django.db import models
class EnumField(models.Field):
__metaclass__ = models.SubfieldBase
def __init__(self, enumeration, *args, **kwargs):
self.enumeration = enumeration
kwargs.setdefault('choices', enumeration.get_choices())
super(EnumField, self).__init__(*args, **kwargs)
def get_internal_type(self):
return 'IntegerField'
def to_python(self, value):
return self.enumeration.to_item(value)
def get_db_prep_save(self, value, connection=None):
return self.to_python(value).value
def get_db_prep_lookup(self, lookup_type, value, connection=None, prepared=False):
def prepare(value):
v = self.to_python(value)
return self.get_db_prep_save(v, connection=connection)
if lookup_type == 'exact':
return [prepare(value)]
elif lookup_type == 'in':
return [prepare(v) for v in value]
elif lookup_type == 'isnull':
return []
raise TypeError("Lookup type %r not supported." % lookup_type)
|
<commit_before>from django.db import models
class EnumField(models.Field):
__metaclass__ = models.SubfieldBase
def __init__(self, enumeration, *args, **kwargs):
self.enumeration = enumeration
kwargs.setdefault('choices', enumeration.get_choices())
super(EnumField, self).__init__(*args, **kwargs)
def get_internal_type(self):
return 'IntegerField'
def to_python(self, value):
return self.enumeration.to_item(value)
def get_db_prep_save(self, value, connection=None):
if hasattr(value, 'value'):
return value.value
return value
def get_db_prep_lookup(self, lookup_type, value, connection=None, prepared=False):
def prepare(value):
v = self.to_python(value)
return self.get_db_prep_save(v, connection=connection)
if lookup_type == 'exact':
return [prepare(value)]
elif lookup_type == 'in':
return [prepare(v) for v in value]
elif lookup_type == 'isnull':
return []
raise TypeError("Lookup type %r not supported." % lookup_type)
<commit_msg>Allow string arguments (as slugs) when saving/updating EnumFields
This fixes issues where:
MyModel.objects.update(my_enum_field='slug')
would result in SQL like:
UPDATE app_mymodel SET my_enum_field = 'slug'
.. instead of what that's slug's integer value is.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@playfire.com><commit_after>from django.db import models
class EnumField(models.Field):
__metaclass__ = models.SubfieldBase
def __init__(self, enumeration, *args, **kwargs):
self.enumeration = enumeration
kwargs.setdefault('choices', enumeration.get_choices())
super(EnumField, self).__init__(*args, **kwargs)
def get_internal_type(self):
return 'IntegerField'
def to_python(self, value):
return self.enumeration.to_item(value)
def get_db_prep_save(self, value, connection=None):
return self.to_python(value).value
def get_db_prep_lookup(self, lookup_type, value, connection=None, prepared=False):
def prepare(value):
v = self.to_python(value)
return self.get_db_prep_save(v, connection=connection)
if lookup_type == 'exact':
return [prepare(value)]
elif lookup_type == 'in':
return [prepare(v) for v in value]
elif lookup_type == 'isnull':
return []
raise TypeError("Lookup type %r not supported." % lookup_type)
|
1f90629a0ccea80ef59ca865b80edd2486d31e68
|
tests/conftest.py
|
tests/conftest.py
|
# -*- coding: utf-8 -*-
import flask
import pytest
import webtest
import marshmallow as ma
class Bunch(object):
def __init__(self, **kwargs):
self.__dict__.update(**kwargs)
def items(self):
return self.__dict__.items()
@pytest.fixture
def app():
return flask.Flask(__name__)
@pytest.fixture
def client(app):
return webtest.TestApp(app)
@pytest.fixture
def models():
class Band(object):
def __init__(self, name, genre):
self.name = name
self.genre = genre
return Bunch(Band=Band)
@pytest.fixture
def schemas(models):
class BandSchema(ma.Schema):
name = ma.fields.Str()
genre = ma.fields.Str()
return Bunch(BandSchema=BandSchema)
|
# -*- coding: utf-8 -*-
import flask
import pytest
import webtest
import marshmallow as ma
class Bunch(object):
def __init__(self, **kwargs):
self.__dict__.update(**kwargs)
def items(self):
return self.__dict__.items()
@pytest.fixture
def app():
app_ = flask.Flask(__name__)
app_.debug = True
return app_
@pytest.fixture
def client(app):
return webtest.TestApp(app)
@pytest.fixture
def models():
class Band(object):
def __init__(self, name, genre):
self.name = name
self.genre = genre
return Bunch(Band=Band)
@pytest.fixture
def schemas(models):
class BandSchema(ma.Schema):
name = ma.fields.Str()
genre = ma.fields.Str()
return Bunch(BandSchema=BandSchema)
|
Use debug mode in tests
|
Use debug mode in tests
|
Python
|
mit
|
jmcarp/flask-apispec,jmcarp/flask-smore,jmcarp/flask-apispec,jmcarp/flask-smore
|
# -*- coding: utf-8 -*-
import flask
import pytest
import webtest
import marshmallow as ma
class Bunch(object):
def __init__(self, **kwargs):
self.__dict__.update(**kwargs)
def items(self):
return self.__dict__.items()
@pytest.fixture
def app():
return flask.Flask(__name__)
@pytest.fixture
def client(app):
return webtest.TestApp(app)
@pytest.fixture
def models():
class Band(object):
def __init__(self, name, genre):
self.name = name
self.genre = genre
return Bunch(Band=Band)
@pytest.fixture
def schemas(models):
class BandSchema(ma.Schema):
name = ma.fields.Str()
genre = ma.fields.Str()
return Bunch(BandSchema=BandSchema)
Use debug mode in tests
|
# -*- coding: utf-8 -*-
import flask
import pytest
import webtest
import marshmallow as ma
class Bunch(object):
def __init__(self, **kwargs):
self.__dict__.update(**kwargs)
def items(self):
return self.__dict__.items()
@pytest.fixture
def app():
app_ = flask.Flask(__name__)
app_.debug = True
return app_
@pytest.fixture
def client(app):
return webtest.TestApp(app)
@pytest.fixture
def models():
class Band(object):
def __init__(self, name, genre):
self.name = name
self.genre = genre
return Bunch(Band=Band)
@pytest.fixture
def schemas(models):
class BandSchema(ma.Schema):
name = ma.fields.Str()
genre = ma.fields.Str()
return Bunch(BandSchema=BandSchema)
|
<commit_before># -*- coding: utf-8 -*-
import flask
import pytest
import webtest
import marshmallow as ma
class Bunch(object):
def __init__(self, **kwargs):
self.__dict__.update(**kwargs)
def items(self):
return self.__dict__.items()
@pytest.fixture
def app():
return flask.Flask(__name__)
@pytest.fixture
def client(app):
return webtest.TestApp(app)
@pytest.fixture
def models():
class Band(object):
def __init__(self, name, genre):
self.name = name
self.genre = genre
return Bunch(Band=Band)
@pytest.fixture
def schemas(models):
class BandSchema(ma.Schema):
name = ma.fields.Str()
genre = ma.fields.Str()
return Bunch(BandSchema=BandSchema)
<commit_msg>Use debug mode in tests<commit_after>
|
# -*- coding: utf-8 -*-
import flask
import pytest
import webtest
import marshmallow as ma
class Bunch(object):
def __init__(self, **kwargs):
self.__dict__.update(**kwargs)
def items(self):
return self.__dict__.items()
@pytest.fixture
def app():
app_ = flask.Flask(__name__)
app_.debug = True
return app_
@pytest.fixture
def client(app):
return webtest.TestApp(app)
@pytest.fixture
def models():
class Band(object):
def __init__(self, name, genre):
self.name = name
self.genre = genre
return Bunch(Band=Band)
@pytest.fixture
def schemas(models):
class BandSchema(ma.Schema):
name = ma.fields.Str()
genre = ma.fields.Str()
return Bunch(BandSchema=BandSchema)
|
# -*- coding: utf-8 -*-
import flask
import pytest
import webtest
import marshmallow as ma
class Bunch(object):
def __init__(self, **kwargs):
self.__dict__.update(**kwargs)
def items(self):
return self.__dict__.items()
@pytest.fixture
def app():
return flask.Flask(__name__)
@pytest.fixture
def client(app):
return webtest.TestApp(app)
@pytest.fixture
def models():
class Band(object):
def __init__(self, name, genre):
self.name = name
self.genre = genre
return Bunch(Band=Band)
@pytest.fixture
def schemas(models):
class BandSchema(ma.Schema):
name = ma.fields.Str()
genre = ma.fields.Str()
return Bunch(BandSchema=BandSchema)
Use debug mode in tests# -*- coding: utf-8 -*-
import flask
import pytest
import webtest
import marshmallow as ma
class Bunch(object):
def __init__(self, **kwargs):
self.__dict__.update(**kwargs)
def items(self):
return self.__dict__.items()
@pytest.fixture
def app():
app_ = flask.Flask(__name__)
app_.debug = True
return app_
@pytest.fixture
def client(app):
return webtest.TestApp(app)
@pytest.fixture
def models():
class Band(object):
def __init__(self, name, genre):
self.name = name
self.genre = genre
return Bunch(Band=Band)
@pytest.fixture
def schemas(models):
class BandSchema(ma.Schema):
name = ma.fields.Str()
genre = ma.fields.Str()
return Bunch(BandSchema=BandSchema)
|
<commit_before># -*- coding: utf-8 -*-
import flask
import pytest
import webtest
import marshmallow as ma
class Bunch(object):
def __init__(self, **kwargs):
self.__dict__.update(**kwargs)
def items(self):
return self.__dict__.items()
@pytest.fixture
def app():
return flask.Flask(__name__)
@pytest.fixture
def client(app):
return webtest.TestApp(app)
@pytest.fixture
def models():
class Band(object):
def __init__(self, name, genre):
self.name = name
self.genre = genre
return Bunch(Band=Band)
@pytest.fixture
def schemas(models):
class BandSchema(ma.Schema):
name = ma.fields.Str()
genre = ma.fields.Str()
return Bunch(BandSchema=BandSchema)
<commit_msg>Use debug mode in tests<commit_after># -*- coding: utf-8 -*-
import flask
import pytest
import webtest
import marshmallow as ma
class Bunch(object):
def __init__(self, **kwargs):
self.__dict__.update(**kwargs)
def items(self):
return self.__dict__.items()
@pytest.fixture
def app():
app_ = flask.Flask(__name__)
app_.debug = True
return app_
@pytest.fixture
def client(app):
return webtest.TestApp(app)
@pytest.fixture
def models():
class Band(object):
def __init__(self, name, genre):
self.name = name
self.genre = genre
return Bunch(Band=Band)
@pytest.fixture
def schemas(models):
class BandSchema(ma.Schema):
name = ma.fields.Str()
genre = ma.fields.Str()
return Bunch(BandSchema=BandSchema)
|
e4dfb192d9984973888354ae73f2edc8486a9843
|
tests/conftest.py
|
tests/conftest.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Part of the masterfile package: https://github.com/njvack/masterfile
# Copyright (c) 2018 Board of Regents of the University of Wisconsin System
# Written by Nate Vack <njvack@wisc.edu> at the Center for Healthy Minds
# at the University of Wisconsin-Madison.
# Released under MIT licence; see LICENSE at the package root.
from os import path
from glob import glob
import pytest
import masterfile
# EXAMPLE_PATH =
# GOOD_PATH =
# GOOD_CSVS = glob(path.join(GOOD_PATH, '*csv'))
# PROBLEMS_PATH = path.join(EXAMPLE_PATH, 'problems')
@pytest.fixture
def example_path():
return path.join(path.dirname(path.abspath(__file__)), 'examples')
@pytest.fixture
def good_path():
return path.join(example_path(), 'good')
@pytest.fixture
def good_csvs():
return glob(path.join(good_path(), '*csv'))
@pytest.fixture
def problems_path():
return path.join(example_path(), 'problems')
@pytest.fixture
def good_mf():
return masterfile.load(good_path())
@pytest.fixture
def nosettings_mf():
return masterfile.load(example_path())
@pytest.fixture
def problems_mf():
return masterfile.load(problems_path())
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Part of the masterfile package: https://github.com/njvack/masterfile
# Copyright (c) 2018 Board of Regents of the University of Wisconsin System
# Written by Nate Vack <njvack@wisc.edu> at the Center for Healthy Minds
# at the University of Wisconsin-Madison.
# Released under MIT licence; see LICENSE at the package root.
from os import path
from glob import glob
import pytest
import masterfile
# EXAMPLE_PATH =
# GOOD_PATH =
# GOOD_CSVS = glob(path.join(GOOD_PATH, '*csv'))
# PROBLEMS_PATH = path.join(EXAMPLE_PATH, 'problems')
@pytest.fixture()
def example_path():
return path.join(path.dirname(path.abspath(__file__)), 'examples')
@pytest.fixture()
def good_path():
return path.join(example_path(), 'good')
@pytest.fixture()
def good_csvs():
return glob(path.join(good_path(), '*csv'))
@pytest.fixture()
def problems_path():
return path.join(example_path(), 'problems')
@pytest.fixture()
def good_mf():
return masterfile.load(good_path())
@pytest.fixture()
def nosettings_mf():
return masterfile.load(example_path())
@pytest.fixture()
def problems_mf():
return masterfile.load(problems_path())
|
Add () to fixture definitions
|
Add () to fixture definitions
|
Python
|
mit
|
njvack/masterfile
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Part of the masterfile package: https://github.com/njvack/masterfile
# Copyright (c) 2018 Board of Regents of the University of Wisconsin System
# Written by Nate Vack <njvack@wisc.edu> at the Center for Healthy Minds
# at the University of Wisconsin-Madison.
# Released under MIT licence; see LICENSE at the package root.
from os import path
from glob import glob
import pytest
import masterfile
# EXAMPLE_PATH =
# GOOD_PATH =
# GOOD_CSVS = glob(path.join(GOOD_PATH, '*csv'))
# PROBLEMS_PATH = path.join(EXAMPLE_PATH, 'problems')
@pytest.fixture
def example_path():
return path.join(path.dirname(path.abspath(__file__)), 'examples')
@pytest.fixture
def good_path():
return path.join(example_path(), 'good')
@pytest.fixture
def good_csvs():
return glob(path.join(good_path(), '*csv'))
@pytest.fixture
def problems_path():
return path.join(example_path(), 'problems')
@pytest.fixture
def good_mf():
return masterfile.load(good_path())
@pytest.fixture
def nosettings_mf():
return masterfile.load(example_path())
@pytest.fixture
def problems_mf():
return masterfile.load(problems_path())
Add () to fixture definitions
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Part of the masterfile package: https://github.com/njvack/masterfile
# Copyright (c) 2018 Board of Regents of the University of Wisconsin System
# Written by Nate Vack <njvack@wisc.edu> at the Center for Healthy Minds
# at the University of Wisconsin-Madison.
# Released under MIT licence; see LICENSE at the package root.
from os import path
from glob import glob
import pytest
import masterfile
# EXAMPLE_PATH =
# GOOD_PATH =
# GOOD_CSVS = glob(path.join(GOOD_PATH, '*csv'))
# PROBLEMS_PATH = path.join(EXAMPLE_PATH, 'problems')
@pytest.fixture()
def example_path():
return path.join(path.dirname(path.abspath(__file__)), 'examples')
@pytest.fixture()
def good_path():
return path.join(example_path(), 'good')
@pytest.fixture()
def good_csvs():
return glob(path.join(good_path(), '*csv'))
@pytest.fixture()
def problems_path():
return path.join(example_path(), 'problems')
@pytest.fixture()
def good_mf():
return masterfile.load(good_path())
@pytest.fixture()
def nosettings_mf():
return masterfile.load(example_path())
@pytest.fixture()
def problems_mf():
return masterfile.load(problems_path())
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Part of the masterfile package: https://github.com/njvack/masterfile
# Copyright (c) 2018 Board of Regents of the University of Wisconsin System
# Written by Nate Vack <njvack@wisc.edu> at the Center for Healthy Minds
# at the University of Wisconsin-Madison.
# Released under MIT licence; see LICENSE at the package root.
from os import path
from glob import glob
import pytest
import masterfile
# EXAMPLE_PATH =
# GOOD_PATH =
# GOOD_CSVS = glob(path.join(GOOD_PATH, '*csv'))
# PROBLEMS_PATH = path.join(EXAMPLE_PATH, 'problems')
@pytest.fixture
def example_path():
return path.join(path.dirname(path.abspath(__file__)), 'examples')
@pytest.fixture
def good_path():
return path.join(example_path(), 'good')
@pytest.fixture
def good_csvs():
return glob(path.join(good_path(), '*csv'))
@pytest.fixture
def problems_path():
return path.join(example_path(), 'problems')
@pytest.fixture
def good_mf():
return masterfile.load(good_path())
@pytest.fixture
def nosettings_mf():
return masterfile.load(example_path())
@pytest.fixture
def problems_mf():
return masterfile.load(problems_path())
<commit_msg>Add () to fixture definitions<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Part of the masterfile package: https://github.com/njvack/masterfile
# Copyright (c) 2018 Board of Regents of the University of Wisconsin System
# Written by Nate Vack <njvack@wisc.edu> at the Center for Healthy Minds
# at the University of Wisconsin-Madison.
# Released under MIT licence; see LICENSE at the package root.
from os import path
from glob import glob
import pytest
import masterfile
# EXAMPLE_PATH =
# GOOD_PATH =
# GOOD_CSVS = glob(path.join(GOOD_PATH, '*csv'))
# PROBLEMS_PATH = path.join(EXAMPLE_PATH, 'problems')
@pytest.fixture()
def example_path():
return path.join(path.dirname(path.abspath(__file__)), 'examples')
@pytest.fixture()
def good_path():
return path.join(example_path(), 'good')
@pytest.fixture()
def good_csvs():
return glob(path.join(good_path(), '*csv'))
@pytest.fixture()
def problems_path():
return path.join(example_path(), 'problems')
@pytest.fixture()
def good_mf():
return masterfile.load(good_path())
@pytest.fixture()
def nosettings_mf():
return masterfile.load(example_path())
@pytest.fixture()
def problems_mf():
return masterfile.load(problems_path())
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Part of the masterfile package: https://github.com/njvack/masterfile
# Copyright (c) 2018 Board of Regents of the University of Wisconsin System
# Written by Nate Vack <njvack@wisc.edu> at the Center for Healthy Minds
# at the University of Wisconsin-Madison.
# Released under MIT licence; see LICENSE at the package root.
from os import path
from glob import glob
import pytest
import masterfile
# EXAMPLE_PATH =
# GOOD_PATH =
# GOOD_CSVS = glob(path.join(GOOD_PATH, '*csv'))
# PROBLEMS_PATH = path.join(EXAMPLE_PATH, 'problems')
@pytest.fixture
def example_path():
return path.join(path.dirname(path.abspath(__file__)), 'examples')
@pytest.fixture
def good_path():
return path.join(example_path(), 'good')
@pytest.fixture
def good_csvs():
return glob(path.join(good_path(), '*csv'))
@pytest.fixture
def problems_path():
return path.join(example_path(), 'problems')
@pytest.fixture
def good_mf():
return masterfile.load(good_path())
@pytest.fixture
def nosettings_mf():
return masterfile.load(example_path())
@pytest.fixture
def problems_mf():
return masterfile.load(problems_path())
Add () to fixture definitions#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Part of the masterfile package: https://github.com/njvack/masterfile
# Copyright (c) 2018 Board of Regents of the University of Wisconsin System
# Written by Nate Vack <njvack@wisc.edu> at the Center for Healthy Minds
# at the University of Wisconsin-Madison.
# Released under MIT licence; see LICENSE at the package root.
from os import path
from glob import glob
import pytest
import masterfile
# EXAMPLE_PATH =
# GOOD_PATH =
# GOOD_CSVS = glob(path.join(GOOD_PATH, '*csv'))
# PROBLEMS_PATH = path.join(EXAMPLE_PATH, 'problems')
@pytest.fixture()
def example_path():
return path.join(path.dirname(path.abspath(__file__)), 'examples')
@pytest.fixture()
def good_path():
return path.join(example_path(), 'good')
@pytest.fixture()
def good_csvs():
return glob(path.join(good_path(), '*csv'))
@pytest.fixture()
def problems_path():
return path.join(example_path(), 'problems')
@pytest.fixture()
def good_mf():
return masterfile.load(good_path())
@pytest.fixture()
def nosettings_mf():
return masterfile.load(example_path())
@pytest.fixture()
def problems_mf():
return masterfile.load(problems_path())
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Part of the masterfile package: https://github.com/njvack/masterfile
# Copyright (c) 2018 Board of Regents of the University of Wisconsin System
# Written by Nate Vack <njvack@wisc.edu> at the Center for Healthy Minds
# at the University of Wisconsin-Madison.
# Released under MIT licence; see LICENSE at the package root.
from os import path
from glob import glob
import pytest
import masterfile
# EXAMPLE_PATH =
# GOOD_PATH =
# GOOD_CSVS = glob(path.join(GOOD_PATH, '*csv'))
# PROBLEMS_PATH = path.join(EXAMPLE_PATH, 'problems')
@pytest.fixture
def example_path():
return path.join(path.dirname(path.abspath(__file__)), 'examples')
@pytest.fixture
def good_path():
return path.join(example_path(), 'good')
@pytest.fixture
def good_csvs():
return glob(path.join(good_path(), '*csv'))
@pytest.fixture
def problems_path():
return path.join(example_path(), 'problems')
@pytest.fixture
def good_mf():
return masterfile.load(good_path())
@pytest.fixture
def nosettings_mf():
return masterfile.load(example_path())
@pytest.fixture
def problems_mf():
return masterfile.load(problems_path())
<commit_msg>Add () to fixture definitions<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Part of the masterfile package: https://github.com/njvack/masterfile
# Copyright (c) 2018 Board of Regents of the University of Wisconsin System
# Written by Nate Vack <njvack@wisc.edu> at the Center for Healthy Minds
# at the University of Wisconsin-Madison.
# Released under MIT licence; see LICENSE at the package root.
from os import path
from glob import glob
import pytest
import masterfile
# EXAMPLE_PATH =
# GOOD_PATH =
# GOOD_CSVS = glob(path.join(GOOD_PATH, '*csv'))
# PROBLEMS_PATH = path.join(EXAMPLE_PATH, 'problems')
@pytest.fixture()
def example_path():
return path.join(path.dirname(path.abspath(__file__)), 'examples')
@pytest.fixture()
def good_path():
return path.join(example_path(), 'good')
@pytest.fixture()
def good_csvs():
return glob(path.join(good_path(), '*csv'))
@pytest.fixture()
def problems_path():
return path.join(example_path(), 'problems')
@pytest.fixture()
def good_mf():
return masterfile.load(good_path())
@pytest.fixture()
def nosettings_mf():
return masterfile.load(example_path())
@pytest.fixture()
def problems_mf():
return masterfile.load(problems_path())
|
f829ed54e4eddce15b9da01d0d669a4242c86360
|
tests/test_db.py
|
tests/test_db.py
|
import pytest
try:
import cPickle as pickle
except ImportError:
import pickle
from .models import Post
pytestmark = pytest.mark.django_db
@pytest.mark.django_db
@pytest.fixture
def post():
return Post.objects.create(title='Pickling')
def test_equal(post):
restored = pickle.loads(pickle.dumps(post, -1))
assert restored == post
def test_packed(post):
stored = pickle.dumps(post)
assert b'model_unpickle' in stored # Our unpickling function is used
assert b'title' not in stored # Attributes are packed
def test_state_packed(post):
stored = pickle.dumps(post)
assert b'_state' not in stored
assert b'db' not in stored
assert b'adding' not in stored
def test_deferred(post):
p = Post.objects.defer('title').get(pk=post.pk)
restored = pickle.loads(pickle.dumps(p, -1))
assert restored == p
|
import pytest
try:
import cPickle as pickle
except ImportError:
import pickle
from .models import Post
pytestmark = pytest.mark.django_db
@pytest.mark.django_db
@pytest.fixture
def post():
return Post.objects.create(title='Pickling')
def test_equal(post):
restored = pickle.loads(pickle.dumps(post, -1))
assert restored == post
def test_packed(post):
stored = pickle.dumps(post)
assert b'model_unpickle' in stored # Our unpickling function is used
assert b'title' not in stored # Attributes are packed
def test_state_packed(post):
stored = pickle.dumps(post, -1)
assert b'_state' not in stored
assert b'db' not in stored
assert b'adding' not in stored
def test_deferred(post):
p = Post.objects.defer('title').get(pk=post.pk)
restored = pickle.loads(pickle.dumps(p, -1))
assert restored == p
|
Fix tests for Python 2.7
|
Fix tests for Python 2.7
|
Python
|
bsd-3-clause
|
Suor/django-pickling
|
import pytest
try:
import cPickle as pickle
except ImportError:
import pickle
from .models import Post
pytestmark = pytest.mark.django_db
@pytest.mark.django_db
@pytest.fixture
def post():
return Post.objects.create(title='Pickling')
def test_equal(post):
restored = pickle.loads(pickle.dumps(post, -1))
assert restored == post
def test_packed(post):
stored = pickle.dumps(post)
assert b'model_unpickle' in stored # Our unpickling function is used
assert b'title' not in stored # Attributes are packed
def test_state_packed(post):
stored = pickle.dumps(post)
assert b'_state' not in stored
assert b'db' not in stored
assert b'adding' not in stored
def test_deferred(post):
p = Post.objects.defer('title').get(pk=post.pk)
restored = pickle.loads(pickle.dumps(p, -1))
assert restored == p
Fix tests for Python 2.7
|
import pytest
try:
import cPickle as pickle
except ImportError:
import pickle
from .models import Post
pytestmark = pytest.mark.django_db
@pytest.mark.django_db
@pytest.fixture
def post():
return Post.objects.create(title='Pickling')
def test_equal(post):
restored = pickle.loads(pickle.dumps(post, -1))
assert restored == post
def test_packed(post):
stored = pickle.dumps(post)
assert b'model_unpickle' in stored # Our unpickling function is used
assert b'title' not in stored # Attributes are packed
def test_state_packed(post):
stored = pickle.dumps(post, -1)
assert b'_state' not in stored
assert b'db' not in stored
assert b'adding' not in stored
def test_deferred(post):
p = Post.objects.defer('title').get(pk=post.pk)
restored = pickle.loads(pickle.dumps(p, -1))
assert restored == p
|
<commit_before>import pytest
try:
import cPickle as pickle
except ImportError:
import pickle
from .models import Post
pytestmark = pytest.mark.django_db
@pytest.mark.django_db
@pytest.fixture
def post():
return Post.objects.create(title='Pickling')
def test_equal(post):
restored = pickle.loads(pickle.dumps(post, -1))
assert restored == post
def test_packed(post):
stored = pickle.dumps(post)
assert b'model_unpickle' in stored # Our unpickling function is used
assert b'title' not in stored # Attributes are packed
def test_state_packed(post):
stored = pickle.dumps(post)
assert b'_state' not in stored
assert b'db' not in stored
assert b'adding' not in stored
def test_deferred(post):
p = Post.objects.defer('title').get(pk=post.pk)
restored = pickle.loads(pickle.dumps(p, -1))
assert restored == p
<commit_msg>Fix tests for Python 2.7<commit_after>
|
import pytest
try:
import cPickle as pickle
except ImportError:
import pickle
from .models import Post
pytestmark = pytest.mark.django_db
@pytest.mark.django_db
@pytest.fixture
def post():
return Post.objects.create(title='Pickling')
def test_equal(post):
restored = pickle.loads(pickle.dumps(post, -1))
assert restored == post
def test_packed(post):
stored = pickle.dumps(post)
assert b'model_unpickle' in stored # Our unpickling function is used
assert b'title' not in stored # Attributes are packed
def test_state_packed(post):
stored = pickle.dumps(post, -1)
assert b'_state' not in stored
assert b'db' not in stored
assert b'adding' not in stored
def test_deferred(post):
p = Post.objects.defer('title').get(pk=post.pk)
restored = pickle.loads(pickle.dumps(p, -1))
assert restored == p
|
import pytest
try:
import cPickle as pickle
except ImportError:
import pickle
from .models import Post
pytestmark = pytest.mark.django_db
@pytest.mark.django_db
@pytest.fixture
def post():
return Post.objects.create(title='Pickling')
def test_equal(post):
restored = pickle.loads(pickle.dumps(post, -1))
assert restored == post
def test_packed(post):
stored = pickle.dumps(post)
assert b'model_unpickle' in stored # Our unpickling function is used
assert b'title' not in stored # Attributes are packed
def test_state_packed(post):
stored = pickle.dumps(post)
assert b'_state' not in stored
assert b'db' not in stored
assert b'adding' not in stored
def test_deferred(post):
p = Post.objects.defer('title').get(pk=post.pk)
restored = pickle.loads(pickle.dumps(p, -1))
assert restored == p
Fix tests for Python 2.7import pytest
try:
import cPickle as pickle
except ImportError:
import pickle
from .models import Post
pytestmark = pytest.mark.django_db
@pytest.mark.django_db
@pytest.fixture
def post():
return Post.objects.create(title='Pickling')
def test_equal(post):
restored = pickle.loads(pickle.dumps(post, -1))
assert restored == post
def test_packed(post):
stored = pickle.dumps(post)
assert b'model_unpickle' in stored # Our unpickling function is used
assert b'title' not in stored # Attributes are packed
def test_state_packed(post):
stored = pickle.dumps(post, -1)
assert b'_state' not in stored
assert b'db' not in stored
assert b'adding' not in stored
def test_deferred(post):
p = Post.objects.defer('title').get(pk=post.pk)
restored = pickle.loads(pickle.dumps(p, -1))
assert restored == p
|
<commit_before>import pytest
try:
import cPickle as pickle
except ImportError:
import pickle
from .models import Post
pytestmark = pytest.mark.django_db
@pytest.mark.django_db
@pytest.fixture
def post():
return Post.objects.create(title='Pickling')
def test_equal(post):
restored = pickle.loads(pickle.dumps(post, -1))
assert restored == post
def test_packed(post):
stored = pickle.dumps(post)
assert b'model_unpickle' in stored # Our unpickling function is used
assert b'title' not in stored # Attributes are packed
def test_state_packed(post):
stored = pickle.dumps(post)
assert b'_state' not in stored
assert b'db' not in stored
assert b'adding' not in stored
def test_deferred(post):
p = Post.objects.defer('title').get(pk=post.pk)
restored = pickle.loads(pickle.dumps(p, -1))
assert restored == p
<commit_msg>Fix tests for Python 2.7<commit_after>import pytest
try:
import cPickle as pickle
except ImportError:
import pickle
from .models import Post
pytestmark = pytest.mark.django_db
@pytest.mark.django_db
@pytest.fixture
def post():
return Post.objects.create(title='Pickling')
def test_equal(post):
restored = pickle.loads(pickle.dumps(post, -1))
assert restored == post
def test_packed(post):
stored = pickle.dumps(post)
assert b'model_unpickle' in stored # Our unpickling function is used
assert b'title' not in stored # Attributes are packed
def test_state_packed(post):
stored = pickle.dumps(post, -1)
assert b'_state' not in stored
assert b'db' not in stored
assert b'adding' not in stored
def test_deferred(post):
p = Post.objects.defer('title').get(pk=post.pk)
restored = pickle.loads(pickle.dumps(p, -1))
assert restored == p
|
aba661dccae7ef43bdb43d9909b9e84b632bb7a4
|
qipipe/helpers/file_helper.py
|
qipipe/helpers/file_helper.py
|
import os
class FileIterator(object):
"""
This FileIterator iterates over the paths contained in one or more directories.
"""
def __init__(self, *paths):
"""
@param paths: the file or directory paths.
"""
self._paths = paths
def __iter__(self):
return self.next()
def next(self):
for f in self._paths:
if os.path.isfile(f):
yield f
elif os.path.isdir(f):
for root, dirs, paths in os.walk(f):
for f in paths:
path = os.path.join(root, f)
yield path
|
import os
class FileIterator(object):
"""
This FileIterator iterates over the paths contained in one or more directories.
"""
def __init__(self, *paths):
"""
@param paths: the file or directory paths.
"""
self._paths = paths
def __iter__(self):
return self.next()
def next(self):
for f in self._paths:
if os.path.isfile(f):
yield f
elif os.path.isdir(f):
for root, dirs, fnames in os.walk(f):
for fn in fnames:
path = os.path.join(root, fn)
yield path
|
Rename paths variable to fnames.
|
Rename paths variable to fnames.
|
Python
|
bsd-2-clause
|
ohsu-qin/qipipe
|
import os
class FileIterator(object):
"""
This FileIterator iterates over the paths contained in one or more directories.
"""
def __init__(self, *paths):
"""
@param paths: the file or directory paths.
"""
self._paths = paths
def __iter__(self):
return self.next()
def next(self):
for f in self._paths:
if os.path.isfile(f):
yield f
elif os.path.isdir(f):
for root, dirs, paths in os.walk(f):
for f in paths:
path = os.path.join(root, f)
yield path
Rename paths variable to fnames.
|
import os
class FileIterator(object):
"""
This FileIterator iterates over the paths contained in one or more directories.
"""
def __init__(self, *paths):
"""
@param paths: the file or directory paths.
"""
self._paths = paths
def __iter__(self):
return self.next()
def next(self):
for f in self._paths:
if os.path.isfile(f):
yield f
elif os.path.isdir(f):
for root, dirs, fnames in os.walk(f):
for fn in fnames:
path = os.path.join(root, fn)
yield path
|
<commit_before>import os
class FileIterator(object):
"""
This FileIterator iterates over the paths contained in one or more directories.
"""
def __init__(self, *paths):
"""
@param paths: the file or directory paths.
"""
self._paths = paths
def __iter__(self):
return self.next()
def next(self):
for f in self._paths:
if os.path.isfile(f):
yield f
elif os.path.isdir(f):
for root, dirs, paths in os.walk(f):
for f in paths:
path = os.path.join(root, f)
yield path
<commit_msg>Rename paths variable to fnames.<commit_after>
|
import os
class FileIterator(object):
"""
This FileIterator iterates over the paths contained in one or more directories.
"""
def __init__(self, *paths):
"""
@param paths: the file or directory paths.
"""
self._paths = paths
def __iter__(self):
return self.next()
def next(self):
for f in self._paths:
if os.path.isfile(f):
yield f
elif os.path.isdir(f):
for root, dirs, fnames in os.walk(f):
for fn in fnames:
path = os.path.join(root, fn)
yield path
|
import os
class FileIterator(object):
"""
This FileIterator iterates over the paths contained in one or more directories.
"""
def __init__(self, *paths):
"""
@param paths: the file or directory paths.
"""
self._paths = paths
def __iter__(self):
return self.next()
def next(self):
for f in self._paths:
if os.path.isfile(f):
yield f
elif os.path.isdir(f):
for root, dirs, paths in os.walk(f):
for f in paths:
path = os.path.join(root, f)
yield path
Rename paths variable to fnames.import os
class FileIterator(object):
"""
This FileIterator iterates over the paths contained in one or more directories.
"""
def __init__(self, *paths):
"""
@param paths: the file or directory paths.
"""
self._paths = paths
def __iter__(self):
return self.next()
def next(self):
for f in self._paths:
if os.path.isfile(f):
yield f
elif os.path.isdir(f):
for root, dirs, fnames in os.walk(f):
for fn in fnames:
path = os.path.join(root, fn)
yield path
|
<commit_before>import os
class FileIterator(object):
"""
This FileIterator iterates over the paths contained in one or more directories.
"""
def __init__(self, *paths):
"""
@param paths: the file or directory paths.
"""
self._paths = paths
def __iter__(self):
return self.next()
def next(self):
for f in self._paths:
if os.path.isfile(f):
yield f
elif os.path.isdir(f):
for root, dirs, paths in os.walk(f):
for f in paths:
path = os.path.join(root, f)
yield path
<commit_msg>Rename paths variable to fnames.<commit_after>import os
class FileIterator(object):
"""
This FileIterator iterates over the paths contained in one or more directories.
"""
def __init__(self, *paths):
"""
@param paths: the file or directory paths.
"""
self._paths = paths
def __iter__(self):
return self.next()
def next(self):
for f in self._paths:
if os.path.isfile(f):
yield f
elif os.path.isdir(f):
for root, dirs, fnames in os.walk(f):
for fn in fnames:
path = os.path.join(root, fn)
yield path
|
ea3e327bb602689e136479ce41f568aa2ee47cf4
|
databot/utils/html.py
|
databot/utils/html.py
|
import bs4
import cgi
def get_content(data, errors='strict'):
headers = {k.lower(): v for k, v in data.get('headers', {}).items()}
content_type_header = headers.get('content-type', '')
content_type, params = cgi.parse_header(content_type_header)
if content_type.lower() in ('text/html', 'text/xml'):
soup = bs4.BeautifulSoup(data['content'], 'lxml', from_encoding=data['encoding'])
return data['content'].decode(soup.original_encoding, errors)
elif content_type.startswith('text/'):
return data['content'].decode(data['encoding'], errors)
else:
return data['content']
|
import bs4
import cgi
def get_page_encoding(soup, default_encoding=None):
for meta in soup.select('head > meta[http-equiv="Content-Type"]'):
content_type, params = cgi.parse_header(meta['content'])
if 'charset' in params:
return params['charset']
return default_encoding
def get_content(data, errors='strict'):
headers = {k.lower(): v for k, v in data.get('headers', {}).items()}
content_type_header = headers.get('content-type', '')
content_type, params = cgi.parse_header(content_type_header)
if content_type.lower() in ('text/html', 'text/xml'):
soup = bs4.BeautifulSoup(data['content'], 'lxml', from_encoding=data['encoding'])
encoding = get_page_encoding(soup, soup.original_encoding)
return data['content'].decode(encoding, errors)
elif content_type.startswith('text/'):
return data['content'].decode(data['encoding'], errors)
else:
return data['content']
|
Improve detection of page encoding
|
Improve detection of page encoding
|
Python
|
agpl-3.0
|
sirex/databot,sirex/databot
|
import bs4
import cgi
def get_content(data, errors='strict'):
headers = {k.lower(): v for k, v in data.get('headers', {}).items()}
content_type_header = headers.get('content-type', '')
content_type, params = cgi.parse_header(content_type_header)
if content_type.lower() in ('text/html', 'text/xml'):
soup = bs4.BeautifulSoup(data['content'], 'lxml', from_encoding=data['encoding'])
return data['content'].decode(soup.original_encoding, errors)
elif content_type.startswith('text/'):
return data['content'].decode(data['encoding'], errors)
else:
return data['content']
Improve detection of page encoding
|
import bs4
import cgi
def get_page_encoding(soup, default_encoding=None):
for meta in soup.select('head > meta[http-equiv="Content-Type"]'):
content_type, params = cgi.parse_header(meta['content'])
if 'charset' in params:
return params['charset']
return default_encoding
def get_content(data, errors='strict'):
headers = {k.lower(): v for k, v in data.get('headers', {}).items()}
content_type_header = headers.get('content-type', '')
content_type, params = cgi.parse_header(content_type_header)
if content_type.lower() in ('text/html', 'text/xml'):
soup = bs4.BeautifulSoup(data['content'], 'lxml', from_encoding=data['encoding'])
encoding = get_page_encoding(soup, soup.original_encoding)
return data['content'].decode(encoding, errors)
elif content_type.startswith('text/'):
return data['content'].decode(data['encoding'], errors)
else:
return data['content']
|
<commit_before>import bs4
import cgi
def get_content(data, errors='strict'):
headers = {k.lower(): v for k, v in data.get('headers', {}).items()}
content_type_header = headers.get('content-type', '')
content_type, params = cgi.parse_header(content_type_header)
if content_type.lower() in ('text/html', 'text/xml'):
soup = bs4.BeautifulSoup(data['content'], 'lxml', from_encoding=data['encoding'])
return data['content'].decode(soup.original_encoding, errors)
elif content_type.startswith('text/'):
return data['content'].decode(data['encoding'], errors)
else:
return data['content']
<commit_msg>Improve detection of page encoding<commit_after>
|
import bs4
import cgi
def get_page_encoding(soup, default_encoding=None):
for meta in soup.select('head > meta[http-equiv="Content-Type"]'):
content_type, params = cgi.parse_header(meta['content'])
if 'charset' in params:
return params['charset']
return default_encoding
def get_content(data, errors='strict'):
headers = {k.lower(): v for k, v in data.get('headers', {}).items()}
content_type_header = headers.get('content-type', '')
content_type, params = cgi.parse_header(content_type_header)
if content_type.lower() in ('text/html', 'text/xml'):
soup = bs4.BeautifulSoup(data['content'], 'lxml', from_encoding=data['encoding'])
encoding = get_page_encoding(soup, soup.original_encoding)
return data['content'].decode(encoding, errors)
elif content_type.startswith('text/'):
return data['content'].decode(data['encoding'], errors)
else:
return data['content']
|
import bs4
import cgi
def get_content(data, errors='strict'):
headers = {k.lower(): v for k, v in data.get('headers', {}).items()}
content_type_header = headers.get('content-type', '')
content_type, params = cgi.parse_header(content_type_header)
if content_type.lower() in ('text/html', 'text/xml'):
soup = bs4.BeautifulSoup(data['content'], 'lxml', from_encoding=data['encoding'])
return data['content'].decode(soup.original_encoding, errors)
elif content_type.startswith('text/'):
return data['content'].decode(data['encoding'], errors)
else:
return data['content']
Improve detection of page encodingimport bs4
import cgi
def get_page_encoding(soup, default_encoding=None):
for meta in soup.select('head > meta[http-equiv="Content-Type"]'):
content_type, params = cgi.parse_header(meta['content'])
if 'charset' in params:
return params['charset']
return default_encoding
def get_content(data, errors='strict'):
headers = {k.lower(): v for k, v in data.get('headers', {}).items()}
content_type_header = headers.get('content-type', '')
content_type, params = cgi.parse_header(content_type_header)
if content_type.lower() in ('text/html', 'text/xml'):
soup = bs4.BeautifulSoup(data['content'], 'lxml', from_encoding=data['encoding'])
encoding = get_page_encoding(soup, soup.original_encoding)
return data['content'].decode(encoding, errors)
elif content_type.startswith('text/'):
return data['content'].decode(data['encoding'], errors)
else:
return data['content']
|
<commit_before>import bs4
import cgi
def get_content(data, errors='strict'):
headers = {k.lower(): v for k, v in data.get('headers', {}).items()}
content_type_header = headers.get('content-type', '')
content_type, params = cgi.parse_header(content_type_header)
if content_type.lower() in ('text/html', 'text/xml'):
soup = bs4.BeautifulSoup(data['content'], 'lxml', from_encoding=data['encoding'])
return data['content'].decode(soup.original_encoding, errors)
elif content_type.startswith('text/'):
return data['content'].decode(data['encoding'], errors)
else:
return data['content']
<commit_msg>Improve detection of page encoding<commit_after>import bs4
import cgi
def get_page_encoding(soup, default_encoding=None):
for meta in soup.select('head > meta[http-equiv="Content-Type"]'):
content_type, params = cgi.parse_header(meta['content'])
if 'charset' in params:
return params['charset']
return default_encoding
def get_content(data, errors='strict'):
headers = {k.lower(): v for k, v in data.get('headers', {}).items()}
content_type_header = headers.get('content-type', '')
content_type, params = cgi.parse_header(content_type_header)
if content_type.lower() in ('text/html', 'text/xml'):
soup = bs4.BeautifulSoup(data['content'], 'lxml', from_encoding=data['encoding'])
encoding = get_page_encoding(soup, soup.original_encoding)
return data['content'].decode(encoding, errors)
elif content_type.startswith('text/'):
return data['content'].decode(data['encoding'], errors)
else:
return data['content']
|
52884380ce9a6dc79e44e3ce81b7e9757de6fb04
|
tests/integration/test_impersonation.py
|
tests/integration/test_impersonation.py
|
import pytest
from tenable_io.api.users import UserCreateRequest
from tests.base import BaseTest
from tests.config import TenableIOTestConfig
class TestImpersonation(BaseTest):
@pytest.fixture(scope='class')
def user(self, app, client):
user_id = client.users_api.create(UserCreateRequest(
username=app.session_name(u'test_impersonation%%s@%s' % TenableIOTestConfig.get('users_domain_name')),
name='test_impersonation',
password='test_impersonation',
permissions='16',
type='local',
email=app.session_name(u'test_user_email+%%s@%s' % TenableIOTestConfig.get('users_domain_name'))
))
user = client.users_api.get(user_id)
yield user
client.users_api.delete(user_id)
def test_impersonation(self, client, user):
impersonating_client = client.impersonate(user.username)
impersonating_user = impersonating_client.session_api.get()
assert impersonating_user.username == user.username, u'The current session user should be the impersonated user'
|
import pytest
from tenable_io.api.users import UserCreateRequest
from tests.base import BaseTest
from tests.config import TenableIOTestConfig
class TestImpersonation(BaseTest):
@pytest.fixture(scope='class')
def user(self, app, client):
user_id = client.users_api.create(UserCreateRequest(
username=app.session_name(u'test_impersonation%%s@%s' % TenableIOTestConfig.get('users_domain_name')),
name='test_impersonation',
password='Sdk!Test1',
permissions='16',
type='local',
email=app.session_name(u'test_user_email+%%s@%s' % TenableIOTestConfig.get('users_domain_name'))
))
user = client.users_api.get(user_id)
yield user
client.users_api.delete(user_id)
def test_impersonation(self, client, user):
impersonating_client = client.impersonate(user.username)
impersonating_user = impersonating_client.session_api.get()
assert impersonating_user.username == user.username, u'The current session user should be the impersonated user'
|
Update password in unit test to comply with password rules
|
Update password in unit test to comply with password rules
|
Python
|
mit
|
tenable/Tenable.io-SDK-for-Python
|
import pytest
from tenable_io.api.users import UserCreateRequest
from tests.base import BaseTest
from tests.config import TenableIOTestConfig
class TestImpersonation(BaseTest):
@pytest.fixture(scope='class')
def user(self, app, client):
user_id = client.users_api.create(UserCreateRequest(
username=app.session_name(u'test_impersonation%%s@%s' % TenableIOTestConfig.get('users_domain_name')),
name='test_impersonation',
password='test_impersonation',
permissions='16',
type='local',
email=app.session_name(u'test_user_email+%%s@%s' % TenableIOTestConfig.get('users_domain_name'))
))
user = client.users_api.get(user_id)
yield user
client.users_api.delete(user_id)
def test_impersonation(self, client, user):
impersonating_client = client.impersonate(user.username)
impersonating_user = impersonating_client.session_api.get()
assert impersonating_user.username == user.username, u'The current session user should be the impersonated user'
Update password in unit test to comply with password rules
|
import pytest
from tenable_io.api.users import UserCreateRequest
from tests.base import BaseTest
from tests.config import TenableIOTestConfig
class TestImpersonation(BaseTest):
@pytest.fixture(scope='class')
def user(self, app, client):
user_id = client.users_api.create(UserCreateRequest(
username=app.session_name(u'test_impersonation%%s@%s' % TenableIOTestConfig.get('users_domain_name')),
name='test_impersonation',
password='Sdk!Test1',
permissions='16',
type='local',
email=app.session_name(u'test_user_email+%%s@%s' % TenableIOTestConfig.get('users_domain_name'))
))
user = client.users_api.get(user_id)
yield user
client.users_api.delete(user_id)
def test_impersonation(self, client, user):
impersonating_client = client.impersonate(user.username)
impersonating_user = impersonating_client.session_api.get()
assert impersonating_user.username == user.username, u'The current session user should be the impersonated user'
|
<commit_before>import pytest
from tenable_io.api.users import UserCreateRequest
from tests.base import BaseTest
from tests.config import TenableIOTestConfig
class TestImpersonation(BaseTest):
@pytest.fixture(scope='class')
def user(self, app, client):
user_id = client.users_api.create(UserCreateRequest(
username=app.session_name(u'test_impersonation%%s@%s' % TenableIOTestConfig.get('users_domain_name')),
name='test_impersonation',
password='test_impersonation',
permissions='16',
type='local',
email=app.session_name(u'test_user_email+%%s@%s' % TenableIOTestConfig.get('users_domain_name'))
))
user = client.users_api.get(user_id)
yield user
client.users_api.delete(user_id)
def test_impersonation(self, client, user):
impersonating_client = client.impersonate(user.username)
impersonating_user = impersonating_client.session_api.get()
assert impersonating_user.username == user.username, u'The current session user should be the impersonated user'
<commit_msg>Update password in unit test to comply with password rules<commit_after>
|
import pytest
from tenable_io.api.users import UserCreateRequest
from tests.base import BaseTest
from tests.config import TenableIOTestConfig
class TestImpersonation(BaseTest):
@pytest.fixture(scope='class')
def user(self, app, client):
user_id = client.users_api.create(UserCreateRequest(
username=app.session_name(u'test_impersonation%%s@%s' % TenableIOTestConfig.get('users_domain_name')),
name='test_impersonation',
password='Sdk!Test1',
permissions='16',
type='local',
email=app.session_name(u'test_user_email+%%s@%s' % TenableIOTestConfig.get('users_domain_name'))
))
user = client.users_api.get(user_id)
yield user
client.users_api.delete(user_id)
def test_impersonation(self, client, user):
impersonating_client = client.impersonate(user.username)
impersonating_user = impersonating_client.session_api.get()
assert impersonating_user.username == user.username, u'The current session user should be the impersonated user'
|
import pytest
from tenable_io.api.users import UserCreateRequest
from tests.base import BaseTest
from tests.config import TenableIOTestConfig
class TestImpersonation(BaseTest):
@pytest.fixture(scope='class')
def user(self, app, client):
user_id = client.users_api.create(UserCreateRequest(
username=app.session_name(u'test_impersonation%%s@%s' % TenableIOTestConfig.get('users_domain_name')),
name='test_impersonation',
password='test_impersonation',
permissions='16',
type='local',
email=app.session_name(u'test_user_email+%%s@%s' % TenableIOTestConfig.get('users_domain_name'))
))
user = client.users_api.get(user_id)
yield user
client.users_api.delete(user_id)
def test_impersonation(self, client, user):
impersonating_client = client.impersonate(user.username)
impersonating_user = impersonating_client.session_api.get()
assert impersonating_user.username == user.username, u'The current session user should be the impersonated user'
Update password in unit test to comply with password rulesimport pytest
from tenable_io.api.users import UserCreateRequest
from tests.base import BaseTest
from tests.config import TenableIOTestConfig
class TestImpersonation(BaseTest):
@pytest.fixture(scope='class')
def user(self, app, client):
user_id = client.users_api.create(UserCreateRequest(
username=app.session_name(u'test_impersonation%%s@%s' % TenableIOTestConfig.get('users_domain_name')),
name='test_impersonation',
password='Sdk!Test1',
permissions='16',
type='local',
email=app.session_name(u'test_user_email+%%s@%s' % TenableIOTestConfig.get('users_domain_name'))
))
user = client.users_api.get(user_id)
yield user
client.users_api.delete(user_id)
def test_impersonation(self, client, user):
impersonating_client = client.impersonate(user.username)
impersonating_user = impersonating_client.session_api.get()
assert impersonating_user.username == user.username, u'The current session user should be the impersonated user'
|
<commit_before>import pytest
from tenable_io.api.users import UserCreateRequest
from tests.base import BaseTest
from tests.config import TenableIOTestConfig
class TestImpersonation(BaseTest):
@pytest.fixture(scope='class')
def user(self, app, client):
user_id = client.users_api.create(UserCreateRequest(
username=app.session_name(u'test_impersonation%%s@%s' % TenableIOTestConfig.get('users_domain_name')),
name='test_impersonation',
password='test_impersonation',
permissions='16',
type='local',
email=app.session_name(u'test_user_email+%%s@%s' % TenableIOTestConfig.get('users_domain_name'))
))
user = client.users_api.get(user_id)
yield user
client.users_api.delete(user_id)
def test_impersonation(self, client, user):
impersonating_client = client.impersonate(user.username)
impersonating_user = impersonating_client.session_api.get()
assert impersonating_user.username == user.username, u'The current session user should be the impersonated user'
<commit_msg>Update password in unit test to comply with password rules<commit_after>import pytest
from tenable_io.api.users import UserCreateRequest
from tests.base import BaseTest
from tests.config import TenableIOTestConfig
class TestImpersonation(BaseTest):
@pytest.fixture(scope='class')
def user(self, app, client):
user_id = client.users_api.create(UserCreateRequest(
username=app.session_name(u'test_impersonation%%s@%s' % TenableIOTestConfig.get('users_domain_name')),
name='test_impersonation',
password='Sdk!Test1',
permissions='16',
type='local',
email=app.session_name(u'test_user_email+%%s@%s' % TenableIOTestConfig.get('users_domain_name'))
))
user = client.users_api.get(user_id)
yield user
client.users_api.delete(user_id)
def test_impersonation(self, client, user):
impersonating_client = client.impersonate(user.username)
impersonating_user = impersonating_client.session_api.get()
assert impersonating_user.username == user.username, u'The current session user should be the impersonated user'
|
8f330d4d07ed548a9cab348895124f5f5d92a6e8
|
dask_ndmeasure/_test_utils.py
|
dask_ndmeasure/_test_utils.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import dask.array.utils
def _assert_eq_nan(a, b, **kwargs):
a = a.copy()
b = b.copy()
a_nan = (a != a)
b_nan = (b != b)
a[a_nan] = 0
b[b_nan] = 0
dask.array.utils.assert_eq(a_nan, b_nan, **kwargs)
dask.array.utils.assert_eq(a, b, **kwargs)
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import dask.array.utils
def _assert_eq_nan(a, b, **kwargs):
a = a.copy()
b = b.copy()
a = a[...]
b = b[...]
a_nan = (a != a)
b_nan = (b != b)
a[a_nan] = 0
b[b_nan] = 0
dask.array.utils.assert_eq(a_nan, b_nan, **kwargs)
dask.array.utils.assert_eq(a, b, **kwargs)
|
Handle scalar values in _assert_eq_nan
|
Handle scalar values in _assert_eq_nan
Add some special handling in `_assert_eq_nan` to handle having scalar
values passed in. Basically ensure that everything provided is an array.
This is a no-op for arrays, but converts scalars into 0-D arrays. By
doing this, we are able to use the same `nan` handling code. Also
convert the 0-D arrays back to scalars. This means a 0-D array will be
treated as a scalar in the end. However Dask doesn't really have a way
to differentiate the two. So this is fine.
|
Python
|
bsd-3-clause
|
dask-image/dask-ndmeasure
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import dask.array.utils
def _assert_eq_nan(a, b, **kwargs):
a = a.copy()
b = b.copy()
a_nan = (a != a)
b_nan = (b != b)
a[a_nan] = 0
b[b_nan] = 0
dask.array.utils.assert_eq(a_nan, b_nan, **kwargs)
dask.array.utils.assert_eq(a, b, **kwargs)
Handle scalar values in _assert_eq_nan
Add some special handling in `_assert_eq_nan` to handle having scalar
values passed in. Basically ensure that everything provided is an array.
This is a no-op for arrays, but converts scalars into 0-D arrays. By
doing this, we are able to use the same `nan` handling code. Also
convert the 0-D arrays back to scalars. This means a 0-D array will be
treated as a scalar in the end. However Dask doesn't really have a way
to differentiate the two. So this is fine.
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import dask.array.utils
def _assert_eq_nan(a, b, **kwargs):
a = a.copy()
b = b.copy()
a = a[...]
b = b[...]
a_nan = (a != a)
b_nan = (b != b)
a[a_nan] = 0
b[b_nan] = 0
dask.array.utils.assert_eq(a_nan, b_nan, **kwargs)
dask.array.utils.assert_eq(a, b, **kwargs)
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import
import dask.array.utils
def _assert_eq_nan(a, b, **kwargs):
a = a.copy()
b = b.copy()
a_nan = (a != a)
b_nan = (b != b)
a[a_nan] = 0
b[b_nan] = 0
dask.array.utils.assert_eq(a_nan, b_nan, **kwargs)
dask.array.utils.assert_eq(a, b, **kwargs)
<commit_msg>Handle scalar values in _assert_eq_nan
Add some special handling in `_assert_eq_nan` to handle having scalar
values passed in. Basically ensure that everything provided is an array.
This is a no-op for arrays, but converts scalars into 0-D arrays. By
doing this, we are able to use the same `nan` handling code. Also
convert the 0-D arrays back to scalars. This means a 0-D array will be
treated as a scalar in the end. However Dask doesn't really have a way
to differentiate the two. So this is fine.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import dask.array.utils
def _assert_eq_nan(a, b, **kwargs):
a = a.copy()
b = b.copy()
a = a[...]
b = b[...]
a_nan = (a != a)
b_nan = (b != b)
a[a_nan] = 0
b[b_nan] = 0
dask.array.utils.assert_eq(a_nan, b_nan, **kwargs)
dask.array.utils.assert_eq(a, b, **kwargs)
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import dask.array.utils
def _assert_eq_nan(a, b, **kwargs):
a = a.copy()
b = b.copy()
a_nan = (a != a)
b_nan = (b != b)
a[a_nan] = 0
b[b_nan] = 0
dask.array.utils.assert_eq(a_nan, b_nan, **kwargs)
dask.array.utils.assert_eq(a, b, **kwargs)
Handle scalar values in _assert_eq_nan
Add some special handling in `_assert_eq_nan` to handle having scalar
values passed in. Basically ensure that everything provided is an array.
This is a no-op for arrays, but converts scalars into 0-D arrays. By
doing this, we are able to use the same `nan` handling code. Also
convert the 0-D arrays back to scalars. This means a 0-D array will be
treated as a scalar in the end. However Dask doesn't really have a way
to differentiate the two. So this is fine.# -*- coding: utf-8 -*-
from __future__ import absolute_import
import dask.array.utils
def _assert_eq_nan(a, b, **kwargs):
a = a.copy()
b = b.copy()
a = a[...]
b = b[...]
a_nan = (a != a)
b_nan = (b != b)
a[a_nan] = 0
b[b_nan] = 0
dask.array.utils.assert_eq(a_nan, b_nan, **kwargs)
dask.array.utils.assert_eq(a, b, **kwargs)
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import
import dask.array.utils
def _assert_eq_nan(a, b, **kwargs):
a = a.copy()
b = b.copy()
a_nan = (a != a)
b_nan = (b != b)
a[a_nan] = 0
b[b_nan] = 0
dask.array.utils.assert_eq(a_nan, b_nan, **kwargs)
dask.array.utils.assert_eq(a, b, **kwargs)
<commit_msg>Handle scalar values in _assert_eq_nan
Add some special handling in `_assert_eq_nan` to handle having scalar
values passed in. Basically ensure that everything provided is an array.
This is a no-op for arrays, but converts scalars into 0-D arrays. By
doing this, we are able to use the same `nan` handling code. Also
convert the 0-D arrays back to scalars. This means a 0-D array will be
treated as a scalar in the end. However Dask doesn't really have a way
to differentiate the two. So this is fine.<commit_after># -*- coding: utf-8 -*-
from __future__ import absolute_import
import dask.array.utils
def _assert_eq_nan(a, b, **kwargs):
a = a.copy()
b = b.copy()
a = a[...]
b = b[...]
a_nan = (a != a)
b_nan = (b != b)
a[a_nan] = 0
b[b_nan] = 0
dask.array.utils.assert_eq(a_nan, b_nan, **kwargs)
dask.array.utils.assert_eq(a, b, **kwargs)
|
892bc14cc087c47909778a178772d0895d2fb599
|
docker/chemml/src/run.py
|
docker/chemml/src/run.py
|
import json
from chemml.models.keras.trained import OrganicLorentzLorenz
from openbabel import OBMol, OBConversion
def ob_convert_str(str_data, in_format, out_format):
mol = OBMol()
conv = OBConversion()
conv.SetInFormat(in_format)
conv.SetOutFormat(out_format)
conv.ReadString(mol, str_data)
return (conv.WriteString(mol), conv.GetOutFormat().GetMIMEType())
def run_calculation(geometry_file, output_file):
# In the future we can probably accept SMILES directly from the molecule
# model. For now we need somewhere to put the output, so the CJSON makes
# more sense.
with open(geometry_file) as f:
cjson = json.load(f)
smiles = cjson_to_smiles(cjson)
cls = OrganicLorentzLorenz()
cls.load(summary=True)
pred = cls.predict(smiles)
calculated_properties = {
"refractiveIndex": {
"label": "LL refractive index",
"value": pred[0],
"units": ""
},
"polarizability": {
"label": "Polarizability",
"value": pred[1],
"units": "Bohr^3"
},
"density": {
"label": "Density",
"value": pred[2],
"units": "Kg/m^3"
}
}
cjson['calculatedProperties'] = calculated_properties
with open(output_file, 'w') as f:
json.dump(cjson, f)
|
import json
from chemml.models.keras.trained import OrganicLorentzLorenz
from openbabel import OBMol, OBConversion
def ob_convert_str(str_data, in_format, out_format):
mol = OBMol()
conv = OBConversion()
conv.SetInFormat(in_format)
conv.SetOutFormat(out_format)
conv.ReadString(mol, str_data)
return (conv.WriteString(mol), conv.GetOutFormat().GetMIMEType())
def run_calculation(geometry_file, output_file):
# In the future we can probably accept SMILES directly from the molecule
# model. For now we need somewhere to put the output, so the CJSON makes
# more sense.
with open(geometry_file) as f:
cjson = json.load(f)
smiles = cjson_to_smiles(cjson)
cls = OrganicLorentzLorenz()
cls.load(summary=True)
pred = cls.predict(smiles)
properties = {
"refractiveIndex": pred[0],
"polarizability": pred[1],
"density": pred[2]
}
cjson['properties'] = properties
with open(output_file, 'w') as f:
json.dump(cjson, f)
|
Change structure of the output properties
|
Change structure of the output properties
|
Python
|
bsd-3-clause
|
OpenChemistry/mongochemdeploy,OpenChemistry/mongochemdeploy
|
import json
from chemml.models.keras.trained import OrganicLorentzLorenz
from openbabel import OBMol, OBConversion
def ob_convert_str(str_data, in_format, out_format):
mol = OBMol()
conv = OBConversion()
conv.SetInFormat(in_format)
conv.SetOutFormat(out_format)
conv.ReadString(mol, str_data)
return (conv.WriteString(mol), conv.GetOutFormat().GetMIMEType())
def run_calculation(geometry_file, output_file):
# In the future we can probably accept SMILES directly from the molecule
# model. For now we need somewhere to put the output, so the CJSON makes
# more sense.
with open(geometry_file) as f:
cjson = json.load(f)
smiles = cjson_to_smiles(cjson)
cls = OrganicLorentzLorenz()
cls.load(summary=True)
pred = cls.predict(smiles)
calculated_properties = {
"refractiveIndex": {
"label": "LL refractive index",
"value": pred[0],
"units": ""
},
"polarizability": {
"label": "Polarizability",
"value": pred[1],
"units": "Bohr^3"
},
"density": {
"label": "Density",
"value": pred[2],
"units": "Kg/m^3"
}
}
cjson['calculatedProperties'] = calculated_properties
with open(output_file, 'w') as f:
json.dump(cjson, f)
Change structure of the output properties
|
import json
from chemml.models.keras.trained import OrganicLorentzLorenz
from openbabel import OBMol, OBConversion
def ob_convert_str(str_data, in_format, out_format):
mol = OBMol()
conv = OBConversion()
conv.SetInFormat(in_format)
conv.SetOutFormat(out_format)
conv.ReadString(mol, str_data)
return (conv.WriteString(mol), conv.GetOutFormat().GetMIMEType())
def run_calculation(geometry_file, output_file):
# In the future we can probably accept SMILES directly from the molecule
# model. For now we need somewhere to put the output, so the CJSON makes
# more sense.
with open(geometry_file) as f:
cjson = json.load(f)
smiles = cjson_to_smiles(cjson)
cls = OrganicLorentzLorenz()
cls.load(summary=True)
pred = cls.predict(smiles)
properties = {
"refractiveIndex": pred[0],
"polarizability": pred[1],
"density": pred[2]
}
cjson['properties'] = properties
with open(output_file, 'w') as f:
json.dump(cjson, f)
|
<commit_before>import json
from chemml.models.keras.trained import OrganicLorentzLorenz
from openbabel import OBMol, OBConversion
def ob_convert_str(str_data, in_format, out_format):
mol = OBMol()
conv = OBConversion()
conv.SetInFormat(in_format)
conv.SetOutFormat(out_format)
conv.ReadString(mol, str_data)
return (conv.WriteString(mol), conv.GetOutFormat().GetMIMEType())
def run_calculation(geometry_file, output_file):
# In the future we can probably accept SMILES directly from the molecule
# model. For now we need somewhere to put the output, so the CJSON makes
# more sense.
with open(geometry_file) as f:
cjson = json.load(f)
smiles = cjson_to_smiles(cjson)
cls = OrganicLorentzLorenz()
cls.load(summary=True)
pred = cls.predict(smiles)
calculated_properties = {
"refractiveIndex": {
"label": "LL refractive index",
"value": pred[0],
"units": ""
},
"polarizability": {
"label": "Polarizability",
"value": pred[1],
"units": "Bohr^3"
},
"density": {
"label": "Density",
"value": pred[2],
"units": "Kg/m^3"
}
}
cjson['calculatedProperties'] = calculated_properties
with open(output_file, 'w') as f:
json.dump(cjson, f)
<commit_msg>Change structure of the output properties<commit_after>
|
import json
from chemml.models.keras.trained import OrganicLorentzLorenz
from openbabel import OBMol, OBConversion
def ob_convert_str(str_data, in_format, out_format):
mol = OBMol()
conv = OBConversion()
conv.SetInFormat(in_format)
conv.SetOutFormat(out_format)
conv.ReadString(mol, str_data)
return (conv.WriteString(mol), conv.GetOutFormat().GetMIMEType())
def run_calculation(geometry_file, output_file):
# In the future we can probably accept SMILES directly from the molecule
# model. For now we need somewhere to put the output, so the CJSON makes
# more sense.
with open(geometry_file) as f:
cjson = json.load(f)
smiles = cjson_to_smiles(cjson)
cls = OrganicLorentzLorenz()
cls.load(summary=True)
pred = cls.predict(smiles)
properties = {
"refractiveIndex": pred[0],
"polarizability": pred[1],
"density": pred[2]
}
cjson['properties'] = properties
with open(output_file, 'w') as f:
json.dump(cjson, f)
|
import json
from chemml.models.keras.trained import OrganicLorentzLorenz
from openbabel import OBMol, OBConversion
def ob_convert_str(str_data, in_format, out_format):
mol = OBMol()
conv = OBConversion()
conv.SetInFormat(in_format)
conv.SetOutFormat(out_format)
conv.ReadString(mol, str_data)
return (conv.WriteString(mol), conv.GetOutFormat().GetMIMEType())
def run_calculation(geometry_file, output_file):
# In the future we can probably accept SMILES directly from the molecule
# model. For now we need somewhere to put the output, so the CJSON makes
# more sense.
with open(geometry_file) as f:
cjson = json.load(f)
smiles = cjson_to_smiles(cjson)
cls = OrganicLorentzLorenz()
cls.load(summary=True)
pred = cls.predict(smiles)
calculated_properties = {
"refractiveIndex": {
"label": "LL refractive index",
"value": pred[0],
"units": ""
},
"polarizability": {
"label": "Polarizability",
"value": pred[1],
"units": "Bohr^3"
},
"density": {
"label": "Density",
"value": pred[2],
"units": "Kg/m^3"
}
}
cjson['calculatedProperties'] = calculated_properties
with open(output_file, 'w') as f:
json.dump(cjson, f)
Change structure of the output propertiesimport json
from chemml.models.keras.trained import OrganicLorentzLorenz
from openbabel import OBMol, OBConversion
def ob_convert_str(str_data, in_format, out_format):
mol = OBMol()
conv = OBConversion()
conv.SetInFormat(in_format)
conv.SetOutFormat(out_format)
conv.ReadString(mol, str_data)
return (conv.WriteString(mol), conv.GetOutFormat().GetMIMEType())
def run_calculation(geometry_file, output_file):
# In the future we can probably accept SMILES directly from the molecule
# model. For now we need somewhere to put the output, so the CJSON makes
# more sense.
with open(geometry_file) as f:
cjson = json.load(f)
smiles = cjson_to_smiles(cjson)
cls = OrganicLorentzLorenz()
cls.load(summary=True)
pred = cls.predict(smiles)
properties = {
"refractiveIndex": pred[0],
"polarizability": pred[1],
"density": pred[2]
}
cjson['properties'] = properties
with open(output_file, 'w') as f:
json.dump(cjson, f)
|
<commit_before>import json
from chemml.models.keras.trained import OrganicLorentzLorenz
from openbabel import OBMol, OBConversion
def ob_convert_str(str_data, in_format, out_format):
mol = OBMol()
conv = OBConversion()
conv.SetInFormat(in_format)
conv.SetOutFormat(out_format)
conv.ReadString(mol, str_data)
return (conv.WriteString(mol), conv.GetOutFormat().GetMIMEType())
def run_calculation(geometry_file, output_file):
# In the future we can probably accept SMILES directly from the molecule
# model. For now we need somewhere to put the output, so the CJSON makes
# more sense.
with open(geometry_file) as f:
cjson = json.load(f)
smiles = cjson_to_smiles(cjson)
cls = OrganicLorentzLorenz()
cls.load(summary=True)
pred = cls.predict(smiles)
calculated_properties = {
"refractiveIndex": {
"label": "LL refractive index",
"value": pred[0],
"units": ""
},
"polarizability": {
"label": "Polarizability",
"value": pred[1],
"units": "Bohr^3"
},
"density": {
"label": "Density",
"value": pred[2],
"units": "Kg/m^3"
}
}
cjson['calculatedProperties'] = calculated_properties
with open(output_file, 'w') as f:
json.dump(cjson, f)
<commit_msg>Change structure of the output properties<commit_after>import json
from chemml.models.keras.trained import OrganicLorentzLorenz
from openbabel import OBMol, OBConversion
def ob_convert_str(str_data, in_format, out_format):
mol = OBMol()
conv = OBConversion()
conv.SetInFormat(in_format)
conv.SetOutFormat(out_format)
conv.ReadString(mol, str_data)
return (conv.WriteString(mol), conv.GetOutFormat().GetMIMEType())
def run_calculation(geometry_file, output_file):
# In the future we can probably accept SMILES directly from the molecule
# model. For now we need somewhere to put the output, so the CJSON makes
# more sense.
with open(geometry_file) as f:
cjson = json.load(f)
smiles = cjson_to_smiles(cjson)
cls = OrganicLorentzLorenz()
cls.load(summary=True)
pred = cls.predict(smiles)
properties = {
"refractiveIndex": pred[0],
"polarizability": pred[1],
"density": pred[2]
}
cjson['properties'] = properties
with open(output_file, 'w') as f:
json.dump(cjson, f)
|
5b0f7412f88400e61a05e694d4883389d812f3d2
|
tests/runtests.py
|
tests/runtests.py
|
#!/usr/bin/env python
import os
import sys
from unittest import defaultTestLoader, TextTestRunner, TestSuite
TESTS = ('form', 'fields', 'validators', 'widgets', 'webob_wrapper', 'translations', 'ext_csrf', 'ext_i18n')
def make_suite(prefix='', extra=()):
tests = TESTS + extra
test_names = list(prefix + x for x in tests)
suite = TestSuite()
suite.addTest(defaultTestLoader.loadTestsFromNames(test_names))
return suite
def additional_tests():
"""
This is called automatically by setup.py test
"""
return make_suite('tests.')
def main():
extra_tests = tuple(x for x in sys.argv[1:] if '-' not in x)
suite = make_suite('', )
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
runner = TextTestRunner(verbosity=(sys.argv.count('-v') - sys.argv.count('-q') + 1))
result = runner.run(suite)
sys.exit(not result.wasSuccessful())
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import os
import sys
from unittest import defaultTestLoader, TextTestRunner, TestSuite
TESTS = ('form', 'fields', 'validators', 'widgets', 'webob_wrapper', 'translations', 'ext_csrf', 'ext_i18n')
def make_suite(prefix='', extra=()):
tests = TESTS + extra
test_names = list(prefix + x for x in tests)
suite = TestSuite()
suite.addTest(defaultTestLoader.loadTestsFromNames(test_names))
return suite
def additional_tests():
"""
This is called automatically by setup.py test
"""
return make_suite('tests.')
def main():
extra_tests = tuple(x for x in sys.argv[1:] if '-' not in x)
suite = make_suite('', extra_tests)
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
runner = TextTestRunner(verbosity=(sys.argv.count('-v') - sys.argv.count('-q') + 1))
result = runner.run(suite)
sys.exit(not result.wasSuccessful())
if __name__ == '__main__':
main()
|
Add back in running of extra tests
|
Add back in running of extra tests
|
Python
|
bsd-3-clause
|
maxcountryman/wtforms
|
#!/usr/bin/env python
import os
import sys
from unittest import defaultTestLoader, TextTestRunner, TestSuite
TESTS = ('form', 'fields', 'validators', 'widgets', 'webob_wrapper', 'translations', 'ext_csrf', 'ext_i18n')
def make_suite(prefix='', extra=()):
tests = TESTS + extra
test_names = list(prefix + x for x in tests)
suite = TestSuite()
suite.addTest(defaultTestLoader.loadTestsFromNames(test_names))
return suite
def additional_tests():
"""
This is called automatically by setup.py test
"""
return make_suite('tests.')
def main():
extra_tests = tuple(x for x in sys.argv[1:] if '-' not in x)
suite = make_suite('', )
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
runner = TextTestRunner(verbosity=(sys.argv.count('-v') - sys.argv.count('-q') + 1))
result = runner.run(suite)
sys.exit(not result.wasSuccessful())
if __name__ == '__main__':
main()
Add back in running of extra tests
|
#!/usr/bin/env python
import os
import sys
from unittest import defaultTestLoader, TextTestRunner, TestSuite
TESTS = ('form', 'fields', 'validators', 'widgets', 'webob_wrapper', 'translations', 'ext_csrf', 'ext_i18n')
def make_suite(prefix='', extra=()):
tests = TESTS + extra
test_names = list(prefix + x for x in tests)
suite = TestSuite()
suite.addTest(defaultTestLoader.loadTestsFromNames(test_names))
return suite
def additional_tests():
"""
This is called automatically by setup.py test
"""
return make_suite('tests.')
def main():
extra_tests = tuple(x for x in sys.argv[1:] if '-' not in x)
suite = make_suite('', extra_tests)
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
runner = TextTestRunner(verbosity=(sys.argv.count('-v') - sys.argv.count('-q') + 1))
result = runner.run(suite)
sys.exit(not result.wasSuccessful())
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
import os
import sys
from unittest import defaultTestLoader, TextTestRunner, TestSuite
TESTS = ('form', 'fields', 'validators', 'widgets', 'webob_wrapper', 'translations', 'ext_csrf', 'ext_i18n')
def make_suite(prefix='', extra=()):
tests = TESTS + extra
test_names = list(prefix + x for x in tests)
suite = TestSuite()
suite.addTest(defaultTestLoader.loadTestsFromNames(test_names))
return suite
def additional_tests():
"""
This is called automatically by setup.py test
"""
return make_suite('tests.')
def main():
extra_tests = tuple(x for x in sys.argv[1:] if '-' not in x)
suite = make_suite('', )
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
runner = TextTestRunner(verbosity=(sys.argv.count('-v') - sys.argv.count('-q') + 1))
result = runner.run(suite)
sys.exit(not result.wasSuccessful())
if __name__ == '__main__':
main()
<commit_msg>Add back in running of extra tests<commit_after>
|
#!/usr/bin/env python
import os
import sys
from unittest import defaultTestLoader, TextTestRunner, TestSuite
TESTS = ('form', 'fields', 'validators', 'widgets', 'webob_wrapper', 'translations', 'ext_csrf', 'ext_i18n')
def make_suite(prefix='', extra=()):
tests = TESTS + extra
test_names = list(prefix + x for x in tests)
suite = TestSuite()
suite.addTest(defaultTestLoader.loadTestsFromNames(test_names))
return suite
def additional_tests():
"""
This is called automatically by setup.py test
"""
return make_suite('tests.')
def main():
extra_tests = tuple(x for x in sys.argv[1:] if '-' not in x)
suite = make_suite('', extra_tests)
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
runner = TextTestRunner(verbosity=(sys.argv.count('-v') - sys.argv.count('-q') + 1))
result = runner.run(suite)
sys.exit(not result.wasSuccessful())
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import os
import sys
from unittest import defaultTestLoader, TextTestRunner, TestSuite
TESTS = ('form', 'fields', 'validators', 'widgets', 'webob_wrapper', 'translations', 'ext_csrf', 'ext_i18n')
def make_suite(prefix='', extra=()):
tests = TESTS + extra
test_names = list(prefix + x for x in tests)
suite = TestSuite()
suite.addTest(defaultTestLoader.loadTestsFromNames(test_names))
return suite
def additional_tests():
"""
This is called automatically by setup.py test
"""
return make_suite('tests.')
def main():
extra_tests = tuple(x for x in sys.argv[1:] if '-' not in x)
suite = make_suite('', )
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
runner = TextTestRunner(verbosity=(sys.argv.count('-v') - sys.argv.count('-q') + 1))
result = runner.run(suite)
sys.exit(not result.wasSuccessful())
if __name__ == '__main__':
main()
Add back in running of extra tests#!/usr/bin/env python
import os
import sys
from unittest import defaultTestLoader, TextTestRunner, TestSuite
TESTS = ('form', 'fields', 'validators', 'widgets', 'webob_wrapper', 'translations', 'ext_csrf', 'ext_i18n')
def make_suite(prefix='', extra=()):
tests = TESTS + extra
test_names = list(prefix + x for x in tests)
suite = TestSuite()
suite.addTest(defaultTestLoader.loadTestsFromNames(test_names))
return suite
def additional_tests():
"""
This is called automatically by setup.py test
"""
return make_suite('tests.')
def main():
extra_tests = tuple(x for x in sys.argv[1:] if '-' not in x)
suite = make_suite('', extra_tests)
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
runner = TextTestRunner(verbosity=(sys.argv.count('-v') - sys.argv.count('-q') + 1))
result = runner.run(suite)
sys.exit(not result.wasSuccessful())
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
import os
import sys
from unittest import defaultTestLoader, TextTestRunner, TestSuite
TESTS = ('form', 'fields', 'validators', 'widgets', 'webob_wrapper', 'translations', 'ext_csrf', 'ext_i18n')
def make_suite(prefix='', extra=()):
tests = TESTS + extra
test_names = list(prefix + x for x in tests)
suite = TestSuite()
suite.addTest(defaultTestLoader.loadTestsFromNames(test_names))
return suite
def additional_tests():
"""
This is called automatically by setup.py test
"""
return make_suite('tests.')
def main():
extra_tests = tuple(x for x in sys.argv[1:] if '-' not in x)
suite = make_suite('', )
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
runner = TextTestRunner(verbosity=(sys.argv.count('-v') - sys.argv.count('-q') + 1))
result = runner.run(suite)
sys.exit(not result.wasSuccessful())
if __name__ == '__main__':
main()
<commit_msg>Add back in running of extra tests<commit_after>#!/usr/bin/env python
import os
import sys
from unittest import defaultTestLoader, TextTestRunner, TestSuite
TESTS = ('form', 'fields', 'validators', 'widgets', 'webob_wrapper', 'translations', 'ext_csrf', 'ext_i18n')
def make_suite(prefix='', extra=()):
tests = TESTS + extra
test_names = list(prefix + x for x in tests)
suite = TestSuite()
suite.addTest(defaultTestLoader.loadTestsFromNames(test_names))
return suite
def additional_tests():
"""
This is called automatically by setup.py test
"""
return make_suite('tests.')
def main():
extra_tests = tuple(x for x in sys.argv[1:] if '-' not in x)
suite = make_suite('', extra_tests)
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
runner = TextTestRunner(verbosity=(sys.argv.count('-v') - sys.argv.count('-q') + 1))
result = runner.run(suite)
sys.exit(not result.wasSuccessful())
if __name__ == '__main__':
main()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.