| Column | Type | Min | Max |
|---|---|---|---|
| commit | stringlengths | 40 | 40 |
| old_file | stringlengths | 4 | 118 |
| new_file | stringlengths | 4 | 118 |
| old_contents | stringlengths | 0 | 2.94k |
| new_contents | stringlengths | 1 | 4.43k |
| subject | stringlengths | 15 | 444 |
| message | stringlengths | 16 | 3.45k |
| lang | stringclasses (1 value) | n/a | n/a |
| license | stringclasses (13 values) | n/a | n/a |
| repos | stringlengths | 5 | 43.2k |
| prompt | stringlengths | 17 | 4.58k |
| response | stringlengths | 1 | 4.43k |
| prompt_tagged | stringlengths | 58 | 4.62k |
| response_tagged | stringlengths | 1 | 4.43k |
| text | stringlengths | 132 | 7.29k |
| text_tagged | stringlengths | 173 | 7.33k |
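The `prompt`, `response`, `text`, and `*_tagged` columns are derived views of the base columns. Judging by the rows below, `prompt` is `old_contents` followed by `subject`, `text` is `prompt` plus `response`, and the tagged variants wrap the same pieces in `<commit_before>`/`<commit_msg>`/`<commit_after>` markers. A minimal sketch of that assembly follows; the function name and the direct (separator-free) concatenation are assumptions read off the sample rows, not documented dataset behavior:

```python
def build_views(old_contents: str, new_contents: str, subject: str) -> dict:
    """Sketch of how the derived columns appear to be assembled.

    Assumption: fields are concatenated directly, which matches the way
    the subject runs straight into the response code in the `text` cells.
    """
    prompt = old_contents + subject
    prompt_tagged = ("<commit_before>" + old_contents +
                     "<commit_msg>" + subject + "<commit_after>")
    return {
        "prompt": prompt,
        "response": new_contents,
        "prompt_tagged": prompt_tagged,
        "response_tagged": new_contents,
        "text": prompt + new_contents,
        "text_tagged": prompt_tagged + new_contents,
    }
```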
77db2b0b01cda0565312430f84b35c901ad44c31
|
ktbs_bench/benchable_store.py
|
ktbs_bench/benchable_store.py
|
from rdflib import Graph
from ktbs_bench.bnsparqlstore import SPARQLStore
class BenchableStore:
    """Allows to use a store/graph for benchmarks.
    Contains a rdflib.Graph with setup and teardown.
    """
    def __init__(self, store, graph_id, store_config, store_create=False):
        self.graph = Graph(store=store, identifier=graph_id)
        self._store_config = store_config
        self._store_create = store_create
    def connect(self, store_create=None):
        if store_create:
            do_create = store_create
        else:
            do_create = self._store_create
        self.graph.open(self._store_config, create=do_create)
    def close(self, commit_pending_transaction=False):
        self.graph.close(commit_pending_transaction=commit_pending_transaction)
    def destroy(self):
        if isinstance(self.graph.store, SPARQLStore):
            self.sparql_destroy()
        else:
            self.graph.destroy(self._store_config)
    def sparql_destroy(self):
        """Try to destroy the graph as if the current store is a SPARQLStore."""
        # TODO improve destroy by using SPARQL CLEAR GRAPH if RDFLib supports it
        # or execute something on the command line
        for s, p, o in self.graph:
            self.graph.remove((s, p, o))
|
from rdflib import Graph
from ktbs_bench.bnsparqlstore import SPARQLStore
class BenchableStore:
    """Allows to use a store/graph for benchmarks.
    Contains a rdflib.Graph with setup and teardown.
    """
    def __init__(self, store, graph_id, store_config, store_create=False):
        self.graph = Graph(store=store, identifier=graph_id)
        self._store_config = store_config
        self._store_create = store_create
    def connect(self):
        return self.graph.open(configuration=self._store_config, create=self._store_create)
    def close(self, commit_pending_transaction=False):
        self.graph.close(commit_pending_transaction=commit_pending_transaction)
    def destroy(self):
        if isinstance(self.graph.store, SPARQLStore):
            self.sparql_destroy()
        else:
            self.graph.destroy(self._store_config)
    def sparql_destroy(self):
        """Try to destroy the graph as if the current store is a SPARQLStore."""
        # TODO improve destroy by using SPARQL CLEAR GRAPH if RDFLib supports it
        # or execute something on the command line
        for s, p, o in self.graph:
            self.graph.remove((s, p, o))
|
Simplify ux code for creating notsparqlstore tables
|
Simplify ux code for creating notsparqlstore tables
|
Python
|
mit
|
ktbs/ktbs-bench,ktbs/ktbs-bench
|
from rdflib import Graph
from ktbs_bench.bnsparqlstore import SPARQLStore
class BenchableStore:
    """Allows to use a store/graph for benchmarks.
    Contains a rdflib.Graph with setup and teardown.
    """
    def __init__(self, store, graph_id, store_config, store_create=False):
        self.graph = Graph(store=store, identifier=graph_id)
        self._store_config = store_config
        self._store_create = store_create
    def connect(self, store_create=None):
        if store_create:
            do_create = store_create
        else:
            do_create = self._store_create
        self.graph.open(self._store_config, create=do_create)
    def close(self, commit_pending_transaction=False):
        self.graph.close(commit_pending_transaction=commit_pending_transaction)
    def destroy(self):
        if isinstance(self.graph.store, SPARQLStore):
            self.sparql_destroy()
        else:
            self.graph.destroy(self._store_config)
    def sparql_destroy(self):
        """Try to destroy the graph as if the current store is a SPARQLStore."""
        # TODO improve destroy by using SPARQL CLEAR GRAPH if RDFLib supports it
        # or execute something on the command line
        for s, p, o in self.graph:
            self.graph.remove((s, p, o))
Simplify ux code for creating notsparqlstore tables
|
from rdflib import Graph
from ktbs_bench.bnsparqlstore import SPARQLStore
class BenchableStore:
    """Allows to use a store/graph for benchmarks.
    Contains a rdflib.Graph with setup and teardown.
    """
    def __init__(self, store, graph_id, store_config, store_create=False):
        self.graph = Graph(store=store, identifier=graph_id)
        self._store_config = store_config
        self._store_create = store_create
    def connect(self):
        return self.graph.open(configuration=self._store_config, create=self._store_create)
    def close(self, commit_pending_transaction=False):
        self.graph.close(commit_pending_transaction=commit_pending_transaction)
    def destroy(self):
        if isinstance(self.graph.store, SPARQLStore):
            self.sparql_destroy()
        else:
            self.graph.destroy(self._store_config)
    def sparql_destroy(self):
        """Try to destroy the graph as if the current store is a SPARQLStore."""
        # TODO improve destroy by using SPARQL CLEAR GRAPH if RDFLib supports it
        # or execute something on the command line
        for s, p, o in self.graph:
            self.graph.remove((s, p, o))
|
<commit_before>from rdflib import Graph
from ktbs_bench.bnsparqlstore import SPARQLStore
class BenchableStore:
    """Allows to use a store/graph for benchmarks.
    Contains a rdflib.Graph with setup and teardown.
    """
    def __init__(self, store, graph_id, store_config, store_create=False):
        self.graph = Graph(store=store, identifier=graph_id)
        self._store_config = store_config
        self._store_create = store_create
    def connect(self, store_create=None):
        if store_create:
            do_create = store_create
        else:
            do_create = self._store_create
        self.graph.open(self._store_config, create=do_create)
    def close(self, commit_pending_transaction=False):
        self.graph.close(commit_pending_transaction=commit_pending_transaction)
    def destroy(self):
        if isinstance(self.graph.store, SPARQLStore):
            self.sparql_destroy()
        else:
            self.graph.destroy(self._store_config)
    def sparql_destroy(self):
        """Try to destroy the graph as if the current store is a SPARQLStore."""
        # TODO improve destroy by using SPARQL CLEAR GRAPH if RDFLib supports it
        # or execute something on the command line
        for s, p, o in self.graph:
            self.graph.remove((s, p, o))
<commit_msg>Simplify ux code for creating notsparqlstore tables<commit_after>
|
from rdflib import Graph
from ktbs_bench.bnsparqlstore import SPARQLStore
class BenchableStore:
    """Allows to use a store/graph for benchmarks.
    Contains a rdflib.Graph with setup and teardown.
    """
    def __init__(self, store, graph_id, store_config, store_create=False):
        self.graph = Graph(store=store, identifier=graph_id)
        self._store_config = store_config
        self._store_create = store_create
    def connect(self):
        return self.graph.open(configuration=self._store_config, create=self._store_create)
    def close(self, commit_pending_transaction=False):
        self.graph.close(commit_pending_transaction=commit_pending_transaction)
    def destroy(self):
        if isinstance(self.graph.store, SPARQLStore):
            self.sparql_destroy()
        else:
            self.graph.destroy(self._store_config)
    def sparql_destroy(self):
        """Try to destroy the graph as if the current store is a SPARQLStore."""
        # TODO improve destroy by using SPARQL CLEAR GRAPH if RDFLib supports it
        # or execute something on the command line
        for s, p, o in self.graph:
            self.graph.remove((s, p, o))
|
from rdflib import Graph
from ktbs_bench.bnsparqlstore import SPARQLStore
class BenchableStore:
    """Allows to use a store/graph for benchmarks.
    Contains a rdflib.Graph with setup and teardown.
    """
    def __init__(self, store, graph_id, store_config, store_create=False):
        self.graph = Graph(store=store, identifier=graph_id)
        self._store_config = store_config
        self._store_create = store_create
    def connect(self, store_create=None):
        if store_create:
            do_create = store_create
        else:
            do_create = self._store_create
        self.graph.open(self._store_config, create=do_create)
    def close(self, commit_pending_transaction=False):
        self.graph.close(commit_pending_transaction=commit_pending_transaction)
    def destroy(self):
        if isinstance(self.graph.store, SPARQLStore):
            self.sparql_destroy()
        else:
            self.graph.destroy(self._store_config)
    def sparql_destroy(self):
        """Try to destroy the graph as if the current store is a SPARQLStore."""
        # TODO improve destroy by using SPARQL CLEAR GRAPH if RDFLib supports it
        # or execute something on the command line
        for s, p, o in self.graph:
            self.graph.remove((s, p, o))
Simplify ux code for creating notsparqlstore tablesfrom rdflib import Graph
from ktbs_bench.bnsparqlstore import SPARQLStore
class BenchableStore:
    """Allows to use a store/graph for benchmarks.
    Contains a rdflib.Graph with setup and teardown.
    """
    def __init__(self, store, graph_id, store_config, store_create=False):
        self.graph = Graph(store=store, identifier=graph_id)
        self._store_config = store_config
        self._store_create = store_create
    def connect(self):
        return self.graph.open(configuration=self._store_config, create=self._store_create)
    def close(self, commit_pending_transaction=False):
        self.graph.close(commit_pending_transaction=commit_pending_transaction)
    def destroy(self):
        if isinstance(self.graph.store, SPARQLStore):
            self.sparql_destroy()
        else:
            self.graph.destroy(self._store_config)
    def sparql_destroy(self):
        """Try to destroy the graph as if the current store is a SPARQLStore."""
        # TODO improve destroy by using SPARQL CLEAR GRAPH if RDFLib supports it
        # or execute something on the command line
        for s, p, o in self.graph:
            self.graph.remove((s, p, o))
|
<commit_before>from rdflib import Graph
from ktbs_bench.bnsparqlstore import SPARQLStore
class BenchableStore:
    """Allows to use a store/graph for benchmarks.
    Contains a rdflib.Graph with setup and teardown.
    """
    def __init__(self, store, graph_id, store_config, store_create=False):
        self.graph = Graph(store=store, identifier=graph_id)
        self._store_config = store_config
        self._store_create = store_create
    def connect(self, store_create=None):
        if store_create:
            do_create = store_create
        else:
            do_create = self._store_create
        self.graph.open(self._store_config, create=do_create)
    def close(self, commit_pending_transaction=False):
        self.graph.close(commit_pending_transaction=commit_pending_transaction)
    def destroy(self):
        if isinstance(self.graph.store, SPARQLStore):
            self.sparql_destroy()
        else:
            self.graph.destroy(self._store_config)
    def sparql_destroy(self):
        """Try to destroy the graph as if the current store is a SPARQLStore."""
        # TODO improve destroy by using SPARQL CLEAR GRAPH if RDFLib supports it
        # or execute something on the command line
        for s, p, o in self.graph:
            self.graph.remove((s, p, o))
<commit_msg>Simplify ux code for creating notsparqlstore tables<commit_after>from rdflib import Graph
from ktbs_bench.bnsparqlstore import SPARQLStore
class BenchableStore:
    """Allows to use a store/graph for benchmarks.
    Contains a rdflib.Graph with setup and teardown.
    """
    def __init__(self, store, graph_id, store_config, store_create=False):
        self.graph = Graph(store=store, identifier=graph_id)
        self._store_config = store_config
        self._store_create = store_create
    def connect(self):
        return self.graph.open(configuration=self._store_config, create=self._store_create)
    def close(self, commit_pending_transaction=False):
        self.graph.close(commit_pending_transaction=commit_pending_transaction)
    def destroy(self):
        if isinstance(self.graph.store, SPARQLStore):
            self.sparql_destroy()
        else:
            self.graph.destroy(self._store_config)
    def sparql_destroy(self):
        """Try to destroy the graph as if the current store is a SPARQLStore."""
        # TODO improve destroy by using SPARQL CLEAR GRAPH if RDFLib supports it
        # or execute something on the command line
        for s, p, o in self.graph:
            self.graph.remove((s, p, o))
|
04f36fab2168fb9cd34d3c6fc7f31533c90b9149
|
app/clients/statsd/statsd_client.py
|
app/clients/statsd/statsd_client.py
|
from statsd import StatsClient
class StatsdClient(StatsClient):
    def init_app(self, app, *args, **kwargs):
        self.active = app.config.get('STATSD_ENABLED')
        self.namespace = app.config.get('NOTIFY_ENVIRONMENT') + ".notifications.api."
        if self.active:
            StatsClient.__init__(
                self,
                app.config.get('STATSD_HOST'),
                app.config.get('STATSD_PORT'),
                prefix=app.config.get('STATSD_PREFIX')
            )
    def format_stat_name(self, stat):
        return self.namespace + stat
    def incr(self, stat, count=1, rate=1):
        if self.active:
            super(StatsClient, self).incr(self.format_stat_name(stat), count, rate)
    def timing(self, stat, delta, rate=1):
        if self.active:
            super(StatsClient, self).timing(self.format_stat_name(stat), delta, rate)
    def timing_with_dates(self, stat, start, end, rate=1):
        if self.active:
            delta = (start - end).total_seconds()
            super(StatsClient, self).timing(stat, delta, rate)
|
from statsd import StatsClient
class StatsdClient(StatsClient):
    def init_app(self, app, *args, **kwargs):
        self.active = app.config.get('STATSD_ENABLED')
        self.namespace = app.config.get('NOTIFY_ENVIRONMENT') + ".notifications.api."
        if self.active:
            StatsClient.__init__(
                self,
                app.config.get('STATSD_HOST'),
                app.config.get('STATSD_PORT'),
                prefix=app.config.get('STATSD_PREFIX')
            )
    def format_stat_name(self, stat):
        return self.namespace + stat
    def incr(self, stat, count=1, rate=1):
        if self.active:
            super(StatsClient, self).incr(self.format_stat_name(stat), count, rate)
    def timing(self, stat, delta, rate=1):
        if self.active:
            super(StatsClient, self).timing(self.format_stat_name(stat), delta, rate)
    def timing_with_dates(self, stat, start, end, rate=1):
        if self.active:
            delta = (start - end).total_seconds()
            super(StatsClient, self).timing(self.format_stat_name(stat), delta, rate)
|
Format the stat name with environmenbt
|
Format the stat name with environmenbt
|
Python
|
mit
|
alphagov/notifications-api,alphagov/notifications-api
|
from statsd import StatsClient
class StatsdClient(StatsClient):
    def init_app(self, app, *args, **kwargs):
        self.active = app.config.get('STATSD_ENABLED')
        self.namespace = app.config.get('NOTIFY_ENVIRONMENT') + ".notifications.api."
        if self.active:
            StatsClient.__init__(
                self,
                app.config.get('STATSD_HOST'),
                app.config.get('STATSD_PORT'),
                prefix=app.config.get('STATSD_PREFIX')
            )
    def format_stat_name(self, stat):
        return self.namespace + stat
    def incr(self, stat, count=1, rate=1):
        if self.active:
            super(StatsClient, self).incr(self.format_stat_name(stat), count, rate)
    def timing(self, stat, delta, rate=1):
        if self.active:
            super(StatsClient, self).timing(self.format_stat_name(stat), delta, rate)
    def timing_with_dates(self, stat, start, end, rate=1):
        if self.active:
            delta = (start - end).total_seconds()
            super(StatsClient, self).timing(stat, delta, rate)
Format the stat name with environmenbt
|
from statsd import StatsClient
class StatsdClient(StatsClient):
    def init_app(self, app, *args, **kwargs):
        self.active = app.config.get('STATSD_ENABLED')
        self.namespace = app.config.get('NOTIFY_ENVIRONMENT') + ".notifications.api."
        if self.active:
            StatsClient.__init__(
                self,
                app.config.get('STATSD_HOST'),
                app.config.get('STATSD_PORT'),
                prefix=app.config.get('STATSD_PREFIX')
            )
    def format_stat_name(self, stat):
        return self.namespace + stat
    def incr(self, stat, count=1, rate=1):
        if self.active:
            super(StatsClient, self).incr(self.format_stat_name(stat), count, rate)
    def timing(self, stat, delta, rate=1):
        if self.active:
            super(StatsClient, self).timing(self.format_stat_name(stat), delta, rate)
    def timing_with_dates(self, stat, start, end, rate=1):
        if self.active:
            delta = (start - end).total_seconds()
            super(StatsClient, self).timing(self.format_stat_name(stat), delta, rate)
|
<commit_before>from statsd import StatsClient
class StatsdClient(StatsClient):
    def init_app(self, app, *args, **kwargs):
        self.active = app.config.get('STATSD_ENABLED')
        self.namespace = app.config.get('NOTIFY_ENVIRONMENT') + ".notifications.api."
        if self.active:
            StatsClient.__init__(
                self,
                app.config.get('STATSD_HOST'),
                app.config.get('STATSD_PORT'),
                prefix=app.config.get('STATSD_PREFIX')
            )
    def format_stat_name(self, stat):
        return self.namespace + stat
    def incr(self, stat, count=1, rate=1):
        if self.active:
            super(StatsClient, self).incr(self.format_stat_name(stat), count, rate)
    def timing(self, stat, delta, rate=1):
        if self.active:
            super(StatsClient, self).timing(self.format_stat_name(stat), delta, rate)
    def timing_with_dates(self, stat, start, end, rate=1):
        if self.active:
            delta = (start - end).total_seconds()
            super(StatsClient, self).timing(stat, delta, rate)
<commit_msg>Format the stat name with environmenbt<commit_after>
|
from statsd import StatsClient
class StatsdClient(StatsClient):
    def init_app(self, app, *args, **kwargs):
        self.active = app.config.get('STATSD_ENABLED')
        self.namespace = app.config.get('NOTIFY_ENVIRONMENT') + ".notifications.api."
        if self.active:
            StatsClient.__init__(
                self,
                app.config.get('STATSD_HOST'),
                app.config.get('STATSD_PORT'),
                prefix=app.config.get('STATSD_PREFIX')
            )
    def format_stat_name(self, stat):
        return self.namespace + stat
    def incr(self, stat, count=1, rate=1):
        if self.active:
            super(StatsClient, self).incr(self.format_stat_name(stat), count, rate)
    def timing(self, stat, delta, rate=1):
        if self.active:
            super(StatsClient, self).timing(self.format_stat_name(stat), delta, rate)
    def timing_with_dates(self, stat, start, end, rate=1):
        if self.active:
            delta = (start - end).total_seconds()
            super(StatsClient, self).timing(self.format_stat_name(stat), delta, rate)
|
from statsd import StatsClient
class StatsdClient(StatsClient):
    def init_app(self, app, *args, **kwargs):
        self.active = app.config.get('STATSD_ENABLED')
        self.namespace = app.config.get('NOTIFY_ENVIRONMENT') + ".notifications.api."
        if self.active:
            StatsClient.__init__(
                self,
                app.config.get('STATSD_HOST'),
                app.config.get('STATSD_PORT'),
                prefix=app.config.get('STATSD_PREFIX')
            )
    def format_stat_name(self, stat):
        return self.namespace + stat
    def incr(self, stat, count=1, rate=1):
        if self.active:
            super(StatsClient, self).incr(self.format_stat_name(stat), count, rate)
    def timing(self, stat, delta, rate=1):
        if self.active:
            super(StatsClient, self).timing(self.format_stat_name(stat), delta, rate)
    def timing_with_dates(self, stat, start, end, rate=1):
        if self.active:
            delta = (start - end).total_seconds()
            super(StatsClient, self).timing(stat, delta, rate)
Format the stat name with environmenbtfrom statsd import StatsClient
class StatsdClient(StatsClient):
    def init_app(self, app, *args, **kwargs):
        self.active = app.config.get('STATSD_ENABLED')
        self.namespace = app.config.get('NOTIFY_ENVIRONMENT') + ".notifications.api."
        if self.active:
            StatsClient.__init__(
                self,
                app.config.get('STATSD_HOST'),
                app.config.get('STATSD_PORT'),
                prefix=app.config.get('STATSD_PREFIX')
            )
    def format_stat_name(self, stat):
        return self.namespace + stat
    def incr(self, stat, count=1, rate=1):
        if self.active:
            super(StatsClient, self).incr(self.format_stat_name(stat), count, rate)
    def timing(self, stat, delta, rate=1):
        if self.active:
            super(StatsClient, self).timing(self.format_stat_name(stat), delta, rate)
    def timing_with_dates(self, stat, start, end, rate=1):
        if self.active:
            delta = (start - end).total_seconds()
            super(StatsClient, self).timing(self.format_stat_name(stat), delta, rate)
|
<commit_before>from statsd import StatsClient
class StatsdClient(StatsClient):
    def init_app(self, app, *args, **kwargs):
        self.active = app.config.get('STATSD_ENABLED')
        self.namespace = app.config.get('NOTIFY_ENVIRONMENT') + ".notifications.api."
        if self.active:
            StatsClient.__init__(
                self,
                app.config.get('STATSD_HOST'),
                app.config.get('STATSD_PORT'),
                prefix=app.config.get('STATSD_PREFIX')
            )
    def format_stat_name(self, stat):
        return self.namespace + stat
    def incr(self, stat, count=1, rate=1):
        if self.active:
            super(StatsClient, self).incr(self.format_stat_name(stat), count, rate)
    def timing(self, stat, delta, rate=1):
        if self.active:
            super(StatsClient, self).timing(self.format_stat_name(stat), delta, rate)
    def timing_with_dates(self, stat, start, end, rate=1):
        if self.active:
            delta = (start - end).total_seconds()
            super(StatsClient, self).timing(stat, delta, rate)
<commit_msg>Format the stat name with environmenbt<commit_after>from statsd import StatsClient
class StatsdClient(StatsClient):
    def init_app(self, app, *args, **kwargs):
        self.active = app.config.get('STATSD_ENABLED')
        self.namespace = app.config.get('NOTIFY_ENVIRONMENT') + ".notifications.api."
        if self.active:
            StatsClient.__init__(
                self,
                app.config.get('STATSD_HOST'),
                app.config.get('STATSD_PORT'),
                prefix=app.config.get('STATSD_PREFIX')
            )
    def format_stat_name(self, stat):
        return self.namespace + stat
    def incr(self, stat, count=1, rate=1):
        if self.active:
            super(StatsClient, self).incr(self.format_stat_name(stat), count, rate)
    def timing(self, stat, delta, rate=1):
        if self.active:
            super(StatsClient, self).timing(self.format_stat_name(stat), delta, rate)
    def timing_with_dates(self, stat, start, end, rate=1):
        if self.active:
            delta = (start - end).total_seconds()
            super(StatsClient, self).timing(self.format_stat_name(stat), delta, rate)
|
da03ad3386d45d310514f2b5ef3145fbcf5b773d
|
dashboard/ratings/tests/factories.py
|
dashboard/ratings/tests/factories.py
|
"""
Contains factory classes for quickly generating test data.
It uses the factory_boy package.
Please see https://github.com/rbarrois/factory_boy for more info
"""
import datetime
import factory
import random
from django.utils import timezone
from ratings import models
class SubmissionFactory(factory.DjangoModelFactory):
class Meta:
model = models.Submission
application_date = timezone.now() - datetime.timedelta(days=random.randint(0, 5))
submission_date = timezone.now() + datetime.timedelta(days=random.randint(1, 5))
class MediaFactory(factory.DjangoModelFactory):
class Meta:
model = models.Media
filename = factory.Faker('file_name')
filetype = factory.Faker('pystr', max_chars=60)
submission = factory.SubFactory(SubmissionFactory)
class RatingFactory(factory.DjangoModelFactory):
class Meta:
model = models.Rating
score = factory.Faker('pydecimal', left_digits=2, right_digits=1, positive=True)
code_quality = random.randint(0,100)
documentation = random.randint(0,100)
problem_solving = random.randint(0,100)
effort = random.randint(0,100)
creativity = random.randint(0,100)
originality = random.randint(0,100)
submission = factory.SubFactory(SubmissionFactory)
|
"""
Contains factory classes for quickly generating test data.
It uses the factory_boy package.
Please see https://github.com/rbarrois/factory_boy for more info
"""
import datetime
import factory
import factory.fuzzy
import random
from django.utils import timezone
from ratings import models
class SubmissionFactory(factory.DjangoModelFactory):
class Meta:
model = models.Submission
application_date = factory.fuzzy.FuzzyDateTime(timezone.now(), timezone.now() + datetime.timedelta(days=30))
submission_date = factory.fuzzy.FuzzyDateTime(timezone.now(), timezone.now() + datetime.timedelta(days=100))
class MediaFactory(factory.DjangoModelFactory):
class Meta:
model = models.Media
filename = factory.Faker('file_name')
filetype = factory.Faker('pystr', max_chars=60)
submission = factory.SubFactory(SubmissionFactory)
class RatingFactory(factory.DjangoModelFactory):
class Meta:
model = models.Rating
score = factory.Faker('pydecimal', left_digits=2, right_digits=1, positive=True)
code_quality = factory.fuzzy.FuzzyInteger(0,100)
documentation = factory.fuzzy.FuzzyInteger(0,100)
problem_solving = factory.fuzzy.FuzzyInteger(0,100)
effort = factory.fuzzy.FuzzyInteger(0,100)
creativity = factory.fuzzy.FuzzyInteger(0,100)
originality = factory.fuzzy.FuzzyInteger(0,100)
submission = factory.SubFactory(SubmissionFactory)
|
Make sure seeder creates random values
|
Make sure seeder creates random values
|
Python
|
mit
|
daltonamitchell/rating-dashboard,daltonamitchell/rating-dashboard,daltonamitchell/rating-dashboard
|
"""
Contains factory classes for quickly generating test data.
It uses the factory_boy package.
Please see https://github.com/rbarrois/factory_boy for more info
"""
import datetime
import factory
import random
from django.utils import timezone
from ratings import models
class SubmissionFactory(factory.DjangoModelFactory):
class Meta:
model = models.Submission
application_date = timezone.now() - datetime.timedelta(days=random.randint(0, 5))
submission_date = timezone.now() + datetime.timedelta(days=random.randint(1, 5))
class MediaFactory(factory.DjangoModelFactory):
class Meta:
model = models.Media
filename = factory.Faker('file_name')
filetype = factory.Faker('pystr', max_chars=60)
submission = factory.SubFactory(SubmissionFactory)
class RatingFactory(factory.DjangoModelFactory):
class Meta:
model = models.Rating
score = factory.Faker('pydecimal', left_digits=2, right_digits=1, positive=True)
code_quality = random.randint(0,100)
documentation = random.randint(0,100)
problem_solving = random.randint(0,100)
effort = random.randint(0,100)
creativity = random.randint(0,100)
originality = random.randint(0,100)
submission = factory.SubFactory(SubmissionFactory)
Make sure seeder creates random values
|
"""
Contains factory classes for quickly generating test data.
It uses the factory_boy package.
Please see https://github.com/rbarrois/factory_boy for more info
"""
import datetime
import factory
import factory.fuzzy
import random
from django.utils import timezone
from ratings import models
class SubmissionFactory(factory.DjangoModelFactory):
class Meta:
model = models.Submission
application_date = factory.fuzzy.FuzzyDateTime(timezone.now(), timezone.now() + datetime.timedelta(days=30))
submission_date = factory.fuzzy.FuzzyDateTime(timezone.now(), timezone.now() + datetime.timedelta(days=100))
class MediaFactory(factory.DjangoModelFactory):
class Meta:
model = models.Media
filename = factory.Faker('file_name')
filetype = factory.Faker('pystr', max_chars=60)
submission = factory.SubFactory(SubmissionFactory)
class RatingFactory(factory.DjangoModelFactory):
class Meta:
model = models.Rating
score = factory.Faker('pydecimal', left_digits=2, right_digits=1, positive=True)
code_quality = factory.fuzzy.FuzzyInteger(0,100)
documentation = factory.fuzzy.FuzzyInteger(0,100)
problem_solving = factory.fuzzy.FuzzyInteger(0,100)
effort = factory.fuzzy.FuzzyInteger(0,100)
creativity = factory.fuzzy.FuzzyInteger(0,100)
originality = factory.fuzzy.FuzzyInteger(0,100)
submission = factory.SubFactory(SubmissionFactory)
|
<commit_before>"""
Contains factory classes for quickly generating test data.
It uses the factory_boy package.
Please see https://github.com/rbarrois/factory_boy for more info
"""
import datetime
import factory
import random
from django.utils import timezone
from ratings import models
class SubmissionFactory(factory.DjangoModelFactory):
class Meta:
model = models.Submission
application_date = timezone.now() - datetime.timedelta(days=random.randint(0, 5))
submission_date = timezone.now() + datetime.timedelta(days=random.randint(1, 5))
class MediaFactory(factory.DjangoModelFactory):
class Meta:
model = models.Media
filename = factory.Faker('file_name')
filetype = factory.Faker('pystr', max_chars=60)
submission = factory.SubFactory(SubmissionFactory)
class RatingFactory(factory.DjangoModelFactory):
class Meta:
model = models.Rating
score = factory.Faker('pydecimal', left_digits=2, right_digits=1, positive=True)
code_quality = random.randint(0,100)
documentation = random.randint(0,100)
problem_solving = random.randint(0,100)
effort = random.randint(0,100)
creativity = random.randint(0,100)
originality = random.randint(0,100)
submission = factory.SubFactory(SubmissionFactory)
<commit_msg>Make sure seeder creates random values<commit_after>
|
"""
Contains factory classes for quickly generating test data.
It uses the factory_boy package.
Please see https://github.com/rbarrois/factory_boy for more info
"""
import datetime
import factory
import factory.fuzzy
import random
from django.utils import timezone
from ratings import models
class SubmissionFactory(factory.DjangoModelFactory):
class Meta:
model = models.Submission
application_date = factory.fuzzy.FuzzyDateTime(timezone.now(), timezone.now() + datetime.timedelta(days=30))
submission_date = factory.fuzzy.FuzzyDateTime(timezone.now(), timezone.now() + datetime.timedelta(days=100))
class MediaFactory(factory.DjangoModelFactory):
class Meta:
model = models.Media
filename = factory.Faker('file_name')
filetype = factory.Faker('pystr', max_chars=60)
submission = factory.SubFactory(SubmissionFactory)
class RatingFactory(factory.DjangoModelFactory):
class Meta:
model = models.Rating
score = factory.Faker('pydecimal', left_digits=2, right_digits=1, positive=True)
code_quality = factory.fuzzy.FuzzyInteger(0,100)
documentation = factory.fuzzy.FuzzyInteger(0,100)
problem_solving = factory.fuzzy.FuzzyInteger(0,100)
effort = factory.fuzzy.FuzzyInteger(0,100)
creativity = factory.fuzzy.FuzzyInteger(0,100)
originality = factory.fuzzy.FuzzyInteger(0,100)
submission = factory.SubFactory(SubmissionFactory)
|
"""
Contains factory classes for quickly generating test data.
It uses the factory_boy package.
Please see https://github.com/rbarrois/factory_boy for more info
"""
import datetime
import factory
import random
from django.utils import timezone
from ratings import models
class SubmissionFactory(factory.DjangoModelFactory):
class Meta:
model = models.Submission
application_date = timezone.now() - datetime.timedelta(days=random.randint(0, 5))
submission_date = timezone.now() + datetime.timedelta(days=random.randint(1, 5))
class MediaFactory(factory.DjangoModelFactory):
class Meta:
model = models.Media
filename = factory.Faker('file_name')
filetype = factory.Faker('pystr', max_chars=60)
submission = factory.SubFactory(SubmissionFactory)
class RatingFactory(factory.DjangoModelFactory):
class Meta:
model = models.Rating
score = factory.Faker('pydecimal', left_digits=2, right_digits=1, positive=True)
code_quality = random.randint(0,100)
documentation = random.randint(0,100)
problem_solving = random.randint(0,100)
effort = random.randint(0,100)
creativity = random.randint(0,100)
originality = random.randint(0,100)
submission = factory.SubFactory(SubmissionFactory)
Make sure seeder creates random values"""
Contains factory classes for quickly generating test data.
It uses the factory_boy package.
Please see https://github.com/rbarrois/factory_boy for more info
"""
import datetime
import factory
import factory.fuzzy
import random
from django.utils import timezone
from ratings import models
class SubmissionFactory(factory.DjangoModelFactory):
class Meta:
model = models.Submission
application_date = factory.fuzzy.FuzzyDateTime(timezone.now(), timezone.now() + datetime.timedelta(days=30))
submission_date = factory.fuzzy.FuzzyDateTime(timezone.now(), timezone.now() + datetime.timedelta(days=100))
class MediaFactory(factory.DjangoModelFactory):
class Meta:
model = models.Media
filename = factory.Faker('file_name')
filetype = factory.Faker('pystr', max_chars=60)
submission = factory.SubFactory(SubmissionFactory)
class RatingFactory(factory.DjangoModelFactory):
class Meta:
model = models.Rating
score = factory.Faker('pydecimal', left_digits=2, right_digits=1, positive=True)
code_quality = factory.fuzzy.FuzzyInteger(0,100)
documentation = factory.fuzzy.FuzzyInteger(0,100)
problem_solving = factory.fuzzy.FuzzyInteger(0,100)
effort = factory.fuzzy.FuzzyInteger(0,100)
creativity = factory.fuzzy.FuzzyInteger(0,100)
originality = factory.fuzzy.FuzzyInteger(0,100)
submission = factory.SubFactory(SubmissionFactory)
|
<commit_before>"""
Contains factory classes for quickly generating test data.
It uses the factory_boy package.
Please see https://github.com/rbarrois/factory_boy for more info
"""
import datetime
import factory
import random
from django.utils import timezone
from ratings import models
class SubmissionFactory(factory.DjangoModelFactory):
class Meta:
model = models.Submission
application_date = timezone.now() - datetime.timedelta(days=random.randint(0, 5))
submission_date = timezone.now() + datetime.timedelta(days=random.randint(1, 5))
class MediaFactory(factory.DjangoModelFactory):
class Meta:
model = models.Media
filename = factory.Faker('file_name')
filetype = factory.Faker('pystr', max_chars=60)
submission = factory.SubFactory(SubmissionFactory)
class RatingFactory(factory.DjangoModelFactory):
class Meta:
model = models.Rating
score = factory.Faker('pydecimal', left_digits=2, right_digits=1, positive=True)
code_quality = random.randint(0,100)
documentation = random.randint(0,100)
problem_solving = random.randint(0,100)
effort = random.randint(0,100)
creativity = random.randint(0,100)
originality = random.randint(0,100)
submission = factory.SubFactory(SubmissionFactory)
<commit_msg>Make sure seeder creates random values<commit_after>"""
Contains factory classes for quickly generating test data.
It uses the factory_boy package.
Please see https://github.com/rbarrois/factory_boy for more info
"""
import datetime
import factory
import factory.fuzzy
import random
from django.utils import timezone
from ratings import models
class SubmissionFactory(factory.DjangoModelFactory):
class Meta:
model = models.Submission
application_date = factory.fuzzy.FuzzyDateTime(timezone.now(), timezone.now() + datetime.timedelta(days=30))
submission_date = factory.fuzzy.FuzzyDateTime(timezone.now(), timezone.now() + datetime.timedelta(days=100))
class MediaFactory(factory.DjangoModelFactory):
class Meta:
model = models.Media
filename = factory.Faker('file_name')
filetype = factory.Faker('pystr', max_chars=60)
submission = factory.SubFactory(SubmissionFactory)
class RatingFactory(factory.DjangoModelFactory):
class Meta:
model = models.Rating
score = factory.Faker('pydecimal', left_digits=2, right_digits=1, positive=True)
code_quality = factory.fuzzy.FuzzyInteger(0,100)
documentation = factory.fuzzy.FuzzyInteger(0,100)
problem_solving = factory.fuzzy.FuzzyInteger(0,100)
effort = factory.fuzzy.FuzzyInteger(0,100)
creativity = factory.fuzzy.FuzzyInteger(0,100)
originality = factory.fuzzy.FuzzyInteger(0,100)
submission = factory.SubFactory(SubmissionFactory)
|
79b0584887075eb1732770d1732ae07147ec21b6
|
tests/mpd/protocol/test_status.py
|
tests/mpd/protocol/test_status.py
|
from __future__ import absolute_import, unicode_literals
from mopidy.models import Track
from tests.mpd import protocol
class StatusHandlerTest(protocol.BaseTestCase):
    def test_clearerror(self):
        self.send_request('clearerror')
        self.assertEqualResponse('ACK [0@0] {clearerror} Not implemented')
    def test_currentsong(self):
        track = Track()
        self.core.tracklist.add([track])
        self.core.playback.play()
        self.send_request('currentsong')
        self.assertInResponse('file: ')
        self.assertInResponse('Time: 0')
        self.assertInResponse('Artist: ')
        self.assertInResponse('Title: ')
        self.assertInResponse('Album: ')
        self.assertInResponse('Track: 0')
        self.assertNotInResponse('Date: ')
        self.assertInResponse('Pos: 0')
        self.assertInResponse('Id: 0')
        self.assertInResponse('OK')
    def test_currentsong_without_song(self):
        self.send_request('currentsong')
        self.assertInResponse('OK')
    def test_stats_command(self):
        self.send_request('stats')
        self.assertInResponse('OK')
    def test_status_command(self):
        self.send_request('status')
        self.assertInResponse('OK')
|
from __future__ import absolute_import, unicode_literals
from mopidy.models import Track
from tests.mpd import protocol
class StatusHandlerTest(protocol.BaseTestCase):
    def test_clearerror(self):
        self.send_request('clearerror')
        self.assertEqualResponse('ACK [0@0] {clearerror} Not implemented')
    def test_currentsong(self):
        track = Track(uri='dummy:/a')
        self.backend.library.dummy_library = [track]
        self.core.tracklist.add(uris=[track.uri]).get()
        self.core.playback.play()
        self.send_request('currentsong')
        self.assertInResponse('file: dummy:/a')
        self.assertInResponse('Time: 0')
        self.assertInResponse('Artist: ')
        self.assertInResponse('Title: ')
        self.assertInResponse('Album: ')
        self.assertInResponse('Track: 0')
        self.assertNotInResponse('Date: ')
        self.assertInResponse('Pos: 0')
        self.assertInResponse('Id: 0')
        self.assertInResponse('OK')
    def test_currentsong_without_song(self):
        self.send_request('currentsong')
        self.assertInResponse('OK')
    def test_stats_command(self):
        self.send_request('stats')
        self.assertInResponse('OK')
    def test_status_command(self):
        self.send_request('status')
        self.assertInResponse('OK')
|
Stop using tracklist add tracks in mpd status test
|
tests: Stop using tracklist add tracks in mpd status test
|
Python
|
apache-2.0
|
ZenithDK/mopidy,quartz55/mopidy,tkem/mopidy,dbrgn/mopidy,rawdlite/mopidy,ali/mopidy,glogiotatidis/mopidy,quartz55/mopidy,bacontext/mopidy,bencevans/mopidy,kingosticks/mopidy,ZenithDK/mopidy,tkem/mopidy,dbrgn/mopidy,tkem/mopidy,jmarsik/mopidy,glogiotatidis/mopidy,adamcik/mopidy,bacontext/mopidy,bacontext/mopidy,pacificIT/mopidy,diandiankan/mopidy,diandiankan/mopidy,swak/mopidy,ZenithDK/mopidy,tkem/mopidy,dbrgn/mopidy,jmarsik/mopidy,jcass77/mopidy,glogiotatidis/mopidy,kingosticks/mopidy,adamcik/mopidy,mopidy/mopidy,jodal/mopidy,hkariti/mopidy,jcass77/mopidy,ali/mopidy,swak/mopidy,diandiankan/mopidy,SuperStarPL/mopidy,mopidy/mopidy,bencevans/mopidy,pacificIT/mopidy,mokieyue/mopidy,diandiankan/mopidy,rawdlite/mopidy,pacificIT/mopidy,glogiotatidis/mopidy,ali/mopidy,jodal/mopidy,jcass77/mopidy,mopidy/mopidy,vrs01/mopidy,jmarsik/mopidy,bacontext/mopidy,SuperStarPL/mopidy,kingosticks/mopidy,mokieyue/mopidy,bencevans/mopidy,adamcik/mopidy,vrs01/mopidy,SuperStarPL/mopidy,quartz55/mopidy,vrs01/mopidy,pacificIT/mopidy,hkariti/mopidy,hkariti/mopidy,swak/mopidy,hkariti/mopidy,mokieyue/mopidy,jmarsik/mopidy,mokieyue/mopidy,dbrgn/mopidy,jodal/mopidy,SuperStarPL/mopidy,rawdlite/mopidy,bencevans/mopidy,quartz55/mopidy,rawdlite/mopidy,vrs01/mopidy,ali/mopidy,swak/mopidy,ZenithDK/mopidy
|
from __future__ import absolute_import, unicode_literals
from mopidy.models import Track
from tests.mpd import protocol
class StatusHandlerTest(protocol.BaseTestCase):
    def test_clearerror(self):
        self.send_request('clearerror')
        self.assertEqualResponse('ACK [0@0] {clearerror} Not implemented')
    def test_currentsong(self):
        track = Track()
        self.core.tracklist.add([track])
        self.core.playback.play()
        self.send_request('currentsong')
        self.assertInResponse('file: ')
        self.assertInResponse('Time: 0')
        self.assertInResponse('Artist: ')
        self.assertInResponse('Title: ')
        self.assertInResponse('Album: ')
        self.assertInResponse('Track: 0')
        self.assertNotInResponse('Date: ')
        self.assertInResponse('Pos: 0')
        self.assertInResponse('Id: 0')
        self.assertInResponse('OK')
    def test_currentsong_without_song(self):
        self.send_request('currentsong')
        self.assertInResponse('OK')
    def test_stats_command(self):
        self.send_request('stats')
        self.assertInResponse('OK')
    def test_status_command(self):
        self.send_request('status')
        self.assertInResponse('OK')
tests: Stop using tracklist add tracks in mpd status test
|
from __future__ import absolute_import, unicode_literals
from mopidy.models import Track
from tests.mpd import protocol
class StatusHandlerTest(protocol.BaseTestCase):
    def test_clearerror(self):
        self.send_request('clearerror')
        self.assertEqualResponse('ACK [0@0] {clearerror} Not implemented')
    def test_currentsong(self):
        track = Track(uri='dummy:/a')
        self.backend.library.dummy_library = [track]
        self.core.tracklist.add(uris=[track.uri]).get()
        self.core.playback.play()
        self.send_request('currentsong')
        self.assertInResponse('file: dummy:/a')
        self.assertInResponse('Time: 0')
        self.assertInResponse('Artist: ')
        self.assertInResponse('Title: ')
        self.assertInResponse('Album: ')
        self.assertInResponse('Track: 0')
        self.assertNotInResponse('Date: ')
        self.assertInResponse('Pos: 0')
        self.assertInResponse('Id: 0')
        self.assertInResponse('OK')
    def test_currentsong_without_song(self):
        self.send_request('currentsong')
        self.assertInResponse('OK')
    def test_stats_command(self):
        self.send_request('stats')
        self.assertInResponse('OK')
    def test_status_command(self):
        self.send_request('status')
        self.assertInResponse('OK')
|
<commit_before>from __future__ import absolute_import, unicode_literals
from mopidy.models import Track
from tests.mpd import protocol
class StatusHandlerTest(protocol.BaseTestCase):
    def test_clearerror(self):
        self.send_request('clearerror')
        self.assertEqualResponse('ACK [0@0] {clearerror} Not implemented')
    def test_currentsong(self):
        track = Track()
        self.core.tracklist.add([track])
        self.core.playback.play()
        self.send_request('currentsong')
        self.assertInResponse('file: ')
        self.assertInResponse('Time: 0')
        self.assertInResponse('Artist: ')
        self.assertInResponse('Title: ')
        self.assertInResponse('Album: ')
        self.assertInResponse('Track: 0')
        self.assertNotInResponse('Date: ')
        self.assertInResponse('Pos: 0')
        self.assertInResponse('Id: 0')
        self.assertInResponse('OK')
    def test_currentsong_without_song(self):
        self.send_request('currentsong')
        self.assertInResponse('OK')
    def test_stats_command(self):
        self.send_request('stats')
        self.assertInResponse('OK')
    def test_status_command(self):
        self.send_request('status')
        self.assertInResponse('OK')
<commit_msg>tests: Stop using tracklist add tracks in mpd status test<commit_after>
|
from __future__ import absolute_import, unicode_literals
from mopidy.models import Track
from tests.mpd import protocol
class StatusHandlerTest(protocol.BaseTestCase):
    def test_clearerror(self):
        self.send_request('clearerror')
        self.assertEqualResponse('ACK [0@0] {clearerror} Not implemented')
    def test_currentsong(self):
        track = Track(uri='dummy:/a')
        self.backend.library.dummy_library = [track]
        self.core.tracklist.add(uris=[track.uri]).get()
        self.core.playback.play()
        self.send_request('currentsong')
        self.assertInResponse('file: dummy:/a')
        self.assertInResponse('Time: 0')
        self.assertInResponse('Artist: ')
        self.assertInResponse('Title: ')
        self.assertInResponse('Album: ')
        self.assertInResponse('Track: 0')
        self.assertNotInResponse('Date: ')
        self.assertInResponse('Pos: 0')
        self.assertInResponse('Id: 0')
        self.assertInResponse('OK')
    def test_currentsong_without_song(self):
        self.send_request('currentsong')
        self.assertInResponse('OK')
    def test_stats_command(self):
        self.send_request('stats')
        self.assertInResponse('OK')
    def test_status_command(self):
        self.send_request('status')
        self.assertInResponse('OK')
|
from __future__ import absolute_import, unicode_literals
from mopidy.models import Track
from tests.mpd import protocol
class StatusHandlerTest(protocol.BaseTestCase):
    def test_clearerror(self):
        self.send_request('clearerror')
        self.assertEqualResponse('ACK [0@0] {clearerror} Not implemented')
    def test_currentsong(self):
        track = Track()
        self.core.tracklist.add([track])
        self.core.playback.play()
        self.send_request('currentsong')
        self.assertInResponse('file: ')
        self.assertInResponse('Time: 0')
        self.assertInResponse('Artist: ')
        self.assertInResponse('Title: ')
        self.assertInResponse('Album: ')
        self.assertInResponse('Track: 0')
        self.assertNotInResponse('Date: ')
        self.assertInResponse('Pos: 0')
        self.assertInResponse('Id: 0')
        self.assertInResponse('OK')
    def test_currentsong_without_song(self):
        self.send_request('currentsong')
        self.assertInResponse('OK')
    def test_stats_command(self):
        self.send_request('stats')
        self.assertInResponse('OK')
    def test_status_command(self):
        self.send_request('status')
        self.assertInResponse('OK')
tests: Stop using tracklist add tracks in mpd status testfrom __future__ import absolute_import, unicode_literals
from mopidy.models import Track
from tests.mpd import protocol
class StatusHandlerTest(protocol.BaseTestCase):
    def test_clearerror(self):
        self.send_request('clearerror')
        self.assertEqualResponse('ACK [0@0] {clearerror} Not implemented')
    def test_currentsong(self):
        track = Track(uri='dummy:/a')
        self.backend.library.dummy_library = [track]
        self.core.tracklist.add(uris=[track.uri]).get()
        self.core.playback.play()
        self.send_request('currentsong')
        self.assertInResponse('file: dummy:/a')
        self.assertInResponse('Time: 0')
        self.assertInResponse('Artist: ')
        self.assertInResponse('Title: ')
        self.assertInResponse('Album: ')
        self.assertInResponse('Track: 0')
        self.assertNotInResponse('Date: ')
        self.assertInResponse('Pos: 0')
        self.assertInResponse('Id: 0')
        self.assertInResponse('OK')
    def test_currentsong_without_song(self):
        self.send_request('currentsong')
        self.assertInResponse('OK')
    def test_stats_command(self):
        self.send_request('stats')
        self.assertInResponse('OK')
    def test_status_command(self):
        self.send_request('status')
        self.assertInResponse('OK')
|
<commit_before>from __future__ import absolute_import, unicode_literals
from mopidy.models import Track
from tests.mpd import protocol
class StatusHandlerTest(protocol.BaseTestCase):
    def test_clearerror(self):
        self.send_request('clearerror')
        self.assertEqualResponse('ACK [0@0] {clearerror} Not implemented')
    def test_currentsong(self):
        track = Track()
        self.core.tracklist.add([track])
        self.core.playback.play()
        self.send_request('currentsong')
        self.assertInResponse('file: ')
        self.assertInResponse('Time: 0')
        self.assertInResponse('Artist: ')
        self.assertInResponse('Title: ')
        self.assertInResponse('Album: ')
        self.assertInResponse('Track: 0')
        self.assertNotInResponse('Date: ')
        self.assertInResponse('Pos: 0')
        self.assertInResponse('Id: 0')
        self.assertInResponse('OK')
    def test_currentsong_without_song(self):
        self.send_request('currentsong')
        self.assertInResponse('OK')
    def test_stats_command(self):
        self.send_request('stats')
        self.assertInResponse('OK')
    def test_status_command(self):
        self.send_request('status')
        self.assertInResponse('OK')
<commit_msg>tests: Stop using tracklist add tracks in mpd status test<commit_after>from __future__ import absolute_import, unicode_literals
from mopidy.models import Track
from tests.mpd import protocol
class StatusHandlerTest(protocol.BaseTestCase):
    def test_clearerror(self):
        self.send_request('clearerror')
        self.assertEqualResponse('ACK [0@0] {clearerror} Not implemented')
    def test_currentsong(self):
        track = Track(uri='dummy:/a')
        self.backend.library.dummy_library = [track]
        self.core.tracklist.add(uris=[track.uri]).get()
        self.core.playback.play()
        self.send_request('currentsong')
        self.assertInResponse('file: dummy:/a')
        self.assertInResponse('Time: 0')
        self.assertInResponse('Artist: ')
        self.assertInResponse('Title: ')
        self.assertInResponse('Album: ')
        self.assertInResponse('Track: 0')
        self.assertNotInResponse('Date: ')
        self.assertInResponse('Pos: 0')
        self.assertInResponse('Id: 0')
        self.assertInResponse('OK')
    def test_currentsong_without_song(self):
        self.send_request('currentsong')
        self.assertInResponse('OK')
    def test_stats_command(self):
        self.send_request('stats')
        self.assertInResponse('OK')
    def test_status_command(self):
        self.send_request('status')
        self.assertInResponse('OK')
|
8f60ea444d2732b5e0f1b73a24cd8e753f160e79
|
corehq/apps/userreports/specs.py
|
corehq/apps/userreports/specs.py
|
from jsonobject import StringProperty
def TypeProperty(value):
    """
    Shortcut for making a required property and restricting it to a single specified
    value. This adds additional validation that the objects are being wrapped as expected
    according to the type.
    """
    return StringProperty(required=True, choices=[value])
class EvaluationContext(object):
    """
    An evaluation context. Necessary for repeats to pass both the row of the repeat as well
    as the root document and the iteration number.
    """
    def __init__(self, root_doc, iteration):
        self.root_doc = root_doc
        self.iteration = iteration
|
from jsonobject import StringProperty
def TypeProperty(value):
    """
    Shortcut for making a required property and restricting it to a single specified
    value. This adds additional validation that the objects are being wrapped as expected
    according to the type.
    """
    return StringProperty(required=True, choices=[value])
class EvaluationContext(object):
    """
    An evaluation context. Necessary for repeats to pass both the row of the repeat as well
    as the root document and the iteration number.
    """
    def __init__(self, root_doc, iteration=0):
        self.root_doc = root_doc
        self.iteration = iteration
|
Set default iteration on EvaluationContext initializer
|
Set default iteration on EvaluationContext initializer
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq
|
from jsonobject import StringProperty
def TypeProperty(value):
    """
    Shortcut for making a required property and restricting it to a single specified
    value. This adds additional validation that the objects are being wrapped as expected
    according to the type.
    """
    return StringProperty(required=True, choices=[value])
class EvaluationContext(object):
    """
    An evaluation context. Necessary for repeats to pass both the row of the repeat as well
    as the root document and the iteration number.
    """
    def __init__(self, root_doc, iteration):
        self.root_doc = root_doc
        self.iteration = iteration
Set default iteration on EvaluationContext initializer
|
from jsonobject import StringProperty
def TypeProperty(value):
    """
    Shortcut for making a required property and restricting it to a single specified
    value. This adds additional validation that the objects are being wrapped as expected
    according to the type.
    """
    return StringProperty(required=True, choices=[value])
class EvaluationContext(object):
    """
    An evaluation context. Necessary for repeats to pass both the row of the repeat as well
    as the root document and the iteration number.
    """
    def __init__(self, root_doc, iteration=0):
        self.root_doc = root_doc
        self.iteration = iteration
|
<commit_before>from jsonobject import StringProperty
def TypeProperty(value):
    """
    Shortcut for making a required property and restricting it to a single specified
    value. This adds additional validation that the objects are being wrapped as expected
    according to the type.
    """
    return StringProperty(required=True, choices=[value])
class EvaluationContext(object):
    """
    An evaluation context. Necessary for repeats to pass both the row of the repeat as well
    as the root document and the iteration number.
    """
    def __init__(self, root_doc, iteration):
        self.root_doc = root_doc
        self.iteration = iteration
<commit_msg>Set default iteration on EvaluationContext initializer<commit_after>
|
from jsonobject import StringProperty
def TypeProperty(value):
    """
    Shortcut for making a required property and restricting it to a single specified
    value. This adds additional validation that the objects are being wrapped as expected
    according to the type.
    """
    return StringProperty(required=True, choices=[value])
class EvaluationContext(object):
    """
    An evaluation context. Necessary for repeats to pass both the row of the repeat as well
    as the root document and the iteration number.
    """
    def __init__(self, root_doc, iteration=0):
        self.root_doc = root_doc
        self.iteration = iteration
|
from jsonobject import StringProperty
def TypeProperty(value):
    """
    Shortcut for making a required property and restricting it to a single specified
    value. This adds additional validation that the objects are being wrapped as expected
    according to the type.
    """
    return StringProperty(required=True, choices=[value])
class EvaluationContext(object):
    """
    An evaluation context. Necessary for repeats to pass both the row of the repeat as well
    as the root document and the iteration number.
    """
    def __init__(self, root_doc, iteration):
        self.root_doc = root_doc
        self.iteration = iteration
Set default iteration on EvaluationContext initializerfrom jsonobject import StringProperty
def TypeProperty(value):
    """
    Shortcut for making a required property and restricting it to a single specified
    value. This adds additional validation that the objects are being wrapped as expected
    according to the type.
    """
    return StringProperty(required=True, choices=[value])
class EvaluationContext(object):
    """
    An evaluation context. Necessary for repeats to pass both the row of the repeat as well
    as the root document and the iteration number.
    """
    def __init__(self, root_doc, iteration=0):
        self.root_doc = root_doc
        self.iteration = iteration
|
<commit_before>from jsonobject import StringProperty
def TypeProperty(value):
    """
    Shortcut for making a required property and restricting it to a single specified
    value. This adds additional validation that the objects are being wrapped as expected
    according to the type.
    """
    return StringProperty(required=True, choices=[value])
class EvaluationContext(object):
    """
    An evaluation context. Necessary for repeats to pass both the row of the repeat as well
    as the root document and the iteration number.
    """
    def __init__(self, root_doc, iteration):
        self.root_doc = root_doc
        self.iteration = iteration
<commit_msg>Set default iteration on EvaluationContext initializer<commit_after>from jsonobject import StringProperty
def TypeProperty(value):
    """
    Shortcut for making a required property and restricting it to a single specified
    value. This adds additional validation that the objects are being wrapped as expected
    according to the type.
    """
    return StringProperty(required=True, choices=[value])
class EvaluationContext(object):
    """
    An evaluation context. Necessary for repeats to pass both the row of the repeat as well
    as the root document and the iteration number.
    """
    def __init__(self, root_doc, iteration=0):
        self.root_doc = root_doc
        self.iteration = iteration
|
31dd9f5ec73db577bf00d7411ecffeba30691d0c
|
django_lean/lean_analytics/models.py
|
django_lean/lean_analytics/models.py
|
from django_lean.experiments.models import GoalRecord
from django_lean.experiments.signals import goal_recorded, user_enrolled
from django_lean.lean_analytics import get_all_analytics
def analytics_goalrecord(sender, goal_record, experiment_user, *args, **kwargs):
    for analytics in get_all_analytics():
        analytics.record(goal_record=goal_record,
                         experiment_user=experiment_user)
goal_recorded.connect(analytics_goalrecord, sender=GoalRecord)
def analytics_enrolled(sender, experiment, experiment_user, group_id,
                       *args, **kwargs):
    for analytics in get_all_analytics():
        analytics.enroll(experiment=experiment,
                         experiment_user=experiment_user,
                         group_id=group_id)
user_enrolled.connect(analytics_enrolled)
|
from django.conf import settings
from django_lean.experiments.models import GoalRecord
from django_lean.experiments.signals import goal_recorded, user_enrolled
from django_lean.lean_analytics import get_all_analytics
def analytics_goalrecord(sender, goal_record, experiment_user, *args, **kwargs):
    if getattr(settings, 'LEAN_ANALYTICS_FOR_EXPERIMENTS', False):
        for analytics in get_all_analytics():
            analytics.record(goal_record=goal_record,
                             experiment_user=experiment_user)
goal_recorded.connect(analytics_goalrecord, sender=GoalRecord)
def analytics_enrolled(sender, experiment, experiment_user, group_id,
                       *args, **kwargs):
    if getattr(settings, 'LEAN_ANALYTICS_FOR_EXPERIMENTS', False):
        for analytics in get_all_analytics():
            analytics.enroll(experiment=experiment,
                             experiment_user=experiment_user,
                             group_id=group_id)
user_enrolled.connect(analytics_enrolled)
|
Make it possible to disable enrollment and goal record analytics.
|
Make it possible to disable enrollment and goal record analytics.
|
Python
|
bsd-3-clause
|
e-loue/django-lean,e-loue/django-lean
|
from django_lean.experiments.models import GoalRecord
from django_lean.experiments.signals import goal_recorded, user_enrolled
from django_lean.lean_analytics import get_all_analytics
def analytics_goalrecord(sender, goal_record, experiment_user, *args, **kwargs):
    for analytics in get_all_analytics():
        analytics.record(goal_record=goal_record,
                         experiment_user=experiment_user)
goal_recorded.connect(analytics_goalrecord, sender=GoalRecord)
def analytics_enrolled(sender, experiment, experiment_user, group_id,
                       *args, **kwargs):
    for analytics in get_all_analytics():
        analytics.enroll(experiment=experiment,
                         experiment_user=experiment_user,
                         group_id=group_id)
user_enrolled.connect(analytics_enrolled)
Make it possible to disable enrollment and goal record analytics.
|
from django.conf import settings
from django_lean.experiments.models import GoalRecord
from django_lean.experiments.signals import goal_recorded, user_enrolled
from django_lean.lean_analytics import get_all_analytics
def analytics_goalrecord(sender, goal_record, experiment_user, *args, **kwargs):
    if getattr(settings, 'LEAN_ANALYTICS_FOR_EXPERIMENTS', False):
        for analytics in get_all_analytics():
            analytics.record(goal_record=goal_record,
                             experiment_user=experiment_user)
goal_recorded.connect(analytics_goalrecord, sender=GoalRecord)
def analytics_enrolled(sender, experiment, experiment_user, group_id,
                       *args, **kwargs):
    if getattr(settings, 'LEAN_ANALYTICS_FOR_EXPERIMENTS', False):
        for analytics in get_all_analytics():
            analytics.enroll(experiment=experiment,
                             experiment_user=experiment_user,
                             group_id=group_id)
user_enrolled.connect(analytics_enrolled)
|
<commit_before>from django_lean.experiments.models import GoalRecord
from django_lean.experiments.signals import goal_recorded, user_enrolled
from django_lean.lean_analytics import get_all_analytics
def analytics_goalrecord(sender, goal_record, experiment_user, *args, **kwargs):
    for analytics in get_all_analytics():
        analytics.record(goal_record=goal_record,
                         experiment_user=experiment_user)
goal_recorded.connect(analytics_goalrecord, sender=GoalRecord)
def analytics_enrolled(sender, experiment, experiment_user, group_id,
                       *args, **kwargs):
    for analytics in get_all_analytics():
        analytics.enroll(experiment=experiment,
                         experiment_user=experiment_user,
                         group_id=group_id)
user_enrolled.connect(analytics_enrolled)
<commit_msg>Make it possible to disable enrollment and goal record analytics.<commit_after>
|
from django.conf import settings
from django_lean.experiments.models import GoalRecord
from django_lean.experiments.signals import goal_recorded, user_enrolled
from django_lean.lean_analytics import get_all_analytics
def analytics_goalrecord(sender, goal_record, experiment_user, *args, **kwargs):
if getattr(settings, 'LEAN_ANALYTICS_FOR_EXPERIMENTS', False):
for analytics in get_all_analytics():
analytics.record(goal_record=goal_record,
experiment_user=experiment_user)
goal_recorded.connect(analytics_goalrecord, sender=GoalRecord)
def analytics_enrolled(sender, experiment, experiment_user, group_id,
*args, **kwargs):
if getattr(settings, 'LEAN_ANALYTICS_FOR_EXPERIMENTS', False):
for analytics in get_all_analytics():
analytics.enroll(experiment=experiment,
experiment_user=experiment_user,
group_id=group_id)
user_enrolled.connect(analytics_enrolled)
|
from django_lean.experiments.models import GoalRecord
from django_lean.experiments.signals import goal_recorded, user_enrolled
from django_lean.lean_analytics import get_all_analytics
def analytics_goalrecord(sender, goal_record, experiment_user, *args, **kwargs):
for analytics in get_all_analytics():
analytics.record(goal_record=goal_record,
experiment_user=experiment_user)
goal_recorded.connect(analytics_goalrecord, sender=GoalRecord)
def analytics_enrolled(sender, experiment, experiment_user, group_id,
*args, **kwargs):
for analytics in get_all_analytics():
analytics.enroll(experiment=experiment,
experiment_user=experiment_user,
group_id=group_id)
user_enrolled.connect(analytics_enrolled)
Make it possible to disable enrollment and goal record analytics.from django.conf import settings
from django_lean.experiments.models import GoalRecord
from django_lean.experiments.signals import goal_recorded, user_enrolled
from django_lean.lean_analytics import get_all_analytics
def analytics_goalrecord(sender, goal_record, experiment_user, *args, **kwargs):
if getattr(settings, 'LEAN_ANALYTICS_FOR_EXPERIMENTS', False):
for analytics in get_all_analytics():
analytics.record(goal_record=goal_record,
experiment_user=experiment_user)
goal_recorded.connect(analytics_goalrecord, sender=GoalRecord)
def analytics_enrolled(sender, experiment, experiment_user, group_id,
*args, **kwargs):
if getattr(settings, 'LEAN_ANALYTICS_FOR_EXPERIMENTS', False):
for analytics in get_all_analytics():
analytics.enroll(experiment=experiment,
experiment_user=experiment_user,
group_id=group_id)
user_enrolled.connect(analytics_enrolled)
|
<commit_before>from django_lean.experiments.models import GoalRecord
from django_lean.experiments.signals import goal_recorded, user_enrolled
from django_lean.lean_analytics import get_all_analytics
def analytics_goalrecord(sender, goal_record, experiment_user, *args, **kwargs):
for analytics in get_all_analytics():
analytics.record(goal_record=goal_record,
experiment_user=experiment_user)
goal_recorded.connect(analytics_goalrecord, sender=GoalRecord)
def analytics_enrolled(sender, experiment, experiment_user, group_id,
*args, **kwargs):
for analytics in get_all_analytics():
analytics.enroll(experiment=experiment,
experiment_user=experiment_user,
group_id=group_id)
user_enrolled.connect(analytics_enrolled)
<commit_msg>Make it possible to disable enrollment and goal record analytics.<commit_after>from django.conf import settings
from django_lean.experiments.models import GoalRecord
from django_lean.experiments.signals import goal_recorded, user_enrolled
from django_lean.lean_analytics import get_all_analytics
def analytics_goalrecord(sender, goal_record, experiment_user, *args, **kwargs):
if getattr(settings, 'LEAN_ANALYTICS_FOR_EXPERIMENTS', False):
for analytics in get_all_analytics():
analytics.record(goal_record=goal_record,
experiment_user=experiment_user)
goal_recorded.connect(analytics_goalrecord, sender=GoalRecord)
def analytics_enrolled(sender, experiment, experiment_user, group_id,
*args, **kwargs):
if getattr(settings, 'LEAN_ANALYTICS_FOR_EXPERIMENTS', False):
for analytics in get_all_analytics():
analytics.enroll(experiment=experiment,
experiment_user=experiment_user,
group_id=group_id)
user_enrolled.connect(analytics_enrolled)
|
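A minimal sketch of the setting the patched handlers consult, assuming an ordinary Django settings module; only the flag name LEAN_ANALYTICS_FOR_EXPERIMENTS comes from the diff above, everything else is illustrative:
# settings.py -- hypothetical project configuration
# Enable forwarding of enrollments and goal records to analytics backends.
LEAN_ANALYTICS_FOR_EXPERIMENTS = True
# Omitting the setting (or setting it to False) silences both signal
# handlers, because the patched code reads it via getattr(settings, ..., False).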
7da561d7bf3affecce8b10b50818591ccebe0ba2
|
dog/core/cog.py
|
dog/core/cog.py
|
class Cog:
""" The Cog baseclass that all cogs should inherit from. """
def __init__(self, bot):
self.bot = bot
|
import logging
class Cog:
""" The Cog baseclass that all cogs should inherit from. """
def __init__(self, bot):
self.bot = bot
self.logger = logging.getLogger('cog.' + type(self).__name__.lower())
|
Add logger attribute in Cog baseclass
|
Add logger attribute in Cog baseclass
I don't feel like refactoring all of my cog code to use this attribute at the moment, so I'll just leave this here for now.
|
Python
|
mit
|
sliceofcode/dogbot,slice/dogbot,slice/dogbot,sliceofcode/dogbot,slice/dogbot
|
class Cog:
""" The Cog baseclass that all cogs should inherit from. """
def __init__(self, bot):
self.bot = bot
Add logger attribute in Cog baseclass
I don't feel like refactoring all of my cog code to use this attribute at the moment, so I'll just leave this here for now.
|
import logging
class Cog:
""" The Cog baseclass that all cogs should inherit from. """
def __init__(self, bot):
self.bot = bot
self.logger = logging.getLogger('cog.' + type(self).__name__.lower())
|
<commit_before>class Cog:
""" The Cog baseclass that all cogs should inherit from. """
def __init__(self, bot):
self.bot = bot
<commit_msg>Add logger attribute in Cog baseclass
I don't feel like refactoring all of my cog code to use this attribute at the moment, so I'll just leave this here for now.<commit_after>
|
import logging
class Cog:
""" The Cog baseclass that all cogs should inherit from. """
def __init__(self, bot):
self.bot = bot
self.logger = logging.getLogger('cog.' + type(self).__name__.lower())
|
class Cog:
""" The Cog baseclass that all cogs should inherit from. """
def __init__(self, bot):
self.bot = bot
Add logger attribute in Cog baseclass
I don't feel like refactoring all of my cog code to use this attribute at the moment, so I'll just leave this here for now.import logging
class Cog:
""" The Cog baseclass that all cogs should inherit from. """
def __init__(self, bot):
self.bot = bot
self.logger = logging.getLogger('cog.' + type(self).__name__.lower())
|
<commit_before>class Cog:
""" The Cog baseclass that all cogs should inherit from. """
def __init__(self, bot):
self.bot = bot
<commit_msg>Add logger attribute in Cog baseclass
I don't feel like refactoring all of my cog code to use this attribute at the moment, so I'll just leave this here for now.<commit_after>import logging
class Cog:
""" The Cog baseclass that all cogs should inherit from. """
def __init__(self, bot):
self.bot = bot
self.logger = logging.getLogger('cog.' + type(self).__name__.lower())
|
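A short sketch of how a subclass would pick up the new attribute; the Greeter cog and its greet method are hypothetical, only the Cog base class mirrors the patch above:
import logging

logging.basicConfig(level=logging.INFO)

class Cog:
    """ The Cog baseclass that all cogs should inherit from. """
    def __init__(self, bot):
        self.bot = bot
        self.logger = logging.getLogger('cog.' + type(self).__name__.lower())

class Greeter(Cog):
    def greet(self):
        # Messages land under the per-cog 'cog.greeter' logger name,
        # derived automatically from the subclass name.
        self.logger.info('greeting users')

Greeter(bot=None).greet()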
eafafd3d90024c552a6a607871c1441e358eb927
|
Bar.py
|
Bar.py
|
import pylab
from matplotlib import pyplot
from PlotInfo import *
class Bar(PlotInfo):
"""
A bar chart consisting of a single series of bars.
"""
def __init__(self):
PlotInfo.__init__(self, "bar")
self.width=0.8
self.color="black"
self.edgeColor=None
self.hatch=None
def draw(self, axis):
PlotInfo.draw(self, axis)
kwdict = self.getAttributes()
return [[axis.bar(self.xValues, self.yValues, **kwdict)[0]],
[self.label]]
def getAttributes(self):
kwdict = {}
kwdict["color"] = self.color
kwdict["label"] = self.label
kwdict["width"] = self.width
if self.hatch is not None:
kwdict["hatch"] = self.hatch
print >>sys.stderr, "WARNING: Setting hash for bar charts only seems to work when exporting to svg or png"
if self.edgeColor is not None:
kwdict["edgecolor"] = self.edgeColor
return kwdict
|
import pylab
from matplotlib import pyplot
from PlotInfo import *
class Bar(PlotInfo):
"""
A bar chart consisting of a single series of bars.
"""
def __init__(self):
PlotInfo.__init__(self, "bar")
self.width=0.8
self.color="black"
self.edgeColor=None
self.hatch=None
def draw(self, axis):
if self.xTickLabelPoints is None:
self.xTickLabelPoints = \
[x + (self.width / 2.0) for x in self.xValues]
if self.xTickLabels is None:
self.xTickLabels = self.xValues
PlotInfo.draw(self, axis)
kwdict = self.getAttributes()
return [[axis.bar(self.xValues, self.yValues, **kwdict)[0]],
[self.label]]
def getAttributes(self):
kwdict = {}
kwdict["color"] = self.color
kwdict["label"] = self.label
kwdict["width"] = self.width
if self.hatch is not None:
kwdict["hatch"] = self.hatch
print >>sys.stderr, "WARNING: Setting hash for bar charts only seems to work when exporting to svg or png"
if self.edgeColor is not None:
kwdict["edgecolor"] = self.edgeColor
return kwdict
|
Fix bar graph x-axis centering.
|
Fix bar graph x-axis centering.
|
Python
|
bsd-3-clause
|
alexras/boomslang
|
import pylab
from matplotlib import pyplot
from PlotInfo import *
class Bar(PlotInfo):
"""
A bar chart consisting of a single series of bars.
"""
def __init__(self):
PlotInfo.__init__(self, "bar")
self.width=0.8
self.color="black"
self.edgeColor=None
self.hatch=None
def draw(self, axis):
PlotInfo.draw(self, axis)
kwdict = self.getAttributes()
return [[axis.bar(self.xValues, self.yValues, **kwdict)[0]],
[self.label]]
def getAttributes(self):
kwdict = {}
kwdict["color"] = self.color
kwdict["label"] = self.label
kwdict["width"] = self.width
if self.hatch is not None:
kwdict["hatch"] = self.hatch
print >>sys.stderr, "WARNING: Setting hash for bar charts only seems to work when exporting to svg or png"
if self.edgeColor is not None:
kwdict["edgecolor"] = self.edgeColor
return kwdict
Fix bar graph x-axis centering.
|
import pylab
from matplotlib import pyplot
from PlotInfo import *
class Bar(PlotInfo):
"""
A bar chart consisting of a single series of bars.
"""
def __init__(self):
PlotInfo.__init__(self, "bar")
self.width=0.8
self.color="black"
self.edgeColor=None
self.hatch=None
def draw(self, axis):
if self.xTickLabelPoints is None:
self.xTickLabelPoints = \
[x + (self.width / 2.0) for x in self.xValues]
if self.xTickLabels is None:
self.xTickLabels = self.xValues
PlotInfo.draw(self, axis)
kwdict = self.getAttributes()
return [[axis.bar(self.xValues, self.yValues, **kwdict)[0]],
[self.label]]
def getAttributes(self):
kwdict = {}
kwdict["color"] = self.color
kwdict["label"] = self.label
kwdict["width"] = self.width
if self.hatch is not None:
kwdict["hatch"] = self.hatch
print >>sys.stderr, "WARNING: Setting hash for bar charts only seems to work when exporting to svg or png"
if self.edgeColor is not None:
kwdict["edgecolor"] = self.edgeColor
return kwdict
|
<commit_before>import pylab
from matplotlib import pyplot
from PlotInfo import *
class Bar(PlotInfo):
"""
A bar chart consisting of a single series of bars.
"""
def __init__(self):
PlotInfo.__init__(self, "bar")
self.width=0.8
self.color="black"
self.edgeColor=None
self.hatch=None
def draw(self, axis):
PlotInfo.draw(self, axis)
kwdict = self.getAttributes()
return [[axis.bar(self.xValues, self.yValues, **kwdict)[0]],
[self.label]]
def getAttributes(self):
kwdict = {}
kwdict["color"] = self.color
kwdict["label"] = self.label
kwdict["width"] = self.width
if self.hatch is not None:
kwdict["hatch"] = self.hatch
print >>sys.stderr, "WARNING: Setting hash for bar charts only seems to work when exporting to svg or png"
if self.edgeColor is not None:
kwdict["edgecolor"] = self.edgeColor
return kwdict
<commit_msg>Fix bar graph x-axis centering.<commit_after>
|
import pylab
from matplotlib import pyplot
from PlotInfo import *
class Bar(PlotInfo):
"""
A bar chart consisting of a single series of bars.
"""
def __init__(self):
PlotInfo.__init__(self, "bar")
self.width=0.8
self.color="black"
self.edgeColor=None
self.hatch=None
def draw(self, axis):
if self.xTickLabelPoints is None:
self.xTickLabelPoints = \
[x + (self.width / 2.0) for x in self.xValues]
if self.xTickLabels is None:
self.xTickLabels = self.xValues
PlotInfo.draw(self, axis)
kwdict = self.getAttributes()
return [[axis.bar(self.xValues, self.yValues, **kwdict)[0]],
[self.label]]
def getAttributes(self):
kwdict = {}
kwdict["color"] = self.color
kwdict["label"] = self.label
kwdict["width"] = self.width
if self.hatch is not None:
kwdict["hatch"] = self.hatch
print >>sys.stderr, "WARNING: Setting hash for bar charts only seems to work when exporting to svg or png"
if self.edgeColor is not None:
kwdict["edgecolor"] = self.edgeColor
return kwdict
|
import pylab
from matplotlib import pyplot
from PlotInfo import *
class Bar(PlotInfo):
"""
A bar chart consisting of a single series of bars.
"""
def __init__(self):
PlotInfo.__init__(self, "bar")
self.width=0.8
self.color="black"
self.edgeColor=None
self.hatch=None
def draw(self, axis):
PlotInfo.draw(self, axis)
kwdict = self.getAttributes()
return [[axis.bar(self.xValues, self.yValues, **kwdict)[0]],
[self.label]]
def getAttributes(self):
kwdict = {}
kwdict["color"] = self.color
kwdict["label"] = self.label
kwdict["width"] = self.width
if self.hatch is not None:
kwdict["hatch"] = self.hatch
print >>sys.stderr, "WARNING: Setting hash for bar charts only seems to work when exporting to svg or png"
if self.edgeColor is not None:
kwdict["edgecolor"] = self.edgeColor
return kwdict
Fix bar graph x-axis centering.import pylab
from matplotlib import pyplot
from PlotInfo import *
class Bar(PlotInfo):
"""
A bar chart consisting of a single series of bars.
"""
def __init__(self):
PlotInfo.__init__(self, "bar")
self.width=0.8
self.color="black"
self.edgeColor=None
self.hatch=None
def draw(self, axis):
if self.xTickLabelPoints is None:
self.xTickLabelPoints = \
[x + (self.width / 2.0) for x in self.xValues]
if self.xTickLabels is None:
self.xTickLabels = self.xValues
PlotInfo.draw(self, axis)
kwdict = self.getAttributes()
return [[axis.bar(self.xValues, self.yValues, **kwdict)[0]],
[self.label]]
def getAttributes(self):
kwdict = {}
kwdict["color"] = self.color
kwdict["label"] = self.label
kwdict["width"] = self.width
if self.hatch is not None:
kwdict["hatch"] = self.hatch
print >>sys.stderr, "WARNING: Setting hash for bar charts only seems to work when exporting to svg or png"
if self.edgeColor is not None:
kwdict["edgecolor"] = self.edgeColor
return kwdict
|
<commit_before>import pylab
from matplotlib import pyplot
from PlotInfo import *
class Bar(PlotInfo):
"""
A bar chart consisting of a single series of bars.
"""
def __init__(self):
PlotInfo.__init__(self, "bar")
self.width=0.8
self.color="black"
self.edgeColor=None
self.hatch=None
def draw(self, axis):
PlotInfo.draw(self, axis)
kwdict = self.getAttributes()
return [[axis.bar(self.xValues, self.yValues, **kwdict)[0]],
[self.label]]
def getAttributes(self):
kwdict = {}
kwdict["color"] = self.color
kwdict["label"] = self.label
kwdict["width"] = self.width
if self.hatch is not None:
kwdict["hatch"] = self.hatch
print >>sys.stderr, "WARNING: Setting hash for bar charts only seems to work when exporting to svg or png"
if self.edgeColor is not None:
kwdict["edgecolor"] = self.edgeColor
return kwdict
<commit_msg>Fix bar graph x-axis centering.<commit_after>import pylab
from matplotlib import pyplot
from PlotInfo import *
class Bar(PlotInfo):
"""
A bar chart consisting of a single series of bars.
"""
def __init__(self):
PlotInfo.__init__(self, "bar")
self.width=0.8
self.color="black"
self.edgeColor=None
self.hatch=None
def draw(self, axis):
if self.xTickLabelPoints is None:
self.xTickLabelPoints = \
[x + (self.width / 2.0) for x in self.xValues]
if self.xTickLabels is None:
self.xTickLabels = self.xValues
PlotInfo.draw(self, axis)
kwdict = self.getAttributes()
return [[axis.bar(self.xValues, self.yValues, **kwdict)[0]],
[self.label]]
def getAttributes(self):
kwdict = {}
kwdict["color"] = self.color
kwdict["label"] = self.label
kwdict["width"] = self.width
if self.hatch is not None:
kwdict["hatch"] = self.hatch
print >>sys.stderr, "WARNING: Setting hash for bar charts only seems to work when exporting to svg or png"
if self.edgeColor is not None:
kwdict["edgecolor"] = self.edgeColor
return kwdict
|
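The centering arithmetic from the fix in isolation, as a runnable sketch (the sample values are invented; matplotlib of this era anchored bars at their left edge by default, hence the half-width shift):
width = 0.8
xValues = [0, 1, 2, 3]
# Shift each tick by half the bar width so the label sits under the
# centre of the bar instead of its left edge.
xTickLabelPoints = [x + (width / 2.0) for x in xValues]
print(xTickLabelPoints)  # [0.4, 1.4, 2.4, 3.4]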
320214ca1636415bc4d677ba9e3b40f0bf24c8f9
|
openprescribing/frontend/migrations/0008_create_searchbookmark.py
|
openprescribing/frontend/migrations/0008_create_searchbookmark.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-07-07 11:58
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('frontend', '0007_auto_20160908_0811'),
]
operations = [
migrations.CreateModel(
name='SearchBookmark',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('low_is_good', models.NullBooleanField()),
('url', models.CharField(max_length=200)),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
]
)
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-07-07 11:58
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('frontend', '0007_add_cost_per_fields'),
]
operations = [
migrations.CreateModel(
name='SearchBookmark',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('low_is_good', models.NullBooleanField()),
('url', models.CharField(max_length=200)),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
]
)
]
|
Fix multiple leaf nodes in migrations
|
Fix multiple leaf nodes in migrations
|
Python
|
mit
|
ebmdatalab/openprescribing,ebmdatalab/openprescribing,ebmdatalab/openprescribing,annapowellsmith/openpresc,annapowellsmith/openpresc,ebmdatalab/openprescribing,annapowellsmith/openpresc,annapowellsmith/openpresc
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-07-07 11:58
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('frontend', '0007_auto_20160908_0811'),
]
operations = [
migrations.CreateModel(
name='SearchBookmark',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('low_is_good', models.NullBooleanField()),
('url', models.CharField(max_length=200)),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
]
)
]
Fix multiple leaf nodes in migrations
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-07-07 11:58
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('frontend', '0007_add_cost_per_fields'),
]
operations = [
migrations.CreateModel(
name='SearchBookmark',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('low_is_good', models.NullBooleanField()),
('url', models.CharField(max_length=200)),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
]
)
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-07-07 11:58
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('frontend', '0007_auto_20160908_0811'),
]
operations = [
migrations.CreateModel(
name='SearchBookmark',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('low_is_good', models.NullBooleanField()),
('url', models.CharField(max_length=200)),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
]
)
]
<commit_msg>Fix multiple leaf nodes in migrations<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-07-07 11:58
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('frontend', '0007_add_cost_per_fields'),
]
operations = [
migrations.CreateModel(
name='SearchBookmark',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('low_is_good', models.NullBooleanField()),
('url', models.CharField(max_length=200)),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
]
)
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-07-07 11:58
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('frontend', '0007_auto_20160908_0811'),
]
operations = [
migrations.CreateModel(
name='SearchBookmark',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('low_is_good', models.NullBooleanField()),
('url', models.CharField(max_length=200)),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
]
)
]
Fix multiple leaf nodes in migrations# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-07-07 11:58
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('frontend', '0007_add_cost_per_fields'),
]
operations = [
migrations.CreateModel(
name='SearchBookmark',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('low_is_good', models.NullBooleanField()),
('url', models.CharField(max_length=200)),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
]
)
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-07-07 11:58
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('frontend', '0007_auto_20160908_0811'),
]
operations = [
migrations.CreateModel(
name='SearchBookmark',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('low_is_good', models.NullBooleanField()),
('url', models.CharField(max_length=200)),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
]
)
]
<commit_msg>Fix multiple leaf nodes in migrations<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-07-07 11:58
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('frontend', '0007_add_cost_per_fields'),
]
operations = [
migrations.CreateModel(
name='SearchBookmark',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('low_is_good', models.NullBooleanField()),
('url', models.CharField(max_length=200)),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
]
)
]
|
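A sketch of the failure the renamed dependency avoids: Django builds a per-app migration graph and refuses to proceed when more than one migration has no dependants. The dictionaries below are hypothetical and only mirror the shape of the fix:
# Hypothetical per-app graph: migration -> the parent it depends on.
def leaves(deps):
    # A leaf is a migration that nothing else depends on.
    parents = set(deps.values())
    return sorted(m for m in deps if m not in parents)

# Before the fix, 0008 named a migration that no longer existed, so the
# real 0007 had no dependants and the app ended up with two leaves.
broken = {'0007_add_cost_per_fields': '0006_earlier',
          '0008_create_searchbookmark': '0007_auto_20160908_0811'}
print(leaves(broken))  # ['0007_add_cost_per_fields', '0008_create_searchbookmark']

# After the fix there is a single chain and a single leaf.
fixed = {'0007_add_cost_per_fields': '0006_earlier',
         '0008_create_searchbookmark': '0007_add_cost_per_fields'}
print(leaves(fixed))   # ['0008_create_searchbookmark']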
106eaf7d22bf4039756c0ae32c125d475eb4c109
|
utils/html.py
|
utils/html.py
|
#coding=UTF-8
__author__ = 'Gareth Coles'
from HTMLParser import HTMLParser
import htmlentitydefs
class HTMLTextExtractor(HTMLParser):
def __init__(self):
HTMLParser.__init__(self)
self.result = []
def handle_data(self, d):
self.result.append(d)
def handle_charref(self, number):
if number[0] in (u'x', u'X'):
codepoint = int(number[1:], 16)
else:
codepoint = int(number)
self.result.append(unichr(codepoint))
def handle_entityref(self, name):
codepoint = htmlentitydefs.name2codepoint[name]
self.result.append(unichr(codepoint))
def get_text(self):
return u''.join(self.result)
def html_to_text(html):
s = HTMLTextExtractor()
s.feed(html)
return s.get_text()
|
#coding=UTF-8
__author__ = 'Gareth Coles'
from HTMLParser import HTMLParser
import htmlentitydefs
class HTMLTextExtractor(HTMLParser):
def __init__(self, newlines=True):
HTMLParser.__init__(self)
self.result = []
self.newlines = newlines
def handle_starttag(self, tag, attrs):
if self.newlines:
if tag == 'br':
self.result.append('\n')
elif tag == 'p':
self.result.append('\n')
def handle_endtag(self, tag):
if self.newlines:
if tag == 'p':
self.result.append('\n')
def handle_data(self, d):
self.result.append(d)
def handle_charref(self, number):
if number[0] in (u'x', u'X'):
codepoint = int(number[1:], 16)
else:
codepoint = int(number)
self.result.append(unichr(codepoint))
def handle_entityref(self, name):
codepoint = htmlentitydefs.name2codepoint[name]
self.result.append(unichr(codepoint))
def get_text(self):
return u''.join(self.result)
def html_to_text(html, newlines=True):
s = HTMLTextExtractor(newlines)
s.feed(html)
return s.get_text()
|
Add new-line support to HTML text extractor
|
Add new-line support to HTML text extractor
|
Python
|
artistic-2.0
|
UltrosBot/Ultros,UltrosBot/Ultros
|
#coding=UTF-8
__author__ = 'Gareth Coles'
from HTMLParser import HTMLParser
import htmlentitydefs
class HTMLTextExtractor(HTMLParser):
def __init__(self):
HTMLParser.__init__(self)
self.result = []
def handle_data(self, d):
self.result.append(d)
def handle_charref(self, number):
if number[0] in (u'x', u'X'):
codepoint = int(number[1:], 16)
else:
codepoint = int(number)
self.result.append(unichr(codepoint))
def handle_entityref(self, name):
codepoint = htmlentitydefs.name2codepoint[name]
self.result.append(unichr(codepoint))
def get_text(self):
return u''.join(self.result)
def html_to_text(html):
s = HTMLTextExtractor()
s.feed(html)
return s.get_text()
Add new-line support to HTML text extractor
|
#coding=UTF-8
__author__ = 'Gareth Coles'
from HTMLParser import HTMLParser
import htmlentitydefs
class HTMLTextExtractor(HTMLParser):
def __init__(self, newlines=True):
HTMLParser.__init__(self)
self.result = []
self.newlines = newlines
def handle_starttag(self, tag, attrs):
if self.newlines:
if tag == 'br':
self.result.append('\n')
elif tag == 'p':
self.result.append('\n')
def handle_endtag(self, tag):
if self.newlines:
if tag == 'p':
self.result.append('\n')
def handle_data(self, d):
self.result.append(d)
def handle_charref(self, number):
if number[0] in (u'x', u'X'):
codepoint = int(number[1:], 16)
else:
codepoint = int(number)
self.result.append(unichr(codepoint))
def handle_entityref(self, name):
codepoint = htmlentitydefs.name2codepoint[name]
self.result.append(unichr(codepoint))
def get_text(self):
return u''.join(self.result)
def html_to_text(html, newlines=True):
s = HTMLTextExtractor(newlines)
s.feed(html)
return s.get_text()
|
<commit_before>#coding=UTF-8
__author__ = 'Gareth Coles'
from HTMLParser import HTMLParser
import htmlentitydefs
class HTMLTextExtractor(HTMLParser):
def __init__(self):
HTMLParser.__init__(self)
self.result = []
def handle_data(self, d):
self.result.append(d)
def handle_charref(self, number):
if number[0] in (u'x', u'X'):
codepoint = int(number[1:], 16)
else:
codepoint = int(number)
self.result.append(unichr(codepoint))
def handle_entityref(self, name):
codepoint = htmlentitydefs.name2codepoint[name]
self.result.append(unichr(codepoint))
def get_text(self):
return u''.join(self.result)
def html_to_text(html):
s = HTMLTextExtractor()
s.feed(html)
return s.get_text()
<commit_msg>Add new-line support to HTML text extractor<commit_after>
|
#coding=UTF-8
__author__ = 'Gareth Coles'
from HTMLParser import HTMLParser
import htmlentitydefs
class HTMLTextExtractor(HTMLParser):
def __init__(self, newlines=True):
HTMLParser.__init__(self)
self.result = []
self.newlines = newlines
def handle_starttag(self, tag, attrs):
if self.newlines:
if tag == 'br':
self.result.append('\n')
elif tag == 'p':
self.result.append('\n')
def handle_endtag(self, tag):
if self.newlines:
if tag == 'p':
self.result.append('\n')
def handle_data(self, d):
self.result.append(d)
def handle_charref(self, number):
if number[0] in (u'x', u'X'):
codepoint = int(number[1:], 16)
else:
codepoint = int(number)
self.result.append(unichr(codepoint))
def handle_entityref(self, name):
codepoint = htmlentitydefs.name2codepoint[name]
self.result.append(unichr(codepoint))
def get_text(self):
return u''.join(self.result)
def html_to_text(html, newlines=True):
s = HTMLTextExtractor(newlines)
s.feed(html)
return s.get_text()
|
#coding=UTF-8
__author__ = 'Gareth Coles'
from HTMLParser import HTMLParser
import htmlentitydefs
class HTMLTextExtractor(HTMLParser):
def __init__(self):
HTMLParser.__init__(self)
self.result = []
def handle_data(self, d):
self.result.append(d)
def handle_charref(self, number):
if number[0] in (u'x', u'X'):
codepoint = int(number[1:], 16)
else:
codepoint = int(number)
self.result.append(unichr(codepoint))
def handle_entityref(self, name):
codepoint = htmlentitydefs.name2codepoint[name]
self.result.append(unichr(codepoint))
def get_text(self):
return u''.join(self.result)
def html_to_text(html):
s = HTMLTextExtractor()
s.feed(html)
return s.get_text()
Add new-line support to HTML text extractor#coding=UTF-8
__author__ = 'Gareth Coles'
from HTMLParser import HTMLParser
import htmlentitydefs
class HTMLTextExtractor(HTMLParser):
def __init__(self, newlines=True):
HTMLParser.__init__(self)
self.result = []
self.newlines = newlines
def handle_starttag(self, tag, attrs):
if self.newlines:
if tag == 'br':
self.result.append('\n')
elif tag == 'p':
self.result.append('\n')
def handle_endtag(self, tag):
if self.newlines:
if tag == 'p':
self.result.append('\n')
def handle_data(self, d):
self.result.append(d)
def handle_charref(self, number):
if number[0] in (u'x', u'X'):
codepoint = int(number[1:], 16)
else:
codepoint = int(number)
self.result.append(unichr(codepoint))
def handle_entityref(self, name):
codepoint = htmlentitydefs.name2codepoint[name]
self.result.append(unichr(codepoint))
def get_text(self):
return u''.join(self.result)
def html_to_text(html, newlines=True):
s = HTMLTextExtractor(newlines)
s.feed(html)
return s.get_text()
|
<commit_before>#coding=UTF-8
__author__ = 'Gareth Coles'
from HTMLParser import HTMLParser
import htmlentitydefs
class HTMLTextExtractor(HTMLParser):
def __init__(self):
HTMLParser.__init__(self)
self.result = []
def handle_data(self, d):
self.result.append(d)
def handle_charref(self, number):
if number[0] in (u'x', u'X'):
codepoint = int(number[1:], 16)
else:
codepoint = int(number)
self.result.append(unichr(codepoint))
def handle_entityref(self, name):
codepoint = htmlentitydefs.name2codepoint[name]
self.result.append(unichr(codepoint))
def get_text(self):
return u''.join(self.result)
def html_to_text(html):
s = HTMLTextExtractor()
s.feed(html)
return s.get_text()
<commit_msg>Add new-line support to HTML text extractor<commit_after>#coding=UTF-8
__author__ = 'Gareth Coles'
from HTMLParser import HTMLParser
import htmlentitydefs
class HTMLTextExtractor(HTMLParser):
def __init__(self, newlines=True):
HTMLParser.__init__(self)
self.result = []
self.newlines = newlines
def handle_starttag(self, tag, attrs):
if self.newlines:
if tag == 'br':
self.result.append('\n')
elif tag == 'p':
self.result.append('\n')
def handle_endtag(self, tag):
if self.newlines:
if tag == 'p':
self.result.append('\n')
def handle_data(self, d):
self.result.append(d)
def handle_charref(self, number):
if number[0] in (u'x', u'X'):
codepoint = int(number[1:], 16)
else:
codepoint = int(number)
self.result.append(unichr(codepoint))
def handle_entityref(self, name):
codepoint = htmlentitydefs.name2codepoint[name]
self.result.append(unichr(codepoint))
def get_text(self):
return u''.join(self.result)
def html_to_text(html, newlines=True):
s = HTMLTextExtractor(newlines)
s.feed(html)
return s.get_text()
|
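A usage sketch of the patched helper (Python 2, as the HTMLParser and unichr usage implies; the import path and markup are invented):
# Assuming the module above is importable as utils.html (hypothetical path).
from utils.html import html_to_text

markup = u"<p>Hello &amp; welcome</p>Plain<br>text"
print(html_to_text(markup))         # newlines inserted around <p> and at <br>
print(html_to_text(markup, False))  # u"Hello & welcomePlaintext"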
45bd76bbaafdeaeab28bb86ae719bdeefabbf95b
|
tests/test_rubymine.py
|
tests/test_rubymine.py
|
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
'.molecule/ansible_inventory').get_hosts('all')
desktop_file_location = "/root/.local/share/applications/rubymine-2017.2.desktop"
def test_desktop_file_exists(File):
f = File(desktop_file_location)
assert f.exists
assert f.is_file
def test_desktop_file_contains_fullpath(File):
f = File(desktop_file_location)
assert f.contains("/root/Tools/RubyMine-2017.2/bin/rubymine.png")
assert f.contains("/root/Tools/RubyMine-2017.2/bin/rubymine.sh")
def test_desktop_file_contains_right_name(File):
f = File(desktop_file_location)
assert f.contains("rubymine 2017.2")
def test_start_file_exists(File):
f = File('/root/Tools/RubyMine-2017.2/bin/rubymine.sh')
assert f.exists
assert f.is_file
|
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
'.molecule/ansible_inventory').get_hosts('all')
desktop_file_location = "/root/.local/share/applications/rubymine-2017.2.desktop"
def test_desktop_file_exists(File):
f = File(desktop_file_location)
assert f.exists
assert f.is_file
def test_desktop_file_contains_fullpath(File):
f = File(desktop_file_location)
assert f.contains("/root/Tools/RubyMine-2017.2/bin/rubymine.png")
assert f.contains("/root/Tools/RubyMine-2017.2/bin/rubymine.sh")
def test_desktop_file_contains_right_name(File):
f = File(desktop_file_location)
assert f.contains("RubyMine 2017.2")
def test_start_file_exists(File):
f = File('/root/Tools/RubyMine-2017.2/bin/rubymine.sh')
assert f.exists
assert f.is_file
|
Update testcases with proper casing
|
Update testcases with proper casing
|
Python
|
mit
|
henriklynggaard/ansible-role-rubymine
|
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
'.molecule/ansible_inventory').get_hosts('all')
desktop_file_location = "/root/.local/share/applications/rubymine-2017.2.desktop"
def test_desktop_file_exists(File):
f = File(desktop_file_location)
assert f.exists
assert f.is_file
def test_desktop_file_contains_fullpath(File):
f = File(desktop_file_location)
assert f.contains("/root/Tools/RubyMine-2017.2/bin/rubymine.png")
assert f.contains("/root/Tools/RubyMine-2017.2/bin/rubymine.sh")
def test_desktop_file_contains_right_name(File):
f = File(desktop_file_location)
assert f.contains("rubymine 2017.2")
def test_start_file_exists(File):
f = File('/root/Tools/RubyMine-2017.2/bin/rubymine.sh')
assert f.exists
assert f.is_file
Update testcases with proper casing
|
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
'.molecule/ansible_inventory').get_hosts('all')
desktop_file_location = "/root/.local/share/applications/rubymine-2017.2.desktop"
def test_desktop_file_exists(File):
f = File(desktop_file_location)
assert f.exists
assert f.is_file
def test_desktop_file_contains_fullpath(File):
f = File(desktop_file_location)
assert f.contains("/root/Tools/RubyMine-2017.2/bin/rubymine.png")
assert f.contains("/root/Tools/RubyMine-2017.2/bin/rubymine.sh")
def test_desktop_file_contains_right_name(File):
f = File(desktop_file_location)
assert f.contains("RubyMine 2017.2")
def test_start_file_exists(File):
f = File('/root/Tools/RubyMine-2017.2/bin/rubymine.sh')
assert f.exists
assert f.is_file
|
<commit_before>import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
'.molecule/ansible_inventory').get_hosts('all')
desktop_file_location = "/root/.local/share/applications/rubymine-2017.2.desktop"
def test_desktop_file_exists(File):
f = File(desktop_file_location)
assert f.exists
assert f.is_file
def test_desktop_file_contains_fullpath(File):
f = File(desktop_file_location)
assert f.contains("/root/Tools/RubyMine-2017.2/bin/rubymine.png")
assert f.contains("/root/Tools/RubyMine-2017.2/bin/rubymine.sh")
def test_desktop_file_contains_right_name(File):
f = File(desktop_file_location)
assert f.contains("rubymine 2017.2")
def test_start_file_exists(File):
f = File('/root/Tools/RubyMine-2017.2/bin/rubymine.sh')
assert f.exists
assert f.is_file
<commit_msg>Update testcases with proper casing<commit_after>
|
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
'.molecule/ansible_inventory').get_hosts('all')
desktop_file_location = "/root/.local/share/applications/rubymine-2017.2.desktop"
def test_desktop_file_exists(File):
f = File(desktop_file_location)
assert f.exists
assert f.is_file
def test_desktop_file_contains_fullpath(File):
f = File(desktop_file_location)
assert f.contains("/root/Tools/RubyMine-2017.2/bin/rubymine.png")
assert f.contains("/root/Tools/RubyMine-2017.2/bin/rubymine.sh")
def test_desktop_file_contains_right_name(File):
f = File(desktop_file_location)
assert f.contains("RubyMine 2017.2")
def test_start_file_exists(File):
f = File('/root/Tools/RubyMine-2017.2/bin/rubymine.sh')
assert f.exists
assert f.is_file
|
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
'.molecule/ansible_inventory').get_hosts('all')
desktop_file_location = "/root/.local/share/applications/rubymine-2017.2.desktop"
def test_desktop_file_exists(File):
f = File(desktop_file_location)
assert f.exists
assert f.is_file
def test_desktop_file_contains_fullpath(File):
f = File(desktop_file_location)
assert f.contains("/root/Tools/RubyMine-2017.2/bin/rubymine.png")
assert f.contains("/root/Tools/RubyMine-2017.2/bin/rubymine.sh")
def test_desktop_file_contains_right_name(File):
f = File(desktop_file_location)
assert f.contains("rubymine 2017.2")
def test_start_file_exists(File):
f = File('/root/Tools/RubyMine-2017.2/bin/rubymine.sh')
assert f.exists
assert f.is_file
Update testcases with proper casingimport testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
'.molecule/ansible_inventory').get_hosts('all')
desktop_file_location = "/root/.local/share/applications/rubymine-2017.2.desktop"
def test_desktop_file_exists(File):
f = File(desktop_file_location)
assert f.exists
assert f.is_file
def test_desktop_file_contains_fullpath(File):
f = File(desktop_file_location)
assert f.contains("/root/Tools/RubyMine-2017.2/bin/rubymine.png")
assert f.contains("/root/Tools/RubyMine-2017.2/bin/rubymine.sh")
def test_desktop_file_contains_right_name(File):
f = File(desktop_file_location)
assert f.contains("RubyMine 2017.2")
def test_start_file_exists(File):
f = File('/root/Tools/RubyMine-2017.2/bin/rubymine.sh')
assert f.exists
assert f.is_file
|
<commit_before>import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
'.molecule/ansible_inventory').get_hosts('all')
desktop_file_location = "/root/.local/share/applications/rubymine-2017.2.desktop"
def test_desktop_file_exists(File):
f = File(desktop_file_location)
assert f.exists
assert f.is_file
def test_desktop_file_contains_fullpath(File):
f = File(desktop_file_location)
assert f.contains("/root/Tools/RubyMine-2017.2/bin/rubymine.png")
assert f.contains("/root/Tools/RubyMine-2017.2/bin/rubymine.sh")
def test_desktop_file_contains_right_name(File):
f = File(desktop_file_location)
assert f.contains("rubymine 2017.2")
def test_start_file_exists(File):
f = File('/root/Tools/RubyMine-2017.2/bin/rubymine.sh')
assert f.exists
assert f.is_file
<commit_msg>Update testcases with proper casing<commit_after>import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
'.molecule/ansible_inventory').get_hosts('all')
desktop_file_location = "/root/.local/share/applications/rubymine-2017.2.desktop"
def test_desktop_file_exists(File):
f = File(desktop_file_location)
assert f.exists
assert f.is_file
def test_desktop_file_contains_fullpath(File):
f = File(desktop_file_location)
assert f.contains("/root/Tools/RubyMine-2017.2/bin/rubymine.png")
assert f.contains("/root/Tools/RubyMine-2017.2/bin/rubymine.sh")
def test_desktop_file_contains_right_name(File):
f = File(desktop_file_location)
assert f.contains("RubyMine 2017.2")
def test_start_file_exists(File):
f = File('/root/Tools/RubyMine-2017.2/bin/rubymine.sh')
assert f.exists
assert f.is_file
|
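Why the casing matters, as a sketch: testinfra's File.contains performs a case-sensitive, grep-style match against the file, so the lowercase string could never match the generated entry. Illustrative values only:
entry = "Name=RubyMine 2017.2"  # hypothetical line from the .desktop file
assert "RubyMine 2017.2" in entry
assert "rubymine 2017.2" not in entry  # the old assertion could not pass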
d48fd8b11fe2d9edef0ca7044df8659244a13821
|
Telegram/Telegram_Harmonbot.py
|
Telegram/Telegram_Harmonbot.py
|
import telegram
import telegram.ext
import os
import dotenv
version = "0.1.4"
# Load credentials from .env
dotenv.load_dotenv()
token = os.getenv("TELEGRAM_BOT_API_TOKEN")
bot = telegram.Bot(token = token)
updater = telegram.ext.Updater(token = token)
def test(bot, update):
bot.sendMessage(chat_id = update.message.chat_id, text = "Hello, World!")
def ping(bot, update):
bot.sendMessage(chat_id = update.message.chat_id, text = "pong")
test_handler = telegram.ext.CommandHandler("test", test)
updater.dispatcher.add_handler(test_handler)
ping_handler = telegram.ext.CommandHandler("ping", ping)
updater.dispatcher.add_handler(ping_handler)
updater.start_polling()
bot_info = bot.getMe()
print(f"Started up Telegram Harmonbot ({bot_info['username']}) ({bot_info['id']})")
if os.getenv("CI") or os.getenv("GITHUB_ACTION"):
updater.stop()
|
import telegram
import telegram.ext
import os
import dotenv
version = "0.2.0"
# Load credentials from .env
dotenv.load_dotenv()
token = os.getenv("TELEGRAM_BOT_API_TOKEN")
bot = telegram.Bot(token = token)
updater = telegram.ext.Updater(token = token, use_context = True)
def test(update, context):
context.bot.sendMessage(chat_id = update.message.chat_id, text = "Hello, World!")
def ping(update, context):
context.bot.sendMessage(chat_id = update.message.chat_id, text = "pong")
test_handler = telegram.ext.CommandHandler("test", test)
updater.dispatcher.add_handler(test_handler)
ping_handler = telegram.ext.CommandHandler("ping", ping)
updater.dispatcher.add_handler(ping_handler)
updater.start_polling()
bot_info = bot.getMe()
print(f"Started up Telegram Harmonbot ({bot_info['username']}) ({bot_info['id']})")
if os.getenv("CI") or os.getenv("GITHUB_ACTION"):
updater.stop()
|
Update to context-based callbacks
|
[Telegram] Update to context-based callbacks
|
Python
|
mit
|
Harmon758/Harmonbot,Harmon758/Harmonbot
|
import telegram
import telegram.ext
import os
import dotenv
version = "0.1.4"
# Load credentials from .env
dotenv.load_dotenv()
token = os.getenv("TELEGRAM_BOT_API_TOKEN")
bot = telegram.Bot(token = token)
updater = telegram.ext.Updater(token = token)
def test(bot, update):
bot.sendMessage(chat_id = update.message.chat_id, text = "Hello, World!")
def ping(bot, update):
bot.sendMessage(chat_id = update.message.chat_id, text = "pong")
test_handler = telegram.ext.CommandHandler("test", test)
updater.dispatcher.add_handler(test_handler)
ping_handler = telegram.ext.CommandHandler("ping", ping)
updater.dispatcher.add_handler(ping_handler)
updater.start_polling()
bot_info = bot.getMe()
print(f"Started up Telegram Harmonbot ({bot_info['username']}) ({bot_info['id']})")
if os.getenv("CI") or os.getenv("GITHUB_ACTION"):
updater.stop()
[Telegram] Update to context-based callbacks
|
import telegram
import telegram.ext
import os
import dotenv
version = "0.2.0"
# Load credentials from .env
dotenv.load_dotenv()
token = os.getenv("TELEGRAM_BOT_API_TOKEN")
bot = telegram.Bot(token = token)
updater = telegram.ext.Updater(token = token, use_context = True)
def test(update, context):
context.bot.sendMessage(chat_id = update.message.chat_id, text = "Hello, World!")
def ping(update, context):
context.bot.sendMessage(chat_id = update.message.chat_id, text = "pong")
test_handler = telegram.ext.CommandHandler("test", test)
updater.dispatcher.add_handler(test_handler)
ping_handler = telegram.ext.CommandHandler("ping", ping)
updater.dispatcher.add_handler(ping_handler)
updater.start_polling()
bot_info = bot.getMe()
print(f"Started up Telegram Harmonbot ({bot_info['username']}) ({bot_info['id']})")
if os.getenv("CI") or os.getenv("GITHUB_ACTION"):
updater.stop()
|
<commit_before>
import telegram
import telegram.ext
import os
import dotenv
version = "0.1.4"
# Load credentials from .env
dotenv.load_dotenv()
token = os.getenv("TELEGRAM_BOT_API_TOKEN")
bot = telegram.Bot(token = token)
updater = telegram.ext.Updater(token = token)
def test(bot, update):
bot.sendMessage(chat_id = update.message.chat_id, text = "Hello, World!")
def ping(bot, update):
bot.sendMessage(chat_id = update.message.chat_id, text = "pong")
test_handler = telegram.ext.CommandHandler("test", test)
updater.dispatcher.add_handler(test_handler)
ping_handler = telegram.ext.CommandHandler("ping", ping)
updater.dispatcher.add_handler(ping_handler)
updater.start_polling()
bot_info = bot.getMe()
print(f"Started up Telegram Harmonbot ({bot_info['username']}) ({bot_info['id']})")
if os.getenv("CI") or os.getenv("GITHUB_ACTION"):
updater.stop()
<commit_msg>[Telegram] Update to context-based callbacks<commit_after>
|
import telegram
import telegram.ext
import os
import dotenv
version = "0.2.0"
# Load credentials from .env
dotenv.load_dotenv()
token = os.getenv("TELEGRAM_BOT_API_TOKEN")
bot = telegram.Bot(token = token)
updater = telegram.ext.Updater(token = token, use_context = True)
def test(update, context):
context.bot.sendMessage(chat_id = update.message.chat_id, text = "Hello, World!")
def ping(update, context):
context.bot.sendMessage(chat_id = update.message.chat_id, text = "pong")
test_handler = telegram.ext.CommandHandler("test", test)
updater.dispatcher.add_handler(test_handler)
ping_handler = telegram.ext.CommandHandler("ping", ping)
updater.dispatcher.add_handler(ping_handler)
updater.start_polling()
bot_info = bot.getMe()
print(f"Started up Telegram Harmonbot ({bot_info['username']}) ({bot_info['id']})")
if os.getenv("CI") or os.getenv("GITHUB_ACTION"):
updater.stop()
|
import telegram
import telegram.ext
import os
import dotenv
version = "0.1.4"
# Load credentials from .env
dotenv.load_dotenv()
token = os.getenv("TELEGRAM_BOT_API_TOKEN")
bot = telegram.Bot(token = token)
updater = telegram.ext.Updater(token = token)
def test(bot, update):
bot.sendMessage(chat_id = update.message.chat_id, text = "Hello, World!")
def ping(bot, update):
bot.sendMessage(chat_id = update.message.chat_id, text = "pong")
test_handler = telegram.ext.CommandHandler("test", test)
updater.dispatcher.add_handler(test_handler)
ping_handler = telegram.ext.CommandHandler("ping", ping)
updater.dispatcher.add_handler(ping_handler)
updater.start_polling()
bot_info = bot.getMe()
print(f"Started up Telegram Harmonbot ({bot_info['username']}) ({bot_info['id']})")
if os.getenv("CI") or os.getenv("GITHUB_ACTION"):
updater.stop()
[Telegram] Update to context-based callbacks
import telegram
import telegram.ext
import os
import dotenv
version = "0.2.0"
# Load credentials from .env
dotenv.load_dotenv()
token = os.getenv("TELEGRAM_BOT_API_TOKEN")
bot = telegram.Bot(token = token)
updater = telegram.ext.Updater(token = token, use_context = True)
def test(update, context):
context.bot.sendMessage(chat_id = update.message.chat_id, text = "Hello, World!")
def ping(update, context):
context.bot.sendMessage(chat_id = update.message.chat_id, text = "pong")
test_handler = telegram.ext.CommandHandler("test", test)
updater.dispatcher.add_handler(test_handler)
ping_handler = telegram.ext.CommandHandler("ping", ping)
updater.dispatcher.add_handler(ping_handler)
updater.start_polling()
bot_info = bot.getMe()
print(f"Started up Telegram Harmonbot ({bot_info['username']}) ({bot_info['id']})")
if os.getenv("CI") or os.getenv("GITHUB_ACTION"):
updater.stop()
|
<commit_before>
import telegram
import telegram.ext
import os
import dotenv
version = "0.1.4"
# Load credentials from .env
dotenv.load_dotenv()
token = os.getenv("TELEGRAM_BOT_API_TOKEN")
bot = telegram.Bot(token = token)
updater = telegram.ext.Updater(token = token)
def test(bot, update):
bot.sendMessage(chat_id = update.message.chat_id, text = "Hello, World!")
def ping(bot, update):
bot.sendMessage(chat_id = update.message.chat_id, text = "pong")
test_handler = telegram.ext.CommandHandler("test", test)
updater.dispatcher.add_handler(test_handler)
ping_handler = telegram.ext.CommandHandler("ping", ping)
updater.dispatcher.add_handler(ping_handler)
updater.start_polling()
bot_info = bot.getMe()
print(f"Started up Telegram Harmonbot ({bot_info['username']}) ({bot_info['id']})")
if os.getenv("CI") or os.getenv("GITHUB_ACTION"):
updater.stop()
<commit_msg>[Telegram] Update to context-based callbacks<commit_after>
import telegram
import telegram.ext
import os
import dotenv
version = "0.2.0"
# Load credentials from .env
dotenv.load_dotenv()
token = os.getenv("TELEGRAM_BOT_API_TOKEN")
bot = telegram.Bot(token = token)
updater = telegram.ext.Updater(token = token, use_context = True)
def test(update, context):
context.bot.sendMessage(chat_id = update.message.chat_id, text = "Hello, World!")
def ping(update, context):
context.bot.sendMessage(chat_id = update.message.chat_id, text = "pong")
test_handler = telegram.ext.CommandHandler("test", test)
updater.dispatcher.add_handler(test_handler)
ping_handler = telegram.ext.CommandHandler("ping", ping)
updater.dispatcher.add_handler(ping_handler)
updater.start_polling()
bot_info = bot.getMe()
print(f"Started up Telegram Harmonbot ({bot_info['username']}) ({bot_info['id']})")
if os.getenv("CI") or os.getenv("GITHUB_ACTION"):
updater.stop()
|
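For reference, a sketch of the signature change (the extra handler is hypothetical): with use_context=True, introduced in python-telegram-bot 12 as far as the upstream changelog indicates, callbacks receive (update, context) instead of (bot, update), and the bot is reached through the context:
def echo(update, context):  # hypothetical additional handler
    # Everything besides the update (bot, args, chat/user data) hangs
    # off the context object.
    context.bot.sendMessage(chat_id = update.message.chat_id,
                            text = update.message.text)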
a174b827b36293d90babfcdf557bdbb9c9d0b655
|
ibei/__init__.py
|
ibei/__init__.py
|
# -*- coding: utf-8 -*-
"""
=========================
Base Library (:mod:`ibei`)
=========================
.. currentmodule:: ibei
"""
from main import uibei, SQSolarcell, DeVosSolarcell
|
# -*- coding: utf-8 -*-
"""
=========================
Base Library (:mod:`ibei`)
=========================
.. currentmodule:: ibei
"""
from main import uibei, SQSolarcell, DeVosSolarcell
__version__ = "0.0.2"
|
Add version information in module
|
Add version information in module
|
Python
|
mit
|
jrsmith3/tec,jrsmith3/ibei,jrsmith3/tec
|
# -*- coding: utf-8 -*-
"""
=========================
Base Library (:mod:`ibei`)
=========================
.. currentmodule:: ibei
"""
from main import uibei, SQSolarcell, DeVosSolarcell
Add version information in module
|
# -*- coding: utf-8 -*-
"""
=========================
Base Library (:mod:`ibei`)
=========================
.. currentmodule:: ibei
"""
from main import uibei, SQSolarcell, DeVosSolarcell
__version__ = "0.0.2"
|
<commit_before># -*- coding: utf-8 -*-
"""
=========================
Base Library (:mod:`ibei`)
=========================
.. currentmodule:: ibei
"""
from main import uibei, SQSolarcell, DeVosSolarcell
<commit_msg>Add version information in module<commit_after>
|
# -*- coding: utf-8 -*-
"""
=========================
Base Library (:mod:`ibei`)
=========================
.. currentmodule:: ibei
"""
from main import uibei, SQSolarcell, DeVosSolarcell
__version__ = "0.0.2"
|
# -*- coding: utf-8 -*-
"""
=========================
Base Library (:mod:`ibei`)
=========================
.. currentmodule:: ibei
"""
from main import uibei, SQSolarcell, DeVosSolarcell
Add version information in module# -*- coding: utf-8 -*-
"""
=========================
Base Library (:mod:`ibei`)
=========================
.. currentmodule:: ibei
"""
from main import uibei, SQSolarcell, DeVosSolarcell
__version__ = "0.0.2"
|
<commit_before># -*- coding: utf-8 -*-
"""
=========================
Base Library (:mod:`ibei`)
=========================
.. currentmodule:: ibei
"""
from main import uibei, SQSolarcell, DeVosSolarcell
<commit_msg>Add version information in module<commit_after># -*- coding: utf-8 -*-
"""
=========================
Base Library (:mod:`ibei`)
=========================
.. currentmodule:: ibei
"""
from main import uibei, SQSolarcell, DeVosSolarcell
__version__ = "0.0.2"
|
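A quick sketch of the convention this record adopts (a PEP 396-style dunder version); the import assumes the package above is installed:
import ibei
print(ibei.__version__)  # "0.0.2"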
aeb3ce72205051039e6339f83a2b7dec37f8b8c9
|
idlk/__init__.py
|
idlk/__init__.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
import sys
import idlk.base41
if sys.version_info[0] == 3:
_get_byte = lambda c: c
else:
_get_byte = ord
def hash_macroman(data):
h = 0
for c in data:
h = ((h << 8) + h) + _get_byte(c)
return h % 0xFFFEECED
def idlk(filename):
# Convert to lowercase first.
filename = filename.lower()
# The original algorithm seems to prefer Mac Roman encoding as long as
# there are no non-mappable characters in the file name.
try:
macroman_name = filename.encode("macroman")
except UnicodeEncodeError:
pass
else:
hashed = base41.encode(hash_macroman(macroman_name))
base, ext = os.path.splitext(macroman_name)
return "~{:s}~{:s}.idlk".format(base[0:18].decode("macroman"), hashed)
# Regrettably the encoding / hashing algorithm for unicode filenames is
# not currently known. Please file a feature request/patch if you
# discover a working implementation.
return False
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
import sys
import unicodedata
import idlk.base41
if sys.version_info[0] == 3:
_get_byte = lambda c: c
else:
_get_byte = ord
def hash_macroman(data):
h = 0
for c in data:
h = ((h << 8) + h) + _get_byte(c)
return h % 0xFFFEECED
def idlk(filename):
# Normalize to NFC.
filename = unicodedata.normalize('NFC', filename)
# Convert to lowercase first.
filename = filename.lower()
# The original algorithm seems to prefer Mac Roman encoding as long as
# there are no non-mappable characters in the file name.
try:
macroman_name = filename.encode("macroman")
except UnicodeEncodeError:
pass
else:
hashed = base41.encode(hash_macroman(macroman_name))
base, ext = os.path.splitext(macroman_name)
return "~{:s}~{:s}.idlk".format(base[0:18].decode("macroman"), hashed)
# Regrettably the encoding / hashing algorithm for unicode filenames is
# not currently known. Please file a feature request/patch if you
# discover a working implementation.
return False
|
Normalize filename to NFC before computing the hash
|
Normalize filename to NFC before computing the hash
|
Python
|
mit
|
znerol/py-idlk
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
import sys
import idlk.base41
if sys.version_info[0] == 3:
_get_byte = lambda c: c
else:
_get_byte = ord
def hash_macroman(data):
h = 0
for c in data:
h = ((h << 8) + h) + _get_byte(c)
return h % 0xFFFEECED
def idlk(filename):
# Convert to lowercase first.
filename = filename.lower()
# The original algorithm seems to prefer Mac Roman encoding as long as
# there are no non-mappable characters in the file name.
try:
macroman_name = filename.encode("macroman")
except UnicodeEncodeError:
pass
else:
hashed = base41.encode(hash_macroman(macroman_name))
base, ext = os.path.splitext(macroman_name)
return "~{:s}~{:s}.idlk".format(base[0:18].decode("macroman"), hashed)
# Regrettably the encoding / hashing algorithm for unicode filenames is
# not currently known. Please file a feature request/patch if you
# discover a working implementation.
return False
Normalize filename to NFC before computing the hash
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
import sys
import unicodedata
import idlk.base41
if sys.version_info[0] == 3:
_get_byte = lambda c: c
else:
_get_byte = ord
def hash_macroman(data):
h = 0
for c in data:
h = ((h << 8) + h) + _get_byte(c)
return h % 0xFFFEECED
def idlk(filename):
# Normalize to NFC.
filename = unicodedata.normalize('NFC', filename)
# Convert to lowercase first.
filename = filename.lower()
# The original algorithm seems to prefer Mac Roman encoding as long as
# there are no non-mappable characters in the file name.
try:
macroman_name = filename.encode("macroman")
except UnicodeEncodeError:
pass
else:
hashed = base41.encode(hash_macroman(macroman_name))
base, ext = os.path.splitext(macroman_name)
return "~{:s}~{:s}.idlk".format(base[0:18].decode("macroman"), hashed)
# Regrettably the encoding / hashing algorithm for unicode filenames is
# not currently known. Please file a feature request/patch if you
# discover a working implementation.
return False
|
<commit_before>from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
import sys
import idlk.base41
if sys.version_info[0] == 3:
_get_byte = lambda c: c
else:
_get_byte = ord
def hash_macroman(data):
h = 0
for c in data:
h = ((h << 8) + h) + _get_byte(c)
return h % 0xFFFEECED
def idlk(filename):
# Convert to lowercase first.
filename = filename.lower()
# The original algorithm seems to prefer Mac Roman encoding as long as
# there are no non-mappable characters in the file name.
try:
macroman_name = filename.encode("macroman")
except UnicodeEncodeError:
pass
else:
hashed = base41.encode(hash_macroman(macroman_name))
base, ext = os.path.splitext(macroman_name)
return "~{:s}~{:s}.idlk".format(base[0:18].decode("macroman"), hashed)
# Regrettably the encoding / hashing algorithm for unicode filenames is
# not currently known. Please file a feature request/patch if you
# discover a working implementation.
return False
<commit_msg>Normalize filename to NFC before computing the hash<commit_after>
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
import sys
import unicodedata
import idlk.base41
if sys.version_info[0] == 3:
_get_byte = lambda c: c
else:
_get_byte = ord
def hash_macroman(data):
h = 0
for c in data:
h = ((h << 8) + h) + _get_byte(c)
return h % 0xFFFEECED
def idlk(filename):
# Normalize to NFC.
filename = unicodedata.normalize('NFC', filename)
# Convert to lowercase first.
filename = filename.lower()
# The original algorithm seems to prefer Mac Roman encoding as long as
# there are no non-mappable characters in the file name.
try:
macroman_name = filename.encode("macroman")
except UnicodeEncodeError:
pass
else:
hashed = base41.encode(hash_macroman(macroman_name))
base, ext = os.path.splitext(macroman_name)
return "~{:s}~{:s}.idlk".format(base[0:18].decode("macroman"), hashed)
# Regrettably the encoding / hashing algorithm for unicode filenames is
# not currently known. Please file a feature request/patch if you
# discover a working implementation.
return False
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
import sys
import idlk.base41
if sys.version_info[0] == 3:
_get_byte = lambda c: c
else:
_get_byte = ord
def hash_macroman(data):
h = 0
for c in data:
h = ((h << 8) + h) + _get_byte(c)
return h % 0xFFFEECED
def idlk(filename):
# Convert to lowercase first.
filename = filename.lower()
# The original algorithm seems to prefer Mac Roman encoding as long as
# there are no non-mappable characters in the file name.
try:
macroman_name = filename.encode("macroman")
except UnicodeEncodeError:
pass
else:
hashed = base41.encode(hash_macroman(macroman_name))
base, ext = os.path.splitext(macroman_name)
return "~{:s}~{:s}.idlk".format(base[0:18].decode("macroman"), hashed)
# Regrettably the encoding / hashing algorithm for unicode filenames is
# not currently known. Please file a feature request/patch if you
# discover a working implementation.
return False
Normalize filename to NFC before computing the hashfrom __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
import sys
import unicodedata
import idlk.base41
if sys.version_info[0] == 3:
_get_byte = lambda c: c
else:
_get_byte = ord
def hash_macroman(data):
h = 0
for c in data:
h = ((h << 8) + h) + _get_byte(c)
return h % 0xFFFEECED
def idlk(filename):
# Normalize to NFC.
filename = unicodedata.normalize('NFC', filename)
# Convert to lowercase first.
filename = filename.lower()
# The original algorithm seems to prefer Mac Roman encoding as long as
# there are no non-mappable characters in the file name.
try:
macroman_name = filename.encode("macroman")
except UnicodeEncodeError:
pass
else:
hashed = base41.encode(hash_macroman(macroman_name))
base, ext = os.path.splitext(macroman_name)
return "~{:s}~{:s}.idlk".format(base[0:18].decode("macroman"), hashed)
# Regrettably the encoding / hashing algorithm for unicode filenames is
# not currently known. Please file a feature request/patch if you
# discover a working implementation.
return False
|
<commit_before>from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
import sys
import idlk.base41
if sys.version_info[0] == 3:
_get_byte = lambda c: c
else:
_get_byte = ord
def hash_macroman(data):
h = 0
for c in data:
h = ((h << 8) + h) + _get_byte(c)
return h % 0xFFFEECED
def idlk(filename):
# Convert to lowercase first.
filename = filename.lower()
# The original algorithm seems to prefer Mac Roman encoding as long as
# there are no non-mappable characters in the file name.
try:
macroman_name = filename.encode("macroman")
except UnicodeEncodeError:
pass
else:
hashed = base41.encode(hash_macroman(macroman_name))
base, ext = os.path.splitext(macroman_name)
return "~{:s}~{:s}.idlk".format(base[0:18].decode("macroman"), hashed)
# Regrettably the encoding / hashing algorithm for unicode filenames is
# not currently known. Please file a feature request/patch if you
# discover a working implementation.
return False
<commit_msg>Normalize filename to NFC before computing the hash<commit_after>from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
import sys
import unicodedata
import idlk.base41
if sys.version_info[0] == 3:
_get_byte = lambda c: c
else:
_get_byte = ord
def hash_macroman(data):
h = 0
for c in data:
h = ((h << 8) + h) + _get_byte(c)
return h % 0xFFFEECED
def idlk(filename):
# Normalize to NFC.
filename = unicodedata.normalize('NFC', filename)
# Convert to lowercase first.
filename = filename.lower()
# The original algorithm seems to prefer Mac Roman encoding as long as
# there are no non-mappable characters in the file name.
try:
macroman_name = filename.encode("macroman")
except UnicodeEncodeError:
pass
else:
hashed = base41.encode(hash_macroman(macroman_name))
base, ext = os.path.splitext(macroman_name)
return "~{:s}~{:s}.idlk".format(base[0:18].decode("macroman"), hashed)
# Regrettably the encoding / hashing algorithm for unicode filenames is
# not currently known. Please file a feature request/patch if you
# discover a working implementation.
return False
|
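A short sketch of why the NFC step in this record matters: the same visible filename can arrive precomposed (NFC) or decomposed (NFD), and without normalization the two spellings hash to different values. UTF-8 is used below purely for illustration, since Mac Roman cannot encode the decomposed form:

import unicodedata

def toy_hash(data):
    h = 0
    for b in data:                                    # bytes iterate as ints on Python 3
        h = ((h << 8) + h) + b
    return h % 0xFFFEECED

nfc = unicodedata.normalize("NFC", "cafe\u0301.indd")   # "e" + combining acute -> precomposed "é"
nfd = unicodedata.normalize("NFD", "caf\u00e9.indd")    # precomposed "é" -> "e" + U+0301
print(nfc == nfd)                                       # False: distinct code point sequences
print(toy_hash(nfc.encode("utf-8")), toy_hash(nfd.encode("utf-8")))  # two different hashes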
9fb8b0a72740ba155c76a5812706612b656980f4
|
openprocurement/auctions/flash/constants.py
|
openprocurement/auctions/flash/constants.py
|
# -*- coding: utf-8 -*-
VIEW_LOCATIONS = [
"openprocurement.auctions.flash.views",
"openprocurement.auctions.core.plugins",
]
|
# -*- coding: utf-8 -*-
VIEW_LOCATIONS = [
"openprocurement.auctions.flash.views",
]
|
Add view_locations for plugins in core
|
Add view_locations for plugins in core
|
Python
|
apache-2.0
|
openprocurement/openprocurement.auctions.flash
|
# -*- coding: utf-8 -*-
VIEW_LOCATIONS = [
"openprocurement.auctions.flash.views",
"openprocurement.auctions.core.plugins",
]
Add view_locations for plugins in core
|
# -*- coding: utf-8 -*-
VIEW_LOCATIONS = [
"openprocurement.auctions.flash.views",
]
|
<commit_before># -*- coding: utf-8 -*-
VIEW_LOCATIONS = [
"openprocurement.auctions.flash.views",
"openprocurement.auctions.core.plugins",
]
<commit_msg>Add view_locations for plugins in core<commit_after>
|
# -*- coding: utf-8 -*-
VIEW_LOCATIONS = [
"openprocurement.auctions.flash.views",
]
|
# -*- coding: utf-8 -*-
VIEW_LOCATIONS = [
"openprocurement.auctions.flash.views",
"openprocurement.auctions.core.plugins",
]
Add view_locations for plugins in core# -*- coding: utf-8 -*-
VIEW_LOCATIONS = [
"openprocurement.auctions.flash.views",
]
|
<commit_before># -*- coding: utf-8 -*-
VIEW_LOCATIONS = [
"openprocurement.auctions.flash.views",
"openprocurement.auctions.core.plugins",
]
<commit_msg>Add view_locations for plugins in core<commit_after># -*- coding: utf-8 -*-
VIEW_LOCATIONS = [
"openprocurement.auctions.flash.views",
]
|
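For context, a hedged sketch of how a view-locations list like the one in this record is typically consumed in a Pyramid application factory; the `main` wiring below is illustrative and not taken from the repository:

from pyramid.config import Configurator

from openprocurement.auctions.flash.constants import VIEW_LOCATIONS

def main(global_config, **settings):
    config = Configurator(settings=settings)
    for location in VIEW_LOCATIONS:
        config.scan(location)        # registers @view_config-decorated views in each package
    return config.make_wsgi_app()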
b66b9a2e329bf7a68c41bf07a1444c9d49a0b6c8
|
app.py
|
app.py
|
# coding: utf-8
import os
import time
from twython import Twython
import requests
APP_KEY = os.environ.get('APP_KEY')
APP_SECRET = os.environ.get('APP_SECRET')
OAUTH_TOKEN = os.environ.get('OAUTH_TOKEN')
OAUTH_TOKEN_SECRET = os.environ.get('OAUTH_TOKEN_SECRET')
twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
def post_tweet(currency):
template = """
{name} - {symbol}
Price: ${price_usd}
Change in 1h: {percent_change_1h}%
Market cap: ${market_cap_usd}
Ranking: {rank}
#{name} #{symbol}
"""
if currency['percent_change_1h'] > 0:
currency['percent_change_1h'] = '+{}'.format(currency['percent_change_1h'])
twitter.update_status(status=template.format(**currency))
def main():
response = requests.get('https://api.coinmarketcap.com/v1/ticker/')
for currency in sorted(response.json(), key=lambda x: x['rank'])[:10]:
post_tweet(currency)
time.sleep(5)
if __name__ == '__main__':
main()
|
# coding: utf-8
import os
import time
from twython import Twython
import requests
APP_KEY = os.environ.get('APP_KEY')
APP_SECRET = os.environ.get('APP_SECRET')
OAUTH_TOKEN = os.environ.get('OAUTH_TOKEN')
OAUTH_TOKEN_SECRET = os.environ.get('OAUTH_TOKEN_SECRET')
twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
def post_tweet(currency):
template = """
{name} - {symbol}
Price: ${price_usd}
Change in 1h: {percent_change_1h}%
Market cap: ${market_cap_usd}
Ranking: {rank}
#{name} #{symbol}
"""
if currency['percent_change_1h'] > 0:
currency['percent_change_1h'] = '+{}'.format(currency['percent_change_1h'])
twitter.update_status(status=template.format(**currency))
def main():
response = requests.get('https://api.coinmarketcap.com/v1/ticker/')
for currency in sorted(response.json(), key=lambda x: int(x['rank']))[:10]:
post_tweet(currency)
time.sleep(5)
if __name__ == '__main__':
main()
|
Fix error with string rank value
|
Fix error with string rank value
|
Python
|
mit
|
erickgnavar/coinstats
|
# coding: utf-8
import os
import time
from twython import Twython
import requests
APP_KEY = os.environ.get('APP_KEY')
APP_SECRET = os.environ.get('APP_SECRET')
OAUTH_TOKEN = os.environ.get('OAUTH_TOKEN')
OAUTH_TOKEN_SECRET = os.environ.get('OAUTH_TOKEN_SECRET')
twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
def post_tweet(currency):
template = """
{name} - {symbol}
Price: ${price_usd}
Change in 1h: {percent_change_1h}%
Market cap: ${market_cap_usd}
Ranking: {rank}
#{name} #{symbol}
"""
if currency['percent_change_1h'] > 0:
currency['percent_change_1h'] = '+{}'.format(currency['percent_change_1h'])
twitter.update_status(status=template.format(**currency))
def main():
response = requests.get('https://api.coinmarketcap.com/v1/ticker/')
for currency in sorted(response.json(), key=lambda x: x['rank'])[:10]:
post_tweet(currency)
time.sleep(5)
if __name__ == '__main__':
main()
Fix error with string rank value
|
# coding: utf-8
import os
import time
from twython import Twython
import requests
APP_KEY = os.environ.get('APP_KEY')
APP_SECRET = os.environ.get('APP_SECRET')
OAUTH_TOKEN = os.environ.get('OAUTH_TOKEN')
OAUTH_TOKEN_SECRET = os.environ.get('OAUTH_TOKEN_SECRET')
twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
def post_tweet(currency):
template = """
{name} - {symbol}
Price: ${price_usd}
Change in 1h: {percent_change_1h}%
Market cap: ${market_cap_usd}
Ranking: {rank}
#{name} #{symbol}
"""
if currency['percent_change_1h'] > 0:
currency['percent_change_1h'] = '+{}'.format(currency['percent_change_1h'])
twitter.update_status(status=template.format(**currency))
def main():
response = requests.get('https://api.coinmarketcap.com/v1/ticker/')
for currency in sorted(response.json(), key=lambda x: int(x['rank']))[:10]:
post_tweet(currency)
time.sleep(5)
if __name__ == '__main__':
main()
|
<commit_before># coding: utf-8
import os
import time
from twython import Twython
import requests
APP_KEY = os.environ.get('APP_KEY')
APP_SECRET = os.environ.get('APP_SECRET')
OAUTH_TOKEN = os.environ.get('OAUTH_TOKEN')
OAUTH_TOKEN_SECRET = os.environ.get('OAUTH_TOKEN_SECRET')
twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
def post_tweet(currency):
template = """
{name} - {symbol}
Price: ${price_usd}
Change in 1h: {percent_change_1h}%
Market cap: ${market_cap_usd}
Ranking: {rank}
#{name} #{symbol}
"""
if currency['percent_change_1h'] > 0:
currency['percent_change_1h'] = '+{}'.format(currency['percent_change_1h'])
twitter.update_status(status=template.format(**currency))
def main():
response = requests.get('https://api.coinmarketcap.com/v1/ticker/')
for currency in sorted(response.json(), key=lambda x: x['rank'])[:10]:
post_tweet(currency)
time.sleep(5)
if __name__ == '__main__':
main()
<commit_msg>Fix error with string rank value<commit_after>
|
# coding: utf-8
import os
import time
from twython import Twython
import requests
APP_KEY = os.environ.get('APP_KEY')
APP_SECRET = os.environ.get('APP_SECRET')
OAUTH_TOKEN = os.environ.get('OAUTH_TOKEN')
OAUTH_TOKEN_SECRET = os.environ.get('OAUTH_TOKEN_SECRET')
twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
def post_tweet(currency):
template = """
{name} - {symbol}
Price: ${price_usd}
Change in 1h: {percent_change_1h}%
Market cap: ${market_cap_usd}
Ranking: {rank}
#{name} #{symbol}
"""
if currency['percent_change_1h'] > 0:
currency['percent_change_1h'] = '+{}'.format(currency['percent_change_1h'])
twitter.update_status(status=template.format(**currency))
def main():
response = requests.get('https://api.coinmarketcap.com/v1/ticker/')
for currency in sorted(response.json(), key=lambda x: int(x['rank']))[:10]:
post_tweet(currency)
time.sleep(5)
if __name__ == '__main__':
main()
|
# coding: utf-8
import os
import time
from twython import Twython
import requests
APP_KEY = os.environ.get('APP_KEY')
APP_SECRET = os.environ.get('APP_SECRET')
OAUTH_TOKEN = os.environ.get('OAUTH_TOKEN')
OAUTH_TOKEN_SECRET = os.environ.get('OAUTH_TOKEN_SECRET')
twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
def post_tweet(currency):
template = """
{name} - {symbol}
Price: ${price_usd}
Change in 1h: {percent_change_1h}%
Market cap: ${market_cap_usd}
Ranking: {rank}
#{name} #{symbol}
"""
if currency['percent_change_1h'] > 0:
currency['percent_change_1h'] = '+{}'.format(currency['percent_change_1h'])
twitter.update_status(status=template.format(**currency))
def main():
response = requests.get('https://api.coinmarketcap.com/v1/ticker/')
for currency in sorted(response.json(), key=lambda x: x['rank'])[:10]:
post_tweet(currency)
time.sleep(5)
if __name__ == '__main__':
main()
Fix error with string rank value# coding: utf-8
import os
import time
from twython import Twython
import requests
APP_KEY = os.environ.get('APP_KEY')
APP_SECRET = os.environ.get('APP_SECRET')
OAUTH_TOKEN = os.environ.get('OAUTH_TOKEN')
OAUTH_TOKEN_SECRET = os.environ.get('OAUTH_TOKEN_SECRET')
twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
def post_tweet(currency):
template = """
{name} - {symbol}
Price: ${price_usd}
Change in 1h: {percent_change_1h}%
Market cap: ${market_cap_usd}
Ranking: {rank}
#{name} #{symbol}
"""
if currency['percent_change_1h'] > 0:
currency['percent_change_1h'] = '+{}'.format(currency['percent_change_1h'])
twitter.update_status(status=template.format(**currency))
def main():
response = requests.get('https://api.coinmarketcap.com/v1/ticker/')
for currency in sorted(response.json(), key=lambda x: int(x['rank']))[:10]:
post_tweet(currency)
time.sleep(5)
if __name__ == '__main__':
main()
|
<commit_before># coding: utf-8
import os
import time
from twython import Twython
import requests
APP_KEY = os.environ.get('APP_KEY')
APP_SECRET = os.environ.get('APP_SECRET')
OAUTH_TOKEN = os.environ.get('OAUTH_TOKEN')
OAUTH_TOKEN_SECRET = os.environ.get('OAUTH_TOKEN_SECRET')
twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
def post_tweet(currency):
template = """
{name} - {symbol}
Price: ${price_usd}
Change in 1h: {percent_change_1h}%
Market cap: ${market_cap_usd}
Ranking: {rank}
#{name} #{symbol}
"""
if currency['percent_change_1h'] > 0:
currency['percent_change_1h'] = '+{}'.format(currency['percent_change_1h'])
twitter.update_status(status=template.format(**currency))
def main():
response = requests.get('https://api.coinmarketcap.com/v1/ticker/')
for currency in sorted(response.json(), key=lambda x: x['rank'])[:10]:
post_tweet(currency)
time.sleep(5)
if __name__ == '__main__':
main()
<commit_msg>Fix error with string rank value<commit_after># coding: utf-8
import os
import time
from twython import Twython
import requests
APP_KEY = os.environ.get('APP_KEY')
APP_SECRET = os.environ.get('APP_SECRET')
OAUTH_TOKEN = os.environ.get('OAUTH_TOKEN')
OAUTH_TOKEN_SECRET = os.environ.get('OAUTH_TOKEN_SECRET')
twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
def post_tweet(currency):
template = """
{name} - {symbol}
Price: ${price_usd}
Change in 1h: {percent_change_1h}%
Market cap: ${market_cap_usd}
Ranking: {rank}
#{name} #{symbol}
"""
if currency['percent_change_1h'] > 0:
currency['percent_change_1h'] = '+{}'.format(currency['percent_change_1h'])
twitter.update_status(status=template.format(**currency))
def main():
response = requests.get('https://api.coinmarketcap.com/v1/ticker/')
for currency in sorted(response.json(), key=lambda x: int(x['rank']))[:10]:
post_tweet(currency)
time.sleep(5)
if __name__ == '__main__':
main()
|
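The one-character fix in this record is easy to demonstrate: the ticker API returns "rank" as a string, and string sorting is lexicographic, so "10" sorts before "2" until the values are cast to int:

ranks = ["1", "10", "2", "21", "3"]
print(sorted(ranks))             # ['1', '10', '2', '21', '3'] -- lexicographic, wrong
print(sorted(ranks, key=int))    # ['1', '2', '3', '10', '21'] -- numeric, as intended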
8d9f3214cc5663dc29f7dcf3a03bc373a51d010b
|
core/admin/start.py
|
core/admin/start.py
|
#!/usr/bin/python3
import os
import logging as log
import sys
log.basicConfig(stream=sys.stderr, level=os.environ.get("LOG_LEVEL", "INFO"))
os.system("flask mailu advertise")
os.system("flask db upgrade")
account = os.environ.get("INITIAL_ADMIN_ACCOUNT")
domain = os.environ.get("INITIAL_ADMIN_DOMAIN")
password = os.environ.get("INITIAL_ADMIN_PW")
if account is not None and domain is not None and password is not None:
mode = os.environ.get("INITIAL_ADMIN_MODE", default="ifmissing")
log.info("Creating initial admin accout %s@%s with mode %s",account,domain,mode)
os.system("flask mailu admin %s %s '%s' --mode %s" % (account, domain, password, mode))
start_command="".join([
"gunicorn -w 4 -b :80 ",
"--access-logfile - " if (log.root.level<=log.INFO) else "",
"--error-logfile - ",
"--preload ",
"'mailu:create_app()'"])
os.system(start_command)
|
#!/usr/bin/python3
import os
import logging as log
import sys
log.basicConfig(stream=sys.stderr, level=os.environ.get("LOG_LEVEL", "INFO"))
os.system("flask mailu advertise")
os.system("flask db upgrade")
account = os.environ.get("INITIAL_ADMIN_ACCOUNT")
domain = os.environ.get("INITIAL_ADMIN_DOMAIN")
password = os.environ.get("INITIAL_ADMIN_PW")
if account is not None and domain is not None and password is not None:
mode = os.environ.get("INITIAL_ADMIN_MODE", default="ifmissing")
log.info("Creating initial admin accout %s@%s with mode %s",account,domain,mode)
os.system("flask mailu admin %s %s '%s' --mode %s" % (account, domain, password, mode))
start_command="".join([
"gunicorn --threads ", str(os.cpu_count()),
" -b :80 ",
"--access-logfile - " if (log.root.level<=log.INFO) else "",
"--error-logfile - ",
"--preload ",
"'mailu:create_app()'"])
os.system(start_command)
|
Use threads in gunicorn rather than processes
|
Use threads in gunicorn rather than processes
This ensures that we share the auth-cache... will enable memory savings
and may improve performance when a higher number of cores is available
"smarter default"
|
Python
|
mit
|
kaiyou/freeposte.io,kaiyou/freeposte.io,kaiyou/freeposte.io,kaiyou/freeposte.io
|
#!/usr/bin/python3
import os
import logging as log
import sys
log.basicConfig(stream=sys.stderr, level=os.environ.get("LOG_LEVEL", "INFO"))
os.system("flask mailu advertise")
os.system("flask db upgrade")
account = os.environ.get("INITIAL_ADMIN_ACCOUNT")
domain = os.environ.get("INITIAL_ADMIN_DOMAIN")
password = os.environ.get("INITIAL_ADMIN_PW")
if account is not None and domain is not None and password is not None:
mode = os.environ.get("INITIAL_ADMIN_MODE", default="ifmissing")
log.info("Creating initial admin accout %s@%s with mode %s",account,domain,mode)
os.system("flask mailu admin %s %s '%s' --mode %s" % (account, domain, password, mode))
start_command="".join([
"gunicorn -w 4 -b :80 ",
"--access-logfile - " if (log.root.level<=log.INFO) else "",
"--error-logfile - ",
"--preload ",
"'mailu:create_app()'"])
os.system(start_command)
Use threads in gunicorn rather than processes
This ensures that we share the auth-cache... will enable memory savings
and may improve performance when a higher number of cores is available
"smarter default"
|
#!/usr/bin/python3
import os
import logging as log
import sys
log.basicConfig(stream=sys.stderr, level=os.environ.get("LOG_LEVEL", "INFO"))
os.system("flask mailu advertise")
os.system("flask db upgrade")
account = os.environ.get("INITIAL_ADMIN_ACCOUNT")
domain = os.environ.get("INITIAL_ADMIN_DOMAIN")
password = os.environ.get("INITIAL_ADMIN_PW")
if account is not None and domain is not None and password is not None:
mode = os.environ.get("INITIAL_ADMIN_MODE", default="ifmissing")
log.info("Creating initial admin accout %s@%s with mode %s",account,domain,mode)
os.system("flask mailu admin %s %s '%s' --mode %s" % (account, domain, password, mode))
start_command="".join([
"gunicorn --threads ", str(os.cpu_count()),
" -b :80 ",
"--access-logfile - " if (log.root.level<=log.INFO) else "",
"--error-logfile - ",
"--preload ",
"'mailu:create_app()'"])
os.system(start_command)
|
<commit_before>#!/usr/bin/python3
import os
import logging as log
import sys
log.basicConfig(stream=sys.stderr, level=os.environ.get("LOG_LEVEL", "INFO"))
os.system("flask mailu advertise")
os.system("flask db upgrade")
account = os.environ.get("INITIAL_ADMIN_ACCOUNT")
domain = os.environ.get("INITIAL_ADMIN_DOMAIN")
password = os.environ.get("INITIAL_ADMIN_PW")
if account is not None and domain is not None and password is not None:
mode = os.environ.get("INITIAL_ADMIN_MODE", default="ifmissing")
log.info("Creating initial admin accout %s@%s with mode %s",account,domain,mode)
os.system("flask mailu admin %s %s '%s' --mode %s" % (account, domain, password, mode))
start_command="".join([
"gunicorn -w 4 -b :80 ",
"--access-logfile - " if (log.root.level<=log.INFO) else "",
"--error-logfile - ",
"--preload ",
"'mailu:create_app()'"])
os.system(start_command)
<commit_msg>Use threads in gunicorn rather than processes
This ensures that we share the auth-cache... will enable memory savings
and may improve performance when a higher number of cores is available
"smarter default"<commit_after>
|
#!/usr/bin/python3
import os
import logging as log
import sys
log.basicConfig(stream=sys.stderr, level=os.environ.get("LOG_LEVEL", "INFO"))
os.system("flask mailu advertise")
os.system("flask db upgrade")
account = os.environ.get("INITIAL_ADMIN_ACCOUNT")
domain = os.environ.get("INITIAL_ADMIN_DOMAIN")
password = os.environ.get("INITIAL_ADMIN_PW")
if account is not None and domain is not None and password is not None:
mode = os.environ.get("INITIAL_ADMIN_MODE", default="ifmissing")
log.info("Creating initial admin accout %s@%s with mode %s",account,domain,mode)
os.system("flask mailu admin %s %s '%s' --mode %s" % (account, domain, password, mode))
start_command="".join([
"gunicorn --threads ", str(os.cpu_count()),
" -b :80 ",
"--access-logfile - " if (log.root.level<=log.INFO) else "",
"--error-logfile - ",
"--preload ",
"'mailu:create_app()'"])
os.system(start_command)
|
#!/usr/bin/python3
import os
import logging as log
import sys
log.basicConfig(stream=sys.stderr, level=os.environ.get("LOG_LEVEL", "INFO"))
os.system("flask mailu advertise")
os.system("flask db upgrade")
account = os.environ.get("INITIAL_ADMIN_ACCOUNT")
domain = os.environ.get("INITIAL_ADMIN_DOMAIN")
password = os.environ.get("INITIAL_ADMIN_PW")
if account is not None and domain is not None and password is not None:
mode = os.environ.get("INITIAL_ADMIN_MODE", default="ifmissing")
log.info("Creating initial admin accout %s@%s with mode %s",account,domain,mode)
os.system("flask mailu admin %s %s '%s' --mode %s" % (account, domain, password, mode))
start_command="".join([
"gunicorn -w 4 -b :80 ",
"--access-logfile - " if (log.root.level<=log.INFO) else "",
"--error-logfile - ",
"--preload ",
"'mailu:create_app()'"])
os.system(start_command)
Use threads in gunicorn rather than processes
This ensures that we share the auth-cache... will enable memory savings
and may improve performance when a higher number of cores is available
"smarter default"#!/usr/bin/python3
import os
import logging as log
import sys
log.basicConfig(stream=sys.stderr, level=os.environ.get("LOG_LEVEL", "INFO"))
os.system("flask mailu advertise")
os.system("flask db upgrade")
account = os.environ.get("INITIAL_ADMIN_ACCOUNT")
domain = os.environ.get("INITIAL_ADMIN_DOMAIN")
password = os.environ.get("INITIAL_ADMIN_PW")
if account is not None and domain is not None and password is not None:
mode = os.environ.get("INITIAL_ADMIN_MODE", default="ifmissing")
log.info("Creating initial admin accout %s@%s with mode %s",account,domain,mode)
os.system("flask mailu admin %s %s '%s' --mode %s" % (account, domain, password, mode))
start_command="".join([
"gunicorn --threads ", str(os.cpu_count()),
" -b :80 ",
"--access-logfile - " if (log.root.level<=log.INFO) else "",
"--error-logfile - ",
"--preload ",
"'mailu:create_app()'"])
os.system(start_command)
|
<commit_before>#!/usr/bin/python3
import os
import logging as log
import sys
log.basicConfig(stream=sys.stderr, level=os.environ.get("LOG_LEVEL", "INFO"))
os.system("flask mailu advertise")
os.system("flask db upgrade")
account = os.environ.get("INITIAL_ADMIN_ACCOUNT")
domain = os.environ.get("INITIAL_ADMIN_DOMAIN")
password = os.environ.get("INITIAL_ADMIN_PW")
if account is not None and domain is not None and password is not None:
mode = os.environ.get("INITIAL_ADMIN_MODE", default="ifmissing")
log.info("Creating initial admin accout %s@%s with mode %s",account,domain,mode)
os.system("flask mailu admin %s %s '%s' --mode %s" % (account, domain, password, mode))
start_command="".join([
"gunicorn -w 4 -b :80 ",
"--access-logfile - " if (log.root.level<=log.INFO) else "",
"--error-logfile - ",
"--preload ",
"'mailu:create_app()'"])
os.system(start_command)
<commit_msg>Use threads in gunicorn rather than processes
This ensures that we share the auth-cache... will enable memory savings
and may improve performance when a higher number of cores is available
"smarter default"<commit_after>#!/usr/bin/python3
import os
import logging as log
import sys
log.basicConfig(stream=sys.stderr, level=os.environ.get("LOG_LEVEL", "INFO"))
os.system("flask mailu advertise")
os.system("flask db upgrade")
account = os.environ.get("INITIAL_ADMIN_ACCOUNT")
domain = os.environ.get("INITIAL_ADMIN_DOMAIN")
password = os.environ.get("INITIAL_ADMIN_PW")
if account is not None and domain is not None and password is not None:
mode = os.environ.get("INITIAL_ADMIN_MODE", default="ifmissing")
log.info("Creating initial admin accout %s@%s with mode %s",account,domain,mode)
os.system("flask mailu admin %s %s '%s' --mode %s" % (account, domain, password, mode))
start_command="".join([
"gunicorn --threads ", str(os.cpu_count()),
" -b :80 ",
"--access-logfile - " if (log.root.level<=log.INFO) else "",
"--error-logfile - ",
"--preload ",
"'mailu:create_app()'"])
os.system(start_command)
|
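A hedged sketch contrasting the two gunicorn models this record swaps between: with `-w 4` each worker is a separate process holding its own in-memory cache, while with `--threads N` (and gunicorn's default single worker) all requests share one process and therefore one cache. Note that `os.cpu_count()` can return `None` in unusual environments, which the committed `str(os.cpu_count())` does not guard against; the sketch below does:

import os

threads = os.cpu_count() or 1                                      # fall back to 1 if unknown
processes_cmd = "gunicorn -w 4 -b :80 'mailu:create_app()'"        # 4 processes, 4 caches
threads_cmd = "gunicorn --threads %d -b :80 'mailu:create_app()'" % threads  # 1 shared cache
print(threads_cmd)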
e8ac68b33b3b7bf54baa36b89ac90e9e5a666599
|
magnum/conf/services.py
|
magnum/conf/services.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
from magnum.i18n import _
service_opts = [
cfg.StrOpt('host',
help=_('Name of this node. This can be an opaque identifier. '
'It is not necessarily a hostname, FQDN, or IP address. '
'However, the node name must be valid within '
'an AMQP key, and if using ZeroMQ, a valid '
'hostname, FQDN, or IP address.')),
]
def register_opts(conf):
conf.register_opts(service_opts)
def list_opts():
return {
"DEFAULT": service_opts
}
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
from magnum.i18n import _
service_opts = [
cfg.HostAddressOpt('host',
help=_('Name of this node. This can be an opaque '
'identifier. It is not necessarily a hostname, '
'FQDN, or IP address. However, the node name '
'must be valid within an AMQP key, and if using '
'ZeroMQ, a valid hostname, FQDN, or IP '
'address.')),
]
def register_opts(conf):
conf.register_opts(service_opts)
def list_opts():
return {
"DEFAULT": service_opts
}
|
Use HostAddressOpt for opts that accept IP and hostnames
|
Use HostAddressOpt for opts that accept IP and hostnames
Some configuration options were accepting both IP addresses
and hostnames. Since there was no specific OSLO opt type to
support this, we were using ``StrOpt``. The change [1] that
added support for ``HostAddressOpt`` type was merged in Ocata
and became available for use with oslo version 3.22.
This patch changes the opt type of configuration options to use
this more relevant opt type - HostAddressOpt.
[1] I77bdb64b7e6e56ce761d76696bc4448a9bd325eb
Change-Id: Id179ad55d4344a7dc2214896290890862b560e0c
|
Python
|
apache-2.0
|
openstack/magnum,ArchiFleKs/magnum,ArchiFleKs/magnum,openstack/magnum
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
from magnum.i18n import _
service_opts = [
cfg.StrOpt('host',
help=_('Name of this node. This can be an opaque identifier. '
'It is not necessarily a hostname, FQDN, or IP address. '
'However, the node name must be valid within '
'an AMQP key, and if using ZeroMQ, a valid '
'hostname, FQDN, or IP address.')),
]
def register_opts(conf):
conf.register_opts(service_opts)
def list_opts():
return {
"DEFAULT": service_opts
}
Use HostAddressOpt for opts that accept IP and hostnames
Some configuration options were accepting both IP addresses
and hostnames. Since there was no specific OSLO opt type to
support this, we were using ``StrOpt``. The change [1] that
added support for ``HostAddressOpt`` type was merged in Ocata
and became available for use with oslo version 3.22.
This patch changes the opt type of configuration options to use
this more relevant opt type - HostAddressOpt.
[1] I77bdb64b7e6e56ce761d76696bc4448a9bd325eb
Change-Id: Id179ad55d4344a7dc2214896290890862b560e0c
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
from magnum.i18n import _
service_opts = [
cfg.HostAddressOpt('host',
help=_('Name of this node. This can be an opaque '
'identifier. It is not necessarily a hostname, '
'FQDN, or IP address. However, the node name '
'must be valid within an AMQP key, and if using '
'ZeroMQ, a valid hostname, FQDN, or IP '
'address.')),
]
def register_opts(conf):
conf.register_opts(service_opts)
def list_opts():
return {
"DEFAULT": service_opts
}
|
<commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
from magnum.i18n import _
service_opts = [
cfg.StrOpt('host',
help=_('Name of this node. This can be an opaque identifier. '
'It is not necessarily a hostname, FQDN, or IP address. '
'However, the node name must be valid within '
'an AMQP key, and if using ZeroMQ, a valid '
'hostname, FQDN, or IP address.')),
]
def register_opts(conf):
conf.register_opts(service_opts)
def list_opts():
return {
"DEFAULT": service_opts
}
<commit_msg>Use HostAddressOpt for opts that accept IP and hostnames
Some configuration options were accepting both IP addresses
and hostnames. Since there was no specific OSLO opt type to
support this, we were using ``StrOpt``. The change [1] that
added support for ``HostAddressOpt`` type was merged in Ocata
and became available for use with oslo version 3.22.
This patch changes the opt type of configuration options to use
this more relevant opt type - HostAddressOpt.
[1] I77bdb64b7e6e56ce761d76696bc4448a9bd325eb
Change-Id: Id179ad55d4344a7dc2214896290890862b560e0c<commit_after>
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
from magnum.i18n import _
service_opts = [
cfg.HostAddressOpt('host',
help=_('Name of this node. This can be an opaque '
'identifier. It is not necessarily a hostname, '
'FQDN, or IP address. However, the node name '
'must be valid within an AMQP key, and if using '
'ZeroMQ, a valid hostname, FQDN, or IP '
'address.')),
]
def register_opts(conf):
conf.register_opts(service_opts)
def list_opts():
return {
"DEFAULT": service_opts
}
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
from magnum.i18n import _
service_opts = [
cfg.StrOpt('host',
help=_('Name of this node. This can be an opaque identifier. '
'It is not necessarily a hostname, FQDN, or IP address. '
'However, the node name must be valid within '
'an AMQP key, and if using ZeroMQ, a valid '
'hostname, FQDN, or IP address.')),
]
def register_opts(conf):
conf.register_opts(service_opts)
def list_opts():
return {
"DEFAULT": service_opts
}
Use HostAddressOpt for opts that accept IP and hostnames
Some configuration options were accepting both IP addresses
and hostnames. Since there was no specific OSLO opt type to
support this, we were using ``StrOpt``. The change [1] that
added support for ``HostAddressOpt`` type was merged in Ocata
and became available for use with oslo version 3.22.
This patch changes the opt type of configuration options to use
this more relevant opt type - HostAddressOpt.
[1] I77bdb64b7e6e56ce761d76696bc4448a9bd325eb
Change-Id: Id179ad55d4344a7dc2214896290890862b560e0c# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
from magnum.i18n import _
service_opts = [
cfg.HostAddressOpt('host',
help=_('Name of this node. This can be an opaque '
'identifier. It is not necessarily a hostname, '
'FQDN, or IP address. However, the node name '
'must be valid within an AMQP key, and if using '
'ZeroMQ, a valid hostname, FQDN, or IP '
'address.')),
]
def register_opts(conf):
conf.register_opts(service_opts)
def list_opts():
return {
"DEFAULT": service_opts
}
|
<commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
from magnum.i18n import _
service_opts = [
cfg.StrOpt('host',
help=_('Name of this node. This can be an opaque identifier. '
'It is not necessarily a hostname, FQDN, or IP address. '
'However, the node name must be valid within '
'an AMQP key, and if using ZeroMQ, a valid '
'hostname, FQDN, or IP address.')),
]
def register_opts(conf):
conf.register_opts(service_opts)
def list_opts():
return {
"DEFAULT": service_opts
}
<commit_msg>Use HostAddressOpt for opts that accept IP and hostnames
Some configuration options were accepting both IP addresses
and hostnames. Since there was no specific OSLO opt type to
support this, we were using ``StrOpt``. The change [1] that
added support for ``HostAddressOpt`` type was merged in Ocata
and became available for use with oslo version 3.22.
This patch changes the opt type of configuration options to use
this more relevant opt type - HostAddressOpt.
[1] I77bdb64b7e6e56ce761d76696bc4448a9bd325eb
Change-Id: Id179ad55d4344a7dc2214896290890862b560e0c<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
from magnum.i18n import _
service_opts = [
cfg.HostAddressOpt('host',
help=_('Name of this node. This can be an opaque '
'identifier. It is not necessarily a hostname, '
'FQDN, or IP address. However, the node name '
'must be valid within an AMQP key, and if using '
'ZeroMQ, a valid hostname, FQDN, or IP '
'address.')),
]
def register_opts(conf):
conf.register_opts(service_opts)
def list_opts():
return {
"DEFAULT": service_opts
}
|
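A minimal sketch of what the opt-type change in this record buys, assuming oslo.config >= 3.22: `HostAddressOpt` validates that the configured value is a hostname or IP address, which a plain `StrOpt` never did:

from oslo_config import cfg

conf = cfg.ConfigOpts()
conf.register_opts([cfg.HostAddressOpt("host", default="localhost")])
conf(args=[])                                 # parse an empty command line
print(conf.host)                              # -> localhost
# conf.set_override("host", "not a host!")    # would now be rejected as an invalid address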
381cf72695185fda93d0d9685fad887d445b4a72
|
mesonwrap/inventory.py
|
mesonwrap/inventory.py
|
RESTRICTED_PROJECTS = [
'dubtestproject',
'meson',
'meson-ci',
'mesonbuild.github.io',
'mesonwrap',
'wrapdb',
'wrapdevtools',
'wrapweb',
]
ISSUE_TRACKER = 'wrapdb'
class Inventory:
def __init__(self, organization):
self.organization = organization
self.restricted_projects = [
organization + '/' + proj for proj in RESTRICTED_PROJECTS
]
self.issue_tracker = organization + '/' + ISSUE_TRACKER
DEFAULT = Inventory('mesonbuild')
def is_wrap_project_name(project: str) -> bool:
return project not in RESTRICTED_PROJECTS
def is_wrap_full_project_name(full_project: str) -> bool:
return full_project not in DEFAULT.restricted_projects
|
RESTRICTED_PROJECTS = [
'cidata',
'dubtestproject',
'meson',
'meson-ci',
'mesonbuild.github.io',
'mesonwrap',
'wrapdb',
'wrapdevtools',
'wrapweb',
]
ISSUE_TRACKER = 'wrapdb'
class Inventory:
def __init__(self, organization):
self.organization = organization
self.restricted_projects = [
organization + '/' + proj for proj in RESTRICTED_PROJECTS
]
self.issue_tracker = organization + '/' + ISSUE_TRACKER
DEFAULT = Inventory('mesonbuild')
def is_wrap_project_name(project: str) -> bool:
return project not in RESTRICTED_PROJECTS
def is_wrap_full_project_name(full_project: str) -> bool:
return full_project not in DEFAULT.restricted_projects
|
Add cidata to the list of restricted projects
|
Add cidata to the list of restricted projects
|
Python
|
apache-2.0
|
mesonbuild/wrapweb,mesonbuild/wrapweb,mesonbuild/wrapweb
|
RESTRICTED_PROJECTS = [
'dubtestproject',
'meson',
'meson-ci',
'mesonbuild.github.io',
'mesonwrap',
'wrapdb',
'wrapdevtools',
'wrapweb',
]
ISSUE_TRACKER = 'wrapdb'
class Inventory:
def __init__(self, organization):
self.organization = organization
self.restricted_projects = [
organization + '/' + proj for proj in RESTRICTED_PROJECTS
]
self.issue_tracker = organization + '/' + ISSUE_TRACKER
DEFAULT = Inventory('mesonbuild')
def is_wrap_project_name(project: str) -> bool:
return project not in RESTRICTED_PROJECTS
def is_wrap_full_project_name(full_project: str) -> bool:
return full_project not in DEFAULT.restricted_projects
Add cidata to the list of restricted projects
|
RESTRICTED_PROJECTS = [
'cidata',
'dubtestproject',
'meson',
'meson-ci',
'mesonbuild.github.io',
'mesonwrap',
'wrapdb',
'wrapdevtools',
'wrapweb',
]
ISSUE_TRACKER = 'wrapdb'
class Inventory:
def __init__(self, organization):
self.organization = organization
self.restricted_projects = [
organization + '/' + proj for proj in RESTRICTED_PROJECTS
]
self.issue_tracker = organization + '/' + ISSUE_TRACKER
DEFAULT = Inventory('mesonbuild')
def is_wrap_project_name(project: str) -> bool:
return project not in RESTRICTED_PROJECTS
def is_wrap_full_project_name(full_project: str) -> bool:
return full_project not in DEFAULT.restricted_projects
|
<commit_before>RESTRICTED_PROJECTS = [
'dubtestproject',
'meson',
'meson-ci',
'mesonbuild.github.io',
'mesonwrap',
'wrapdb',
'wrapdevtools',
'wrapweb',
]
ISSUE_TRACKER = 'wrapdb'
class Inventory:
def __init__(self, organization):
self.organization = organization
self.restricted_projects = [
organization + '/' + proj for proj in RESTRICTED_PROJECTS
]
self.issue_tracker = organization + '/' + ISSUE_TRACKER
DEFAULT = Inventory('mesonbuild')
def is_wrap_project_name(project: str) -> bool:
return project not in RESTRICTED_PROJECTS
def is_wrap_full_project_name(full_project: str) -> bool:
return full_project not in DEFAULT.restricted_projects
<commit_msg>Add cidata to the list of restricted projects<commit_after>
|
RESTRICTED_PROJECTS = [
'cidata',
'dubtestproject',
'meson',
'meson-ci',
'mesonbuild.github.io',
'mesonwrap',
'wrapdb',
'wrapdevtools',
'wrapweb',
]
ISSUE_TRACKER = 'wrapdb'
class Inventory:
def __init__(self, organization):
self.organization = organization
self.restricted_projects = [
organization + '/' + proj for proj in RESTRICTED_PROJECTS
]
self.issue_tracker = organization + '/' + ISSUE_TRACKER
DEFAULT = Inventory('mesonbuild')
def is_wrap_project_name(project: str) -> bool:
return project not in RESTRICTED_PROJECTS
def is_wrap_full_project_name(full_project: str) -> bool:
return full_project not in DEFAULT.restricted_projects
|
RESTRICTED_PROJECTS = [
'dubtestproject',
'meson',
'meson-ci',
'mesonbuild.github.io',
'mesonwrap',
'wrapdb',
'wrapdevtools',
'wrapweb',
]
ISSUE_TRACKER = 'wrapdb'
class Inventory:
def __init__(self, organization):
self.organization = organization
self.restricted_projects = [
organization + '/' + proj for proj in RESTRICTED_PROJECTS
]
self.issue_tracker = organization + '/' + ISSUE_TRACKER
DEFAULT = Inventory('mesonbuild')
def is_wrap_project_name(project: str) -> bool:
return project not in RESTRICTED_PROJECTS
def is_wrap_full_project_name(full_project: str) -> bool:
return full_project not in DEFAULT.restricted_projects
Add cidata to the list of restricted projectsRESTRICTED_PROJECTS = [
'cidata',
'dubtestproject',
'meson',
'meson-ci',
'mesonbuild.github.io',
'mesonwrap',
'wrapdb',
'wrapdevtools',
'wrapweb',
]
ISSUE_TRACKER = 'wrapdb'
class Inventory:
def __init__(self, organization):
self.organization = organization
self.restricted_projects = [
organization + '/' + proj for proj in RESTRICTED_PROJECTS
]
self.issue_tracker = organization + '/' + ISSUE_TRACKER
DEFAULT = Inventory('mesonbuild')
def is_wrap_project_name(project: str) -> bool:
return project not in RESTRICTED_PROJECTS
def is_wrap_full_project_name(full_project: str) -> bool:
return full_project not in DEFAULT.restricted_projects
|
<commit_before>RESTRICTED_PROJECTS = [
'dubtestproject',
'meson',
'meson-ci',
'mesonbuild.github.io',
'mesonwrap',
'wrapdb',
'wrapdevtools',
'wrapweb',
]
ISSUE_TRACKER = 'wrapdb'
class Inventory:
def __init__(self, organization):
self.organization = organization
self.restricted_projects = [
organization + '/' + proj for proj in RESTRICTED_PROJECTS
]
self.issue_tracker = organization + '/' + ISSUE_TRACKER
DEFAULT = Inventory('mesonbuild')
def is_wrap_project_name(project: str) -> bool:
return project not in RESTRICTED_PROJECTS
def is_wrap_full_project_name(full_project: str) -> bool:
return full_project not in DEFAULT.restricted_projects
<commit_msg>Add cidata to the list of restricted projects<commit_after>RESTRICTED_PROJECTS = [
'cidata',
'dubtestproject',
'meson',
'meson-ci',
'mesonbuild.github.io',
'mesonwrap',
'wrapdb',
'wrapdevtools',
'wrapweb',
]
ISSUE_TRACKER = 'wrapdb'
class Inventory:
def __init__(self, organization):
self.organization = organization
self.restricted_projects = [
organization + '/' + proj for proj in RESTRICTED_PROJECTS
]
self.issue_tracker = organization + '/' + ISSUE_TRACKER
DEFAULT = Inventory('mesonbuild')
def is_wrap_project_name(project: str) -> bool:
return project not in RESTRICTED_PROJECTS
def is_wrap_full_project_name(full_project: str) -> bool:
return full_project not in DEFAULT.restricted_projects
|
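Usage sketch for the helpers in this record, assuming the module is importable as `mesonwrap.inventory` per the file path above:

from mesonwrap import inventory

print(inventory.is_wrap_project_name("zlib"))                     # True: an ordinary wrap
print(inventory.is_wrap_project_name("cidata"))                   # False: newly restricted
print(inventory.is_wrap_full_project_name("mesonbuild/cidata"))   # False for the default org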
71b8ee305e70d3822bc5efe13de4eede7f13b65e
|
__init__.py
|
__init__.py
|
from __future__ import absolute_import, division, print_function
import sys
# Hack to disable any DIALS banner showing up.
# To work properly this requires *this* file here to be essentially empty.
# Load *this* file here as dials.util.banner, so any future import
# will do exactly nothing.
sys.modules['dials.util.banner'] = __import__('xia2')
|
Hide DIALS banner during xia2 execution
|
Hide DIALS banner during xia2 execution
There probably should be a neater way to achieve this.
|
Python
|
bsd-3-clause
|
xia2/xia2,xia2/xia2
|
Hide DIALS banner during xia2 execution
There probably should be a neater way to achieve this.
|
from __future__ import absolute_import, division, print_function
import sys
# Hack to disable any DIALS banner showing up.
# To work properly this requires *this* file here to be essentially empty.
# Load *this* file here as dials.util.banner, so any future import
# will do exactly nothing.
sys.modules['dials.util.banner'] = __import__('xia2')
|
<commit_before><commit_msg>Hide DIALS banner during xia2 execution
There probably should be a neater way to achieve this.<commit_after>
|
from __future__ import absolute_import, division, print_function
import sys
# Hack to disable any DIALS banner showing up.
# To work properly this requires *this* file here to be essentially empty.
# Load *this* file here as dials.util.banner, so any future import
# will do exactly nothing.
sys.modules['dials.util.banner'] = __import__('xia2')
|
Hide DIALS banner during xia2 execution
There probably should be a neater way to achieve this.from __future__ import absolute_import, division, print_function
import sys
# Hack to disable any DIALS banner showing up.
# To work properly this requires *this* file here to be essentially empty.
# Load *this* file here as dials.util.banner, so any future import
# will do exactly nothing.
sys.modules['dials.util.banner'] = __import__('xia2')
|
<commit_before><commit_msg>Hide DIALS banner during xia2 execution
There probably should be a neater way to achieve this.<commit_after>from __future__ import absolute_import, division, print_function
import sys
# Hack to disable any DIALS banner showing up.
# To work properly this requires *this* file here to be essentially empty.
# Load *this* file here as dials.util.banner, so any future import
# will do exactly nothing.
sys.modules['dials.util.banner'] = __import__('xia2')
|
|
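A self-contained sketch of the `sys.modules` pre-seeding trick this record relies on; the `noisy.banner` name is made up. Registering an object under a module name before anything imports it makes later imports return that object instead of executing the real module:

import sys
import types

pkg = types.ModuleType("noisy")              # stand-in for the parent package
stub = types.ModuleType("noisy.banner")      # empty stand-in for the noisy module
pkg.banner = stub                            # make attribute access work after import
sys.modules["noisy"] = pkg
sys.modules["noisy.banner"] = stub

import noisy.banner                          # returns the stub; no module code runs
assert sys.modules["noisy.banner"] is stub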
8ffd6ffecd7ce713446385b6cd108e50fb041403
|
__main__.py
|
__main__.py
|
from . import *
ps1 = '\n% '
ps2 = '| '
try:
from blessings import Terminal
term = Terminal()
ps1 = term.bold_blue(ps1)
ps2 = term.bold_blue(ps2)
def fancy_movement():
print(term.move_up() + term.clear_eol() + term.move_up())
except ImportError:
def fancy_movement():
pass
def getfilefunc(mod, droplast=True):
return Func(fixtags(flattenbody(mod, droplast=droplast)))
def runfile(fname):
invoke(getfilefunc(parseFile(fname)), stdlib())
def readProgram():
try:
yield input(ps1)
while True:
line = input(ps2)
if not line:
fancy_movement()
return
yield line
except EOFError:
print()
raise SystemExit
def interactive():
env = stdlib()
while True:
try:
retval, = invoke(getfilefunc(parseString('\n'.join(readProgram())), droplast=False), env)
if retval is not None:
print(arepr(retval))
except KeyboardInterrupt:
print()
except Exception as e:
print(e)
import sys
if len(sys.argv) > 1:
runfile(sys.argv[1])
else:
interactive()
|
from . import *
import readline
ps1 = '\n% '
ps2 = '| '
try:
from blessings import Terminal
term = Terminal()
ps1 = term.bold_blue(ps1)
ps2 = term.bold_blue(ps2)
def fancy_movement():
print(term.move_up() + term.clear_eol() + term.move_up())
except ImportError:
def fancy_movement():
pass
def getfilefunc(mod, droplast=True):
return Func(fixtags(flattenbody(mod, droplast=droplast)))
def runfile(fname):
invoke(getfilefunc(parseFile(fname)), stdlib())
def readProgram():
try:
yield input(ps1)
while True:
line = input(ps2)
if not line:
fancy_movement()
return
yield line
except EOFError:
print()
raise SystemExit
def interactive():
env = stdlib()
while True:
try:
retval, = invoke(getfilefunc(parseString('\n'.join(readProgram())), droplast=False), env)
if retval is not None:
print(arepr(retval))
except KeyboardInterrupt:
print()
except Exception as e:
print(e)
import sys
if len(sys.argv) > 1:
runfile(sys.argv[1])
else:
interactive()
|
Add readline support for the REPL
|
Add readline support for the REPL
|
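A hedged aside on the technique named in this message: importing `readline` is a pure side-effect import. On platforms that ship the module it transparently upgrades every later `input()` call with line editing and history; no other code changes are needed:

try:
    import readline                          # absent on some Windows builds
except ImportError:
    readline = None

line = input("% ")                           # arrow keys and Ctrl-R now work if available
if readline is not None:
    print("history entries:", readline.get_current_history_length())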
Python
|
isc
|
gvx/isle
|
from . import *
ps1 = '\n% '
ps2 = '| '
try:
from blessings import Terminal
term = Terminal()
ps1 = term.bold_blue(ps1)
ps2 = term.bold_blue(ps2)
def fancy_movement():
print(term.move_up() + term.clear_eol() + term.move_up())
except ImportError:
def fancy_movement():
pass
def getfilefunc(mod, droplast=True):
return Func(fixtags(flattenbody(mod, droplast=droplast)))
def runfile(fname):
invoke(getfilefunc(parseFile(fname)), stdlib())
def readProgram():
try:
yield input(ps1)
while True:
line = input(ps2)
if not line:
fancy_movement()
return
yield line
except EOFError:
print()
raise SystemExit
def interactive():
env = stdlib()
while True:
try:
retval, = invoke(getfilefunc(parseString('\n'.join(readProgram())), droplast=False), env)
if retval is not None:
print(arepr(retval))
except KeyboardInterrupt:
print()
except Exception as e:
print(e)
import sys
if len(sys.argv) > 1:
runfile(sys.argv[1])
else:
interactive()
Add readline support for the REPL
|
from . import *
import readline
ps1 = '\n% '
ps2 = '| '
try:
from blessings import Terminal
term = Terminal()
ps1 = term.bold_blue(ps1)
ps2 = term.bold_blue(ps2)
def fancy_movement():
print(term.move_up() + term.clear_eol() + term.move_up())
except ImportError:
def fancy_movement():
pass
def getfilefunc(mod, droplast=True):
return Func(fixtags(flattenbody(mod, droplast=droplast)))
def runfile(fname):
invoke(getfilefunc(parseFile(fname)), stdlib())
def readProgram():
try:
yield input(ps1)
while True:
line = input(ps2)
if not line:
fancy_movement()
return
yield line
except EOFError:
print()
raise SystemExit
def interactive():
env = stdlib()
while True:
try:
retval, = invoke(getfilefunc(parseString('\n'.join(readProgram())), droplast=False), env)
if retval is not None:
print(arepr(retval))
except KeyboardInterrupt:
print()
except Exception as e:
print(e)
import sys
if len(sys.argv) > 1:
runfile(sys.argv[1])
else:
interactive()
|
<commit_before>from . import *
ps1 = '\n% '
ps2 = '| '
try:
from blessings import Terminal
term = Terminal()
ps1 = term.bold_blue(ps1)
ps2 = term.bold_blue(ps2)
def fancy_movement():
print(term.move_up() + term.clear_eol() + term.move_up())
except ImportError:
def fancy_movement():
pass
def getfilefunc(mod, droplast=True):
return Func(fixtags(flattenbody(mod, droplast=droplast)))
def runfile(fname):
invoke(getfilefunc(parseFile(fname)), stdlib())
def readProgram():
try:
yield input(ps1)
while True:
line = input(ps2)
if not line:
fancy_movement()
return
yield line
except EOFError:
print()
raise SystemExit
def interactive():
env = stdlib()
while True:
try:
retval, = invoke(getfilefunc(parseString('\n'.join(readProgram())), droplast=False), env)
if retval is not None:
print(arepr(retval))
except KeyboardInterrupt:
print()
except Exception as e:
print(e)
import sys
if len(sys.argv) > 1:
runfile(sys.argv[1])
else:
interactive()
<commit_msg>Add readline support for the REPL<commit_after>
|
from . import *
import readline
ps1 = '\n% '
ps2 = '| '
try:
from blessings import Terminal
term = Terminal()
ps1 = term.bold_blue(ps1)
ps2 = term.bold_blue(ps2)
def fancy_movement():
print(term.move_up() + term.clear_eol() + term.move_up())
except ImportError:
def fancy_movement():
pass
def getfilefunc(mod, droplast=True):
return Func(fixtags(flattenbody(mod, droplast=droplast)))
def runfile(fname):
invoke(getfilefunc(parseFile(fname)), stdlib())
def readProgram():
try:
yield input(ps1)
while True:
line = input(ps2)
if not line:
fancy_movement()
return
yield line
except EOFError:
print()
raise SystemExit
def interactive():
env = stdlib()
while True:
try:
retval, = invoke(getfilefunc(parseString('\n'.join(readProgram())), droplast=False), env)
if retval is not None:
print(arepr(retval))
except KeyboardInterrupt:
print()
except Exception as e:
print(e)
import sys
if len(sys.argv) > 1:
runfile(sys.argv[1])
else:
interactive()
|
from . import *
ps1 = '\n% '
ps2 = '| '
try:
from blessings import Terminal
term = Terminal()
ps1 = term.bold_blue(ps1)
ps2 = term.bold_blue(ps2)
def fancy_movement():
print(term.move_up() + term.clear_eol() + term.move_up())
except ImportError:
def fancy_movement():
pass
def getfilefunc(mod, droplast=True):
return Func(fixtags(flattenbody(mod, droplast=droplast)))
def runfile(fname):
invoke(getfilefunc(parseFile(fname)), stdlib())
def readProgram():
try:
yield input(ps1)
while True:
line = input(ps2)
if not line:
fancy_movement()
return
yield line
except EOFError:
print()
raise SystemExit
def interactive():
env = stdlib()
while True:
try:
retval, = invoke(getfilefunc(parseString('\n'.join(readProgram())), droplast=False), env)
if retval is not None:
print(arepr(retval))
except KeyboardInterrupt:
print()
except Exception as e:
print(e)
import sys
if len(sys.argv) > 1:
runfile(sys.argv[1])
else:
interactive()
Add readline support for the REPL
from . import *
import readline
ps1 = '\n% '
ps2 = '| '
try:
from blessings import Terminal
term = Terminal()
ps1 = term.bold_blue(ps1)
ps2 = term.bold_blue(ps2)
def fancy_movement():
print(term.move_up() + term.clear_eol() + term.move_up())
except ImportError:
def fancy_movement():
pass
def getfilefunc(mod, droplast=True):
return Func(fixtags(flattenbody(mod, droplast=droplast)))
def runfile(fname):
invoke(getfilefunc(parseFile(fname)), stdlib())
def readProgram():
try:
yield input(ps1)
while True:
line = input(ps2)
if not line:
fancy_movement()
return
yield line
except EOFError:
print()
raise SystemExit
def interactive():
env = stdlib()
while True:
try:
retval, = invoke(getfilefunc(parseString('\n'.join(readProgram())), droplast=False), env)
if retval is not None:
print(arepr(retval))
except KeyboardInterrupt:
print()
except Exception as e:
print(e)
import sys
if len(sys.argv) > 1:
runfile(sys.argv[1])
else:
interactive()
|
<commit_before>from . import *
ps1 = '\n% '
ps2 = '| '
try:
from blessings import Terminal
term = Terminal()
ps1 = term.bold_blue(ps1)
ps2 = term.bold_blue(ps2)
def fancy_movement():
print(term.move_up() + term.clear_eol() + term.move_up())
except ImportError:
def fancy_movement():
pass
def getfilefunc(mod, droplast=True):
return Func(fixtags(flattenbody(mod, droplast=droplast)))
def runfile(fname):
invoke(getfilefunc(parseFile(fname)), stdlib())
def readProgram():
try:
yield input(ps1)
while True:
line = input(ps2)
if not line:
fancy_movement()
return
yield line
except EOFError:
print()
raise SystemExit
def interactive():
env = stdlib()
while True:
try:
retval, = invoke(getfilefunc(parseString('\n'.join(readProgram())), droplast=False), env)
if retval is not None:
print(arepr(retval))
except KeyboardInterrupt:
print()
except Exception as e:
print(e)
import sys
if len(sys.argv) > 1:
runfile(sys.argv[1])
else:
interactive()
<commit_msg>Add readline support for the REPL<commit_after>from . import *
import readline
ps1 = '\n% '
ps2 = '| '
try:
from blessings import Terminal
term = Terminal()
ps1 = term.bold_blue(ps1)
ps2 = term.bold_blue(ps2)
def fancy_movement():
print(term.move_up() + term.clear_eol() + term.move_up())
except ImportError:
def fancy_movement():
pass
def getfilefunc(mod, droplast=True):
return Func(fixtags(flattenbody(mod, droplast=droplast)))
def runfile(fname):
invoke(getfilefunc(parseFile(fname)), stdlib())
def readProgram():
try:
yield input(ps1)
while True:
line = input(ps2)
if not line:
fancy_movement()
return
yield line
except EOFError:
print()
raise SystemExit
def interactive():
env = stdlib()
while True:
try:
retval, = invoke(getfilefunc(parseString('\n'.join(readProgram())), droplast=False), env)
if retval is not None:
print(arepr(retval))
except KeyboardInterrupt:
print()
except Exception as e:
print(e)
import sys
if len(sys.argv) > 1:
runfile(sys.argv[1])
else:
interactive()
|
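Note on the commit above: the whole fix is the bare import readline, which works purely by side effect -- once the module is loaded, Python's input() gains line editing, history, and Ctrl-R search. A minimal standalone sketch of the pattern (not part of the record; the ImportError guard is an assumption for platforms that ship without readline):

try:
    import readline  # noqa: F401 -- side effect: enables editing/history for input()
except ImportError:
    readline = None  # e.g. some Windows builds lack readline

while True:
    try:
        line = input('> ')
    except EOFError:  # Ctrl-D ends the loop
        print()
        break
    print(repr(line))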
c654bc1fdacdb355b7e03c853ebcdc919ac5f91d
|
tests/capture/test_capture.py
|
tests/capture/test_capture.py
|
from pyshark.capture.capture import Capture
def test_capture_gets_decoding_parameters():
c = Capture(decode_as={'tcp.port==8888': 'http'})
params = c.get_parameters()
decode_index = params.index('-d')
assert params[decode_index + 1] == 'tcp.port==8888,http'
def test_capture_gets_multiple_decoding_parameters():
c = Capture(decode_as={'tcp.port==8888': 'http', 'tcp.port==6666': 'dns'})
params = c.get_parameters()
decode_index = params.index('-d')
assert params[decode_index + 1] == 'tcp.port==8888,http'
decode_index = params.index('-d', decode_index + 1)
assert params[decode_index + 1] == 'tcp.port==6666,dns'
|
from pyshark.capture.capture import Capture
def test_capture_gets_decoding_parameters():
c = Capture(decode_as={'tcp.port==8888': 'http'})
params = c.get_parameters()
decode_index = params.index('-d')
assert params[decode_index + 1] == 'tcp.port==8888,http'
def test_capture_gets_multiple_decoding_parameters():
c = Capture(decode_as={'tcp.port==8888': 'http', 'tcp.port==6666': 'dns'})
params = c.get_parameters()
decode_index = params.index('-d')
possible_results = ['tcp.port==8888,http', 'tcp.port==6666,dns']
assert params[decode_index + 1] in possible_results
possible_results.remove(params[decode_index + 1])
decode_index = params.index('-d', decode_index + 1)
assert params[decode_index + 1] == possible_results[0]
|
Fix tests to avoid dict ordering problem
|
Fix tests to avoid dict ordering problem
|
Python
|
mit
|
KimiNewt/pyshark,eaufavor/pyshark-ssl
|
from pyshark.capture.capture import Capture
def test_capture_gets_decoding_parameters():
c = Capture(decode_as={'tcp.port==8888': 'http'})
params = c.get_parameters()
decode_index = params.index('-d')
assert params[decode_index + 1] == 'tcp.port==8888,http'
def test_capture_gets_multiple_decoding_parameters():
c = Capture(decode_as={'tcp.port==8888': 'http', 'tcp.port==6666': 'dns'})
params = c.get_parameters()
decode_index = params.index('-d')
assert params[decode_index + 1] == 'tcp.port==8888,http'
decode_index = params.index('-d', decode_index + 1)
    assert params[decode_index + 1] == 'tcp.port==6666,dns'
Fix tests to avoid dict ordering problem
|
from pyshark.capture.capture import Capture
def test_capture_gets_decoding_parameters():
c = Capture(decode_as={'tcp.port==8888': 'http'})
params = c.get_parameters()
decode_index = params.index('-d')
assert params[decode_index + 1] == 'tcp.port==8888,http'
def test_capture_gets_multiple_decoding_parameters():
c = Capture(decode_as={'tcp.port==8888': 'http', 'tcp.port==6666': 'dns'})
params = c.get_parameters()
decode_index = params.index('-d')
possible_results = ['tcp.port==8888,http', 'tcp.port==6666,dns']
assert params[decode_index + 1] in possible_results
possible_results.remove(params[decode_index + 1])
decode_index = params.index('-d', decode_index + 1)
assert params[decode_index + 1] == possible_results[0]
|
<commit_before>from pyshark.capture.capture import Capture
def test_capture_gets_decoding_parameters():
c = Capture(decode_as={'tcp.port==8888': 'http'})
params = c.get_parameters()
decode_index = params.index('-d')
assert params[decode_index + 1] == 'tcp.port==8888,http'
def test_capture_gets_multiple_decoding_parameters():
c = Capture(decode_as={'tcp.port==8888': 'http', 'tcp.port==6666': 'dns'})
params = c.get_parameters()
decode_index = params.index('-d')
assert params[decode_index + 1] == 'tcp.port==8888,http'
decode_index = params.index('-d', decode_index + 1)
assert params[decode_index + 1] == 'tcp.port==6666,dns'<commit_msg>Fix tests to avoid dict ordering problem<commit_after>
|
from pyshark.capture.capture import Capture
def test_capture_gets_decoding_parameters():
c = Capture(decode_as={'tcp.port==8888': 'http'})
params = c.get_parameters()
decode_index = params.index('-d')
assert params[decode_index + 1] == 'tcp.port==8888,http'
def test_capture_gets_multiple_decoding_parameters():
c = Capture(decode_as={'tcp.port==8888': 'http', 'tcp.port==6666': 'dns'})
params = c.get_parameters()
decode_index = params.index('-d')
possible_results = ['tcp.port==8888,http', 'tcp.port==6666,dns']
assert params[decode_index + 1] in possible_results
possible_results.remove(params[decode_index + 1])
decode_index = params.index('-d', decode_index + 1)
assert params[decode_index + 1] == possible_results[0]
|
from pyshark.capture.capture import Capture
def test_capture_gets_decoding_parameters():
c = Capture(decode_as={'tcp.port==8888': 'http'})
params = c.get_parameters()
decode_index = params.index('-d')
assert params[decode_index + 1] == 'tcp.port==8888,http'
def test_capture_gets_multiple_decoding_parameters():
c = Capture(decode_as={'tcp.port==8888': 'http', 'tcp.port==6666': 'dns'})
params = c.get_parameters()
decode_index = params.index('-d')
assert params[decode_index + 1] == 'tcp.port==8888,http'
decode_index = params.index('-d', decode_index + 1)
    assert params[decode_index + 1] == 'tcp.port==6666,dns'
Fix tests to avoid dict ordering problem
from pyshark.capture.capture import Capture
def test_capture_gets_decoding_parameters():
c = Capture(decode_as={'tcp.port==8888': 'http'})
params = c.get_parameters()
decode_index = params.index('-d')
assert params[decode_index + 1] == 'tcp.port==8888,http'
def test_capture_gets_multiple_decoding_parameters():
c = Capture(decode_as={'tcp.port==8888': 'http', 'tcp.port==6666': 'dns'})
params = c.get_parameters()
decode_index = params.index('-d')
possible_results = ['tcp.port==8888,http', 'tcp.port==6666,dns']
assert params[decode_index + 1] in possible_results
possible_results.remove(params[decode_index + 1])
decode_index = params.index('-d', decode_index + 1)
assert params[decode_index + 1] == possible_results[0]
|
<commit_before>from pyshark.capture.capture import Capture
def test_capture_gets_decoding_parameters():
c = Capture(decode_as={'tcp.port==8888': 'http'})
params = c.get_parameters()
decode_index = params.index('-d')
assert params[decode_index + 1] == 'tcp.port==8888,http'
def test_capture_gets_multiple_decoding_parameters():
c = Capture(decode_as={'tcp.port==8888': 'http', 'tcp.port==6666': 'dns'})
params = c.get_parameters()
decode_index = params.index('-d')
assert params[decode_index + 1] == 'tcp.port==8888,http'
decode_index = params.index('-d', decode_index + 1)
assert params[decode_index + 1] == 'tcp.port==6666,dns'<commit_msg>Fix tests to avoid dict ordering problem<commit_after>from pyshark.capture.capture import Capture
def test_capture_gets_decoding_parameters():
c = Capture(decode_as={'tcp.port==8888': 'http'})
params = c.get_parameters()
decode_index = params.index('-d')
assert params[decode_index + 1] == 'tcp.port==8888,http'
def test_capture_gets_multiple_decoding_parameters():
c = Capture(decode_as={'tcp.port==8888': 'http', 'tcp.port==6666': 'dns'})
params = c.get_parameters()
decode_index = params.index('-d')
possible_results = ['tcp.port==8888,http', 'tcp.port==6666,dns']
assert params[decode_index + 1] in possible_results
possible_results.remove(params[decode_index + 1])
decode_index = params.index('-d', decode_index + 1)
assert params[decode_index + 1] == possible_results[0]
|
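Note on the commit above: before CPython 3.7, dict iteration order was an implementation detail (and hash randomization could vary it between runs), so asserting that the two -d pairs appear in a fixed order made the test flaky. The fix asserts membership in a set of acceptable values and eliminates matches. A standalone sketch of the same pattern (build_params here is illustrative, not pyshark's API):

def build_params(decode_as):
    # Mimics building a CLI argument list from a dict; the iteration
    # order is unspecified on older Pythons, which is the whole problem.
    params = []
    for rule, proto in decode_as.items():
        params += ['-d', '%s,%s' % (rule, proto)]
    return params

params = build_params({'tcp.port==8888': 'http', 'tcp.port==6666': 'dns'})
# Collect the value after each '-d' flag, then compare as a set.
pairs = {params[i + 1] for i, arg in enumerate(params) if arg == '-d'}
assert pairs == {'tcp.port==8888,http', 'tcp.port==6666,dns'}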
3e9a4f27ad05b3ecd2a4c013ff0f3b04e5fe44aa
|
tests/test_list_generators.py
|
tests/test_list_generators.py
|
import unittest
import craft_ai
from . import settings
from .utils import generate_entity_id
from .data import valid_data
class TestListGenerators(unittest.TestCase):
"""Checks that the client succeeds when getting an agent with OK input"""
@classmethod
def setUpClass(cls):
cls.client = craft_ai.Client(settings.CRAFT_CFG)
cls.n_generators = 5
cls.generators_id = [
generate_entity_id("list_generators") for i in range(cls.n_generators)
]
cls.agent_id = generate_entity_id("list_generators_agent")
def setUp(self):
self.client.delete_agent(self.agent_id)
self.client.create_agent(
valid_data.VALID_GENERATOR_CONFIGURATION, self.agent_id
)
for generators_id in self.generators_id:
self.client.delete_generator(generators_id)
self.client.create_generator(
valid_data.VALID_GENERATOR_CONFIGURATION, generators_id
)
def tearDown(self):
# Makes sure that no generator with the standard ID remains
for generator_id in self.generators_id:
self.client.delete_generator(generator_id)
self.client.delete_agent(self.agent_id)
def test_list_generators(self):
"""list_generators should returns the list of generators in the current project."""
generators_list = self.client.list_generators()
self.assertIsInstance(generators_list, list)
for generator_id in self.generators_id:
self.assertTrue(generator_id in generators_list)
|
import unittest
import craft_ai
from . import settings
from .utils import generate_entity_id
from .data import valid_data
class TestListGenerators(unittest.TestCase):
"""Checks that the client succeeds when getting an agent with OK input"""
@classmethod
def setUpClass(cls):
cls.client = craft_ai.Client(settings.CRAFT_CFG)
cls.n_generators = 5
cls.generators_id = [
generate_entity_id("list_generators") for i in range(cls.n_generators)
]
cls.agent_id = generate_entity_id("list_generators_agent")
def setUp(self):
self.client.delete_agent(self.agent_id)
self.client.create_agent(valid_data.VALID_CONFIGURATION, self.agent_id)
for generators_id in self.generators_id:
self.client.delete_generator(generators_id)
self.client.create_generator(
valid_data.VALID_GENERATOR_CONFIGURATION, generators_id
)
def tearDown(self):
# Makes sure that no generator with the standard ID remains
for generator_id in self.generators_id:
self.client.delete_generator(generator_id)
self.client.delete_agent(self.agent_id)
def test_list_generators(self):
"""list_generators should returns the list of generators in the current project."""
generators_list = self.client.list_generators()
self.assertIsInstance(generators_list, list)
for generator_id in self.generators_id:
self.assertTrue(generator_id in generators_list)
|
Fix agent creation configuration to make tests great again
|
Fix agent creation configuration to make tests great again
lint
|
Python
|
bsd-3-clause
|
craft-ai/craft-ai-client-python,craft-ai/craft-ai-client-python
|
import unittest
import craft_ai
from . import settings
from .utils import generate_entity_id
from .data import valid_data
class TestListGenerators(unittest.TestCase):
"""Checks that the client succeeds when getting an agent with OK input"""
@classmethod
def setUpClass(cls):
cls.client = craft_ai.Client(settings.CRAFT_CFG)
cls.n_generators = 5
cls.generators_id = [
generate_entity_id("list_generators") for i in range(cls.n_generators)
]
cls.agent_id = generate_entity_id("list_generators_agent")
def setUp(self):
self.client.delete_agent(self.agent_id)
self.client.create_agent(
valid_data.VALID_GENERATOR_CONFIGURATION, self.agent_id
)
for generators_id in self.generators_id:
self.client.delete_generator(generators_id)
self.client.create_generator(
valid_data.VALID_GENERATOR_CONFIGURATION, generators_id
)
def tearDown(self):
# Makes sure that no generator with the standard ID remains
for generator_id in self.generators_id:
self.client.delete_generator(generator_id)
self.client.delete_agent(self.agent_id)
def test_list_generators(self):
"""list_generators should returns the list of generators in the current project."""
generators_list = self.client.list_generators()
self.assertIsInstance(generators_list, list)
for generator_id in self.generators_id:
self.assertTrue(generator_id in generators_list)
Fix agent creation configuration to make tests great again
lint
|
import unittest
import craft_ai
from . import settings
from .utils import generate_entity_id
from .data import valid_data
class TestListGenerators(unittest.TestCase):
"""Checks that the client succeeds when getting an agent with OK input"""
@classmethod
def setUpClass(cls):
cls.client = craft_ai.Client(settings.CRAFT_CFG)
cls.n_generators = 5
cls.generators_id = [
generate_entity_id("list_generators") for i in range(cls.n_generators)
]
cls.agent_id = generate_entity_id("list_generators_agent")
def setUp(self):
self.client.delete_agent(self.agent_id)
self.client.create_agent(valid_data.VALID_CONFIGURATION, self.agent_id)
for generators_id in self.generators_id:
self.client.delete_generator(generators_id)
self.client.create_generator(
valid_data.VALID_GENERATOR_CONFIGURATION, generators_id
)
def tearDown(self):
# Makes sure that no generator with the standard ID remains
for generator_id in self.generators_id:
self.client.delete_generator(generator_id)
self.client.delete_agent(self.agent_id)
def test_list_generators(self):
"""list_generators should returns the list of generators in the current project."""
generators_list = self.client.list_generators()
self.assertIsInstance(generators_list, list)
for generator_id in self.generators_id:
self.assertTrue(generator_id in generators_list)
|
<commit_before>import unittest
import craft_ai
from . import settings
from .utils import generate_entity_id
from .data import valid_data
class TestListGenerators(unittest.TestCase):
"""Checks that the client succeeds when getting an agent with OK input"""
@classmethod
def setUpClass(cls):
cls.client = craft_ai.Client(settings.CRAFT_CFG)
cls.n_generators = 5
cls.generators_id = [
generate_entity_id("list_generators") for i in range(cls.n_generators)
]
cls.agent_id = generate_entity_id("list_generators_agent")
def setUp(self):
self.client.delete_agent(self.agent_id)
self.client.create_agent(
valid_data.VALID_GENERATOR_CONFIGURATION, self.agent_id
)
for generators_id in self.generators_id:
self.client.delete_generator(generators_id)
self.client.create_generator(
valid_data.VALID_GENERATOR_CONFIGURATION, generators_id
)
def tearDown(self):
# Makes sure that no generator with the standard ID remains
for generator_id in self.generators_id:
self.client.delete_generator(generator_id)
self.client.delete_agent(self.agent_id)
def test_list_generators(self):
"""list_generators should returns the list of generators in the current project."""
generators_list = self.client.list_generators()
self.assertIsInstance(generators_list, list)
for generator_id in self.generators_id:
self.assertTrue(generator_id in generators_list)
<commit_msg>Fix agent creation configuration to make tests great again
lint<commit_after>
|
import unittest
import craft_ai
from . import settings
from .utils import generate_entity_id
from .data import valid_data
class TestListGenerators(unittest.TestCase):
"""Checks that the client succeeds when getting an agent with OK input"""
@classmethod
def setUpClass(cls):
cls.client = craft_ai.Client(settings.CRAFT_CFG)
cls.n_generators = 5
cls.generators_id = [
generate_entity_id("list_generators") for i in range(cls.n_generators)
]
cls.agent_id = generate_entity_id("list_generators_agent")
def setUp(self):
self.client.delete_agent(self.agent_id)
self.client.create_agent(valid_data.VALID_CONFIGURATION, self.agent_id)
for generators_id in self.generators_id:
self.client.delete_generator(generators_id)
self.client.create_generator(
valid_data.VALID_GENERATOR_CONFIGURATION, generators_id
)
def tearDown(self):
# Makes sure that no generator with the standard ID remains
for generator_id in self.generators_id:
self.client.delete_generator(generator_id)
self.client.delete_agent(self.agent_id)
def test_list_generators(self):
"""list_generators should returns the list of generators in the current project."""
generators_list = self.client.list_generators()
self.assertIsInstance(generators_list, list)
for generator_id in self.generators_id:
self.assertTrue(generator_id in generators_list)
|
import unittest
import craft_ai
from . import settings
from .utils import generate_entity_id
from .data import valid_data
class TestListGenerators(unittest.TestCase):
"""Checks that the client succeeds when getting an agent with OK input"""
@classmethod
def setUpClass(cls):
cls.client = craft_ai.Client(settings.CRAFT_CFG)
cls.n_generators = 5
cls.generators_id = [
generate_entity_id("list_generators") for i in range(cls.n_generators)
]
cls.agent_id = generate_entity_id("list_generators_agent")
def setUp(self):
self.client.delete_agent(self.agent_id)
self.client.create_agent(
valid_data.VALID_GENERATOR_CONFIGURATION, self.agent_id
)
for generators_id in self.generators_id:
self.client.delete_generator(generators_id)
self.client.create_generator(
valid_data.VALID_GENERATOR_CONFIGURATION, generators_id
)
def tearDown(self):
# Makes sure that no generator with the standard ID remains
for generator_id in self.generators_id:
self.client.delete_generator(generator_id)
self.client.delete_agent(self.agent_id)
def test_list_generators(self):
"""list_generators should returns the list of generators in the current project."""
generators_list = self.client.list_generators()
self.assertIsInstance(generators_list, list)
for generator_id in self.generators_id:
self.assertTrue(generator_id in generators_list)
Fix agent creation configuration to make tests great again
lint
import unittest
import craft_ai
from . import settings
from .utils import generate_entity_id
from .data import valid_data
class TestListGenerators(unittest.TestCase):
"""Checks that the client succeeds when getting an agent with OK input"""
@classmethod
def setUpClass(cls):
cls.client = craft_ai.Client(settings.CRAFT_CFG)
cls.n_generators = 5
cls.generators_id = [
generate_entity_id("list_generators") for i in range(cls.n_generators)
]
cls.agent_id = generate_entity_id("list_generators_agent")
def setUp(self):
self.client.delete_agent(self.agent_id)
self.client.create_agent(valid_data.VALID_CONFIGURATION, self.agent_id)
for generators_id in self.generators_id:
self.client.delete_generator(generators_id)
self.client.create_generator(
valid_data.VALID_GENERATOR_CONFIGURATION, generators_id
)
def tearDown(self):
# Makes sure that no generator with the standard ID remains
for generator_id in self.generators_id:
self.client.delete_generator(generator_id)
self.client.delete_agent(self.agent_id)
def test_list_generators(self):
"""list_generators should returns the list of generators in the current project."""
generators_list = self.client.list_generators()
self.assertIsInstance(generators_list, list)
for generator_id in self.generators_id:
self.assertTrue(generator_id in generators_list)
|
<commit_before>import unittest
import craft_ai
from . import settings
from .utils import generate_entity_id
from .data import valid_data
class TestListGenerators(unittest.TestCase):
"""Checks that the client succeeds when getting an agent with OK input"""
@classmethod
def setUpClass(cls):
cls.client = craft_ai.Client(settings.CRAFT_CFG)
cls.n_generators = 5
cls.generators_id = [
generate_entity_id("list_generators") for i in range(cls.n_generators)
]
cls.agent_id = generate_entity_id("list_generators_agent")
def setUp(self):
self.client.delete_agent(self.agent_id)
self.client.create_agent(
valid_data.VALID_GENERATOR_CONFIGURATION, self.agent_id
)
for generators_id in self.generators_id:
self.client.delete_generator(generators_id)
self.client.create_generator(
valid_data.VALID_GENERATOR_CONFIGURATION, generators_id
)
def tearDown(self):
# Makes sure that no generator with the standard ID remains
for generator_id in self.generators_id:
self.client.delete_generator(generator_id)
self.client.delete_agent(self.agent_id)
def test_list_generators(self):
"""list_generators should returns the list of generators in the current project."""
generators_list = self.client.list_generators()
self.assertIsInstance(generators_list, list)
for generator_id in self.generators_id:
self.assertTrue(generator_id in generators_list)
<commit_msg>Fix agent creation configuration to make tests great again
lint<commit_after>import unittest
import craft_ai
from . import settings
from .utils import generate_entity_id
from .data import valid_data
class TestListGenerators(unittest.TestCase):
"""Checks that the client succeeds when getting an agent with OK input"""
@classmethod
def setUpClass(cls):
cls.client = craft_ai.Client(settings.CRAFT_CFG)
cls.n_generators = 5
cls.generators_id = [
generate_entity_id("list_generators") for i in range(cls.n_generators)
]
cls.agent_id = generate_entity_id("list_generators_agent")
def setUp(self):
self.client.delete_agent(self.agent_id)
self.client.create_agent(valid_data.VALID_CONFIGURATION, self.agent_id)
for generators_id in self.generators_id:
self.client.delete_generator(generators_id)
self.client.create_generator(
valid_data.VALID_GENERATOR_CONFIGURATION, generators_id
)
def tearDown(self):
# Makes sure that no generator with the standard ID remains
for generator_id in self.generators_id:
self.client.delete_generator(generator_id)
self.client.delete_agent(self.agent_id)
def test_list_generators(self):
"""list_generators should returns the list of generators in the current project."""
generators_list = self.client.list_generators()
self.assertIsInstance(generators_list, list)
for generator_id in self.generators_id:
self.assertTrue(generator_id in generators_list)
|
4420eb020d96004c5373584781c7b130de7b90e9
|
reg/__init__.py
|
reg/__init__.py
|
# flake8: noqa
from .implicit import implicit, NoImplicitLookupError
from .registry import ClassRegistry, Registry, IRegistry, IClassLookup
from .lookup import Lookup, ComponentLookupError, Matcher
from .predicate import (PredicateRegistry, Predicate, KeyIndex,
PredicateRegistryError)
from .compose import ListClassLookup, ChainClassLookup, CachingClassLookup
from .generic import generic
from .mapply import mapply
|
# flake8: noqa
from .implicit import implicit, NoImplicitLookupError
from .registry import ClassRegistry, Registry, IRegistry, IClassLookup
from .lookup import Lookup, ComponentLookupError, Matcher
from .predicate import (PredicateRegistry, Predicate, KeyIndex,
PredicateRegistryError)
from .compose import ListClassLookup, ChainClassLookup, CachingClassLookup
from .generic import generic
from .mapply import mapply
from .sentinel import Sentinel
|
Make sentinel available to outside.
|
Make sentinel available to outside.
|
Python
|
bsd-3-clause
|
taschini/reg,morepath/reg
|
# flake8: noqa
from .implicit import implicit, NoImplicitLookupError
from .registry import ClassRegistry, Registry, IRegistry, IClassLookup
from .lookup import Lookup, ComponentLookupError, Matcher
from .predicate import (PredicateRegistry, Predicate, KeyIndex,
PredicateRegistryError)
from .compose import ListClassLookup, ChainClassLookup, CachingClassLookup
from .generic import generic
from .mapply import mapply
Make sentinel available to outside.
|
# flake8: noqa
from .implicit import implicit, NoImplicitLookupError
from .registry import ClassRegistry, Registry, IRegistry, IClassLookup
from .lookup import Lookup, ComponentLookupError, Matcher
from .predicate import (PredicateRegistry, Predicate, KeyIndex,
PredicateRegistryError)
from .compose import ListClassLookup, ChainClassLookup, CachingClassLookup
from .generic import generic
from .mapply import mapply
from .sentinel import Sentinel
|
<commit_before># flake8: noqa
from .implicit import implicit, NoImplicitLookupError
from .registry import ClassRegistry, Registry, IRegistry, IClassLookup
from .lookup import Lookup, ComponentLookupError, Matcher
from .predicate import (PredicateRegistry, Predicate, KeyIndex,
PredicateRegistryError)
from .compose import ListClassLookup, ChainClassLookup, CachingClassLookup
from .generic import generic
from .mapply import mapply
<commit_msg>Make sentinel available to outside.<commit_after>
|
# flake8: noqa
from .implicit import implicit, NoImplicitLookupError
from .registry import ClassRegistry, Registry, IRegistry, IClassLookup
from .lookup import Lookup, ComponentLookupError, Matcher
from .predicate import (PredicateRegistry, Predicate, KeyIndex,
PredicateRegistryError)
from .compose import ListClassLookup, ChainClassLookup, CachingClassLookup
from .generic import generic
from .mapply import mapply
from .sentinel import Sentinel
|
# flake8: noqa
from .implicit import implicit, NoImplicitLookupError
from .registry import ClassRegistry, Registry, IRegistry, IClassLookup
from .lookup import Lookup, ComponentLookupError, Matcher
from .predicate import (PredicateRegistry, Predicate, KeyIndex,
PredicateRegistryError)
from .compose import ListClassLookup, ChainClassLookup, CachingClassLookup
from .generic import generic
from .mapply import mapply
Make sentinel available to outside.
# flake8: noqa
from .implicit import implicit, NoImplicitLookupError
from .registry import ClassRegistry, Registry, IRegistry, IClassLookup
from .lookup import Lookup, ComponentLookupError, Matcher
from .predicate import (PredicateRegistry, Predicate, KeyIndex,
PredicateRegistryError)
from .compose import ListClassLookup, ChainClassLookup, CachingClassLookup
from .generic import generic
from .mapply import mapply
from .sentinel import Sentinel
|
<commit_before># flake8: noqa
from .implicit import implicit, NoImplicitLookupError
from .registry import ClassRegistry, Registry, IRegistry, IClassLookup
from .lookup import Lookup, ComponentLookupError, Matcher
from .predicate import (PredicateRegistry, Predicate, KeyIndex,
PredicateRegistryError)
from .compose import ListClassLookup, ChainClassLookup, CachingClassLookup
from .generic import generic
from .mapply import mapply
<commit_msg>Make sentinel available to outside.<commit_after># flake8: noqa
from .implicit import implicit, NoImplicitLookupError
from .registry import ClassRegistry, Registry, IRegistry, IClassLookup
from .lookup import Lookup, ComponentLookupError, Matcher
from .predicate import (PredicateRegistry, Predicate, KeyIndex,
PredicateRegistryError)
from .compose import ListClassLookup, ChainClassLookup, CachingClassLookup
from .generic import generic
from .mapply import mapply
from .sentinel import Sentinel
|
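Note on the commit above: a Sentinel is typically a unique marker object used where None is a legitimate value, so "no argument supplied" stays distinguishable from every real input. reg's actual implementation is not shown in this record; the following is a generic sketch of the idiom, with all names illustrative:

class Sentinel(object):
    """Unique marker; identity comparison is the point, repr is for debugging."""
    def __init__(self, name):
        self.name = name
    def __repr__(self):
        return '<%s>' % self.name

NOT_GIVEN = Sentinel('NOT_GIVEN')

def component(key, default=NOT_GIVEN, registry={}):
    value = registry.get(key, default)
    if value is NOT_GIVEN:  # identity test cannot collide with user data
        raise LookupError(key)
    return value

assert component('missing', default=None) is None  # None passes through untouched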
268c4458161ce754a82e3986787f6703f9122e3e
|
trackmybmi/users/factories.py
|
trackmybmi/users/factories.py
|
import factory
from django.contrib.auth.hashers import make_password
from .models import Friendship, User
class UserFactory(factory.django.DjangoModelFactory):
"""Create users with default attributes."""
class Meta:
model = User
email = factory.Sequence(lambda n: 'user.{}@test.test'.format(n))
password = make_password('password')
class FriendshipFactory(factory.django.DjangoModelFactory):
class Meta:
model = Friendship
initiator = factory.SubFactory(UserFactory)
recipient = factory.SubFactory(UserFactory)
|
import factory
from django.contrib.auth import get_user_model
from django.contrib.auth.hashers import make_password
from .models import Friendship
User = get_user_model()
class UserFactory(factory.django.DjangoModelFactory):
"""Create users with default attributes."""
class Meta:
model = User
email = factory.Sequence(lambda n: 'user.{}@test.test'.format(n))
password = make_password('password')
class FriendshipFactory(factory.django.DjangoModelFactory):
class Meta:
model = Friendship
initiator = factory.SubFactory(UserFactory)
recipient = factory.SubFactory(UserFactory)
|
Replace User import with call to get_user_model()
|
Replace User import with call to get_user_model()
|
Python
|
mit
|
ojh/trackmybmi
|
import factory
from django.contrib.auth.hashers import make_password
from .models import Friendship, User
class UserFactory(factory.django.DjangoModelFactory):
"""Create users with default attributes."""
class Meta:
model = User
email = factory.Sequence(lambda n: 'user.{}@test.test'.format(n))
password = make_password('password')
class FriendshipFactory(factory.django.DjangoModelFactory):
class Meta:
model = Friendship
initiator = factory.SubFactory(UserFactory)
recipient = factory.SubFactory(UserFactory)
Replace User import with call to get_user_model()
|
import factory
from django.contrib.auth import get_user_model
from django.contrib.auth.hashers import make_password
from .models import Friendship
User = get_user_model()
class UserFactory(factory.django.DjangoModelFactory):
"""Create users with default attributes."""
class Meta:
model = User
email = factory.Sequence(lambda n: 'user.{}@test.test'.format(n))
password = make_password('password')
class FriendshipFactory(factory.django.DjangoModelFactory):
class Meta:
model = Friendship
initiator = factory.SubFactory(UserFactory)
recipient = factory.SubFactory(UserFactory)
|
<commit_before>import factory
from django.contrib.auth.hashers import make_password
from .models import Friendship, User
class UserFactory(factory.django.DjangoModelFactory):
"""Create users with default attributes."""
class Meta:
model = User
email = factory.Sequence(lambda n: 'user.{}@test.test'.format(n))
password = make_password('password')
class FriendshipFactory(factory.django.DjangoModelFactory):
class Meta:
model = Friendship
initiator = factory.SubFactory(UserFactory)
recipient = factory.SubFactory(UserFactory)
<commit_msg>Replace User import with call to get_user_model()<commit_after>
|
import factory
from django.contrib.auth import get_user_model
from django.contrib.auth.hashers import make_password
from .models import Friendship
User = get_user_model()
class UserFactory(factory.django.DjangoModelFactory):
"""Create users with default attributes."""
class Meta:
model = User
email = factory.Sequence(lambda n: 'user.{}@test.test'.format(n))
password = make_password('password')
class FriendshipFactory(factory.django.DjangoModelFactory):
class Meta:
model = Friendship
initiator = factory.SubFactory(UserFactory)
recipient = factory.SubFactory(UserFactory)
|
import factory
from django.contrib.auth.hashers import make_password
from .models import Friendship, User
class UserFactory(factory.django.DjangoModelFactory):
"""Create users with default attributes."""
class Meta:
model = User
email = factory.Sequence(lambda n: 'user.{}@test.test'.format(n))
password = make_password('password')
class FriendshipFactory(factory.django.DjangoModelFactory):
class Meta:
model = Friendship
initiator = factory.SubFactory(UserFactory)
recipient = factory.SubFactory(UserFactory)
Replace User import with call to get_user_model()
import factory
from django.contrib.auth import get_user_model
from django.contrib.auth.hashers import make_password
from .models import Friendship
User = get_user_model()
class UserFactory(factory.django.DjangoModelFactory):
"""Create users with default attributes."""
class Meta:
model = User
email = factory.Sequence(lambda n: 'user.{}@test.test'.format(n))
password = make_password('password')
class FriendshipFactory(factory.django.DjangoModelFactory):
class Meta:
model = Friendship
initiator = factory.SubFactory(UserFactory)
recipient = factory.SubFactory(UserFactory)
|
<commit_before>import factory
from django.contrib.auth.hashers import make_password
from .models import Friendship, User
class UserFactory(factory.django.DjangoModelFactory):
"""Create users with default attributes."""
class Meta:
model = User
email = factory.Sequence(lambda n: 'user.{}@test.test'.format(n))
password = make_password('password')
class FriendshipFactory(factory.django.DjangoModelFactory):
class Meta:
model = Friendship
initiator = factory.SubFactory(UserFactory)
recipient = factory.SubFactory(UserFactory)
<commit_msg>Replace User import with call to get_user_model()<commit_after>import factory
from django.contrib.auth import get_user_model
from django.contrib.auth.hashers import make_password
from .models import Friendship
User = get_user_model()
class UserFactory(factory.django.DjangoModelFactory):
"""Create users with default attributes."""
class Meta:
model = User
email = factory.Sequence(lambda n: 'user.{}@test.test'.format(n))
password = make_password('password')
class FriendshipFactory(factory.django.DjangoModelFactory):
class Meta:
model = Friendship
initiator = factory.SubFactory(UserFactory)
recipient = factory.SubFactory(UserFactory)
|
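Note on the commit above: importing a concrete User class breaks as soon as a project swaps in a custom user model via the AUTH_USER_MODEL setting; get_user_model() resolves whatever model is currently active. A sketch of the two standard forms, assuming a configured, reasonably recent Django project (the Note model and active_users helper are hypothetical):

from django.conf import settings
from django.contrib.auth import get_user_model
from django.db import models

class Note(models.Model):
    # In model fields, use the lazy string reference so migrations
    # remain valid even if AUTH_USER_MODEL changes later.
    author = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)

def active_users():
    # Resolve the concrete class at call time, not import time.
    return get_user_model().objects.filter(is_active=True)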
b9ccbb2addd8dcaeb100bb5e95768caa2a97c280
|
srttools/core/__init__.py
|
srttools/core/__init__.py
|
import warnings
try:
import matplotlib
# matplotlib.use('TkAgg')
HAS_MPL = True
except ImportError:
HAS_MPL = False
try:
import statsmodels.api as sm
HAS_STATSM = True
except ImportError:
HAS_STATSM = False
try:
from numba import jit, vectorize
except ImportError:
warnings.warn("Numba not installed. Faking it")
def jit(fun):
return fun
def vectorize(*args, **kwargs):
return jit
|
import warnings
DEFAULT_MPL_BACKEND = 'TkAgg'
try:
import matplotlib
# This is necessary. Random backends might respond incorrectly.
matplotlib.use(DEFAULT_MPL_BACKEND)
HAS_MPL = True
except ImportError:
HAS_MPL = False
try:
import statsmodels.api as sm
    version = tuple(int(i) for i in sm.version.version.split('.'))
# Minimum version 0.8.0
if version < (0, 8, 0):
warnings.warn("Please update statsmodels")
raise ImportError
HAS_STATSM = True
except ImportError:
HAS_STATSM = False
try:
from numba import jit, vectorize
except ImportError:
warnings.warn("Numba not installed. Faking it")
def jit(fun):
return fun
def vectorize(*args, **kwargs):
return jit
|
Set default backend, and minimum statsmodels version
|
Set default backend, and minimum statsmodels version
|
Python
|
bsd-3-clause
|
matteobachetti/srt-single-dish-tools
|
import warnings
try:
import matplotlib
# matplotlib.use('TkAgg')
HAS_MPL = True
except ImportError:
HAS_MPL = False
try:
import statsmodels.api as sm
HAS_STATSM = True
except ImportError:
HAS_STATSM = False
try:
from numba import jit, vectorize
except ImportError:
warnings.warn("Numba not installed. Faking it")
def jit(fun):
return fun
def vectorize(*args, **kwargs):
return jit
Set default backend, and minimum statsmodels version
|
import warnings
DEFAULT_MPL_BACKEND = 'TkAgg'
try:
import matplotlib
# This is necessary. Random backends might respond incorrectly.
matplotlib.use(DEFAULT_MPL_BACKEND)
HAS_MPL = True
except ImportError:
HAS_MPL = False
try:
import statsmodels.api as sm
    version = tuple(int(i) for i in sm.version.version.split('.'))
# Minimum version 0.8.0
if version < (0, 8, 0):
warnings.warn("Please update statsmodels")
raise ImportError
HAS_STATSM = True
except ImportError:
HAS_STATSM = False
try:
from numba import jit, vectorize
except ImportError:
warnings.warn("Numba not installed. Faking it")
def jit(fun):
return fun
def vectorize(*args, **kwargs):
return jit
|
<commit_before>import warnings
try:
import matplotlib
# matplotlib.use('TkAgg')
HAS_MPL = True
except ImportError:
HAS_MPL = False
try:
import statsmodels.api as sm
HAS_STATSM = True
except ImportError:
HAS_STATSM = False
try:
from numba import jit, vectorize
except ImportError:
warnings.warn("Numba not installed. Faking it")
def jit(fun):
return fun
def vectorize(*args, **kwargs):
return jit
<commit_msg>Set default backend, and minimum statsmodels version<commit_after>
|
import warnings
DEFAULT_MPL_BACKEND = 'TkAgg'
try:
import matplotlib
# This is necessary. Random backends might respond incorrectly.
matplotlib.use(DEFAULT_MPL_BACKEND)
HAS_MPL = True
except ImportError:
HAS_MPL = False
try:
import statsmodels.api as sm
    version = tuple(int(i) for i in sm.version.version.split('.'))
# Minimum version 0.8.0
if version < (0, 8, 0):
warnings.warn("Please update statsmodels")
raise ImportError
HAS_STATSM = True
except ImportError:
HAS_STATSM = False
try:
from numba import jit, vectorize
except ImportError:
warnings.warn("Numba not installed. Faking it")
def jit(fun):
return fun
def vectorize(*args, **kwargs):
return jit
|
import warnings
try:
import matplotlib
# matplotlib.use('TkAgg')
HAS_MPL = True
except ImportError:
HAS_MPL = False
try:
import statsmodels.api as sm
HAS_STATSM = True
except ImportError:
HAS_STATSM = False
try:
from numba import jit, vectorize
except ImportError:
warnings.warn("Numba not installed. Faking it")
def jit(fun):
return fun
def vectorize(*args, **kwargs):
return jit
Set default backend, and minimum statsmodels version
import warnings
DEFAULT_MPL_BACKEND = 'TkAgg'
try:
import matplotlib
# This is necessary. Random backends might respond incorrectly.
matplotlib.use(DEFAULT_MPL_BACKEND)
HAS_MPL = True
except ImportError:
HAS_MPL = False
try:
import statsmodels.api as sm
    version = tuple(int(i) for i in sm.version.version.split('.'))
# Minimum version 0.8.0
if version < (0, 8, 0):
warnings.warn("Please update statsmodels")
raise ImportError
HAS_STATSM = True
except ImportError:
HAS_STATSM = False
try:
from numba import jit, vectorize
except ImportError:
warnings.warn("Numba not installed. Faking it")
def jit(fun):
return fun
def vectorize(*args, **kwargs):
return jit
|
<commit_before>import warnings
try:
import matplotlib
# matplotlib.use('TkAgg')
HAS_MPL = True
except ImportError:
HAS_MPL = False
try:
import statsmodels.api as sm
HAS_STATSM = True
except ImportError:
HAS_STATSM = False
try:
from numba import jit, vectorize
except ImportError:
warnings.warn("Numba not installed. Faking it")
def jit(fun):
return fun
def vectorize(*args, **kwargs):
return jit
<commit_msg>Set default backend, and minimum statsmodels version<commit_after>import warnings
DEFAULT_MPL_BACKEND = 'TkAgg'
try:
import matplotlib
# This is necessary. Random backends might respond incorrectly.
matplotlib.use(DEFAULT_MPL_BACKEND)
HAS_MPL = True
except ImportError:
HAS_MPL = False
try:
import statsmodels.api as sm
    version = tuple(int(i) for i in sm.version.version.split('.'))
# Minimum version 0.8.0
if version < (0, 8, 0):
warnings.warn("Please update statsmodels")
raise ImportError
HAS_STATSM = True
except ImportError:
HAS_STATSM = False
try:
from numba import jit, vectorize
except ImportError:
warnings.warn("Numba not installed. Faking it")
def jit(fun):
return fun
def vectorize(*args, **kwargs):
return jit
|
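Note on the commit above: two techniques are in play. First, matplotlib.use() only takes effect if it runs before pyplot is imported, which is why the commit uncomments the call right after import matplotlib. Second, version checks should compare sequences of ints, not strings (and both sides must be the same sequence type, hence the tuple form used above). A standalone sketch of the version check; parse_version here is illustrative and assumes plain numeric components (no '0.9.0rc1'):

def parse_version(version_string):
    # '0.8.0' -> (0, 8, 0); tuples of ints compare element-wise.
    return tuple(int(part) for part in version_string.split('.'))

assert parse_version('0.10.1') > parse_version('0.8.0')  # numeric comparison
assert '0.10.1' < '0.8.0'  # the lexicographic trap this avoids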
ab02c54cc713cc10c60f09dde3cae2fca3c2a9a4
|
conference/management/commands/make_speaker_profiles_public.py
|
conference/management/commands/make_speaker_profiles_public.py
|
from django.core.management.base import BaseCommand, CommandError
from conference import models as cmodels
def make_speaker_profiles_public_for_conference(conference):
# Get speaker records
speakers = set()
talks = cmodels.Talk.objects.accepted(conference)
for t in talks:
speakers |= set(t.get_all_speakers())
for speaker in speakers:
user = speaker.user
profile = cmodels.AttendeeProfile.objects.get(user=user)
if profile.visibility != 'p':
print ('Setting profile %r to public' % profile)
profile.visibility = 'p'
profile.save()
class Command(BaseCommand):
""" When accepting talks via database updates, the speaker profiles are
not automatically set to public. This command fixes this.
Argument: <conference year>
"""
args = '<conference>'
def handle(self, *args, **options):
try:
conference = args[0]
except IndexError:
raise CommandError('conference not specified')
make_speaker_profiles_public_for_conference(conference)
|
from django.core.management.base import BaseCommand, CommandError
from conference import models as cmodels
def make_speaker_profiles_public_for_conference(conference):
# Get speaker records
speakers = set()
talks = cmodels.Talk.objects.accepted(conference)
for t in talks:
speakers |= set(t.get_all_speakers())
for speaker in speakers:
user = speaker.user
profile = cmodels.AttendeeProfile.objects.get(user=user)
if profile.visibility != 'p':
print ('Setting profile %r to public' % profile)
profile.visibility = 'p'
profile.save()
class Command(BaseCommand):
""" When accepting talks via database updates, the speaker profiles are
not automatically set to public. This command fixes this.
Argument: <conference year>
"""
def add_arguments(self, parser):
# Positional arguments
parser.add_argument('conference')
def handle(self, *args, **options):
try:
conference = options['conference']
except KeyError:
raise CommandError('conference not specified')
make_speaker_profiles_public_for_conference(conference)
|
Fix script to make speaker profiles public.
|
Fix script to make speaker profiles public.
|
Python
|
bsd-2-clause
|
EuroPython/epcon,EuroPython/epcon,EuroPython/epcon,EuroPython/epcon
|
from django.core.management.base import BaseCommand, CommandError
from conference import models as cmodels
def make_speaker_profiles_public_for_conference(conference):
# Get speaker records
speakers = set()
talks = cmodels.Talk.objects.accepted(conference)
for t in talks:
speakers |= set(t.get_all_speakers())
for speaker in speakers:
user = speaker.user
profile = cmodels.AttendeeProfile.objects.get(user=user)
if profile.visibility != 'p':
print ('Setting profile %r to public' % profile)
profile.visibility = 'p'
profile.save()
class Command(BaseCommand):
""" When accepting talks via database updates, the speaker profiles are
not automatically set to public. This command fixes this.
Argument: <conference year>
"""
args = '<conference>'
def handle(self, *args, **options):
try:
conference = args[0]
except IndexError:
raise CommandError('conference not specified')
make_speaker_profiles_public_for_conference(conference)
Fix script to make speaker profiles public.
|
from django.core.management.base import BaseCommand, CommandError
from conference import models as cmodels
def make_speaker_profiles_public_for_conference(conference):
# Get speaker records
speakers = set()
talks = cmodels.Talk.objects.accepted(conference)
for t in talks:
speakers |= set(t.get_all_speakers())
for speaker in speakers:
user = speaker.user
profile = cmodels.AttendeeProfile.objects.get(user=user)
if profile.visibility != 'p':
print ('Setting profile %r to public' % profile)
profile.visibility = 'p'
profile.save()
class Command(BaseCommand):
""" When accepting talks via database updates, the speaker profiles are
not automatically set to public. This command fixes this.
Argument: <conference year>
"""
def add_arguments(self, parser):
# Positional arguments
parser.add_argument('conference')
def handle(self, *args, **options):
try:
conference = options['conference']
except KeyError:
raise CommandError('conference not specified')
make_speaker_profiles_public_for_conference(conference)
|
<commit_before>
from django.core.management.base import BaseCommand, CommandError
from conference import models as cmodels
def make_speaker_profiles_public_for_conference(conference):
# Get speaker records
speakers = set()
talks = cmodels.Talk.objects.accepted(conference)
for t in talks:
speakers |= set(t.get_all_speakers())
for speaker in speakers:
user = speaker.user
profile = cmodels.AttendeeProfile.objects.get(user=user)
if profile.visibility != 'p':
print ('Setting profile %r to public' % profile)
profile.visibility = 'p'
profile.save()
class Command(BaseCommand):
""" When accepting talks via database updates, the speaker profiles are
not automatically set to public. This command fixes this.
Argument: <conference year>
"""
args = '<conference>'
def handle(self, *args, **options):
try:
conference = args[0]
except IndexError:
raise CommandError('conference not specified')
make_speaker_profiles_public_for_conference(conference)
<commit_msg>Fix script to make speaker profiles public.<commit_after>
|
from django.core.management.base import BaseCommand, CommandError
from conference import models as cmodels
def make_speaker_profiles_public_for_conference(conference):
# Get speaker records
speakers = set()
talks = cmodels.Talk.objects.accepted(conference)
for t in talks:
speakers |= set(t.get_all_speakers())
for speaker in speakers:
user = speaker.user
profile = cmodels.AttendeeProfile.objects.get(user=user)
if profile.visibility != 'p':
print ('Setting profile %r to public' % profile)
profile.visibility = 'p'
profile.save()
class Command(BaseCommand):
""" When accepting talks via database updates, the speaker profiles are
not automatically set to public. This command fixes this.
Argument: <conference year>
"""
def add_arguments(self, parser):
# Positional arguments
parser.add_argument('conference')
def handle(self, *args, **options):
try:
conference = options['conference']
except KeyError:
raise CommandError('conference not specified')
make_speaker_profiles_public_for_conference(conference)
|
from django.core.management.base import BaseCommand, CommandError
from conference import models as cmodels
def make_speaker_profiles_public_for_conference(conference):
# Get speaker records
speakers = set()
talks = cmodels.Talk.objects.accepted(conference)
for t in talks:
speakers |= set(t.get_all_speakers())
for speaker in speakers:
user = speaker.user
profile = cmodels.AttendeeProfile.objects.get(user=user)
if profile.visibility != 'p':
print ('Setting profile %r to public' % profile)
profile.visibility = 'p'
profile.save()
class Command(BaseCommand):
""" When accepting talks via database updates, the speaker profiles are
not automatically set to public. This command fixes this.
Argument: <conference year>
"""
args = '<conference>'
def handle(self, *args, **options):
try:
conference = args[0]
except IndexError:
raise CommandError('conference not specified')
make_speaker_profiles_public_for_conference(conference)
Fix script to make speaker profiles public.
from django.core.management.base import BaseCommand, CommandError
from conference import models as cmodels
def make_speaker_profiles_public_for_conference(conference):
# Get speaker records
speakers = set()
talks = cmodels.Talk.objects.accepted(conference)
for t in talks:
speakers |= set(t.get_all_speakers())
for speaker in speakers:
user = speaker.user
profile = cmodels.AttendeeProfile.objects.get(user=user)
if profile.visibility != 'p':
print ('Setting profile %r to public' % profile)
profile.visibility = 'p'
profile.save()
class Command(BaseCommand):
""" When accepting talks via database updates, the speaker profiles are
not automatically set to public. This command fixes this.
Argument: <conference year>
"""
def add_arguments(self, parser):
# Positional arguments
parser.add_argument('conference')
def handle(self, *args, **options):
try:
conference = options['conference']
except KeyError:
raise CommandError('conference not specified')
make_speaker_profiles_public_for_conference(conference)
|
<commit_before>
from django.core.management.base import BaseCommand, CommandError
from conference import models as cmodels
def make_speaker_profiles_public_for_conference(conference):
# Get speaker records
speakers = set()
talks = cmodels.Talk.objects.accepted(conference)
for t in talks:
speakers |= set(t.get_all_speakers())
for speaker in speakers:
user = speaker.user
profile = cmodels.AttendeeProfile.objects.get(user=user)
if profile.visibility != 'p':
print ('Setting profile %r to public' % profile)
profile.visibility = 'p'
profile.save()
class Command(BaseCommand):
""" When accepting talks via database updates, the speaker profiles are
not automatically set to public. This command fixes this.
Argument: <conference year>
"""
args = '<conference>'
def handle(self, *args, **options):
try:
conference = args[0]
except IndexError:
raise CommandError('conference not specified')
make_speaker_profiles_public_for_conference(conference)
<commit_msg>Fix script to make speaker profiles public.<commit_after>
from django.core.management.base import BaseCommand, CommandError
from conference import models as cmodels
def make_speaker_profiles_public_for_conference(conference):
# Get speaker records
speakers = set()
talks = cmodels.Talk.objects.accepted(conference)
for t in talks:
speakers |= set(t.get_all_speakers())
for speaker in speakers:
user = speaker.user
profile = cmodels.AttendeeProfile.objects.get(user=user)
if profile.visibility != 'p':
print ('Setting profile %r to public' % profile)
profile.visibility = 'p'
profile.save()
class Command(BaseCommand):
""" When accepting talks via database updates, the speaker profiles are
not automatically set to public. This command fixes this.
Argument: <conference year>
"""
def add_arguments(self, parser):
# Positional arguments
parser.add_argument('conference')
def handle(self, *args, **options):
try:
conference = options['conference']
except KeyError:
raise CommandError('conference not specified')
make_speaker_profiles_public_for_conference(conference)
|
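Note on the commit above: Django 1.8 replaced the old args class attribute with argparse via add_arguments(); declared positionals arrive in the options dict, and argparse itself aborts with a usage error when one is missing, so the try/except in the record is defensive only. A minimal sketch of the pattern, assuming a Django 1.8+ project (the command's help text and --dry-run flag are hypothetical):

from django.core.management.base import BaseCommand

class Command(BaseCommand):
    help = 'Process a single conference.'

    def add_arguments(self, parser):
        parser.add_argument('conference')  # required positional
        parser.add_argument('--dry-run', action='store_true',
                            help='report without writing')

    def handle(self, *args, **options):
        conference = options['conference']  # always present: argparse enforces it
        verb = 'Would process' if options['dry_run'] else 'Processing'
        self.stdout.write('%s %s' % (verb, conference))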
6ce05a55b2318f1ad567c8e4345fb286777b53e6
|
ndohyep/settings/production.py
|
ndohyep/settings/production.py
|
from .base import *
# Disable debug mode
DEBUG = False
TEMPLATE_DEBUG = False
# Compress static files offline
# http://django-compressor.readthedocs.org/en/latest/settings/#django.conf.settings.COMPRESS_OFFLINE
COMPRESS_OFFLINE = True
# Send notification emails as a background task using Celery,
# to prevent this from blocking web server threads
# (requires the django-celery package):
# http://celery.readthedocs.org/en/latest/configuration.html
# import djcelery
#
# djcelery.setup_loader()
#
# CELERY_SEND_TASK_ERROR_EMAILS = True
# BROKER_URL = 'redis://'
EMAIL_SUBJECT_PREFIX = '[b-wise] '
WAGTAILADMIN_NOTIFICATION_FROM_EMAIL = 'no-reply@b-wise.mobi'
# Use Redis as the cache backend for extra performance
# (requires the django-redis-cache package):
# http://wagtail.readthedocs.org/en/latest/howto/performance.html#cache
# CACHES = {
# 'default': {
# 'BACKEND': 'redis_cache.cache.RedisCache',
# 'LOCATION': '127.0.0.1:6379',
# 'KEY_PREFIX': 'base',
# 'OPTIONS': {
# 'CLIENT_CLASS': 'redis_cache.client.DefaultClient',
# }
# }
# }
try:
from .local import *
except ImportError:
pass
|
from .base import *
# Enable debug mode for template debugging
DEBUG = True
TEMPLATE_DEBUG = True
# Compress static files offline
# http://django-compressor.readthedocs.org/en/latest/settings/#django.conf.settings.COMPRESS_OFFLINE
COMPRESS_OFFLINE = True
# Send notification emails as a background task using Celery,
# to prevent this from blocking web server threads
# (requires the django-celery package):
# http://celery.readthedocs.org/en/latest/configuration.html
# import djcelery
#
# djcelery.setup_loader()
#
# CELERY_SEND_TASK_ERROR_EMAILS = True
# BROKER_URL = 'redis://'
EMAIL_SUBJECT_PREFIX = '[b-wise] '
WAGTAILADMIN_NOTIFICATION_FROM_EMAIL = 'no-reply@b-wise.mobi'
# Use Redis as the cache backend for extra performance
# (requires the django-redis-cache package):
# http://wagtail.readthedocs.org/en/latest/howto/performance.html#cache
# CACHES = {
# 'default': {
# 'BACKEND': 'redis_cache.cache.RedisCache',
# 'LOCATION': '127.0.0.1:6379',
# 'KEY_PREFIX': 'base',
# 'OPTIONS': {
# 'CLIENT_CLASS': 'redis_cache.client.DefaultClient',
# }
# }
# }
try:
from .local import *
except ImportError:
pass
|
Set debug to true for template debugging
|
Set debug to true for template debugging
|
Python
|
bsd-2-clause
|
praekelt/molo-ndoh-yep,praekelt/molo-ndoh-yep,praekelt/molo-ndoh-yep,praekelt/molo-ndoh-yep
|
from .base import *
# Disable debug mode
DEBUG = False
TEMPLATE_DEBUG = False
# Compress static files offline
# http://django-compressor.readthedocs.org/en/latest/settings/#django.conf.settings.COMPRESS_OFFLINE
COMPRESS_OFFLINE = True
# Send notification emails as a background task using Celery,
# to prevent this from blocking web server threads
# (requires the django-celery package):
# http://celery.readthedocs.org/en/latest/configuration.html
# import djcelery
#
# djcelery.setup_loader()
#
# CELERY_SEND_TASK_ERROR_EMAILS = True
# BROKER_URL = 'redis://'
EMAIL_SUBJECT_PREFIX = '[b-wise] '
WAGTAILADMIN_NOTIFICATION_FROM_EMAIL = 'no-reply@b-wise.mobi'
# Use Redis as the cache backend for extra performance
# (requires the django-redis-cache package):
# http://wagtail.readthedocs.org/en/latest/howto/performance.html#cache
# CACHES = {
# 'default': {
# 'BACKEND': 'redis_cache.cache.RedisCache',
# 'LOCATION': '127.0.0.1:6379',
# 'KEY_PREFIX': 'base',
# 'OPTIONS': {
# 'CLIENT_CLASS': 'redis_cache.client.DefaultClient',
# }
# }
# }
try:
from .local import *
except ImportError:
pass
Set debug to true for template debugging
|
from .base import *
# Enable debug mode for template debugging
DEBUG = True
TEMPLATE_DEBUG = True
# Compress static files offline
# http://django-compressor.readthedocs.org/en/latest/settings/#django.conf.settings.COMPRESS_OFFLINE
COMPRESS_OFFLINE = True
# Send notification emails as a background task using Celery,
# to prevent this from blocking web server threads
# (requires the django-celery package):
# http://celery.readthedocs.org/en/latest/configuration.html
# import djcelery
#
# djcelery.setup_loader()
#
# CELERY_SEND_TASK_ERROR_EMAILS = True
# BROKER_URL = 'redis://'
EMAIL_SUBJECT_PREFIX = '[b-wise] '
WAGTAILADMIN_NOTIFICATION_FROM_EMAIL = 'no-reply@b-wise.mobi'
# Use Redis as the cache backend for extra performance
# (requires the django-redis-cache package):
# http://wagtail.readthedocs.org/en/latest/howto/performance.html#cache
# CACHES = {
# 'default': {
# 'BACKEND': 'redis_cache.cache.RedisCache',
# 'LOCATION': '127.0.0.1:6379',
# 'KEY_PREFIX': 'base',
# 'OPTIONS': {
# 'CLIENT_CLASS': 'redis_cache.client.DefaultClient',
# }
# }
# }
try:
from .local import *
except ImportError:
pass
|
<commit_before>from .base import *
# Disable debug mode
DEBUG = False
TEMPLATE_DEBUG = False
# Compress static files offline
# http://django-compressor.readthedocs.org/en/latest/settings/#django.conf.settings.COMPRESS_OFFLINE
COMPRESS_OFFLINE = True
# Send notification emails as a background task using Celery,
# to prevent this from blocking web server threads
# (requires the django-celery package):
# http://celery.readthedocs.org/en/latest/configuration.html
# import djcelery
#
# djcelery.setup_loader()
#
# CELERY_SEND_TASK_ERROR_EMAILS = True
# BROKER_URL = 'redis://'
EMAIL_SUBJECT_PREFIX = '[b-wise] '
WAGTAILADMIN_NOTIFICATION_FROM_EMAIL = 'no-reply@b-wise.mobi'
# Use Redis as the cache backend for extra performance
# (requires the django-redis-cache package):
# http://wagtail.readthedocs.org/en/latest/howto/performance.html#cache
# CACHES = {
# 'default': {
# 'BACKEND': 'redis_cache.cache.RedisCache',
# 'LOCATION': '127.0.0.1:6379',
# 'KEY_PREFIX': 'base',
# 'OPTIONS': {
# 'CLIENT_CLASS': 'redis_cache.client.DefaultClient',
# }
# }
# }
try:
from .local import *
except ImportError:
pass
<commit_msg>Set debug to true for template debugging<commit_after>
|
from .base import *
# Disable debug mode
DEBUG = True
TEMPLATE_DEBUG = True
# Compress static files offline
# http://django-compressor.readthedocs.org/en/latest/settings/#django.conf.settings.COMPRESS_OFFLINE
COMPRESS_OFFLINE = True
# Send notification emails as a background task using Celery,
# to prevent this from blocking web server threads
# (requires the django-celery package):
# http://celery.readthedocs.org/en/latest/configuration.html
# import djcelery
#
# djcelery.setup_loader()
#
# CELERY_SEND_TASK_ERROR_EMAILS = True
# BROKER_URL = 'redis://'
EMAIL_SUBJECT_PREFIX = '[b-wise] '
WAGTAILADMIN_NOTIFICATION_FROM_EMAIL = 'no-reply@b-wise.mobi'
# Use Redis as the cache backend for extra performance
# (requires the django-redis-cache package):
# http://wagtail.readthedocs.org/en/latest/howto/performance.html#cache
# CACHES = {
# 'default': {
# 'BACKEND': 'redis_cache.cache.RedisCache',
# 'LOCATION': '127.0.0.1:6379',
# 'KEY_PREFIX': 'base',
# 'OPTIONS': {
# 'CLIENT_CLASS': 'redis_cache.client.DefaultClient',
# }
# }
# }
try:
from .local import *
except ImportError:
pass
|
from .base import *
# Disable debug mode
DEBUG = False
TEMPLATE_DEBUG = False
# Compress static files offline
# http://django-compressor.readthedocs.org/en/latest/settings/#django.conf.settings.COMPRESS_OFFLINE
COMPRESS_OFFLINE = True
# Send notification emails as a background task using Celery,
# to prevent this from blocking web server threads
# (requires the django-celery package):
# http://celery.readthedocs.org/en/latest/configuration.html
# import djcelery
#
# djcelery.setup_loader()
#
# CELERY_SEND_TASK_ERROR_EMAILS = True
# BROKER_URL = 'redis://'
EMAIL_SUBJECT_PREFIX = '[b-wise] '
WAGTAILADMIN_NOTIFICATION_FROM_EMAIL = 'no-reply@b-wise.mobi'
# Use Redis as the cache backend for extra performance
# (requires the django-redis-cache package):
# http://wagtail.readthedocs.org/en/latest/howto/performance.html#cache
# CACHES = {
# 'default': {
# 'BACKEND': 'redis_cache.cache.RedisCache',
# 'LOCATION': '127.0.0.1:6379',
# 'KEY_PREFIX': 'base',
# 'OPTIONS': {
# 'CLIENT_CLASS': 'redis_cache.client.DefaultClient',
# }
# }
# }
try:
from .local import *
except ImportError:
pass
Set debug to true for template debugging
from .base import *
# Disable debug mode
DEBUG = True
TEMPLATE_DEBUG = True
# Compress static files offline
# http://django-compressor.readthedocs.org/en/latest/settings/#django.conf.settings.COMPRESS_OFFLINE
COMPRESS_OFFLINE = True
# Send notification emails as a background task using Celery,
# to prevent this from blocking web server threads
# (requires the django-celery package):
# http://celery.readthedocs.org/en/latest/configuration.html
# import djcelery
#
# djcelery.setup_loader()
#
# CELERY_SEND_TASK_ERROR_EMAILS = True
# BROKER_URL = 'redis://'
EMAIL_SUBJECT_PREFIX = '[b-wise] '
WAGTAILADMIN_NOTIFICATION_FROM_EMAIL = 'no-reply@b-wise.mobi'
# Use Redis as the cache backend for extra performance
# (requires the django-redis-cache package):
# http://wagtail.readthedocs.org/en/latest/howto/performance.html#cache
# CACHES = {
# 'default': {
# 'BACKEND': 'redis_cache.cache.RedisCache',
# 'LOCATION': '127.0.0.1:6379',
# 'KEY_PREFIX': 'base',
# 'OPTIONS': {
# 'CLIENT_CLASS': 'redis_cache.client.DefaultClient',
# }
# }
# }
try:
from .local import *
except ImportError:
pass
|
<commit_before>from .base import *
# Disable debug mode
DEBUG = False
TEMPLATE_DEBUG = False
# Compress static files offline
# http://django-compressor.readthedocs.org/en/latest/settings/#django.conf.settings.COMPRESS_OFFLINE
COMPRESS_OFFLINE = True
# Send notification emails as a background task using Celery,
# to prevent this from blocking web server threads
# (requires the django-celery package):
# http://celery.readthedocs.org/en/latest/configuration.html
# import djcelery
#
# djcelery.setup_loader()
#
# CELERY_SEND_TASK_ERROR_EMAILS = True
# BROKER_URL = 'redis://'
EMAIL_SUBJECT_PREFIX = '[b-wise] '
WAGTAILADMIN_NOTIFICATION_FROM_EMAIL = 'no-reply@b-wise.mobi'
# Use Redis as the cache backend for extra performance
# (requires the django-redis-cache package):
# http://wagtail.readthedocs.org/en/latest/howto/performance.html#cache
# CACHES = {
# 'default': {
# 'BACKEND': 'redis_cache.cache.RedisCache',
# 'LOCATION': '127.0.0.1:6379',
# 'KEY_PREFIX': 'base',
# 'OPTIONS': {
# 'CLIENT_CLASS': 'redis_cache.client.DefaultClient',
# }
# }
# }
try:
from .local import *
except ImportError:
pass
<commit_msg>Set debug to true for template debugging<commit_after>from .base import *
# Disable debug mode
DEBUG = True
TEMPLATE_DEBUG = True
# Compress static files offline
# http://django-compressor.readthedocs.org/en/latest/settings/#django.conf.settings.COMPRESS_OFFLINE
COMPRESS_OFFLINE = True
# Send notification emails as a background task using Celery,
# to prevent this from blocking web server threads
# (requires the django-celery package):
# http://celery.readthedocs.org/en/latest/configuration.html
# import djcelery
#
# djcelery.setup_loader()
#
# CELERY_SEND_TASK_ERROR_EMAILS = True
# BROKER_URL = 'redis://'
EMAIL_SUBJECT_PREFIX = '[b-wise] '
WAGTAILADMIN_NOTIFICATION_FROM_EMAIL = 'no-reply@b-wise.mobi'
# Use Redis as the cache backend for extra performance
# (requires the django-redis-cache package):
# http://wagtail.readthedocs.org/en/latest/howto/performance.html#cache
# CACHES = {
# 'default': {
# 'BACKEND': 'redis_cache.cache.RedisCache',
# 'LOCATION': '127.0.0.1:6379',
# 'KEY_PREFIX': 'base',
# 'OPTIONS': {
# 'CLIENT_CLASS': 'redis_cache.client.DefaultClient',
# }
# }
# }
try:
from .local import *
except ImportError:
pass
|
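The settings record above follows Django's common base/local split: the module imports everything from `base.py`, flips the `DEBUG` flags, and finally tries to pull in an optional, untracked `local.py` for machine-specific overrides. A minimal sketch of such a `local.py` is below; the file name is the only thing the snippets above guarantee, and every value in it is an illustrative assumption, not taken from the repo.

# local.py -- optional, machine-specific overrides; kept out of version control.
# All settings below are illustrative assumptions.
DEBUG = True
TEMPLATE_DEBUG = DEBUG

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': 'local.db',
    }
}

# Loaded last by the settings module shown above:
#   try:
#       from .local import *
#   except ImportError:
#       pass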
b875f457d7a4926f5028428ead4cecc75af90c2e
|
examples/launch_cloud_harness.py
|
examples/launch_cloud_harness.py
|
import json
import os
from osgeo import gdal
from gbdxtools import Interface
from gbdx_task_template import TaskTemplate, Task, InputPort, OutputPort
gbdx = Interface()
# data = "s3://receiving-dgcs-tdgplatform-com/054813633050_01_003" # WV02 Image over San Francisco
# aoptask = gbdx.Task("AOP_Strip_Processor", data=data, enable_acomp=True, enable_pansharpen=True)
class RasterMetaApp(TaskTemplate):
task = Task("RasterMetaTask")
task.input_raster = InputPort(value="/Users/michaelconnor/demo_image")
task.output_meta = OutputPort(value="/Users/michaelconnor")
def invoke(self):
images = self.task.input_raster.list_files(extensions=[".tiff", ".tif"])
# Magic Starts here
for img in images:
header = "META FOR %s\n\n" % os.path.basename(img)
gtif = gdal.Open(img)
self.task.output_meta.write('metadata.txt', header)
self.task.output_meta.write('metadata.txt', json.dumps(gtif.GetMetadata(), indent=2))
# Create a cloud-harness
ch_task = gbdx.Task(RasterMetaApp)
# NOTE: This will override the value in the class definition above.
ch_task.inputs.input_raster = 's3://test-tdgplatform-com/data/envi_src/sm_tiff' # Overwrite the value from
workflow = gbdx.Workflow([ch_task])
# workflow = gbdx.Workflow([aoptask, ch_task])
workflow.savedata(ch_task.outputs.output_meta, location='CH_OUT')
# workflow.savedata(aoptask.outputs.data, location='AOP_OUT')
# NOTE: Always required because the source bundle must be uploaded.
ch_task.upload_input_ports()
print(workflow.generate_workflow_description())
print(workflow.execute())
|
from gbdxtools import Interface
gbdx = Interface()
# Create a cloud-harness gbdxtools Task
from ch_tasks.cp_task import CopyTask
cp_task = gbdx.Task(CopyTask)
from ch_tasks.raster_meta import RasterMetaTask
ch_task = gbdx.Task(RasterMetaTask)
# NOTE: This will override the value in the class definition.
ch_task.inputs.input_raster = cp_task.outputs.output_data.value # Overwrite the value from
workflow = gbdx.Workflow([cp_task, ch_task])
workflow.savedata(cp_task.outputs.output_data, location='CH_Demo/output_data')
workflow.savedata(ch_task.outputs.output_meta, location='CH_Demo/output_meta')
print(workflow.execute()) # Will upload cloud-harness ports before executing
# print(workflow.generate_workflow_description())
|
Remove the cloud-harness task and add second cloud-harness task for chaining.
|
Remove the cloud-harness task and add second cloud-harness task for chaining.
|
Python
|
mit
|
michaelconnor00/gbdxtools,michaelconnor00/gbdxtools
|
import json
import os
from osgeo import gdal
from gbdxtools import Interface
from gbdx_task_template import TaskTemplate, Task, InputPort, OutputPort
gbdx = Interface()
# data = "s3://receiving-dgcs-tdgplatform-com/054813633050_01_003" # WV02 Image over San Francisco
# aoptask = gbdx.Task("AOP_Strip_Processor", data=data, enable_acomp=True, enable_pansharpen=True)
class RasterMetaApp(TaskTemplate):
task = Task("RasterMetaTask")
task.input_raster = InputPort(value="/Users/michaelconnor/demo_image")
task.output_meta = OutputPort(value="/Users/michaelconnor")
def invoke(self):
images = self.task.input_raster.list_files(extensions=[".tiff", ".tif"])
# Magic Starts here
for img in images:
header = "META FOR %s\n\n" % os.path.basename(img)
gtif = gdal.Open(img)
self.task.output_meta.write('metadata.txt', header)
self.task.output_meta.write('metadata.txt', json.dumps(gtif.GetMetadata(), indent=2))
# Create a cloud-harness
ch_task = gbdx.Task(RasterMetaApp)
# NOTE: This will override the value in the class definition above.
ch_task.inputs.input_raster = 's3://test-tdgplatform-com/data/envi_src/sm_tiff' # Overwrite the value from
workflow = gbdx.Workflow([ch_task])
# workflow = gbdx.Workflow([aoptask, ch_task])
workflow.savedata(ch_task.outputs.output_meta, location='CH_OUT')
# workflow.savedata(aoptask.outputs.data, location='AOP_OUT')
# NOTE: Always required because the source bundle must be uploaded.
ch_task.upload_input_ports()
print(workflow.generate_workflow_description())
print(workflow.execute())
Remove the cloud-harness task and add second cloud-harness task for chaining.
|
from gbdxtools import Interface
gbdx = Interface()
# Create a cloud-harness gbdxtools Task
from ch_tasks.cp_task import CopyTask
cp_task = gbdx.Task(CopyTask)
from ch_tasks.raster_meta import RasterMetaTask
ch_task = gbdx.Task(RasterMetaTask)
# NOTE: This will override the value in the class definition.
ch_task.inputs.input_raster = cp_task.outputs.output_data.value # Overwrite the value from
workflow = gbdx.Workflow([cp_task, ch_task])
workflow.savedata(cp_task.outputs.output_data, location='CH_Demo/output_data')
workflow.savedata(ch_task.outputs.output_meta, location='CH_Demo/output_meta')
print(workflow.execute()) # Will upload cloud-harness ports before executing
# print(workflow.generate_workflow_description())
|
<commit_before>import json
import os
from osgeo import gdal
from gbdxtools import Interface
from gbdx_task_template import TaskTemplate, Task, InputPort, OutputPort
gbdx = Interface()
# data = "s3://receiving-dgcs-tdgplatform-com/054813633050_01_003" # WV02 Image over San Francisco
# aoptask = gbdx.Task("AOP_Strip_Processor", data=data, enable_acomp=True, enable_pansharpen=True)
class RasterMetaApp(TaskTemplate):
task = Task("RasterMetaTask")
task.input_raster = InputPort(value="/Users/michaelconnor/demo_image")
task.output_meta = OutputPort(value="/Users/michaelconnor")
def invoke(self):
images = self.task.input_raster.list_files(extensions=[".tiff", ".tif"])
# Magic Starts here
for img in images:
header = "META FOR %s\n\n" % os.path.basename(img)
gtif = gdal.Open(img)
self.task.output_meta.write('metadata.txt', header)
self.task.output_meta.write('metadata.txt', json.dumps(gtif.GetMetadata(), indent=2))
# Create a cloud-harness
ch_task = gbdx.Task(RasterMetaApp)
# NOTE: This will override the value in the class definition above.
ch_task.inputs.input_raster = 's3://test-tdgplatform-com/data/envi_src/sm_tiff' # Overwrite the value from
workflow = gbdx.Workflow([ch_task])
# workflow = gbdx.Workflow([aoptask, ch_task])
workflow.savedata(ch_task.outputs.output_meta, location='CH_OUT')
# workflow.savedata(aoptask.outputs.data, location='AOP_OUT')
# NOTE: Always required because the source bundle must be uploaded.
ch_task.upload_input_ports()
print(workflow.generate_workflow_description())
print(workflow.execute())
<commit_msg>Remove the cloud-harness task and add second cloud-harness task for chaining.<commit_after>
|
from gbdxtools import Interface
gbdx = Interface()
# Create a cloud-harness gbdxtools Task
from ch_tasks.cp_task import CopyTask
cp_task = gbdx.Task(CopyTask)
from ch_tasks.raster_meta import RasterMetaTask
ch_task = gbdx.Task(RasterMetaTask)
# NOTE: This will override the value in the class definition.
ch_task.inputs.input_raster = cp_task.outputs.output_data.value # Overwrite the value from
workflow = gbdx.Workflow([cp_task, ch_task])
workflow.savedata(cp_task.outputs.output_data, location='CH_Demo/output_data')
workflow.savedata(ch_task.outputs.output_meta, location='CH_Demo/output_meta')
print(workflow.execute()) # Will upload cloud-harness ports before executing
# print(workflow.generate_workflow_description())
|
import json
import os
from osgeo import gdal
from gbdxtools import Interface
from gbdx_task_template import TaskTemplate, Task, InputPort, OutputPort
gbdx = Interface()
# data = "s3://receiving-dgcs-tdgplatform-com/054813633050_01_003" # WV02 Image over San Francisco
# aoptask = gbdx.Task("AOP_Strip_Processor", data=data, enable_acomp=True, enable_pansharpen=True)
class RasterMetaApp(TaskTemplate):
task = Task("RasterMetaTask")
task.input_raster = InputPort(value="/Users/michaelconnor/demo_image")
task.output_meta = OutputPort(value="/Users/michaelconnor")
def invoke(self):
images = self.task.input_raster.list_files(extensions=[".tiff", ".tif"])
# Magic Starts here
for img in images:
header = "META FOR %s\n\n" % os.path.basename(img)
gtif = gdal.Open(img)
self.task.output_meta.write('metadata.txt', header)
self.task.output_meta.write('metadata.txt', json.dumps(gtif.GetMetadata(), indent=2))
# Create a cloud-harness
ch_task = gbdx.Task(RasterMetaApp)
# NOTE: This will override the value in the class definition above.
ch_task.inputs.input_raster = 's3://test-tdgplatform-com/data/envi_src/sm_tiff' # Overwrite the value from
workflow = gbdx.Workflow([ch_task])
# workflow = gbdx.Workflow([aoptask, ch_task])
workflow.savedata(ch_task.outputs.output_meta, location='CH_OUT')
# workflow.savedata(aoptask.outputs.data, location='AOP_OUT')
# NOTE: Always required because the source bundle must be uploaded.
ch_task.upload_input_ports()
print(workflow.generate_workflow_description())
print(workflow.execute())
Remove the cloud-harness task and add second cloud-harness task for chaining.
from gbdxtools import Interface
gbdx = Interface()
# Create a cloud-harness gbdxtools Task
from ch_tasks.cp_task import CopyTask
cp_task = gbdx.Task(CopyTask)
from ch_tasks.raster_meta import RasterMetaTask
ch_task = gbdx.Task(RasterMetaTask)
# NOTE: This will override the value in the class definition.
ch_task.inputs.input_raster = cp_task.outputs.output_data.value # Overwrite the value from
workflow = gbdx.Workflow([cp_task, ch_task])
workflow.savedata(cp_task.outputs.output_data, location='CH_Demo/output_data')
workflow.savedata(ch_task.outputs.output_meta, location='CH_Demo/output_meta')
print(workflow.execute()) # Will upload cloud-harness ports before executing
# print(workflow.generate_workflow_description())
|
<commit_before>import json
import os
from osgeo import gdal
from gbdxtools import Interface
from gbdx_task_template import TaskTemplate, Task, InputPort, OutputPort
gbdx = Interface()
# data = "s3://receiving-dgcs-tdgplatform-com/054813633050_01_003" # WV02 Image over San Francisco
# aoptask = gbdx.Task("AOP_Strip_Processor", data=data, enable_acomp=True, enable_pansharpen=True)
class RasterMetaApp(TaskTemplate):
task = Task("RasterMetaTask")
task.input_raster = InputPort(value="/Users/michaelconnor/demo_image")
task.output_meta = OutputPort(value="/Users/michaelconnor")
def invoke(self):
images = self.task.input_raster.list_files(extensions=[".tiff", ".tif"])
# Magic Starts here
for img in images:
header = "META FOR %s\n\n" % os.path.basename(img)
gtif = gdal.Open(img)
self.task.output_meta.write('metadata.txt', header)
self.task.output_meta.write('metadata.txt', json.dumps(gtif.GetMetadata(), indent=2))
# Create a cloud-harness
ch_task = gbdx.Task(RasterMetaApp)
# NOTE: This will override the value in the class definition above.
ch_task.inputs.input_raster = 's3://test-tdgplatform-com/data/envi_src/sm_tiff' # Overwrite the value from
workflow = gbdx.Workflow([ch_task])
# workflow = gbdx.Workflow([aoptask, ch_task])
workflow.savedata(ch_task.outputs.output_meta, location='CH_OUT')
# workflow.savedata(aoptask.outputs.data, location='AOP_OUT')
# NOTE: Always required because the source bundle must be uploaded.
ch_task.upload_input_ports()
print(workflow.generate_workflow_description())
print(workflow.execute())
<commit_msg>Remove the cloud-harness task and add second cloud-harness task for chaining.<commit_after>from gbdxtools import Interface
gbdx = Interface()
# Create a cloud-harness gbdxtools Task
from ch_tasks.cp_task import CopyTask
cp_task = gbdx.Task(CopyTask)
from ch_tasks.raster_meta import RasterMetaTask
ch_task = gbdx.Task(RasterMetaTask)
# NOTE: This will override the value in the class definition.
ch_task.inputs.input_raster = cp_task.outputs.output_data.value # Overwrite the value from
workflow = gbdx.Workflow([cp_task, ch_task])
workflow.savedata(cp_task.outputs.output_data, location='CH_Demo/output_data')
workflow.savedata(ch_task.outputs.output_meta, location='CH_Demo/output_meta')
print(workflow.execute()) # Will upload cloud-harness ports before executing
# print(workflow.generate_workflow_description())
|
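The change in this record wires one task's output port into another task's input port (`cp_task.outputs.output_data.value`), which is how gbdxtools expresses task chaining inside a workflow. A hedged sketch of the same pattern with stock tasks follows; only `gbdx.Task`, `gbdx.Workflow`, `savedata`, `execute`, and the port-assignment style come from the snippets above, while the downstream task name and its port names are assumptions.

from gbdxtools import Interface

gbdx = Interface()

# First task produces data on an output port.
first = gbdx.Task("AOP_Strip_Processor",
                  data="s3://receiving-dgcs-tdgplatform-com/054813633050_01_003")

# Second task consumes the first task's output port directly;
# the platform resolves the actual location at run time.
second = gbdx.Task("Some_Downstream_Task")      # hypothetical task name
second.inputs.data = first.outputs.data.value   # port-to-port chaining

workflow = gbdx.Workflow([first, second])
workflow.savedata(second.outputs.data, location='CHAIN_OUT')
print(workflow.execute())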
4f46fe7abf5efcd93bc161f2cfccc58df4ab1ee4
|
whats_fresh/whats_fresh_api/tests/views/entry/test_list_preparations.py
|
whats_fresh/whats_fresh_api/tests/views/entry/test_list_preparations.py
|
from django.test import TestCase
from django.core.urlresolvers import reverse
from whats_fresh_api.models import *
from django.contrib.gis.db import models
import json
class ListPreparationTestCase(TestCase):
fixtures = ['test_fixtures']
def test_url_endpoint(self):
url = reverse('entry-list-preparations')
self.assertEqual(url, '/entry/preparations')
def test_list_items(self):
"""
Tests to see if the list of preparations
contains the proper preparations
"""
response = self.client.get(reverse('entry-list-preparations'))
items = response.context['item_list']
for preparation in Preparation.objects.all():
self.assertEqual(
items[preparation.id-1]['description'], preparation.description)
self.assertEqual(
items[preparation.id-1]['name'], preparation.name)
self.assertEqual(
items[preparation.id-1]['link'],
reverse('edit-preparation', kwargs={'id': preparation.id}))
|
from django.test import TestCase
from django.core.urlresolvers import reverse
from whats_fresh_api.models import *
from django.contrib.gis.db import models
import json
class ListPreparationTestCase(TestCase):
fixtures = ['test_fixtures']
def test_url_endpoint(self):
url = reverse('entry-list-preparations')
self.assertEqual(url, '/entry/preparations')
def test_list_items(self):
"""
Tests to see if the list of preparations
contains the proper preparations
"""
response = self.client.get(reverse('entry-list-preparations'))
items = response.context['item_list']
for preparation in Preparation.objects.all():
self.assertEqual(
items[preparation.id-1]['description'],
preparation.description)
self.assertEqual(
items[preparation.id-1]['name'], preparation.name)
self.assertEqual(
items[preparation.id-1]['link'],
reverse('edit-preparation', kwargs={'id': preparation.id}))
|
Rewrite preparations list test to get ID from URL
|
Rewrite preparations list test to get ID from URL
|
Python
|
apache-2.0
|
iCHAIT/whats-fresh-api,osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api,osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api,osu-cass/whats-fresh-api,osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api
|
from django.test import TestCase
from django.core.urlresolvers import reverse
from whats_fresh_api.models import *
from django.contrib.gis.db import models
import json
class ListPreparationTestCase(TestCase):
fixtures = ['test_fixtures']
def test_url_endpoint(self):
url = reverse('entry-list-preparations')
self.assertEqual(url, '/entry/preparations')
def test_list_items(self):
"""
Tests to see if the list of preparations
contains the proper preparations
"""
response = self.client.get(reverse('entry-list-preparations'))
items = response.context['item_list']
for preparation in Preparation.objects.all():
self.assertEqual(
items[preparation.id-1]['description'], preparation.description)
self.assertEqual(
items[preparation.id-1]['name'], preparation.name)
self.assertEqual(
items[preparation.id-1]['link'],
reverse('edit-preparation', kwargs={'id': preparation.id}))
Rewrite preparations list test to get ID from URL
|
from django.test import TestCase
from django.core.urlresolvers import reverse
from whats_fresh_api.models import *
from django.contrib.gis.db import models
import json
class ListPreparationTestCase(TestCase):
fixtures = ['test_fixtures']
def test_url_endpoint(self):
url = reverse('entry-list-preparations')
self.assertEqual(url, '/entry/preparations')
def test_list_items(self):
"""
Tests to see if the list of preparations
contains the proper preparations
"""
response = self.client.get(reverse('entry-list-preparations'))
items = response.context['item_list']
for preparation in Preparation.objects.all():
self.assertEqual(
items[preparation.id-1]['description'],
preparation.description)
self.assertEqual(
items[preparation.id-1]['name'], preparation.name)
self.assertEqual(
items[preparation.id-1]['link'],
reverse('edit-preparation', kwargs={'id': preparation.id}))
|
<commit_before>from django.test import TestCase
from django.core.urlresolvers import reverse
from whats_fresh_api.models import *
from django.contrib.gis.db import models
import json
class ListPreparationTestCase(TestCase):
fixtures = ['test_fixtures']
def test_url_endpoint(self):
url = reverse('entry-list-preparations')
self.assertEqual(url, '/entry/preparations')
def test_list_items(self):
"""
Tests to see if the list of preparations
contains the proper preparations
"""
response = self.client.get(reverse('entry-list-preparations'))
items = response.context['item_list']
for preparation in Preparation.objects.all():
self.assertEqual(
items[preparation.id-1]['description'], preparation.description)
self.assertEqual(
items[preparation.id-1]['name'], preparation.name)
self.assertEqual(
items[preparation.id-1]['link'],
reverse('edit-preparation', kwargs={'id': preparation.id}))
<commit_msg>Rewrite preparations list test to get ID from URL<commit_after>
|
from django.test import TestCase
from django.core.urlresolvers import reverse
from whats_fresh_api.models import *
from django.contrib.gis.db import models
import json
class ListPreparationTestCase(TestCase):
fixtures = ['test_fixtures']
def test_url_endpoint(self):
url = reverse('entry-list-preparations')
self.assertEqual(url, '/entry/preparations')
def test_list_items(self):
"""
Tests to see if the list of preparations
contains the proper preparations
"""
response = self.client.get(reverse('entry-list-preparations'))
items = response.context['item_list']
for preparation in Preparation.objects.all():
self.assertEqual(
items[preparation.id-1]['description'],
preparation.description)
self.assertEqual(
items[preparation.id-1]['name'], preparation.name)
self.assertEqual(
items[preparation.id-1]['link'],
reverse('edit-preparation', kwargs={'id': preparation.id}))
|
from django.test import TestCase
from django.core.urlresolvers import reverse
from whats_fresh_api.models import *
from django.contrib.gis.db import models
import json
class ListPreparationTestCase(TestCase):
fixtures = ['test_fixtures']
def test_url_endpoint(self):
url = reverse('entry-list-preparations')
self.assertEqual(url, '/entry/preparations')
def test_list_items(self):
"""
Tests to see if the list of preparations
contains the proper preparations
"""
response = self.client.get(reverse('entry-list-preparations'))
items = response.context['item_list']
for preparation in Preparation.objects.all():
self.assertEqual(
items[preparation.id-1]['description'], preparation.description)
self.assertEqual(
items[preparation.id-1]['name'], preparation.name)
self.assertEqual(
items[preparation.id-1]['link'],
reverse('edit-preparation', kwargs={'id': preparation.id}))
Rewrite preparations list test to get ID from URL
from django.test import TestCase
from django.core.urlresolvers import reverse
from whats_fresh_api.models import *
from django.contrib.gis.db import models
import json
class ListPreparationTestCase(TestCase):
fixtures = ['test_fixtures']
def test_url_endpoint(self):
url = reverse('entry-list-preparations')
self.assertEqual(url, '/entry/preparations')
def test_list_items(self):
"""
Tests to see if the list of preparations
contains the proper preparations
"""
response = self.client.get(reverse('entry-list-preparations'))
items = response.context['item_list']
for preparation in Preparation.objects.all():
self.assertEqual(
items[preparation.id-1]['description'],
preparation.description)
self.assertEqual(
items[preparation.id-1]['name'], preparation.name)
self.assertEqual(
items[preparation.id-1]['link'],
reverse('edit-preparation', kwargs={'id': preparation.id}))
|
<commit_before>from django.test import TestCase
from django.core.urlresolvers import reverse
from whats_fresh_api.models import *
from django.contrib.gis.db import models
import json
class ListPreparationTestCase(TestCase):
fixtures = ['test_fixtures']
def test_url_endpoint(self):
url = reverse('entry-list-preparations')
self.assertEqual(url, '/entry/preparations')
def test_list_items(self):
"""
Tests to see if the list of preparations
contains the proper preparations
"""
response = self.client.get(reverse('entry-list-preparations'))
items = response.context['item_list']
for preparation in Preparation.objects.all():
self.assertEqual(
items[preparation.id-1]['description'], preparation.description)
self.assertEqual(
items[preparation.id-1]['name'], preparation.name)
self.assertEqual(
items[preparation.id-1]['link'],
reverse('edit-preparation', kwargs={'id': preparation.id}))
<commit_msg>Rewrite preparations list test to get ID from URL<commit_after>from django.test import TestCase
from django.core.urlresolvers import reverse
from whats_fresh_api.models import *
from django.contrib.gis.db import models
import json
class ListPreparationTestCase(TestCase):
fixtures = ['test_fixtures']
def test_url_endpoint(self):
url = reverse('entry-list-preparations')
self.assertEqual(url, '/entry/preparations')
def test_list_items(self):
"""
Tests to see if the list of preparations
contains the proper preparations
"""
response = self.client.get(reverse('entry-list-preparations'))
items = response.context['item_list']
for preparation in Preparation.objects.all():
self.assertEqual(
items[preparation.id-1]['description'],
preparation.description)
self.assertEqual(
items[preparation.id-1]['name'], preparation.name)
self.assertEqual(
items[preparation.id-1]['link'],
reverse('edit-preparation', kwargs={'id': preparation.id}))
|
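Both versions of the test above index the context with `items[preparation.id-1]`, which silently assumes `item_list` is ordered by primary key starting at 1 — fragile once fixtures change. The subject says the test should "get ID from URL", so a sketch of that approach is shown below; this is not the project's actual code, and the regex assumes the edit link ends with the numeric ID.

# Sketch only: match each rendered item back to its Preparation by
# parsing the ID out of the edit link, instead of positional indexing.
import re

def test_list_items(self):
    response = self.client.get(reverse('entry-list-preparations'))
    for item in response.context['item_list']:
        # Assumes a link shaped like '.../<id>' or '.../<id>/'.
        prep_id = int(re.search(r'(\d+)/?$', item['link']).group(1))
        preparation = Preparation.objects.get(pk=prep_id)
        self.assertEqual(item['name'], preparation.name)
        self.assertEqual(item['description'], preparation.description)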
8d014f6bc3994fabf3c0658e6884648ad9a8f2c2
|
quizalicious.py
|
quizalicious.py
|
from flask import Flask, render_template
from redis import StrictRedis
import random
import config
app = Flask(__name__)
app.debug = config.DEBUG
db = StrictRedis(host=config.REDIS_HOST, port=config.REDIS_PORT)
@app.route('/')
def main():
available_quizzes = db.smembers('quizzes')
return render_template('templates/main.html', quizzes=available_quizzes)
@app.route('/quiz/start/<quiz_name>')
def start_quiz(quiz_name):
return render_template('templates/quiz-start.html', quiz_name=quiz_name)
@app.route('/quiz/<quiz_name>')
def quiz(quiz_name):
#FIXME: naive random implementation, needs to be score-weighted
all_questions = db.hkeys(quiz_name)
question = random.choice(all_questions)
answer = db.hget(quiz_name, question)
return render_template('templates/quiz.html', quiz_name=quiz_name,
question=question, answer=answer)
|
from flask import Flask, render_template
from redis import StrictRedis
import random
import config
app = Flask(__name__)
app.debug = config.DEBUG
db = StrictRedis(host=config.REDIS_HOST, port=config.REDIS_PORT)
@app.route('/')
def main():
available_quizzes = db.smembers('quizzes')
return render_template('main.html', quizzes=available_quizzes)
@app.route('/quiz/start/<quiz_url_name>')
def start_quiz(quiz_url_name):
description, created_by, quiz_name = db.hmget(quiz_url_name,
'description', 'created_by', 'quiz_name')
return render_template('quiz-start.html', quiz_name=quiz_name,
description=description, created_by=created_by,
quiz_url_name=quiz_url_name)
@app.route('/quiz/<quiz_url_name>')
def quiz(quiz_url_name):
#FIXME: naive random implementation, needs to be score-weighted
hash_name = "{0}:questions".format(quiz_url_name)
all_questions = db.hkeys(hash_name)
question = random.choice(all_questions)
answer = db.hget(hash_name, question)
quiz_name = db.hget(quiz_url_name, 'quiz_name')
return render_template('quiz.html', quiz_name=quiz_name,
question=question, answer=answer,
quiz_url_name=quiz_url_name)
if __name__ == '__main__':
app.run()
|
Change key lookups and fix typos
|
Change key lookups and fix typos
Revamped the way URLs were handled from Redis by differentiating the URL
friendly name from the actual name. Fixed bad paths in render_template
for all routes.
|
Python
|
bsd-2-clause
|
estreeper/quizalicious,estreeper/quizalicious,estreeper/quizalicious
|
from flask import Flask, render_template
from redis import StrictRedis
import random
import config
app = Flask(__name__)
app.debug = config.DEBUG
db = StrictRedis(host=config.REDIS_HOST, port=config.REDIS_PORT)
@app.route('/')
def main():
available_quizzes = db.smembers('quizzes')
return render_template('templates/main.html', quizzes=available_quizzes)
@app.route('/quiz/start/<quiz_name>')
def start_quiz(quiz_name):
return render_template('templates/quiz-start.html', quiz_name=quiz_name)
@app.route('/quiz/<quiz_name>')
def quiz(quiz_name):
#FIXME: naive random implementation, needs to be score-weighted
all_questions = db.hkeys(quiz_name)
question = random.choice(all_questions)
answer = db.hget(quiz_name, question)
return render_template('templates/quiz.html', quiz_name=quiz_name,
question=question, answer=answer)
Change key lookups and fix typos
Revamped the way URLs were handled from Redis by differentiating the URL
friendly name from the actual name. Fixed bad paths in render_template
for all routes.
|
from flask import Flask, render_template
from redis import StrictRedis
import random
import config
app = Flask(__name__)
app.debug = config.DEBUG
db = StrictRedis(host=config.REDIS_HOST, port=config.REDIS_PORT)
@app.route('/')
def main():
available_quizzes = db.smembers('quizzes')
return render_template('main.html', quizzes=available_quizzes)
@app.route('/quiz/start/<quiz_url_name>')
def start_quiz(quiz_url_name):
description, created_by, quiz_name = db.hmget(quiz_url_name,
'description', 'created_by', 'quiz_name')
return render_template('quiz-start.html', quiz_name=quiz_name,
description=description, created_by=created_by,
quiz_url_name=quiz_url_name)
@app.route('/quiz/<quiz_url_name>')
def quiz(quiz_url_name):
#FIXME: naive random implementation, needs to be score-weighted
hash_name = "{0}:questions".format(quiz_url_name)
all_questions = db.hkeys(hash_name)
question = random.choice(all_questions)
answer = db.hget(hash_name, question)
quiz_name = db.hget(quiz_url_name, 'quiz_name')
return render_template('quiz.html', quiz_name=quiz_name,
question=question, answer=answer,
quiz_url_name=quiz_url_name)
if __name__ == '__main__':
app.run()
|
<commit_before>from flask import Flask, render_template
from redis import StrictRedis
import random
import config
app = Flask(__name__)
app.debug = config.DEBUG
db = StrictRedis(host=config.REDIS_HOST, port=config.REDIS_PORT)
@app.route('/')
def main():
available_quizzes = db.smembers('quizzes')
return render_template('templates/main.html', quizzes=available_quizzes)
@app.route('/quiz/start/<quiz_name>')
def start_quiz(quiz_name):
return render_template('templates/quiz-start.html', quiz_name=quiz_name)
@app.route('/quiz/<quiz_name>')
def quiz(quiz_name):
#FIXME: naive random implementation, needs to be score-weighted
all_questions = db.hkeys(quiz_name)
question = random.choice(all_questions)
answer = db.hget(quiz_name, question)
return render_template('templates/quiz.html', quiz_name=quiz_name,
question=question, answer=answer)
<commit_msg>Change key lookups and fix typos
Revamped the way URLs were handled from Redis by differentiating the URL
friendly name from the actual name. Fixed bad paths in render_template
for all routes.<commit_after>
|
from flask import Flask, render_template
from redis import StrictRedis
import random
import config
app = Flask(__name__)
app.debug = config.DEBUG
db = StrictRedis(host=config.REDIS_HOST, port=config.REDIS_PORT)
@app.route('/')
def main():
available_quizzes = db.smembers('quizzes')
return render_template('main.html', quizzes=available_quizzes)
@app.route('/quiz/start/<quiz_url_name>')
def start_quiz(quiz_url_name):
description, created_by, quiz_name = db.hmget(quiz_url_name,
'description', 'created_by', 'quiz_name')
return render_template('quiz-start.html', quiz_name=quiz_name,
description=description, created_by=created_by,
quiz_url_name=quiz_url_name)
@app.route('/quiz/<quiz_url_name>')
def quiz(quiz_url_name):
#FIXME: naive random implementation, needs to be score-weighted
hash_name = "{0}:questions".format(quiz_url_name)
all_questions = db.hkeys(hash_name)
question = random.choice(all_questions)
answer = db.hget(hash_name, question)
quiz_name = db.hget(quiz_url_name, 'quiz_name')
return render_template('quiz.html', quiz_name=quiz_name,
question=question, answer=answer,
quiz_url_name=quiz_url_name)
if __name__ == '__main__':
app.run()
|
from flask import Flask, render_template
from redis import StrictRedis
import random
import config
app = Flask(__name__)
app.debug = config.DEBUG
db = StrictRedis(host=config.REDIS_HOST, port=config.REDIS_PORT)
@app.route('/')
def main():
available_quizzes = db.smembers('quizzes')
return render_template('templates/main.html', quizzes=available_quizzes)
@app.route('/quiz/start/<quiz_name>')
def start_quiz(quiz_name):
return render_template('templates/quiz-start.html', quiz_name=quiz_name)
@app.route('/quiz/<quiz_name>')
def quiz(quiz_name):
#FIXME: naive random implementation, needs to be score-weighted
all_questions = db.hkeys(quiz_name)
question = random.choice(all_questions)
answer = db.hget(quiz_name, question)
return render_template('templates/quiz.html', quiz_name=quiz_name,
question=question, answer=answer)
Change key lookups and fix typos
Revamped the way URLs were handled from Redis by differentiating the URL
friendly name from the actual name. Fixed bad paths in render_template
for all routes.
from flask import Flask, render_template
from redis import StrictRedis
import random
import config
app = Flask(__name__)
app.debug = config.DEBUG
db = StrictRedis(host=config.REDIS_HOST, port=config.REDIS_PORT)
@app.route('/')
def main():
available_quizzes = db.smembers('quizzes')
return render_template('main.html', quizzes=available_quizzes)
@app.route('/quiz/start/<quiz_url_name>')
def start_quiz(quiz_url_name):
description, created_by, quiz_name = db.hmget(quiz_url_name,
'description', 'created_by', 'quiz_name')
return render_template('quiz-start.html', quiz_name=quiz_name,
description=description, created_by=created_by,
quiz_url_name=quiz_url_name)
@app.route('/quiz/<quiz_url_name>')
def quiz(quiz_url_name):
#FIXME: naive random implementation, needs to be score-weighted
hash_name = "{0}:questions".format(quiz_url_name)
all_questions = db.hkeys(hash_name)
question = random.choice(all_questions)
answer = db.hget(hash_name, question)
quiz_name = db.hget(quiz_url_name, 'quiz_name')
return render_template('quiz.html', quiz_name=quiz_name,
question=question, answer=answer,
quiz_url_name=quiz_url_name)
if __name__ == '__main__':
app.run()
|
<commit_before>from flask import Flask, render_template
from redis import StrictRedis
import random
import config
app = Flask(__name__)
app.debug = config.DEBUG
db = StrictRedis(host=config.REDIS_HOST, port=config.REDIS_PORT)
@app.route('/')
def main():
available_quizzes = db.smembers('quizzes')
return render_template('templates/main.html', quizzes=available_quizzes)
@app.route('/quiz/start/<quiz_name>')
def start_quiz(quiz_name):
return render_template('templates/quiz-start.html', quiz_name=quiz_name)
@app.route('/quiz/<quiz_name>')
def quiz(quiz_name):
#FIXME: naive random implementation, needs to be score-weighted
all_questions = db.hkeys(quiz_name)
question = random.choice(all_questions)
answer = db.hget(quiz_name, question)
return render_template('templates/quiz.html', quiz_name=quiz_name,
question=question, answer=answer)
<commit_msg>Change key lookups and fix typos
Revamped the way URLs were handled from Redis by differentiating the URL
friendly name from the actual name. Fixed bad paths in render_template
for all routes.<commit_after>from flask import Flask, render_template
from redis import StrictRedis
import random
import config
app = Flask(__name__)
app.debug = config.DEBUG
db = StrictRedis(host=config.REDIS_HOST, port=config.REDIS_PORT)
@app.route('/')
def main():
available_quizzes = db.smembers('quizzes')
return render_template('main.html', quizzes=available_quizzes)
@app.route('/quiz/start/<quiz_url_name>')
def start_quiz(quiz_url_name):
description, created_by, quiz_name = db.hmget(quiz_url_name,
'description', 'created_by', 'quiz_name')
return render_template('quiz-start.html', quiz_name=quiz_name,
description=description, created_by=created_by,
quiz_url_name=quiz_url_name)
@app.route('/quiz/<quiz_url_name>')
def quiz(quiz_url_name):
#FIXME: naive random implementation, needs to be score-weighted
hash_name = "{0}:questions".format(quiz_url_name)
all_questions = db.hkeys(hash_name)
question = random.choice(all_questions)
answer = db.hget(hash_name, question)
quiz_name = db.hget(quiz_url_name, 'quiz_name')
return render_template('quiz.html', quiz_name=quiz_name,
question=question, answer=answer,
quiz_url_name=quiz_url_name)
if __name__ == '__main__':
app.run()
|
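The rewritten routes above expect three kinds of Redis keys per quiz: a `quizzes` set for the index page, a hash named after the URL-friendly name holding `description`, `created_by`, and `quiz_name`, and a `<quiz_url_name>:questions` hash mapping questions to answers. A minimal seeding sketch consistent with those lookups is below; the quiz data is made up, and `hmset` is the era-appropriate redis-py call (newer versions would use `hset` with `mapping=`).

from redis import StrictRedis
import config

db = StrictRedis(host=config.REDIS_HOST, port=config.REDIS_PORT)

# Register the quiz on the index page.
db.sadd('quizzes', 'state-capitals')

# Metadata hash, keyed by the URL-friendly name.
db.hmset('state-capitals', {
    'quiz_name': 'State Capitals',   # display name shown in templates
    'description': 'Match each state to its capital.',
    'created_by': 'example_user',
})

# Question/answer hash read by the quiz() view.
db.hset('state-capitals:questions', 'Oregon', 'Salem')
db.hset('state-capitals:questions', 'Texas', 'Austin')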
7b10375eaae7c79a4d90b8f3835e8a1fe06c5f31
|
hermes/feeds.py
|
hermes/feeds.py
|
from django.contrib.syndication.views import Feed
from .models import Post
from .settings import (
SYNDICATION_FEED_TITLE, SYNDICATION_FEED_LINK,
SYNDICATION_FEED_DESCRIPTION, SYNDICATION_FEED_TYPE
)
class LatestPostFeed(Feed):
title = SYNDICATION_FEED_TITLE
link = SYNDICATION_FEED_LINK
description = SYNDICATION_FEED_DESCRIPTION
feed_type = SYNDICATION_FEED_TYPE
    description_template = 'feed_post_description.html'
def items(self):
return Post.objects.recent()
def item_title(self, item):
return item.subject
def item_description(self, item):
return item.body
def item_pubdate(self, item):
return item.created_on
def item_updateddate(self, item):
return item.modified_on
def item_categories(self, item):
return [category.title for category in item.category.hierarchy()]
def item_author_name(self, item):
return "{first_name} {last_name}".format(
first_name=item.author.first_name,
last_name=item.author.last_name,
)
def item_author_email(self, item):
return item.author.email
|
from django.contrib.syndication.views import Feed
from .models import Post
from .settings import (
SYNDICATION_FEED_TITLE, SYNDICATION_FEED_LINK,
SYNDICATION_FEED_DESCRIPTION, SYNDICATION_FEED_TYPE
)
class LatestPostFeed(Feed):
title = SYNDICATION_FEED_TITLE
link = SYNDICATION_FEED_LINK
description = SYNDICATION_FEED_DESCRIPTION
feed_type = SYNDICATION_FEED_TYPE
    description_template = 'hermes/feed_post_description.html'
def items(self):
return Post.objects.recent()
def item_title(self, item):
return item.subject
def item_description(self, item):
return item.body
def item_pubdate(self, item):
return item.created_on
def item_updateddate(self, item):
return item.modified_on
def item_categories(self, item):
return [category.title for category in item.category.hierarchy()]
def item_author_name(self, item):
return "{first_name} {last_name}".format(
first_name=item.author.first_name,
last_name=item.author.last_name,
)
def item_author_email(self, item):
return item.author.email
|
Use actual path to template
|
Use actual path to template
|
Python
|
mit
|
DemocracyClub/django-hermes,DemocracyClub/django-hermes
|
from django.contrib.syndication.views import Feed
from .models import Post
from .settings import (
SYNDICATION_FEED_TITLE, SYNDICATION_FEED_LINK,
SYNDICATION_FEED_DESCRIPTION, SYNDICATION_FEED_TYPE
)
class LatestPostFeed(Feed):
title = SYNDICATION_FEED_TITLE
link = SYNDICATION_FEED_LINK
description = SYNDICATION_FEED_DESCRIPTION
feed_type = SYNDICATION_FEED_TYPE
    description_template = 'feed_post_description.html'
def items(self):
return Post.objects.recent()
def item_title(self, item):
return item.subject
def item_description(self, item):
return item.body
def item_pubdate(self, item):
return item.created_on
def item_updateddate(self, item):
return item.modified_on
def item_categories(self, item):
return [category.title for category in item.category.hierarchy()]
def item_author_name(self, item):
return "{first_name} {last_name}".format(
first_name=item.author.first_name,
last_name=item.author.last_name,
)
def item_author_email(self, item):
return item.author.email
Use actual path to template
|
from django.contrib.syndication.views import Feed
from .models import Post
from .settings import (
SYNDICATION_FEED_TITLE, SYNDICATION_FEED_LINK,
SYNDICATION_FEED_DESCRIPTION, SYNDICATION_FEED_TYPE
)
class LatestPostFeed(Feed):
title = SYNDICATION_FEED_TITLE
link = SYNDICATION_FEED_LINK
description = SYNDICATION_FEED_DESCRIPTION
feed_type = SYNDICATION_FEED_TYPE
    description_template = 'hermes/feed_post_description.html'
def items(self):
return Post.objects.recent()
def item_title(self, item):
return item.subject
def item_description(self, item):
return item.body
def item_pubdate(self, item):
return item.created_on
def item_updateddate(self, item):
return item.modified_on
def item_categories(self, item):
return [category.title for category in item.category.hierarchy()]
def item_author_name(self, item):
return "{first_name} {last_name}".format(
first_name=item.author.first_name,
last_name=item.author.last_name,
)
def item_author_email(self, item):
return item.author.email
|
<commit_before>from django.contrib.syndication.views import Feed
from .models import Post
from .settings import (
SYNDICATION_FEED_TITLE, SYNDICATION_FEED_LINK,
SYNDICATION_FEED_DESCRIPTION, SYNDICATION_FEED_TYPE
)
class LatestPostFeed(Feed):
title = SYNDICATION_FEED_TITLE
link = SYNDICATION_FEED_LINK
description = SYNDICATION_FEED_DESCRIPTION
feed_type = SYNDICATION_FEED_TYPE
    description_template = 'feed_post_description.html'
def items(self):
return Post.objects.recent()
def item_title(self, item):
return item.subject
def item_description(self, item):
return item.body
def item_pubdate(self, item):
return item.created_on
def item_updateddate(self, item):
return item.modified_on
def item_categories(self, item):
return [category.title for category in item.category.hierarchy()]
def item_author_name(self, item):
return "{first_name} {last_name}".format(
first_name=item.author.first_name,
last_name=item.author.last_name,
)
def item_author_email(self, item):
return item.author.email
<commit_msg>Use actual path to template<commit_after>
|
from django.contrib.syndication.views import Feed
from .models import Post
from .settings import (
SYNDICATION_FEED_TITLE, SYNDICATION_FEED_LINK,
SYNDICATION_FEED_DESCRIPTION, SYNDICATION_FEED_TYPE
)
class LatestPostFeed(Feed):
title = SYNDICATION_FEED_TITLE
link = SYNDICATION_FEED_LINK
description = SYNDICATION_FEED_DESCRIPTION
feed_type = SYNDICATION_FEED_TYPE
    description_template = 'hermes/feed_post_description.html'
def items(self):
return Post.objects.recent()
def item_title(self, item):
return item.subject
def item_description(self, item):
return item.body
def item_pubdate(self, item):
return item.created_on
def item_updateddate(self, item):
return item.modified_on
def item_categories(self, item):
return [category.title for category in item.category.hierarchy()]
def item_author_name(self, item):
return "{first_name} {last_name}".format(
first_name=item.author.first_name,
last_name=item.author.last_name,
)
def item_author_email(self, item):
return item.author.email
|
from django.contrib.syndication.views import Feed
from .models import Post
from .settings import (
SYNDICATION_FEED_TITLE, SYNDICATION_FEED_LINK,
SYNDICATION_FEED_DESCRIPTION, SYNDICATION_FEED_TYPE
)
class LatestPostFeed(Feed):
title = SYNDICATION_FEED_TITLE
link = SYNDICATION_FEED_LINK
description = SYNDICATION_FEED_DESCRIPTION
feed_type = SYNDICATION_FEED_TYPE
    description_template = 'feed_post_description.html'
def items(self):
return Post.objects.recent()
def item_title(self, item):
return item.subject
def item_description(self, item):
return item.body
def item_pubdate(self, item):
return item.created_on
def item_updateddate(self, item):
return item.modified_on
def item_categories(self, item):
return [category.title for category in item.category.hierarchy()]
def item_author_name(self, item):
return "{first_name} {last_name}".format(
first_name=item.author.first_name,
last_name=item.author.last_name,
)
def item_author_email(self, item):
return item.author.email
Use actual path to template
from django.contrib.syndication.views import Feed
from .models import Post
from .settings import (
SYNDICATION_FEED_TITLE, SYNDICATION_FEED_LINK,
SYNDICATION_FEED_DESCRIPTION, SYNDICATION_FEED_TYPE
)
class LatestPostFeed(Feed):
title = SYNDICATION_FEED_TITLE
link = SYNDICATION_FEED_LINK
description = SYNDICATION_FEED_DESCRIPTION
feed_type = SYNDICATION_FEED_TYPE
    description_template = 'hermes/feed_post_description.html'
def items(self):
return Post.objects.recent()
def item_title(self, item):
return item.subject
def item_description(self, item):
return item.body
def item_pubdate(self, item):
return item.created_on
def item_updateddate(self, item):
return item.modified_on
def item_categories(self, item):
return [category.title for category in item.category.hierarchy()]
def item_author_name(self, item):
return "{first_name} {last_name}".format(
first_name=item.author.first_name,
last_name=item.author.last_name,
)
def item_author_email(self, item):
return item.author.email
|
<commit_before>from django.contrib.syndication.views import Feed
from .models import Post
from .settings import (
SYNDICATION_FEED_TITLE, SYNDICATION_FEED_LINK,
SYNDICATION_FEED_DESCRIPTION, SYNDICATION_FEED_TYPE
)
class LatestPostFeed(Feed):
title = SYNDICATION_FEED_TITLE
link = SYNDICATION_FEED_LINK
description = SYNDICATION_FEED_DESCRIPTION
feed_type = SYNDICATION_FEED_TYPE
    description_template = 'feed_post_description.html'
def items(self):
return Post.objects.recent()
def item_title(self, item):
return item.subject
def item_description(self, item):
return item.body
def item_pubdate(self, item):
return item.created_on
def item_updateddate(self, item):
return item.modified_on
def item_categories(self, item):
return [category.title for category in item.category.hierarchy()]
def item_author_name(self, item):
return "{first_name} {last_name}".format(
first_name=item.author.first_name,
last_name=item.author.last_name,
)
def item_author_email(self, item):
return item.author.email
<commit_msg>Use actual path to template<commit_after>from django.contrib.syndication.views import Feed
from .models import Post
from .settings import (
SYNDICATION_FEED_TITLE, SYNDICATION_FEED_LINK,
SYNDICATION_FEED_DESCRIPTION, SYNDICATION_FEED_TYPE
)
class LatestPostFeed(Feed):
title = SYNDICATION_FEED_TITLE
link = SYNDICATION_FEED_LINK
description = SYNDICATION_FEED_DESCRIPTION
feed_type = SYNDICATION_FEED_TYPE
    description_template = 'hermes/feed_post_description.html'
def items(self):
return Post.objects.recent()
def item_title(self, item):
return item.subject
def item_description(self, item):
return item.body
def item_pubdate(self, item):
return item.created_on
def item_updateddate(self, item):
return item.modified_on
def item_categories(self, item):
return [category.title for category in item.category.hierarchy()]
def item_author_name(self, item):
return "{first_name} {last_name}".format(
first_name=item.author.first_name,
last_name=item.author.last_name,
)
def item_author_email(self, item):
return item.author.email
|
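For the corrected `description_template` above to render, the template must live at `templates/hermes/feed_post_description.html` inside an app directory (or a configured template dir), and the feed class still needs a URL route. A hedged sketch of that wiring follows; the URL pattern and template body are assumptions, and only `LatestPostFeed` comes from the record above.

# urls.py -- route name and regex are assumptions, era-appropriate Django style.
from django.conf.urls import url
from hermes.feeds import LatestPostFeed

urlpatterns = [
    url(r'^blog/feed/$', LatestPostFeed(), name='post-feed'),
]

# templates/hermes/feed_post_description.html -- illustrative body:
#   {{ obj.body }}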
f9a59247155b5d8f356ae09d25573fb703d58e52
|
hijack/views.py
|
hijack/views.py
|
from django.contrib.admin.views.decorators import staff_member_required
from django.contrib.auth.decorators import login_required
from django.shortcuts import get_object_or_404
from django.http import HttpResponseBadRequest, HttpResponseRedirect
from hijack.helpers import login_user
from hijack.helpers import release_hijack as release_hijack_fx
from compat import get_user_model
@staff_member_required
def login_with_id(request, userId):
# input(userId) is unicode
try:
userId = int(userId)
except:
return HttpResponseBadRequest('userId must be an integer value.')
user = get_object_or_404(get_user_model(), pk=userId)
return login_user(request, user)
@staff_member_required
def login_with_email(request, email):
user = get_object_or_404(get_user_model(), email=email)
return login_user(request, user)
@staff_member_required
def login_with_username(request, username):
user = get_object_or_404(get_user_model(), username=username)
return login_user(request, user)
@login_required
def release_hijack(request):
return release_hijack_fx(request)
@login_required
def disable_hijack_warning(request):
request.session['is_hijacked_user']=False
return HttpResponseRedirect(request.GET.get('next','/'))
|
from django.contrib.admin.views.decorators import staff_member_required
from django.contrib.auth.decorators import login_required
from django.shortcuts import get_object_or_404
from django.http import HttpResponseBadRequest, HttpResponseRedirect
from hijack.helpers import login_user
from hijack.helpers import release_hijack as release_hijack_fx
from compat import get_user_model
@staff_member_required
def login_with_id(request, userId):
# input(userId) is unicode
try:
userId = int(userId)
except:
return HttpResponseBadRequest('userId must be an integer value.')
user = get_object_or_404(get_user_model(), pk=userId)
return login_user(request, user)
@staff_member_required
def login_with_email(request, email):
user = get_object_or_404(get_user_model(), email=email)
return login_user(request, user)
@staff_member_required
def login_with_username(request, username):
user = get_object_or_404(get_user_model(), username=username)
return login_user(request, user)
@login_required
def release_hijack(request):
return release_hijack_fx(request)
@login_required
def disable_hijack_warning(request):
request.session['is_hijacked_user']=False
return HttpResponseRedirect(request.GET.get('next','/'))
|
Remove extra whitespace from imports
|
Remove extra whitespace from imports
|
Python
|
mit
|
arteria/django-hijack,arteria/django-hijack,arteria/django-hijack
|
from django.contrib.admin.views.decorators import staff_member_required
from django.contrib.auth.decorators import login_required
from django.shortcuts import get_object_or_404
from django.http import HttpResponseBadRequest, HttpResponseRedirect
from hijack.helpers import login_user
from hijack.helpers import release_hijack as release_hijack_fx
from compat import get_user_model
@staff_member_required
def login_with_id(request, userId):
# input(userId) is unicode
try:
userId = int(userId)
except:
return HttpResponseBadRequest('userId must be an integer value.')
user = get_object_or_404(get_user_model(), pk=userId)
return login_user(request, user)
@staff_member_required
def login_with_email(request, email):
user = get_object_or_404(get_user_model(), email=email)
return login_user(request, user)
@staff_member_required
def login_with_username(request, username):
user = get_object_or_404(get_user_model(), username=username)
return login_user(request, user)
@login_required
def release_hijack(request):
return release_hijack_fx(request)
@login_required
def disable_hijack_warning(request):
request.session['is_hijacked_user']=False
return HttpResponseRedirect(request.GET.get('next','/'))
Remove extra whitespace from imports
|
from django.contrib.admin.views.decorators import staff_member_required
from django.contrib.auth.decorators import login_required
from django.shortcuts import get_object_or_404
from django.http import HttpResponseBadRequest, HttpResponseRedirect
from hijack.helpers import login_user
from hijack.helpers import release_hijack as release_hijack_fx
from compat import get_user_model
@staff_member_required
def login_with_id(request, userId):
# input(userId) is unicode
try:
userId = int(userId)
except:
return HttpResponseBadRequest('userId must be an integer value.')
user = get_object_or_404(get_user_model(), pk=userId)
return login_user(request, user)
@staff_member_required
def login_with_email(request, email):
user = get_object_or_404(get_user_model(), email=email)
return login_user(request, user)
@staff_member_required
def login_with_username(request, username):
user = get_object_or_404(get_user_model(), username=username)
return login_user(request, user)
@login_required
def release_hijack(request):
return release_hijack_fx(request)
@login_required
def disable_hijack_warning(request):
request.session['is_hijacked_user']=False
return HttpResponseRedirect(request.GET.get('next','/'))
|
<commit_before>from django.contrib.admin.views.decorators import staff_member_required
from django.contrib.auth.decorators import login_required
from django.shortcuts import get_object_or_404
from django.http import HttpResponseBadRequest, HttpResponseRedirect
from hijack.helpers import login_user
from hijack.helpers import release_hijack as release_hijack_fx
from compat import get_user_model
@staff_member_required
def login_with_id(request, userId):
# input(userId) is unicode
try:
userId = int(userId)
except:
return HttpResponseBadRequest('userId must be an integer value.')
user = get_object_or_404(get_user_model(), pk=userId)
return login_user(request, user)
@staff_member_required
def login_with_email(request, email):
user = get_object_or_404(get_user_model(), email=email)
return login_user(request, user)
@staff_member_required
def login_with_username(request, username):
user = get_object_or_404(get_user_model(), username=username)
return login_user(request, user)
@login_required
def release_hijack(request):
return release_hijack_fx(request)
@login_required
def disable_hijack_warning(request):
request.session['is_hijacked_user']=False
return HttpResponseRedirect(request.GET.get('next','/'))
<commit_msg>Remove extra whitespace from imports<commit_after>
|
from django.contrib.admin.views.decorators import staff_member_required
from django.contrib.auth.decorators import login_required
from django.shortcuts import get_object_or_404
from django.http import HttpResponseBadRequest, HttpResponseRedirect
from hijack.helpers import login_user
from hijack.helpers import release_hijack as release_hijack_fx
from compat import get_user_model
@staff_member_required
def login_with_id(request, userId):
# input(userId) is unicode
try:
userId = int(userId)
except:
return HttpResponseBadRequest('userId must be an integer value.')
user = get_object_or_404(get_user_model(), pk=userId)
return login_user(request, user)
@staff_member_required
def login_with_email(request, email):
user = get_object_or_404(get_user_model(), email=email)
return login_user(request, user)
@staff_member_required
def login_with_username(request, username):
user = get_object_or_404(get_user_model(), username=username)
return login_user(request, user)
@login_required
def release_hijack(request):
return release_hijack_fx(request)
@login_required
def disable_hijack_warning(request):
request.session['is_hijacked_user']=False
return HttpResponseRedirect(request.GET.get('next','/'))
|
from django.contrib.admin.views.decorators import staff_member_required
from django.contrib.auth.decorators import login_required
from django.shortcuts import get_object_or_404
from django.http import HttpResponseBadRequest, HttpResponseRedirect
from hijack.helpers import login_user
from hijack.helpers import release_hijack as release_hijack_fx
from compat import get_user_model
@staff_member_required
def login_with_id(request, userId):
# input(userId) is unicode
try:
userId = int(userId)
    except (TypeError, ValueError):
return HttpResponseBadRequest('userId must be an integer value.')
user = get_object_or_404(get_user_model(), pk=userId)
return login_user(request, user)
@staff_member_required
def login_with_email(request, email):
user = get_object_or_404(get_user_model(), email=email)
return login_user(request, user)
@staff_member_required
def login_with_username(request, username):
user = get_object_or_404(get_user_model(), username=username)
return login_user(request, user)
@login_required
def release_hijack(request):
return release_hijack_fx(request)
@login_required
def disable_hijack_warning(request):
    request.session['is_hijacked_user'] = False
    return HttpResponseRedirect(request.GET.get('next', '/'))
Remove extra whitespace from importsfrom django.contrib.admin.views.decorators import staff_member_required
from django.contrib.auth.decorators import login_required
from django.shortcuts import get_object_or_404
from django.http import HttpResponseBadRequest, HttpResponseRedirect
from hijack.helpers import login_user
from hijack.helpers import release_hijack as release_hijack_fx
from compat import get_user_model
@staff_member_required
def login_with_id(request, userId):
# input(userId) is unicode
try:
userId = int(userId)
    except (TypeError, ValueError):
return HttpResponseBadRequest('userId must be an integer value.')
user = get_object_or_404(get_user_model(), pk=userId)
return login_user(request, user)
@staff_member_required
def login_with_email(request, email):
user = get_object_or_404(get_user_model(), email=email)
return login_user(request, user)
@staff_member_required
def login_with_username(request, username):
user = get_object_or_404(get_user_model(), username=username)
return login_user(request, user)
@login_required
def release_hijack(request):
return release_hijack_fx(request)
@login_required
def disable_hijack_warning(request):
    request.session['is_hijacked_user'] = False
    return HttpResponseRedirect(request.GET.get('next', '/'))
|
<commit_before>from django.contrib.admin.views.decorators import staff_member_required
from django.contrib.auth.decorators import login_required
from django.shortcuts import get_object_or_404
from django.http import HttpResponseBadRequest, HttpResponseRedirect
from hijack.helpers import login_user
from hijack.helpers import release_hijack as release_hijack_fx
from compat import get_user_model
@staff_member_required
def login_with_id(request, userId):
# input(userId) is unicode
try:
userId = int(userId)
    except (TypeError, ValueError):
return HttpResponseBadRequest('userId must be an integer value.')
user = get_object_or_404(get_user_model(), pk=userId)
return login_user(request, user)
@staff_member_required
def login_with_email(request, email):
user = get_object_or_404(get_user_model(), email=email)
return login_user(request, user)
@staff_member_required
def login_with_username(request, username):
user = get_object_or_404(get_user_model(), username=username)
return login_user(request, user)
@login_required
def release_hijack(request):
return release_hijack_fx(request)
@login_required
def disable_hijack_warning(request):
    request.session['is_hijacked_user'] = False
    return HttpResponseRedirect(request.GET.get('next', '/'))
<commit_msg>Remove extra whitespace from imports<commit_after>from django.contrib.admin.views.decorators import staff_member_required
from django.contrib.auth.decorators import login_required
from django.shortcuts import get_object_or_404
from django.http import HttpResponseBadRequest, HttpResponseRedirect
from hijack.helpers import login_user
from hijack.helpers import release_hijack as release_hijack_fx
from compat import get_user_model
@staff_member_required
def login_with_id(request, userId):
# input(userId) is unicode
try:
userId = int(userId)
    except (TypeError, ValueError):
return HttpResponseBadRequest('userId must be an integer value.')
user = get_object_or_404(get_user_model(), pk=userId)
return login_user(request, user)
@staff_member_required
def login_with_email(request, email):
user = get_object_or_404(get_user_model(), email=email)
return login_user(request, user)
@staff_member_required
def login_with_username(request, username):
user = get_object_or_404(get_user_model(), username=username)
return login_user(request, user)
@login_required
def release_hijack(request):
return release_hijack_fx(request)
@login_required
def disable_hijack_warning(request):
    request.session['is_hijacked_user'] = False
    return HttpResponseRedirect(request.GET.get('next', '/'))
|
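A brief aside on the django-hijack record above: these views only become reachable once they are mounted in a project's URLconf. A minimal sketch of such wiring, assuming hypothetical URL patterns (these are illustrative, not django-hijack's shipped routes):

# hypothetical urls.py wiring for the views above; patterns and names are assumptions
from django.conf.urls import url
from hijack import views

urlpatterns = [
    url(r'^hijack/(?P<userId>\d+)/$', views.login_with_id, name='login_with_id'),
    url(r'^hijack/email/(?P<email>[^/]+)/$', views.login_with_email, name='login_with_email'),
    url(r'^hijack/username/(?P<username>\w+)/$', views.login_with_username, name='login_with_username'),
    url(r'^hijack/release-hijack/$', views.release_hijack, name='release_hijack'),
    url(r'^hijack/disable-hijack-warning/$', views.disable_hijack_warning, name='disable_hijack_warning'),
]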
e0d0c9726766dc3281411e265c4d16ff66ecc595
|
regression/pages/studio/terms_of_service.py
|
regression/pages/studio/terms_of_service.py
|
"""
Terms of Service page
"""
from bok_choy.page_object import PageObject
from regression.pages.studio import LOGIN_BASE_URL
class TermsOfService(PageObject):
"""
Terms of Service page
"""
url = LOGIN_BASE_URL + '/edx-terms-service'
def is_browser_on_page(self):
return "Please read these Terms of Service" in self.q(
css='.field-page-body'
).text[0]
|
"""
Terms of Service page
"""
from bok_choy.page_object import PageObject
from regression.pages.studio import LOGIN_BASE_URL
class TermsOfService(PageObject):
"""
Terms of Service page
"""
url = LOGIN_BASE_URL + '/edx-terms-service'
def is_browser_on_page(self):
return "Please read these Terms of Service" in self.q(
css='.content-section'
).text[0]
|
Fix target css for TOS page
|
Fix target css for TOS page
|
Python
|
agpl-3.0
|
edx/edx-e2e-tests,edx/edx-e2e-tests
|
"""
Terms of Service page
"""
from bok_choy.page_object import PageObject
from regression.pages.studio import LOGIN_BASE_URL
class TermsOfService(PageObject):
"""
Terms of Service page
"""
url = LOGIN_BASE_URL + '/edx-terms-service'
def is_browser_on_page(self):
return "Please read these Terms of Service" in self.q(
css='.field-page-body'
).text[0]
Fix target css for TOS page
|
"""
Terms of Service page
"""
from bok_choy.page_object import PageObject
from regression.pages.studio import LOGIN_BASE_URL
class TermsOfService(PageObject):
"""
Terms of Service page
"""
url = LOGIN_BASE_URL + '/edx-terms-service'
def is_browser_on_page(self):
return "Please read these Terms of Service" in self.q(
css='.content-section'
).text[0]
|
<commit_before>"""
Terms of Service page
"""
from bok_choy.page_object import PageObject
from regression.pages.studio import LOGIN_BASE_URL
class TermsOfService(PageObject):
"""
Terms of Service page
"""
url = LOGIN_BASE_URL + '/edx-terms-service'
def is_browser_on_page(self):
return "Please read these Terms of Service" in self.q(
css='.field-page-body'
).text[0]
<commit_msg>Fix target css for TOS page<commit_after>
|
"""
Terms of Service page
"""
from bok_choy.page_object import PageObject
from regression.pages.studio import LOGIN_BASE_URL
class TermsOfService(PageObject):
"""
Terms of Service page
"""
url = LOGIN_BASE_URL + '/edx-terms-service'
def is_browser_on_page(self):
return "Please read these Terms of Service" in self.q(
css='.content-section'
).text[0]
|
"""
Terms of Service page
"""
from bok_choy.page_object import PageObject
from regression.pages.studio import LOGIN_BASE_URL
class TermsOfService(PageObject):
"""
Terms of Service page
"""
url = LOGIN_BASE_URL + '/edx-terms-service'
def is_browser_on_page(self):
return "Please read these Terms of Service" in self.q(
css='.field-page-body'
).text[0]
Fix target css for TOS page"""
Terms of Service page
"""
from bok_choy.page_object import PageObject
from regression.pages.studio import LOGIN_BASE_URL
class TermsOfService(PageObject):
"""
Terms of Service page
"""
url = LOGIN_BASE_URL + '/edx-terms-service'
def is_browser_on_page(self):
return "Please read these Terms of Service" in self.q(
css='.content-section'
).text[0]
|
<commit_before>"""
Terms of Service page
"""
from bok_choy.page_object import PageObject
from regression.pages.studio import LOGIN_BASE_URL
class TermsOfService(PageObject):
"""
Terms of Service page
"""
url = LOGIN_BASE_URL + '/edx-terms-service'
def is_browser_on_page(self):
return "Please read these Terms of Service" in self.q(
css='.field-page-body'
).text[0]
<commit_msg>Fix target css for TOS page<commit_after>"""
Terms of Service page
"""
from bok_choy.page_object import PageObject
from regression.pages.studio import LOGIN_BASE_URL
class TermsOfService(PageObject):
"""
Terms of Service page
"""
url = LOGIN_BASE_URL + '/edx-terms-service'
def is_browser_on_page(self):
return "Please read these Terms of Service" in self.q(
css='.content-section'
).text[0]
|
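For context on the bok_choy record above: a page object like TermsOfService is normally driven from a test by instantiating it with a live browser and calling visit(), which blocks until is_browser_on_page() is satisfied. A minimal usage sketch, assuming a configured bok_choy browser factory:

# hypothetical test snippet exercising the TermsOfService page object above
from bok_choy.browser import browser as make_browser

driver = make_browser()          # selenium WebDriver provided by bok_choy
page = TermsOfService(driver)    # page objects take the browser in their constructor
page.visit()                     # navigates to page.url and waits for is_browser_on_page()
driver.quit()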
649c70527ae602512cfa6ea62b60ebc43fc69797
|
lab/run_trace.py
|
lab/run_trace.py
|
# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
"""Run a simple trace function on a file of Python code."""
import os, sys
nest = 0
def trace(frame, event, arg):
global nest
if nest is None:
# This can happen when Python is shutting down.
return None
print("%s%s %s %d @%d" % (
" " * nest,
event,
os.path.basename(frame.f_code.co_filename),
frame.f_lineno,
frame.f_lasti,
))
if event == 'call':
nest += 1
if event == 'return':
nest -= 1
return trace
the_program = sys.argv[1]
sys.settrace(trace)
execfile(the_program)
|
# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
"""Run a simple trace function on a file of Python code."""
import os, sys
nest = 0
def trace(frame, event, arg):
global nest
if nest is None:
# This can happen when Python is shutting down.
return None
print("%s%s %s %d @%d" % (
" " * nest,
event,
os.path.basename(frame.f_code.co_filename),
frame.f_lineno,
frame.f_lasti,
))
if event == 'call':
nest += 1
if event == 'return':
nest -= 1
return trace
the_program = sys.argv[1]
code = open(the_program).read()
sys.settrace(trace)
exec(code)
|
Make this useful for py3 also
|
Make this useful for py3 also
|
Python
|
apache-2.0
|
hugovk/coveragepy,hugovk/coveragepy,nedbat/coveragepy,hugovk/coveragepy,hugovk/coveragepy,nedbat/coveragepy,nedbat/coveragepy,nedbat/coveragepy,nedbat/coveragepy,hugovk/coveragepy
|
# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
"""Run a simple trace function on a file of Python code."""
import os, sys
nest = 0
def trace(frame, event, arg):
global nest
if nest is None:
# This can happen when Python is shutting down.
return None
print("%s%s %s %d @%d" % (
" " * nest,
event,
os.path.basename(frame.f_code.co_filename),
frame.f_lineno,
frame.f_lasti,
))
if event == 'call':
nest += 1
if event == 'return':
nest -= 1
return trace
the_program = sys.argv[1]
sys.settrace(trace)
execfile(the_program)
Make this useful for py3 also
|
# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
"""Run a simple trace function on a file of Python code."""
import os, sys
nest = 0
def trace(frame, event, arg):
global nest
if nest is None:
# This can happen when Python is shutting down.
return None
print("%s%s %s %d @%d" % (
" " * nest,
event,
os.path.basename(frame.f_code.co_filename),
frame.f_lineno,
frame.f_lasti,
))
if event == 'call':
nest += 1
if event == 'return':
nest -= 1
return trace
the_program = sys.argv[1]
code = open(the_program).read()
sys.settrace(trace)
exec(code)
|
<commit_before># Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
"""Run a simple trace function on a file of Python code."""
import os, sys
nest = 0
def trace(frame, event, arg):
global nest
if nest is None:
# This can happen when Python is shutting down.
return None
print("%s%s %s %d @%d" % (
" " * nest,
event,
os.path.basename(frame.f_code.co_filename),
frame.f_lineno,
frame.f_lasti,
))
if event == 'call':
nest += 1
if event == 'return':
nest -= 1
return trace
the_program = sys.argv[1]
sys.settrace(trace)
execfile(the_program)
<commit_msg>Make this useful for py3 also<commit_after>
|
# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
"""Run a simple trace function on a file of Python code."""
import os, sys
nest = 0
def trace(frame, event, arg):
global nest
if nest is None:
# This can happen when Python is shutting down.
return None
print("%s%s %s %d @%d" % (
" " * nest,
event,
os.path.basename(frame.f_code.co_filename),
frame.f_lineno,
frame.f_lasti,
))
if event == 'call':
nest += 1
if event == 'return':
nest -= 1
return trace
the_program = sys.argv[1]
code = open(the_program).read()
sys.settrace(trace)
exec(code)
|
# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
"""Run a simple trace function on a file of Python code."""
import os, sys
nest = 0
def trace(frame, event, arg):
global nest
if nest is None:
# This can happen when Python is shutting down.
return None
print("%s%s %s %d @%d" % (
" " * nest,
event,
os.path.basename(frame.f_code.co_filename),
frame.f_lineno,
frame.f_lasti,
))
if event == 'call':
nest += 1
if event == 'return':
nest -= 1
return trace
the_program = sys.argv[1]
sys.settrace(trace)
execfile(the_program)
Make this useful for py3 also# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
"""Run a simple trace function on a file of Python code."""
import os, sys
nest = 0
def trace(frame, event, arg):
global nest
if nest is None:
# This can happen when Python is shutting down.
return None
print("%s%s %s %d @%d" % (
" " * nest,
event,
os.path.basename(frame.f_code.co_filename),
frame.f_lineno,
frame.f_lasti,
))
if event == 'call':
nest += 1
if event == 'return':
nest -= 1
return trace
the_program = sys.argv[1]
code = open(the_program).read()
sys.settrace(trace)
exec(code)
|
<commit_before># Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
"""Run a simple trace function on a file of Python code."""
import os, sys
nest = 0
def trace(frame, event, arg):
global nest
if nest is None:
# This can happen when Python is shutting down.
return None
print("%s%s %s %d @%d" % (
" " * nest,
event,
os.path.basename(frame.f_code.co_filename),
frame.f_lineno,
frame.f_lasti,
))
if event == 'call':
nest += 1
if event == 'return':
nest -= 1
return trace
the_program = sys.argv[1]
sys.settrace(trace)
execfile(the_program)
<commit_msg>Make this useful for py3 also<commit_after># Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
"""Run a simple trace function on a file of Python code."""
import os, sys
nest = 0
def trace(frame, event, arg):
global nest
if nest is None:
# This can happen when Python is shutting down.
return None
print("%s%s %s %d @%d" % (
" " * nest,
event,
os.path.basename(frame.f_code.co_filename),
frame.f_lineno,
frame.f_lasti,
))
if event == 'call':
nest += 1
if event == 'return':
nest -= 1
return trace
the_program = sys.argv[1]
code = open(the_program).read()
sys.settrace(trace)
exec(code)
|
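The coverage.py change above works because execfile() was removed in Python 3; reading the file and exec()-ing a compiled code object is the portable replacement. A minimal sketch of a reusable shim (the function name is an assumption):

# hypothetical execfile() replacement that behaves the same on Python 2 and 3
def run_file(path, globs=None):
    with open(path) as f:
        # compiling with the real filename keeps tracebacks pointing at the script
        code = compile(f.read(), path, 'exec')
    exec(code, globs if globs is not None else {'__name__': '__main__'})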
89bbc555ecf520ee34a9b1292a2bdb5c937b18e2
|
addons/hw_drivers/iot_handlers/interfaces/PrinterInterface.py
|
addons/hw_drivers/iot_handlers/interfaces/PrinterInterface.py
|
from cups import Connection as cups_connection
from re import sub
from threading import Lock
from odoo.addons.hw_drivers.controllers.driver import Interface
conn = cups_connection()
PPDs = conn.getPPDs()
cups_lock = Lock() # We can only make one call to Cups at a time
class PrinterInterface(Interface):
_loop_delay = 120
connection_type = 'printer'
def get_devices(self):
printer_devices = {}
with cups_lock:
printers = conn.getPrinters()
for printer in printers:
printers[printer]['supported'] = True # these printers are automatically supported
printers[printer]['device-make-and-model'] = printers[printer]['printer-make-and-model']
if 'usb' in printers[printer]['device-uri']:
printers[printer]['device-class'] = 'direct'
else:
printers[printer]['device-class'] = 'network'
devices = conn.getDevices()
if printers:
devices.update(printers)
for path in devices:
if 'uuid=' in path:
identifier = sub('[^a-zA-Z0-9_]', '', path.split('uuid=')[1])
elif 'serial=' in path:
identifier = sub('[^a-zA-Z0-9_]', '', path.split('serial=')[1])
else:
identifier = sub('[^a-zA-Z0-9_]', '', path)
devices[path]['identifier'] = identifier
devices[path]['url'] = path
printer_devices[identifier] = devices[path]
return printer_devices
|
from cups import Connection as cups_connection
from re import sub
from threading import Lock
from odoo.addons.hw_drivers.controllers.driver import Interface
conn = cups_connection()
PPDs = conn.getPPDs()
cups_lock = Lock() # We can only make one call to Cups at a time
class PrinterInterface(Interface):
_loop_delay = 120
connection_type = 'printer'
def get_devices(self):
printer_devices = {}
with cups_lock:
printers = conn.getPrinters()
devices = conn.getDevices()
for printer in printers:
path = printers.get(printer).get('device-uri', False)
if path and path in devices:
devices.get(path).update({'supported': True}) # these printers are automatically supported
for path in devices:
if 'uuid=' in path:
identifier = sub('[^a-zA-Z0-9_]', '', path.split('uuid=')[1])
elif 'serial=' in path:
identifier = sub('[^a-zA-Z0-9_]', '', path.split('serial=')[1])
else:
identifier = sub('[^a-zA-Z0-9_]', '', path)
devices[path]['identifier'] = identifier
devices[path]['url'] = path
printer_devices[identifier] = devices[path]
return printer_devices
|
Fix issue with printer device-id
|
[FIX] hw_drivers: Fix issue with printer device-id
When we print a ticket status with a thermal printer we need the printer's device-id.
But if we add a printer manually, this device-id doesn't exist.
So now we update the devices list with supported = True if
printers are manually added.
closes odoo/odoo#53043
Signed-off-by: Quentin Lejeune (qle) <5d1c09880ce30fdcf63039932dac0c183ef0bdc7@odoo.com>
|
Python
|
agpl-3.0
|
ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo
|
from cups import Connection as cups_connection
from re import sub
from threading import Lock
from odoo.addons.hw_drivers.controllers.driver import Interface
conn = cups_connection()
PPDs = conn.getPPDs()
cups_lock = Lock() # We can only make one call to Cups at a time
class PrinterInterface(Interface):
_loop_delay = 120
connection_type = 'printer'
def get_devices(self):
printer_devices = {}
with cups_lock:
printers = conn.getPrinters()
for printer in printers:
printers[printer]['supported'] = True # these printers are automatically supported
printers[printer]['device-make-and-model'] = printers[printer]['printer-make-and-model']
if 'usb' in printers[printer]['device-uri']:
printers[printer]['device-class'] = 'direct'
else:
printers[printer]['device-class'] = 'network'
devices = conn.getDevices()
if printers:
devices.update(printers)
for path in devices:
if 'uuid=' in path:
identifier = sub('[^a-zA-Z0-9_]', '', path.split('uuid=')[1])
elif 'serial=' in path:
identifier = sub('[^a-zA-Z0-9_]', '', path.split('serial=')[1])
else:
identifier = sub('[^a-zA-Z0-9_]', '', path)
devices[path]['identifier'] = identifier
devices[path]['url'] = path
printer_devices[identifier] = devices[path]
return printer_devices
[FIX] hw_drivers: Fix issue with printer device-id
When we print a ticket status with a thermal printer we need the printer's device-id.
But if we add a printer manually, this device-id doesn't exist.
So now we update the devices list with supported = True if
printers are manually added.
closes odoo/odoo#53043
Signed-off-by: Quentin Lejeune (qle) <5d1c09880ce30fdcf63039932dac0c183ef0bdc7@odoo.com>
|
from cups import Connection as cups_connection
from re import sub
from threading import Lock
from odoo.addons.hw_drivers.controllers.driver import Interface
conn = cups_connection()
PPDs = conn.getPPDs()
cups_lock = Lock() # We can only make one call to Cups at a time
class PrinterInterface(Interface):
_loop_delay = 120
connection_type = 'printer'
def get_devices(self):
printer_devices = {}
with cups_lock:
printers = conn.getPrinters()
devices = conn.getDevices()
for printer in printers:
path = printers.get(printer).get('device-uri', False)
if path and path in devices:
devices.get(path).update({'supported': True}) # these printers are automatically supported
for path in devices:
if 'uuid=' in path:
identifier = sub('[^a-zA-Z0-9_]', '', path.split('uuid=')[1])
elif 'serial=' in path:
identifier = sub('[^a-zA-Z0-9_]', '', path.split('serial=')[1])
else:
identifier = sub('[^a-zA-Z0-9_]', '', path)
devices[path]['identifier'] = identifier
devices[path]['url'] = path
printer_devices[identifier] = devices[path]
return printer_devices
|
<commit_before>from cups import Connection as cups_connection
from re import sub
from threading import Lock
from odoo.addons.hw_drivers.controllers.driver import Interface
conn = cups_connection()
PPDs = conn.getPPDs()
cups_lock = Lock() # We can only make one call to Cups at a time
class PrinterInterface(Interface):
_loop_delay = 120
connection_type = 'printer'
def get_devices(self):
printer_devices = {}
with cups_lock:
printers = conn.getPrinters()
for printer in printers:
printers[printer]['supported'] = True # these printers are automatically supported
printers[printer]['device-make-and-model'] = printers[printer]['printer-make-and-model']
if 'usb' in printers[printer]['device-uri']:
printers[printer]['device-class'] = 'direct'
else:
printers[printer]['device-class'] = 'network'
devices = conn.getDevices()
if printers:
devices.update(printers)
for path in devices:
if 'uuid=' in path:
identifier = sub('[^a-zA-Z0-9_]', '', path.split('uuid=')[1])
elif 'serial=' in path:
identifier = sub('[^a-zA-Z0-9_]', '', path.split('serial=')[1])
else:
identifier = sub('[^a-zA-Z0-9_]', '', path)
devices[path]['identifier'] = identifier
devices[path]['url'] = path
printer_devices[identifier] = devices[path]
return printer_devices
<commit_msg>[FIX] hw_drivers: Fix issue with printer device-id
When we print a ticket status with a thermal printer we need the printer's device-id.
But if we add a printer manually, this device-id doesn't exist.
So now we update the devices list with supported = True if
printers are manually added.
closes odoo/odoo#53043
Signed-off-by: Quentin Lejeune (qle) <5d1c09880ce30fdcf63039932dac0c183ef0bdc7@odoo.com><commit_after>
|
from cups import Connection as cups_connection
from re import sub
from threading import Lock
from odoo.addons.hw_drivers.controllers.driver import Interface
conn = cups_connection()
PPDs = conn.getPPDs()
cups_lock = Lock() # We can only make one call to Cups at a time
class PrinterInterface(Interface):
_loop_delay = 120
connection_type = 'printer'
def get_devices(self):
printer_devices = {}
with cups_lock:
printers = conn.getPrinters()
devices = conn.getDevices()
for printer in printers:
path = printers.get(printer).get('device-uri', False)
if path and path in devices:
devices.get(path).update({'supported': True}) # these printers are automatically supported
for path in devices:
if 'uuid=' in path:
identifier = sub('[^a-zA-Z0-9_]', '', path.split('uuid=')[1])
elif 'serial=' in path:
identifier = sub('[^a-zA-Z0-9_]', '', path.split('serial=')[1])
else:
identifier = sub('[^a-zA-Z0-9_]', '', path)
devices[path]['identifier'] = identifier
devices[path]['url'] = path
printer_devices[identifier] = devices[path]
return printer_devices
|
from cups import Connection as cups_connection
from re import sub
from threading import Lock
from odoo.addons.hw_drivers.controllers.driver import Interface
conn = cups_connection()
PPDs = conn.getPPDs()
cups_lock = Lock() # We can only make one call to Cups at a time
class PrinterInterface(Interface):
_loop_delay = 120
connection_type = 'printer'
def get_devices(self):
printer_devices = {}
with cups_lock:
printers = conn.getPrinters()
for printer in printers:
printers[printer]['supported'] = True # these printers are automatically supported
printers[printer]['device-make-and-model'] = printers[printer]['printer-make-and-model']
if 'usb' in printers[printer]['device-uri']:
printers[printer]['device-class'] = 'direct'
else:
printers[printer]['device-class'] = 'network'
devices = conn.getDevices()
if printers:
devices.update(printers)
for path in devices:
if 'uuid=' in path:
identifier = sub('[^a-zA-Z0-9_]', '', path.split('uuid=')[1])
elif 'serial=' in path:
identifier = sub('[^a-zA-Z0-9_]', '', path.split('serial=')[1])
else:
identifier = sub('[^a-zA-Z0-9_]', '', path)
devices[path]['identifier'] = identifier
devices[path]['url'] = path
printer_devices[identifier] = devices[path]
return printer_devices
[FIX] hw_drivers: Fix issue with printer device-id
When we print a ticket status with a thermal printer we need the printer's device-id.
But if we add a printer manually, this device-id doesn't exist.
So now we update the devices list with supported = True if
printers are manually added.
closes odoo/odoo#53043
Signed-off-by: Quentin Lejeune (qle) <5d1c09880ce30fdcf63039932dac0c183ef0bdc7@odoo.com>from cups import Connection as cups_connection
from re import sub
from threading import Lock
from odoo.addons.hw_drivers.controllers.driver import Interface
conn = cups_connection()
PPDs = conn.getPPDs()
cups_lock = Lock() # We can only make one call to Cups at a time
class PrinterInterface(Interface):
_loop_delay = 120
connection_type = 'printer'
def get_devices(self):
printer_devices = {}
with cups_lock:
printers = conn.getPrinters()
devices = conn.getDevices()
for printer in printers:
path = printers.get(printer).get('device-uri', False)
if path and path in devices:
devices.get(path).update({'supported': True}) # these printers are automatically supported
for path in devices:
if 'uuid=' in path:
identifier = sub('[^a-zA-Z0-9_]', '', path.split('uuid=')[1])
elif 'serial=' in path:
identifier = sub('[^a-zA-Z0-9_]', '', path.split('serial=')[1])
else:
identifier = sub('[^a-zA-Z0-9_]', '', path)
devices[path]['identifier'] = identifier
devices[path]['url'] = path
printer_devices[identifier] = devices[path]
return printer_devices
|
<commit_before>from cups import Connection as cups_connection
from re import sub
from threading import Lock
from odoo.addons.hw_drivers.controllers.driver import Interface
conn = cups_connection()
PPDs = conn.getPPDs()
cups_lock = Lock() # We can only make one call to Cups at a time
class PrinterInterface(Interface):
_loop_delay = 120
connection_type = 'printer'
def get_devices(self):
printer_devices = {}
with cups_lock:
printers = conn.getPrinters()
for printer in printers:
printers[printer]['supported'] = True # these printers are automatically supported
printers[printer]['device-make-and-model'] = printers[printer]['printer-make-and-model']
if 'usb' in printers[printer]['device-uri']:
printers[printer]['device-class'] = 'direct'
else:
printers[printer]['device-class'] = 'network'
devices = conn.getDevices()
if printers:
devices.update(printers)
for path in devices:
if 'uuid=' in path:
identifier = sub('[^a-zA-Z0-9_]', '', path.split('uuid=')[1])
elif 'serial=' in path:
identifier = sub('[^a-zA-Z0-9_]', '', path.split('serial=')[1])
else:
identifier = sub('[^a-zA-Z0-9_]', '', path)
devices[path]['identifier'] = identifier
devices[path]['url'] = path
printer_devices[identifier] = devices[path]
return printer_devices
<commit_msg>[FIX] hw_drivers: Fix issue with printer device-id
When we print a ticket status with a thermal printer we need the printer's device-id.
But if we add a printer manually, this device-id doesn't exist.
So now we update the devices list with supported = True if
printers are manually added.
closes odoo/odoo#53043
Signed-off-by: Quentin Lejeune (qle) <5d1c09880ce30fdcf63039932dac0c183ef0bdc7@odoo.com><commit_after>from cups import Connection as cups_connection
from re import sub
from threading import Lock
from odoo.addons.hw_drivers.controllers.driver import Interface
conn = cups_connection()
PPDs = conn.getPPDs()
cups_lock = Lock() # We can only make one call to Cups at a time
class PrinterInterface(Interface):
_loop_delay = 120
connection_type = 'printer'
def get_devices(self):
printer_devices = {}
with cups_lock:
printers = conn.getPrinters()
devices = conn.getDevices()
for printer in printers:
path = printers.get(printer).get('device-uri', False)
if path and path in devices:
devices.get(path).update({'supported': True}) # these printers are automatically supported
for path in devices:
if 'uuid=' in path:
identifier = sub('[^a-zA-Z0-9_]', '', path.split('uuid=')[1])
elif 'serial=' in path:
identifier = sub('[^a-zA-Z0-9_]', '', path.split('serial=')[1])
else:
identifier = sub('[^a-zA-Z0-9_]', '', path)
devices[path]['identifier'] = identifier
devices[path]['url'] = path
printer_devices[identifier] = devices[path]
return printer_devices
|
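A small illustration of the identifier sanitisation used in the printer record above: everything that is not a word character is stripped from the part of the CUPS device URI after serial= (the URI below is a made-up example):

from re import sub

path = 'usb://EPSON/TM-T20II?serial=ABC-123'  # hypothetical device URI
identifier = sub('[^a-zA-Z0-9_]', '', path.split('serial=')[1])
print(identifier)  # prints 'ABC123': the hyphen is removed, letters and digits survive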
460ed562a64b7aacbd690a2e62f39b11bfcb092f
|
src/MCPClient/lib/clientScripts/examineContents.py
|
src/MCPClient/lib/clientScripts/examineContents.py
|
#!/usr/bin/env python2
import os
import subprocess
import sys
def main(target, output):
args = [
'bulk_extractor', target, '-o', output,
'-M', '250', '-q', '-1'
]
try:
os.makedirs(output)
subprocess.call(args)
return 0
except Exception as e:
return e
if __name__ == '__main__':
target = sys.argv[1]
sipdir = sys.argv[2]
file_uuid = sys.argv[3]
output = os.path.join(sipdir, 'logs', 'bulk-' + file_uuid)
sys.exit(main(target, output))
|
#!/usr/bin/env python2
import os
import subprocess
import sys
def main(target, output):
args = [
'bulk_extractor', target, '-o', output,
'-M', '250', '-q', '-1'
]
try:
os.makedirs(output)
subprocess.call(args)
# remove empty BulkExtractor logs
for filename in os.listdir(output):
            filepath = os.path.join(output, filename)
if os.path.getsize(filepath) == 0:
os.remove(filepath)
return 0
except Exception as e:
return e
if __name__ == '__main__':
target = sys.argv[1]
sipdir = sys.argv[2]
file_uuid = sys.argv[3]
output = os.path.join(sipdir, 'logs', 'bulk-' + file_uuid)
sys.exit(main(target, output))
|
Remove empty bulk extractor logs
|
Remove empty bulk extractor logs
Squashed commit of the following:
commit c923667809bb5d828144b09d03bd53554229a9bd
Author: Aaron Elkiss <aelkiss@umich.edu>
Date: Thu Dec 8 09:34:47 2016 -0500
fix spacing & variable name
commit df597f69e19c3a3b4210c1131a79550eb147e412
Author: Aaron Daniel Elkiss <aelkiss@umich.edu>
Date: Mon Oct 31 13:57:32 2016 +0000
remove empty bulkextractor logs
|
Python
|
agpl-3.0
|
artefactual/archivematica,artefactual/archivematica,artefactual/archivematica,artefactual/archivematica
|
#!/usr/bin/env python2
import os
import subprocess
import sys
def main(target, output):
args = [
'bulk_extractor', target, '-o', output,
'-M', '250', '-q', '-1'
]
try:
os.makedirs(output)
subprocess.call(args)
return 0
except Exception as e:
return e
if __name__ == '__main__':
target = sys.argv[1]
sipdir = sys.argv[2]
file_uuid = sys.argv[3]
output = os.path.join(sipdir, 'logs', 'bulk-' + file_uuid)
sys.exit(main(target, output))
Remove empty bulk extractor logs
Squashed commit of the following:
commit c923667809bb5d828144b09d03bd53554229a9bd
Author: Aaron Elkiss <aelkiss@umich.edu>
Date: Thu Dec 8 09:34:47 2016 -0500
fix spacing & variable name
commit df597f69e19c3a3b4210c1131a79550eb147e412
Author: Aaron Daniel Elkiss <aelkiss@umich.edu>
Date: Mon Oct 31 13:57:32 2016 +0000
remove empty bulkextractor logs
|
#!/usr/bin/env python2
import os
import subprocess
import sys
def main(target, output):
args = [
'bulk_extractor', target, '-o', output,
'-M', '250', '-q', '-1'
]
try:
os.makedirs(output)
subprocess.call(args)
# remove empty BulkExtractor logs
for filename in os.listdir(output):
            filepath = os.path.join(output, filename)
if os.path.getsize(filepath) == 0:
os.remove(filepath)
return 0
except Exception as e:
return e
if __name__ == '__main__':
target = sys.argv[1]
sipdir = sys.argv[2]
file_uuid = sys.argv[3]
output = os.path.join(sipdir, 'logs', 'bulk-' + file_uuid)
sys.exit(main(target, output))
|
<commit_before>#!/usr/bin/env python2
import os
import subprocess
import sys
def main(target, output):
args = [
'bulk_extractor', target, '-o', output,
'-M', '250', '-q', '-1'
]
try:
os.makedirs(output)
subprocess.call(args)
return 0
except Exception as e:
return e
if __name__ == '__main__':
target = sys.argv[1]
sipdir = sys.argv[2]
file_uuid = sys.argv[3]
output = os.path.join(sipdir, 'logs', 'bulk-' + file_uuid)
sys.exit(main(target, output))
<commit_msg>Remove empty bulk extractor logs
Squashed commit of the following:
commit c923667809bb5d828144b09d03bd53554229a9bd
Author: Aaron Elkiss <aelkiss@umich.edu>
Date: Thu Dec 8 09:34:47 2016 -0500
fix spacing & variable name
commit df597f69e19c3a3b4210c1131a79550eb147e412
Author: Aaron Daniel Elkiss <aelkiss@umich.edu>
Date: Mon Oct 31 13:57:32 2016 +0000
remove empty bulkextractor logs<commit_after>
|
#!/usr/bin/env python2
import os
import subprocess
import sys
def main(target, output):
args = [
'bulk_extractor', target, '-o', output,
'-M', '250', '-q', '-1'
]
try:
os.makedirs(output)
subprocess.call(args)
# remove empty BulkExtractor logs
for filename in os.listdir(output):
            filepath = os.path.join(output, filename)
if os.path.getsize(filepath) == 0:
os.remove(filepath)
return 0
except Exception as e:
return e
if __name__ == '__main__':
target = sys.argv[1]
sipdir = sys.argv[2]
file_uuid = sys.argv[3]
output = os.path.join(sipdir, 'logs', 'bulk-' + file_uuid)
sys.exit(main(target, output))
|
#!/usr/bin/env python2
import os
import subprocess
import sys
def main(target, output):
args = [
'bulk_extractor', target, '-o', output,
'-M', '250', '-q', '-1'
]
try:
os.makedirs(output)
subprocess.call(args)
return 0
except Exception as e:
return e
if __name__ == '__main__':
target = sys.argv[1]
sipdir = sys.argv[2]
file_uuid = sys.argv[3]
output = os.path.join(sipdir, 'logs', 'bulk-' + file_uuid)
sys.exit(main(target, output))
Remove empty bulk extractor logs
Squashed commit of the following:
commit c923667809bb5d828144b09d03bd53554229a9bd
Author: Aaron Elkiss <aelkiss@umich.edu>
Date: Thu Dec 8 09:34:47 2016 -0500
fix spacing & variable name
commit df597f69e19c3a3b4210c1131a79550eb147e412
Author: Aaron Daniel Elkiss <aelkiss@umich.edu>
Date: Mon Oct 31 13:57:32 2016 +0000
remove empty bulkextractor logs#!/usr/bin/env python2
import os
import subprocess
import sys
def main(target, output):
args = [
'bulk_extractor', target, '-o', output,
'-M', '250', '-q', '-1'
]
try:
os.makedirs(output)
subprocess.call(args)
# remove empty BulkExtractor logs
for filename in os.listdir(output):
            filepath = os.path.join(output, filename)
if os.path.getsize(filepath) == 0:
os.remove(filepath)
return 0
except Exception as e:
return e
if __name__ == '__main__':
target = sys.argv[1]
sipdir = sys.argv[2]
file_uuid = sys.argv[3]
output = os.path.join(sipdir, 'logs', 'bulk-' + file_uuid)
sys.exit(main(target, output))
|
<commit_before>#!/usr/bin/env python2
import os
import subprocess
import sys
def main(target, output):
args = [
'bulk_extractor', target, '-o', output,
'-M', '250', '-q', '-1'
]
try:
os.makedirs(output)
subprocess.call(args)
return 0
except Exception as e:
return e
if __name__ == '__main__':
target = sys.argv[1]
sipdir = sys.argv[2]
file_uuid = sys.argv[3]
output = os.path.join(sipdir, 'logs', 'bulk-' + file_uuid)
sys.exit(main(target, output))
<commit_msg>Remove empty bulk extractor logs
Squashed commit of the following:
commit c923667809bb5d828144b09d03bd53554229a9bd
Author: Aaron Elkiss <aelkiss@umich.edu>
Date: Thu Dec 8 09:34:47 2016 -0500
fix spacing & variable name
commit df597f69e19c3a3b4210c1131a79550eb147e412
Author: Aaron Daniel Elkiss <aelkiss@umich.edu>
Date: Mon Oct 31 13:57:32 2016 +0000
remove empty bulkextractor logs<commit_after>#!/usr/bin/env python2
import os
import subprocess
import sys
def main(target, output):
args = [
'bulk_extractor', target, '-o', output,
'-M', '250', '-q', '-1'
]
try:
os.makedirs(output)
subprocess.call(args)
# remove empty BulkExtractor logs
for filename in os.listdir(output):
            filepath = os.path.join(output, filename)
if os.path.getsize(filepath) == 0:
os.remove(filepath)
return 0
except Exception as e:
return e
if __name__ == '__main__':
target = sys.argv[1]
sipdir = sys.argv[2]
file_uuid = sys.argv[3]
output = os.path.join(sipdir, 'logs', 'bulk-' + file_uuid)
sys.exit(main(target, output))
|
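The empty-log pruning added in the record above can also be expressed with pathlib; a minimal equivalent sketch:

from pathlib import Path

def prune_empty_files(directory):
    # remove zero-byte files such as the empty logs bulk_extractor leaves behind
    for p in Path(directory).iterdir():
        if p.is_file() and p.stat().st_size == 0:
            p.unlink()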
5a15ca8b790dda7b2ea11af5d1c179f9e7d9f2ac
|
pages/search_indexes.py
|
pages/search_indexes.py
|
"""Django haystack `SearchIndex` module."""
from pages.models import Page
from django.conf import settings
from haystack.indexes import SearchIndex, CharField, DateTimeField, RealTimeSearchIndex
from haystack import site
class PageIndex(SearchIndex):
"""Search index for pages content."""
text = CharField(document=True, use_template=True)
title = CharField(model_attr='title')
url = CharField(model_attr='get_absolute_url')
publication_date = DateTimeField(model_attr='publication_date')
def get_queryset(self):
"""Used when the entire index for model is updated."""
return Page.objects.published()
class RealTimePageIndex(RealTimeSearchIndex):
"""Search index for pages content."""
text = CharField(document=True, use_template=True)
title = CharField(model_attr='title')
url = CharField(model_attr='get_absolute_url')
publication_date = DateTimeField(model_attr='publication_date')
def get_queryset(self):
"""Used when the entire index for model is updated."""
return Page.objects.published()
if settings.PAGE_REAL_TIME_SEARCH:
site.register(Page, RealTimePageIndex)
else:
site.register(Page, PageIndex)
|
"""Django haystack `SearchIndex` module."""
from pages.models import Page
from gerbi import settings
from haystack.indexes import SearchIndex, CharField, DateTimeField, RealTimeSearchIndex
from haystack import site
class PageIndex(SearchIndex):
"""Search index for pages content."""
text = CharField(document=True, use_template=True)
title = CharField(model_attr='title')
url = CharField(model_attr='get_absolute_url')
publication_date = DateTimeField(model_attr='publication_date')
def get_queryset(self):
"""Used when the entire index for model is updated."""
return Page.objects.published()
class RealTimePageIndex(RealTimeSearchIndex):
"""Search index for pages content."""
text = CharField(document=True, use_template=True)
title = CharField(model_attr='title')
url = CharField(model_attr='get_absolute_url')
publication_date = DateTimeField(model_attr='publication_date')
def get_queryset(self):
"""Used when the entire index for model is updated."""
return Page.objects.published()
if settings.PAGE_REAL_TIME_SEARCH:
site.register(Page, RealTimePageIndex)
else:
site.register(Page, PageIndex)
|
Use gerbi setting not global settings
|
Use gerbi setting not global settings
|
Python
|
bsd-3-clause
|
pombredanne/django-page-cms-1,akaihola/django-page-cms,remik/django-page-cms,akaihola/django-page-cms,batiste/django-page-cms,remik/django-page-cms,remik/django-page-cms,pombredanne/django-page-cms-1,batiste/django-page-cms,batiste/django-page-cms,pombredanne/django-page-cms-1,remik/django-page-cms,akaihola/django-page-cms
|
"""Django haystack `SearchIndex` module."""
from pages.models import Page
from django.conf import settings
from haystack.indexes import SearchIndex, CharField, DateTimeField, RealTimeSearchIndex
from haystack import site
class PageIndex(SearchIndex):
"""Search index for pages content."""
text = CharField(document=True, use_template=True)
title = CharField(model_attr='title')
url = CharField(model_attr='get_absolute_url')
publication_date = DateTimeField(model_attr='publication_date')
def get_queryset(self):
"""Used when the entire index for model is updated."""
return Page.objects.published()
class RealTimePageIndex(RealTimeSearchIndex):
"""Search index for pages content."""
text = CharField(document=True, use_template=True)
title = CharField(model_attr='title')
url = CharField(model_attr='get_absolute_url')
publication_date = DateTimeField(model_attr='publication_date')
def get_queryset(self):
"""Used when the entire index for model is updated."""
return Page.objects.published()
if settings.PAGE_REAL_TIME_SEARCH:
site.register(Page, RealTimePageIndex)
else:
site.register(Page, PageIndex)
Use gerbi setting not global settings
|
"""Django haystack `SearchIndex` module."""
from pages.models import Page
from gerbi import settings
from haystack.indexes import SearchIndex, CharField, DateTimeField, RealTimeSearchIndex
from haystack import site
class PageIndex(SearchIndex):
"""Search index for pages content."""
text = CharField(document=True, use_template=True)
title = CharField(model_attr='title')
url = CharField(model_attr='get_absolute_url')
publication_date = DateTimeField(model_attr='publication_date')
def get_queryset(self):
"""Used when the entire index for model is updated."""
return Page.objects.published()
class RealTimePageIndex(RealTimeSearchIndex):
"""Search index for pages content."""
text = CharField(document=True, use_template=True)
title = CharField(model_attr='title')
url = CharField(model_attr='get_absolute_url')
publication_date = DateTimeField(model_attr='publication_date')
def get_queryset(self):
"""Used when the entire index for model is updated."""
return Page.objects.published()
if settings.PAGE_REAL_TIME_SEARCH:
site.register(Page, RealTimePageIndex)
else:
site.register(Page, PageIndex)
|
<commit_before>"""Django haystack `SearchIndex` module."""
from pages.models import Page
from django.conf import settings
from haystack.indexes import SearchIndex, CharField, DateTimeField, RealTimeSearchIndex
from haystack import site
class PageIndex(SearchIndex):
"""Search index for pages content."""
text = CharField(document=True, use_template=True)
title = CharField(model_attr='title')
url = CharField(model_attr='get_absolute_url')
publication_date = DateTimeField(model_attr='publication_date')
def get_queryset(self):
"""Used when the entire index for model is updated."""
return Page.objects.published()
class RealTimePageIndex(RealTimeSearchIndex):
"""Search index for pages content."""
text = CharField(document=True, use_template=True)
title = CharField(model_attr='title')
url = CharField(model_attr='get_absolute_url')
publication_date = DateTimeField(model_attr='publication_date')
def get_queryset(self):
"""Used when the entire index for model is updated."""
return Page.objects.published()
if settings.PAGE_REAL_TIME_SEARCH:
site.register(Page, RealTimePageIndex)
else:
site.register(Page, PageIndex)
<commit_msg>Use gerbi setting not global settings<commit_after>
|
"""Django haystack `SearchIndex` module."""
from pages.models import Page
from gerbi import settings
from haystack.indexes import SearchIndex, CharField, DateTimeField, RealTimeSearchIndex
from haystack import site
class PageIndex(SearchIndex):
"""Search index for pages content."""
text = CharField(document=True, use_template=True)
title = CharField(model_attr='title')
url = CharField(model_attr='get_absolute_url')
publication_date = DateTimeField(model_attr='publication_date')
def get_queryset(self):
"""Used when the entire index for model is updated."""
return Page.objects.published()
class RealTimePageIndex(RealTimeSearchIndex):
"""Search index for pages content."""
text = CharField(document=True, use_template=True)
title = CharField(model_attr='title')
url = CharField(model_attr='get_absolute_url')
publication_date = DateTimeField(model_attr='publication_date')
def get_queryset(self):
"""Used when the entire index for model is updated."""
return Page.objects.published()
if settings.PAGE_REAL_TIME_SEARCH:
site.register(Page, RealTimePageIndex)
else:
site.register(Page, PageIndex)
|
"""Django haystack `SearchIndex` module."""
from pages.models import Page
from django.conf import settings
from haystack.indexes import SearchIndex, CharField, DateTimeField, RealTimeSearchIndex
from haystack import site
class PageIndex(SearchIndex):
"""Search index for pages content."""
text = CharField(document=True, use_template=True)
title = CharField(model_attr='title')
url = CharField(model_attr='get_absolute_url')
publication_date = DateTimeField(model_attr='publication_date')
def get_queryset(self):
"""Used when the entire index for model is updated."""
return Page.objects.published()
class RealTimePageIndex(RealTimeSearchIndex):
"""Search index for pages content."""
text = CharField(document=True, use_template=True)
title = CharField(model_attr='title')
url = CharField(model_attr='get_absolute_url')
publication_date = DateTimeField(model_attr='publication_date')
def get_queryset(self):
"""Used when the entire index for model is updated."""
return Page.objects.published()
if settings.PAGE_REAL_TIME_SEARCH:
site.register(Page, RealTimePageIndex)
else:
site.register(Page, PageIndex)
Use gerbi setting not global settings"""Django haystack `SearchIndex` module."""
from pages.models import Page
from gerbi import settings
from haystack.indexes import SearchIndex, CharField, DateTimeField, RealTimeSearchIndex
from haystack import site
class PageIndex(SearchIndex):
"""Search index for pages content."""
text = CharField(document=True, use_template=True)
title = CharField(model_attr='title')
url = CharField(model_attr='get_absolute_url')
publication_date = DateTimeField(model_attr='publication_date')
def get_queryset(self):
"""Used when the entire index for model is updated."""
return Page.objects.published()
class RealTimePageIndex(RealTimeSearchIndex):
"""Search index for pages content."""
text = CharField(document=True, use_template=True)
title = CharField(model_attr='title')
url = CharField(model_attr='get_absolute_url')
publication_date = DateTimeField(model_attr='publication_date')
def get_queryset(self):
"""Used when the entire index for model is updated."""
return Page.objects.published()
if settings.PAGE_REAL_TIME_SEARCH:
site.register(Page, RealTimePageIndex)
else:
site.register(Page, PageIndex)
|
<commit_before>"""Django haystack `SearchIndex` module."""
from pages.models import Page
from django.conf import settings
from haystack.indexes import SearchIndex, CharField, DateTimeField, RealTimeSearchIndex
from haystack import site
class PageIndex(SearchIndex):
"""Search index for pages content."""
text = CharField(document=True, use_template=True)
title = CharField(model_attr='title')
url = CharField(model_attr='get_absolute_url')
publication_date = DateTimeField(model_attr='publication_date')
def get_queryset(self):
"""Used when the entire index for model is updated."""
return Page.objects.published()
class RealTimePageIndex(RealTimeSearchIndex):
"""Search index for pages content."""
text = CharField(document=True, use_template=True)
title = CharField(model_attr='title')
url = CharField(model_attr='get_absolute_url')
publication_date = DateTimeField(model_attr='publication_date')
def get_queryset(self):
"""Used when the entire index for model is updated."""
return Page.objects.published()
if settings.PAGE_REAL_TIME_SEARCH:
site.register(Page, RealTimePageIndex)
else:
site.register(Page, PageIndex)
<commit_msg>Use gerbi setting not global settings<commit_after>"""Django haystack `SearchIndex` module."""
from pages.models import Page
from gerbi import settings
from haystack.indexes import SearchIndex, CharField, DateTimeField, RealTimeSearchIndex
from haystack import site
class PageIndex(SearchIndex):
"""Search index for pages content."""
text = CharField(document=True, use_template=True)
title = CharField(model_attr='title')
url = CharField(model_attr='get_absolute_url')
publication_date = DateTimeField(model_attr='publication_date')
def get_queryset(self):
"""Used when the entire index for model is updated."""
return Page.objects.published()
class RealTimePageIndex(RealTimeSearchIndex):
"""Search index for pages content."""
text = CharField(document=True, use_template=True)
title = CharField(model_attr='title')
url = CharField(model_attr='get_absolute_url')
publication_date = DateTimeField(model_attr='publication_date')
def get_queryset(self):
"""Used when the entire index for model is updated."""
return Page.objects.published()
if settings.PAGE_REAL_TIME_SEARCH:
site.register(Page, RealTimePageIndex)
else:
site.register(Page, PageIndex)
|
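The swap above from django.conf.settings to an app-local settings module is a common pattern for shipping per-app defaults that projects can still override. A sketch of what such a module often looks like; this is an assumption about gerbi/settings.py, not its actual contents:

# hypothetical app-level settings module with a project-overridable default
from django.conf import settings as django_settings

PAGE_REAL_TIME_SEARCH = getattr(django_settings, 'PAGE_REAL_TIME_SEARCH', False)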
6c4c3ac1dde0519d08ab461ab60ccc1d8b9d3d38
|
CodeFights/createDie.py
|
CodeFights/createDie.py
|
#!/usr/local/bin/python
# Code Fights Create Die Problem
import random
def createDie(seed, n):
class Die(object):
pass
class Game(object):
die = Die(seed, n)
return Game.die
def main():
tests = [
[37237, 5, 3],
[36706, 12, 9],
[21498, 10, 10],
[2998, 6, 3],
[5509, 10, 4]
]
for t in tests:
res = createDie(t[0], t[1])
ans = t[2]
if ans == res:
print("PASSED: createDie({}, {}) returned {}"
.format(t[0], t[1], res))
else:
print("FAILED: createDie({}, {}) returned {}, answer: {}"
.format(t[0], t[1], res, ans))
if __name__ == '__main__':
main()
|
#!/usr/local/bin/python
# Code Fights Create Die Problem
import random
def createDie(seed, n):
class Die(object):
        def __new__(cls, seed, n):
random.seed(seed)
return int(random.random() * n) + 1
class Game(object):
die = Die(seed, n)
return Game.die
def main():
tests = [
[37237, 5, 3],
[36706, 12, 9],
[21498, 10, 10],
[2998, 6, 3],
[5509, 10, 4]
]
for t in tests:
res = createDie(t[0], t[1])
ans = t[2]
if ans == res:
print("PASSED: createDie({}, {}) returned {}"
.format(t[0], t[1], res))
else:
print("FAILED: createDie({}, {}) returned {}, answer: {}"
.format(t[0], t[1], res, ans))
if __name__ == '__main__':
main()
|
Solve Code Fights create die problem
|
Solve Code Fights create die problem
|
Python
|
mit
|
HKuz/Test_Code
|
#!/usr/local/bin/python
# Code Fights Create Die Problem
import random
def createDie(seed, n):
class Die(object):
pass
class Game(object):
die = Die(seed, n)
return Game.die
def main():
tests = [
[37237, 5, 3],
[36706, 12, 9],
[21498, 10, 10],
[2998, 6, 3],
[5509, 10, 4]
]
for t in tests:
res = createDie(t[0], t[1])
ans = t[2]
if ans == res:
print("PASSED: createDie({}, {}) returned {}"
.format(t[0], t[1], res))
else:
print("FAILED: createDie({}, {}) returned {}, answer: {}"
.format(t[0], t[1], res, ans))
if __name__ == '__main__':
main()
Solve Code Fights create die problem
|
#!/usr/local/bin/python
# Code Fights Create Die Problem
import random
def createDie(seed, n):
class Die(object):
        def __new__(cls, seed, n):
random.seed(seed)
return int(random.random() * n) + 1
class Game(object):
die = Die(seed, n)
return Game.die
def main():
tests = [
[37237, 5, 3],
[36706, 12, 9],
[21498, 10, 10],
[2998, 6, 3],
[5509, 10, 4]
]
for t in tests:
res = createDie(t[0], t[1])
ans = t[2]
if ans == res:
print("PASSED: createDie({}, {}) returned {}"
.format(t[0], t[1], res))
else:
print("FAILED: createDie({}, {}) returned {}, answer: {}"
.format(t[0], t[1], res, ans))
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/local/bin/python
# Code Fights Create Die Problem
import random
def createDie(seed, n):
class Die(object):
pass
class Game(object):
die = Die(seed, n)
return Game.die
def main():
tests = [
[37237, 5, 3],
[36706, 12, 9],
[21498, 10, 10],
[2998, 6, 3],
[5509, 10, 4]
]
for t in tests:
res = createDie(t[0], t[1])
ans = t[2]
if ans == res:
print("PASSED: createDie({}, {}) returned {}"
.format(t[0], t[1], res))
else:
print("FAILED: createDie({}, {}) returned {}, answer: {}"
.format(t[0], t[1], res, ans))
if __name__ == '__main__':
main()
<commit_msg>Solve Code Fights create die problem<commit_after>
|
#!/usr/local/bin/python
# Code Fights Create Die Problem
import random
def createDie(seed, n):
class Die(object):
        def __new__(cls, seed, n):
random.seed(seed)
return int(random.random() * n) + 1
class Game(object):
die = Die(seed, n)
return Game.die
def main():
tests = [
[37237, 5, 3],
[36706, 12, 9],
[21498, 10, 10],
[2998, 6, 3],
[5509, 10, 4]
]
for t in tests:
res = createDie(t[0], t[1])
ans = t[2]
if ans == res:
print("PASSED: createDie({}, {}) returned {}"
.format(t[0], t[1], res))
else:
print("FAILED: createDie({}, {}) returned {}, answer: {}"
.format(t[0], t[1], res, ans))
if __name__ == '__main__':
main()
|
#!/usr/local/bin/python
# Code Fights Create Die Problem
import random
def createDie(seed, n):
class Die(object):
pass
class Game(object):
die = Die(seed, n)
return Game.die
def main():
tests = [
[37237, 5, 3],
[36706, 12, 9],
[21498, 10, 10],
[2998, 6, 3],
[5509, 10, 4]
]
for t in tests:
res = createDie(t[0], t[1])
ans = t[2]
if ans == res:
print("PASSED: createDie({}, {}) returned {}"
.format(t[0], t[1], res))
else:
print("FAILED: createDie({}, {}) returned {}, answer: {}"
.format(t[0], t[1], res, ans))
if __name__ == '__main__':
main()
Solve Code Fights create die problem#!/usr/local/bin/python
# Code Fights Create Die Problem
import random
def createDie(seed, n):
class Die(object):
        def __new__(cls, seed, n):
random.seed(seed)
return int(random.random() * n) + 1
class Game(object):
die = Die(seed, n)
return Game.die
def main():
tests = [
[37237, 5, 3],
[36706, 12, 9],
[21498, 10, 10],
[2998, 6, 3],
[5509, 10, 4]
]
for t in tests:
res = createDie(t[0], t[1])
ans = t[2]
if ans == res:
print("PASSED: createDie({}, {}) returned {}"
.format(t[0], t[1], res))
else:
print("FAILED: createDie({}, {}) returned {}, answer: {}"
.format(t[0], t[1], res, ans))
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/local/bin/python
# Code Fights Create Die Problem
import random
def createDie(seed, n):
class Die(object):
pass
class Game(object):
die = Die(seed, n)
return Game.die
def main():
tests = [
[37237, 5, 3],
[36706, 12, 9],
[21498, 10, 10],
[2998, 6, 3],
[5509, 10, 4]
]
for t in tests:
res = createDie(t[0], t[1])
ans = t[2]
if ans == res:
print("PASSED: createDie({}, {}) returned {}"
.format(t[0], t[1], res))
else:
print("FAILED: createDie({}, {}) returned {}, answer: {}"
.format(t[0], t[1], res, ans))
if __name__ == '__main__':
main()
<commit_msg>Solve Code Fights create die problem<commit_after>#!/usr/local/bin/python
# Code Fights Create Die Problem
import random
def createDie(seed, n):
class Die(object):
        def __new__(cls, seed, n):
random.seed(seed)
return int(random.random() * n) + 1
class Game(object):
die = Die(seed, n)
return Game.die
def main():
tests = [
[37237, 5, 3],
[36706, 12, 9],
[21498, 10, 10],
[2998, 6, 3],
[5509, 10, 4]
]
for t in tests:
res = createDie(t[0], t[1])
ans = t[2]
if ans == res:
print("PASSED: createDie({}, {}) returned {}"
.format(t[0], t[1], res))
else:
print("FAILED: createDie({}, {}) returned {}, answer: {}"
.format(t[0], t[1], res, ans))
if __name__ == '__main__':
main()
|
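The trick in the createDie solution above is that __new__ may return any object; when the returned value is not an instance of the class, __init__ is never invoked, so Die(seed, n) evaluates directly to an int. A tiny self-contained demonstration:

import random

class AlwaysInt(object):
    def __new__(cls, seed, n):
        random.seed(seed)                     # deterministic given the seed
        return int(random.random() * n) + 1   # returns an int, not an AlwaysInt

roll = AlwaysInt(37237, 5)
print(type(roll).__name__)  # prints 'int'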
b57a599640c6fa8bf23f081c914b7437e3f04dcd
|
course_discovery/apps/courses/management/commands/refresh_all_courses.py
|
course_discovery/apps/courses/management/commands/refresh_all_courses.py
|
import logging
from optparse import make_option
from django.core.management import BaseCommand, CommandError
from course_discovery.apps.courses.models import Course
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = 'Refresh course data from external sources.'
option_list = BaseCommand.option_list + (
make_option('--access_token',
action='store',
dest='access_token',
default=None,
help='OAuth2 access token used to authenticate API calls.'),
)
def handle(self, *args, **options):
access_token = options.get('access_token')
if not access_token:
msg = 'Courses cannot be migrated if no access token is supplied.'
logger.error(msg)
raise CommandError(msg)
Course.refresh_all(access_token=access_token)
|
import logging
from django.core.management import BaseCommand, CommandError
from course_discovery.apps.courses.models import Course
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = 'Refresh course data from external sources.'
def add_arguments(self, parser):
parser.add_argument(
'--access_token',
action='store',
dest='access_token',
default=None,
help='OAuth2 access token used to authenticate API calls.'
)
def handle(self, *args, **options):
access_token = options.get('access_token')
if not access_token:
msg = 'Courses cannot be migrated if no access token is supplied.'
logger.error(msg)
raise CommandError(msg)
Course.refresh_all(access_token=access_token)
|
Switch to argparse for management command argument parsing
|
Switch to argparse for management command argument parsing
|
Python
|
agpl-3.0
|
edx/course-discovery,edx/course-discovery,edx/course-discovery,edx/course-discovery
|
import logging
from optparse import make_option
from django.core.management import BaseCommand, CommandError
from course_discovery.apps.courses.models import Course
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = 'Refresh course data from external sources.'
option_list = BaseCommand.option_list + (
make_option('--access_token',
action='store',
dest='access_token',
default=None,
help='OAuth2 access token used to authenticate API calls.'),
)
def handle(self, *args, **options):
access_token = options.get('access_token')
if not access_token:
msg = 'Courses cannot be migrated if no access token is supplied.'
logger.error(msg)
raise CommandError(msg)
Course.refresh_all(access_token=access_token)
Switch to argparse for management command argument parsing
|
import logging
from django.core.management import BaseCommand, CommandError
from course_discovery.apps.courses.models import Course
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = 'Refresh course data from external sources.'
def add_arguments(self, parser):
parser.add_argument(
'--access_token',
action='store',
dest='access_token',
default=None,
help='OAuth2 access token used to authenticate API calls.'
)
def handle(self, *args, **options):
access_token = options.get('access_token')
if not access_token:
msg = 'Courses cannot be migrated if no access token is supplied.'
logger.error(msg)
raise CommandError(msg)
Course.refresh_all(access_token=access_token)
|
<commit_before>import logging
from optparse import make_option
from django.core.management import BaseCommand, CommandError
from course_discovery.apps.courses.models import Course
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = 'Refresh course data from external sources.'
option_list = BaseCommand.option_list + (
make_option('--access_token',
action='store',
dest='access_token',
default=None,
help='OAuth2 access token used to authenticate API calls.'),
)
def handle(self, *args, **options):
access_token = options.get('access_token')
if not access_token:
msg = 'Courses cannot be migrated if no access token is supplied.'
logger.error(msg)
raise CommandError(msg)
Course.refresh_all(access_token=access_token)
<commit_msg>Switch to argparse for management command argument parsing<commit_after>
|
import logging
from django.core.management import BaseCommand, CommandError
from course_discovery.apps.courses.models import Course
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = 'Refresh course data from external sources.'
def add_arguments(self, parser):
parser.add_argument(
'--access_token',
action='store',
dest='access_token',
default=None,
help='OAuth2 access token used to authenticate API calls.'
)
def handle(self, *args, **options):
access_token = options.get('access_token')
if not access_token:
msg = 'Courses cannot be migrated if no access token is supplied.'
logger.error(msg)
raise CommandError(msg)
Course.refresh_all(access_token=access_token)
|
import logging
from optparse import make_option
from django.core.management import BaseCommand, CommandError
from course_discovery.apps.courses.models import Course
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = 'Refresh course data from external sources.'
option_list = BaseCommand.option_list + (
make_option('--access_token',
action='store',
dest='access_token',
default=None,
help='OAuth2 access token used to authenticate API calls.'),
)
def handle(self, *args, **options):
access_token = options.get('access_token')
if not access_token:
msg = 'Courses cannot be migrated if no access token is supplied.'
logger.error(msg)
raise CommandError(msg)
Course.refresh_all(access_token=access_token)
Switch to argparse for management command argument parsing
import logging
from django.core.management import BaseCommand, CommandError
from course_discovery.apps.courses.models import Course
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = 'Refresh course data from external sources.'
def add_arguments(self, parser):
parser.add_argument(
'--access_token',
action='store',
dest='access_token',
default=None,
help='OAuth2 access token used to authenticate API calls.'
)
def handle(self, *args, **options):
access_token = options.get('access_token')
if not access_token:
msg = 'Courses cannot be migrated if no access token is supplied.'
logger.error(msg)
raise CommandError(msg)
Course.refresh_all(access_token=access_token)
|
<commit_before>import logging
from optparse import make_option
from django.core.management import BaseCommand, CommandError
from course_discovery.apps.courses.models import Course
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = 'Refresh course data from external sources.'
option_list = BaseCommand.option_list + (
make_option('--access_token',
action='store',
dest='access_token',
default=None,
help='OAuth2 access token used to authenticate API calls.'),
)
def handle(self, *args, **options):
access_token = options.get('access_token')
if not access_token:
msg = 'Courses cannot be migrated if no access token is supplied.'
logger.error(msg)
raise CommandError(msg)
Course.refresh_all(access_token=access_token)
<commit_msg>Switch to argparse for management command argument parsing<commit_after>import logging
from django.core.management import BaseCommand, CommandError
from course_discovery.apps.courses.models import Course
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = 'Refresh course data from external sources.'
def add_arguments(self, parser):
parser.add_argument(
'--access_token',
action='store',
dest='access_token',
default=None,
help='OAuth2 access token used to authenticate API calls.'
)
def handle(self, *args, **options):
access_token = options.get('access_token')
if not access_token:
msg = 'Courses cannot be migrated if no access token is supplied.'
logger.error(msg)
raise CommandError(msg)
Course.refresh_all(access_token=access_token)
|
e321b47a5ee2252ce71fabb992e50e5f455a217f
|
blaze/tests/test_blfuncs.py
|
blaze/tests/test_blfuncs.py
|
from blaze.blfuncs import BlazeFunc
from blaze.datashape import double, complex128 as c128
from blaze.blaze_kernels import BlazeElementKernel
import blaze
def _add(a,b):
return a + b
def _mul(a,b):
return a * b
add = BlazeFunc('add',[(_add, 'f8(f8,f8)'),
(_add, 'c16(c16,c16)')])
mul = BlazeFunc('mul', {(double,)*3: _mul})
a = blaze.array([1,2,3],dshape=double)
b = blaze.array([2,3,4],dshape=double)
c = add(a,b)
d = mul(c,c)
d._data = d._data.fuse()
|
from blaze.blfuncs import BlazeFunc
from blaze.datashape import double, complex128 as c128
from blaze.blaze_kernels import BlazeElementKernel
import blaze
def _add(a,b):
return a + b
def _mul(a,b):
return a * b
add = BlazeFunc('add',[('f8(f8,f8)', _add),
('c16(c16,c16)', _add)])
mul = BlazeFunc('mul', {(double,)*3: _mul})
a = blaze.array([1,2,3],dshape=double)
b = blaze.array([2,3,4],dshape=double)
c = add(a,b)
d = mul(c,c)
d._data = d._data.fuse()
|
Fix usage of urlparse. and re-order list of key, value dict specification.
|
Fix usage of urlparse. and re-order list of key, value dict specification.
|
Python
|
bsd-3-clause
|
ContinuumIO/blaze,dwillmer/blaze,dwillmer/blaze,ContinuumIO/blaze,mwiebe/blaze,markflorisson/blaze-core,AbhiAgarwal/blaze,LiaoPan/blaze,ChinaQuants/blaze,markflorisson/blaze-core,FrancescAlted/blaze,caseyclements/blaze,FrancescAlted/blaze,caseyclements/blaze,jcrist/blaze,mwiebe/blaze,AbhiAgarwal/blaze,jcrist/blaze,cpcloud/blaze,cpcloud/blaze,ChinaQuants/blaze,AbhiAgarwal/blaze,maxalbert/blaze,FrancescAlted/blaze,alexmojaki/blaze,aterrel/blaze,jdmcbr/blaze,mwiebe/blaze,aterrel/blaze,mrocklin/blaze,xlhtc007/blaze,FrancescAlted/blaze,LiaoPan/blaze,markflorisson/blaze-core,aterrel/blaze,nkhuyu/blaze,jdmcbr/blaze,mrocklin/blaze,maxalbert/blaze,nkhuyu/blaze,scls19fr/blaze,cowlicks/blaze,xlhtc007/blaze,markflorisson/blaze-core,cowlicks/blaze,AbhiAgarwal/blaze,scls19fr/blaze,mwiebe/blaze,alexmojaki/blaze
|
from blaze.blfuncs import BlazeFunc
from blaze.datashape import double, complex128 as c128
from blaze.blaze_kernels import BlazeElementKernel
import blaze
def _add(a,b):
return a + b
def _mul(a,b):
return a * b
add = BlazeFunc('add',[(_add, 'f8(f8,f8)'),
(_add, 'c16(c16,c16)')])
mul = BlazeFunc('mul', {(double,)*3: _mul})
a = blaze.array([1,2,3],dshape=double)
b = blaze.array([2,3,4],dshape=double)
c = add(a,b)
d = mul(c,c)
d._data = d._data.fuse()
Fix usage of urlparse. and re-order list of key, value dict specification.
|
from blaze.blfuncs import BlazeFunc
from blaze.datashape import double, complex128 as c128
from blaze.blaze_kernels import BlazeElementKernel
import blaze
def _add(a,b):
return a + b
def _mul(a,b):
return a * b
add = BlazeFunc('add',[('f8(f8,f8)', _add),
('c16(c16,c16)', _add)])
mul = BlazeFunc('mul', {(double,)*3: _mul})
a = blaze.array([1,2,3],dshape=double)
b = blaze.array([2,3,4],dshape=double)
c = add(a,b)
d = mul(c,c)
d._data = d._data.fuse()
|
<commit_before>from blaze.blfuncs import BlazeFunc
from blaze.datashape import double, complex128 as c128
from blaze.blaze_kernels import BlazeElementKernel
import blaze
def _add(a,b):
return a + b
def _mul(a,b):
return a * b
add = BlazeFunc('add',[(_add, 'f8(f8,f8)'),
(_add, 'c16(c16,c16)')])
mul = BlazeFunc('mul', {(double,)*3: _mul})
a = blaze.array([1,2,3],dshape=double)
b = blaze.array([2,3,4],dshape=double)
c = add(a,b)
d = mul(c,c)
d._data = d._data.fuse()
<commit_msg>Fix usage of urlparse. and re-order list of key, value dict specification.<commit_after>
|
from blaze.blfuncs import BlazeFunc
from blaze.datashape import double, complex128 as c128
from blaze.blaze_kernels import BlazeElementKernel
import blaze
def _add(a,b):
return a + b
def _mul(a,b):
return a * b
add = BlazeFunc('add',[('f8(f8,f8)', _add),
('c16(c16,c16)', _add)])
mul = BlazeFunc('mul', {(double,)*3: _mul})
a = blaze.array([1,2,3],dshape=double)
b = blaze.array([2,3,4],dshape=double)
c = add(a,b)
d = mul(c,c)
d._data = d._data.fuse()
|
from blaze.blfuncs import BlazeFunc
from blaze.datashape import double, complex128 as c128
from blaze.blaze_kernels import BlazeElementKernel
import blaze
def _add(a,b):
return a + b
def _mul(a,b):
return a * b
add = BlazeFunc('add',[(_add, 'f8(f8,f8)'),
(_add, 'c16(c16,c16)')])
mul = BlazeFunc('mul', {(double,)*3: _mul})
a = blaze.array([1,2,3],dshape=double)
b = blaze.array([2,3,4],dshape=double)
c = add(a,b)
d = mul(c,c)
d._data = d._data.fuse()
Fix usage of urlparse. and re-order list of key, value dict specification.
from blaze.blfuncs import BlazeFunc
from blaze.datashape import double, complex128 as c128
from blaze.blaze_kernels import BlazeElementKernel
import blaze
def _add(a,b):
return a + b
def _mul(a,b):
return a * b
add = BlazeFunc('add',[('f8(f8,f8)', _add),
('c16(c16,c16)', _add)])
mul = BlazeFunc('mul', {(double,)*3: _mul})
a = blaze.array([1,2,3],dshape=double)
b = blaze.array([2,3,4],dshape=double)
c = add(a,b)
d = mul(c,c)
d._data = d._data.fuse()
|
<commit_before>from blaze.blfuncs import BlazeFunc
from blaze.datashape import double, complex128 as c128
from blaze.blaze_kernels import BlazeElementKernel
import blaze
def _add(a,b):
return a + b
def _mul(a,b):
return a * b
add = BlazeFunc('add',[(_add, 'f8(f8,f8)'),
(_add, 'c16(c16,c16)')])
mul = BlazeFunc('mul', {(double,)*3: _mul})
a = blaze.array([1,2,3],dshape=double)
b = blaze.array([2,3,4],dshape=double)
c = add(a,b)
d = mul(c,c)
d._data = d._data.fuse()
<commit_msg>Fix usage of urlparse. and re-order list of key, value dict specification.<commit_after>from blaze.blfuncs import BlazeFunc
from blaze.datashape import double, complex128 as c128
from blaze.blaze_kernels import BlazeElementKernel
import blaze
def _add(a,b):
return a + b
def _mul(a,b):
return a * b
add = BlazeFunc('add',[('f8(f8,f8)', _add),
('c16(c16,c16)', _add)])
mul = BlazeFunc('mul', {(double,)*3: _mul})
a = blaze.array([1,2,3],dshape=double)
b = blaze.array([2,3,4],dshape=double)
c = add(a,b)
d = mul(c,c)
d._data = d._data.fuse()
|
54be27f1c2e6c288465f2b59e41f5a4deed00fe7
|
atompos/atompos/main/views.py
|
atompos/atompos/main/views.py
|
import json as simplejson
from django.http import HttpResponse, Http404
from django.shortcuts import render
from django.views.decorators.csrf import csrf_exempt
from atompos.main import settings
from util import get_atom_pos, get_positions_atb
def index(request):
return render(request, 'index.html')
def _get_positions(request, position_function):
if request.method != 'POST':
raise Http404
params = request.POST.dict()
if 'csrfmiddlewaretoken' in params:
params.pop('csrfmiddlewaretoken')
pos = position_function(params)
if "error" in pos:
res = pos
res.update({"version": settings.VERSION})
else:
res = {
"molecule": pos,
"version": settings.VERSION
}
return HttpResponse(
simplejson.dumps(res, indent=2),
mimetype="application/json"
)
@csrf_exempt
def generate(request):
return _get_positions(request, get_atom_pos)
@csrf_exempt
def load_atb(request):
return _get_positions(request, get_positions_atb)
|
import json as simplejson
from django.http import HttpResponse, Http404
from django.shortcuts import render
from django.views.decorators.csrf import csrf_exempt
from atompos.main import settings
from util import get_atom_pos, get_positions_atb
def index(request):
return render(request, 'index.html')
def _get_positions(request, position_function):
if request.method != 'POST':
raise Http404
params = request.POST.dict()
if 'csrfmiddlewaretoken' in params:
params.pop('csrfmiddlewaretoken')
pos = position_function(params)
if "error" in pos:
res = pos
res.update({"version": settings.VERSION})
else:
res = {
"molecule": pos,
"version": settings.VERSION
}
return HttpResponse(
simplejson.dumps(res, indent=2),
content_type="application/json"
)
@csrf_exempt
def generate(request):
return _get_positions(request, get_atom_pos)
@csrf_exempt
def load_atb(request):
return _get_positions(request, get_positions_atb)
|
Fix django-1.7 deprecated mimetype keyword argument
|
Fix django-1.7 deprecated mimetype keyword argument
Source: https://docs.djangoproject.com/en/1.5/ref/request-response/#django.http.HttpResponse.__init__
|
Python
|
mit
|
bertrand-caron/OAPoC,bertrand-caron/OAPoC
|
import json as simplejson
from django.http import HttpResponse, Http404
from django.shortcuts import render
from django.views.decorators.csrf import csrf_exempt
from atompos.main import settings
from util import get_atom_pos, get_positions_atb
def index(request):
return render(request, 'index.html')
def _get_positions(request, position_function):
if request.method != 'POST':
raise Http404
params = request.POST.dict()
if 'csrfmiddlewaretoken' in params:
params.pop('csrfmiddlewaretoken')
pos = position_function(params)
if "error" in pos:
res = pos
res.update({"version": settings.VERSION})
else:
res = {
"molecule": pos,
"version": settings.VERSION
}
return HttpResponse(
simplejson.dumps(res, indent=2),
mimetype="application/json"
)
@csrf_exempt
def generate(request):
return _get_positions(request, get_atom_pos)
@csrf_exempt
def load_atb(request):
return _get_positions(request, get_positions_atb)
Fix django-1.7 deprecated mimetype keyword argument
Source: https://docs.djangoproject.com/en/1.5/ref/request-response/#django.http.HttpResponse.__init__
|
import json as simplejson
from django.http import HttpResponse, Http404
from django.shortcuts import render
from django.views.decorators.csrf import csrf_exempt
from atompos.main import settings
from util import get_atom_pos, get_positions_atb
def index(request):
return render(request, 'index.html')
def _get_positions(request, position_function):
if request.method != 'POST':
raise Http404
params = request.POST.dict()
if 'csrfmiddlewaretoken' in params:
params.pop('csrfmiddlewaretoken')
pos = position_function(params)
if "error" in pos:
res = pos
res.update({"version": settings.VERSION})
else:
res = {
"molecule": pos,
"version": settings.VERSION
}
return HttpResponse(
simplejson.dumps(res, indent=2),
content_type="application/json"
)
@csrf_exempt
def generate(request):
return _get_positions(request, get_atom_pos)
@csrf_exempt
def load_atb(request):
return _get_positions(request, get_positions_atb)
|
<commit_before>import json as simplejson
from django.http import HttpResponse, Http404
from django.shortcuts import render
from django.views.decorators.csrf import csrf_exempt
from atompos.main import settings
from util import get_atom_pos, get_positions_atb
def index(request):
return render(request, 'index.html')
def _get_positions(request, position_function):
if request.method != 'POST':
raise Http404
params = request.POST.dict()
if 'csrfmiddlewaretoken' in params:
params.pop('csrfmiddlewaretoken')
pos = position_function(params)
if "error" in pos:
res = pos
res.update({"version": settings.VERSION})
else:
res = {
"molecule": pos,
"version": settings.VERSION
}
return HttpResponse(
simplejson.dumps(res, indent=2),
mimetype="application/json"
)
@csrf_exempt
def generate(request):
return _get_positions(request, get_atom_pos)
@csrf_exempt
def load_atb(request):
return _get_positions(request, get_positions_atb)
<commit_msg>Fix django-1.7 deprecated mimetype keyword argument
Source: https://docs.djangoproject.com/en/1.5/ref/request-response/#django.http.HttpResponse.__init__<commit_after>
|
import json as simplejson
from django.http import HttpResponse, Http404
from django.shortcuts import render
from django.views.decorators.csrf import csrf_exempt
from atompos.main import settings
from util import get_atom_pos, get_positions_atb
def index(request):
return render(request, 'index.html')
def _get_positions(request, position_function):
if request.method != 'POST':
raise Http404
params = request.POST.dict()
if 'csrfmiddlewaretoken' in params:
params.pop('csrfmiddlewaretoken')
pos = position_function(params)
if "error" in pos:
res = pos
res.update({"version": settings.VERSION})
else:
res = {
"molecule": pos,
"version": settings.VERSION
}
return HttpResponse(
simplejson.dumps(res, indent=2),
content_type="application/json"
)
@csrf_exempt
def generate(request):
return _get_positions(request, get_atom_pos)
@csrf_exempt
def load_atb(request):
return _get_positions(request, get_positions_atb)
|
import json as simplejson
from django.http import HttpResponse, Http404
from django.shortcuts import render
from django.views.decorators.csrf import csrf_exempt
from atompos.main import settings
from util import get_atom_pos, get_positions_atb
def index(request):
return render(request, 'index.html')
def _get_positions(request, position_function):
if request.method != 'POST':
raise Http404
params = request.POST.dict()
if 'csrfmiddlewaretoken' in params:
params.pop('csrfmiddlewaretoken')
pos = position_function(params)
if "error" in pos:
res = pos
res.update({"version": settings.VERSION})
else:
res = {
"molecule": pos,
"version": settings.VERSION
}
return HttpResponse(
simplejson.dumps(res, indent=2),
mimetype="application/json"
)
@csrf_exempt
def generate(request):
return _get_positions(request, get_atom_pos)
@csrf_exempt
def load_atb(request):
return _get_positions(request, get_positions_atb)
Fix django-1.7 deprecated mimetype keyword argument
Source: https://docs.djangoproject.com/en/1.5/ref/request-response/#django.http.HttpResponse.__init__
import json as simplejson
from django.http import HttpResponse, Http404
from django.shortcuts import render
from django.views.decorators.csrf import csrf_exempt
from atompos.main import settings
from util import get_atom_pos, get_positions_atb
def index(request):
return render(request, 'index.html')
def _get_positions(request, position_function):
if request.method != 'POST':
raise Http404
params = request.POST.dict()
if 'csrfmiddlewaretoken' in params:
params.pop('csrfmiddlewaretoken')
pos = position_function(params)
if "error" in pos:
res = pos
res.update({"version": settings.VERSION})
else:
res = {
"molecule": pos,
"version": settings.VERSION
}
return HttpResponse(
simplejson.dumps(res, indent=2),
content_type="application/json"
)
@csrf_exempt
def generate(request):
return _get_positions(request, get_atom_pos)
@csrf_exempt
def load_atb(request):
return _get_positions(request, get_positions_atb)
|
<commit_before>import json as simplejson
from django.http import HttpResponse, Http404
from django.shortcuts import render
from django.views.decorators.csrf import csrf_exempt
from atompos.main import settings
from util import get_atom_pos, get_positions_atb
def index(request):
return render(request, 'index.html')
def _get_positions(request, position_function):
if request.method != 'POST':
raise Http404
params = request.POST.dict()
if 'csrfmiddlewaretoken' in params:
params.pop('csrfmiddlewaretoken')
pos = position_function(params)
if "error" in pos:
res = pos
res.update({"version": settings.VERSION})
else:
res = {
"molecule": pos,
"version": settings.VERSION
}
return HttpResponse(
simplejson.dumps(res, indent=2),
mimetype="application/json"
)
@csrf_exempt
def generate(request):
return _get_positions(request, get_atom_pos)
@csrf_exempt
def load_atb(request):
return _get_positions(request, get_positions_atb)
<commit_msg>Fix django-1.7 deprecated mimetype keyword argument
Source: https://docs.djangoproject.com/en/1.5/ref/request-response/#django.http.HttpResponse.__init__<commit_after>import json as simplejson
from django.http import HttpResponse, Http404
from django.shortcuts import render
from django.views.decorators.csrf import csrf_exempt
from atompos.main import settings
from util import get_atom_pos, get_positions_atb
def index(request):
return render(request, 'index.html')
def _get_positions(request, position_function):
if request.method != 'POST':
raise Http404
params = request.POST.dict()
if 'csrfmiddlewaretoken' in params:
params.pop('csrfmiddlewaretoken')
pos = position_function(params)
if "error" in pos:
res = pos
res.update({"version": settings.VERSION})
else:
res = {
"molecule": pos,
"version": settings.VERSION
}
return HttpResponse(
simplejson.dumps(res, indent=2),
content_type="application/json"
)
@csrf_exempt
def generate(request):
return _get_positions(request, get_atom_pos)
@csrf_exempt
def load_atb(request):
return _get_positions(request, get_positions_atb)
|
9581334db472c8ad8dbff0766ec74ed6dfa20d6f
|
tests/test_api_request.py
|
tests/test_api_request.py
|
#!/usr/bin/env python
# coding=utf-8
from binance.client import Client
from binance.exceptions import BinanceAPIException, BinanceRequestException
import pytest
import requests_mock
client = Client('api_key', 'api_secret')
def test_invalid_json():
"""Test Invalid response Exception"""
with pytest.raises(BinanceRequestException):
with requests_mock.mock() as m:
m.get('https://www.binance.com/exchange/public/product', text='<head></html>')
client.get_products()
def test_api_exception():
"""Test API response Exception"""
with pytest.raises(BinanceAPIException):
with requests_mock.mock() as m:
json_obj = {"code": 1002, "msg": "Invalid API call"}
m.get('https://www.binance.com/api/v1/time', json=json_obj, status_code=400)
client.get_server_time()
|
#!/usr/bin/env python
# coding=utf-8
from binance.client import Client
from binance.exceptions import BinanceAPIException, BinanceRequestException, BinanceWithdrawException
import pytest
import requests_mock
client = Client('api_key', 'api_secret')
def test_invalid_json():
"""Test Invalid response Exception"""
with pytest.raises(BinanceRequestException):
with requests_mock.mock() as m:
m.get('https://www.binance.com/exchange/public/product', text='<head></html>')
client.get_products()
def test_api_exception():
"""Test API response Exception"""
with pytest.raises(BinanceAPIException):
with requests_mock.mock() as m:
json_obj = {"code": 1002, "msg": "Invalid API call"}
m.get('https://www.binance.com/api/v1/time', json=json_obj, status_code=400)
client.get_server_time()
def test_withdraw_api_exception():
"""Test Withdraw API response Exception"""
with pytest.raises(BinanceWithdrawException):
with requests_mock.mock() as m:
json_obj = {"success": False, "msg": "Insufficient funds"}
m.register_uri('POST', requests_mock.ANY, json=json_obj, status_code=200)
client.withdraw(asset='BTC', address='BTCADDRESS', amount=100)
|
Add test for withdraw exception response
|
Add test for withdraw exception response
|
Python
|
mit
|
sammchardy/python-binance
|
#!/usr/bin/env python
# coding=utf-8
from binance.client import Client
from binance.exceptions import BinanceAPIException, BinanceRequestException
import pytest
import requests_mock
client = Client('api_key', 'api_secret')
def test_invalid_json():
"""Test Invalid response Exception"""
with pytest.raises(BinanceRequestException):
with requests_mock.mock() as m:
m.get('https://www.binance.com/exchange/public/product', text='<head></html>')
client.get_products()
def test_api_exception():
"""Test API response Exception"""
with pytest.raises(BinanceAPIException):
with requests_mock.mock() as m:
json_obj = {"code": 1002, "msg": "Invalid API call"}
m.get('https://www.binance.com/api/v1/time', json=json_obj, status_code=400)
client.get_server_time()
Add test for withdraw exception response
|
#!/usr/bin/env python
# coding=utf-8
from binance.client import Client
from binance.exceptions import BinanceAPIException, BinanceRequestException, BinanceWithdrawException
import pytest
import requests_mock
client = Client('api_key', 'api_secret')
def test_invalid_json():
"""Test Invalid response Exception"""
with pytest.raises(BinanceRequestException):
with requests_mock.mock() as m:
m.get('https://www.binance.com/exchange/public/product', text='<head></html>')
client.get_products()
def test_api_exception():
"""Test API response Exception"""
with pytest.raises(BinanceAPIException):
with requests_mock.mock() as m:
json_obj = {"code": 1002, "msg": "Invalid API call"}
m.get('https://www.binance.com/api/v1/time', json=json_obj, status_code=400)
client.get_server_time()
def test_withdraw_api_exception():
"""Test Withdraw API response Exception"""
with pytest.raises(BinanceWithdrawException):
with requests_mock.mock() as m:
json_obj = {"success": False, "msg": "Insufficient funds"}
m.register_uri('POST', requests_mock.ANY, json=json_obj, status_code=200)
client.withdraw(asset='BTC', address='BTCADDRESS', amount=100)
|
<commit_before>#!/usr/bin/env python
# coding=utf-8
from binance.client import Client
from binance.exceptions import BinanceAPIException, BinanceRequestException
import pytest
import requests_mock
client = Client('api_key', 'api_secret')
def test_invalid_json():
"""Test Invalid response Exception"""
with pytest.raises(BinanceRequestException):
with requests_mock.mock() as m:
m.get('https://www.binance.com/exchange/public/product', text='<head></html>')
client.get_products()
def test_api_exception():
"""Test API response Exception"""
with pytest.raises(BinanceAPIException):
with requests_mock.mock() as m:
json_obj = {"code": 1002, "msg": "Invalid API call"}
m.get('https://www.binance.com/api/v1/time', json=json_obj, status_code=400)
client.get_server_time()
<commit_msg>Add test for withdraw exception response<commit_after>
|
#!/usr/bin/env python
# coding=utf-8
from binance.client import Client
from binance.exceptions import BinanceAPIException, BinanceRequestException, BinanceWithdrawException
import pytest
import requests_mock
client = Client('api_key', 'api_secret')
def test_invalid_json():
"""Test Invalid response Exception"""
with pytest.raises(BinanceRequestException):
with requests_mock.mock() as m:
m.get('https://www.binance.com/exchange/public/product', text='<head></html>')
client.get_products()
def test_api_exception():
"""Test API response Exception"""
with pytest.raises(BinanceAPIException):
with requests_mock.mock() as m:
json_obj = {"code": 1002, "msg": "Invalid API call"}
m.get('https://www.binance.com/api/v1/time', json=json_obj, status_code=400)
client.get_server_time()
def test_withdraw_api_exception():
"""Test Withdraw API response Exception"""
with pytest.raises(BinanceWithdrawException):
with requests_mock.mock() as m:
json_obj = {"success": False, "msg": "Insufficient funds"}
m.register_uri('POST', requests_mock.ANY, json=json_obj, status_code=200)
client.withdraw(asset='BTC', address='BTCADDRESS', amount=100)
|
#!/usr/bin/env python
# coding=utf-8
from binance.client import Client
from binance.exceptions import BinanceAPIException, BinanceRequestException
import pytest
import requests_mock
client = Client('api_key', 'api_secret')
def test_invalid_json():
"""Test Invalid response Exception"""
with pytest.raises(BinanceRequestException):
with requests_mock.mock() as m:
m.get('https://www.binance.com/exchange/public/product', text='<head></html>')
client.get_products()
def test_api_exception():
"""Test API response Exception"""
with pytest.raises(BinanceAPIException):
with requests_mock.mock() as m:
json_obj = {"code": 1002, "msg": "Invalid API call"}
m.get('https://www.binance.com/api/v1/time', json=json_obj, status_code=400)
client.get_server_time()
Add test for withdraw exception response
#!/usr/bin/env python
# coding=utf-8
from binance.client import Client
from binance.exceptions import BinanceAPIException, BinanceRequestException, BinanceWithdrawException
import pytest
import requests_mock
client = Client('api_key', 'api_secret')
def test_invalid_json():
"""Test Invalid response Exception"""
with pytest.raises(BinanceRequestException):
with requests_mock.mock() as m:
m.get('https://www.binance.com/exchange/public/product', text='<head></html>')
client.get_products()
def test_api_exception():
"""Test API response Exception"""
with pytest.raises(BinanceAPIException):
with requests_mock.mock() as m:
json_obj = {"code": 1002, "msg": "Invalid API call"}
m.get('https://www.binance.com/api/v1/time', json=json_obj, status_code=400)
client.get_server_time()
def test_withdraw_api_exception():
"""Test Withdraw API response Exception"""
with pytest.raises(BinanceWithdrawException):
with requests_mock.mock() as m:
json_obj = {"success": False, "msg": "Insufficient funds"}
m.register_uri('POST', requests_mock.ANY, json=json_obj, status_code=200)
client.withdraw(asset='BTC', address='BTCADDRESS', amount=100)
|
<commit_before>#!/usr/bin/env python
# coding=utf-8
from binance.client import Client
from binance.exceptions import BinanceAPIException, BinanceRequestException
import pytest
import requests_mock
client = Client('api_key', 'api_secret')
def test_invalid_json():
"""Test Invalid response Exception"""
with pytest.raises(BinanceRequestException):
with requests_mock.mock() as m:
m.get('https://www.binance.com/exchange/public/product', text='<head></html>')
client.get_products()
def test_api_exception():
"""Test API response Exception"""
with pytest.raises(BinanceAPIException):
with requests_mock.mock() as m:
json_obj = {"code": 1002, "msg": "Invalid API call"}
m.get('https://www.binance.com/api/v1/time', json=json_obj, status_code=400)
client.get_server_time()
<commit_msg>Add test for withdraw exception response<commit_after>#!/usr/bin/env python
# coding=utf-8
from binance.client import Client
from binance.exceptions import BinanceAPIException, BinanceRequestException, BinanceWithdrawException
import pytest
import requests_mock
client = Client('api_key', 'api_secret')
def test_invalid_json():
"""Test Invalid response Exception"""
with pytest.raises(BinanceRequestException):
with requests_mock.mock() as m:
m.get('https://www.binance.com/exchange/public/product', text='<head></html>')
client.get_products()
def test_api_exception():
"""Test API response Exception"""
with pytest.raises(BinanceAPIException):
with requests_mock.mock() as m:
json_obj = {"code": 1002, "msg": "Invalid API call"}
m.get('https://www.binance.com/api/v1/time', json=json_obj, status_code=400)
client.get_server_time()
def test_withdraw_api_exception():
"""Test Withdraw API response Exception"""
with pytest.raises(BinanceWithdrawException):
with requests_mock.mock() as m:
json_obj = {"success": False, "msg": "Insufficient funds"}
m.register_uri('POST', requests_mock.ANY, json=json_obj, status_code=200)
client.withdraw(asset='BTC', address='BTCADDRESS', amount=100)
|
c73572f2a9b63d35daf8b5935c4a1e6a0422c122
|
pinax/documents/receivers.py
|
pinax/documents/receivers.py
|
from django.db.models.signals import post_save
from django.dispatch import receiver
from .conf import settings
from .models import UserStorage
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def ensure_userstorage(sender, **kwargs):
if kwargs["created"]:
user = kwargs["instance"]
UserStorage.objects.create(user=user, bytes_total=(1024 * 1024 * 50))
|
from django.db.models.signals import post_save, pre_delete
from django.dispatch import receiver
from .conf import settings
from .models import UserStorage, Document
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def ensure_userstorage(sender, **kwargs):
if kwargs["created"]:
user = kwargs["instance"]
UserStorage.objects.create(user=user, bytes_total=(1024 * 1024 * 50))
# Receive the pre_delete signal and delete the file associated with the model instance.
@receiver(pre_delete, sender=Document)
def document_delete(sender, instance, **kwargs):
# Pass false so FileField doesn't save the model.
instance.file.delete(False)
|
Implement deletion of file object via Document model pre_save signal.
|
Implement deletion of file object via Document model pre_save signal.
|
Python
|
mit
|
pinax/pinax-documents
|
from django.db.models.signals import post_save
from django.dispatch import receiver
from .conf import settings
from .models import UserStorage
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def ensure_userstorage(sender, **kwargs):
if kwargs["created"]:
user = kwargs["instance"]
UserStorage.objects.create(user=user, bytes_total=(1024 * 1024 * 50))
Implement deletion of file object via Document model pre_save signal.
|
from django.db.models.signals import post_save, pre_delete
from django.dispatch import receiver
from .conf import settings
from .models import UserStorage, Document
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def ensure_userstorage(sender, **kwargs):
if kwargs["created"]:
user = kwargs["instance"]
UserStorage.objects.create(user=user, bytes_total=(1024 * 1024 * 50))
# Receive the pre_delete signal and delete the file associated with the model instance.
@receiver(pre_delete, sender=Document)
def document_delete(sender, instance, **kwargs):
# Pass false so FileField doesn't save the model.
instance.file.delete(False)
|
<commit_before>from django.db.models.signals import post_save
from django.dispatch import receiver
from .conf import settings
from .models import UserStorage
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def ensure_userstorage(sender, **kwargs):
if kwargs["created"]:
user = kwargs["instance"]
UserStorage.objects.create(user=user, bytes_total=(1024 * 1024 * 50))
<commit_msg>Implement deletion of file object via Document model pre_save signal.<commit_after>
|
from django.db.models.signals import post_save, pre_delete
from django.dispatch import receiver
from .conf import settings
from .models import UserStorage, Document
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def ensure_userstorage(sender, **kwargs):
if kwargs["created"]:
user = kwargs["instance"]
UserStorage.objects.create(user=user, bytes_total=(1024 * 1024 * 50))
# Receive the pre_delete signal and delete the file associated with the model instance.
@receiver(pre_delete, sender=Document)
def document_delete(sender, instance, **kwargs):
# Pass false so FileField doesn't save the model.
instance.file.delete(False)
|
from django.db.models.signals import post_save
from django.dispatch import receiver
from .conf import settings
from .models import UserStorage
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def ensure_userstorage(sender, **kwargs):
if kwargs["created"]:
user = kwargs["instance"]
UserStorage.objects.create(user=user, bytes_total=(1024 * 1024 * 50))
Implement deletion of file object via Document model pre_save signal.
from django.db.models.signals import post_save, pre_delete
from django.dispatch import receiver
from .conf import settings
from .models import UserStorage, Document
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def ensure_userstorage(sender, **kwargs):
if kwargs["created"]:
user = kwargs["instance"]
UserStorage.objects.create(user=user, bytes_total=(1024 * 1024 * 50))
# Receive the pre_delete signal and delete the file associated with the model instance.
@receiver(pre_delete, sender=Document)
def document_delete(sender, instance, **kwargs):
# Pass false so FileField doesn't save the model.
instance.file.delete(False)
|
<commit_before>from django.db.models.signals import post_save
from django.dispatch import receiver
from .conf import settings
from .models import UserStorage
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def ensure_userstorage(sender, **kwargs):
if kwargs["created"]:
user = kwargs["instance"]
UserStorage.objects.create(user=user, bytes_total=(1024 * 1024 * 50))
<commit_msg>Implement deletion of file object via Document model pre_save signal.<commit_after>from django.db.models.signals import post_save, pre_delete
from django.dispatch import receiver
from .conf import settings
from .models import UserStorage, Document
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def ensure_userstorage(sender, **kwargs):
if kwargs["created"]:
user = kwargs["instance"]
UserStorage.objects.create(user=user, bytes_total=(1024 * 1024 * 50))
# Receive the pre_delete signal and delete the file associated with the model instance.
@receiver(pre_delete, sender=Document)
def document_delete(sender, instance, **kwargs):
# Pass false so FileField doesn't save the model.
instance.file.delete(False)
|
9c48cd08ee0805cfd9a8115d77da139e8c09d7a9
|
plyer/platforms/linux/cpu.py
|
plyer/platforms/linux/cpu.py
|
from subprocess import Popen, PIPE
from plyer.facades import CPU
from plyer.utils import whereis_exe
from os import environ
class LinuxProcessors(CPU):
def _cpus(self):
old_lang = environ.get('LANG', '')
environ['LANG'] = 'C'
cpus = {
'physical': None, # cores
'logical': None # cores * threads
}
logical = Popen(
['nproc', '--all'],
stdout=PIPE
)
output = logical.communicate()[0].decode('utf-8').strip()
environ['LANG'] = old_lang
if output:
cpus['logical'] = int(output)
return cpus
def instance():
import sys
if whereis_exe('nproc'):
return LinuxProcessors()
sys.stderr.write("nproc not found.")
return CPU()
|
from subprocess import Popen, PIPE
from plyer.facades import CPU
from plyer.utils import whereis_exe
from os import environ
class LinuxProcessors(CPU):
def _cpus(self):
old_lang = environ.get('LANG', '')
environ['LANG'] = 'C'
cpus = {
'physical': None, # cores
'logical': None # cores * threads
}
physical = [] # list of CPU ids from kernel
# open Linux kernel data file for CPU
with open('/proc/cpuinfo', 'rb') as fle:
lines = fle.readlines()
# go through the lines and obtain CPU core ids
for line in lines:
line = line.decode('utf-8')
if 'core id' not in line:
continue
cpuid = line.split(':')[1].strip()
physical.append(cpuid)
# total cores (socket * core per socket)
# is the length of unique CPU ids from kernel
physical = len(set(physical))
cpus['physical'] = physical
logical = Popen(
['nproc', '--all'],
stdout=PIPE
)
output = logical.communicate()[0].decode('utf-8').strip()
if output:
cpus['logical'] = int(output)
environ['LANG'] = old_lang
return cpus
def instance():
import sys
if whereis_exe('nproc'):
return LinuxProcessors()
sys.stderr.write("nproc not found.")
return CPU()
|
Add CPU count for GNU/Linux
|
Add CPU count for GNU/Linux
|
Python
|
mit
|
kivy/plyer,KeyWeeUsr/plyer,kivy/plyer,kivy/plyer,KeyWeeUsr/plyer,KeyWeeUsr/plyer
|
from subprocess import Popen, PIPE
from plyer.facades import CPU
from plyer.utils import whereis_exe
from os import environ
class LinuxProcessors(CPU):
def _cpus(self):
old_lang = environ.get('LANG', '')
environ['LANG'] = 'C'
cpus = {
'physical': None, # cores
'logical': None # cores * threads
}
logical = Popen(
['nproc', '--all'],
stdout=PIPE
)
output = logical.communicate()[0].decode('utf-8').strip()
environ['LANG'] = old_lang
if output:
cpus['logical'] = int(output)
return cpus
def instance():
import sys
if whereis_exe('nproc'):
return LinuxProcessors()
sys.stderr.write("nproc not found.")
return CPU()
Add CPU count for GNU/Linux
|
from subprocess import Popen, PIPE
from plyer.facades import CPU
from plyer.utils import whereis_exe
from os import environ
class LinuxProcessors(CPU):
def _cpus(self):
old_lang = environ.get('LANG', '')
environ['LANG'] = 'C'
cpus = {
'physical': None, # cores
'logical': None # cores * threads
}
physical = [] # list of CPU ids from kernel
# open Linux kernel data file for CPU
with open('/proc/cpuinfo', 'rb') as fle:
lines = fle.readlines()
# go through the lines and obtain CPU core ids
for line in lines:
line = line.decode('utf-8')
if 'core id' not in line:
continue
cpuid = line.split(':')[1].strip()
physical.append(cpuid)
# total cores (socket * core per socket)
# is the length of unique CPU ids from kernel
physical = len(set(physical))
cpus['physical'] = physical
logical = Popen(
['nproc', '--all'],
stdout=PIPE
)
output = logical.communicate()[0].decode('utf-8').strip()
if output:
cpus['logical'] = int(output)
environ['LANG'] = old_lang
return cpus
def instance():
import sys
if whereis_exe('nproc'):
return LinuxProcessors()
sys.stderr.write("nproc not found.")
return CPU()
|
<commit_before>from subprocess import Popen, PIPE
from plyer.facades import CPU
from plyer.utils import whereis_exe
from os import environ
class LinuxProcessors(CPU):
def _cpus(self):
old_lang = environ.get('LANG', '')
environ['LANG'] = 'C'
cpus = {
'physical': None, # cores
'logical': None # cores * threads
}
logical = Popen(
['nproc', '--all'],
stdout=PIPE
)
output = logical.communicate()[0].decode('utf-8').strip()
environ['LANG'] = old_lang
if output:
cpus['logical'] = int(output)
return cpus
def instance():
import sys
if whereis_exe('nproc'):
return LinuxProcessors()
sys.stderr.write("nproc not found.")
return CPU()
<commit_msg>Add CPU count for GNU/Linux<commit_after>
|
from subprocess import Popen, PIPE
from plyer.facades import CPU
from plyer.utils import whereis_exe
from os import environ
class LinuxProcessors(CPU):
def _cpus(self):
old_lang = environ.get('LANG', '')
environ['LANG'] = 'C'
cpus = {
'physical': None, # cores
'logical': None # cores * threads
}
physical = [] # list of CPU ids from kernel
# open Linux kernel data file for CPU
with open('/proc/cpuinfo', 'rb') as fle:
lines = fle.readlines()
# go through the lines and obtain CPU core ids
for line in lines:
line = line.decode('utf-8')
if 'core id' not in line:
continue
cpuid = line.split(':')[1].strip()
physical.append(cpuid)
# total cores (socket * core per socket)
# is the length of unique CPU ids from kernel
physical = len(set(physical))
cpus['physical'] = physical
logical = Popen(
['nproc', '--all'],
stdout=PIPE
)
output = logical.communicate()[0].decode('utf-8').strip()
if output:
cpus['logical'] = int(output)
environ['LANG'] = old_lang
return cpus
def instance():
import sys
if whereis_exe('nproc'):
return LinuxProcessors()
sys.stderr.write("nproc not found.")
return CPU()
|
from subprocess import Popen, PIPE
from plyer.facades import CPU
from plyer.utils import whereis_exe
from os import environ
class LinuxProcessors(CPU):
def _cpus(self):
old_lang = environ.get('LANG', '')
environ['LANG'] = 'C'
cpus = {
'physical': None, # cores
'logical': None # cores * threads
}
logical = Popen(
['nproc', '--all'],
stdout=PIPE
)
output = logical.communicate()[0].decode('utf-8').strip()
environ['LANG'] = old_lang
if output:
cpus['logical'] = int(output)
return cpus
def instance():
import sys
if whereis_exe('nproc'):
return LinuxProcessors()
sys.stderr.write("nproc not found.")
return CPU()
Add CPU count for GNU/Linux
from subprocess import Popen, PIPE
from plyer.facades import CPU
from plyer.utils import whereis_exe
from os import environ
class LinuxProcessors(CPU):
def _cpus(self):
old_lang = environ.get('LANG', '')
environ['LANG'] = 'C'
cpus = {
'physical': None, # cores
'logical': None # cores * threads
}
physical = [] # list of CPU ids from kernel
# open Linux kernel data file for CPU
with open('/proc/cpuinfo', 'rb') as fle:
lines = fle.readlines()
# go through the lines and obtain CPU core ids
for line in lines:
line = line.decode('utf-8')
if 'core id' not in line:
continue
cpuid = line.split(':')[1].strip()
physical.append(cpuid)
# total cores (socket * core per socket)
# is the length of unique CPU ids from kernel
physical = len(set(physical))
cpus['physical'] = physical
logical = Popen(
['nproc', '--all'],
stdout=PIPE
)
output = logical.communicate()[0].decode('utf-8').strip()
if output:
cpus['logical'] = int(output)
environ['LANG'] = old_lang
return cpus
def instance():
import sys
if whereis_exe('nproc'):
return LinuxProcessors()
sys.stderr.write("nproc not found.")
return CPU()
|
<commit_before>from subprocess import Popen, PIPE
from plyer.facades import CPU
from plyer.utils import whereis_exe
from os import environ
class LinuxProcessors(CPU):
def _cpus(self):
old_lang = environ.get('LANG', '')
environ['LANG'] = 'C'
cpus = {
'physical': None, # cores
'logical': None # cores * threads
}
logical = Popen(
['nproc', '--all'],
stdout=PIPE
)
output = logical.communicate()[0].decode('utf-8').strip()
environ['LANG'] = old_lang
if output:
cpus['logical'] = int(output)
return cpus
def instance():
import sys
if whereis_exe('nproc'):
return LinuxProcessors()
sys.stderr.write("nproc not found.")
return CPU()
<commit_msg>Add CPU count for GNU/Linux<commit_after>from subprocess import Popen, PIPE
from plyer.facades import CPU
from plyer.utils import whereis_exe
from os import environ
class LinuxProcessors(CPU):
def _cpus(self):
old_lang = environ.get('LANG', '')
environ['LANG'] = 'C'
cpus = {
'physical': None, # cores
'logical': None # cores * threads
}
physical = [] # list of CPU ids from kernel
# open Linux kernel data file for CPU
with open('/proc/cpuinfo', 'rb') as fle:
lines = fle.readlines()
# go through the lines and obtain CPU core ids
for line in lines:
line = line.decode('utf-8')
if 'core id' not in line:
continue
cpuid = line.split(':')[1].strip()
physical.append(cpuid)
# total cores (socket * core per socket)
# is the length of unique CPU ids from kernel
physical = len(set(physical))
cpus['physical'] = physical
logical = Popen(
['nproc', '--all'],
stdout=PIPE
)
output = logical.communicate()[0].decode('utf-8').strip()
if output:
cpus['logical'] = int(output)
environ['LANG'] = old_lang
return cpus
def instance():
import sys
if whereis_exe('nproc'):
return LinuxProcessors()
sys.stderr.write("nproc not found.")
return CPU()
|
4c7336fbe1e82bd3d7d091429feda40932d73e67
|
bin/pear.py
|
bin/pear.py
|
"""
PEAR task
A task to detect whether a specific PEAR package is installed or not
"""
import os
from fabric.api import *
from fabric.colors import red, green
def pear_detect(package):
"""
Detect if a pear package is installed.
"""
if which('pear'):
pear_out = local('pear list -a', True)
if pear_out.find(package) == -1:
return False
else:
return True
else:
print(red('pear is not installed', True))
return False
|
"""
PEAR task
A task to detect whether a specific PEAR package is installed or not
"""
import os
from fabric.api import *
from fabric.colors import red, green
import shell
def pear_detect(package):
"""
Detect if a pear package is installed.
"""
if shell.which('pear'):
pear_out = local('pear list -a', True)
if pear_out.find(package) == -1:
return False
else:
return True
else:
print(red('pear is not installed', True))
return False
|
Add missing import for shell module
|
Add missing import for shell module
|
Python
|
mit
|
hglattergotz/sfdeploy
|
"""
PEAR task
A task to detect whether a specific PEAR package is installed or not
"""
import os
from fabric.api import *
from fabric.colors import red, green
def pear_detect(package):
"""
Detect if a pear package is installed.
"""
if which('pear'):
pear_out = local('pear list -a', True)
if pear_out.find(package) == -1:
return False
else:
return True
else:
print(red('pear is not installed', True))
return False
Add missing import for shell module
|
"""
PEAR task
A task to detect whether a specific PEAR package is installed or not
"""
import os
from fabric.api import *
from fabric.colors import red, green
import shell
def pear_detect(package):
"""
Detect if a pear package is installed.
"""
if shell.which('pear'):
pear_out = local('pear list -a', True)
if pear_out.find(package) == -1:
return False
else:
return True
else:
print(red('pear is not installed', True))
return False
|
<commit_before>"""
PEAR task
A task to detect whether a specific PEAR package is installed or not
"""
import os
from fabric.api import *
from fabric.colors import red, green
def pear_detect(package):
"""
Detect if a pear package is installed.
"""
if which('pear'):
pear_out = local('pear list -a', True)
if pear_out.find(package) == -1:
return False
else:
return True
else:
print(red('pear is not installed', True))
return False
<commit_msg>Add missing import for shell module<commit_after>
|
"""
PEAR task
A task to detect whether a specific PEAR package is installed or not
"""
import os
from fabric.api import *
from fabric.colors import red, green
import shell
def pear_detect(package):
"""
Detect if a pear package is installed.
"""
if shell.which('pear'):
pear_out = local('pear list -a', True)
if pear_out.find(package) == -1:
return False
else:
return True
else:
print(red('pear is not installed', True))
return False
|
"""
PEAR task
A task to detect whether a specific PEAR package is installed or not
"""
import os
from fabric.api import *
from fabric.colors import red, green
def pear_detect(package):
"""
Detect if a pear package is installed.
"""
if which('pear'):
pear_out = local('pear list -a', True)
if pear_out.find(package) == -1:
return False
else:
return True
else:
print(red('pear is not installed', True))
return False
Add missing import for shell module
"""
PEAR task
A task to detect whether a specific PEAR package is installed or not
"""
import os
from fabric.api import *
from fabric.colors import red, green
import shell
def pear_detect(package):
"""
Detect if a pear package is installed.
"""
if shell.which('pear'):
pear_out = local('pear list -a', True)
if pear_out.find(package) == -1:
return False
else:
return True
else:
print(red('pear is not installed', True))
return False
|
<commit_before>"""
PEAR task
A task to detect whether a specific PEAR package is installed or not
"""
import os
from fabric.api import *
from fabric.colors import red, green
def pear_detect(package):
"""
Detect if a pear package is installed.
"""
if which('pear'):
pear_out = local('pear list -a', True)
if pear_out.find(package) == -1:
return False
else:
return True
else:
print(red('pear is not installed', True))
return False
<commit_msg>Add missing import for shell module<commit_after>"""
PEAR task
A task to detect whether a specific PEAR package is installed or not
"""
import os
from fabric.api import *
from fabric.colors import red, green
import shell
def pear_detect(package):
"""
Detect if a pear package is installed.
"""
if shell.which('pear'):
pear_out = local('pear list -a', True)
if pear_out.find(package) == -1:
return False
else:
return True
else:
print(red('pear is not installed', True))
return False
|
ccd681ab4cb840461d5cdc8197242af16e0c12d0
|
app.py
|
app.py
|
from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello world!"
if __name__ == "__main__":
app.run()
|
from flask import Flask
app = Flask(__name__)
app.debug = True
@app.route("/")
def home():
return "Skill Camp!"
@app.route("/create")
def create():
return "Make a new thing!"
@app.route("/<int:uid>/view")
def view(uid):
return "Look at %d" % (uid,)
@app.route("/<int:uid>/edit")
def edit(uid):
return "I'm editing %d" % (uid,)
if __name__ == "__main__":
app.run()
|
Add all of our routes
|
Add all of our routes
|
Python
|
mit
|
codeforamerica/skillcamp,codeforamerica/skillcamp,codeforamerica/skillcamp,codeforamerica/skillcamp
|
from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello world!"
if __name__ == "__main__":
app.run()
Add all of our routes
|
from flask import Flask
app = Flask(__name__)
app.debug = True
@app.route("/")
def home():
return "Skill Camp!"
@app.route("/create")
def create():
return "Make a new thing!"
@app.route("/<int:uid>/view")
def view(uid):
return "Look at %d" % (uid,)
@app.route("/<int:uid>/edit")
def edit(uid):
return "I'm editing %d" % (uid,)
if __name__ == "__main__":
app.run()
|
<commit_before>from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello world!"
if __name__ == "__main__":
app.run()
<commit_msg>Add all of our routes<commit_after>
|
from flask import Flask
app = Flask(__name__)
app.debug = True
@app.route("/")
def home():
return "Skill Camp!"
@app.route("/create")
def create():
return "Make a new thing!"
@app.route("/<int:uid>/view")
def view(uid):
return "Look at %d" % (uid,)
@app.route("/<int:uid>/edit")
def edit(uid):
return "I'm editing %d" % (uid,)
if __name__ == "__main__":
app.run()
|
from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello world!"
if __name__ == "__main__":
app.run()
Add all of our routes
from flask import Flask
app = Flask(__name__)
app.debug = True
@app.route("/")
def home():
return "Skill Camp!"
@app.route("/create")
def create():
return "Make a new thing!"
@app.route("/<int:uid>/view")
def view(uid):
return "Look at %d" % (uid,)
@app.route("/<int:uid>/edit")
def edit(uid):
return "I'm editing %d" % (uid,)
if __name__ == "__main__":
app.run()
|
<commit_before>from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello world!"
if __name__ == "__main__":
app.run()
<commit_msg>Add all of our routes<commit_after>from flask import Flask
app = Flask(__name__)
app.debug = True
@app.route("/")
def home():
return "Skill Camp!"
@app.route("/create")
def create():
return "Make a new thing!"
@app.route("/<int:uid>/view")
def view(uid):
return "Look at %d" % (uid,)
@app.route("/<int:uid>/edit")
def edit(uid):
return "I'm editing %d" % (uid,)
if __name__ == "__main__":
app.run()
|
60f101e4fc3ac6822c7cf254afa9e98004eb07a1
|
bot.py
|
bot.py
|
#!/usr/bin/python3
import tweepy
import random
import os
from secrets import *
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
twitter = tweepy.API(auth)
photo_file = os.path.join("polaroids", os.listdir("polaroids")[0])
comment = random.choice([
"Hmm ...",
"Remember this party?",
"Oh dear.",
"Huh.",
"Uh ...",
"I totally forgot about this.",
"Oh geeze.",
"This one's going in my scrapbook.",
"...",
"Oh wow, remember this?",
"Whose house even was this?",
"I don't remember this at all.",
"Er ...",
"Those were the days.",
"I miss that crew."
])
tweet = twitter.update_with_media(photo_file, comment)
os.remove(photo_file)
|
#!/usr/bin/python3
"""
Copyright (c) 2017 Finn Ellis.
Free to use and modify under the terms of the MIT license.
See included LICENSE file for details.
"""
import tweepy
import random
import os
from secrets import *
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
twitter = tweepy.API(auth)
photo_file = os.path.join("polaroids", os.listdir("polaroids")[0])
comment = random.choice([
"Hmm ...",
"Remember this party?",
"Oh dear.",
"Huh.",
"Uh ...",
"I totally forgot about this.",
"Oh geeze.",
"This one's going in my scrapbook.",
"...",
"Oh wow, remember this?",
"Whose house even was this?",
"I don't remember this at all.",
"Er ...",
"Those were the days.",
"I miss that crew."
])
tweet = twitter.update_with_media(photo_file, comment)
os.remove(photo_file)
|
Add copyright and license information.
|
Add copyright and license information.
|
Python
|
mit
|
relsqui/awkward_polaroid,relsqui/awkward_polaroid
|
cd611cee6843ff9056d98d26d08091188cd20172
|
app/rest.py
|
app/rest.py
|
from flask import Blueprint, jsonify, current_app
from app import db
from app.errors import register_errors
base_blueprint = Blueprint('', __name__)
register_errors(base_blueprint)
@base_blueprint.route('/')
def get_info():
current_app.logger.info('get_info')
query = 'SELECT version_num FROM alembic_version'
full_name = db.session.execute(query).fetchone()[0]
return jsonify(
environment=current_app.config['ENVIRONMENT'],
info=full_name,
commit=current_app.config['TRAVIS_COMMIT']
)
|
from flask import Blueprint, jsonify, current_app
from app import db
from app.errors import register_errors
base_blueprint = Blueprint('', __name__)
register_errors(base_blueprint)
@base_blueprint.route('/')
def get_info():
current_app.logger.info('get_info')
query = 'SELECT version_num FROM alembic_version'
try:
full_name = db.session.execute(query).fetchone()[0]
except Exception as e:
current_app.logger.error('Database exception: %r', e)
full_name = 'Database error, check logs'
return jsonify(
environment=current_app.config['ENVIRONMENT'],
info=full_name,
commit=current_app.config['TRAVIS_COMMIT']
)
|
Handle db exceptions when getting api info
|
Handle db exceptions when getting api info
|
Python
|
mit
|
NewAcropolis/api,NewAcropolis/api,NewAcropolis/api
|
f779905c1b7a48a8f49da6ad061ae7d67e677052
|
cartoframes/viz/legend_list.py
|
cartoframes/viz/legend_list.py
|
from .legend import Legend
from .constants import SINGLE_LEGEND
class LegendList:
"""LegendList
Args:
legends (list, Legend): List of legends for a layer.
"""
def __init__(self, legends=None, default_legend=None, geom_type=None):
self._legends = self._init_legends(legends, default_legend, geom_type)
def _init_legends(self, legends, default_legend, layer_type):
if isinstance(legends, list):
legend_list = []
for legend in legends:
if isinstance(legend, Legend):
if legend._type == 'default' or legend._type == 'basic':
legend._type = _get_simple_legend_geometry_type(layer_type)
if legend._type == 'default' and default_legend:
legend._prop = default_legend._prop
legend_list.append(legend)
else:
raise ValueError('Legends list contains invalid elements')
return legend_list
else:
return []
def get_info(self):
legends_info = []
for legend in self._legends:
if legend:
legends_info.append(legend.get_info())
return legends_info
def _get_simple_legend_geometry_type(layer_type):
return SINGLE_LEGEND + '-' + layer_type
|
from .legend import Legend
from .constants import SINGLE_LEGEND
class LegendList:
"""LegendList
Args:
legends (list, Legend): List of legends for a layer.
"""
def __init__(self, legends=None, default_legend=None, geom_type=None):
self._legends = self._init_legends(legends, default_legend, geom_type)
def _init_legends(self, legends, default_legend, layer_type):
if isinstance(legends, list):
legend_list = []
for legend in legends:
if isinstance(legend, Legend):
if legend._type == 'basic':
legend._type = _get_simple_legend_geometry_type(layer_type)
elif legend._type == 'default' and default_legend:
legend._type = default_legend._type
legend._prop = default_legend._prop
legend_list.append(legend)
else:
raise ValueError('Legends list contains invalid elements')
return legend_list
else:
return []
def get_info(self):
legends_info = []
for legend in self._legends:
if legend:
legends_info.append(legend.get_info())
return legends_info
def _get_simple_legend_geometry_type(layer_type):
return SINGLE_LEGEND + '-' + layer_type
|
Fix default legend type detection
|
Fix default legend type detection
|
Python
|
bsd-3-clause
|
CartoDB/cartoframes,CartoDB/cartoframes
|
4b3ec77a6e1639dc156135fd42ca215c58c082a3
|
pyecore/notification.py
|
pyecore/notification.py
|
"""
This module gives the "listener" classes for the PyEcore notification layer.
The main class to create a new listener is "EObserver" which is triggered
each time a modification is performed on an observed element.
"""
class ENotifer(object):
def notify(self, notification):
notification.notifier = notification.notifier or self
for listener in self._eternal_listener + self.listeners:
listener.notifyChanged(notification)
@unique
class Kind(Enum):
ADD = 0
ADD_MANY = 1
MOVE = 2
REMOVE = 3
REMOVE_MANY = 4
SET = 5
UNSET = 6
class Notification(object):
def __init__(self, notifier=None, kind=None, old=None, new=None,
feature=None):
self.notifier = notifier
self.kind = kind
self.old = old
self.new = new
self.feature = feature
def __repr__(self):
return ('[{0}] old={1} new={2} obj={3} #{4}'
.format(self.kind.name,
self.old,
self.new,
self.notifier,
self.feature))
class EObserver(object):
def __init__(self, notifier=None, notifyChanged=None):
if notifier:
notifier.listeners.append(self)
if notifyChanged:
self.notifyChanged = notifyChanged
def observe(self, notifier):
notifier.listeners.append(self)
def notifyChanged(self, notification):
pass
|
"""
This module gives the "listener" classes for the PyEcore notification layer.
The main class to create a new listener is "EObserver" which is triggered
each time a modification is performed on an observed element.
"""
try:
from enum34 import unique, Enum
except ImportError:
from enum import unique, Enum
class ENotifer(object):
def notify(self, notification):
notification.notifier = notification.notifier or self
for listener in self._eternal_listener + self.listeners:
listener.notifyChanged(notification)
@unique
class Kind(Enum):
ADD = 0
ADD_MANY = 1
MOVE = 2
REMOVE = 3
REMOVE_MANY = 4
SET = 5
UNSET = 6
class Notification(object):
def __init__(self, notifier=None, kind=None, old=None, new=None,
feature=None):
self.notifier = notifier
self.kind = kind
self.old = old
self.new = new
self.feature = feature
def __repr__(self):
return ('[{0}] old={1} new={2} obj={3} #{4}'
.format(self.kind.name,
self.old,
self.new,
self.notifier,
self.feature))
class EObserver(object):
def __init__(self, notifier=None, notifyChanged=None):
if notifier:
notifier.listeners.append(self)
if notifyChanged:
self.notifyChanged = notifyChanged
def observe(self, notifier):
notifier.listeners.append(self)
def notifyChanged(self, notification):
pass
|
Add conditional import of the enum34 library
|
Add conditional import of the enum34 library
This lib is used to bring enumerations to Python <= 3.3.
|
Python
|
bsd-3-clause
|
aranega/pyecore,pyecore/pyecore
|
"""
This module gives the "listener" classes for the PyEcore notification layer.
The main class to create a new listener is "EObserver" which is triggered
each time a modification is perfomed on an observed element.
"""
class ENotifer(object):
def notify(self, notification):
notification.notifier = notification.notifier or self
for listener in self._eternal_listener + self.listeners:
listener.notifyChanged(notification)
@unique
class Kind(Enum):
ADD = 0
ADD_MANY = 1
MOVE = 2
REMOVE = 3
REMOVE_MANY = 4
SET = 5
UNSET = 6
class Notification(object):
def __init__(self, notifier=None, kind=None, old=None, new=None,
feature=None):
self.notifier = notifier
self.kind = kind
self.old = old
self.new = new
self.feature = feature
def __repr__(self):
return ('[{0}] old={1} new={2} obj={3} #{4}'
.format(self.kind.name,
self.old,
self.new,
self.notifier,
self.feature))
class EObserver(object):
def __init__(self, notifier=None, notifyChanged=None):
if notifier:
notifier.listeners.append(self)
if notifyChanged:
self.notifyChanged = notifyChanged
def observe(self, notifier):
notifier.listeners.append(self)
def notifyChanged(self, notification):
pass
Add conditional import of the enum34 library
This lib is used to bing enumerations to Python <= 3.3.
|
"""
This module gives the "listener" classes for the PyEcore notification layer.
The main class to create a new listener is "EObserver" which is triggered
each time a modification is perfomed on an observed element.
"""
try:
from enum34 import unique, Enum
except ImportError:
from enum import unique, Enum
class ENotifer(object):
def notify(self, notification):
notification.notifier = notification.notifier or self
for listener in self._eternal_listener + self.listeners:
listener.notifyChanged(notification)
@unique
class Kind(Enum):
ADD = 0
ADD_MANY = 1
MOVE = 2
REMOVE = 3
REMOVE_MANY = 4
SET = 5
UNSET = 6
class Notification(object):
def __init__(self, notifier=None, kind=None, old=None, new=None,
feature=None):
self.notifier = notifier
self.kind = kind
self.old = old
self.new = new
self.feature = feature
def __repr__(self):
return ('[{0}] old={1} new={2} obj={3} #{4}'
.format(self.kind.name,
self.old,
self.new,
self.notifier,
self.feature))
class EObserver(object):
def __init__(self, notifier=None, notifyChanged=None):
if notifier:
notifier.listeners.append(self)
if notifyChanged:
self.notifyChanged = notifyChanged
def observe(self, notifier):
notifier.listeners.append(self)
def notifyChanged(self, notification):
pass
|
<commit_before>"""
This module gives the "listener" classes for the PyEcore notification layer.
The main class to create a new listener is "EObserver" which is triggered
each time a modification is perfomed on an observed element.
"""
class ENotifer(object):
def notify(self, notification):
notification.notifier = notification.notifier or self
for listener in self._eternal_listener + self.listeners:
listener.notifyChanged(notification)
@unique
class Kind(Enum):
ADD = 0
ADD_MANY = 1
MOVE = 2
REMOVE = 3
REMOVE_MANY = 4
SET = 5
UNSET = 6
class Notification(object):
def __init__(self, notifier=None, kind=None, old=None, new=None,
feature=None):
self.notifier = notifier
self.kind = kind
self.old = old
self.new = new
self.feature = feature
def __repr__(self):
return ('[{0}] old={1} new={2} obj={3} #{4}'
.format(self.kind.name,
self.old,
self.new,
self.notifier,
self.feature))
class EObserver(object):
def __init__(self, notifier=None, notifyChanged=None):
if notifier:
notifier.listeners.append(self)
if notifyChanged:
self.notifyChanged = notifyChanged
def observe(self, notifier):
notifier.listeners.append(self)
def notifyChanged(self, notification):
pass
<commit_msg>Add conditional import of the enum34 library
This lib is used to bing enumerations to Python <= 3.3.<commit_after>
|
"""
This module gives the "listener" classes for the PyEcore notification layer.
The main class to create a new listener is "EObserver" which is triggered
each time a modification is perfomed on an observed element.
"""
try:
from enum34 import unique, Enum
except ImportError:
from enum import unique, Enum
class ENotifer(object):
def notify(self, notification):
notification.notifier = notification.notifier or self
for listener in self._eternal_listener + self.listeners:
listener.notifyChanged(notification)
@unique
class Kind(Enum):
ADD = 0
ADD_MANY = 1
MOVE = 2
REMOVE = 3
REMOVE_MANY = 4
SET = 5
UNSET = 6
class Notification(object):
def __init__(self, notifier=None, kind=None, old=None, new=None,
feature=None):
self.notifier = notifier
self.kind = kind
self.old = old
self.new = new
self.feature = feature
def __repr__(self):
return ('[{0}] old={1} new={2} obj={3} #{4}'
.format(self.kind.name,
self.old,
self.new,
self.notifier,
self.feature))
class EObserver(object):
def __init__(self, notifier=None, notifyChanged=None):
if notifier:
notifier.listeners.append(self)
if notifyChanged:
self.notifyChanged = notifyChanged
def observe(self, notifier):
notifier.listeners.append(self)
def notifyChanged(self, notification):
pass
|
"""
This module gives the "listener" classes for the PyEcore notification layer.
The main class to create a new listener is "EObserver" which is triggered
each time a modification is perfomed on an observed element.
"""
class ENotifer(object):
def notify(self, notification):
notification.notifier = notification.notifier or self
for listener in self._eternal_listener + self.listeners:
listener.notifyChanged(notification)
@unique
class Kind(Enum):
ADD = 0
ADD_MANY = 1
MOVE = 2
REMOVE = 3
REMOVE_MANY = 4
SET = 5
UNSET = 6
class Notification(object):
def __init__(self, notifier=None, kind=None, old=None, new=None,
feature=None):
self.notifier = notifier
self.kind = kind
self.old = old
self.new = new
self.feature = feature
def __repr__(self):
return ('[{0}] old={1} new={2} obj={3} #{4}'
.format(self.kind.name,
self.old,
self.new,
self.notifier,
self.feature))
class EObserver(object):
def __init__(self, notifier=None, notifyChanged=None):
if notifier:
notifier.listeners.append(self)
if notifyChanged:
self.notifyChanged = notifyChanged
def observe(self, notifier):
notifier.listeners.append(self)
def notifyChanged(self, notification):
pass
Add conditional import of the enum34 library
This lib is used to bing enumerations to Python <= 3.3."""
This module gives the "listener" classes for the PyEcore notification layer.
The main class to create a new listener is "EObserver" which is triggered
each time a modification is perfomed on an observed element.
"""
try:
from enum34 import unique, Enum
except ImportError:
from enum import unique, Enum
class ENotifer(object):
def notify(self, notification):
notification.notifier = notification.notifier or self
for listener in self._eternal_listener + self.listeners:
listener.notifyChanged(notification)
@unique
class Kind(Enum):
ADD = 0
ADD_MANY = 1
MOVE = 2
REMOVE = 3
REMOVE_MANY = 4
SET = 5
UNSET = 6
class Notification(object):
def __init__(self, notifier=None, kind=None, old=None, new=None,
feature=None):
self.notifier = notifier
self.kind = kind
self.old = old
self.new = new
self.feature = feature
def __repr__(self):
return ('[{0}] old={1} new={2} obj={3} #{4}'
.format(self.kind.name,
self.old,
self.new,
self.notifier,
self.feature))
class EObserver(object):
def __init__(self, notifier=None, notifyChanged=None):
if notifier:
notifier.listeners.append(self)
if notifyChanged:
self.notifyChanged = notifyChanged
def observe(self, notifier):
notifier.listeners.append(self)
def notifyChanged(self, notification):
pass
|
<commit_before>"""
This module gives the "listener" classes for the PyEcore notification layer.
The main class to create a new listener is "EObserver" which is triggered
each time a modification is perfomed on an observed element.
"""
class ENotifer(object):
def notify(self, notification):
notification.notifier = notification.notifier or self
for listener in self._eternal_listener + self.listeners:
listener.notifyChanged(notification)
@unique
class Kind(Enum):
ADD = 0
ADD_MANY = 1
MOVE = 2
REMOVE = 3
REMOVE_MANY = 4
SET = 5
UNSET = 6
class Notification(object):
def __init__(self, notifier=None, kind=None, old=None, new=None,
feature=None):
self.notifier = notifier
self.kind = kind
self.old = old
self.new = new
self.feature = feature
def __repr__(self):
return ('[{0}] old={1} new={2} obj={3} #{4}'
.format(self.kind.name,
self.old,
self.new,
self.notifier,
self.feature))
class EObserver(object):
def __init__(self, notifier=None, notifyChanged=None):
if notifier:
notifier.listeners.append(self)
if notifyChanged:
self.notifyChanged = notifyChanged
def observe(self, notifier):
notifier.listeners.append(self)
def notifyChanged(self, notification):
pass
<commit_msg>Add conditional import of the enum34 library
This lib is used to bing enumerations to Python <= 3.3.<commit_after>"""
This module gives the "listener" classes for the PyEcore notification layer.
The main class to create a new listener is "EObserver" which is triggered
each time a modification is perfomed on an observed element.
"""
try:
from enum34 import unique, Enum
except ImportError:
from enum import unique, Enum
class ENotifer(object):
def notify(self, notification):
notification.notifier = notification.notifier or self
for listener in self._eternal_listener + self.listeners:
listener.notifyChanged(notification)
@unique
class Kind(Enum):
ADD = 0
ADD_MANY = 1
MOVE = 2
REMOVE = 3
REMOVE_MANY = 4
SET = 5
UNSET = 6
class Notification(object):
def __init__(self, notifier=None, kind=None, old=None, new=None,
feature=None):
self.notifier = notifier
self.kind = kind
self.old = old
self.new = new
self.feature = feature
def __repr__(self):
return ('[{0}] old={1} new={2} obj={3} #{4}'
.format(self.kind.name,
self.old,
self.new,
self.notifier,
self.feature))
class EObserver(object):
def __init__(self, notifier=None, notifyChanged=None):
if notifier:
notifier.listeners.append(self)
if notifyChanged:
self.notifyChanged = notifyChanged
def observe(self, notifier):
notifier.listeners.append(self)
def notifyChanged(self, notification):
pass
|
f18ea85f3599e16c60cfc2b652c30ff64997e95b
|
pytablereader/loadermanager/_base.py
|
pytablereader/loadermanager/_base.py
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import
from ..interface import TableLoaderInterface
class TableLoaderManager(TableLoaderInterface):
def __init__(self, loader):
self.__loader = loader
@property
def format_name(self):
return self.__loader.format_name
@property
def source_type(self):
return self.__loader.source_type
@property
def encoding(self):
try:
return self.__loader.encoding
except AttributeError:
return None
@encoding.setter
def encoding(self, codec_name):
self.__loader.encoding = codec_name
def load(self):
return self.__loader.load()
def inc_table_count(self):
self.__loader.inc_table_count()
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import
from ..interface import TableLoaderInterface
class TableLoaderManager(TableLoaderInterface):
def __init__(self, loader):
self.__loader = loader
@property
def loader(self):
return self.__loader
@property
def format_name(self):
return self.__loader.format_name
@property
def source_type(self):
return self.__loader.source_type
@property
def encoding(self):
try:
return self.__loader.encoding
except AttributeError:
return None
@encoding.setter
def encoding(self, codec_name):
self.__loader.encoding = codec_name
def load(self):
return self.__loader.load()
def inc_table_count(self):
self.__loader.inc_table_count()
|
Add an interface to get the loader
|
Add an interface to get the loader
|
Python
|
mit
|
thombashi/pytablereader,thombashi/pytablereader,thombashi/pytablereader
|
ddb12a892d42e8a6ffdd8146149ec306dea48a12
|
pydmrs/pydelphin_interface.py
|
pydmrs/pydelphin_interface.py
|
from delphin.interfaces import ace
from delphin.mrs import simplemrs, dmrx
from pydmrs.core import ListDmrs
from pydmrs.utils import load_config, get_config_option
DEFAULT_CONFIG_FILE = 'default_interface.conf'
config = load_config(DEFAULT_CONFIG_FILE)
DEFAULT_ERG_FILE = get_config_option(config, 'Grammar', 'ERG')
def parse(sentence, cls=ListDmrs, erg_file=DEFAULT_ERG_FILE):
results = []
for result in ace.parse(erg_file, sentence)['RESULTS']: # cmdargs=['-r', 'root_informal']
mrs = result['MRS']
xmrs = simplemrs.loads_one(mrs)
dmrs_xml = dmrx.dumps_one(xmrs)[11:-12]
dmrs = cls.loads_xml(dmrs_xml)
results.append(dmrs)
return results
def generate(dmrs, erg_file=DEFAULT_ERG_FILE):
dmrs_xml = '<dmrs-list>' + dmrs.dumps_xml(encoding='utf-8') + '</dmrs-list>'
xmrs = dmrx.loads_one(dmrs_xml)
mrs = simplemrs.dumps_one(xmrs)
results = []
for result in ace.generate(erg_file, mrs)['RESULTS']:
sentence = result['SENT']
results.append(sentence)
return results
|
from delphin.interfaces import ace
from delphin.mrs import simplemrs, dmrx
from pydmrs.core import ListDmrs
from pydmrs.utils import load_config, get_config_option
DEFAULT_CONFIG_FILE = 'default_interface.conf'
config = load_config(DEFAULT_CONFIG_FILE)
DEFAULT_ERG_FILE = get_config_option(config, 'Grammar', 'ERG')
def parse(sentence, cls=ListDmrs, erg_file=DEFAULT_ERG_FILE):
results = []
for result in ace.parse(erg_file, sentence).results(): # cmdargs=['-r', 'root_informal']
xmrs = result.mrs()
dmrs_xml = dmrx.dumps_one(xmrs)[11:-12]
dmrs = cls.loads_xml(dmrs_xml)
results.append(dmrs)
return results
def generate(dmrs, erg_file=DEFAULT_ERG_FILE):
dmrs_xml = '<dmrs-list>' + dmrs.dumps_xml(encoding='utf-8') + '</dmrs-list>'
xmrs = dmrx.loads_one(dmrs_xml)
mrs = simplemrs.dumps_one(xmrs)
results = []
for result in ace.generate(erg_file, mrs).results():
sentence = result['surface']
results.append(sentence)
return results
|
Update PyDelphin interface to recent version
|
Update PyDelphin interface to recent version
|
Python
|
mit
|
delph-in/pydmrs,delph-in/pydmrs,delph-in/pydmrs
|
c26a7f83b1e9689496b5cf3b5e42fb85611c1ded
|
ideascube/conf/idb_aus_queensland.py
|
ideascube/conf/idb_aus_queensland.py
|
# -*- coding: utf-8 -*-
"""Queensland box in Australia"""
from .idb import * # noqa
from django.utils.translation import ugettext_lazy as _
IDEASCUBE_NAME = u"Queensland"
IDEASCUBE_PLACE_NAME = _("the community")
COUNTRIES_FIRST = ['AU']
TIME_ZONE = 'Australia/Darwin'
LANGUAGE_CODE = 'en'
LOAN_DURATION = 14
MONITORING_ENTRY_EXPORT_FIELDS = ['serial', 'user_id', 'birth_year', 'gender']
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal information'), ['short_name', 'full_name', 'birth_year', 'gender', 'id_card_number']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the town'), ['current_occupation', 'school_level']),
(_('Language skills'), ['en_level']),
)
|
# -*- coding: utf-8 -*-
"""Queensland box in Australia"""
from .idb import * # noqa
from django.utils.translation import ugettext_lazy as _
IDEASCUBE_NAME = u"Queensland"
IDEASCUBE_PLACE_NAME = _("the community")
COUNTRIES_FIRST = ['AU']
TIME_ZONE = 'Australia/Darwin'
LANGUAGE_CODE = 'en'
LOAN_DURATION = 14
MONITORING_ENTRY_EXPORT_FIELDS = ['serial', 'user_id', 'birth_year', 'gender']
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal information'), ['short_name', 'full_name', 'birth_year', 'gender', 'id_card_number']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the town'), ['current_occupation', 'school_level']),
(_('Language skills'), ['en_level']),
)
STAFF_HOME_CARDS = [c for c in STAFF_HOME_CARDS
if c['url'] not in ['server:battery']]
HOME_CARDS = STAFF_HOME_CARDS + [
{
'id': 'blog',
},
{
'id': 'mediacenter',
},
{
'id': 'library',
},
{
'id': 'wikipedia',
'languages': ['en']
},
{
'id': 'khanacademy',
},
{
'id': 'gutenberg',
'lang': 'en',
},
{
'id': 'vikidia',
'languages': ['en']
},
]
|
Change cards for the new version
|
Change cards for the new version
We set up a new version of the server and installed the ZIM file with the
catalog, so the cards have to change from the old version to the new
version to match the ideascube catalog policy
|
Python
|
agpl-3.0
|
ideascube/ideascube,ideascube/ideascube,ideascube/ideascube,ideascube/ideascube
|
# -*- coding: utf-8 -*-
"""Queensland box in Australia"""
from .idb import * # noqa
from django.utils.translation import ugettext_lazy as _
IDEASCUBE_NAME = u"Queensland"
IDEASCUBE_PLACE_NAME = _("the community")
COUNTRIES_FIRST = ['AU']
TIME_ZONE = 'Australia/Darwin'
LANGUAGE_CODE = 'en'
LOAN_DURATION = 14
MONITORING_ENTRY_EXPORT_FIELDS = ['serial', 'user_id', 'birth_year', 'gender']
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal informations'), ['short_name', 'full_name', 'birth_year', 'gender', 'id_card_number']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the town'), ['current_occupation', 'school_level']),
(_('Language skills'), ['en_level']),
)
Change cards for the new version
We setup a new version of the server and installed the ZIM file with the
catalog, so the cards has to change from the old version to the new
version to match with the ideascube catalog policy
|
# -*- coding: utf-8 -*-
"""Queensland box in Australia"""
from .idb import * # noqa
from django.utils.translation import ugettext_lazy as _
IDEASCUBE_NAME = u"Queensland"
IDEASCUBE_PLACE_NAME = _("the community")
COUNTRIES_FIRST = ['AU']
TIME_ZONE = 'Australia/Darwin'
LANGUAGE_CODE = 'en'
LOAN_DURATION = 14
MONITORING_ENTRY_EXPORT_FIELDS = ['serial', 'user_id', 'birth_year', 'gender']
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal informations'), ['short_name', 'full_name', 'birth_year', 'gender', 'id_card_number']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the town'), ['current_occupation', 'school_level']),
(_('Language skills'), ['en_level']),
)
STAFF_HOME_CARDS = [c for c in STAFF_HOME_CARDS
if c['url'] not in ['server:battery']]
HOME_CARDS = STAFF_HOME_CARDS + [
{
'id': 'blog',
},
{
'id': 'mediacenter',
},
{
'id': 'library',
},
{
'id': 'wikipedia',
'languages': ['en']
},
{
'id': 'khanacademy',
},
{
'id': 'gutenberg',
'lang': 'en',
},
{
'id': 'vikidia',
'languages': ['en']
},
]
|
<commit_before># -*- coding: utf-8 -*-
"""Queensland box in Australia"""
from .idb import * # noqa
from django.utils.translation import ugettext_lazy as _
IDEASCUBE_NAME = u"Queensland"
IDEASCUBE_PLACE_NAME = _("the community")
COUNTRIES_FIRST = ['AU']
TIME_ZONE = 'Australia/Darwin'
LANGUAGE_CODE = 'en'
LOAN_DURATION = 14
MONITORING_ENTRY_EXPORT_FIELDS = ['serial', 'user_id', 'birth_year', 'gender']
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal informations'), ['short_name', 'full_name', 'birth_year', 'gender', 'id_card_number']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the town'), ['current_occupation', 'school_level']),
(_('Language skills'), ['en_level']),
)
<commit_msg>Change cards for the new version
We set up a new version of the server and installed the ZIM file with the
catalog, so the cards have to change from the old version to the new
version to match the ideascube catalog policy<commit_after>
|
# -*- coding: utf-8 -*-
"""Queensland box in Australia"""
from .idb import * # noqa
from django.utils.translation import ugettext_lazy as _
IDEASCUBE_NAME = u"Queensland"
IDEASCUBE_PLACE_NAME = _("the community")
COUNTRIES_FIRST = ['AU']
TIME_ZONE = 'Australia/Darwin'
LANGUAGE_CODE = 'en'
LOAN_DURATION = 14
MONITORING_ENTRY_EXPORT_FIELDS = ['serial', 'user_id', 'birth_year', 'gender']
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal informations'), ['short_name', 'full_name', 'birth_year', 'gender', 'id_card_number']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the town'), ['current_occupation', 'school_level']),
(_('Language skills'), ['en_level']),
)
STAFF_HOME_CARDS = [c for c in STAFF_HOME_CARDS
if c['url'] not in ['server:battery']]
HOME_CARDS = STAFF_HOME_CARDS + [
{
'id': 'blog',
},
{
'id': 'mediacenter',
},
{
'id': 'library',
},
{
'id': 'wikipedia',
'languages': ['en']
},
{
'id': 'khanacademy',
},
{
'id': 'gutenberg',
'lang': 'en',
},
{
'id': 'vikidia',
'languages': ['en']
},
]
|
# -*- coding: utf-8 -*-
"""Queensland box in Australia"""
from .idb import * # noqa
from django.utils.translation import ugettext_lazy as _
IDEASCUBE_NAME = u"Queensland"
IDEASCUBE_PLACE_NAME = _("the community")
COUNTRIES_FIRST = ['AU']
TIME_ZONE = 'Australia/Darwin'
LANGUAGE_CODE = 'en'
LOAN_DURATION = 14
MONITORING_ENTRY_EXPORT_FIELDS = ['serial', 'user_id', 'birth_year', 'gender']
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal informations'), ['short_name', 'full_name', 'birth_year', 'gender', 'id_card_number']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the town'), ['current_occupation', 'school_level']),
(_('Language skills'), ['en_level']),
)
Change cards for the new version
We set up a new version of the server and installed the ZIM file with the
catalog, so the cards have to change from the old version to the new
version to match the ideascube catalog policy# -*- coding: utf-8 -*-
"""Queensland box in Australia"""
from .idb import * # noqa
from django.utils.translation import ugettext_lazy as _
IDEASCUBE_NAME = u"Queensland"
IDEASCUBE_PLACE_NAME = _("the community")
COUNTRIES_FIRST = ['AU']
TIME_ZONE = 'Australia/Darwin'
LANGUAGE_CODE = 'en'
LOAN_DURATION = 14
MONITORING_ENTRY_EXPORT_FIELDS = ['serial', 'user_id', 'birth_year', 'gender']
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal informations'), ['short_name', 'full_name', 'birth_year', 'gender', 'id_card_number']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the town'), ['current_occupation', 'school_level']),
(_('Language skills'), ['en_level']),
)
STAFF_HOME_CARDS = [c for c in STAFF_HOME_CARDS
if c['url'] not in ['server:battery']]
HOME_CARDS = STAFF_HOME_CARDS + [
{
'id': 'blog',
},
{
'id': 'mediacenter',
},
{
'id': 'library',
},
{
'id': 'wikipedia',
'languages': ['en']
},
{
'id': 'khanacademy',
},
{
'id': 'gutenberg',
'lang': 'en',
},
{
'id': 'vikidia',
'languages': ['en']
},
]
|
<commit_before># -*- coding: utf-8 -*-
"""Queensland box in Australia"""
from .idb import * # noqa
from django.utils.translation import ugettext_lazy as _
IDEASCUBE_NAME = u"Queensland"
IDEASCUBE_PLACE_NAME = _("the community")
COUNTRIES_FIRST = ['AU']
TIME_ZONE = 'Australia/Darwin'
LANGUAGE_CODE = 'en'
LOAN_DURATION = 14
MONITORING_ENTRY_EXPORT_FIELDS = ['serial', 'user_id', 'birth_year', 'gender']
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal informations'), ['short_name', 'full_name', 'birth_year', 'gender', 'id_card_number']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the town'), ['current_occupation', 'school_level']),
(_('Language skills'), ['en_level']),
)
<commit_msg>Change cards for the new version
We set up a new version of the server and installed the ZIM file with the
catalog, so the cards have to change from the old version to the new
version to match the ideascube catalog policy<commit_after># -*- coding: utf-8 -*-
"""Queensland box in Australia"""
from .idb import * # noqa
from django.utils.translation import ugettext_lazy as _
IDEASCUBE_NAME = u"Queensland"
IDEASCUBE_PLACE_NAME = _("the community")
COUNTRIES_FIRST = ['AU']
TIME_ZONE = 'Australia/Darwin'
LANGUAGE_CODE = 'en'
LOAN_DURATION = 14
MONITORING_ENTRY_EXPORT_FIELDS = ['serial', 'user_id', 'birth_year', 'gender']
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal informations'), ['short_name', 'full_name', 'birth_year', 'gender', 'id_card_number']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the town'), ['current_occupation', 'school_level']),
(_('Language skills'), ['en_level']),
)
STAFF_HOME_CARDS = [c for c in STAFF_HOME_CARDS
if c['url'] not in ['server:battery']]
HOME_CARDS = STAFF_HOME_CARDS + [
{
'id': 'blog',
},
{
'id': 'mediacenter',
},
{
'id': 'library',
},
{
'id': 'wikipedia',
'languages': ['en']
},
{
'id': 'khanacademy',
},
{
'id': 'gutenberg',
'lang': 'en',
},
{
'id': 'vikidia',
'languages': ['en']
},
]
|
6894bd3cfc010c371478e7ae9e5e0b3ba108e165
|
plugins/configuration/configurationtype/configuration_registrar.py
|
plugins/configuration/configurationtype/configuration_registrar.py
|
#!/usr/bin/env python
#-*- coding: utf-8 -*-
#This software is distributed under the Creative Commons license (CC0) version 1.0. A copy of this license should have been distributed with this software.
#The license can also be read online: <https://creativecommons.org/publicdomain/zero/1.0/>. If this online license differs from the license provided with this software, the license provided with this software should be applied.
import luna.plugins
_configurations = {}
"""
The configuration classes that have been registered here so far, keyed by their
identities.
"""
def register(identity, metadata):
"""
Registers a new configuration plug-in to track configuration with.
This expects the metadata to already be verified as configuration's
metadata.
:param identity: The identity of the plug-in to register.
:param metadata: The metadata of a configuration plug-in.
"""
if identity in _configurations:
luna.plugins.api("logger").warning("Configuration {configuration} is already registered.", configuration=identity)
return
_configurations[identity] = metadata["configuration"]["class"]
def unregister(identity):
raise Exception("Not implemented yet.")
def validate_metadata(metadata):
raise Exception("Not implemented yet.")
|
#!/usr/bin/env python
#-*- coding: utf-8 -*-
#This software is distributed under the Creative Commons license (CC0) version 1.0. A copy of this license should have been distributed with this software.
#The license can also be read online: <https://creativecommons.org/publicdomain/zero/1.0/>. If this online license differs from the license provided with this software, the license provided with this software should be applied.
import luna.plugins
_configurations = {}
"""
The configuration classes that have been registered here so far, keyed by their
identities.
"""
def register(identity, metadata):
"""
Registers a new configuration plug-in to track configuration with.
This expects the metadata to already be verified as configuration's
metadata.
:param identity: The identity of the plug-in to register.
:param metadata: The metadata of a configuration plug-in.
"""
if identity in _configurations:
luna.plugins.api("logger").warning("Configuration {configuration} is already registered.", configuration=identity)
return
_configurations[identity] = metadata["configuration"]["class"]
def unregister(identity):
"""
Undoes the registration of a configuration plug-in.
The configuration plug-in will no longer keep track of any configuration.
Existing configuration will be stored persistently.
:param identity: The identity of the plug-in to unregister.
"""
if identity not in _configurations:
luna.plugins.api("logger").warning("Configuration {configuration} is not registered, so I can't unregister it.", configuration=identity)
return
del _configurations[identity] #The actual unregistration.
def validate_metadata(metadata):
raise Exception("Not implemented yet.")
|
Implement unregistration of configuration plug-ins
|
Implement unregistration of configuration plug-ins
Perhaps we should not give a warning, but instead an exception, when registering or unregistering fails?
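A minimal sketch of the exception-raising alternative mentioned above; it reuses the module-level _configurations dict from this plug-in, and the choice of KeyError is an assumption rather than anything this commit defines:
# Hypothetical strict variant: fail loudly instead of logging a warning.
def unregister_strict(identity):
    if identity not in _configurations:
        # Assumption: KeyError; the plug-in API could define its own error type.
        raise KeyError("Configuration {0} is not registered.".format(identity))
    del _configurations[identity]  # The actual unregistration.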
|
Python
|
cc0-1.0
|
Ghostkeeper/Luna
|
#!/usr/bin/env python
#-*- coding: utf-8 -*-
#This software is distributed under the Creative Commons license (CC0) version 1.0. A copy of this license should have been distributed with this software.
#The license can also be read online: <https://creativecommons.org/publicdomain/zero/1.0/>. If this online license differs from the license provided with this software, the license provided with this software should be applied.
import luna.plugins
_configurations = {}
"""
The configuration classes that have been registered here so far, keyed by their
identities.
"""
def register(identity, metadata):
"""
Registers a new configuration plug-in to track configuration with.
This expects the metadata to already be verified as configuration's
metadata.
:param identity: The identity of the plug-in to register.
:param metadata: The metadata of a configuration plug-in.
"""
if identity in _configurations:
luna.plugins.api("logger").warning("Configuration {configuration} is already registered.", configuration=identity)
return
_configurations[identity] = metadata["configuration"]["class"]
def unregister(identity):
raise Exception("Not implemented yet.")
def validate_metadata(metadata):
raise Exception("Not implemented yet.")Implement unregistration of configuration plug-ins
Perhaps we should not give a warning, but instead an exception, when registering or unregistering fails?
|
#!/usr/bin/env python
#-*- coding: utf-8 -*-
#This software is distributed under the Creative Commons license (CC0) version 1.0. A copy of this license should have been distributed with this software.
#The license can also be read online: <https://creativecommons.org/publicdomain/zero/1.0/>. If this online license differs from the license provided with this software, the license provided with this software should be applied.
import luna.plugins
_configurations = {}
"""
The configuration classes that have been registered here so far, keyed by their
identities.
"""
def register(identity, metadata):
"""
Registers a new configuration plug-in to track configuration with.
This expects the metadata to already be verified as configuration's
metadata.
:param identity: The identity of the plug-in to register.
:param metadata: The metadata of a configuration plug-in.
"""
if identity in _configurations:
luna.plugins.api("logger").warning("Configuration {configuration} is already registered.", configuration=identity)
return
_configurations[identity] = metadata["configuration"]["class"]
def unregister(identity):
"""
Undoes the registration of a configuration plug-in.
The configuration plug-in will no longer keep track of any configuration.
Existing configuration will be stored persistently.
:param identity: The identity of the plug-in to unregister.
"""
if identity not in _configurations:
luna.plugins.api("logger").warning("Configuration {configuration} is not registered, so I can't unregister it.", configuration=identity)
return
del _configurations[identity] #The actual unregistration.
def validate_metadata(metadata):
raise Exception("Not implemented yet.")
|
<commit_before>#!/usr/bin/env python
#-*- coding: utf-8 -*-
#This software is distributed under the Creative Commons license (CC0) version 1.0. A copy of this license should have been distributed with this software.
#The license can also be read online: <https://creativecommons.org/publicdomain/zero/1.0/>. If this online license differs from the license provided with this software, the license provided with this software should be applied.
import luna.plugins
_configurations = {}
"""
The configuration classes that have been registered here so far, keyed by their
identities.
"""
def register(identity, metadata):
"""
Registers a new configuration plug-in to track configuration with.
This expects the metadata to already be verified as configuration's
metadata.
:param identity: The identity of the plug-in to register.
:param metadata: The metadata of a configuration plug-in.
"""
if identity in _configurations:
luna.plugins.api("logger").warning("Configuration {configuration} is already registered.", configuration=identity)
return
_configurations[identity] = metadata["configuration"]["class"]
def unregister(identity):
raise Exception("Not implemented yet.")
def validate_metadata(metadata):
raise Exception("Not implemented yet.")<commit_msg>Implement unregistration of configuration plug-ins
Perhaps we should not give a warning, but instead an exception, when registering or unregistering fails?<commit_after>
|
#!/usr/bin/env python
#-*- coding: utf-8 -*-
#This software is distributed under the Creative Commons license (CC0) version 1.0. A copy of this license should have been distributed with this software.
#The license can also be read online: <https://creativecommons.org/publicdomain/zero/1.0/>. If this online license differs from the license provided with this software, the license provided with this software should be applied.
import luna.plugins
_configurations = {}
"""
The configuration classes that have been registered here so far, keyed by their
identities.
"""
def register(identity, metadata):
"""
Registers a new configuration plug-in to track configuration with.
This expects the metadata to already be verified as configuration's
metadata.
:param identity: The identity of the plug-in to register.
:param metadata: The metadata of a configuration plug-in.
"""
if identity in _configurations:
luna.plugins.api("logger").warning("Configuration {configuration} is already registered.", configuration=identity)
return
_configurations[identity] = metadata["configuration"]["class"]
def unregister(identity):
"""
Undoes the registration of a configuration plug-in.
The configuration plug-in will no longer keep track of any configuration.
Existing configuration will be stored persistently.
:param identity: The identity of the plug-in to unregister.
"""
if identity not in _configurations:
luna.plugins.api("logger").warning("Configuration {configuration} is not registered, so I can't unregister it.", configuration=identity)
return
del _configurations[identity] #The actual unregistration.
def validate_metadata(metadata):
raise Exception("Not implemented yet.")
|
#!/usr/bin/env python
#-*- coding: utf-8 -*-
#This software is distributed under the Creative Commons license (CC0) version 1.0. A copy of this license should have been distributed with this software.
#The license can also be read online: <https://creativecommons.org/publicdomain/zero/1.0/>. If this online license differs from the license provided with this software, the license provided with this software should be applied.
import luna.plugins
_configurations = {}
"""
The configuration classes that have been registered here so far, keyed by their
identities.
"""
def register(identity, metadata):
"""
Registers a new configuration plug-in to track configuration with.
This expects the metadata to already be verified as configuration's
metadata.
:param identity: The identity of the plug-in to register.
:param metadata: The metadata of a configuration plug-in.
"""
if identity in _configurations:
luna.plugins.api("logger").warning("Configuration {configuration} is already registered.", configuration=identity)
return
_configurations[identity] = metadata["configuration"]["class"]
def unregister(identity):
raise Exception("Not implemented yet.")
def validate_metadata(metadata):
raise Exception("Not implemented yet.")Implement unregistration of configuration plug-ins
Perhaps we should not give a warning, but instead an exception, when registering or unregistering fails?#!/usr/bin/env python
#-*- coding: utf-8 -*-
#This software is distributed under the Creative Commons license (CC0) version 1.0. A copy of this license should have been distributed with this software.
#The license can also be read online: <https://creativecommons.org/publicdomain/zero/1.0/>. If this online license differs from the license provided with this software, the license provided with this software should be applied.
import luna.plugins
_configurations = {}
"""
The configuration classes that have been registered here so far, keyed by their
identities.
"""
def register(identity, metadata):
"""
Registers a new configuration plug-in to track configuration with.
This expects the metadata to already be verified as configuration's
metadata.
:param identity: The identity of the plug-in to register.
:param metadata: The metadata of a configuration plug-in.
"""
if identity in _configurations:
luna.plugins.api("logger").warning("Configuration {configuration} is already registered.", configuration=identity)
return
_configurations[identity] = metadata["configuration"]["class"]
def unregister(identity):
"""
Undoes the registration of a configuration plug-in.
The configuration plug-in will no longer keep track of any configuration.
Existing configuration will be stored persistently.
:param identity: The identity of the plug-in to unregister.
"""
if identity not in _configurations:
luna.plugins.api("logger").warning("Configuration {configuration} is not registered, so I can't unregister it.", configuration=identity)
return
del _configurations[identity] #The actual unregistration.
def validate_metadata(metadata):
raise Exception("Not implemented yet.")
|
<commit_before>#!/usr/bin/env python
#-*- coding: utf-8 -*-
#This software is distributed under the Creative Commons license (CC0) version 1.0. A copy of this license should have been distributed with this software.
#The license can also be read online: <https://creativecommons.org/publicdomain/zero/1.0/>. If this online license differs from the license provided with this software, the license provided with this software should be applied.
import luna.plugins
_configurations = {}
"""
The configuration classes that have been registered here so far, keyed by their
identities.
"""
def register(identity, metadata):
"""
Registers a new configuration plug-in to track configuration with.
This expects the metadata to already be verified as configuration's
metadata.
:param identity: The identity of the plug-in to register.
:param metadata: The metadata of a configuration plug-in.
"""
if identity in _configurations:
luna.plugins.api("logger").warning("Configuration {configuration} is already registered.", configuration=identity)
return
_configurations[identity] = metadata["configuration"]["class"]
def unregister(identity):
raise Exception("Not implemented yet.")
def validate_metadata(metadata):
raise Exception("Not implemented yet.")<commit_msg>Implement unregistration of configuration plug-ins
Perhaps we should not give a warning, but instead an exception, when registering or unregistering fails?<commit_after>#!/usr/bin/env python
#-*- coding: utf-8 -*-
#This software is distributed under the Creative Commons license (CC0) version 1.0. A copy of this license should have been distributed with this software.
#The license can also be read online: <https://creativecommons.org/publicdomain/zero/1.0/>. If this online license differs from the license provided with this software, the license provided with this software should be applied.
import luna.plugins
_configurations = {}
"""
The configuration classes that have been registered here so far, keyed by their
identities.
"""
def register(identity, metadata):
"""
Registers a new configuration plug-in to track configuration with.
This expects the metadata to already be verified as configuration's
metadata.
:param identity: The identity of the plug-in to register.
:param metadata: The metadata of a configuration plug-in.
"""
if identity in _configurations:
luna.plugins.api("logger").warning("Configuration {configuration} is already registered.", configuration=identity)
return
_configurations[identity] = metadata["configuration"]["class"]
def unregister(identity):
"""
Undoes the registration of a configuration plug-in.
The configuration plug-in will no longer keep track of any configuration.
Existing configuration will be stored persistently.
:param identity: The identity of the plug-in to unregister.
"""
if identity not in _configurations:
luna.plugins.api("logger").warning("Configuration {configuration} is not registered, so I can't unregister it.", configuration=identity)
return
del _configurations[identity] #The actual unregistration.
def validate_metadata(metadata):
raise Exception("Not implemented yet.")
|
7b83e8fbe8e6a249ab82db38e358774ba78b4ea8
|
pyflation/analysis/__init__.py
|
pyflation/analysis/__init__.py
|
""" analysis package - Provides modules to analyse results from cosmomodels runs.
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
"""
from adiabatic import Pr, Pzeta, scaled_Pr
from nonadiabatic import deltaPspectrum, deltaPnadspectrum, deltarhospectrum
|
""" analysis package - Provides modules to analyse results from cosmomodels runs.
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
"""
from adiabatic import Pr, Pzeta, scaled_Pr, scaled_Pzeta
from nonadiabatic import deltaPspectrum, deltaPnadspectrum, deltarhospectrum,\
Sspectrum, scaled_dP_spectrum, scaled_dPnad_spectrum,\
scaled_S_spectrum
|
Add new S spectrum functions into package initializer.
|
Add new S spectrum functions into package initializer.
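With the re-exports in place, callers can pull the new spectrum functions straight from the package; a minimal usage sketch, assuming pyflation is installed:
# Names resolve through the package initializer rather than the submodule.
from pyflation.analysis import Sspectrum, scaled_S_spectrum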
|
Python
|
bsd-3-clause
|
ihuston/pyflation,ihuston/pyflation
|
""" analysis package - Provides modules to analyse results from cosmomodels runs.
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
"""
from adiabatic import Pr, Pzeta, scaled_Pr
from nonadiabatic import deltaPspectrum, deltaPnadspectrum, deltarhospectrumAdd new S spectrum functions into package initializer.
|
""" analysis package - Provides modules to analyse results from cosmomodels runs.
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
"""
from adiabatic import Pr, Pzeta, scaled_Pr, scaled_Pzeta
from nonadiabatic import deltaPspectrum, deltaPnadspectrum, deltarhospectrum,\
Sspectrum, scaled_dP_spectrum, scaled_dPnad_spectrum,\
scaled_S_spectrum
|
<commit_before>""" analysis package - Provides modules to analyse results from cosmomodels runs.
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
"""
from adiabatic import Pr, Pzeta, scaled_Pr
from nonadiabatic import deltaPspectrum, deltaPnadspectrum, deltarhospectrum<commit_msg>Add new S spectrum functions into package initializer.<commit_after>
|
""" analysis package - Provides modules to analyse results from cosmomodels runs.
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
"""
from adiabatic import Pr, Pzeta, scaled_Pr, scaled_Pzeta
from nonadiabatic import deltaPspectrum, deltaPnadspectrum, deltarhospectrum,\
Sspectrum, scaled_dP_spectrum, scaled_dPnad_spectrum,\
scaled_S_spectrum
|
""" analysis package - Provides modules to analyse results from cosmomodels runs.
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
"""
from adiabatic import Pr, Pzeta, scaled_Pr
from nonadiabatic import deltaPspectrum, deltaPnadspectrum, deltarhospectrumAdd new S spectrum functions into package initializer.""" analysis package - Provides modules to analyse results from cosmomodels runs.
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
"""
from adiabatic import Pr, Pzeta, scaled_Pr, scaled_Pzeta
from nonadiabatic import deltaPspectrum, deltaPnadspectrum, deltarhospectrum,\
Sspectrum, scaled_dP_spectrum, scaled_dPnad_spectrum,\
scaled_S_spectrum
|
<commit_before>""" analysis package - Provides modules to analyse results from cosmomodels runs.
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
"""
from adiabatic import Pr, Pzeta, scaled_Pr
from nonadiabatic import deltaPspectrum, deltaPnadspectrum, deltarhospectrum<commit_msg>Add new S spectrum functions into package initializer.<commit_after>""" analysis package - Provides modules to analyse results from cosmomodels runs.
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
"""
from adiabatic import Pr, Pzeta, scaled_Pr, scaled_Pzeta
from nonadiabatic import deltaPspectrum, deltaPnadspectrum, deltarhospectrum,\
Sspectrum, scaled_dP_spectrum, scaled_dPnad_spectrum,\
scaled_S_spectrum
|
6212f78597dff977a7e7348544d09c7a649aa470
|
bitbots_transform/src/bitbots_transform/transform_ball.py
|
bitbots_transform/src/bitbots_transform/transform_ball.py
|
#!/usr/bin/env python2.7
import rospy
from bitbots_transform.transform_helper import transf
from humanoid_league_msgs.msg import BallRelative, BallInImage
from sensor_msgs.msg import CameraInfo
class TransformLines(object):
def __init__(self):
rospy.Subscriber("ball_in_image", BallInImage, self._callback_ball, queue_size=1)
rospy.Subscriber("camera/camera_info", CameraInfo, self._callback_camera_info)
self.line_relative_pub = rospy.Publisher("ball_relative", BallRelative, queue_size=10)
self.caminfo = None # type:CameraInfo
rospy.init_node("transform_ball")
rospy.spin()
def _callback_ball(self, ballinfo):
if not self.caminfo:
            return # No cameraInfo available
self.work(ballinfo)
def work(self, ballinfo):
p = transf(ballinfo.center.x, ballinfo.center.y, self.caminfo)
br = BallRelative()
br.header.stamp = ballinfo.header.stamp
br.header.frame_id = "base_link"
br.ball_relative.x = p[0]
br.ball_relative.y = p[1]
br.ball_relative.z = p[2]
self.line_relative_pub.publish(br)
def _callback_camera_info(self, camerainfo):
self.caminfo = camerainfo
if __name__ == "__main__":
TransformLines()
|
#!/usr/bin/env python2.7
import rospy
from bitbots_transform.transform_helper import transf
from humanoid_league_msgs.msg import BallRelative, BallInImage
from sensor_msgs.msg import CameraInfo
class TransformBall(object):
def __init__(self):
rospy.Subscriber("ball_in_image", BallInImage, self._callback_ball, queue_size=1)
rospy.Subscriber("minibot/camera/camera_info", CameraInfo, self._callback_camera_info)
self.ball_relative_pub = rospy.Publisher("ball_relative", BallRelative, queue_size=10)
self.caminfo = None # type:CameraInfo
rospy.init_node("transform_ball")
rospy.spin()
def _callback_ball(self, ballinfo):
if not self.caminfo:
            return # No cameraInfo available
self.work(ballinfo)
def work(self, ballinfo):
p = transf(ballinfo.center.x, ballinfo.center.y - ballinfo.diameter // 2, self.caminfo)
br = BallRelative()
br.header.stamp = ballinfo.header.stamp
br.header.frame_id = "base_link"
br.ball_relative.x = p[0]
br.ball_relative.y = p[1]
br.ball_relative.z = p[2]
self.ball_relative_pub.publish(br)
def _callback_camera_info(self, camerainfo):
self.caminfo = camerainfo
if __name__ == "__main__":
TransformBall()
|
Transform Ball: Fixed wrong names
|
Transform Ball: Fixed wrong names
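Besides the renames, the diff also moves the projected point from the ball's centre to its top edge via center.y - diameter // 2. A minimal sketch of that offset in isolation, with illustrative numbers:
# Hypothetical detection: ball centre at pixel row 240, diameter 40 pixels.
center_y = 240
diameter = 40
top_edge_y = center_y - diameter // 2  # 220: the row of the ball's upper rim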
|
Python
|
mit
|
bit-bots/bitbots_misc,bit-bots/bitbots_misc,bit-bots/bitbots_misc
|
#!/usr/bin/env python2.7
import rospy
from bitbots_transform.transform_helper import transf
from humanoid_league_msgs.msg import BallRelative, BallInImage
from sensor_msgs.msg import CameraInfo
class TransformLines(object):
def __init__(self):
rospy.Subscriber("ball_in_image", BallInImage, self._callback_ball, queue_size=1)
rospy.Subscriber("camera/camera_info", CameraInfo, self._callback_camera_info)
self.line_relative_pub = rospy.Publisher("ball_relative", BallRelative, queue_size=10)
self.caminfo = None # type:CameraInfo
rospy.init_node("transform_ball")
rospy.spin()
def _callback_ball(self, ballinfo):
if not self.caminfo:
            return # No cameraInfo available
self.work(ballinfo)
def work(self, ballinfo):
p = transf(ballinfo.center.x, ballinfo.center.y, self.caminfo)
br = BallRelative()
br.header.stamp = ballinfo.header.stamp
br.header.frame_id = "base_link"
br.ball_relative.x = p[0]
br.ball_relative.y = p[1]
br.ball_relative.z = p[2]
self.line_relative_pub.publish(br)
def _callback_camera_info(self, camerainfo):
self.caminfo = camerainfo
if __name__ == "__main__":
TransformLines()
Transform Ball: Fixed wrong names
|
#!/usr/bin/env python2.7
import rospy
from bitbots_transform.transform_helper import transf
from humanoid_league_msgs.msg import BallRelative, BallInImage
from sensor_msgs.msg import CameraInfo
class TransformBall(object):
def __init__(self):
rospy.Subscriber("ball_in_image", BallInImage, self._callback_ball, queue_size=1)
rospy.Subscriber("minibot/camera/camera_info", CameraInfo, self._callback_camera_info)
self.ball_relative_pub = rospy.Publisher("ball_relative", BallRelative, queue_size=10)
self.caminfo = None # type:CameraInfo
rospy.init_node("transform_ball")
rospy.spin()
def _callback_ball(self, ballinfo):
if not self.caminfo:
            return # No cameraInfo available
self.work(ballinfo)
def work(self, ballinfo):
p = transf(ballinfo.center.x, ballinfo.center.y - ballinfo.diameter // 2, self.caminfo)
br = BallRelative()
br.header.stamp = ballinfo.header.stamp
br.header.frame_id = "base_link"
br.ball_relative.x = p[0]
br.ball_relative.y = p[1]
br.ball_relative.z = p[2]
self.ball_relative_pub.publish(br)
def _callback_camera_info(self, camerainfo):
self.caminfo = camerainfo
if __name__ == "__main__":
TransformBall()
|
<commit_before>#!/usr/bin/env python2.7
import rospy
from bitbots_transform.transform_helper import transf
from humanoid_league_msgs.msg import BallRelative, BallInImage
from sensor_msgs.msg import CameraInfo
class TransformLines(object):
def __init__(self):
rospy.Subscriber("ball_in_image", BallInImage, self._callback_ball, queue_size=1)
rospy.Subscriber("camera/camera_info", CameraInfo, self._callback_camera_info)
self.line_relative_pub = rospy.Publisher("ball_relative", BallRelative, queue_size=10)
self.caminfo = None # type:CameraInfo
rospy.init_node("transform_ball")
rospy.spin()
def _callback_ball(self, ballinfo):
if not self.caminfo:
            return # No cameraInfo available
self.work(ballinfo)
def work(self, ballinfo):
p = transf(ballinfo.center.x, ballinfo.center.y, self.caminfo)
br = BallRelative()
br.header.stamp = ballinfo.header.stamp
br.header.frame_id = "base_link"
br.ball_relative.x = p[0]
br.ball_relative.y = p[1]
br.ball_relative.z = p[2]
self.line_relative_pub.publish(br)
def _callback_camera_info(self, camerainfo):
self.caminfo = camerainfo
if __name__ == "__main__":
TransformLines()
<commit_msg>Transform Ball: Fixed wrong names<commit_after>
|
#!/usr/bin/env python2.7
import rospy
from bitbots_transform.transform_helper import transf
from humanoid_league_msgs.msg import BallRelative, BallInImage
from sensor_msgs.msg import CameraInfo
class TransformBall(object):
def __init__(self):
rospy.Subscriber("ball_in_image", BallInImage, self._callback_ball, queue_size=1)
rospy.Subscriber("minibot/camera/camera_info", CameraInfo, self._callback_camera_info)
self.ball_relative_pub = rospy.Publisher("ball_relative", BallRelative, queue_size=10)
self.caminfo = None # type:CameraInfo
rospy.init_node("transform_ball")
rospy.spin()
def _callback_ball(self, ballinfo):
if not self.caminfo:
            return # No cameraInfo available
self.work(ballinfo)
def work(self, ballinfo):
p = transf(ballinfo.center.x, ballinfo.center.y - ballinfo.diameter // 2, self.caminfo)
br = BallRelative()
br.header.stamp = ballinfo.header.stamp
br.header.frame_id = "base_link"
br.ball_relative.x = p[0]
br.ball_relative.y = p[1]
br.ball_relative.z = p[2]
self.ball_relative_pub.publish(br)
def _callback_camera_info(self, camerainfo):
self.caminfo = camerainfo
if __name__ == "__main__":
TransformBall()
|
#!/usr/bin/env python2.7
import rospy
from bitbots_transform.transform_helper import transf
from humanoid_league_msgs.msg import BallRelative, BallInImage
from sensor_msgs.msg import CameraInfo
class TransformLines(object):
def __init__(self):
rospy.Subscriber("ball_in_image", BallInImage, self._callback_ball, queue_size=1)
rospy.Subscriber("camera/camera_info", CameraInfo, self._callback_camera_info)
self.line_relative_pub = rospy.Publisher("ball_relative", BallRelative, queue_size=10)
self.caminfo = None # type:CameraInfo
rospy.init_node("transform_ball")
rospy.spin()
def _callback_ball(self, ballinfo):
if not self.caminfo:
            return # No cameraInfo available
self.work(ballinfo)
def work(self, ballinfo):
p = transf(ballinfo.center.x, ballinfo.center.y, self.caminfo)
br = BallRelative()
br.header.stamp = ballinfo.header.stamp
br.header.frame_id = "base_link"
br.ball_relative.x = p[0]
br.ball_relative.y = p[1]
br.ball_relative.z = p[2]
self.line_relative_pub.publish(br)
def _callback_camera_info(self, camerainfo):
self.caminfo = camerainfo
if __name__ == "__main__":
TransformLines()
Transform Ball: Fixed wrong names#!/usr/bin/env python2.7
import rospy
from bitbots_transform.transform_helper import transf
from humanoid_league_msgs.msg import BallRelative, BallInImage
from sensor_msgs.msg import CameraInfo
class TransformBall(object):
def __init__(self):
rospy.Subscriber("ball_in_image", BallInImage, self._callback_ball, queue_size=1)
rospy.Subscriber("minibot/camera/camera_info", CameraInfo, self._callback_camera_info)
self.ball_relative_pub = rospy.Publisher("ball_relative", BallRelative, queue_size=10)
self.caminfo = None # type:CameraInfo
rospy.init_node("transform_ball")
rospy.spin()
def _callback_ball(self, ballinfo):
if not self.caminfo:
            return # No cameraInfo available
self.work(ballinfo)
def work(self, ballinfo):
p = transf(ballinfo.center.x, ballinfo.center.y - ballinfo.diameter // 2, self.caminfo)
br = BallRelative()
br.header.stamp = ballinfo.header.stamp
br.header.frame_id = "base_link"
br.ball_relative.x = p[0]
br.ball_relative.y = p[1]
br.ball_relative.z = p[2]
self.ball_relative_pub.publish(br)
def _callback_camera_info(self, camerainfo):
self.caminfo = camerainfo
if __name__ == "__main__":
TransformBall()
|
<commit_before>#!/usr/bin/env python2.7
import rospy
from bitbots_transform.transform_helper import transf
from humanoid_league_msgs.msg import BallRelative, BallInImage
from sensor_msgs.msg import CameraInfo
class TransformLines(object):
def __init__(self):
rospy.Subscriber("ball_in_image", BallInImage, self._callback_ball, queue_size=1)
rospy.Subscriber("camera/camera_info", CameraInfo, self._callback_camera_info)
self.line_relative_pub = rospy.Publisher("ball_relative", BallRelative, queue_size=10)
self.caminfo = None # type:CameraInfo
rospy.init_node("transform_ball")
rospy.spin()
def _callback_ball(self, ballinfo):
if not self.caminfo:
            return # No cameraInfo available
self.work(ballinfo)
def work(self, ballinfo):
p = transf(ballinfo.center.x, ballinfo.center.y, self.caminfo)
br = BallRelative()
br.header.stamp = ballinfo.header.stamp
br.header.frame_id = "base_link"
br.ball_relative.x = p[0]
br.ball_relative.y = p[1]
br.ball_relative.z = p[2]
self.line_relative_pub.publish(br)
def _callback_camera_info(self, camerainfo):
self.caminfo = camerainfo
if __name__ == "__main__":
TransformLines()
<commit_msg>Transform Ball: Fixed wrong names<commit_after>#!/usr/bin/env python2.7
import rospy
from bitbots_transform.transform_helper import transf
from humanoid_league_msgs.msg import BallRelative, BallInImage
from sensor_msgs.msg import CameraInfo
class TransformBall(object):
def __init__(self):
rospy.Subscriber("ball_in_image", BallInImage, self._callback_ball, queue_size=1)
rospy.Subscriber("minibot/camera/camera_info", CameraInfo, self._callback_camera_info)
self.ball_relative_pub = rospy.Publisher("ball_relative", BallRelative, queue_size=10)
self.caminfo = None # type:CameraInfo
rospy.init_node("transform_ball")
rospy.spin()
def _callback_ball(self, ballinfo):
if not self.caminfo:
            return # No cameraInfo available
self.work(ballinfo)
def work(self, ballinfo):
p = transf(ballinfo.center.x, ballinfo.center.y - ballinfo.diameter // 2, self.caminfo)
br = BallRelative()
br.header.stamp = ballinfo.header.stamp
br.header.frame_id = "base_link"
br.ball_relative.x = p[0]
br.ball_relative.y = p[1]
br.ball_relative.z = p[2]
self.ball_relative_pub.publish(br)
def _callback_camera_info(self, camerainfo):
self.caminfo = camerainfo
if __name__ == "__main__":
TransformBall()
|
b28b4bb834d8ab70e8820c43ed8cf11242c1b5b6
|
keystoneclient/v2_0/endpoints.py
|
keystoneclient/v2_0/endpoints.py
|
# Copyright 2012 Canonical Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneclient import base
class Endpoint(base.Resource):
"""Represents a Keystone endpoint."""
def __repr__(self):
return "<Endpoint %s>" % self._info
class EndpointManager(base.ManagerWithFind):
"""Manager class for manipulating Keystone endpoints."""
resource_class = Endpoint
def list(self):
"""List all available endpoints."""
return self._list('/endpoints', 'endpoints')
def create(self, region, service_id, publicurl, adminurl, internalurl):
"""Create a new endpoint."""
body = {'endpoint': {'region': region,
'service_id': service_id,
'publicurl': publicurl,
'adminurl': adminurl,
'internalurl': internalurl}}
return self._create('/endpoints', body, 'endpoint')
def delete(self, id):
"""Delete an endpoint."""
return self._delete('/endpoints/%s' % id)
|
# Copyright 2012 Canonical Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneclient import base
class Endpoint(base.Resource):
"""Represents a Keystone endpoint."""
def __repr__(self):
return "<Endpoint %s>" % self._info
class EndpointManager(base.ManagerWithFind):
"""Manager class for manipulating Keystone endpoints."""
resource_class = Endpoint
def list(self):
"""List all available endpoints."""
return self._list('/endpoints', 'endpoints')
def create(self, region, service_id, publicurl, adminurl=None,
internalurl=None):
"""Create a new endpoint."""
body = {'endpoint': {'region': region,
'service_id': service_id,
'publicurl': publicurl,
'adminurl': adminurl,
'internalurl': internalurl}}
return self._create('/endpoints', body, 'endpoint')
def delete(self, id):
"""Delete an endpoint."""
return self._delete('/endpoints/%s' % id)
|
Make parameters in EndpointManager optional
|
Make parameters in EndpointManager optional
Change adminurl and internalurl parameters in EndpointManager create()
to optional parameters.
Change-Id: I490e35b89f7ae7c6cdbced6ba8d3b82d5132c19d
Closes-Bug: #1318436
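After this change only region, service_id and publicurl are required; a minimal usage sketch (the keystone client object and service id are assumptions, not part of this commit):
# Hypothetical call site: adminurl and internalurl default to None.
endpoint = keystone.endpoints.create(
    region='RegionOne',
    service_id='hypothetical-service-id',
    publicurl='http://example.com:5000/v2.0')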
|
Python
|
apache-2.0
|
magic0704/python-keystoneclient,jamielennox/python-keystoneclient,klmitch/python-keystoneclient,ging/python-keystoneclient,alexpilotti/python-keystoneclient,klmitch/python-keystoneclient,darren-wang/ksc,alexpilotti/python-keystoneclient,magic0704/python-keystoneclient,Mercador/python-keystoneclient,ging/python-keystoneclient,Mercador/python-keystoneclient,jamielennox/python-keystoneclient,sdpp/python-keystoneclient,sdpp/python-keystoneclient,darren-wang/ksc,jamielennox/python-keystoneclient
|
# Copyright 2012 Canonical Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneclient import base
class Endpoint(base.Resource):
"""Represents a Keystone endpoint."""
def __repr__(self):
return "<Endpoint %s>" % self._info
class EndpointManager(base.ManagerWithFind):
"""Manager class for manipulating Keystone endpoints."""
resource_class = Endpoint
def list(self):
"""List all available endpoints."""
return self._list('/endpoints', 'endpoints')
def create(self, region, service_id, publicurl, adminurl, internalurl):
"""Create a new endpoint."""
body = {'endpoint': {'region': region,
'service_id': service_id,
'publicurl': publicurl,
'adminurl': adminurl,
'internalurl': internalurl}}
return self._create('/endpoints', body, 'endpoint')
def delete(self, id):
"""Delete an endpoint."""
return self._delete('/endpoints/%s' % id)
Make parameters in EndpointManager optional
Change adminurl and internalurl parameters in EndpointManager create()
to optional parameters.
Change-Id: I490e35b89f7ae7c6cdbced6ba8d3b82d5132c19d
Closes-Bug: #1318436
|
# Copyright 2012 Canonical Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneclient import base
class Endpoint(base.Resource):
"""Represents a Keystone endpoint."""
def __repr__(self):
return "<Endpoint %s>" % self._info
class EndpointManager(base.ManagerWithFind):
"""Manager class for manipulating Keystone endpoints."""
resource_class = Endpoint
def list(self):
"""List all available endpoints."""
return self._list('/endpoints', 'endpoints')
def create(self, region, service_id, publicurl, adminurl=None,
internalurl=None):
"""Create a new endpoint."""
body = {'endpoint': {'region': region,
'service_id': service_id,
'publicurl': publicurl,
'adminurl': adminurl,
'internalurl': internalurl}}
return self._create('/endpoints', body, 'endpoint')
def delete(self, id):
"""Delete an endpoint."""
return self._delete('/endpoints/%s' % id)
|
<commit_before># Copyright 2012 Canonical Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneclient import base
class Endpoint(base.Resource):
"""Represents a Keystone endpoint."""
def __repr__(self):
return "<Endpoint %s>" % self._info
class EndpointManager(base.ManagerWithFind):
"""Manager class for manipulating Keystone endpoints."""
resource_class = Endpoint
def list(self):
"""List all available endpoints."""
return self._list('/endpoints', 'endpoints')
def create(self, region, service_id, publicurl, adminurl, internalurl):
"""Create a new endpoint."""
body = {'endpoint': {'region': region,
'service_id': service_id,
'publicurl': publicurl,
'adminurl': adminurl,
'internalurl': internalurl}}
return self._create('/endpoints', body, 'endpoint')
def delete(self, id):
"""Delete an endpoint."""
return self._delete('/endpoints/%s' % id)
<commit_msg>Make parameters in EndpointManager optional
Change adminurl and internalurl parameters in EndpointManager create()
to optional parameters.
Change-Id: I490e35b89f7ae7c6cdbced6ba8d3b82d5132c19d
Closes-Bug: #1318436<commit_after>
|
# Copyright 2012 Canonical Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneclient import base
class Endpoint(base.Resource):
"""Represents a Keystone endpoint."""
def __repr__(self):
return "<Endpoint %s>" % self._info
class EndpointManager(base.ManagerWithFind):
"""Manager class for manipulating Keystone endpoints."""
resource_class = Endpoint
def list(self):
"""List all available endpoints."""
return self._list('/endpoints', 'endpoints')
def create(self, region, service_id, publicurl, adminurl=None,
internalurl=None):
"""Create a new endpoint."""
body = {'endpoint': {'region': region,
'service_id': service_id,
'publicurl': publicurl,
'adminurl': adminurl,
'internalurl': internalurl}}
return self._create('/endpoints', body, 'endpoint')
def delete(self, id):
"""Delete an endpoint."""
return self._delete('/endpoints/%s' % id)
|
# Copyright 2012 Canonical Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneclient import base
class Endpoint(base.Resource):
"""Represents a Keystone endpoint."""
def __repr__(self):
return "<Endpoint %s>" % self._info
class EndpointManager(base.ManagerWithFind):
"""Manager class for manipulating Keystone endpoints."""
resource_class = Endpoint
def list(self):
"""List all available endpoints."""
return self._list('/endpoints', 'endpoints')
def create(self, region, service_id, publicurl, adminurl, internalurl):
"""Create a new endpoint."""
body = {'endpoint': {'region': region,
'service_id': service_id,
'publicurl': publicurl,
'adminurl': adminurl,
'internalurl': internalurl}}
return self._create('/endpoints', body, 'endpoint')
def delete(self, id):
"""Delete an endpoint."""
return self._delete('/endpoints/%s' % id)
Make parameters in EndpointManager optional
Change adminurl and internalurl parameters in EndpointManager create()
to optional parameters.
Change-Id: I490e35b89f7ae7c6cdbced6ba8d3b82d5132c19d
Closes-Bug: #1318436# Copyright 2012 Canonical Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneclient import base
class Endpoint(base.Resource):
"""Represents a Keystone endpoint."""
def __repr__(self):
return "<Endpoint %s>" % self._info
class EndpointManager(base.ManagerWithFind):
"""Manager class for manipulating Keystone endpoints."""
resource_class = Endpoint
def list(self):
"""List all available endpoints."""
return self._list('/endpoints', 'endpoints')
def create(self, region, service_id, publicurl, adminurl=None,
internalurl=None):
"""Create a new endpoint."""
body = {'endpoint': {'region': region,
'service_id': service_id,
'publicurl': publicurl,
'adminurl': adminurl,
'internalurl': internalurl}}
return self._create('/endpoints', body, 'endpoint')
def delete(self, id):
"""Delete an endpoint."""
return self._delete('/endpoints/%s' % id)
|
<commit_before># Copyright 2012 Canonical Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneclient import base
class Endpoint(base.Resource):
"""Represents a Keystone endpoint."""
def __repr__(self):
return "<Endpoint %s>" % self._info
class EndpointManager(base.ManagerWithFind):
"""Manager class for manipulating Keystone endpoints."""
resource_class = Endpoint
def list(self):
"""List all available endpoints."""
return self._list('/endpoints', 'endpoints')
def create(self, region, service_id, publicurl, adminurl, internalurl):
"""Create a new endpoint."""
body = {'endpoint': {'region': region,
'service_id': service_id,
'publicurl': publicurl,
'adminurl': adminurl,
'internalurl': internalurl}}
return self._create('/endpoints', body, 'endpoint')
def delete(self, id):
"""Delete an endpoint."""
return self._delete('/endpoints/%s' % id)
<commit_msg>Make parameters in EndpointManager optional
Change adminurl and internalurl parameters in EndpointManager create()
to optional parameters.
Change-Id: I490e35b89f7ae7c6cdbced6ba8d3b82d5132c19d
Closes-Bug: #1318436<commit_after># Copyright 2012 Canonical Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneclient import base
class Endpoint(base.Resource):
"""Represents a Keystone endpoint."""
def __repr__(self):
return "<Endpoint %s>" % self._info
class EndpointManager(base.ManagerWithFind):
"""Manager class for manipulating Keystone endpoints."""
resource_class = Endpoint
def list(self):
"""List all available endpoints."""
return self._list('/endpoints', 'endpoints')
def create(self, region, service_id, publicurl, adminurl=None,
internalurl=None):
"""Create a new endpoint."""
body = {'endpoint': {'region': region,
'service_id': service_id,
'publicurl': publicurl,
'adminurl': adminurl,
'internalurl': internalurl}}
return self._create('/endpoints', body, 'endpoint')
def delete(self, id):
"""Delete an endpoint."""
return self._delete('/endpoints/%s' % id)
|
1005a41bd6fb3f854f75bd9d4d6ab69290778ba9
|
kolibri/core/lessons/viewsets.py
|
kolibri/core/lessons/viewsets.py
|
from rest_framework.viewsets import ModelViewSet
from .serializers import LessonSerializer
from kolibri.core.lessons.models import Lesson
class LessonViewset(ModelViewSet):
serializer_class = LessonSerializer
def get_queryset(self):
return Lesson.objects.filter(is_archived=False)
|
from rest_framework.viewsets import ModelViewSet
from .serializers import LessonSerializer
from kolibri.core.lessons.models import Lesson
class LessonViewset(ModelViewSet):
serializer_class = LessonSerializer
def get_queryset(self):
queryset = Lesson.objects.filter(is_archived=False)
classid = self.request.query_params.get('classid', None)
if classid is not None:
queryset = queryset.filter(collection_id=classid)
return queryset
|
Add classid filter for Lessons
|
Add classid filter for Lessons
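The filter is driven by a query-string parameter; a minimal request sketch (the host and API path are placeholders, not confirmed by this commit):
# Hypothetical client request: only lessons for the given class are returned.
import requests
resp = requests.get('http://localhost:8000/api/lesson/',
                    params={'classid': 'hypothetical-class-id'})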
|
Python
|
mit
|
learningequality/kolibri,mrpau/kolibri,mrpau/kolibri,lyw07/kolibri,christianmemije/kolibri,christianmemije/kolibri,indirectlylit/kolibri,indirectlylit/kolibri,jonboiser/kolibri,benjaoming/kolibri,lyw07/kolibri,jonboiser/kolibri,learningequality/kolibri,christianmemije/kolibri,jonboiser/kolibri,DXCanas/kolibri,mrpau/kolibri,christianmemije/kolibri,lyw07/kolibri,benjaoming/kolibri,learningequality/kolibri,benjaoming/kolibri,mrpau/kolibri,indirectlylit/kolibri,DXCanas/kolibri,learningequality/kolibri,jonboiser/kolibri,benjaoming/kolibri,DXCanas/kolibri,indirectlylit/kolibri,lyw07/kolibri,DXCanas/kolibri
|
from rest_framework.viewsets import ModelViewSet
from .serializers import LessonSerializer
from kolibri.core.lessons.models import Lesson
class LessonViewset(ModelViewSet):
serializer_class = LessonSerializer
def get_queryset(self):
return Lesson.objects.filter(is_archived=False)
Add classid filter for Lessons
|
from rest_framework.viewsets import ModelViewSet
from .serializers import LessonSerializer
from kolibri.core.lessons.models import Lesson
class LessonViewset(ModelViewSet):
serializer_class = LessonSerializer
def get_queryset(self):
queryset = Lesson.objects.filter(is_archived=False)
classid = self.request.query_params.get('classid', None)
if classid is not None:
queryset = queryset.filter(collection_id=classid)
return queryset
|
<commit_before>from rest_framework.viewsets import ModelViewSet
from .serializers import LessonSerializer
from kolibri.core.lessons.models import Lesson
class LessonViewset(ModelViewSet):
serializer_class = LessonSerializer
def get_queryset(self):
return Lesson.objects.filter(is_archived=False)
<commit_msg>Add classid filter for Lessons<commit_after>
|
from rest_framework.viewsets import ModelViewSet
from .serializers import LessonSerializer
from kolibri.core.lessons.models import Lesson
class LessonViewset(ModelViewSet):
serializer_class = LessonSerializer
def get_queryset(self):
queryset = Lesson.objects.filter(is_archived=False)
classid = self.request.query_params.get('classid', None)
if classid is not None:
queryset = queryset.filter(collection_id=classid)
return queryset
|
from rest_framework.viewsets import ModelViewSet
from .serializers import LessonSerializer
from kolibri.core.lessons.models import Lesson
class LessonViewset(ModelViewSet):
serializer_class = LessonSerializer
def get_queryset(self):
return Lesson.objects.filter(is_archived=False)
Add classid filter for Lessonsfrom rest_framework.viewsets import ModelViewSet
from .serializers import LessonSerializer
from kolibri.core.lessons.models import Lesson
class LessonViewset(ModelViewSet):
serializer_class = LessonSerializer
def get_queryset(self):
queryset = Lesson.objects.filter(is_archived=False)
classid = self.request.query_params.get('classid', None)
if classid is not None:
queryset = queryset.filter(collection_id=classid)
return queryset
|
<commit_before>from rest_framework.viewsets import ModelViewSet
from .serializers import LessonSerializer
from kolibri.core.lessons.models import Lesson
class LessonViewset(ModelViewSet):
serializer_class = LessonSerializer
def get_queryset(self):
return Lesson.objects.filter(is_archived=False)
<commit_msg>Add classid filter for Lessons<commit_after>from rest_framework.viewsets import ModelViewSet
from .serializers import LessonSerializer
from kolibri.core.lessons.models import Lesson
class LessonViewset(ModelViewSet):
serializer_class = LessonSerializer
def get_queryset(self):
queryset = Lesson.objects.filter(is_archived=False)
classid = self.request.query_params.get('classid', None)
if classid is not None:
queryset = queryset.filter(collection_id=classid)
return queryset
|
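The pattern in the Kolibri change above — read an optional query parameter, and narrow the queryset only when it is present — is a common Django REST Framework idiom. A minimal, framework-free sketch of the same idiom (the data and names here are illustrative, not Kolibri's):
def filter_by(items, key, value):
    """Return items unchanged when no filter value is given, else keep matches."""
    if value is None:
        return items
    return [item for item in items if item.get(key) == value]

lessons = [{'id': 1, 'collection_id': '7'}, {'id': 2, 'collection_id': '9'}]
assert filter_by(lessons, 'collection_id', None) == lessons      # no ?classid= supplied
assert filter_by(lessons, 'collection_id', '7') == [lessons[0]]  # ?classid=7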
bd5844aa6c59c8d34df12e358e5e06eefcb55f9d
|
qiita_pet/handlers/download.py
|
qiita_pet/handlers/download.py
|
from tornado.web import authenticated
from os.path import split
from .base_handlers import BaseHandler
from qiita_pet.exceptions import QiitaPetAuthorizationError
from qiita_db.util import filepath_id_to_rel_path
from qiita_db.meta_util import get_accessible_filepath_ids
class DownloadHandler(BaseHandler):
@authenticated
def get(self, filepath_id):
filepath_id = int(filepath_id)
# Check access to file
accessible_filepaths = get_accessible_filepath_ids(self.current_user)
if filepath_id not in accessible_filepaths:
raise QiitaPetAuthorizationError(
self.current_user, 'filepath id %d' % filepath_id)
relpath = filepath_id_to_rel_path(filepath_id)
fname = split(relpath)[-1]
self.set_header('Content-Description', 'File Transfer')
self.set_header('Content-Type', 'application/octet-stream')
self.set_header('Content-Transfer-Encoding', 'binary')
self.set_header('Expires', '0')
self.set_header('X-Accel-Redirect', '/protected/' + relpath)
self.set_header('Content-Disposition',
'attachment; filename=%s' % fname)
self.finish()
|
from tornado.web import authenticated
from os.path import basename
from .base_handlers import BaseHandler
from qiita_pet.exceptions import QiitaPetAuthorizationError
from qiita_db.util import filepath_id_to_rel_path
from qiita_db.meta_util import get_accessible_filepath_ids
class DownloadHandler(BaseHandler):
@authenticated
def get(self, filepath_id):
filepath_id = int(filepath_id)
# Check access to file
accessible_filepaths = get_accessible_filepath_ids(self.current_user)
if filepath_id not in accessible_filepaths:
raise QiitaPetAuthorizationError(
self.current_user, 'filepath id %d' % filepath_id)
relpath = filepath_id_to_rel_path(filepath_id)
fname = basename(relpath)
self.set_header('Content-Description', 'File Transfer')
self.set_header('Content-Type', 'application/octet-stream')
self.set_header('Content-Transfer-Encoding', 'binary')
self.set_header('Expires', '0')
self.set_header('X-Accel-Redirect', '/protected/' + relpath)
self.set_header('Content-Disposition',
'attachment; filename=%s' % fname)
self.finish()
|
Use basename instead of os.path.split(...)[-1]
|
Use basename instead of os.path.split(...)[-1]
|
Python
|
bsd-3-clause
|
ElDeveloper/qiita,josenavas/QiiTa,RNAer/qiita,squirrelo/qiita,RNAer/qiita,ElDeveloper/qiita,antgonza/qiita,adamrp/qiita,wasade/qiita,antgonza/qiita,squirrelo/qiita,biocore/qiita,adamrp/qiita,josenavas/QiiTa,biocore/qiita,ElDeveloper/qiita,adamrp/qiita,antgonza/qiita,RNAer/qiita,squirrelo/qiita,ElDeveloper/qiita,wasade/qiita,josenavas/QiiTa,wasade/qiita,antgonza/qiita,biocore/qiita,josenavas/QiiTa,biocore/qiita,squirrelo/qiita,RNAer/qiita,adamrp/qiita
|
from tornado.web import authenticated
from os.path import split
from .base_handlers import BaseHandler
from qiita_pet.exceptions import QiitaPetAuthorizationError
from qiita_db.util import filepath_id_to_rel_path
from qiita_db.meta_util import get_accessible_filepath_ids
class DownloadHandler(BaseHandler):
@authenticated
def get(self, filepath_id):
filepath_id = int(filepath_id)
# Check access to file
accessible_filepaths = get_accessible_filepath_ids(self.current_user)
if filepath_id not in accessible_filepaths:
raise QiitaPetAuthorizationError(
self.current_user, 'filepath id %d' % filepath_id)
relpath = filepath_id_to_rel_path(filepath_id)
fname = split(relpath)[-1]
self.set_header('Content-Description', 'File Transfer')
self.set_header('Content-Type', 'application/octet-stream')
self.set_header('Content-Transfer-Encoding', 'binary')
self.set_header('Expires', '0')
self.set_header('X-Accel-Redirect', '/protected/' + relpath)
self.set_header('Content-Disposition',
'attachment; filename=%s' % fname)
self.finish()
Use basename instead of os.path.split(...)[-1]
|
from tornado.web import authenticated
from os.path import basename
from .base_handlers import BaseHandler
from qiita_pet.exceptions import QiitaPetAuthorizationError
from qiita_db.util import filepath_id_to_rel_path
from qiita_db.meta_util import get_accessible_filepath_ids
class DownloadHandler(BaseHandler):
@authenticated
def get(self, filepath_id):
filepath_id = int(filepath_id)
# Check access to file
accessible_filepaths = get_accessible_filepath_ids(self.current_user)
if filepath_id not in accessible_filepaths:
raise QiitaPetAuthorizationError(
self.current_user, 'filepath id %d' % filepath_id)
relpath = filepath_id_to_rel_path(filepath_id)
fname = basename(relpath)
self.set_header('Content-Description', 'File Transfer')
self.set_header('Content-Type', 'application/octet-stream')
self.set_header('Content-Transfer-Encoding', 'binary')
self.set_header('Expires', '0')
self.set_header('X-Accel-Redirect', '/protected/' + relpath)
self.set_header('Content-Disposition',
'attachment; filename=%s' % fname)
self.finish()
|
<commit_before>from tornado.web import authenticated
from os.path import split
from .base_handlers import BaseHandler
from qiita_pet.exceptions import QiitaPetAuthorizationError
from qiita_db.util import filepath_id_to_rel_path
from qiita_db.meta_util import get_accessible_filepath_ids
class DownloadHandler(BaseHandler):
@authenticated
def get(self, filepath_id):
filepath_id = int(filepath_id)
# Check access to file
accessible_filepaths = get_accessible_filepath_ids(self.current_user)
if filepath_id not in accessible_filepaths:
raise QiitaPetAuthorizationError(
self.current_user, 'filepath id %d' % filepath_id)
relpath = filepath_id_to_rel_path(filepath_id)
fname = split(relpath)[-1]
self.set_header('Content-Description', 'File Transfer')
self.set_header('Content-Type', 'application/octet-stream')
self.set_header('Content-Transfer-Encoding', 'binary')
self.set_header('Expires', '0')
self.set_header('X-Accel-Redirect', '/protected/' + relpath)
self.set_header('Content-Disposition',
'attachment; filename=%s' % fname)
self.finish()
<commit_msg>Use basename instead of os.path.split(...)[-1]<commit_after>
|
from tornado.web import authenticated
from os.path import basename
from .base_handlers import BaseHandler
from qiita_pet.exceptions import QiitaPetAuthorizationError
from qiita_db.util import filepath_id_to_rel_path
from qiita_db.meta_util import get_accessible_filepath_ids
class DownloadHandler(BaseHandler):
@authenticated
def get(self, filepath_id):
filepath_id = int(filepath_id)
# Check access to file
accessible_filepaths = get_accessible_filepath_ids(self.current_user)
if filepath_id not in accessible_filepaths:
raise QiitaPetAuthorizationError(
self.current_user, 'filepath id %d' % filepath_id)
relpath = filepath_id_to_rel_path(filepath_id)
fname = basename(relpath)
self.set_header('Content-Description', 'File Transfer')
self.set_header('Content-Type', 'application/octet-stream')
self.set_header('Content-Transfer-Encoding', 'binary')
self.set_header('Expires', '0')
self.set_header('X-Accel-Redirect', '/protected/' + relpath)
self.set_header('Content-Disposition',
'attachment; filename=%s' % fname)
self.finish()
|
from tornado.web import authenticated
from os.path import split
from .base_handlers import BaseHandler
from qiita_pet.exceptions import QiitaPetAuthorizationError
from qiita_db.util import filepath_id_to_rel_path
from qiita_db.meta_util import get_accessible_filepath_ids
class DownloadHandler(BaseHandler):
@authenticated
def get(self, filepath_id):
filepath_id = int(filepath_id)
# Check access to file
accessible_filepaths = get_accessible_filepath_ids(self.current_user)
if filepath_id not in accessible_filepaths:
raise QiitaPetAuthorizationError(
self.current_user, 'filepath id %d' % filepath_id)
relpath = filepath_id_to_rel_path(filepath_id)
fname = split(relpath)[-1]
self.set_header('Content-Description', 'File Transfer')
self.set_header('Content-Type', 'application/octet-stream')
self.set_header('Content-Transfer-Encoding', 'binary')
self.set_header('Expires', '0')
self.set_header('X-Accel-Redirect', '/protected/' + relpath)
self.set_header('Content-Disposition',
'attachment; filename=%s' % fname)
self.finish()
Use basename instead of os.path.split(...)[-1]from tornado.web import authenticated
from os.path import basename
from .base_handlers import BaseHandler
from qiita_pet.exceptions import QiitaPetAuthorizationError
from qiita_db.util import filepath_id_to_rel_path
from qiita_db.meta_util import get_accessible_filepath_ids
class DownloadHandler(BaseHandler):
@authenticated
def get(self, filepath_id):
filepath_id = int(filepath_id)
# Check access to file
accessible_filepaths = get_accessible_filepath_ids(self.current_user)
if filepath_id not in accessible_filepaths:
raise QiitaPetAuthorizationError(
self.current_user, 'filepath id %d' % filepath_id)
relpath = filepath_id_to_rel_path(filepath_id)
fname = basename(relpath)
self.set_header('Content-Description', 'File Transfer')
self.set_header('Content-Type', 'application/octet-stream')
self.set_header('Content-Transfer-Encoding', 'binary')
self.set_header('Expires', '0')
self.set_header('X-Accel-Redirect', '/protected/' + relpath)
self.set_header('Content-Disposition',
'attachment; filename=%s' % fname)
self.finish()
|
<commit_before>from tornado.web import authenticated
from os.path import split
from .base_handlers import BaseHandler
from qiita_pet.exceptions import QiitaPetAuthorizationError
from qiita_db.util import filepath_id_to_rel_path
from qiita_db.meta_util import get_accessible_filepath_ids
class DownloadHandler(BaseHandler):
@authenticated
def get(self, filepath_id):
filepath_id = int(filepath_id)
# Check access to file
accessible_filepaths = get_accessible_filepath_ids(self.current_user)
if filepath_id not in accessible_filepaths:
raise QiitaPetAuthorizationError(
self.current_user, 'filepath id %d' % filepath_id)
relpath = filepath_id_to_rel_path(filepath_id)
fname = split(relpath)[-1]
self.set_header('Content-Description', 'File Transfer')
self.set_header('Content-Type', 'application/octet-stream')
self.set_header('Content-Transfer-Encoding', 'binary')
self.set_header('Expires', '0')
self.set_header('X-Accel-Redirect', '/protected/' + relpath)
self.set_header('Content-Disposition',
'attachment; filename=%s' % fname)
self.finish()
<commit_msg>Use basename instead of os.path.split(...)[-1]<commit_after>from tornado.web import authenticated
from os.path import basename
from .base_handlers import BaseHandler
from qiita_pet.exceptions import QiitaPetAuthorizationError
from qiita_db.util import filepath_id_to_rel_path
from qiita_db.meta_util import get_accessible_filepath_ids
class DownloadHandler(BaseHandler):
@authenticated
def get(self, filepath_id):
filepath_id = int(filepath_id)
# Check access to file
accessible_filepaths = get_accessible_filepath_ids(self.current_user)
if filepath_id not in accessible_filepaths:
raise QiitaPetAuthorizationError(
self.current_user, 'filepath id %d' % filepath_id)
relpath = filepath_id_to_rel_path(filepath_id)
fname = basename(relpath)
self.set_header('Content-Description', 'File Transfer')
self.set_header('Content-Type', 'application/octet-stream')
self.set_header('Content-Transfer-Encoding', 'binary')
self.set_header('Expires', '0')
self.set_header('X-Accel-Redirect', '/protected/' + relpath)
self.set_header('Content-Disposition',
'attachment; filename=%s' % fname)
self.finish()
|
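The two spellings in the Qiita change above are equivalent for paths like these — os.path.split() returns a (head, tail) pair and basename() is exactly that tail — so the edit is purely about readability. A quick check with an illustrative path:
from os.path import basename, split

relpath = 'study_1/raw_data/seqs.fastq'  # made-up example path
assert split(relpath) == ('study_1/raw_data', 'seqs.fastq')
assert split(relpath)[-1] == basename(relpath) == 'seqs.fastq'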
d20e1a1fba39b688a21bfbf02fe32a2039232949
|
lib/speedway.py
|
lib/speedway.py
|
#!/usr/bin/python2.4
#
# Copyright 2011 Google Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Speedway iptables generator.
This is a subclass of the Iptables library. The primary difference is
that this library produces 'iptables-restore' compatible output."""
__author__ = 'watson@google.com (Tony Watson)'
import iptables
class Term(iptables.Term):
"""Generate Iptables policy terms."""
_PLATFORM = 'speedway'
_PREJUMP_FORMAT = None
_POSTJUMP_FORMAT = '-A %s -j %s'
class Speedway(iptables.Iptables):
"""Generates filters and terms from provided policy object."""
_PLATFORM = 'speedway'
_DEFAULT_PROTOCOL = 'all'
_SUFFIX = '.ipt'
_RENDER_PREFIX = '*filter'
_RENDER_SUFFIX = 'COMMIT'
_DEFAULTACTION_FORMAT = ':%s %s'
_TERM = Term
|
#!/usr/bin/python2.4
#
# Copyright 2011 Google Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Speedway iptables generator.
This is a subclass of the Iptables library. The primary difference is
that this library produces 'iptables-restore' compatible output."""
__author__ = 'watson@google.com (Tony Watson)'
import iptables
class Term(iptables.Term):
"""Generate Iptables policy terms."""
_PLATFORM = 'speedway'
_PREJUMP_FORMAT = None
_POSTJUMP_FORMAT = '-A %s -j %s'
class Speedway(iptables.Iptables):
"""Generates filters and terms from provided policy object."""
_PLATFORM = 'speedway'
_DEFAULT_PROTOCOL = 'all'
_SUFFIX = '.ipt'
_RENDER_PREFIX = '*filter'
_RENDER_SUFFIX = 'COMMIT\n'
_DEFAULTACTION_FORMAT = ':%s %s'
_TERM = Term
|
Append newline after 'COMMIT' in iptables policies. Without newline, the iptables-restore command complains.
|
Append newline after 'COMMIT' in iptables policies.
Without newline, the iptables-restore command complains.
|
Python
|
apache-2.0
|
FlorianHeigl/capirca,haykeh/capirca,FlorianHeigl/capirca,haykeh/capirca
|
#!/usr/bin/python2.4
#
# Copyright 2011 Google Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Speedway iptables generator.
This is a subclass of the Iptables library. The primary difference is
that this library produces 'iptables-restore' compatible output."""
__author__ = 'watson@google.com (Tony Watson)'
import iptables
class Term(iptables.Term):
"""Generate Iptables policy terms."""
_PLATFORM = 'speedway'
_PREJUMP_FORMAT = None
_POSTJUMP_FORMAT = '-A %s -j %s'
class Speedway(iptables.Iptables):
"""Generates filters and terms from provided policy object."""
_PLATFORM = 'speedway'
_DEFAULT_PROTOCOL = 'all'
_SUFFIX = '.ipt'
_RENDER_PREFIX = '*filter'
_RENDER_SUFFIX = 'COMMIT'
_DEFAULTACTION_FORMAT = ':%s %s'
_TERM = Term
Append newline after 'COMMIT' in iptables policies.
Without newline, the iptables-restore command complains.
|
#!/usr/bin/python2.4
#
# Copyright 2011 Google Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Speedway iptables generator.
This is a subclass of the Iptables library. The primary difference is
that this library produces 'iptables-restore' compatible output."""
__author__ = 'watson@google.com (Tony Watson)'
import iptables
class Term(iptables.Term):
"""Generate Iptables policy terms."""
_PLATFORM = 'speedway'
_PREJUMP_FORMAT = None
_POSTJUMP_FORMAT = '-A %s -j %s'
class Speedway(iptables.Iptables):
"""Generates filters and terms from provided policy object."""
_PLATFORM = 'speedway'
_DEFAULT_PROTOCOL = 'all'
_SUFFIX = '.ipt'
_RENDER_PREFIX = '*filter'
_RENDER_SUFFIX = 'COMMIT\n'
_DEFAULTACTION_FORMAT = ':%s %s'
_TERM = Term
|
<commit_before>#!/usr/bin/python2.4
#
# Copyright 2011 Google Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Speedway iptables generator.
This is a subclass of the Iptables library. The primary difference is
that this library produces 'iptables-restore' compatible output."""
__author__ = 'watson@google.com (Tony Watson)'
import iptables
class Term(iptables.Term):
"""Generate Iptables policy terms."""
_PLATFORM = 'speedway'
_PREJUMP_FORMAT = None
_POSTJUMP_FORMAT = '-A %s -j %s'
class Speedway(iptables.Iptables):
"""Generates filters and terms from provided policy object."""
_PLATFORM = 'speedway'
_DEFAULT_PROTOCOL = 'all'
_SUFFIX = '.ipt'
_RENDER_PREFIX = '*filter'
_RENDER_SUFFIX = 'COMMIT'
_DEFAULTACTION_FORMAT = ':%s %s'
_TERM = Term
<commit_msg>Append newline after 'COMMIT' in iptables policies.
Without newline, the iptables-restore command complains.<commit_after>
|
#!/usr/bin/python2.4
#
# Copyright 2011 Google Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Speedway iptables generator.
This is a subclass of the Iptables library. The primary difference is
that this library produces 'iptables-restore' compatible output."""
__author__ = 'watson@google.com (Tony Watson)'
import iptables
class Term(iptables.Term):
"""Generate Iptables policy terms."""
_PLATFORM = 'speedway'
_PREJUMP_FORMAT = None
_POSTJUMP_FORMAT = '-A %s -j %s'
class Speedway(iptables.Iptables):
"""Generates filters and terms from provided policy object."""
_PLATFORM = 'speedway'
_DEFAULT_PROTOCOL = 'all'
_SUFFIX = '.ipt'
_RENDER_PREFIX = '*filter'
_RENDER_SUFFIX = 'COMMIT\n'
_DEFAULTACTION_FORMAT = ':%s %s'
_TERM = Term
|
#!/usr/bin/python2.4
#
# Copyright 2011 Google Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Speedway iptables generator.
This is a subclass of the Iptables library. The primary difference is
that this library produces 'iptables-restore' compatible output."""
__author__ = 'watson@google.com (Tony Watson)'
import iptables
class Term(iptables.Term):
"""Generate Iptables policy terms."""
_PLATFORM = 'speedway'
_PREJUMP_FORMAT = None
_POSTJUMP_FORMAT = '-A %s -j %s'
class Speedway(iptables.Iptables):
"""Generates filters and terms from provided policy object."""
_PLATFORM = 'speedway'
_DEFAULT_PROTOCOL = 'all'
_SUFFIX = '.ipt'
_RENDER_PREFIX = '*filter'
_RENDER_SUFFIX = 'COMMIT'
_DEFAULTACTION_FORMAT = ':%s %s'
_TERM = Term
Append newline after 'COMMIT' in iptables policies.
Without newline, the iptables-restore command complains.#!/usr/bin/python2.4
#
# Copyright 2011 Google Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Speedway iptables generator.
This is a subclass of the Iptables library. The primary difference is
that this library produces 'iptables-restore' compatible output."""
__author__ = 'watson@google.com (Tony Watson)'
import iptables
class Term(iptables.Term):
"""Generate Iptables policy terms."""
_PLATFORM = 'speedway'
_PREJUMP_FORMAT = None
_POSTJUMP_FORMAT = '-A %s -j %s'
class Speedway(iptables.Iptables):
"""Generates filters and terms from provided policy object."""
_PLATFORM = 'speedway'
_DEFAULT_PROTOCOL = 'all'
_SUFFIX = '.ipt'
_RENDER_PREFIX = '*filter'
_RENDER_SUFFIX = 'COMMIT\n'
_DEFAULTACTION_FORMAT = ':%s %s'
_TERM = Term
|
<commit_before>#!/usr/bin/python2.4
#
# Copyright 2011 Google Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Speedway iptables generator.
This is a subclass of the Iptables library. The primary difference is
that this library produces 'iptables-restore' compatible output."""
__author__ = 'watson@google.com (Tony Watson)'
import iptables
class Term(iptables.Term):
"""Generate Iptables policy terms."""
_PLATFORM = 'speedway'
_PREJUMP_FORMAT = None
_POSTJUMP_FORMAT = '-A %s -j %s'
class Speedway(iptables.Iptables):
"""Generates filters and terms from provided policy object."""
_PLATFORM = 'speedway'
_DEFAULT_PROTOCOL = 'all'
_SUFFIX = '.ipt'
_RENDER_PREFIX = '*filter'
_RENDER_SUFFIX = 'COMMIT'
_DEFAULTACTION_FORMAT = ':%s %s'
_TERM = Term
<commit_msg>Append newline after 'COMMIT' in iptables policies.
Without newline, the iptables-restore command complains.<commit_after>#!/usr/bin/python2.4
#
# Copyright 2011 Google Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Speedway iptables generator.
This is a subclass of the Iptables library. The primary difference is
that this library produces 'iptables-restore' compatible output."""
__author__ = 'watson@google.com (Tony Watson)'
import iptables
class Term(iptables.Term):
"""Generate Iptables policy terms."""
_PLATFORM = 'speedway'
_PREJUMP_FORMAT = None
_POSTJUMP_FORMAT = '-A %s -j %s'
class Speedway(iptables.Iptables):
"""Generates filters and terms from provided policy object."""
_PLATFORM = 'speedway'
_DEFAULT_PROTOCOL = 'all'
_SUFFIX = '.ipt'
_RENDER_PREFIX = '*filter'
_RENDER_SUFFIX = 'COMMIT\n'
_DEFAULTACTION_FORMAT = ':%s %s'
_TERM = Term
|
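The Speedway fix above works because iptables-restore reads line-oriented input and expects COMMIT to arrive as a complete, newline-terminated line. A rough sketch of the difference in the rendered policy — the join below is a guess at how a renderer might assemble prefix, rules, and suffix, not the actual Iptables base-class code:
prefix, rules = '*filter', [':INPUT DROP [0:0]', '-A INPUT -j ACCEPT']
old_policy = '\n'.join([prefix] + rules + ['COMMIT'])
new_policy = '\n'.join([prefix] + rules + ['COMMIT\n'])
assert not old_policy.endswith('\n')    # iptables-restore complains about this form
assert new_policy.endswith('COMMIT\n')  # safe to pipe into iptables-restore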
23a3f80d44592d4a86878f29eaa873d727ad31ee
|
london_commute_alert.py
|
london_commute_alert.py
|
import datetime
import os
import requests
def update():
requests.packages.urllib3.disable_warnings()
resp = requests.get('http://api.tfl.gov.uk/Line/Mode/tube/Status').json()
return {el['id']: el['lineStatuses'][0]['statusSeverityDescription']
for el in resp}
def email(lines):
with open('curl_raw_command.sh') as f:
raw_command = f.read()
if lines:
subject = 'Tube delays for commute'
body = ', '.join(': '.join([line.capitalize(), s]) for line, s in lines.items())
else:
subject = 'Good service for commute'
body = 'Good service on all lines'
# We must have this running on PythonAnywhere - Monday to Sunday.
# Ignore Saturday and Sunday
if datetime.date.today().isoweekday() in range(1, 6):
os.system(raw_command.format(subject=subject, body=body))
def main():
commute_lines = ['metropolitan', 'jubilee', 'central']
status = update()
delays = {c: status[c] for c in commute_lines if status[c] != 'Good Service'}
email(delays)
if __name__ == '__main__':
main()
|
import datetime
import os
import requests
def update():
requests.packages.urllib3.disable_warnings()
resp = requests.get('http://api.tfl.gov.uk/Line/Mode/tube/Status').json()
return {el['id']: el['lineStatuses'][0]['statusSeverityDescription'] for el in resp}
def email(lines):
with open('curl_raw_command.sh') as f:
raw_command = f.read()
if lines:
subject = 'Tube delays for commute'
body = ', '.join(': '.join([line.capitalize(), s]) for line, s in lines.items())
else:
subject = 'Good service for commute'
body = 'Good service on all lines'
# We must have this running on PythonAnywhere - Monday to Sunday.
# Ignore Saturday and Sunday
if datetime.date.today().isoweekday() in range(1, 6):
os.system(raw_command.format(subject=subject, body=body))
def main():
commute_lines = ['metropolitan', 'jubilee', 'central']
status = update()
delays = {c: status[c] for c in commute_lines if status[c] != 'Good Service'}
email(delays)
if __name__ == '__main__':
main()
|
Correct for problem on webfaction
|
Correct for problem on webfaction
|
Python
|
mit
|
noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit
|
import datetime
import os
import requests
def update():
requests.packages.urllib3.disable_warnings()
resp = requests.get('http://api.tfl.gov.uk/Line/Mode/tube/Status').json()
return {el['id']: el['lineStatuses'][0]['statusSeverityDescription']
for el in resp}
def email(lines):
with open('curl_raw_command.sh') as f:
raw_command = f.read()
if lines:
subject = 'Tube delays for commute'
body = ', '.join(': '.join([line.capitalize(), s]) for line, s in lines.items())
else:
subject = 'Good service for commute'
body = 'Good service on all lines'
# We must have this running on PythonAnywhere - Monday to Sunday.
# Ignore Saturday and Sunday
if datetime.date.today().isoweekday() in range(1, 6):
os.system(raw_command.format(subject=subject, body=body))
def main():
commute_lines = ['metropolitan', 'jubilee', 'central']
status = update()
delays = {c: status[c] for c in commute_lines if status[c] != 'Good Service'}
email(delays)
if __name__ == '__main__':
main()
Correct for problem on webfaction
|
import datetime
import os
import requests
def update():
requests.packages.urllib3.disable_warnings()
resp = requests.get('http://api.tfl.gov.uk/Line/Mode/tube/Status').json()
return {el['id']: el['lineStatuses'][0]['statusSeverityDescription'] for el in resp}
def email(lines):
with open('curl_raw_command.sh') as f:
raw_command = f.read()
if lines:
subject = 'Tube delays for commute'
body = ', '.join(': '.join([line.capitalize(), s]) for line, s in lines.items())
else:
subject = 'Good service for commute'
body = 'Good service on all lines'
# We must have this running on PythonAnywhere - Monday to Sunday.
# Ignore Saturday and Sunday
if datetime.date.today().isoweekday() in range(1, 6):
os.system(raw_command.format(subject=subject, body=body))
def main():
commute_lines = ['metropolitan', 'jubilee', 'central']
status = update()
delays = {c: status[c] for c in commute_lines if status[c] != 'Good Service'}
email(delays)
if __name__ == '__main__':
main()
|
<commit_before>import datetime
import os
import requests
def update():
requests.packages.urllib3.disable_warnings()
resp = requests.get('http://api.tfl.gov.uk/Line/Mode/tube/Status').json()
return {el['id']: el['lineStatuses'][0]['statusSeverityDescription']
for el in resp}
def email(lines):
with open('curl_raw_command.sh') as f:
raw_command = f.read()
if lines:
subject = 'Tube delays for commute'
body = ', '.join(': '.join([line.capitalize(), s]) for line, s in lines.items())
else:
subject = 'Good service for commute'
body = 'Good service on all lines'
# We must have this running on PythonAnywhere - Monday to Sunday.
# Ignore Saturday and Sunday
if datetime.date.today().isoweekday() in range(1, 6):
os.system(raw_command.format(subject=subject, body=body))
def main():
commute_lines = ['metropolitan', 'jubilee', 'central']
status = update()
delays = {c: status[c] for c in commute_lines if status[c] != 'Good Service'}
email(delays)
if __name__ == '__main__':
main()
<commit_msg>Correct for problem on webfaction<commit_after>
|
import datetime
import os
import requests
def update():
requests.packages.urllib3.disable_warnings()
resp = requests.get('http://api.tfl.gov.uk/Line/Mode/tube/Status').json()
return {el['id']: el['lineStatuses'][0]['statusSeverityDescription'] for el in resp}
def email(lines):
with open('curl_raw_command.sh') as f:
raw_command = f.read()
if lines:
subject = 'Tube delays for commute'
body = ', '.join(': '.join([line.capitalize(), s]) for line, s in lines.items())
else:
subject = 'Good service for commute'
body = 'Good service on all lines'
# We must have this running on PythonAnywhere - Monday to Sunday.
# Ignore Saturday and Sunday
if datetime.date.today().isoweekday() in range(1, 6):
os.system(raw_command.format(subject=subject, body=body))
def main():
commute_lines = ['metropolitan', 'jubilee', 'central']
status = update()
delays = {c: status[c] for c in commute_lines if status[c] != 'Good Service'}
email(delays)
if __name__ == '__main__':
main()
|
import datetime
import os
import requests
def update():
requests.packages.urllib3.disable_warnings()
resp = requests.get('http://api.tfl.gov.uk/Line/Mode/tube/Status').json()
return {el['id']: el['lineStatuses'][0]['statusSeverityDescription']
for el in resp}
def email(lines):
with open('curl_raw_command.sh') as f:
raw_command = f.read()
if lines:
subject = 'Tube delays for commute'
body = ', '.join(': '.join([line.capitalize(), s]) for line, s in lines.items())
else:
subject = 'Good service for commute'
body = 'Good service on all lines'
# We must have this running on PythonAnywhere - Monday to Sunday.
# Ignore Saturday and Sunday
if datetime.date.today().isoweekday() in range(1, 6):
os.system(raw_command.format(subject=subject, body=body))
def main():
commute_lines = ['metropolitan', 'jubilee', 'central']
status = update()
delays = {c: status[c] for c in commute_lines if status[c] != 'Good Service'}
email(delays)
if __name__ == '__main__':
main()
Correct for problem on webfactionimport datetime
import os
import requests
def update():
requests.packages.urllib3.disable_warnings()
resp = requests.get('http://api.tfl.gov.uk/Line/Mode/tube/Status').json()
return {el['id']: el['lineStatuses'][0]['statusSeverityDescription'] for el in resp}
def email(lines):
with open('curl_raw_command.sh') as f:
raw_command = f.read()
if lines:
subject = 'Tube delays for commute'
body = ', '.join(': '.join([line.capitalize(), s]) for line, s in lines.items())
else:
subject = 'Good service for commute'
body = 'Good service on all lines'
# We must have this running on PythonAnywhere - Monday to Sunday.
# Ignore Saturday and Sunday
if datetime.date.today().isoweekday() in range(1, 6):
os.system(raw_command.format(subject=subject, body=body))
def main():
commute_lines = ['metropolitan', 'jubilee', 'central']
status = update()
delays = {c: status[c] for c in commute_lines if status[c] != 'Good Service'}
email(delays)
if __name__ == '__main__':
main()
|
<commit_before>import datetime
import os
import requests
def update():
requests.packages.urllib3.disable_warnings()
resp = requests.get('http://api.tfl.gov.uk/Line/Mode/tube/Status').json()
return {el['id']: el['lineStatuses'][0]['statusSeverityDescription']
for el in resp}
def email(lines):
with open('curl_raw_command.sh') as f:
raw_command = f.read()
if lines:
subject = 'Tube delays for commute'
body = ', '.join(': '.join([line.capitalize(), s]) for line, s in lines.items())
else:
subject = 'Good service for commute'
body = 'Good service on all lines'
# We must have this running on PythonAnywhere - Monday to Sunday.
# Ignore Saturday and Sunday
if datetime.date.today().isoweekday() in range(1, 6):
os.system(raw_command.format(subject=subject, body=body))
def main():
commute_lines = ['metropolitan', 'jubilee', 'central']
status = update()
delays = {c: status[c] for c in commute_lines if status[c] != 'Good Service'}
email(delays)
if __name__ == '__main__':
main()
<commit_msg>Correct for problem on webfaction<commit_after>import datetime
import os
import requests
def update():
requests.packages.urllib3.disable_warnings()
resp = requests.get('http://api.tfl.gov.uk/Line/Mode/tube/Status').json()
return {el['id']: el['lineStatuses'][0]['statusSeverityDescription'] for el in resp}
def email(lines):
with open('curl_raw_command.sh') as f:
raw_command = f.read()
if lines:
subject = 'Tube delays for commute'
body = ', '.join(': '.join([line.capitalize(), s]) for line, s in lines.items())
else:
subject = 'Good service for commute'
body = 'Good service on all lines'
# We must have this running on PythonAnywhere - Monday to Sunday.
# Ignore Saturday and Sunday
if datetime.date.today().isoweekday() in range(1, 6):
os.system(raw_command.format(subject=subject, body=body))
def main():
commute_lines = ['metropolitan', 'jubilee', 'central']
status = update()
delays = {c: status[c] for c in commute_lines if status[c] != 'Good Service'}
email(delays)
if __name__ == '__main__':
main()
|
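One detail worth unpacking in the commute script above: datetime.date.isoweekday() numbers Monday as 1 through Sunday as 7, so the range(1, 6) membership test restricts the alert to weekdays:
import datetime

assert datetime.date(2015, 6, 1).isoweekday() == 1   # a Monday: alert would be sent
assert datetime.date(2015, 6, 6).isoweekday() == 6   # a Saturday
assert datetime.date(2015, 6, 6).isoweekday() not in range(1, 6)  # weekend skipped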
3504baa66ada0bde545ed2b111b71335f23d1838
|
PyTestStub/Templates.py
|
PyTestStub/Templates.py
|
functionTest = '''
def test_%s(self):
raise NotImplementedError() #TODO: test %s'''
classTest = '''class %sTest(unittest.TestCase):
"""
%s
"""
@staticmethod
def setUpClass(cls):
pass #TODO
@staticmethod
def tearDownClass(cls):
pass #TODO
def setUp(self):
pass #TODO
def tearDown(self):
pass #TODO
%s'''
unitTestBase = '''
import unittest
%s
'''
|
functionTest = '''
def test_%s(self):
raise NotImplementedError() #TODO: test %s'''
classTest = '''class %sTest(unittest.TestCase):
"""
%s
"""
@classmethod
def setUpClass(cls):
pass #TODO
@classmethod
def tearDownClass(cls):
pass #TODO
def setUp(self):
pass #TODO
def tearDown(self):
pass #TODO
%s'''
unitTestBase = '''
import unittest
%s
'''
|
Fix error in unit test template
|
Fix error in unit test template
|
Python
|
mit
|
AgalmicVentures/PyTestStub
|
functionTest = '''
def test_%s(self):
raise NotImplementedError() #TODO: test %s'''
classTest = '''class %sTest(unittest.TestCase):
"""
%s
"""
@staticmethod
def setUpClass(cls):
pass #TODO
@staticmethod
def tearDownClass(cls):
pass #TODO
def setUp(self):
pass #TODO
def tearDown(self):
pass #TODO
%s'''
unitTestBase = '''
import unittest
%s
'''
Fix error in unit test template
|
functionTest = '''
def test_%s(self):
raise NotImplementedError() #TODO: test %s'''
classTest = '''class %sTest(unittest.TestCase):
"""
%s
"""
@classmethod
def setUpClass(cls):
pass #TODO
@classmethod
def tearDownClass(cls):
pass #TODO
def setUp(self):
pass #TODO
def tearDown(self):
pass #TODO
%s'''
unitTestBase = '''
import unittest
%s
'''
|
<commit_before>
functionTest = '''
def test_%s(self):
raise NotImplementedError() #TODO: test %s'''
classTest = '''class %sTest(unittest.TestCase):
"""
%s
"""
@staticmethod
def setUpClass(cls):
pass #TODO
@staticmethod
def tearDownClass(cls):
pass #TODO
def setUp(self):
pass #TODO
def tearDown(self):
pass #TODO
%s'''
unitTestBase = '''
import unittest
%s
'''
<commit_msg>Fix error in unit test template<commit_after>
|
functionTest = '''
def test_%s(self):
raise NotImplementedError() #TODO: test %s'''
classTest = '''class %sTest(unittest.TestCase):
"""
%s
"""
@classmethod
def setUpClass(cls):
pass #TODO
@classmethod
def tearDownClass(cls):
pass #TODO
def setUp(self):
pass #TODO
def tearDown(self):
pass #TODO
%s'''
unitTestBase = '''
import unittest
%s
'''
|
functionTest = '''
def test_%s(self):
raise NotImplementedError() #TODO: test %s'''
classTest = '''class %sTest(unittest.TestCase):
"""
%s
"""
@staticmethod
def setUpClass(cls):
pass #TODO
@staticmethod
def tearDownClass(cls):
pass #TODO
def setUp(self):
pass #TODO
def tearDown(self):
pass #TODO
%s'''
unitTestBase = '''
import unittest
%s
'''
Fix error in unit test template
functionTest = '''
def test_%s(self):
raise NotImplementedError() #TODO: test %s'''
classTest = '''class %sTest(unittest.TestCase):
"""
%s
"""
@classmethod
def setUpClass(cls):
pass #TODO
@classmethod
def tearDownClass(cls):
pass #TODO
def setUp(self):
pass #TODO
def tearDown(self):
pass #TODO
%s'''
unitTestBase = '''
import unittest
%s
'''
|
<commit_before>
functionTest = '''
def test_%s(self):
raise NotImplementedError() #TODO: test %s'''
classTest = '''class %sTest(unittest.TestCase):
"""
%s
"""
@staticmethod
def setUpClass(cls):
pass #TODO
@staticmethod
def tearDownClass(cls):
pass #TODO
def setUp(self):
pass #TODO
def tearDown(self):
pass #TODO
%s'''
unitTestBase = '''
import unittest
%s
'''
<commit_msg>Fix error in unit test template<commit_after>
functionTest = '''
def test_%s(self):
raise NotImplementedError() #TODO: test %s'''
classTest = '''class %sTest(unittest.TestCase):
"""
%s
"""
@classmethod
def setUpClass(cls):
pass #TODO
@classmethod
def tearDownClass(cls):
pass #TODO
def setUp(self):
pass #TODO
def tearDown(self):
pass #TODO
%s'''
unitTestBase = '''
import unittest
%s
'''
|
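The template bug fixed above is subtle: unittest invokes cls.setUpClass() with no explicit argument, so a @staticmethod declared with a cls parameter fails with a TypeError when the suite runs, whereas @classmethod binds cls automatically. A self-contained demonstration of the corrected template shape:
import unittest

class TemplateDemoTest(unittest.TestCase):
    @classmethod
    def setUpClass(cls):   # the classmethod descriptor supplies cls
        cls.shared = 'ready'

    @classmethod
    def tearDownClass(cls):
        del cls.shared

    def test_shared_state(self):
        self.assertEqual(self.shared, 'ready')

suite = unittest.defaultTestLoader.loadTestsFromTestCase(TemplateDemoTest)
unittest.TextTestRunner().run(suite)  # one test, passes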
816ceb19e224f23bf3ba2fd06f7f3e2296ee5622
|
asp/__init__.py
|
asp/__init__.py
|
# From http://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package
# Author: James Antill (http://stackoverflow.com/users/10314/james-antill)
__version__ = '0.1.3.0'
__version_info__ = tuple([ int(num) for num in __version__.split('.')])
class SpecializationError(Exception):
"""
Exception that caused specialization not to occur.
Attributes:
msg -- the message/explanation to the user
phase -- which phase of specialization caused the error
"""
def __init__(self, msg, phase="Unknown phase"):
self.msg = msg
|
# From http://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package
# Author: James Antill (http://stackoverflow.com/users/10314/james-antill)
__version__ = '0.1.3.1'
__version_info__ = tuple([ int(num) for num in __version__.split('.')])
class SpecializationError(Exception):
"""
Exception that caused specialization not to occur.
Attributes:
msg -- the message/explanation to the user
phase -- which phase of specialization caused the error
"""
def __init__(self, msg, phase="Unknown phase"):
self.msg = msg
|
Bump version number for avro fix.
|
Bump version number for avro fix.
|
Python
|
bsd-3-clause
|
shoaibkamil/asp,shoaibkamil/asp,shoaibkamil/asp
|
# From http://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package
# Author: James Antill (http://stackoverflow.com/users/10314/james-antill)
__version__ = '0.1.3.0'
__version_info__ = tuple([ int(num) for num in __version__.split('.')])
class SpecializationError(Exception):
"""
Exception that caused specialization not to occur.
Attributes:
msg -- the message/explanation to the user
phase -- which phase of specialization caused the error
"""
def __init__(self, msg, phase="Unknown phase"):
self.msg = msg
Bump version number for avro fix.
|
# From http://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package
# Author: James Antill (http://stackoverflow.com/users/10314/james-antill)
__version__ = '0.1.3.1'
__version_info__ = tuple([ int(num) for num in __version__.split('.')])
class SpecializationError(Exception):
"""
Exception that caused specialization not to occur.
Attributes:
msg -- the message/explanation to the user
phase -- which phase of specialization caused the error
"""
def __init__(self, msg, phase="Unknown phase"):
self.msg = msg
|
<commit_before># From http://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package
# Author: James Antill (http://stackoverflow.com/users/10314/james-antill)
__version__ = '0.1.3.0'
__version_info__ = tuple([ int(num) for num in __version__.split('.')])
class SpecializationError(Exception):
"""
Exception that caused specialization not to occur.
Attributes:
msg -- the message/explanation to the user
phase -- which phase of specialization caused the error
"""
def __init__(self, msg, phase="Unknown phase"):
self.msg = msg
<commit_msg>Bump version number for avro fix.<commit_after>
|
# From http://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package
# Author: James Antill (http://stackoverflow.com/users/10314/james-antill)
__version__ = '0.1.3.1'
__version_info__ = tuple([ int(num) for num in __version__.split('.')])
class SpecializationError(Exception):
"""
Exception that caused specialization not to occur.
Attributes:
msg -- the message/explanation to the user
phase -- which phase of specialization caused the error
"""
def __init__(self, msg, phase="Unknown phase"):
self.msg = msg
|
# From http://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package
# Author: James Antill (http://stackoverflow.com/users/10314/james-antill)
__version__ = '0.1.3.0'
__version_info__ = tuple([ int(num) for num in __version__.split('.')])
class SpecializationError(Exception):
"""
Exception that caused specialization not to occur.
Attributes:
msg -- the message/explanation to the user
phase -- which phase of specialization caused the error
"""
def __init__(self, msg, phase="Unknown phase"):
self.msg = msg
Bump version number for avro fix.# From http://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package
# Author: James Antill (http://stackoverflow.com/users/10314/james-antill)
__version__ = '0.1.3.1'
__version_info__ = tuple([ int(num) for num in __version__.split('.')])
class SpecializationError(Exception):
"""
Exception that caused specialization not to occur.
Attributes:
msg -- the message/explanation to the user
phase -- which phase of specialization caused the error
"""
def __init__(self, msg, phase="Unknown phase"):
self.msg = msg
|
<commit_before># From http://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package
# Author: James Antill (http://stackoverflow.com/users/10314/james-antill)
__version__ = '0.1.3.0'
__version_info__ = tuple([ int(num) for num in __version__.split('.')])
class SpecializationError(Exception):
"""
Exception that caused specialization not to occur.
Attributes:
msg -- the message/explanation to the user
phase -- which phase of specialization caused the error
"""
def __init__(self, msg, phase="Unknown phase"):
self.msg = msg
<commit_msg>Bump version number for avro fix.<commit_after># From http://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package
# Author: James Antill (http://stackoverflow.com/users/10314/james-antill)
__version__ = '0.1.3.1'
__version_info__ = tuple([ int(num) for num in __version__.split('.')])
class SpecializationError(Exception):
"""
Exception that caused specialization not to occur.
Attributes:
msg -- the message/explanation to the user
phase -- which phase of specialization caused the error
"""
def __init__(self, msg, phase="Unknown phase"):
self.msg = msg
|
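A useful property of the __version_info__ tuple kept alongside the string above: Python compares tuples element-wise, so releases order correctly even where plain string comparison would not:
__version__ = '0.1.3.1'
__version_info__ = tuple(int(num) for num in __version__.split('.'))
assert __version_info__ == (0, 1, 3, 1)
assert __version_info__ > (0, 1, 3, 0)  # newer than the pre-bump release
assert not ('0.1.3.10' > '0.1.3.9')     # string comparison gets ordering wrong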
598e21a7c397c0c429a78f008a36e5800c1b23e3
|
conftest.py
|
conftest.py
|
import os
import dj_database_url
import pytest
from django.conf import settings
pytest_plugins = [
"saleor.tests.fixtures",
"saleor.plugins.tests.fixtures",
"saleor.graphql.tests.fixtures",
"saleor.graphql.channel.tests.fixtures",
"saleor.graphql.account.tests.benchmark.fixtures",
"saleor.graphql.order.tests.benchmark.fixtures",
"saleor.graphql.giftcard.tests.benchmark.fixtures",
"saleor.graphql.webhook.tests.benchmark.fixtures",
"saleor.plugins.webhook.tests.subscription_webhooks.fixtures",
]
if os.environ.get("PYTEST_DB_URL"):
@pytest.fixture(scope="session")
def django_db_setup():
settings.DATABASES = {
settings.DATABASE_CONNECTION_DEFAULT_NAME: dj_database_url.config(
default=os.environ.get("PYTEST_DB_URL"), conn_max_age=600
),
}
|
import os
import dj_database_url
import pytest
from django.conf import settings
pytest_plugins = [
"saleor.tests.fixtures",
"saleor.plugins.tests.fixtures",
"saleor.graphql.tests.fixtures",
"saleor.graphql.channel.tests.fixtures",
"saleor.graphql.account.tests.benchmark.fixtures",
"saleor.graphql.order.tests.benchmark.fixtures",
"saleor.graphql.giftcard.tests.benchmark.fixtures",
"saleor.graphql.webhook.tests.benchmark.fixtures",
"saleor.plugins.webhook.tests.subscription_webhooks.fixtures",
]
if os.environ.get("PYTEST_DB_URL"):
@pytest.fixture(scope="session")
def django_db_setup():
settings.DATABASES = {
settings.DATABASE_CONNECTION_DEFAULT_NAME: dj_database_url.config(
env="PYTEST_DB_URL", conn_max_age=600
),
}
|
Fix picking invalid env variable for tests
|
Fix picking invalid env variable for tests
|
Python
|
bsd-3-clause
|
mociepka/saleor,mociepka/saleor,mociepka/saleor
|
import os
import dj_database_url
import pytest
from django.conf import settings
pytest_plugins = [
"saleor.tests.fixtures",
"saleor.plugins.tests.fixtures",
"saleor.graphql.tests.fixtures",
"saleor.graphql.channel.tests.fixtures",
"saleor.graphql.account.tests.benchmark.fixtures",
"saleor.graphql.order.tests.benchmark.fixtures",
"saleor.graphql.giftcard.tests.benchmark.fixtures",
"saleor.graphql.webhook.tests.benchmark.fixtures",
"saleor.plugins.webhook.tests.subscription_webhooks.fixtures",
]
if os.environ.get("PYTEST_DB_URL"):
@pytest.fixture(scope="session")
def django_db_setup():
settings.DATABASES = {
settings.DATABASE_CONNECTION_DEFAULT_NAME: dj_database_url.config(
default=os.environ.get("PYTEST_DB_URL"), conn_max_age=600
),
}
Fix picking invalid env variable for tests
|
import os
import dj_database_url
import pytest
from django.conf import settings
pytest_plugins = [
"saleor.tests.fixtures",
"saleor.plugins.tests.fixtures",
"saleor.graphql.tests.fixtures",
"saleor.graphql.channel.tests.fixtures",
"saleor.graphql.account.tests.benchmark.fixtures",
"saleor.graphql.order.tests.benchmark.fixtures",
"saleor.graphql.giftcard.tests.benchmark.fixtures",
"saleor.graphql.webhook.tests.benchmark.fixtures",
"saleor.plugins.webhook.tests.subscription_webhooks.fixtures",
]
if os.environ.get("PYTEST_DB_URL"):
@pytest.fixture(scope="session")
def django_db_setup():
settings.DATABASES = {
settings.DATABASE_CONNECTION_DEFAULT_NAME: dj_database_url.config(
env="PYTEST_DB_URL", conn_max_age=600
),
}
|
<commit_before>import os
import dj_database_url
import pytest
from django.conf import settings
pytest_plugins = [
"saleor.tests.fixtures",
"saleor.plugins.tests.fixtures",
"saleor.graphql.tests.fixtures",
"saleor.graphql.channel.tests.fixtures",
"saleor.graphql.account.tests.benchmark.fixtures",
"saleor.graphql.order.tests.benchmark.fixtures",
"saleor.graphql.giftcard.tests.benchmark.fixtures",
"saleor.graphql.webhook.tests.benchmark.fixtures",
"saleor.plugins.webhook.tests.subscription_webhooks.fixtures",
]
if os.environ.get("PYTEST_DB_URL"):
@pytest.fixture(scope="session")
def django_db_setup():
settings.DATABASES = {
settings.DATABASE_CONNECTION_DEFAULT_NAME: dj_database_url.config(
default=os.environ.get("PYTEST_DB_URL"), conn_max_age=600
),
}
<commit_msg>Fix picking invalid env variable for tests<commit_after>
|
import os
import dj_database_url
import pytest
from django.conf import settings
pytest_plugins = [
"saleor.tests.fixtures",
"saleor.plugins.tests.fixtures",
"saleor.graphql.tests.fixtures",
"saleor.graphql.channel.tests.fixtures",
"saleor.graphql.account.tests.benchmark.fixtures",
"saleor.graphql.order.tests.benchmark.fixtures",
"saleor.graphql.giftcard.tests.benchmark.fixtures",
"saleor.graphql.webhook.tests.benchmark.fixtures",
"saleor.plugins.webhook.tests.subscription_webhooks.fixtures",
]
if os.environ.get("PYTEST_DB_URL"):
@pytest.fixture(scope="session")
def django_db_setup():
settings.DATABASES = {
settings.DATABASE_CONNECTION_DEFAULT_NAME: dj_database_url.config(
env="PYTEST_DB_URL", conn_max_age=600
),
}
|
import os
import dj_database_url
import pytest
from django.conf import settings
pytest_plugins = [
"saleor.tests.fixtures",
"saleor.plugins.tests.fixtures",
"saleor.graphql.tests.fixtures",
"saleor.graphql.channel.tests.fixtures",
"saleor.graphql.account.tests.benchmark.fixtures",
"saleor.graphql.order.tests.benchmark.fixtures",
"saleor.graphql.giftcard.tests.benchmark.fixtures",
"saleor.graphql.webhook.tests.benchmark.fixtures",
"saleor.plugins.webhook.tests.subscription_webhooks.fixtures",
]
if os.environ.get("PYTEST_DB_URL"):
@pytest.fixture(scope="session")
def django_db_setup():
settings.DATABASES = {
settings.DATABASE_CONNECTION_DEFAULT_NAME: dj_database_url.config(
default=os.environ.get("PYTEST_DB_URL"), conn_max_age=600
),
}
Fix picking invalid env variable for testsimport os
import dj_database_url
import pytest
from django.conf import settings
pytest_plugins = [
"saleor.tests.fixtures",
"saleor.plugins.tests.fixtures",
"saleor.graphql.tests.fixtures",
"saleor.graphql.channel.tests.fixtures",
"saleor.graphql.account.tests.benchmark.fixtures",
"saleor.graphql.order.tests.benchmark.fixtures",
"saleor.graphql.giftcard.tests.benchmark.fixtures",
"saleor.graphql.webhook.tests.benchmark.fixtures",
"saleor.plugins.webhook.tests.subscription_webhooks.fixtures",
]
if os.environ.get("PYTEST_DB_URL"):
@pytest.fixture(scope="session")
def django_db_setup():
settings.DATABASES = {
settings.DATABASE_CONNECTION_DEFAULT_NAME: dj_database_url.config(
env="PYTEST_DB_URL", conn_max_age=600
),
}
|
<commit_before>import os
import dj_database_url
import pytest
from django.conf import settings
pytest_plugins = [
"saleor.tests.fixtures",
"saleor.plugins.tests.fixtures",
"saleor.graphql.tests.fixtures",
"saleor.graphql.channel.tests.fixtures",
"saleor.graphql.account.tests.benchmark.fixtures",
"saleor.graphql.order.tests.benchmark.fixtures",
"saleor.graphql.giftcard.tests.benchmark.fixtures",
"saleor.graphql.webhook.tests.benchmark.fixtures",
"saleor.plugins.webhook.tests.subscription_webhooks.fixtures",
]
if os.environ.get("PYTEST_DB_URL"):
@pytest.fixture(scope="session")
def django_db_setup():
settings.DATABASES = {
settings.DATABASE_CONNECTION_DEFAULT_NAME: dj_database_url.config(
default=os.environ.get("PYTEST_DB_URL"), conn_max_age=600
),
}
<commit_msg>Fix picking invalid env variable for tests<commit_after>import os
import dj_database_url
import pytest
from django.conf import settings
pytest_plugins = [
"saleor.tests.fixtures",
"saleor.plugins.tests.fixtures",
"saleor.graphql.tests.fixtures",
"saleor.graphql.channel.tests.fixtures",
"saleor.graphql.account.tests.benchmark.fixtures",
"saleor.graphql.order.tests.benchmark.fixtures",
"saleor.graphql.giftcard.tests.benchmark.fixtures",
"saleor.graphql.webhook.tests.benchmark.fixtures",
"saleor.plugins.webhook.tests.subscription_webhooks.fixtures",
]
if os.environ.get("PYTEST_DB_URL"):
@pytest.fixture(scope="session")
def django_db_setup():
settings.DATABASES = {
settings.DATABASE_CONNECTION_DEFAULT_NAME: dj_database_url.config(
env="PYTEST_DB_URL", conn_max_age=600
),
}
|
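The Saleor fix above addresses a precedence problem: dj_database_url.config() reads the DATABASE_URL environment variable by default, and its default= argument is only a fallback when that variable is unset — so a stray DATABASE_URL in the environment would silently override PYTEST_DB_URL. Passing env='PYTEST_DB_URL' points config() at the intended variable. A sketch with made-up connection URLs:
import os
import dj_database_url

os.environ['DATABASE_URL'] = 'postgres://app@localhost/app_db'
os.environ['PYTEST_DB_URL'] = 'postgres://test@localhost/test_db'

old_style = dj_database_url.config(default=os.environ.get('PYTEST_DB_URL'))
new_style = dj_database_url.config(env='PYTEST_DB_URL')
assert old_style['NAME'] == 'app_db'    # DATABASE_URL won over the intended URL
assert new_style['NAME'] == 'test_db'   # the test database, as intended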
3643c0c4959f5d27c5faab2533fa5c3a7952cbb8
|
test_titanic.py
|
test_titanic.py
|
import titanic
buildername = 'Ubuntu HW 12.04 x64 mozilla-inbound pgo talos svgr'
branch = 'mozilla-inbound'
delta = 30
# NOTE: This API call might take a long time to run.
# It usually takes around a minute, but may take longer.
revList, buildList = titanic.runAnalysis(
branch, buildername, '6ffcd2030ed8', delta)
# NOTE: runAnalysis argument 'delta' is optional. If not provided, it will default to 7.
# See example below:
# revList, buildList = titanic.runAnalysis(
# branch, buildername, 'ceff7d54080f')
for buildRev in buildList:
print titanic.getBuildCommands(branch, buildername, buildRev)
for rev in revList:
print titanic.getTriggerCommands(branch, buildername, rev)
# Uncomment the following lines if you want to test the trigger functionality
# of the code
# print 'Building Rev ' + str(buildList[0])
# titanic.triggerBuild(branch, buildername, buildList[0])
# print 'You should find the status at this URL: \
# https://secure.pub.build.mozilla.org/buildapi/self-serve/mozilla-inbound/rev/' \
# + str(buildList[0])
|
import titanic
import sys
buildername = 'Windows 7 32-bit mozilla-central debug test mochitest-1'
branch = 'mozilla-central'
delta = 30
revision = 'cd2acc7ab2f8'
revList, buildList = titanic.runAnalysis(
branch, buildername, revision, delta)
for rev in buildList:
if not (titanic.isBuildPending(branch, buildername, rev) \
or titanic.isBuildRunning(branch, buildername, rev)):
titanic.triggerBuild(branch, buildername, rev)
else:
if not titanic.isBuildSuccessful(branch, buildername, revision):
print 'Builds are yet to be completed for revision ' + rev + ' ...'
print 'If the builds have been running for a very long time make sure the builds have not failed!'
if buildList != []:
sys.exit(1)
print 'All builds are completed. Starting Jobs...'
for rev in revList:
if not (titanic.isJobPending(branch, buildername, rev) \
or titanic.isJobRunning(branch, buildername, rev)):
titanic.triggerJob(branch, buildername, rev)
else:
print 'Job has already been triggered'
|
Update Sample Code for Backfill
|
Update Sample Code for Backfill
Update Sample Code that could be used to automatically
trigger builds and jobs
|
Python
|
mpl-2.0
|
gakiwate/titanic
|
import titanic
buildername = 'Ubuntu HW 12.04 x64 mozilla-inbound pgo talos svgr'
branch = 'mozilla-inbound'
delta = 30
# NOTE: This API call might take a long time to run.
# It usually takes around a minute, but may take longer.
revList, buildList = titanic.runAnalysis(
branch, buildername, '6ffcd2030ed8', delta)
# NOTE: runAnalysis argument 'delta' is optional. If not provided, it will default to 7.
# See example below:
# revList, buildList = titanic.runAnalysis(
# branch, buildername, 'ceff7d54080f')
for buildRev in buildList:
print titanic.getBuildCommands(branch, buildername, buildRev)
for rev in revList:
print titanic.getTriggerCommands(branch, buildername, rev)
# Uncomment the following lines if you want to test the trigger functionality
# of the code
# print 'Building Rev ' + str(buildList[0])
# titanic.triggerBuild(branch, buildername, buildList[0])
# print 'You should find the status at this URL: \
# https://secure.pub.build.mozilla.org/buildapi/self-serve/mozilla-inbound/rev/' \
# + str(buildList[0])
Update Sample Code for Backfill
Update Sample Code that could be used to automatically
trigger builds and jobs
|
import titanic
import sys
buildername = 'Windows 7 32-bit mozilla-central debug test mochitest-1'
branch = 'mozilla-central'
delta = 30
revision = 'cd2acc7ab2f8'
revList, buildList = titanic.runAnalysis(
branch, buildername, revision, delta)
for rev in buildList:
if not (titanic.isBuildPending(branch, buildername, rev) \
or titanic.isBuildRunning(branch, buildername, rev)):
titanic.triggerBuild(branch, buildername, rev)
else:
if not titanic.isBuildSuccessful(branch, buildername, revision):
print 'Builds are yet to be completed for revision ' + rev + ' ...'
print 'If the builds have been running for a very long time make sure the builds have not failed!'
if buildList != []:
sys.exit(1)
print 'All builds are completed. Starting Jobs...'
for rev in revList:
if not (titanic.isJobPending(branch, buildername, rev) \
or titanic.isJobRunning(branch, buildername, rev)):
titanic.triggerJob(branch, buildername, rev)
else:
print 'Job has already been triggered'
|
<commit_before>import titanic
buildername = 'Ubuntu HW 12.04 x64 mozilla-inbound pgo talos svgr'
branch = 'mozilla-inbound'
delta = 30
# NOTE: This API might take long to run.
# Usually takes around a minute to run, may take longer
revList, buildList = titanic.runAnalysis(
branch, buildername, '6ffcd2030ed8', delta)
# NOTE: runAnalysis argument 'delta' is optional. If not provided, it will default to 7.
# See example below:
# revList, buildList = titanic.runAnalysis(
# branch, buildername, 'ceff7d54080f')
for buildRev in buildList:
print titanic.getBuildCommands(branch, buildername, buildRev)
for rev in revList:
print titanic.getTriggerCommands(branch, buildername, rev)
# Uncomment the following lines if you want to test the trigger functionality
# of the code
# print 'Building Rev ' + str(buildList[0])
# titanic.triggerBuild(branch, buildername, buildList[0])
# print 'You should find the status at this URL: \
# https://secure.pub.build.mozilla.org/buildapi/self-serve/mozilla-inbound/rev/' \
# + str(buildList[0])
<commit_msg>Update Sample Code for Backfill
Update Sample Code that could be used to automatically
trigger builds and jobs<commit_after>
|
import titanic
import sys
buildername = 'Windows 7 32-bit mozilla-central debug test mochitest-1'
branch = 'mozilla-central'
delta = 30
revision = 'cd2acc7ab2f8'
revList, buildList = titanic.runAnalysis(
branch, buildername, revision, delta)
for rev in buildList:
if not (titanic.isBuildPending(branch, buildername, rev) \
or titanic.isBuildRunning(branch, buildername, rev)):
titanic.triggerBuild(branch, buildername, rev)
else:
if not titanic.isBuildSuccessful(branch, buildername, revision):
print 'Builds are yet to be completed for revision ' + rev + ' ...'
print 'If the builds have been running for a very long time make sure the builds have not failed!'
if buildList != []:
sys.exit(1)
print 'All builds are completed. Starting Jobs...'
for rev in revList:
if not (titanic.isJobPending(branch, buildername, rev) \
or titanic.isJobRunning(branch, buildername, rev)):
titanic.triggerJob(branch, buildername, rev)
else:
print 'Job has already been triggered'
|
import titanic
buildername = 'Ubuntu HW 12.04 x64 mozilla-inbound pgo talos svgr'
branch = 'mozilla-inbound'
delta = 30
# NOTE: This API might take long to run.
# Usually takes around a minute to run, may take longer
revList, buildList = titanic.runAnalysis(
branch, buildername, '6ffcd2030ed8', delta)
# NOTE: runAnalysis argument 'delta' is optional. If not provided, it will default to 7.
# See example below:
# revList, buildList = titanic.runAnalysis(
# branch, buildername, 'ceff7d54080f')
for buildRev in buildList:
print titanic.getBuildCommands(branch, buildername, buildRev)
for rev in revList:
print titanic.getTriggerCommands(branch, buildername, rev)
# Uncomment the following lines if you want to test the trigger functionality
# of the code
# print 'Building Rev ' + str(buildList[0])
# titanic.triggerBuild(branch, buildername, buildList[0])
# print 'You should find the status at this URL: \
# https://secure.pub.build.mozilla.org/buildapi/self-serve/mozilla-inbound/rev/' \
# + str(buildList[0])
Update Sample Code for Backfill
Update Sample Code that could be used to automatically
trigger builds and jobsimport titanic
import sys
buildername = 'Windows 7 32-bit mozilla-central debug test mochitest-1'
branch = 'mozilla-central'
delta = 30
revision = 'cd2acc7ab2f8'
revList, buildList = titanic.runAnalysis(
branch, buildername, revision, delta)
for rev in buildList:
if not (titanic.isBuildPending(branch, buildername, rev) \
or titanic.isBuildRunning(branch, buildername, rev)):
titanic.triggerBuild(branch, buildername, rev)
else:
if not titanic.isBuildSuccessful(branch, buildername, revision):
print 'Builds are yet to be completed for revision ' + rev + ' ...'
print 'If the builds have been running for a very long time make sure the builds have not failed!'
if buildList != []:
sys.exit(1)
print 'All builds are completed. Starting Jobs...'
for rev in revList:
if not (titanic.isJobPending(branch, buildername, rev) \
or titanic.isJobRunning(branch, buildername, rev)):
titanic.triggerJob(branch, buildername, rev)
else:
print 'Job has already been triggered'
|
<commit_before>import titanic
buildername = 'Ubuntu HW 12.04 x64 mozilla-inbound pgo talos svgr'
branch = 'mozilla-inbound'
delta = 30
# NOTE: This API might take long to run.
# Usually takes around a minute to run, may take longer
revList, buildList = titanic.runAnalysis(
branch, buildername, '6ffcd2030ed8', delta)
# NOTE: runAnalysis argument 'delta' is optional. If not provided, it will default to 7.
# See example below:
# revList, buildList = titanic.runAnalysis(
# branch, buildername, 'ceff7d54080f')
for buildRev in buildList:
print titanic.getBuildCommands(branch, buildername, buildRev)
for rev in revList:
print titanic.getTriggerCommands(branch, buildername, rev)
# Uncomment the following lines if you want to test the trigger functionality
# of the code
# print 'Building Rev ' + str(buildList[0])
# titanic.triggerBuild(branch, buildername, buildList[0])
# print 'You should find the status at this URL: \
# https://secure.pub.build.mozilla.org/buildapi/self-serve/mozilla-inbound/rev/' \
# + str(buildList[0])
<commit_msg>Update Sample Code for Backfill
Update Sample Code that could be used to automatically
trigger builds and jobs<commit_after>import titanic
import sys
buildername = 'Windows 7 32-bit mozilla-central debug test mochitest-1'
branch = 'mozilla-central'
delta = 30
revision = 'cd2acc7ab2f8'
revList, buildList = titanic.runAnalysis(
branch, buildername, revision, delta)
for rev in buildList:
if not (titanic.isBuildPending(branch, buildername, rev) \
or titanic.isBuildRunning(branch, buildername, rev)):
titanic.triggerBuild(branch, buildername, rev)
else:
if not titanic.isBuildSuccessful(branch, buildername, revision):
print 'Builds are yet to be completed for revision ' + rev + ' ...'
print 'If the builds have been running for a very long time make sure the builds have not failed!'
if buildList != []:
sys.exit(1)
print 'All builds are completed. Starting Jobs...'
for rev in revList:
if not (titanic.isJobPending(branch, buildername, rev) \
or titanic.isJobRunning(branch, buildername, rev)):
titanic.triggerJob(branch, buildername, rev)
else:
print 'Job has already been triggered'
|
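The backfill sample above runs in two phases: trigger any missing builds and exit nonzero so an outer scheduler re-runs the script, then trigger the test jobs once every build exists. A self-contained sketch of that gate; the callables are stand-ins for titanic's API, not its real functions:

import sys

def backfill(build_revs, job_revs, build_done, job_started, trigger_build, trigger_job):
    # Phase 1: request any builds that have not completed yet.
    pending = [rev for rev in build_revs if not build_done(rev)]
    for rev in pending:
        trigger_build(rev)
    if pending:
        sys.exit(1)  # nonzero exit tells the wrapper (e.g. cron) to retry later
    # Phase 2: all builds landed, so start whichever jobs are not running yet.
    for rev in job_revs:
        if not job_started(rev):
            trigger_job(rev)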
c265f3a24ba26800a15ddf54ad3aa7515695fb3f
|
app/__init__.py
|
app/__init__.py
|
from flask import Flask
from .extensions import db
from . import views
def create_app(config):
""" Create a Flask App base on a config obejct. """
app = Flask(__name__)
app.config.from_object(config)
register_extensions(app)
register_views(app)
# @app.route("/")
# def index():
# return "Hello World!"
return app
def register_extensions(app):
""" Register all extensions with the app. """
db.init_app(app)
def register_views(app):
""" Register all views class. """
views.Main.register(app)
views.Post.register(app)
|
from flask import Flask
from flask_user import UserManager
from . import views
from .extensions import db, mail, toolbar
from .models import DataStoreAdapter, UserModel
def create_app(config):
""" Create a Flask App base on a config obejct. """
app = Flask(__name__)
app.config.from_object(config)
register_extensions(app)
register_views(app)
return app
def register_extensions(app):
""" Register all extensions with the app. """
db.init_app(app)
mail.init_app(app)
toolbar.init_app(app)
# Cannot put it in extension files
# because it would create a circular import.
db_adapter = DataStoreAdapter(db, UserModel)
user_manager = UserManager(db_adapter, app)
def register_views(app):
""" Register all views class. """
views.Main.register(app)
views.Post.register(app)
|
Update app init to use flask user, mail and toolbar ext
|
Update app init to use flask user, mail and toolbar ext
|
Python
|
mit
|
oldani/nanodegree-blog,oldani/nanodegree-blog,oldani/nanodegree-blog
|
from flask import Flask
from .extensions import db
from . import views
def create_app(config):
""" Create a Flask App base on a config obejct. """
app = Flask(__name__)
app.config.from_object(config)
register_extensions(app)
register_views(app)
# @app.route("/")
# def index():
# return "Hello World!"
return app
def register_extensions(app):
""" Register all extensions with the app. """
db.init_app(app)
def register_views(app):
""" Register all views class. """
views.Main.register(app)
views.Post.register(app)
Update app init to use flask user, mail and toolbar ext
|
from flask import Flask
from flask_user import UserManager
from . import views
from .extensions import db, mail, toolbar
from .models import DataStoreAdapter, UserModel
def create_app(config):
""" Create a Flask App base on a config obejct. """
app = Flask(__name__)
app.config.from_object(config)
register_extensions(app)
register_views(app)
return app
def register_extensions(app):
""" Register all extensions with the app. """
db.init_app(app)
mail.init_app(app)
toolbar.init_app(app)
# Cannot put it in extension files
# because it would create a circular import.
db_adapter = DataStoreAdapter(db, UserModel)
user_manager = UserManager(db_adapter, app)
def register_views(app):
""" Register all views class. """
views.Main.register(app)
views.Post.register(app)
|
<commit_before>from flask import Flask
from .extensions import db
from . import views
def create_app(config):
""" Create a Flask App base on a config obejct. """
app = Flask(__name__)
app.config.from_object(config)
register_extensions(app)
register_views(app)
# @app.route("/")
# def index():
# return "Hello World!"
return app
def register_extensions(app):
""" Register all extensions with the app. """
db.init_app(app)
def register_views(app):
""" Register all views class. """
views.Main.register(app)
views.Post.register(app)
<commit_msg>Update app init to use flask user, mail and toolbar ext<commit_after>
|
from flask import Flask
from flask_user import UserManager
from . import views
from .extensions import db, mail, toolbar
from .models import DataStoreAdapter, UserModel
def create_app(config):
""" Create a Flask App base on a config obejct. """
app = Flask(__name__)
app.config.from_object(config)
register_extensions(app)
register_views(app)
return app
def register_extensions(app):
""" Register all extensions with the app. """
db.init_app(app)
mail.init_app(app)
toolbar.init_app(app)
# Cannot put it in extension files
# because it would create a circular import.
db_adapter = DataStoreAdapter(db, UserModel)
user_manager = UserManager(db_adapter, app)
def register_views(app):
""" Register all views class. """
views.Main.register(app)
views.Post.register(app)
|
from flask import Flask
from .extensions import db
from . import views
def create_app(config):
""" Create a Flask App base on a config obejct. """
app = Flask(__name__)
app.config.from_object(config)
register_extensions(app)
register_views(app)
# @app.route("/")
# def index():
# return "Hello World!"
return app
def register_extensions(app):
""" Register all extensions with the app. """
db.init_app(app)
def register_views(app):
""" Register all views class. """
views.Main.register(app)
views.Post.register(app)
Update app init to use flask user, mail and toolbar extfrom flask import Flask
from flask_user import UserManager
from . import views
from .extensions import db, mail, toolbar
from .models import DataStoreAdapter, UserModel
def create_app(config):
""" Create a Flask App base on a config obejct. """
app = Flask(__name__)
app.config.from_object(config)
register_extensions(app)
register_views(app)
return app
def register_extensions(app):
""" Register all extensions with the app. """
db.init_app(app)
mail.init_app(app)
toolbar.init_app(app)
# Cannot put it in extension files
# because it would create a circular import.
db_adapter = DataStoreAdapter(db, UserModel)
user_manager = UserManager(db_adapter, app)
def register_views(app):
""" Register all views class. """
views.Main.register(app)
views.Post.register(app)
|
<commit_before>from flask import Flask
from .extensions import db
from . import views
def create_app(config):
""" Create a Flask App base on a config obejct. """
app = Flask(__name__)
app.config.from_object(config)
register_extensions(app)
register_views(app)
# @app.route("/")
# def index():
# return "Hello World!"
return app
def register_extensions(app):
""" Register all extensions with the app. """
db.init_app(app)
def register_views(app):
""" Register all views class. """
views.Main.register(app)
views.Post.register(app)
<commit_msg>Update app init to use flask user, mail and toolbar ext<commit_after>from flask import Flask
from flask_user import UserManager
from . import views
from .extensions import db, mail, toolbar
from .models import DataStoreAdapter, UserModel
def create_app(config):
""" Create a Flask App base on a config obejct. """
app = Flask(__name__)
app.config.from_object(config)
register_extensions(app)
register_views(app)
return app
def register_extensions(app):
""" Register all extensions with the app. """
db.init_app(app)
mail.init_app(app)
toolbar.init_app(app)
# Cannot put it in extension files
# because it would create a circular import.
db_adapter = DataStoreAdapter(db, UserModel)
user_manager = UserManager(db_adapter, app)
def register_views(app):
""" Register all views class. """
views.Main.register(app)
views.Post.register(app)
|
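The commit above binds Flask-User inside the factory because doing it at import time in extensions.py would pull in the models and loop back. A minimal sketch of that deferred-binding (app factory) pattern, with Flask-SQLAlchemy standing in as the extension:

from flask import Flask
from flask_sqlalchemy import SQLAlchemy

db = SQLAlchemy()  # created unbound at import time, extensions.py style

def create_app(config_object):
    app = Flask(__name__)
    app.config.from_object(config_object)
    db.init_app(app)  # the extension is bound to a concrete app only here
    return app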
e0bbdd0aac905aa0fc16837b63ce7545099e019f
|
controlcenter/app_settings.py
|
controlcenter/app_settings.py
|
import sys
from django.utils import six
# I know, it's ugly, but I just can't write:
# getattr(settings, 'CONTROLCENTER_CHARTIST_COLORS', 'default')
# This is way better: app_settings.CHARTIST_COLORS
# TODO: move to separate project
def proxy(attr, default):
def wrapper(self):
# It has to be most recent,
# to override settings in tests
from django.conf import settings
return getattr(settings, attr, default)
# Do I need this?
wrapper.__name__ = attr
return property(wrapper)
class AppSettingsMeta(type):
def __new__(mcs, name, bases, attrs):
prefix = name.upper() + '_'
for attr, value in attrs.items():
if not attr.startswith('__'):
attrs[attr] = proxy(prefix + attr, value)
abstract = attrs.pop('__abstract__', False)
cls = super(AppSettingsMeta, mcs).__new__(mcs, name, bases, attrs)
if not abstract:
# http://mail.python.org/pipermail/python-ideas/2012-May/
# 014969.html
ins = cls()
ins.__name__ = __name__
sys.modules[__name__] = ins
return cls
class AppSettings(six.with_metaclass(AppSettingsMeta)):
__abstract__ = True
class ControlCenter(AppSettings):
DASHBOARDS = []
CHARTIST_COLORS = 'default'
SHARP = '#'
|
import sys
from django.utils import six
# I know, it's ugly, but I just can't write:
# getattr(settings, 'CONTROLCENTER_CHARTIST_COLORS', 'default')
# This is way better: app_settings.CHARTIST_COLORS
# TODO: move to separate project
def proxy(attr, default):
def wrapper(self):
# It has to be most recent,
# to override settings in tests
from django.conf import settings
return getattr(settings, attr, default)
# Do I need this?
wrapper.__name__ = attr
return property(wrapper)
class AppSettingsMeta(type):
def __new__(mcs, name, bases, attrs):
prefix = name.upper() + '_'
for attr, value in attrs.items():
if not attr.startswith('__'):
attrs[attr] = proxy(prefix + attr, value)
abstract = attrs.pop('__abstract__', False)
cls = super(AppSettingsMeta, mcs).__new__(mcs, name, bases, attrs)
if not abstract:
# http://mail.python.org/pipermail/python-ideas/2012-May/
# 014969.html
ins = cls()
ins.__name__ = ins.__module__
sys.modules[ins.__module__] = ins
return cls
class AppSettings(six.with_metaclass(AppSettingsMeta)):
__abstract__ = True
class ControlCenter(AppSettings):
DASHBOARDS = []
CHARTIST_COLORS = 'default'
SHARP = '#'
|
Replace local variable with class attribute
|
Replace local variable with class attribute
|
Python
|
bsd-3-clause
|
byashimov/django-controlcenter,byashimov/django-controlcenter,byashimov/django-controlcenter
|
import sys
from django.utils import six
# I know, it's ugly, but I just can't write:
# getattr(settings, 'CONTROLCENTER_CHARTIST_COLORS', 'default')
# This is way better: app_settings.CHARTIST_COLORS
# TODO: move to separate project
def proxy(attr, default):
def wrapper(self):
# It has to be most recent,
# to override settings in tests
from django.conf import settings
return getattr(settings, attr, default)
# Do I need this?
wrapper.__name__ = attr
return property(wrapper)
class AppSettingsMeta(type):
def __new__(mcs, name, bases, attrs):
prefix = name.upper() + '_'
for attr, value in attrs.items():
if not attr.startswith('__'):
attrs[attr] = proxy(prefix + attr, value)
abstract = attrs.pop('__abstract__', False)
cls = super(AppSettingsMeta, mcs).__new__(mcs, name, bases, attrs)
if not abstract:
# http://mail.python.org/pipermail/python-ideas/2012-May/
# 014969.html
ins = cls()
ins.__name__ = __name__
sys.modules[__name__] = ins
return cls
class AppSettings(six.with_metaclass(AppSettingsMeta)):
__abstract__ = True
class ControlCenter(AppSettings):
DASHBOARDS = []
CHARTIST_COLORS = 'default'
SHARP = '#'
Replace local variable with class attribute
|
import sys
from django.utils import six
# I know, it's ugly, but I just can't write:
# getattr(settings, 'CONTROLCENTER_CHARTIST_COLORS', 'default')
# This is way better: app_settings.CHARTIST_COLORS
# TODO: move to separate project
def proxy(attr, default):
def wrapper(self):
# It has to be most recent,
# to override settings in tests
from django.conf import settings
return getattr(settings, attr, default)
# Do I need this?
wrapper.__name__ = attr
return property(wrapper)
class AppSettingsMeta(type):
def __new__(mcs, name, bases, attrs):
prefix = name.upper() + '_'
for attr, value in attrs.items():
if not attr.startswith('__'):
attrs[attr] = proxy(prefix + attr, value)
abstract = attrs.pop('__abstract__', False)
cls = super(AppSettingsMeta, mcs).__new__(mcs, name, bases, attrs)
if not abstract:
# http://mail.python.org/pipermail/python-ideas/2012-May/
# 014969.html
ins = cls()
ins.__name__ = ins.__module__
sys.modules[ins.__module__] = ins
return cls
class AppSettings(six.with_metaclass(AppSettingsMeta)):
__abstract__ = True
class ControlCenter(AppSettings):
DASHBOARDS = []
CHARTIST_COLORS = 'default'
SHARP = '#'
|
<commit_before>import sys
from django.utils import six
# I know, it's ugly, but I just can't write:
# getattr(settings, 'CONTROLCENTER_CHARTIST_COLORS', 'default')
# This is way better: app_settings.CHARTIST_COLORS
# TODO: move to separate project
def proxy(attr, default):
def wrapper(self):
# It has to be most recent,
# to override settings in tests
from django.conf import settings
return getattr(settings, attr, default)
# Do I need this?
wrapper.__name__ = attr
return property(wrapper)
class AppSettingsMeta(type):
def __new__(mcs, name, bases, attrs):
prefix = name.upper() + '_'
for attr, value in attrs.items():
if not attr.startswith('__'):
attrs[attr] = proxy(prefix + attr, value)
abstract = attrs.pop('__abstract__', False)
cls = super(AppSettingsMeta, mcs).__new__(mcs, name, bases, attrs)
if not abstract:
# http://mail.python.org/pipermail/python-ideas/2012-May/
# 014969.html
ins = cls()
ins.__name__ = __name__
sys.modules[__name__] = ins
return cls
class AppSettings(six.with_metaclass(AppSettingsMeta)):
__abstract__ = True
class ControlCenter(AppSettings):
DASHBOARDS = []
CHARTIST_COLORS = 'default'
SHARP = '#'
<commit_msg>Replace local variable with class attribute<commit_after>
|
import sys
from django.utils import six
# I know, it's ugly, but I just can't write:
# getattr(settings, 'CONTROLCENTER_CHARTIST_COLORS', 'default')
# This is way better: app_settings.CHARTIST_COLORS
# TODO: move to separate project
def proxy(attr, default):
def wrapper(self):
# It has to be most recent,
# to override settings in tests
from django.conf import settings
return getattr(settings, attr, default)
# Do I need this?
wrapper.__name__ = attr
return property(wrapper)
class AppSettingsMeta(type):
def __new__(mcs, name, bases, attrs):
prefix = name.upper() + '_'
for attr, value in attrs.items():
if not attr.startswith('__'):
attrs[attr] = proxy(prefix + attr, value)
abstract = attrs.pop('__abstract__', False)
cls = super(AppSettingsMeta, mcs).__new__(mcs, name, bases, attrs)
if not abstract:
# http://mail.python.org/pipermail/python-ideas/2012-May/
# 014969.html
ins = cls()
ins.__name__ = ins.__module__
sys.modules[ins.__module__] = ins
return cls
class AppSettings(six.with_metaclass(AppSettingsMeta)):
__abstract__ = True
class ControlCenter(AppSettings):
DASHBOARDS = []
CHARTIST_COLORS = 'default'
SHARP = '#'
|
import sys
from django.utils import six
# I know, it's ugly, but I just can't write:
# getattr(settings, 'CONTROLCENTER_CHARTIST_COLORS', 'default')
# This is way better: app_settings.CHARTIST_COLORS
# TODO: move to separate project
def proxy(attr, default):
def wrapper(self):
# It has to be most recent,
# to override settings in tests
from django.conf import settings
return getattr(settings, attr, default)
# Do I need this?
wrapper.__name__ = attr
return property(wrapper)
class AppSettingsMeta(type):
def __new__(mcs, name, bases, attrs):
prefix = name.upper() + '_'
for attr, value in attrs.items():
if not attr.startswith('__'):
attrs[attr] = proxy(prefix + attr, value)
abstract = attrs.pop('__abstract__', False)
cls = super(AppSettingsMeta, mcs).__new__(mcs, name, bases, attrs)
if not abstract:
# http://mail.python.org/pipermail/python-ideas/2012-May/
# 014969.html
ins = cls()
ins.__name__ = __name__
sys.modules[__name__] = ins
return cls
class AppSettings(six.with_metaclass(AppSettingsMeta)):
__abstract__ = True
class ControlCenter(AppSettings):
DASHBOARDS = []
CHARTIST_COLORS = 'default'
SHARP = '#'
Replace local variable with class attributeimport sys
from django.utils import six
# I know, it's ugly, but I just can't write:
# getattr(settings, 'CONTROLCENTER_CHARTIST_COLORS', 'default')
# This is way better: app_settings.CHARTIST_COLORS
# TODO: move to separate project
def proxy(attr, default):
def wrapper(self):
# It has to be most recent,
# to override settings in tests
from django.conf import settings
return getattr(settings, attr, default)
# Do I need this?
wrapper.__name__ = attr
return property(wrapper)
class AppSettingsMeta(type):
def __new__(mcs, name, bases, attrs):
prefix = name.upper() + '_'
for attr, value in attrs.items():
if not attr.startswith('__'):
attrs[attr] = proxy(prefix + attr, value)
abstract = attrs.pop('__abstract__', False)
cls = super(AppSettingsMeta, mcs).__new__(mcs, name, bases, attrs)
if not abstract:
# http://mail.python.org/pipermail/python-ideas/2012-May/
# 014969.html
ins = cls()
ins.__name__ = ins.__module__
sys.modules[ins.__module__] = ins
return cls
class AppSettings(six.with_metaclass(AppSettingsMeta)):
__abstract__ = True
class ControlCenter(AppSettings):
DASHBOARDS = []
CHARTIST_COLORS = 'default'
SHARP = '#'
|
<commit_before>import sys
from django.utils import six
# I know, it's ugly, but I just can't write:
# getattr(settings, 'CONTROLCENTER_CHARTIST_COLORS', 'default')
# This is way better: app_settings.CHARTIST_COLORS
# TODO: move to separate project
def proxy(attr, default):
def wrapper(self):
# It has to be most recent,
# to override settings in tests
from django.conf import settings
return getattr(settings, attr, default)
# Do I need this?
wrapper.__name__ = attr
return property(wrapper)
class AppSettingsMeta(type):
def __new__(mcs, name, bases, attrs):
prefix = name.upper() + '_'
for attr, value in attrs.items():
if not attr.startswith('__'):
attrs[attr] = proxy(prefix + attr, value)
abstract = attrs.pop('__abstract__', False)
cls = super(AppSettingsMeta, mcs).__new__(mcs, name, bases, attrs)
if not abstract:
# http://mail.python.org/pipermail/python-ideas/2012-May/
# 014969.html
ins = cls()
ins.__name__ = __name__
sys.modules[__name__] = ins
return cls
class AppSettings(six.with_metaclass(AppSettingsMeta)):
__abstract__ = True
class ControlCenter(AppSettings):
DASHBOARDS = []
CHARTIST_COLORS = 'default'
SHARP = '#'
<commit_msg>Replace local variable with class attribute<commit_after>import sys
from django.utils import six
# I know, it's ugly, but I just can't write:
# getattr(settings, 'CONTROLCENTER_CHARTIST_COLORS', 'default')
# This is way better: app_settings.CHARTIST_COLORS
# TODO: move to separate project
def proxy(attr, default):
def wrapper(self):
# It has to be most recent,
# to override settings in tests
from django.conf import settings
return getattr(settings, attr, default)
# Do I need this?
wrapper.__name__ = attr
return property(wrapper)
class AppSettingsMeta(type):
def __new__(mcs, name, bases, attrs):
prefix = name.upper() + '_'
for attr, value in attrs.items():
if not attr.startswith('__'):
attrs[attr] = proxy(prefix + attr, value)
abstract = attrs.pop('__abstract__', False)
cls = super(AppSettingsMeta, mcs).__new__(mcs, name, bases, attrs)
if not abstract:
# http://mail.python.org/pipermail/python-ideas/2012-May/
# 014969.html
ins = cls()
ins.__name__ = ins.__module__
sys.modules[ins.__module__] = ins
return cls
class AppSettings(six.with_metaclass(AppSettingsMeta)):
__abstract__ = True
class ControlCenter(AppSettings):
DASHBOARDS = []
CHARTIST_COLORS = 'default'
SHARP = '#'
|
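The one-line change above matters once the metaclass is reused from another file: __name__ evaluated inside app_settings.py always names that module, whereas ins.__module__ names the module where the concrete settings class was defined, which is the sys.modules entry that should be swapped out. A stripped-down sketch of the module-replacement trick itself:

import sys

class _Settings:
    @property
    def chartist_colors(self):
        return "default"  # recomputed on every access, so tests can override

_ins = _Settings()
_ins.__name__ = _ins.__module__
# From here on, importing this module yields the instance, so attribute
# lookups go through the properties above instead of plain module globals.
sys.modules[_ins.__module__] = _ins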
d00377ae301163debec253b9261ea41eeaa0e176
|
src/dbbrankingparser/httpclient.py
|
src/dbbrankingparser/httpclient.py
|
"""
dbbrankingparser.httpclient
~~~~~~~~~~~~~~~~~~~~~~~~~~~
HTTP client utilities
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from urllib.request import Request, urlopen
USER_AGENT = (
'Mozilla/5.0 (X11; Linux x86_64; rv:38.0) '
'Gecko/20100101 Firefox/38.0 Iceweasel/38.6.0'
) # type: str
def assemble_url(league_id: int) -> str:
"""Assemble the ranking HTML's URL for the league with that ID."""
template = (
'http://www.basketball-bund.net/public/tabelle.jsp'
'?print=1'
'&viewDescKey=sport.dbb.views.TabellePublicView/index.jsp_'
'&liga_id={:d}'
)
return template.format(league_id)
def fetch_content(url: str) -> str:
"""Retrieve and return the content of that URL."""
request = _create_request(url)
return urlopen(request).read().decode('utf-8')
def _create_request(url: str) -> Request:
"""Create an HTTP GET request."""
headers = {'User-Agent': USER_AGENT}
return Request(url, headers=headers)
|
"""
dbbrankingparser.httpclient
~~~~~~~~~~~~~~~~~~~~~~~~~~~
HTTP client utilities
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from urllib.request import Request, urlopen
USER_AGENT = (
'Mozilla/5.0 (X11; Linux x86_64; rv:38.0) '
'Gecko/20100101 Firefox/38.0 Iceweasel/38.6.0'
) # type: str
def assemble_url(league_id: int) -> str:
"""Assemble the ranking HTML's URL for the league with that ID."""
template = (
'https://www.basketball-bund.net/public/tabelle.jsp'
'?print=1'
'&viewDescKey=sport.dbb.views.TabellePublicView/index.jsp_'
'&liga_id={:d}'
)
return template.format(league_id)
def fetch_content(url: str) -> str:
"""Retrieve and return the content of that URL."""
request = _create_request(url)
return urlopen(request).read().decode('utf-8')
def _create_request(url: str) -> Request:
"""Create an HTTP GET request."""
headers = {'User-Agent': USER_AGENT}
return Request(url, headers=headers)
|
Use HTTPS to retrieve ranking from DBB
|
Use HTTPS to retrieve ranking from DBB
|
Python
|
mit
|
homeworkprod/dbb-ranking-parser
|
"""
dbbrankingparser.httpclient
~~~~~~~~~~~~~~~~~~~~~~~~~~~
HTTP client utilities
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from urllib.request import Request, urlopen
USER_AGENT = (
'Mozilla/5.0 (X11; Linux x86_64; rv:38.0) '
'Gecko/20100101 Firefox/38.0 Iceweasel/38.6.0'
) # type: str
def assemble_url(league_id: int) -> str:
"""Assemble the ranking HTML's URL for the league with that ID."""
template = (
'http://www.basketball-bund.net/public/tabelle.jsp'
'?print=1'
'&viewDescKey=sport.dbb.views.TabellePublicView/index.jsp_'
'&liga_id={:d}'
)
return template.format(league_id)
def fetch_content(url: str) -> str:
"""Retrieve and return the content of that URL."""
request = _create_request(url)
return urlopen(request).read().decode('utf-8')
def _create_request(url: str) -> Request:
"""Create an HTTP GET request."""
headers = {'User-Agent': USER_AGENT}
return Request(url, headers=headers)
Use HTTPS to retrieve ranking from DBB
|
"""
dbbrankingparser.httpclient
~~~~~~~~~~~~~~~~~~~~~~~~~~~
HTTP client utilities
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from urllib.request import Request, urlopen
USER_AGENT = (
'Mozilla/5.0 (X11; Linux x86_64; rv:38.0) '
'Gecko/20100101 Firefox/38.0 Iceweasel/38.6.0'
) # type: str
def assemble_url(league_id: int) -> str:
"""Assemble the ranking HTML's URL for the league with that ID."""
template = (
'https://www.basketball-bund.net/public/tabelle.jsp'
'?print=1'
'&viewDescKey=sport.dbb.views.TabellePublicView/index.jsp_'
'&liga_id={:d}'
)
return template.format(league_id)
def fetch_content(url: str) -> str:
"""Retrieve and return the content of that URL."""
request = _create_request(url)
return urlopen(request).read().decode('utf-8')
def _create_request(url: str) -> Request:
"""Create an HTTP GET request."""
headers = {'User-Agent': USER_AGENT}
return Request(url, headers=headers)
|
<commit_before>"""
dbbrankingparser.httpclient
~~~~~~~~~~~~~~~~~~~~~~~~~~~
HTTP client utilities
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from urllib.request import Request, urlopen
USER_AGENT = (
'Mozilla/5.0 (X11; Linux x86_64; rv:38.0) '
'Gecko/20100101 Firefox/38.0 Iceweasel/38.6.0'
) # type: str
def assemble_url(league_id: int) -> str:
"""Assemble the ranking HTML's URL for the league with that ID."""
template = (
'http://www.basketball-bund.net/public/tabelle.jsp'
'?print=1'
'&viewDescKey=sport.dbb.views.TabellePublicView/index.jsp_'
'&liga_id={:d}'
)
return template.format(league_id)
def fetch_content(url: str) -> str:
"""Retrieve and return the content of that URL."""
request = _create_request(url)
return urlopen(request).read().decode('utf-8')
def _create_request(url: str) -> Request:
"""Create an HTTP GET request."""
headers = {'User-Agent': USER_AGENT}
return Request(url, headers=headers)
<commit_msg>Use HTTPS to retrieve ranking from DBB<commit_after>
|
"""
dbbrankingparser.httpclient
~~~~~~~~~~~~~~~~~~~~~~~~~~~
HTTP client utilities
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from urllib.request import Request, urlopen
USER_AGENT = (
'Mozilla/5.0 (X11; Linux x86_64; rv:38.0) '
'Gecko/20100101 Firefox/38.0 Iceweasel/38.6.0'
) # type: str
def assemble_url(league_id: int) -> str:
"""Assemble the ranking HTML's URL for the league with that ID."""
template = (
'https://www.basketball-bund.net/public/tabelle.jsp'
'?print=1'
'&viewDescKey=sport.dbb.views.TabellePublicView/index.jsp_'
'&liga_id={:d}'
)
return template.format(league_id)
def fetch_content(url: str) -> str:
"""Retrieve and return the content of that URL."""
request = _create_request(url)
return urlopen(request).read().decode('utf-8')
def _create_request(url: str) -> Request:
"""Create an HTTP GET request."""
headers = {'User-Agent': USER_AGENT}
return Request(url, headers=headers)
|
"""
dbbrankingparser.httpclient
~~~~~~~~~~~~~~~~~~~~~~~~~~~
HTTP client utilities
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from urllib.request import Request, urlopen
USER_AGENT = (
'Mozilla/5.0 (X11; Linux x86_64; rv:38.0) '
'Gecko/20100101 Firefox/38.0 Iceweasel/38.6.0'
) # type: str
def assemble_url(league_id: int) -> str:
"""Assemble the ranking HTML's URL for the league with that ID."""
template = (
'http://www.basketball-bund.net/public/tabelle.jsp'
'?print=1'
'&viewDescKey=sport.dbb.views.TabellePublicView/index.jsp_'
'&liga_id={:d}'
)
return template.format(league_id)
def fetch_content(url: str) -> str:
"""Retrieve and return the content of that URL."""
request = _create_request(url)
return urlopen(request).read().decode('utf-8')
def _create_request(url: str) -> Request:
"""Create an HTTP GET request."""
headers = {'User-Agent': USER_AGENT}
return Request(url, headers=headers)
Use HTTPS to retrieve ranking from DBB"""
dbbrankingparser.httpclient
~~~~~~~~~~~~~~~~~~~~~~~~~~~
HTTP client utilities
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from urllib.request import Request, urlopen
USER_AGENT = (
'Mozilla/5.0 (X11; Linux x86_64; rv:38.0) '
'Gecko/20100101 Firefox/38.0 Iceweasel/38.6.0'
) # type: str
def assemble_url(league_id: int) -> str:
"""Assemble the ranking HTML's URL for the league with that ID."""
template = (
'https://www.basketball-bund.net/public/tabelle.jsp'
'?print=1'
'&viewDescKey=sport.dbb.views.TabellePublicView/index.jsp_'
'&liga_id={:d}'
)
return template.format(league_id)
def fetch_content(url: str) -> str:
"""Retrieve and return the content of that URL."""
request = _create_request(url)
return urlopen(request).read().decode('utf-8')
def _create_request(url: str) -> Request:
"""Create an HTTP GET request."""
headers = {'User-Agent': USER_AGENT}
return Request(url, headers=headers)
|
<commit_before>"""
dbbrankingparser.httpclient
~~~~~~~~~~~~~~~~~~~~~~~~~~~
HTTP client utilities
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from urllib.request import Request, urlopen
USER_AGENT = (
'Mozilla/5.0 (X11; Linux x86_64; rv:38.0) '
'Gecko/20100101 Firefox/38.0 Iceweasel/38.6.0'
) # type: str
def assemble_url(league_id: int) -> str:
"""Assemble the ranking HTML's URL for the league with that ID."""
template = (
'http://www.basketball-bund.net/public/tabelle.jsp'
'?print=1'
'&viewDescKey=sport.dbb.views.TabellePublicView/index.jsp_'
'&liga_id={:d}'
)
return template.format(league_id)
def fetch_content(url: str) -> str:
"""Retrieve and return the content of that URL."""
request = _create_request(url)
return urlopen(request).read().decode('utf-8')
def _create_request(url: str) -> Request:
"""Create an HTTP GET request."""
headers = {'User-Agent': USER_AGENT}
return Request(url, headers=headers)
<commit_msg>Use HTTPS to retrieve ranking from DBB<commit_after>"""
dbbrankingparser.httpclient
~~~~~~~~~~~~~~~~~~~~~~~~~~~
HTTP client utilities
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from urllib.request import Request, urlopen
USER_AGENT = (
'Mozilla/5.0 (X11; Linux x86_64; rv:38.0) '
'Gecko/20100101 Firefox/38.0 Iceweasel/38.6.0'
) # type: str
def assemble_url(league_id: int) -> str:
"""Assemble the ranking HTML's URL for the league with that ID."""
template = (
'https://www.basketball-bund.net/public/tabelle.jsp'
'?print=1'
'&viewDescKey=sport.dbb.views.TabellePublicView/index.jsp_'
'&liga_id={:d}'
)
return template.format(league_id)
def fetch_content(url: str) -> str:
"""Retrieve and return the content of that URL."""
request = _create_request(url)
return urlopen(request).read().decode('utf-8')
def _create_request(url: str) -> Request:
"""Create an HTTP GET request."""
headers = {'User-Agent': USER_AGENT}
return Request(url, headers=headers)
|
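Apart from the scheme switch, fetch_content above is a plain urllib GET with a custom User-Agent. A rough equivalent that also closes the response deterministically via a context manager (URL and UA string are placeholders):

from urllib.request import Request, urlopen

def fetch(url: str, user_agent: str = 'example-fetcher/1.0') -> str:
    request = Request(url, headers={'User-Agent': user_agent})
    with urlopen(request) as response:  # connection is closed on exit
        return response.read().decode('utf-8')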
c109b41dc76c333bda1973fa2a543688f2fd5141
|
braid/config.py
|
braid/config.py
|
"""
Support for multiple environments based on python configuration files.
"""
from __future__ import print_function, absolute_import
import imp
import os
from twisted.python.filepath import FilePath
from fabric.api import env, task
CONFIG_DIRS = [
'~/.braid',
'./braidrc.local',
]
def loadEnvironmentConfig(envName, directories=CONFIG_DIRS, extension='.py'):
"""
Loads configuration directives for the specified environment into Fabric's
C{env} variable.
This function tries to load a python module from each specified directory
and stores all of its public uppercase attributes as attributes of Fabric's
environment (all attribute names will be lowercased).
"""
for confDir in directories:
path = FilePath(os.path.expanduser(confDir)).child(envName + extension)
if path.exists():
module = imp.load_source('braid.settings.' + envName, path.path)
for k in dir(module):
if k == k.upper():
setattr(env, k.lower(), getattr(module, k))
@task
def environment(env):
"""
Loads the passed environment configuration. This task can be invoked before
executing the desired Fabric action.
"""
loadEnvironmentConfig(env)
@task
def test():
"""
Shortcut for the C{environment:testing} task.
"""
loadEnvironmentConfig('testing')
@task
def prod():
"""
Shortcut for the C{environment:production} task.
"""
loadEnvironmentConfig('production')
|
"""
Support for multiple environments based on python configuration files.
"""
from __future__ import print_function, absolute_import
import imp
import os
from twisted.python.filepath import FilePath
from fabric.api import env, task
CONFIG_DIRS = [
'~/.braid',
'./braidrc.local',
]
def loadEnvironmentConfig(envName, directories=CONFIG_DIRS, extension='.py'):
"""
Loads configuration directives for the specified environment into Fabric's
C{env} variable.
This function tries to load a python module from each specified directory
and stores all of its public uppercase attributes as attributes of Fabric's
environment (all attribute names will be lowercased).
"""
for confDir in directories:
path = FilePath(os.path.expanduser(confDir)).child(envName + extension)
if path.exists():
module = imp.load_source('braid.settings.' + envName, path.path)
for k in dir(module):
if k == k.upper():
setattr(env, k.lower(), getattr(module, k))
@task
def environment(env):
"""
Load the passed environment configuration.
This task can be invoked before executing the desired Fabric action.
"""
loadEnvironmentConfig(env)
@task
def test():
"""
Load the configuration for the testing environment.
Shortcut for the C{environment:testing} task.
"""
loadEnvironmentConfig('testing')
@task
def prod():
"""
Load the configuration for the production environment.
Shortcut for the C{environment:production} task.
"""
loadEnvironmentConfig('production')
|
Make docstrings more Fabric friendly
|
Make docstrings more Fabric friendly
|
Python
|
mit
|
alex/braid,alex/braid
|
"""
Support for multiple environments based on python configuration files.
"""
from __future__ import print_function, absolute_import
import imp
import os
from twisted.python.filepath import FilePath
from fabric.api import env, task
CONFIG_DIRS = [
'~/.braid',
'./braidrc.local',
]
def loadEnvironmentConfig(envName, directories=CONFIG_DIRS, extension='.py'):
"""
Loads configuration directives for the specified environment into Fabric's
C{env} variable.
This function tries to load a python module from each specified directory
and stores all of its public uppercase attributes as attributes of Fabric's
environment (all attribute names will be lowercased).
"""
for confDir in directories:
path = FilePath(os.path.expanduser(confDir)).child(envName + extension)
if path.exists():
module = imp.load_source('braid.settings.' + envName, path.path)
for k in dir(module):
if k == k.upper():
setattr(env, k.lower(), getattr(module, k))
@task
def environment(env):
"""
Loads the passed environment configuration. This task can be invoked before
executing the desired Fabric action.
"""
loadEnvironmentConfig(env)
@task
def test():
"""
Shortcut for the C{environment:testing} task.
"""
loadEnvironmentConfig('testing')
@task
def prod():
"""
Shortcut for the C{environment:production} task.
"""
loadEnvironmentConfig('production')
Make docstrings more Fabric friendly
|
"""
Support for multiple environments based on python configuration files.
"""
from __future__ import print_function, absolute_import
import imp
import os
from twisted.python.filepath import FilePath
from fabric.api import env, task
CONFIG_DIRS = [
'~/.braid',
'./braidrc.local',
]
def loadEnvironmentConfig(envName, directories=CONFIG_DIRS, extension='.py'):
"""
Loads configuration directives for the specified environment into Fabric's
C{env} variable.
This function tries to load a python module from each specified directory
and stores all of its public uppercase attributes as attributes of Fabric's
environment (all attribute names will be lowercased).
"""
for confDir in directories:
path = FilePath(os.path.expanduser(confDir)).child(envName + extension)
if path.exists():
module = imp.load_source('braid.settings.' + envName, path.path)
for k in dir(module):
if k == k.upper():
setattr(env, k.lower(), getattr(module, k))
@task
def environment(env):
"""
Load the passed environment configuration.
This task can be invoked before executing the desired Fabric action.
"""
loadEnvironmentConfig(env)
@task
def test():
"""
Load the configuration for the testing environment.
Shortcut for the C{environment:testing} task.
"""
loadEnvironmentConfig('testing')
@task
def prod():
"""
Load the configuration for the production environment.
Shortcut for the C{environment:production} task.
"""
loadEnvironmentConfig('production')
|
<commit_before>"""
Support for multiple environments based on python configuration files.
"""
from __future__ import print_function, absolute_import
import imp
import os
from twisted.python.filepath import FilePath
from fabric.api import env, task
CONFIG_DIRS = [
'~/.braid',
'./braidrc.local',
]
def loadEnvironmentConfig(envName, directories=CONFIG_DIRS, extension='.py'):
"""
Loads configuration directives for the specified environment into Fabric's
C{env} variable.
This function tries to load a python module from each specified directory
and stores all of its public uppercase attributes as attributes of Fabric's
environment (all attribute names will be lowercased).
"""
for confDir in directories:
path = FilePath(os.path.expanduser(confDir)).child(envName + extension)
if path.exists():
module = imp.load_source('braid.settings.' + envName, path.path)
for k in dir(module):
if k == k.upper():
setattr(env, k.lower(), getattr(module, k))
@task
def environment(env):
"""
Loads the passed environment configuration. This task can be invoked before
executing the desired Fabric action.
"""
loadEnvironmentConfig(env)
@task
def test():
"""
Shortcut for the C{environment:testing} task.
"""
loadEnvironmentConfig('testing')
@task
def prod():
"""
Shortcut for the C{environment:production} task.
"""
loadEnvironmentConfig('production')
<commit_msg>Make docstrings more Fabric friendly<commit_after>
|
"""
Support for multiple environments based on python configuration files.
"""
from __future__ import print_function, absolute_import
import imp
import os
from twisted.python.filepath import FilePath
from fabric.api import env, task
CONFIG_DIRS = [
'~/.braid',
'./braidrc.local',
]
def loadEnvironmentConfig(envName, directories=CONFIG_DIRS, extension='.py'):
"""
Loads configuration directives for the specified environment into Fabric's
C{env} variable.
This function tries to load a python module from each specified directory
and stores all of its public uppercase attributes as attributes of Fabric's
environment (all attribute names will be lowercased).
"""
for confDir in directories:
path = FilePath(os.path.expanduser(confDir)).child(envName + extension)
if path.exists():
module = imp.load_source('braid.settings.' + envName, path.path)
for k in dir(module):
if k == k.upper():
setattr(env, k.lower(), getattr(module, k))
@task
def environment(env):
"""
Load the passed environment configuration.
This task can be invoked before executing the desired Fabric action.
"""
loadEnvironmentConfig(env)
@task
def test():
"""
Load the configuration for the testing environment.
Shortcut for the C{environment:testing} task.
"""
loadEnvironmentConfig('testing')
@task
def prod():
"""
Load the configuration for the production environment.
Shortcut for the C{environment:production} task.
"""
loadEnvironmentConfig('production')
|
"""
Support for multiple environments based on python configuration files.
"""
from __future__ import print_function, absolute_import
import imp
import os
from twisted.python.filepath import FilePath
from fabric.api import env, task
CONFIG_DIRS = [
'~/.braid',
'./braidrc.local',
]
def loadEnvironmentConfig(envName, directories=CONFIG_DIRS, extension='.py'):
"""
Loads configuration directives for the specified environment into Fabric's
C{env} variable.
This function tries to load a python module from each specified directory
and stores all of its public uppercase attributes as attributes of Fabric's
environment (all attribute names will be lowercased).
"""
for confDir in directories:
path = FilePath(os.path.expanduser(confDir)).child(envName + extension)
if path.exists():
module = imp.load_source('braid.settings.' + envName, path.path)
for k in dir(module):
if k == k.upper():
setattr(env, k.lower(), getattr(module, k))
@task
def environment(env):
"""
Loads the passed environment configuration. This task can be invoked before
executing the desired Fabric action.
"""
loadEnvironmentConfig(env)
@task
def test():
"""
Shortcut for the C{environment:testing} task.
"""
loadEnvironmentConfig('testing')
@task
def prod():
"""
Shortcut for the C{environment:production} task.
"""
loadEnvironmentConfig('production')
Make docstrings more Fabric friendly"""
Support for multiple environments based on python configuration files.
"""
from __future__ import print_function, absolute_import
import imp
import os
from twisted.python.filepath import FilePath
from fabric.api import env, task
CONFIG_DIRS = [
'~/.braid',
'./braidrc.local',
]
def loadEnvironmentConfig(envName, directories=CONFIG_DIRS, extension='.py'):
"""
Loads configuration directives for the specified environment into Fabric's
C{env} variable.
This function tries to load a python module from each specified directory
and stores all of its public uppercase attributes as attributes of Fabric's
environment (all attribute names will be lowercased).
"""
for confDir in directories:
path = FilePath(os.path.expanduser(confDir)).child(envName + extension)
if path.exists():
module = imp.load_source('braid.settings.' + envName, path.path)
for k in dir(module):
if k == k.upper():
setattr(env, k.lower(), getattr(module, k))
@task
def environment(env):
"""
Load the passed environment configuration.
This task can be invoked before executing the desired Fabric action.
"""
loadEnvironmentConfig(env)
@task
def test():
"""
Load the configuration for the testing environment.
Shortcut for the C{environment:testing} task.
"""
loadEnvironmentConfig('testing')
@task
def prod():
"""
Load the configuration for the production environment.
Shortcut for the C{environment:production} task.
"""
loadEnvironmentConfig('production')
|
<commit_before>"""
Support for multiple environments based on python configuration files.
"""
from __future__ import print_function, absolute_import
import imp
import os
from twisted.python.filepath import FilePath
from fabric.api import env, task
CONFIG_DIRS = [
'~/.braid',
'./braidrc.local',
]
def loadEnvironmentConfig(envName, directories=CONFIG_DIRS, extension='.py'):
"""
Loads configuration directives for the specified environment into Fabric's
C{env} variable.
This function tries to load a python module from each specified directory
and stores all of its public uppercase attributes as attributes of Fabric's
environment (all attribute names will be lowercased).
"""
for confDir in directories:
path = FilePath(os.path.expanduser(confDir)).child(envName + extension)
if path.exists():
module = imp.load_source('braid.settings.' + envName, path.path)
for k in dir(module):
if k == k.upper():
setattr(env, k.lower(), getattr(module, k))
@task
def environment(env):
"""
Loads the passed environment configuration. This task can be invoked before
executing the desired Fabric action.
"""
loadEnvironmentConfig(env)
@task
def test():
"""
Shortcut for the C{environment:testing} task.
"""
loadEnvironmentConfig('testing')
@task
def prod():
"""
Shortcut for the C{environment:production} task.
"""
loadEnvironmentConfig('production')
<commit_msg>Make docstrings more Fabric friendly<commit_after>"""
Support for multiple environments based on python configuration files.
"""
from __future__ import print_function, absolute_import
import imp
import os
from twisted.python.filepath import FilePath
from fabric.api import env, task
CONFIG_DIRS = [
'~/.braid',
'./braidrc.local',
]
def loadEnvironmentConfig(envName, directories=CONFIG_DIRS, extension='.py'):
"""
Loads configuration directives for the specified environment into Fabric's
C{env} variable.
This function tries to load a python module from each specified directory
and stores all of its public uppercase attributes as attributes of Fabric's
environment (all attribute names will be lowercased).
"""
for confDir in directories:
path = FilePath(os.path.expanduser(confDir)).child(envName + extension)
if path.exists():
module = imp.load_source('braid.settings.' + envName, path.path)
for k in dir(module):
if k == k.upper():
setattr(env, k.lower(), getattr(module, k))
@task
def environment(env):
"""
Load the passed environment configuration.
This task can be invoked before executing the desired Fabric action.
"""
loadEnvironmentConfig(env)
@task
def test():
"""
Load the configuration for the testing environment.
Shortcut for the C{environment:testing} task.
"""
loadEnvironmentConfig('testing')
@task
def prod():
"""
Load the configuration for the production environment.
Shortcut for the C{environment:production} task.
"""
loadEnvironmentConfig('production')
|
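loadEnvironmentConfig above leans on imp.load_source, which was deprecated in Python 3.4 and removed in 3.12. A sketch of the same load-then-copy-UPPERCASE-names idea on importlib, assumed equivalent for this use:

import importlib.util

def load_settings(path, target, module_name='braid.settings.local'):
    spec = importlib.util.spec_from_file_location(module_name, path)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)  # executes the settings file
    for name in dir(module):
        if name == name.upper():  # only public ALL_CAPS directives
            setattr(target, name.lower(), getattr(module, name))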
97a1e627b682f9aec80134334277b63e81265ddd
|
tests/test_ircv3.py
|
tests/test_ircv3.py
|
import pytest
from pydle.features import ircv3
pytestmark = [pytest.mark.unit, pytest.mark.ircv3]
@pytest.mark.parametrize(
"payload, expected",
[
(
rb"@+example=raw+:=,escaped\:\s\\ :irc.example.com NOTICE #channel :Message",
{"+example": """raw+:=,escaped; \\"""}
),
(
rb"@+example=\foo\bar :irc.example.com NOTICE #channel :Message",
{"+example": "foobar"}
),
(
rb'@msgid=796~1602221579~51;account=user123 :user123!user123@(ip) PRIVMSG #user123 :ping',
{'msgid': '796~1602221579~51', 'account': 'user123'}
),
(
rb'@inspircd.org/service;inspircd.org/bot :ChanServ!services@services.(domain) MODE #user123 +qo user123 :user123',
{"inspircd.org/service": True, r"inspircd.org/bot": True}
)
]
)
def test_tagged_message_escape_sequences(payload, expected):
message = ircv3.tags.TaggedMessage.parse(payload)
assert message.tags == expected
|
import pytest
from pydle.features import ircv3
pytestmark = [pytest.mark.unit, pytest.mark.ircv3]
@pytest.mark.parametrize(
"payload, expected",
[
(
rb'@empty=;missing :irc.example.com NOTICE #channel :Message',
{'empty': True, 'missing': True}
),
(
rb"@+example=raw+:=,escaped\:\s\\ :irc.example.com NOTICE #channel :Message",
{"+example": """raw+:=,escaped; \\"""}
),
(
rb"@+example=\foo\bar :irc.example.com NOTICE #channel :Message",
{"+example": "foobar"}
),
(
rb'@msgid=796~1602221579~51;account=user123 :user123!user123@(ip) PRIVMSG #user123 :ping',
{'msgid': '796~1602221579~51', 'account': 'user123'}
),
(
rb'@inspircd.org/service;inspircd.org/bot :ChanServ!services@services.(domain) MODE #user123 +qo user123 :user123',
{"inspircd.org/service": True, r"inspircd.org/bot": True}
)
]
)
def test_tagged_message_escape_sequences(payload, expected):
message = ircv3.tags.TaggedMessage.parse(payload)
assert message.tags == expected
|
Add test case for empty and missing IRCv3 tags
|
Add test case for empty and missing IRCv3 tags
|
Python
|
bsd-3-clause
|
Shizmob/pydle
|
import pytest
from pydle.features import ircv3
pytestmark = [pytest.mark.unit, pytest.mark.ircv3]
@pytest.mark.parametrize(
"payload, expected",
[
(
rb"@+example=raw+:=,escaped\:\s\\ :irc.example.com NOTICE #channel :Message",
{"+example": """raw+:=,escaped; \\"""}
),
(
rb"@+example=\foo\bar :irc.example.com NOTICE #channel :Message",
{"+example": "foobar"}
),
(
rb'@msgid=796~1602221579~51;account=user123 :user123!user123@(ip) PRIVMSG #user123 :ping',
{'msgid': '796~1602221579~51', 'account': 'user123'}
),
(
rb'@inspircd.org/service;inspircd.org/bot :ChanServ!services@services.(domain) MODE #user123 +qo user123 :user123',
{"inspircd.org/service": True, r"inspircd.org/bot": True}
)
]
)
def test_tagged_message_escape_sequences(payload, expected):
message = ircv3.tags.TaggedMessage.parse(payload)
assert message.tags == expected
Add test case for empty and missing IRCv3 tags
|
import pytest
from pydle.features import ircv3
pytestmark = [pytest.mark.unit, pytest.mark.ircv3]
@pytest.mark.parametrize(
"payload, expected",
[
(
rb'@empty=;missing :irc.example.com NOTICE #channel :Message',
{'empty': True, 'missing': True}
),
(
rb"@+example=raw+:=,escaped\:\s\\ :irc.example.com NOTICE #channel :Message",
{"+example": """raw+:=,escaped; \\"""}
),
(
rb"@+example=\foo\bar :irc.example.com NOTICE #channel :Message",
{"+example": "foobar"}
),
(
rb'@msgid=796~1602221579~51;account=user123 :user123!user123@(ip) PRIVMSG #user123 :ping',
{'msgid': '796~1602221579~51', 'account': 'user123'}
),
(
rb'@inspircd.org/service;inspircd.org/bot :ChanServ!services@services.(domain) MODE #user123 +qo user123 :user123',
{"inspircd.org/service": True, r"inspircd.org/bot": True}
)
]
)
def test_tagged_message_escape_sequences(payload, expected):
message = ircv3.tags.TaggedMessage.parse(payload)
assert message.tags == expected
|
<commit_before>import pytest
from pydle.features import ircv3
pytestmark = [pytest.mark.unit, pytest.mark.ircv3]
@pytest.mark.parametrize(
"payload, expected",
[
(
rb"@+example=raw+:=,escaped\:\s\\ :irc.example.com NOTICE #channel :Message",
{"+example": """raw+:=,escaped; \\"""}
),
(
rb"@+example=\foo\bar :irc.example.com NOTICE #channel :Message",
{"+example": "foobar"}
),
(
rb'@msgid=796~1602221579~51;account=user123 :user123!user123@(ip) PRIVMSG #user123 :ping',
{'msgid': '796~1602221579~51', 'account': 'user123'}
),
(
rb'@inspircd.org/service;inspircd.org/bot :ChanServ!services@services.(domain) MODE #user123 +qo user123 :user123',
{"inspircd.org/service": True, r"inspircd.org/bot": True}
)
]
)
def test_tagged_message_escape_sequences(payload, expected):
message = ircv3.tags.TaggedMessage.parse(payload)
assert message.tags == expected
<commit_msg>Add test case for empty and missing IRCv3 tags<commit_after>
|
import pytest
from pydle.features import ircv3
pytestmark = [pytest.mark.unit, pytest.mark.ircv3]
@pytest.mark.parametrize(
"payload, expected",
[
(
rb'@empty=;missing :irc.example.com NOTICE #channel :Message',
{'empty': True, 'missing': True}
),
(
rb"@+example=raw+:=,escaped\:\s\\ :irc.example.com NOTICE #channel :Message",
{"+example": """raw+:=,escaped; \\"""}
),
(
rb"@+example=\foo\bar :irc.example.com NOTICE #channel :Message",
{"+example": "foobar"}
),
(
rb'@msgid=796~1602221579~51;account=user123 :user123!user123@(ip) PRIVMSG #user123 :ping',
{'msgid': '796~1602221579~51', 'account': 'user123'}
),
(
rb'@inspircd.org/service;inspircd.org/bot :ChanServ!services@services.(domain) MODE #user123 +qo user123 :user123',
{"inspircd.org/service": True, r"inspircd.org/bot": True}
)
]
)
def test_tagged_message_escape_sequences(payload, expected):
message = ircv3.tags.TaggedMessage.parse(payload)
assert message.tags == expected
|
import pytest
from pydle.features import ircv3
pytestmark = [pytest.mark.unit, pytest.mark.ircv3]
@pytest.mark.parametrize(
"payload, expected",
[
(
rb"@+example=raw+:=,escaped\:\s\\ :irc.example.com NOTICE #channel :Message",
{"+example": """raw+:=,escaped; \\"""}
),
(
rb"@+example=\foo\bar :irc.example.com NOTICE #channel :Message",
{"+example": "foobar"}
),
(
rb'@msgid=796~1602221579~51;account=user123 :user123!user123@(ip) PRIVMSG #user123 :ping',
{'msgid': '796~1602221579~51', 'account': 'user123'}
),
(
rb'@inspircd.org/service;inspircd.org/bot :ChanServ!services@services.(domain) MODE #user123 +qo user123 :user123',
{"inspircd.org/service": True, r"inspircd.org/bot": True}
)
]
)
def test_tagged_message_escape_sequences(payload, expected):
message = ircv3.tags.TaggedMessage.parse(payload)
assert message.tags == expected
Add test case for empty and missing IRCv3 tagsimport pytest
from pydle.features import ircv3
pytestmark = [pytest.mark.unit, pytest.mark.ircv3]
@pytest.mark.parametrize(
"payload, expected",
[
(
rb'@empty=;missing :irc.example.com NOTICE #channel :Message',
{'empty': True, 'missing': True}
),
(
rb"@+example=raw+:=,escaped\:\s\\ :irc.example.com NOTICE #channel :Message",
{"+example": """raw+:=,escaped; \\"""}
),
(
rb"@+example=\foo\bar :irc.example.com NOTICE #channel :Message",
{"+example": "foobar"}
),
(
rb'@msgid=796~1602221579~51;account=user123 :user123!user123@(ip) PRIVMSG #user123 :ping',
{'msgid': '796~1602221579~51', 'account': 'user123'}
),
(
rb'@inspircd.org/service;inspircd.org/bot :ChanServ!services@services.(domain) MODE #user123 +qo user123 :user123',
{"inspircd.org/service": True, r"inspircd.org/bot": True}
)
]
)
def test_tagged_message_escape_sequences(payload, expected):
message = ircv3.tags.TaggedMessage.parse(payload)
assert message.tags == expected
|
<commit_before>import pytest
from pydle.features import ircv3
pytestmark = [pytest.mark.unit, pytest.mark.ircv3]
@pytest.mark.parametrize(
"payload, expected",
[
(
rb"@+example=raw+:=,escaped\:\s\\ :irc.example.com NOTICE #channel :Message",
{"+example": """raw+:=,escaped; \\"""}
),
(
rb"@+example=\foo\bar :irc.example.com NOTICE #channel :Message",
{"+example": "foobar"}
),
(
rb'@msgid=796~1602221579~51;account=user123 :user123!user123@(ip) PRIVMSG #user123 :ping',
{'msgid': '796~1602221579~51', 'account': 'user123'}
),
(
rb'@inspircd.org/service;inspircd.org/bot :ChanServ!services@services.(domain) MODE #user123 +qo user123 :user123',
{"inspircd.org/service": True, r"inspircd.org/bot": True}
)
]
)
def test_tagged_message_escape_sequences(payload, expected):
message = ircv3.tags.TaggedMessage.parse(payload)
assert message.tags == expected
<commit_msg>Add test case for empty and missing IRCv3 tags<commit_after>import pytest
from pydle.features import ircv3
pytestmark = [pytest.mark.unit, pytest.mark.ircv3]
@pytest.mark.parametrize(
"payload, expected",
[
(
rb'@empty=;missing :irc.example.com NOTICE #channel :Message',
{'empty': True, 'missing': True}
),
(
rb"@+example=raw+:=,escaped\:\s\\ :irc.example.com NOTICE #channel :Message",
{"+example": """raw+:=,escaped; \\"""}
),
(
rb"@+example=\foo\bar :irc.example.com NOTICE #channel :Message",
{"+example": "foobar"}
),
(
rb'@msgid=796~1602221579~51;account=user123 :user123!user123@(ip) PRIVMSG #user123 :ping',
{'msgid': '796~1602221579~51', 'account': 'user123'}
),
(
rb'@inspircd.org/service;inspircd.org/bot :ChanServ!services@services.(domain) MODE #user123 +qo user123 :user123',
{"inspircd.org/service": True, r"inspircd.org/bot": True}
)
]
)
def test_tagged_message_escape_sequences(payload, expected):
message = ircv3.tags.TaggedMessage.parse(payload)
assert message.tags == expected
|
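Note on the escape semantics the parametrized cases above exercise: the IRCv3 message-tags spec maps \: to ';', \s to a space, \\ to a literal backslash, drops the backslash of any unknown escape, and discards a lone trailing backslash. A minimal sketch of that unescaping, independent of pydle's internals (unescape_tag_value is a hypothetical helper, not pydle's API):

# Minimal sketch of IRCv3 tag-value unescaping as exercised by the tests above.
ESCAPES = {
    ":": ";",    # \: -> ;  (semicolons separate tags, so they are escaped)
    "s": " ",    # \s -> space
    "\\": "\\",  # \\ -> literal backslash
    "r": "\r",   # \r -> carriage return
    "n": "\n",   # \n -> line feed
}

def unescape_tag_value(value: str) -> str:
    out = []
    i = 0
    while i < len(value):
        ch = value[i]
        if ch != "\\":
            out.append(ch)
            i += 1
        elif i + 1 < len(value):
            # Unknown escapes drop the backslash and keep the character,
            # which is why rb"\foo\bar" above parses to "foobar".
            out.append(ESCAPES.get(value[i + 1], value[i + 1]))
            i += 2
        else:
            i += 1  # a lone trailing backslash is dropped
    return "".join(out)

assert unescape_tag_value(r"raw+:=,escaped\:\s\\") == "raw+:=,escaped; \\"
assert unescape_tag_value(r"\foo\bar") == "foobar"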
127a3da0d453785bd9c711d738e20dfdc1876df1
|
tool/serial_dump.py
|
tool/serial_dump.py
|
#!/usr/bin/python
import serial
import string
import io
import time
import sys
if __name__ == '__main__':
port = "/dev/ttyUSB0"
baudrate = "57600"
second = 0.1
if (len(sys.argv) < 3):
print("Usage: serial_dump.py /dev/ttyUSB0 57600")
exit()
elif (len(sys.argv) == 3):
port = sys.argv[1]
baudrate = sys.argv[2]
elif (len(sys.argv) == 4):
port = sys.argv[1]
baudrate = sys.argv[2]
second = float(sys.argv[3])
print( "open {0}, buadrate {1}, delay in {2} seconds".format(port,baudrate,second))
ser = serial.Serial(port, baudrate);
with open("gps.log","rb") as f:
string = f.read()
for byte in string:
ser.write(byte)
print_byte = ":".join("{:02x}".format(ord(c)) for c in byte)
print ("{0} ".format(print_byte))
time.sleep(second)
ser.close()
|
#!/usr/bin/python
import serial
import string
import io
import time
import sys
if __name__ == '__main__':
port = "/dev/ttyUSB0"
baudrate = "57600"
second = 0.001
if (len(sys.argv) < 4 ):
print("Usage: \n./serial_dump.py /dev/ttyUSB0 57600 file_name 0.01")
exit()
elif (len(sys.argv) == 4):
port = sys.argv[1]
baudrate = sys.argv[2]
file_name = sys.argv[3]
elif (len(sys.argv) == 5):
port = sys.argv[1]
baudrate = sys.argv[2]
file_name = sys.argv[3]
second = float(sys.argv[4])
print( "open {0}, buadrate {1}, delay in {2} seconds".format(port,baudrate,second))
ser = serial.Serial(port, baudrate);
with open(file_name,"rb") as f:
string = f.read()
for byte in string:
ser.write(byte)
print_byte = ":".join("{:02x}".format(ord(c)) for c in byte)
print ("{0} ".format(print_byte))
time.sleep(second)
ser.close()
|
Change command option, need to specify file name now
|
Change command option, need to specify file name now
|
Python
|
mit
|
ming6842/firmware-new,fboris/firmware,UrsusPilot/firmware,fboris/firmware,UrsusPilot/firmware,fboris/firmware,UrsusPilot/firmware,ming6842/firmware-new,ming6842/firmware-new
|
#!/usr/bin/python
import serial
import string
import io
import time
import sys
if __name__ == '__main__':
port = "/dev/ttyUSB0"
baudrate = "57600"
second = 0.1
if (len(sys.argv) < 3):
print("Usage: serial_dump.py /dev/ttyUSB0 57600")
exit()
elif (len(sys.argv) == 3):
port = sys.argv[1]
baudrate = sys.argv[2]
elif (len(sys.argv) == 4):
port = sys.argv[1]
baudrate = sys.argv[2]
second = float(sys.argv[3])
print( "open {0}, buadrate {1}, delay in {2} seconds".format(port,baudrate,second))
ser = serial.Serial(port, baudrate);
with open("gps.log","rb") as f:
string = f.read()
for byte in string:
ser.write(byte)
print_byte = ":".join("{:02x}".format(ord(c)) for c in byte)
print ("{0} ".format(print_byte))
time.sleep(second)
ser.close()
Change command option, need to specify file name now
|
#!/usr/bin/python
import serial
import string
import io
import time
import sys
if __name__ == '__main__':
port = "/dev/ttyUSB0"
baudrate = "57600"
second = 0.001
if (len(sys.argv) < 4 ):
print("Usage: \n./serial_dump.py /dev/ttyUSB0 57600 file_name 0.01")
exit()
elif (len(sys.argv) == 4):
port = sys.argv[1]
baudrate = sys.argv[2]
file_name = sys.argv[3]
elif (len(sys.argv) == 5):
port = sys.argv[1]
baudrate = sys.argv[2]
file_name = sys.argv[3]
second = float(sys.argv[4])
print( "open {0}, buadrate {1}, delay in {2} seconds".format(port,baudrate,second))
ser = serial.Serial(port, baudrate);
with open(file_name,"rb") as f:
string = f.read()
for byte in string:
ser.write(byte)
print_byte = ":".join("{:02x}".format(ord(c)) for c in byte)
print ("{0} ".format(print_byte))
time.sleep(second)
ser.close()
|
<commit_before>#!/usr/bin/python
import serial
import string
import io
import time
import sys
if __name__ == '__main__':
port = "/dev/ttyUSB0"
baudrate = "57600"
second = 0.1
if (len(sys.argv) < 3):
print("Usage: serial_dump.py /dev/ttyUSB0 57600")
exit()
elif (len(sys.argv) == 3):
port = sys.argv[1]
baudrate = sys.argv[2]
elif (len(sys.argv) == 4):
port = sys.argv[1]
baudrate = sys.argv[2]
second = float(sys.argv[3])
print( "open {0}, buadrate {1}, delay in {2} seconds".format(port,baudrate,second))
ser = serial.Serial(port, baudrate);
with open("gps.log","rb") as f:
string = f.read()
for byte in string:
ser.write(byte)
print_byte = ":".join("{:02x}".format(ord(c)) for c in byte)
print ("{0} ".format(print_byte))
time.sleep(second)
ser.close()
<commit_msg>Change command option, need to specify file name now<commit_after>
|
#!/usr/bin/python
import serial
import string
import io
import time
import sys
if __name__ == '__main__':
port = "/dev/ttyUSB0"
baudrate = "57600"
second = 0.001
if (len(sys.argv) < 4 ):
print("Usage: \n./serial_dump.py /dev/ttyUSB0 57600 file_name 0.01")
exit()
elif (len(sys.argv) == 4):
port = sys.argv[1]
baudrate = sys.argv[2]
file_name = sys.argv[3]
elif (len(sys.argv) == 5):
port = sys.argv[1]
baudrate = sys.argv[2]
file_name = sys.argv[3]
second = float(sys.argv[4])
print( "open {0}, buadrate {1}, delay in {2} seconds".format(port,baudrate,second))
ser = serial.Serial(port, baudrate);
with open(file_name,"rb") as f:
string = f.read()
for byte in string:
ser.write(byte)
print_byte = ":".join("{:02x}".format(ord(c)) for c in byte)
print ("{0} ".format(print_byte))
time.sleep(second)
ser.close()
|
#!/usr/bin/python
import serial
import string
import io
import time
import sys
if __name__ == '__main__':
port = "/dev/ttyUSB0"
baudrate = "57600"
second = 0.1
if (len(sys.argv) < 3):
print("Usage: serial_dump.py /dev/ttyUSB0 57600")
exit()
elif (len(sys.argv) == 3):
port = sys.argv[1]
baudrate = sys.argv[2]
elif (len(sys.argv) == 4):
port = sys.argv[1]
baudrate = sys.argv[2]
second = float(sys.argv[3])
print( "open {0}, buadrate {1}, delay in {2} seconds".format(port,baudrate,second))
ser = serial.Serial(port, baudrate);
with open("gps.log","rb") as f:
string = f.read()
for byte in string:
ser.write(byte)
print_byte = ":".join("{:02x}".format(ord(c)) for c in byte)
print ("{0} ".format(print_byte))
time.sleep(second)
ser.close()
Change command option, need to specify file name now
#!/usr/bin/python
import serial
import string
import io
import time
import sys
if __name__ == '__main__':
port = "/dev/ttyUSB0"
baudrate = "57600"
second = 0.001
if (len(sys.argv) < 4 ):
print("Usage: \n./serial_dump.py /dev/ttyUSB0 57600 file_name 0.01")
exit()
elif (len(sys.argv) == 4):
port = sys.argv[1]
baudrate = sys.argv[2]
file_name = sys.argv[3]
elif (len(sys.argv) == 5):
port = sys.argv[1]
baudrate = sys.argv[2]
file_name = sys.argv[3]
second = float(sys.argv[4])
print( "open {0}, buadrate {1}, delay in {2} seconds".format(port,baudrate,second))
ser = serial.Serial(port, baudrate);
with open(file_name,"rb") as f:
string = f.read()
for byte in string:
ser.write(byte)
print_byte = ":".join("{:02x}".format(ord(c)) for c in byte)
print ("{0} ".format(print_byte))
time.sleep(second)
ser.close()
|
<commit_before>#!/usr/bin/python
import serial
import string
import io
import time
import sys
if __name__ == '__main__':
port = "/dev/ttyUSB0"
baudrate = "57600"
second = 0.1
if (len(sys.argv) < 3):
print("Usage: serial_dump.py /dev/ttyUSB0 57600")
exit()
elif (len(sys.argv) == 3):
port = sys.argv[1]
baudrate = sys.argv[2]
elif (len(sys.argv) == 4):
port = sys.argv[1]
baudrate = sys.argv[2]
second = float(sys.argv[3])
print( "open {0}, buadrate {1}, delay in {2} seconds".format(port,baudrate,second))
ser = serial.Serial(port, baudrate);
with open("gps.log","rb") as f:
string = f.read()
for byte in string:
ser.write(byte)
print_byte = ":".join("{:02x}".format(ord(c)) for c in byte)
print ("{0} ".format(print_byte))
time.sleep(second)
ser.close()
<commit_msg>Change command option, need to specify file name now<commit_after>#!/usr/bin/python
import serial
import string
import io
import time
import sys
if __name__ == '__main__':
port = "/dev/ttyUSB0"
baudrate = "57600"
second = 0.001
if (len(sys.argv) < 4 ):
print("Usage: \n./serial_dump.py /dev/ttyUSB0 57600 file_name 0.01")
exit()
elif (len(sys.argv) == 4):
port = sys.argv[1]
baudrate = sys.argv[2]
file_name = sys.argv[3]
elif (len(sys.argv) == 5):
port = sys.argv[1]
baudrate = sys.argv[2]
file_name = sys.argv[3]
second = float(sys.argv[4])
print( "open {0}, buadrate {1}, delay in {2} seconds".format(port,baudrate,second))
ser = serial.Serial(port, baudrate);
with open(file_name,"rb") as f:
string = f.read()
for byte in string:
ser.write(byte)
print_byte = ":".join("{:02x}".format(ord(c)) for c in byte)
print ("{0} ".format(print_byte))
time.sleep(second)
ser.close()
|
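With the change above, the replay script takes the dump file as a required third argument and an optional inter-byte delay as a fourth. A minimal sketch of the two invocation forms (device path, baud rate and file name are illustrative, not prescribed by the script):

import subprocess

# Replay gps.log over /dev/ttyUSB0 at 57600 baud, default 0.001 s delay.
subprocess.run(["./serial_dump.py", "/dev/ttyUSB0", "57600", "gps.log"])

# Same replay, but with an explicit 0.01 s delay between bytes.
subprocess.run(["./serial_dump.py", "/dev/ttyUSB0", "57600", "gps.log", "0.01"])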
0655505b20c5fc88ba3b5de1d948538acc5c1b8a
|
normandy/health/urls.py
|
normandy/health/urls.py
|
from django.conf.urls import url
from normandy.health.api import views
urlpatterns = [
url(r'^__version__', views.version, name='normandy.version'),
url(r'^__heartbeat__', views.heartbeat, name='normandy.heartbeat'),
url(r'^__lbheartbeat__', views.heartbeat, name='normandy.lbheartbeat'),
]
|
from django.conf.urls import url
from normandy.health.api import views
urlpatterns = [
url(r'^__version__', views.version, name='normandy.version'),
url(r'^__heartbeat__', views.heartbeat, name='normandy.heartbeat'),
url(r'^__lbheartbeat__', views.lbheartbeat, name='normandy.lbheartbeat'),
]
|
Use the right view for the lbheartbeat check
|
Use the right view for the lbheartbeat check
|
Python
|
mpl-2.0
|
mozilla/normandy,Osmose/normandy,Osmose/normandy,mozilla/normandy,Osmose/normandy,Osmose/normandy,mozilla/normandy,mozilla/normandy
|
from django.conf.urls import url
from normandy.health.api import views
urlpatterns = [
url(r'^__version__', views.version, name='normandy.version'),
url(r'^__heartbeat__', views.heartbeat, name='normandy.heartbeat'),
url(r'^__lbheartbeat__', views.heartbeat, name='normandy.lbheartbeat'),
]
Use the right view for the lbheartbeat check
|
from django.conf.urls import url
from normandy.health.api import views
urlpatterns = [
url(r'^__version__', views.version, name='normandy.version'),
url(r'^__heartbeat__', views.heartbeat, name='normandy.heartbeat'),
url(r'^__lbheartbeat__', views.lbheartbeat, name='normandy.lbheartbeat'),
]
|
<commit_before>from django.conf.urls import url
from normandy.health.api import views
urlpatterns = [
url(r'^__version__', views.version, name='normandy.version'),
url(r'^__heartbeat__', views.heartbeat, name='normandy.heartbeat'),
url(r'^__lbheartbeat__', views.heartbeat, name='normandy.lbheartbeat'),
]
<commit_msg>Use the right view for the lbheartbeat check<commit_after>
|
from django.conf.urls import url
from normandy.health.api import views
urlpatterns = [
url(r'^__version__', views.version, name='normandy.version'),
url(r'^__heartbeat__', views.heartbeat, name='normandy.heartbeat'),
url(r'^__lbheartbeat__', views.lbheartbeat, name='normandy.lbheartbeat'),
]
|
from django.conf.urls import url
from normandy.health.api import views
urlpatterns = [
url(r'^__version__', views.version, name='normandy.version'),
url(r'^__heartbeat__', views.heartbeat, name='normandy.heartbeat'),
url(r'^__lbheartbeat__', views.heartbeat, name='normandy.lbheartbeat'),
]
Use the right view for the lbheartbeat check
from django.conf.urls import url
from normandy.health.api import views
urlpatterns = [
url(r'^__version__', views.version, name='normandy.version'),
url(r'^__heartbeat__', views.heartbeat, name='normandy.heartbeat'),
url(r'^__lbheartbeat__', views.lbheartbeat, name='normandy.lbheartbeat'),
]
|
<commit_before>from django.conf.urls import url
from normandy.health.api import views
urlpatterns = [
url(r'^__version__', views.version, name='normandy.version'),
url(r'^__heartbeat__', views.heartbeat, name='normandy.heartbeat'),
url(r'^__lbheartbeat__', views.heartbeat, name='normandy.lbheartbeat'),
]
<commit_msg>Use the right view for the lbheartbeat check<commit_after>from django.conf.urls import url
from normandy.health.api import views
urlpatterns = [
url(r'^__version__', views.version, name='normandy.version'),
url(r'^__heartbeat__', views.heartbeat, name='normandy.heartbeat'),
url(r'^__lbheartbeat__', views.lbheartbeat, name='normandy.lbheartbeat'),
]
|
5fb17ccf0311500e5ce14a49e246d1a6cbc427a4
|
mopidy/frontends/mpd/__init__.py
|
mopidy/frontends/mpd/__init__.py
|
import logging
from mopidy.frontends.base import BaseFrontend
from mopidy.frontends.mpd.dispatcher import MpdDispatcher
from mopidy.frontends.mpd.process import MpdProcess
from mopidy.utils.process import unpickle_connection
logger = logging.getLogger('mopidy.frontends.mpd')
class MpdFrontend(BaseFrontend):
"""
The MPD frontend.
**Settings:**
- :attr:`mopidy.settings.MPD_SERVER_HOSTNAME`
- :attr:`mopidy.settings.MPD_SERVER_PORT`
"""
def __init__(self, *args, **kwargs):
super(MpdFrontend, self).__init__(*args, **kwargs)
self.process = None
self.dispatcher = MpdDispatcher(self.backend)
def start(self):
"""Starts the MPD server."""
self.process = MpdProcess(self.core_queue)
self.process.start()
def destroy(self):
"""Destroys the MPD server."""
self.process.destroy()
def process_message(self, message):
"""
Processes messages with the MPD frontend as destination.
:param message: the message
:type message: dict
"""
assert message['to'] == 'frontend', \
u'Message recipient must be "frontend".'
if message['command'] == 'mpd_request':
response = self.dispatcher.handle_request(message['request'])
connection = unpickle_connection(message['reply_to'])
connection.send(response)
else:
logger.warning(u'Cannot handle message: %s', message)
|
import logging
from mopidy.frontends.base import BaseFrontend
from mopidy.frontends.mpd.dispatcher import MpdDispatcher
from mopidy.frontends.mpd.process import MpdProcess
from mopidy.utils.process import unpickle_connection
logger = logging.getLogger('mopidy.frontends.mpd')
class MpdFrontend(BaseFrontend):
"""
The MPD frontend.
**Settings:**
- :attr:`mopidy.settings.MPD_SERVER_HOSTNAME`
- :attr:`mopidy.settings.MPD_SERVER_PORT`
"""
def __init__(self, *args, **kwargs):
super(MpdFrontend, self).__init__(*args, **kwargs)
self.process = None
self.dispatcher = MpdDispatcher(self.backend)
def start(self):
"""Starts the MPD server."""
self.process = MpdProcess(self.core_queue)
self.process.start()
def destroy(self):
"""Destroys the MPD server."""
self.process.destroy()
def process_message(self, message):
"""
Processes messages with the MPD frontend as destination.
:param message: the message
:type message: dict
"""
assert message['to'] == 'frontend', \
u'Message recipient must be "frontend".'
if message['command'] == 'mpd_request':
response = self.dispatcher.handle_request(message['request'])
connection = unpickle_connection(message['reply_to'])
connection.send(response)
else:
pass # Ignore messages for other frontends
|
Make MpdFrontend ignore unknown messages
|
Make MpdFrontend ignore unknown messages
|
Python
|
apache-2.0
|
diandiankan/mopidy,rawdlite/mopidy,adamcik/mopidy,ZenithDK/mopidy,SuperStarPL/mopidy,bencevans/mopidy,abarisain/mopidy,pacificIT/mopidy,bacontext/mopidy,jodal/mopidy,adamcik/mopidy,jcass77/mopidy,jmarsik/mopidy,quartz55/mopidy,quartz55/mopidy,kingosticks/mopidy,SuperStarPL/mopidy,ali/mopidy,bencevans/mopidy,adamcik/mopidy,swak/mopidy,tkem/mopidy,jmarsik/mopidy,SuperStarPL/mopidy,glogiotatidis/mopidy,quartz55/mopidy,glogiotatidis/mopidy,hkariti/mopidy,ZenithDK/mopidy,swak/mopidy,woutervanwijk/mopidy,hkariti/mopidy,rawdlite/mopidy,swak/mopidy,priestd09/mopidy,dbrgn/mopidy,mokieyue/mopidy,bacontext/mopidy,mopidy/mopidy,ZenithDK/mopidy,priestd09/mopidy,diandiankan/mopidy,tkem/mopidy,bencevans/mopidy,mokieyue/mopidy,pacificIT/mopidy,glogiotatidis/mopidy,ZenithDK/mopidy,pacificIT/mopidy,ali/mopidy,priestd09/mopidy,bacontext/mopidy,vrs01/mopidy,pacificIT/mopidy,dbrgn/mopidy,mokieyue/mopidy,dbrgn/mopidy,jcass77/mopidy,liamw9534/mopidy,kingosticks/mopidy,abarisain/mopidy,tkem/mopidy,tkem/mopidy,liamw9534/mopidy,mokieyue/mopidy,ali/mopidy,jodal/mopidy,dbrgn/mopidy,diandiankan/mopidy,kingosticks/mopidy,jmarsik/mopidy,vrs01/mopidy,vrs01/mopidy,woutervanwijk/mopidy,diandiankan/mopidy,jodal/mopidy,ali/mopidy,hkariti/mopidy,quartz55/mopidy,swak/mopidy,jmarsik/mopidy,jcass77/mopidy,SuperStarPL/mopidy,rawdlite/mopidy,bacontext/mopidy,bencevans/mopidy,glogiotatidis/mopidy,mopidy/mopidy,mopidy/mopidy,hkariti/mopidy,vrs01/mopidy,rawdlite/mopidy
|
import logging
from mopidy.frontends.base import BaseFrontend
from mopidy.frontends.mpd.dispatcher import MpdDispatcher
from mopidy.frontends.mpd.process import MpdProcess
from mopidy.utils.process import unpickle_connection
logger = logging.getLogger('mopidy.frontends.mpd')
class MpdFrontend(BaseFrontend):
"""
The MPD frontend.
**Settings:**
- :attr:`mopidy.settings.MPD_SERVER_HOSTNAME`
- :attr:`mopidy.settings.MPD_SERVER_PORT`
"""
def __init__(self, *args, **kwargs):
super(MpdFrontend, self).__init__(*args, **kwargs)
self.process = None
self.dispatcher = MpdDispatcher(self.backend)
def start(self):
"""Starts the MPD server."""
self.process = MpdProcess(self.core_queue)
self.process.start()
def destroy(self):
"""Destroys the MPD server."""
self.process.destroy()
def process_message(self, message):
"""
Processes messages with the MPD frontend as destination.
:param message: the message
:type message: dict
"""
assert message['to'] == 'frontend', \
u'Message recipient must be "frontend".'
if message['command'] == 'mpd_request':
response = self.dispatcher.handle_request(message['request'])
connection = unpickle_connection(message['reply_to'])
connection.send(response)
else:
logger.warning(u'Cannot handle message: %s', message)
Make MpdFrontend ignore unknown messages
|
import logging
from mopidy.frontends.base import BaseFrontend
from mopidy.frontends.mpd.dispatcher import MpdDispatcher
from mopidy.frontends.mpd.process import MpdProcess
from mopidy.utils.process import unpickle_connection
logger = logging.getLogger('mopidy.frontends.mpd')
class MpdFrontend(BaseFrontend):
"""
The MPD frontend.
**Settings:**
- :attr:`mopidy.settings.MPD_SERVER_HOSTNAME`
- :attr:`mopidy.settings.MPD_SERVER_PORT`
"""
def __init__(self, *args, **kwargs):
super(MpdFrontend, self).__init__(*args, **kwargs)
self.process = None
self.dispatcher = MpdDispatcher(self.backend)
def start(self):
"""Starts the MPD server."""
self.process = MpdProcess(self.core_queue)
self.process.start()
def destroy(self):
"""Destroys the MPD server."""
self.process.destroy()
def process_message(self, message):
"""
Processes messages with the MPD frontend as destination.
:param message: the message
:type message: dict
"""
assert message['to'] == 'frontend', \
u'Message recipient must be "frontend".'
if message['command'] == 'mpd_request':
response = self.dispatcher.handle_request(message['request'])
connection = unpickle_connection(message['reply_to'])
connection.send(response)
else:
pass # Ignore messages for other frontends
|
<commit_before>import logging
from mopidy.frontends.base import BaseFrontend
from mopidy.frontends.mpd.dispatcher import MpdDispatcher
from mopidy.frontends.mpd.process import MpdProcess
from mopidy.utils.process import unpickle_connection
logger = logging.getLogger('mopidy.frontends.mpd')
class MpdFrontend(BaseFrontend):
"""
The MPD frontend.
**Settings:**
- :attr:`mopidy.settings.MPD_SERVER_HOSTNAME`
- :attr:`mopidy.settings.MPD_SERVER_PORT`
"""
def __init__(self, *args, **kwargs):
super(MpdFrontend, self).__init__(*args, **kwargs)
self.process = None
self.dispatcher = MpdDispatcher(self.backend)
def start(self):
"""Starts the MPD server."""
self.process = MpdProcess(self.core_queue)
self.process.start()
def destroy(self):
"""Destroys the MPD server."""
self.process.destroy()
def process_message(self, message):
"""
Processes messages with the MPD frontend as destination.
:param message: the message
:type message: dict
"""
assert message['to'] == 'frontend', \
u'Message recipient must be "frontend".'
if message['command'] == 'mpd_request':
response = self.dispatcher.handle_request(message['request'])
connection = unpickle_connection(message['reply_to'])
connection.send(response)
else:
logger.warning(u'Cannot handle message: %s', message)
<commit_msg>Make MpdFrontend ignore unknown messages<commit_after>
|
import logging
from mopidy.frontends.base import BaseFrontend
from mopidy.frontends.mpd.dispatcher import MpdDispatcher
from mopidy.frontends.mpd.process import MpdProcess
from mopidy.utils.process import unpickle_connection
logger = logging.getLogger('mopidy.frontends.mpd')
class MpdFrontend(BaseFrontend):
"""
The MPD frontend.
**Settings:**
- :attr:`mopidy.settings.MPD_SERVER_HOSTNAME`
- :attr:`mopidy.settings.MPD_SERVER_PORT`
"""
def __init__(self, *args, **kwargs):
super(MpdFrontend, self).__init__(*args, **kwargs)
self.process = None
self.dispatcher = MpdDispatcher(self.backend)
def start(self):
"""Starts the MPD server."""
self.process = MpdProcess(self.core_queue)
self.process.start()
def destroy(self):
"""Destroys the MPD server."""
self.process.destroy()
def process_message(self, message):
"""
Processes messages with the MPD frontend as destination.
:param message: the message
:type message: dict
"""
assert message['to'] == 'frontend', \
u'Message recipient must be "frontend".'
if message['command'] == 'mpd_request':
response = self.dispatcher.handle_request(message['request'])
connection = unpickle_connection(message['reply_to'])
connection.send(response)
else:
pass # Ignore messages for other frontends
|
import logging
from mopidy.frontends.base import BaseFrontend
from mopidy.frontends.mpd.dispatcher import MpdDispatcher
from mopidy.frontends.mpd.process import MpdProcess
from mopidy.utils.process import unpickle_connection
logger = logging.getLogger('mopidy.frontends.mpd')
class MpdFrontend(BaseFrontend):
"""
The MPD frontend.
**Settings:**
- :attr:`mopidy.settings.MPD_SERVER_HOSTNAME`
- :attr:`mopidy.settings.MPD_SERVER_PORT`
"""
def __init__(self, *args, **kwargs):
super(MpdFrontend, self).__init__(*args, **kwargs)
self.process = None
self.dispatcher = MpdDispatcher(self.backend)
def start(self):
"""Starts the MPD server."""
self.process = MpdProcess(self.core_queue)
self.process.start()
def destroy(self):
"""Destroys the MPD server."""
self.process.destroy()
def process_message(self, message):
"""
Processes messages with the MPD frontend as destination.
:param message: the message
:type message: dict
"""
assert message['to'] == 'frontend', \
u'Message recipient must be "frontend".'
if message['command'] == 'mpd_request':
response = self.dispatcher.handle_request(message['request'])
connection = unpickle_connection(message['reply_to'])
connection.send(response)
else:
logger.warning(u'Cannot handle message: %s', message)
Make MpdFrontend ignore unknown messages
import logging
from mopidy.frontends.base import BaseFrontend
from mopidy.frontends.mpd.dispatcher import MpdDispatcher
from mopidy.frontends.mpd.process import MpdProcess
from mopidy.utils.process import unpickle_connection
logger = logging.getLogger('mopidy.frontends.mpd')
class MpdFrontend(BaseFrontend):
"""
The MPD frontend.
**Settings:**
- :attr:`mopidy.settings.MPD_SERVER_HOSTNAME`
- :attr:`mopidy.settings.MPD_SERVER_PORT`
"""
def __init__(self, *args, **kwargs):
super(MpdFrontend, self).__init__(*args, **kwargs)
self.process = None
self.dispatcher = MpdDispatcher(self.backend)
def start(self):
"""Starts the MPD server."""
self.process = MpdProcess(self.core_queue)
self.process.start()
def destroy(self):
"""Destroys the MPD server."""
self.process.destroy()
def process_message(self, message):
"""
Processes messages with the MPD frontend as destination.
:param message: the message
:type message: dict
"""
assert message['to'] == 'frontend', \
u'Message recipient must be "frontend".'
if message['command'] == 'mpd_request':
response = self.dispatcher.handle_request(message['request'])
connection = unpickle_connection(message['reply_to'])
connection.send(response)
else:
pass # Ignore messages for other frontends
|
<commit_before>import logging
from mopidy.frontends.base import BaseFrontend
from mopidy.frontends.mpd.dispatcher import MpdDispatcher
from mopidy.frontends.mpd.process import MpdProcess
from mopidy.utils.process import unpickle_connection
logger = logging.getLogger('mopidy.frontends.mpd')
class MpdFrontend(BaseFrontend):
"""
The MPD frontend.
**Settings:**
- :attr:`mopidy.settings.MPD_SERVER_HOSTNAME`
- :attr:`mopidy.settings.MPD_SERVER_PORT`
"""
def __init__(self, *args, **kwargs):
super(MpdFrontend, self).__init__(*args, **kwargs)
self.process = None
self.dispatcher = MpdDispatcher(self.backend)
def start(self):
"""Starts the MPD server."""
self.process = MpdProcess(self.core_queue)
self.process.start()
def destroy(self):
"""Destroys the MPD server."""
self.process.destroy()
def process_message(self, message):
"""
Processes messages with the MPD frontend as destination.
:param message: the message
:type message: dict
"""
assert message['to'] == 'frontend', \
u'Message recipient must be "frontend".'
if message['command'] == 'mpd_request':
response = self.dispatcher.handle_request(message['request'])
connection = unpickle_connection(message['reply_to'])
connection.send(response)
else:
logger.warning(u'Cannot handle message: %s', message)
<commit_msg>Make MpdFrontend ignore unknown messages<commit_after>import logging
from mopidy.frontends.base import BaseFrontend
from mopidy.frontends.mpd.dispatcher import MpdDispatcher
from mopidy.frontends.mpd.process import MpdProcess
from mopidy.utils.process import unpickle_connection
logger = logging.getLogger('mopidy.frontends.mpd')
class MpdFrontend(BaseFrontend):
"""
The MPD frontend.
**Settings:**
- :attr:`mopidy.settings.MPD_SERVER_HOSTNAME`
- :attr:`mopidy.settings.MPD_SERVER_PORT`
"""
def __init__(self, *args, **kwargs):
super(MpdFrontend, self).__init__(*args, **kwargs)
self.process = None
self.dispatcher = MpdDispatcher(self.backend)
def start(self):
"""Starts the MPD server."""
self.process = MpdProcess(self.core_queue)
self.process.start()
def destroy(self):
"""Destroys the MPD server."""
self.process.destroy()
def process_message(self, message):
"""
Processes messages with the MPD frontend as destination.
:param message: the message
:type message: dict
"""
assert message['to'] == 'frontend', \
u'Message recipient must be "frontend".'
if message['command'] == 'mpd_request':
response = self.dispatcher.handle_request(message['request'])
connection = unpickle_connection(message['reply_to'])
connection.send(response)
else:
pass # Ignore messages for other frontends
|
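For context on the dispatch above: only messages whose 'command' is 'mpd_request' get a reply over the unpickled connection; after this change any other message addressed to the frontend is skipped silently rather than logged. A minimal sketch of the two shapes (field values are illustrative):

# Answered: dispatched to MpdDispatcher, response sent back over the
# unpickled reply connection.
handled = {
    "to": "frontend",
    "command": "mpd_request",
    "request": "status",
    "reply_to": "<pickled connection>",  # placeholder for the real pickle
}

# Silently ignored after this change (previously logged as a warning).
other = {
    "to": "frontend",
    "command": "playback_state_changed",  # illustrative command name
}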
076ef01bd3334d2a1941df369286e4972223901e
|
PyramidSort.py
|
PyramidSort.py
|
import sublime, sublime_plugin
def pyramid_sort(txt):
txt = list(filter(lambda s: s.strip(), txt))
txt.sort(key = lambda s: len(s))
return txt
class PyramidSortCommand(sublime_plugin.TextCommand):
def run(self, edit):
regions = [s for s in self.view.sel() if not s.empty()]
if regions:
for r in regions:
txt = self.view.substr(r)
lines = txt.splitlines()
lines = pyramid_sort(lines)
self.view.replace(edit, r, u"\n".join(lines))
|
#
# 123
# 12
# 1
import sublime, sublime_plugin
def pyramid_sort(txt):
txt = list(filter(lambda s: s.strip(), txt))
txt.sort(key = lambda s: len(s))
return txt
class PyramidSortCommand(sublime_plugin.TextCommand):
def run(self, edit):
regions = [s for s in self.view.sel() if not s.empty()]
if regions:
for r in regions:
lr = self.view.line(r)
txt = self.view.substr(lr)
lines = txt.splitlines()
lines = pyramid_sort(lines)
self.view.replace(edit, lr, u"\n".join(lines))
|
Revert "removed grab line from region, gives some unexpected behaviour. Instead just replace exactly what is marked"
|
Revert "removed grab line from region, gives some unexpected behaviour. Instead just replace exactly what is marked"
This reverts commit 9c944db3affc8181146fa27d8483a58d2731756b.
|
Python
|
apache-2.0
|
kenglxn/PyramidSortSublimeTextPlugin,kenglxn/PyramidSortSublimeTextPlugin
|
import sublime, sublime_plugin
def pyramid_sort(txt):
txt = list(filter(lambda s: s.strip(), txt))
txt.sort(key = lambda s: len(s))
return txt
class PyramidSortCommand(sublime_plugin.TextCommand):
def run(self, edit):
regions = [s for s in self.view.sel() if not s.empty()]
if regions:
for r in regions:
txt = self.view.substr(r)
lines = txt.splitlines()
lines = pyramid_sort(lines)
self.view.replace(edit, r, u"\n".join(lines))
Revert "removed grab line from region, gives some unexpected behaviour. Instead just replace exactly what is marked"
This reverts commit 9c944db3affc8181146fa27d8483a58d2731756b.
|
#
# 123
# 12
# 1
import sublime, sublime_plugin
def pyramid_sort(txt):
txt = list(filter(lambda s: s.strip(), txt))
txt.sort(key = lambda s: len(s))
return txt
class PyramidSortCommand(sublime_plugin.TextCommand):
def run(self, edit):
regions = [s for s in self.view.sel() if not s.empty()]
if regions:
for r in regions:
lr = self.view.line(r)
txt = self.view.substr(lr)
lines = txt.splitlines()
lines = pyramid_sort(lines)
self.view.replace(edit, lr, u"\n".join(lines))
|
<commit_before>import sublime, sublime_plugin
def pyramid_sort(txt):
txt = list(filter(lambda s: s.strip(), txt))
txt.sort(key = lambda s: len(s))
return txt
class PyramidSortCommand(sublime_plugin.TextCommand):
def run(self, edit):
regions = [s for s in self.view.sel() if not s.empty()]
if regions:
for r in regions:
txt = self.view.substr(r)
lines = txt.splitlines()
lines = pyramid_sort(lines)
self.view.replace(edit, r, u"\n".join(lines))
<commit_msg>Revert "removed grab line from region, gives some unexpected behaviour. Instead just replace exactly what is marked"
This reverts commit 9c944db3affc8181146fa27d8483a58d2731756b.<commit_after>
|
#
# 123
# 12
# 1
import sublime, sublime_plugin
def pyramid_sort(txt):
txt = list(filter(lambda s: s.strip(), txt))
txt.sort(key = lambda s: len(s))
return txt
class PyramidSortCommand(sublime_plugin.TextCommand):
def run(self, edit):
regions = [s for s in self.view.sel() if not s.empty()]
if regions:
for r in regions:
lr = self.view.line(r)
txt = self.view.substr(lr)
lines = txt.splitlines()
lines = pyramid_sort(lines)
self.view.replace(edit, lr, u"\n".join(lines))
|
import sublime, sublime_plugin
def pyramid_sort(txt):
txt = list(filter(lambda s: s.strip(), txt))
txt.sort(key = lambda s: len(s))
return txt
class PyramidSortCommand(sublime_plugin.TextCommand):
def run(self, edit):
regions = [s for s in self.view.sel() if not s.empty()]
if regions:
for r in regions:
txt = self.view.substr(r)
lines = txt.splitlines()
lines = pyramid_sort(lines)
self.view.replace(edit, r, u"\n".join(lines))
Revert "removed grab line from region, gives some unexpected behaviour. Instead just replace exactly what is marked"
This reverts commit 9c944db3affc8181146fa27d8483a58d2731756b.
#
# 123
# 12
# 1
import sublime, sublime_plugin
def pyramid_sort(txt):
txt = list(filter(lambda s: s.strip(), txt))
txt.sort(key = lambda s: len(s))
return txt
class PyramidSortCommand(sublime_plugin.TextCommand):
def run(self, edit):
regions = [s for s in self.view.sel() if not s.empty()]
if regions:
for r in regions:
lr = self.view.line(r)
txt = self.view.substr(lr)
lines = txt.splitlines()
lines = pyramid_sort(lines)
self.view.replace(edit, lr, u"\n".join(lines))
|
<commit_before>import sublime, sublime_plugin
def pyramid_sort(txt):
txt = list(filter(lambda s: s.strip(), txt))
txt.sort(key = lambda s: len(s))
return txt
class PyramidSortCommand(sublime_plugin.TextCommand):
def run(self, edit):
regions = [s for s in self.view.sel() if not s.empty()]
if regions:
for r in regions:
txt = self.view.substr(r)
lines = txt.splitlines()
lines = pyramid_sort(lines)
self.view.replace(edit, r, u"\n".join(lines))
<commit_msg>Revert "removed grab line from region, gives some unexpected behaviour. Instead just replace exactly what is marked"
This reverts commit 9c944db3affc8181146fa27d8483a58d2731756b.<commit_after>#
# 123
# 12
# 1
import sublime, sublime_plugin
def pyramid_sort(txt):
txt = list(filter(lambda s: s.strip(), txt))
txt.sort(key = lambda s: len(s))
return txt
class PyramidSortCommand(sublime_plugin.TextCommand):
def run(self, edit):
regions = [s for s in self.view.sel() if not s.empty()]
if regions:
for r in regions:
lr = self.view.line(r)
txt = self.view.substr(lr)
lines = txt.splitlines()
lines = pyramid_sort(lines)
self.view.replace(edit, lr, u"\n".join(lines))
|
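The pyramid_sort helper above has no Sublime Text dependency, so its contract is easy to check in isolation: whitespace-only lines are dropped and the rest are ordered shortest first. A minimal sketch (the helper body is copied from the plugin above):

def pyramid_sort(txt):
    # Drop blank/whitespace-only lines, then sort by length, shortest first.
    txt = list(filter(lambda s: s.strip(), txt))
    txt.sort(key=lambda s: len(s))
    return txt

assert pyramid_sort(["123", "", "1", "  ", "12"]) == ["1", "12", "123"]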
44f2ea1a47ee8502580853aaf6ca98597d83446a
|
__openerp__.py
|
__openerp__.py
|
# -*- coding: utf-8 -*-
{
"name": "Alternate Ledger",
"version": "1.2.2",
"author": "XCG Consulting",
"category": 'Accounting',
"description": '''Allow the creation of new accounting ledgers that store
separate transactions.''',
'website': 'http://www.openerp-experts.com',
'init_xml': [],
"depends": [
'base',
'account_streamline',
],
"data": [
'security/ir.model.access.csv',
'views/account_view.xml',
'views/ledger_type.xml',
'views/menu.xml',
'views/account_journal.xml',
],
'js': [
'static/src/js/account_move_line_alternate_quickadd.js',
],
'qweb': [
'static/src/xml/account_move_line_alternate_quickadd.xml',
],
'installable': True,
'active': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
# -*- coding: utf-8 -*-
{
"name": "Alternate Ledger",
"version": "1.2.3",
"author": "XCG Consulting",
"category": 'Accounting',
"description": '''Allow the creation of new accounting ledgers that store
separate transactions.''',
'website': 'http://www.openerp-experts.com',
'init_xml': [],
"depends": [
'base',
'account_streamline',
],
"data": [
'security/ir.model.access.csv',
'views/account_view.xml',
'views/ledger_type.xml',
'views/menu.xml',
'views/account_journal.xml',
],
'js': [
'static/src/js/account_move_line_alternate_quickadd.js',
],
'qweb': [
'static/src/xml/account_move_line_alternate_quickadd.xml',
],
'installable': True,
'active': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Change version to 1.2.3 (dev)
|
Change version to 1.2.3 (dev)
|
Python
|
agpl-3.0
|
xcgd/alternate_ledger,xcgd/alternate_ledger
|
# -*- coding: utf-8 -*-
{
"name": "Alternate Ledger",
"version": "1.2.2",
"author": "XCG Consulting",
"category": 'Accounting',
"description": '''Allow the creation of new accounting ledgers that store
separate transactions.''',
'website': 'http://www.openerp-experts.com',
'init_xml': [],
"depends": [
'base',
'account_streamline',
],
"data": [
'security/ir.model.access.csv',
'views/account_view.xml',
'views/ledger_type.xml',
'views/menu.xml',
'views/account_journal.xml',
],
'js': [
'static/src/js/account_move_line_alternate_quickadd.js',
],
'qweb': [
'static/src/xml/account_move_line_alternate_quickadd.xml',
],
'installable': True,
'active': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
Change version to 1.2.3 (dev)
|
# -*- coding: utf-8 -*-
{
"name": "Alternate Ledger",
"version": "1.2.3",
"author": "XCG Consulting",
"category": 'Accounting',
"description": '''Allow the creation of new accounting ledgers that store
separate transactions.''',
'website': 'http://www.openerp-experts.com',
'init_xml': [],
"depends": [
'base',
'account_streamline',
],
"data": [
'security/ir.model.access.csv',
'views/account_view.xml',
'views/ledger_type.xml',
'views/menu.xml',
'views/account_journal.xml',
],
'js': [
'static/src/js/account_move_line_alternate_quickadd.js',
],
'qweb': [
'static/src/xml/account_move_line_alternate_quickadd.xml',
],
'installable': True,
'active': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
<commit_before># -*- coding: utf-8 -*-
{
"name": "Alternate Ledger",
"version": "1.2.2",
"author": "XCG Consulting",
"category": 'Accounting',
"description": '''Allow the creation of new accounting ledgers that store
separate transactions.''',
'website': 'http://www.openerp-experts.com',
'init_xml': [],
"depends": [
'base',
'account_streamline',
],
"data": [
'security/ir.model.access.csv',
'views/account_view.xml',
'views/ledger_type.xml',
'views/menu.xml',
'views/account_journal.xml',
],
'js': [
'static/src/js/account_move_line_alternate_quickadd.js',
],
'qweb': [
'static/src/xml/account_move_line_alternate_quickadd.xml',
],
'installable': True,
'active': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
<commit_msg>Change version to 1.2.3 (dev)<commit_after>
|
# -*- coding: utf-8 -*-
{
"name": "Alternate Ledger",
"version": "1.2.3",
"author": "XCG Consulting",
"category": 'Accounting',
"description": '''Allow the creation of new accounting ledgers that store
separate transactions.''',
'website': 'http://www.openerp-experts.com',
'init_xml': [],
"depends": [
'base',
'account_streamline',
],
"data": [
'security/ir.model.access.csv',
'views/account_view.xml',
'views/ledger_type.xml',
'views/menu.xml',
'views/account_journal.xml',
],
'js': [
'static/src/js/account_move_line_alternate_quickadd.js',
],
'qweb': [
'static/src/xml/account_move_line_alternate_quickadd.xml',
],
'installable': True,
'active': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
# -*- coding: utf-8 -*-
{
"name": "Alternate Ledger",
"version": "1.2.2",
"author": "XCG Consulting",
"category": 'Accounting',
"description": '''Allow the creation of new accounting ledgers that store
separate transactions.''',
'website': 'http://www.openerp-experts.com',
'init_xml': [],
"depends": [
'base',
'account_streamline',
],
"data": [
'security/ir.model.access.csv',
'views/account_view.xml',
'views/ledger_type.xml',
'views/menu.xml',
'views/account_journal.xml',
],
'js': [
'static/src/js/account_move_line_alternate_quickadd.js',
],
'qweb': [
'static/src/xml/account_move_line_alternate_quickadd.xml',
],
'installable': True,
'active': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
Change version to 1.2.3 (dev)
# -*- coding: utf-8 -*-
{
"name": "Alternate Ledger",
"version": "1.2.3",
"author": "XCG Consulting",
"category": 'Accounting',
"description": '''Allow the creation of new accounting ledgers that store
separate transactions.''',
'website': 'http://www.openerp-experts.com',
'init_xml': [],
"depends": [
'base',
'account_streamline',
],
"data": [
'security/ir.model.access.csv',
'views/account_view.xml',
'views/ledger_type.xml',
'views/menu.xml',
'views/account_journal.xml',
],
'js': [
'static/src/js/account_move_line_alternate_quickadd.js',
],
'qweb': [
'static/src/xml/account_move_line_alternate_quickadd.xml',
],
'installable': True,
'active': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
<commit_before># -*- coding: utf-8 -*-
{
"name": "Alternate Ledger",
"version": "1.2.2",
"author": "XCG Consulting",
"category": 'Accounting',
"description": '''Allow the creation of new accounting ledgers that store
separate transactions.''',
'website': 'http://www.openerp-experts.com',
'init_xml': [],
"depends": [
'base',
'account_streamline',
],
"data": [
'security/ir.model.access.csv',
'views/account_view.xml',
'views/ledger_type.xml',
'views/menu.xml',
'views/account_journal.xml',
],
'js': [
'static/src/js/account_move_line_alternate_quickadd.js',
],
'qweb': [
'static/src/xml/account_move_line_alternate_quickadd.xml',
],
'installable': True,
'active': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
<commit_msg>Change version to 1.2.3 (dev)<commit_after># -*- coding: utf-8 -*-
{
"name": "Alternate Ledger",
"version": "1.2.3",
"author": "XCG Consulting",
"category": 'Accounting',
"description": '''Allow the creation of new accounting ledgers that store
separate transactions.''',
'website': 'http://www.openerp-experts.com',
'init_xml': [],
"depends": [
'base',
'account_streamline',
],
"data": [
'security/ir.model.access.csv',
'views/account_view.xml',
'views/ledger_type.xml',
'views/menu.xml',
'views/account_journal.xml',
],
'js': [
'static/src/js/account_move_line_alternate_quickadd.js',
],
'qweb': [
'static/src/xml/account_move_line_alternate_quickadd.xml',
],
'installable': True,
'active': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
6f4b4a9e54e527292d04d0a0f50ce6e02e08750d
|
pymc/__init__.py
|
pymc/__init__.py
|
__version__ = "3.0"
import matplotlib
matplotlib.use('Agg')
from .core import *
from .distributions import *
from .math import *
from .trace import *
from .sample import *
from .step_methods import *
from .tuning import *
from .debug import *
from .diagnostics import *
from .plots import *
from .tests import test
from . import glm
from .data import *
|
__version__ = "3.0"
from .core import *
from .distributions import *
from .math import *
from .trace import *
from .sample import *
from .step_methods import *
from .tuning import *
from .debug import *
from .diagnostics import *
from .plots import *
from .tests import test
from . import glm
from .data import *
|
Revert "Experimenting with import order"
|
Revert "Experimenting with import order"
This reverts commit c407a00, which selected the Agg backend for
Matplotlib in pymc/__init__.py, overriding the effects of 40a8070. These
changes were unnecessary to fix the non-interactive display errors in the
Travis tests and prevent interactive plotting unless the user has
selected a backend prior to importing pymc.
|
Python
|
apache-2.0
|
MCGallaspy/pymc3,superbobry/pymc3,JesseLivezey/pymc3,wanderer2/pymc3,JesseLivezey/pymc3,kmather73/pymc3,MichielCottaar/pymc3,kmather73/pymc3,kyleam/pymc3,dhiapet/PyMC3,tyarkoni/pymc3,LoLab-VU/pymc,tyarkoni/pymc3,clk8908/pymc3,superbobry/pymc3,jameshensman/pymc3,arunlodhi/pymc3,Anjum48/pymc3,MCGallaspy/pymc3,wanderer2/pymc3,Anjum48/pymc3,CVML/pymc3,hothHowler/pymc3,evidation-health/pymc3,dhiapet/PyMC3,arunlodhi/pymc3,evidation-health/pymc3,CVML/pymc3,clk8908/pymc3,kyleam/pymc3,jameshensman/pymc3,MichielCottaar/pymc3,hothHowler/pymc3,LoLab-VU/pymc
|
__version__ = "3.0"
import matplotlib
matplotlib.use('Agg')
from .core import *
from .distributions import *
from .math import *
from .trace import *
from .sample import *
from .step_methods import *
from .tuning import *
from .debug import *
from .diagnostics import *
from .plots import *
from .tests import test
from . import glm
from .data import *
Revert "Experimenting with import order"
This reverts commit c407a00, which selected the Agg backend for
Matplotlib in pymc/__init__.py, overriding the effects of 40a8070. These
changes were unnecessary to fix the non-interactive display errors in the
Travis tests and prevent interactive plotting unless the user has
selected a backend prior to importing pymc.
|
__version__ = "3.0"
from .core import *
from .distributions import *
from .math import *
from .trace import *
from .sample import *
from .step_methods import *
from .tuning import *
from .debug import *
from .diagnostics import *
from .plots import *
from .tests import test
from . import glm
from .data import *
|
<commit_before>__version__ = "3.0"
import matplotlib
matplotlib.use('Agg')
from .core import *
from .distributions import *
from .math import *
from .trace import *
from .sample import *
from .step_methods import *
from .tuning import *
from .debug import *
from .diagnostics import *
from .plots import *
from .tests import test
from . import glm
from .data import *
<commit_msg>Revert "Experimenting with import order"
This reverts commit c407a00, which selected the Agg backend for
Matplotlib in pymc/__init__.py, overriding the effects of 40a8070. These
changes were unnecessary to fix the non-interactive display errors in the
Travis tests and prevent interactive plotting unless the user has
selected a backend prior to importing pymc.<commit_after>
|
__version__ = "3.0"
from .core import *
from .distributions import *
from .math import *
from .trace import *
from .sample import *
from .step_methods import *
from .tuning import *
from .debug import *
from .diagnostics import *
from .plots import *
from .tests import test
from . import glm
from .data import *
|
__version__ = "3.0"
import matplotlib
matplotlib.use('Agg')
from .core import *
from .distributions import *
from .math import *
from .trace import *
from .sample import *
from .step_methods import *
from .tuning import *
from .debug import *
from .diagnostics import *
from .plots import *
from .tests import test
from . import glm
from .data import *
Revert "Experimenting with import order"
This reverts commit c407a00, which selected the Agg backend for
Matplotlib in pymc/__init__.py, overriding the effects of 40a8070. These
changes were unnecessary to fix the non-interactive display errors in the
Travis tests and prevent interactive plotting unless the user has
selected a backend prior to importing pymc.
__version__ = "3.0"
from .core import *
from .distributions import *
from .math import *
from .trace import *
from .sample import *
from .step_methods import *
from .tuning import *
from .debug import *
from .diagnostics import *
from .plots import *
from .tests import test
from . import glm
from .data import *
|
<commit_before>__version__ = "3.0"
import matplotlib
matplotlib.use('Agg')
from .core import *
from .distributions import *
from .math import *
from .trace import *
from .sample import *
from .step_methods import *
from .tuning import *
from .debug import *
from .diagnostics import *
from .plots import *
from .tests import test
from . import glm
from .data import *
<commit_msg>Revert "Experimenting with import order"
This reverts commit c407a00, which selected the Agg backend for
Matplotlib in pymc/__init__.py, overriding the effects of 40a8070. These
changes were unnecessary to fix the non-interactive display errors in the
Travis tests and prevent interactive plotting unless the user has
selected a backend prior to importing pymc.<commit_after>__version__ = "3.0"
from .core import *
from .distributions import *
from .math import *
from .trace import *
from .sample import *
from .step_methods import *
from .tuning import *
from .debug import *
from .diagnostics import *
from .plots import *
from .tests import test
from . import glm
from .data import *
|
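The revert above restores the division of responsibility the message describes: the user, not pymc, selects the Matplotlib backend before import. A minimal sketch of that usage, assuming Matplotlib and this pymc branch are installed:

import matplotlib

# Choose a non-interactive backend explicitly, e.g. for CI runs ...
matplotlib.use("Agg")

# ... and only then import pymc; after the revert it respects this choice.
import pymc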
69b0e1c60eafff596ebb494a7e79a22c6bea374b
|
polling_stations/apps/data_collection/management/commands/import_hart.py
|
polling_stations/apps/data_collection/management/commands/import_hart.py
|
from data_collection.management.commands import BaseXpressDemocracyClubCsvImporter
class Command(BaseXpressDemocracyClubCsvImporter):
council_id = 'E07000089'
addresses_name = 'parl.2017-06-08/Version 1/Hart DC General Election polling place 120517.TSV'
stations_name = 'parl.2017-06-08/Version 1/Hart DC General Election polling place 120517.TSV'
elections = ['parl.2017-06-08']
csv_delimiter = '\t'
|
from data_collection.management.commands import BaseXpressDemocracyClubCsvImporter
class Command(BaseXpressDemocracyClubCsvImporter):
council_id = 'E07000089'
addresses_name = 'parl.2017-06-08/Version 1/Hart DC General Election polling place 120517.TSV'
stations_name = 'parl.2017-06-08/Version 1/Hart DC General Election polling place 120517.TSV'
elections = ['parl.2017-06-08']
csv_delimiter = '\t'
def station_record_to_dict(self, record):
if record.polling_place_id == '1914':
record = record._replace(polling_place_easting = '479224')
record = record._replace(polling_place_northing = '154016')
return super().station_record_to_dict(record)
|
Fix dodgy point in Hart
|
Fix dodgy point in Hart
|
Python
|
bsd-3-clause
|
DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,chris48s/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations
|
from data_collection.management.commands import BaseXpressDemocracyClubCsvImporter
class Command(BaseXpressDemocracyClubCsvImporter):
council_id = 'E07000089'
addresses_name = 'parl.2017-06-08/Version 1/Hart DC General Election polling place 120517.TSV'
stations_name = 'parl.2017-06-08/Version 1/Hart DC General Election polling place 120517.TSV'
elections = ['parl.2017-06-08']
csv_delimiter = '\t'
Fix dodgy point in Hart
|
from data_collection.management.commands import BaseXpressDemocracyClubCsvImporter
class Command(BaseXpressDemocracyClubCsvImporter):
council_id = 'E07000089'
addresses_name = 'parl.2017-06-08/Version 1/Hart DC General Election polling place 120517.TSV'
stations_name = 'parl.2017-06-08/Version 1/Hart DC General Election polling place 120517.TSV'
elections = ['parl.2017-06-08']
csv_delimiter = '\t'
def station_record_to_dict(self, record):
if record.polling_place_id == '1914':
record = record._replace(polling_place_easting = '479224')
record = record._replace(polling_place_northing = '154016')
return super().station_record_to_dict(record)
|
<commit_before>from data_collection.management.commands import BaseXpressDemocracyClubCsvImporter
class Command(BaseXpressDemocracyClubCsvImporter):
council_id = 'E07000089'
addresses_name = 'parl.2017-06-08/Version 1/Hart DC General Election polling place 120517.TSV'
stations_name = 'parl.2017-06-08/Version 1/Hart DC General Election polling place 120517.TSV'
elections = ['parl.2017-06-08']
csv_delimiter = '\t'
<commit_msg>Fix dodgy point in Hart<commit_after>
|
from data_collection.management.commands import BaseXpressDemocracyClubCsvImporter
class Command(BaseXpressDemocracyClubCsvImporter):
council_id = 'E07000089'
addresses_name = 'parl.2017-06-08/Version 1/Hart DC General Election polling place 120517.TSV'
stations_name = 'parl.2017-06-08/Version 1/Hart DC General Election polling place 120517.TSV'
elections = ['parl.2017-06-08']
csv_delimiter = '\t'
def station_record_to_dict(self, record):
if record.polling_place_id == '1914':
record = record._replace(polling_place_easting = '479224')
record = record._replace(polling_place_northing = '154016')
return super().station_record_to_dict(record)
|
from data_collection.management.commands import BaseXpressDemocracyClubCsvImporter
class Command(BaseXpressDemocracyClubCsvImporter):
council_id = 'E07000089'
addresses_name = 'parl.2017-06-08/Version 1/Hart DC General Election polling place 120517.TSV'
stations_name = 'parl.2017-06-08/Version 1/Hart DC General Election polling place 120517.TSV'
elections = ['parl.2017-06-08']
csv_delimiter = '\t'
Fix dodgy point in Hart
from data_collection.management.commands import BaseXpressDemocracyClubCsvImporter
class Command(BaseXpressDemocracyClubCsvImporter):
council_id = 'E07000089'
addresses_name = 'parl.2017-06-08/Version 1/Hart DC General Election polling place 120517.TSV'
stations_name = 'parl.2017-06-08/Version 1/Hart DC General Election polling place 120517.TSV'
elections = ['parl.2017-06-08']
csv_delimiter = '\t'
def station_record_to_dict(self, record):
if record.polling_place_id == '1914':
record = record._replace(polling_place_easting = '479224')
record = record._replace(polling_place_northing = '154016')
return super().station_record_to_dict(record)
|
<commit_before>from data_collection.management.commands import BaseXpressDemocracyClubCsvImporter
class Command(BaseXpressDemocracyClubCsvImporter):
council_id = 'E07000089'
addresses_name = 'parl.2017-06-08/Version 1/Hart DC General Election polling place 120517.TSV'
stations_name = 'parl.2017-06-08/Version 1/Hart DC General Election polling place 120517.TSV'
elections = ['parl.2017-06-08']
csv_delimiter = '\t'
<commit_msg>Fix dodgy point in Hart<commit_after>from data_collection.management.commands import BaseXpressDemocracyClubCsvImporter
class Command(BaseXpressDemocracyClubCsvImporter):
council_id = 'E07000089'
addresses_name = 'parl.2017-06-08/Version 1/Hart DC General Election polling place 120517.TSV'
stations_name = 'parl.2017-06-08/Version 1/Hart DC General Election polling place 120517.TSV'
elections = ['parl.2017-06-08']
csv_delimiter = '\t'
def station_record_to_dict(self, record):
if record.polling_place_id == '1914':
record = record._replace(polling_place_easting = '479224')
record = record._replace(polling_place_northing = '154016')
return super().station_record_to_dict(record)
|
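The Hart override above uses namedtuple._replace because the importer's parsed rows are immutable tuples; a minimal sketch of the pattern with a hypothetical record type (Station stands in for the importer's real row class):

from collections import namedtuple

# Hypothetical row type standing in for the parsed TSV record.
Station = namedtuple(
    "Station",
    ["polling_place_id", "polling_place_easting", "polling_place_northing"],
)

record = Station("1914", "0", "0")
if record.polling_place_id == "1914":
    # _replace returns a *new* tuple, so the result must be reassigned.
    record = record._replace(
        polling_place_easting="479224",
        polling_place_northing="154016",
    )

assert record.polling_place_easting == "479224"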
c24ecf7387f962415fcb03cd0dca9a136d1eda4e
|
cesium/setup.py
|
cesium/setup.py
|
def configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('cesium', parent_package, top_path)
config.add_subpackage('science_features')
config.add_data_files('cesium.yaml.example')
config.add_data_dir('data')
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
config = configuration(top_path='').todict()
setup(**config)
|
def configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('cesium', parent_package, top_path)
config.add_subpackage('science_features')
config.add_data_files('cesium.yaml.example')
config.add_data_dir('tests')
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
config = configuration(top_path='').todict()
setup(**config)
|
Add test data to cesium package
|
Add test data to cesium package
|
Python
|
bsd-3-clause
|
acrellin/mltsp,acrellin/mltsp,mltsp/mltsp,bnaul/mltsp,bnaul/mltsp,bnaul/mltsp,mltsp/mltsp,bnaul/mltsp,acrellin/mltsp,bnaul/mltsp,acrellin/mltsp,mltsp/mltsp,mltsp/mltsp,acrellin/mltsp,mltsp/mltsp,mltsp/mltsp,bnaul/mltsp,acrellin/mltsp
|
def configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('cesium', parent_package, top_path)
config.add_subpackage('science_features')
config.add_data_files('cesium.yaml.example')
config.add_data_dir('data')
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
config = configuration(top_path='').todict()
setup(**config)
Add test data to cesium package
|
def configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('cesium', parent_package, top_path)
config.add_subpackage('science_features')
config.add_data_files('cesium.yaml.example')
config.add_data_dir('tests')
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
config = configuration(top_path='').todict()
setup(**config)
|
<commit_before>def configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('cesium', parent_package, top_path)
config.add_subpackage('science_features')
config.add_data_files('cesium.yaml.example')
config.add_data_dir('data')
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
config = configuration(top_path='').todict()
setup(**config)
<commit_msg>Add test data to cesium package<commit_after>
|
def configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('cesium', parent_package, top_path)
config.add_subpackage('science_features')
config.add_data_files('cesium.yaml.example')
config.add_data_dir('tests')
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
config = configuration(top_path='').todict()
setup(**config)
|
def configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('cesium', parent_package, top_path)
config.add_subpackage('science_features')
config.add_data_files('cesium.yaml.example')
config.add_data_dir('data')
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
config = configuration(top_path='').todict()
setup(**config)
Add test data to cesium packagedef configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('cesium', parent_package, top_path)
config.add_subpackage('science_features')
config.add_data_files('cesium.yaml.example')
config.add_data_dir('tests')
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
config = configuration(top_path='').todict()
setup(**config)
|
<commit_before>def configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('cesium', parent_package, top_path)
config.add_subpackage('science_features')
config.add_data_files('cesium.yaml.example')
config.add_data_dir('data')
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
config = configuration(top_path='').todict()
setup(**config)
<commit_msg>Add test data to cesium package<commit_after>def configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('cesium', parent_package, top_path)
config.add_subpackage('science_features')
config.add_data_files('cesium.yaml.example')
config.add_data_dir('tests')
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
config = configuration(top_path='').todict()
setup(**config)
|
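
The cesium record swaps the directory shipped with the package from data to tests inside a numpy.distutils Configuration. numpy.distutils is deprecated and absent on Python 3.12+, so, as a hedged aside, the rough equivalent of add_data_files / add_data_dir under plain setuptools would look like this; the distribution name is hypothetical:

from setuptools import setup, find_packages

setup(
    name='cesium-demo',  # hypothetical distribution name
    packages=find_packages(),
    # Rough equivalent of add_data_files / add_data_dir: non-Python
    # files are listed per package and included in the built wheel.
    package_data={'cesium': ['cesium.yaml.example', 'tests/*']},
)
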
1ac2e2b03048cf89c8df36c838130212f4ac63d3
|
server/src/weblab/__init__.py
|
server/src/weblab/__init__.py
|
import os
import json
from .util import data_filename
version_filename = data_filename(os.path.join("weblab", "version.json"))
base_version = "5.0"
__version__ = base_version
if version_filename:
try:
git_version = json.loads(open(version_filename).read())
except:
git_version = None
if git_version and 'version' in git_version:
__version__ = "{0} - {1}".format(base_version, git_version.get('version'))
__ALL__ = []
|
import os
import json
from .util import data_filename
version_filename = data_filename(os.path.join("weblab", "version.json"))
base_version = "5.0"
__version__ = base_version
if version_filename:
try:
git_version = json.loads(open(version_filename).read())
except:
git_version = None
if git_version and 'version' in git_version:
__version__ = "{0} - {1} ({2})".format(base_version, git_version.get('version'), git_version.get('date'))
__ALL__ = []
|
Add date to the version
|
Add date to the version
|
Python
|
bsd-2-clause
|
morelab/weblabdeusto,porduna/weblabdeusto,morelab/weblabdeusto,morelab/weblabdeusto,morelab/weblabdeusto,weblabdeusto/weblabdeusto,weblabdeusto/weblabdeusto,morelab/weblabdeusto,weblabdeusto/weblabdeusto,porduna/weblabdeusto,porduna/weblabdeusto,porduna/weblabdeusto,morelab/weblabdeusto,weblabdeusto/weblabdeusto,porduna/weblabdeusto,porduna/weblabdeusto,weblabdeusto/weblabdeusto,porduna/weblabdeusto,morelab/weblabdeusto,morelab/weblabdeusto,weblabdeusto/weblabdeusto,porduna/weblabdeusto,weblabdeusto/weblabdeusto,morelab/weblabdeusto,weblabdeusto/weblabdeusto,weblabdeusto/weblabdeusto,porduna/weblabdeusto,porduna/weblabdeusto,morelab/weblabdeusto,weblabdeusto/weblabdeusto
|
import os
import json
from .util import data_filename
version_filename = data_filename(os.path.join("weblab", "version.json"))
base_version = "5.0"
__version__ = base_version
if version_filename:
try:
git_version = json.loads(open(version_filename).read())
except:
git_version = None
if git_version and 'version' in git_version:
__version__ = "{0} - {1}".format(base_version, git_version.get('version'))
__ALL__ = []
Add date to the version
|
import os
import json
from .util import data_filename
version_filename = data_filename(os.path.join("weblab", "version.json"))
base_version = "5.0"
__version__ = base_version
if version_filename:
try:
git_version = json.loads(open(version_filename).read())
except:
git_version = None
if git_version and 'version' in git_version:
__version__ = "{0} - {1} ({2})".format(base_version, git_version.get('version'), git_version.get('date'))
__ALL__ = []
|
<commit_before>import os
import json
from .util import data_filename
version_filename = data_filename(os.path.join("weblab", "version.json"))
base_version = "5.0"
__version__ = base_version
if version_filename:
try:
git_version = json.loads(open(version_filename).read())
except:
git_version = None
if git_version and 'version' in git_version:
__version__ = "{0} - {1}".format(base_version, git_version.get('version'))
__ALL__ = []
<commit_msg>Add date to the version<commit_after>
|
import os
import json
from .util import data_filename
version_filename = data_filename(os.path.join("weblab", "version.json"))
base_version = "5.0"
__version__ = base_version
if version_filename:
try:
git_version = json.loads(open(version_filename).read())
except:
git_version = None
if git_version and 'version' in git_version:
__version__ = "{0} - {1} ({2})".format(base_version, git_version.get('version'), git_version.get('date'))
__ALL__ = []
|
import os
import json
from .util import data_filename
version_filename = data_filename(os.path.join("weblab", "version.json"))
base_version = "5.0"
__version__ = base_version
if version_filename:
try:
git_version = json.loads(open(version_filename).read())
except:
git_version = None
if git_version and 'version' in git_version:
__version__ = "{0} - {1}".format(base_version, git_version.get('version'))
__ALL__ = []
Add date to the versionimport os
import json
from .util import data_filename
version_filename = data_filename(os.path.join("weblab", "version.json"))
base_version = "5.0"
__version__ = base_version
if version_filename:
try:
git_version = json.loads(open(version_filename).read())
except:
git_version = None
if git_version and 'version' in git_version:
__version__ = "{0} - {1} ({2})".format(base_version, git_version.get('version'), git_version.get('date'))
__ALL__ = []
|
<commit_before>import os
import json
from .util import data_filename
version_filename = data_filename(os.path.join("weblab", "version.json"))
base_version = "5.0"
__version__ = base_version
if version_filename:
try:
git_version = json.loads(open(version_filename).read())
except:
git_version = None
if git_version and 'version' in git_version:
__version__ = "{0} - {1}".format(base_version, git_version.get('version'))
__ALL__ = []
<commit_msg>Add date to the version<commit_after>import os
import json
from .util import data_filename
version_filename = data_filename(os.path.join("weblab", "version.json"))
base_version = "5.0"
__version__ = base_version
if version_filename:
try:
git_version = json.loads(open(version_filename).read())
except:
git_version = None
if git_version and 'version' in git_version:
__version__ = "{0} - {1} ({2})".format(base_version, git_version.get('version'), git_version.get('date'))
__ALL__ = []
|
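
The weblab record composes __version__ from a base string plus optional git metadata, swallowing any error while reading the JSON file. A minimal sketch of the same pattern with the fallback cases made explicit; the file name here is illustrative:

import json

BASE_VERSION = '5.0'

def full_version(path='version.json'):
    # Missing, unreadable, or malformed metadata all degrade to the
    # bare base version instead of raising.
    try:
        with open(path) as f:
            meta = json.load(f)
    except (OSError, ValueError):
        meta = {}
    if meta.get('version'):
        return '{0} - {1} ({2})'.format(
            BASE_VERSION, meta['version'], meta.get('date'))
    return BASE_VERSION

print(full_version())  # prints '5.0' when no metadata file is present
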
50ab2ed3d8e50e5106dc486e4d20c889d6b18e82
|
spkg/base/package_database.py
|
spkg/base/package_database.py
|
"""
Package database utilities for creating and modifying the database.
"""
from os.path import split, splitext
from json import load
f = open("packages.json")
data = load(f)
g = []
for p in data:
pkg = {
"name": p["name"],
"dependencies": p["dependencies"],
"version": p["version"],
"download": p["download"],
}
g.append(pkg)
from json import dump
from StringIO import StringIO
s = StringIO()
dump(g, s, sort_keys=True, indent=4)
s.seek(0)
s = s.read()
# Remove the trailing space
s = s.replace(" \n", "\n")
f = open("packages.json", "w")
f.write(s)
|
"""
Package database utilities for creating and modifying the database.
"""
from os.path import split, splitext
from json import load
f = open("packages.json")
data = load(f)
g = []
for p in data:
pkg = {
"name": p["name"],
"dependencies": p["dependencies"],
"version": p["version"],
"download": p["download"],
}
g.append(pkg)
from json import dump
from StringIO import StringIO
s = StringIO()
dump(g, s, sort_keys=True, indent=4)
s.seek(0)
s = s.read()
# Remove the trailing space
s = s.replace(" \n", "\n")
f = open("packages.json", "w")
f.write(s)
f.write("\n")
|
Add a new line at the end of the file
|
Add a new line at the end of the file
|
Python
|
bsd-3-clause
|
qsnake/qsnake,qsnake/qsnake
|
"""
Package database utilities for creating and modifying the database.
"""
from os.path import split, splitext
from json import load
f = open("packages.json")
data = load(f)
g = []
for p in data:
pkg = {
"name": p["name"],
"dependencies": p["dependencies"],
"version": p["version"],
"download": p["download"],
}
g.append(pkg)
from json import dump
from StringIO import StringIO
s = StringIO()
dump(g, s, sort_keys=True, indent=4)
s.seek(0)
s = s.read()
# Remove the trailing space
s = s.replace(" \n", "\n")
f = open("packages.json", "w")
f.write(s)
Add a new line at the end of the file
|
"""
Package database utilities for creating and modifying the database.
"""
from os.path import split, splitext
from json import load
f = open("packages.json")
data = load(f)
g = []
for p in data:
pkg = {
"name": p["name"],
"dependencies": p["dependencies"],
"version": p["version"],
"download": p["download"],
}
g.append(pkg)
from json import dump
from StringIO import StringIO
s = StringIO()
dump(g, s, sort_keys=True, indent=4)
s.seek(0)
s = s.read()
# Remove the trailing space
s = s.replace(" \n", "\n")
f = open("packages.json", "w")
f.write(s)
f.write("\n")
|
<commit_before>"""
Package database utilities for creating and modifying the database.
"""
from os.path import split, splitext
from json import load
f = open("packages.json")
data = load(f)
g = []
for p in data:
pkg = {
"name": p["name"],
"dependencies": p["dependencies"],
"version": p["version"],
"download": p["download"],
}
g.append(pkg)
from json import dump
from StringIO import StringIO
s = StringIO()
dump(g, s, sort_keys=True, indent=4)
s.seek(0)
s = s.read()
# Remove the trailing space
s = s.replace(" \n", "\n")
f = open("packages.json", "w")
f.write(s)
<commit_msg>Add a new line at the end of the file<commit_after>
|
"""
Package database utilities for creating and modifying the database.
"""
from os.path import split, splitext
from json import load
f = open("packages.json")
data = load(f)
g = []
for p in data:
pkg = {
"name": p["name"],
"dependencies": p["dependencies"],
"version": p["version"],
"download": p["download"],
}
g.append(pkg)
from json import dump
from StringIO import StringIO
s = StringIO()
dump(g, s, sort_keys=True, indent=4)
s.seek(0)
s = s.read()
# Remove the trailing space
s = s.replace(" \n", "\n")
f = open("packages.json", "w")
f.write(s)
f.write("\n")
|
"""
Package database utilities for creating and modifying the database.
"""
from os.path import split, splitext
from json import load
f = open("packages.json")
data = load(f)
g = []
for p in data:
pkg = {
"name": p["name"],
"dependencies": p["dependencies"],
"version": p["version"],
"download": p["download"],
}
g.append(pkg)
from json import dump
from StringIO import StringIO
s = StringIO()
dump(g, s, sort_keys=True, indent=4)
s.seek(0)
s = s.read()
# Remove the trailing space
s = s.replace(" \n", "\n")
f = open("packages.json", "w")
f.write(s)
Add a new line at the end of the file"""
Package database utilities for creating and modifying the database.
"""
from os.path import split, splitext
from json import load
f = open("packages.json")
data = load(f)
g = []
for p in data:
pkg = {
"name": p["name"],
"dependencies": p["dependencies"],
"version": p["version"],
"download": p["download"],
}
g.append(pkg)
from json import dump
from StringIO import StringIO
s = StringIO()
dump(g, s, sort_keys=True, indent=4)
s.seek(0)
s = s.read()
# Remove the trailing space
s = s.replace(" \n", "\n")
f = open("packages.json", "w")
f.write(s)
f.write("\n")
|
<commit_before>"""
Package database utilities for creating and modifying the database.
"""
from os.path import split, splitext
from json import load
f = open("packages.json")
data = load(f)
g = []
for p in data:
pkg = {
"name": p["name"],
"dependencies": p["dependencies"],
"version": p["version"],
"download": p["download"],
}
g.append(pkg)
from json import dump
from StringIO import StringIO
s = StringIO()
dump(g, s, sort_keys=True, indent=4)
s.seek(0)
s = s.read()
# Remove the trailing space
s = s.replace(" \n", "\n")
f = open("packages.json", "w")
f.write(s)
<commit_msg>Add a new line at the end of the file<commit_after>"""
Package database utilities for creating and modifying the database.
"""
from os.path import split, splitext
from json import load
f = open("packages.json")
data = load(f)
g = []
for p in data:
pkg = {
"name": p["name"],
"dependencies": p["dependencies"],
"version": p["version"],
"download": p["download"],
}
g.append(pkg)
from json import dump
from StringIO import StringIO
s = StringIO()
dump(g, s, sort_keys=True, indent=4)
s.seek(0)
s = s.read()
# Remove the trailing space
s = s.replace(" \n", "\n")
f = open("packages.json", "w")
f.write(s)
f.write("\n")
|
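
The qsnake record strips trailing spaces from json.dump output by hand and then appends a final newline. On Python 3 the intermediate StringIO and the replace pass can be dropped, since an explicit separators tuple keeps line ends clean. A sketch with hypothetical package data:

import json

data = [{'name': 'demo', 'dependencies': [], 'version': '1.0',
         'download': 'http://example.invalid/demo.tar.gz'}]  # illustrative

with open('packages.json', 'w') as f:
    # separators=(',', ': ') emits no trailing spaces, so no cleanup
    # pass over the serialized text is needed.
    json.dump(data, f, sort_keys=True, indent=4, separators=(',', ': '))
    f.write('\n')  # the final newline the commit adds
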
766ea05836544b808cd2c346873d9e4f60c858a1
|
ping/tests/test_ping.py
|
ping/tests/test_ping.py
|
import pytest
import mock
from datadog_checks.checks import AgentCheck
from datadog_checks.ping import PingCheck
from datadog_checks.errors import CheckException
def mock_exec_ping():
return """FAKEPING 127.0.0.1 (127.0.0.1): 56 data bytes
64 bytes from 127.0.0.1: icmp_seq=0 ttl=64 time=0.093 ms
--- 127.0.0.1 ping statistics ---
1 packets transmitted, 1 packets received, 0.0% packet loss
round-trip min/avg/max/stddev = 0.093/0.093/0.093/0.000 ms"""
def test_check(aggregator, instance):
c = PingCheck('ping', {}, {})
# empty instance
instance = {}
with pytest.raises(CheckException):
c.check(instance)
# only name
with pytest.raises(CheckException):
c.check({'name': 'Datadog'})
test_check
# good check
instance = {
'host': '127.0.0.1',
'name': "Localhost"
}
with mock.patch.object(c, "_exec_ping", return_value=mock_exec_ping()):
c.check(instance)
aggregator.assert_service_check('network.ping.can_connect', AgentCheck.OK)
|
import pytest
import mock
from datadog_checks.checks import AgentCheck
from datadog_checks.ping import PingCheck
from datadog_checks.errors import CheckException
def mock_exec_ping():
return """FAKEPING 127.0.0.1 (127.0.0.1): 56 data bytes
64 bytes from 127.0.0.1: icmp_seq=0 ttl=64 time=0.093 ms
--- 127.0.0.1 ping statistics ---
1 packets transmitted, 1 packets received, 0.0% packet loss
round-trip min/avg/max/stddev = 0.093/0.093/0.093/0.000 ms"""
def test_check(aggregator, instance):
c = PingCheck('ping', {}, {})
# empty instance
instance = {}
with pytest.raises(CheckException):
c.check(instance)
# only name
with pytest.raises(CheckException):
c.check({'name': 'Datadog'})
test_check
# good check
instance = {
'host': '127.0.0.1',
'name': "Localhost"
}
with mock.patch.object(c, "_exec_ping", return_value=mock_exec_ping()):
c.check(instance)
aggregator.assert_service_check('network.ping.can_connect', AgentCheck.OK)
aggregator.assert_metric('network.ping.can_connect', value=1)
|
Update test to assert metric
|
Update test to assert metric
|
Python
|
bsd-3-clause
|
DataDog/integrations-extras,DataDog/integrations-extras,DataDog/integrations-extras,DataDog/integrations-extras,DataDog/integrations-extras
|
import pytest
import mock
from datadog_checks.checks import AgentCheck
from datadog_checks.ping import PingCheck
from datadog_checks.errors import CheckException
def mock_exec_ping():
return """FAKEPING 127.0.0.1 (127.0.0.1): 56 data bytes
64 bytes from 127.0.0.1: icmp_seq=0 ttl=64 time=0.093 ms
--- 127.0.0.1 ping statistics ---
1 packets transmitted, 1 packets received, 0.0% packet loss
round-trip min/avg/max/stddev = 0.093/0.093/0.093/0.000 ms"""
def test_check(aggregator, instance):
c = PingCheck('ping', {}, {})
# empty instance
instance = {}
with pytest.raises(CheckException):
c.check(instance)
# only name
with pytest.raises(CheckException):
c.check({'name': 'Datadog'})
test_check
# good check
instance = {
'host': '127.0.0.1',
'name': "Localhost"
}
with mock.patch.object(c, "_exec_ping", return_value=mock_exec_ping()):
c.check(instance)
aggregator.assert_service_check('network.ping.can_connect', AgentCheck.OK)
Update test to assert metric
|
import pytest
import mock
from datadog_checks.checks import AgentCheck
from datadog_checks.ping import PingCheck
from datadog_checks.errors import CheckException
def mock_exec_ping():
return """FAKEPING 127.0.0.1 (127.0.0.1): 56 data bytes
64 bytes from 127.0.0.1: icmp_seq=0 ttl=64 time=0.093 ms
--- 127.0.0.1 ping statistics ---
1 packets transmitted, 1 packets received, 0.0% packet loss
round-trip min/avg/max/stddev = 0.093/0.093/0.093/0.000 ms"""
def test_check(aggregator, instance):
c = PingCheck('ping', {}, {})
# empty instance
instance = {}
with pytest.raises(CheckException):
c.check(instance)
# only name
with pytest.raises(CheckException):
c.check({'name': 'Datadog'})
test_check
# good check
instance = {
'host': '127.0.0.1',
'name': "Localhost"
}
with mock.patch.object(c, "_exec_ping", return_value=mock_exec_ping()):
c.check(instance)
aggregator.assert_service_check('network.ping.can_connect', AgentCheck.OK)
aggregator.assert_metric('network.ping.can_connect', value=1)
|
<commit_before>import pytest
import mock
from datadog_checks.checks import AgentCheck
from datadog_checks.ping import PingCheck
from datadog_checks.errors import CheckException
def mock_exec_ping():
return """FAKEPING 127.0.0.1 (127.0.0.1): 56 data bytes
64 bytes from 127.0.0.1: icmp_seq=0 ttl=64 time=0.093 ms
--- 127.0.0.1 ping statistics ---
1 packets transmitted, 1 packets received, 0.0% packet loss
round-trip min/avg/max/stddev = 0.093/0.093/0.093/0.000 ms"""
def test_check(aggregator, instance):
c = PingCheck('ping', {}, {})
# empty instance
instance = {}
with pytest.raises(CheckException):
c.check(instance)
# only name
with pytest.raises(CheckException):
c.check({'name': 'Datadog'})
test_check
# good check
instance = {
'host': '127.0.0.1',
'name': "Localhost"
}
with mock.patch.object(c, "_exec_ping", return_value=mock_exec_ping()):
c.check(instance)
aggregator.assert_service_check('network.ping.can_connect', AgentCheck.OK)
<commit_msg>Update test to assert metric<commit_after>
|
import pytest
import mock
from datadog_checks.checks import AgentCheck
from datadog_checks.ping import PingCheck
from datadog_checks.errors import CheckException
def mock_exec_ping():
return """FAKEPING 127.0.0.1 (127.0.0.1): 56 data bytes
64 bytes from 127.0.0.1: icmp_seq=0 ttl=64 time=0.093 ms
--- 127.0.0.1 ping statistics ---
1 packets transmitted, 1 packets received, 0.0% packet loss
round-trip min/avg/max/stddev = 0.093/0.093/0.093/0.000 ms"""
def test_check(aggregator, instance):
c = PingCheck('ping', {}, {})
# empty instance
instance = {}
with pytest.raises(CheckException):
c.check(instance)
# only name
with pytest.raises(CheckException):
c.check({'name': 'Datadog'})
test_check
# good check
instance = {
'host': '127.0.0.1',
'name': "Localhost"
}
with mock.patch.object(c, "_exec_ping", return_value=mock_exec_ping()):
c.check(instance)
aggregator.assert_service_check('network.ping.can_connect', AgentCheck.OK)
aggregator.assert_metric('network.ping.can_connect', value=1)
|
import pytest
import mock
from datadog_checks.checks import AgentCheck
from datadog_checks.ping import PingCheck
from datadog_checks.errors import CheckException
def mock_exec_ping():
return """FAKEPING 127.0.0.1 (127.0.0.1): 56 data bytes
64 bytes from 127.0.0.1: icmp_seq=0 ttl=64 time=0.093 ms
--- 127.0.0.1 ping statistics ---
1 packets transmitted, 1 packets received, 0.0% packet loss
round-trip min/avg/max/stddev = 0.093/0.093/0.093/0.000 ms"""
def test_check(aggregator, instance):
c = PingCheck('ping', {}, {})
# empty instance
instance = {}
with pytest.raises(CheckException):
c.check(instance)
# only name
with pytest.raises(CheckException):
c.check({'name': 'Datadog'})
test_check
# good check
instance = {
'host': '127.0.0.1',
'name': "Localhost"
}
with mock.patch.object(c, "_exec_ping", return_value=mock_exec_ping()):
c.check(instance)
aggregator.assert_service_check('network.ping.can_connect', AgentCheck.OK)
Update test to assert metricimport pytest
import mock
from datadog_checks.checks import AgentCheck
from datadog_checks.ping import PingCheck
from datadog_checks.errors import CheckException
def mock_exec_ping():
return """FAKEPING 127.0.0.1 (127.0.0.1): 56 data bytes
64 bytes from 127.0.0.1: icmp_seq=0 ttl=64 time=0.093 ms
--- 127.0.0.1 ping statistics ---
1 packets transmitted, 1 packets received, 0.0% packet loss
round-trip min/avg/max/stddev = 0.093/0.093/0.093/0.000 ms"""
def test_check(aggregator, instance):
c = PingCheck('ping', {}, {})
# empty instance
instance = {}
with pytest.raises(CheckException):
c.check(instance)
# only name
with pytest.raises(CheckException):
c.check({'name': 'Datadog'})
test_check
# good check
instance = {
'host': '127.0.0.1',
'name': "Localhost"
}
with mock.patch.object(c, "_exec_ping", return_value=mock_exec_ping()):
c.check(instance)
aggregator.assert_service_check('network.ping.can_connect', AgentCheck.OK)
aggregator.assert_metric('network.ping.can_connect', value=1)
|
<commit_before>import pytest
import mock
from datadog_checks.checks import AgentCheck
from datadog_checks.ping import PingCheck
from datadog_checks.errors import CheckException
def mock_exec_ping():
return """FAKEPING 127.0.0.1 (127.0.0.1): 56 data bytes
64 bytes from 127.0.0.1: icmp_seq=0 ttl=64 time=0.093 ms
--- 127.0.0.1 ping statistics ---
1 packets transmitted, 1 packets received, 0.0% packet loss
round-trip min/avg/max/stddev = 0.093/0.093/0.093/0.000 ms"""
def test_check(aggregator, instance):
c = PingCheck('ping', {}, {})
# empty instance
instance = {}
with pytest.raises(CheckException):
c.check(instance)
# only name
with pytest.raises(CheckException):
c.check({'name': 'Datadog'})
test_check
# good check
instance = {
'host': '127.0.0.1',
'name': "Localhost"
}
with mock.patch.object(c, "_exec_ping", return_value=mock_exec_ping()):
c.check(instance)
aggregator.assert_service_check('network.ping.can_connect', AgentCheck.OK)
<commit_msg>Update test to assert metric<commit_after>import pytest
import mock
from datadog_checks.checks import AgentCheck
from datadog_checks.ping import PingCheck
from datadog_checks.errors import CheckException
def mock_exec_ping():
return """FAKEPING 127.0.0.1 (127.0.0.1): 56 data bytes
64 bytes from 127.0.0.1: icmp_seq=0 ttl=64 time=0.093 ms
--- 127.0.0.1 ping statistics ---
1 packets transmitted, 1 packets received, 0.0% packet loss
round-trip min/avg/max/stddev = 0.093/0.093/0.093/0.000 ms"""
def test_check(aggregator, instance):
c = PingCheck('ping', {}, {})
# empty instance
instance = {}
with pytest.raises(CheckException):
c.check(instance)
# only name
with pytest.raises(CheckException):
c.check({'name': 'Datadog'})
test_check
# good check
instance = {
'host': '127.0.0.1',
'name': "Localhost"
}
with mock.patch.object(c, "_exec_ping", return_value=mock_exec_ping()):
c.check(instance)
aggregator.assert_service_check('network.ping.can_connect', AgentCheck.OK)
aggregator.assert_metric('network.ping.can_connect', value=1)
|
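
The ping test above isolates the check from the network by patching the instance's _exec_ping with mock.patch.object. A stripped-down sketch of the same technique outside the Datadog harness; the class and the canned output are invented:

from unittest import mock

class DemoCheck(object):
    def _exec_ping(self):
        raise RuntimeError('would shell out to ping')

    def can_connect(self):
        return '0% packet loss' in self._exec_ping()

c = DemoCheck()
with mock.patch.object(c, '_exec_ping',
                       return_value='1 packets transmitted, 0% packet loss'):
    # Inside the context the real method never runs, so the assertion
    # is deterministic and needs no network.
    assert c.can_connect()
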
164fe2780554ddca5f66273e11efea37cfaf1368
|
numba/tests/issues/test_issue_204.py
|
numba/tests/issues/test_issue_204.py
|
from numba import autojit, jit
@autojit
def closure_modulo(a, b):
@jit('int32()')
def foo():
return a % b
return foo()
print closure_modulo(100, 48)
|
from numba import autojit, jit
@autojit
def closure_modulo(a, b):
@jit('int32()')
def foo():
return a % b
return foo()
def test_closure_modulo():
assert closure_modulo(100, 48) == 4
if __name__ == '__main__':
test_closure_modulo()
|
Fix tests for python 3
|
Fix tests for python 3
|
Python
|
bsd-2-clause
|
GaZ3ll3/numba,pombredanne/numba,ssarangi/numba,stefanseefeld/numba,GaZ3ll3/numba,shiquanwang/numba,jriehl/numba,gdementen/numba,sklam/numba,jriehl/numba,ssarangi/numba,stonebig/numba,sklam/numba,GaZ3ll3/numba,seibert/numba,numba/numba,gmarkall/numba,sklam/numba,GaZ3ll3/numba,gmarkall/numba,stonebig/numba,seibert/numba,seibert/numba,IntelLabs/numba,pombredanne/numba,seibert/numba,numba/numba,jriehl/numba,IntelLabs/numba,shiquanwang/numba,gmarkall/numba,pitrou/numba,IntelLabs/numba,numba/numba,sklam/numba,pombredanne/numba,gdementen/numba,cpcloud/numba,stuartarchibald/numba,GaZ3ll3/numba,IntelLabs/numba,pombredanne/numba,jriehl/numba,pitrou/numba,stonebig/numba,stefanseefeld/numba,stefanseefeld/numba,stuartarchibald/numba,cpcloud/numba,ssarangi/numba,gdementen/numba,gmarkall/numba,cpcloud/numba,stonebig/numba,numba/numba,numba/numba,stefanseefeld/numba,stefanseefeld/numba,stuartarchibald/numba,ssarangi/numba,ssarangi/numba,gdementen/numba,seibert/numba,cpcloud/numba,pitrou/numba,stuartarchibald/numba,jriehl/numba,pombredanne/numba,pitrou/numba,stonebig/numba,IntelLabs/numba,gdementen/numba,stuartarchibald/numba,shiquanwang/numba,pitrou/numba,cpcloud/numba,gmarkall/numba,sklam/numba
|
from numba import autojit, jit
@autojit
def closure_modulo(a, b):
@jit('int32()')
def foo():
return a % b
return foo()
print closure_modulo(100, 48)
Fix tests for python 3
|
from numba import autojit, jit
@autojit
def closure_modulo(a, b):
@jit('int32()')
def foo():
return a % b
return foo()
def test_closure_modulo():
assert closure_modulo(100, 48) == 4
if __name__ == '__main__':
test_closure_modulo()
|
<commit_before>from numba import autojit, jit
@autojit
def closure_modulo(a, b):
@jit('int32()')
def foo():
return a % b
return foo()
print closure_modulo(100, 48)
<commit_msg>Fix tests for python 3<commit_after>
|
from numba import autojit, jit
@autojit
def closure_modulo(a, b):
@jit('int32()')
def foo():
return a % b
return foo()
def test_closure_modulo():
assert closure_modulo(100, 48) == 4
if __name__ == '__main__':
test_closure_modulo()
|
from numba import autojit, jit
@autojit
def closure_modulo(a, b):
@jit('int32()')
def foo():
return a % b
return foo()
print closure_modulo(100, 48)
Fix tests for python 3from numba import autojit, jit
@autojit
def closure_modulo(a, b):
@jit('int32()')
def foo():
return a % b
return foo()
def test_closure_modulo():
assert closure_modulo(100, 48) == 4
if __name__ == '__main__':
test_closure_modulo()
|
<commit_before>from numba import autojit, jit
@autojit
def closure_modulo(a, b):
@jit('int32()')
def foo():
return a % b
return foo()
print closure_modulo(100, 48)
<commit_msg>Fix tests for python 3<commit_after>from numba import autojit, jit
@autojit
def closure_modulo(a, b):
@jit('int32()')
def foo():
return a % b
return foo()
def test_closure_modulo():
assert closure_modulo(100, 48) == 4
if __name__ == '__main__':
test_closure_modulo()
|
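
The numba record turns a Python 2 print smoke test into a function that pytest can collect and that still runs as a plain script. The shape is general; modulo here stands in for the jitted closure:

def modulo(a, b):  # stand-in for the decorated closure under test
    return a % b

def test_modulo():
    # An assert fails loudly under both pytest and Python 3, unlike a
    # bare Python 2 print statement.
    assert modulo(100, 48) == 4

if __name__ == '__main__':
    test_modulo()
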
422bf9860aacc3babbdd09ab1bd0941455b6ac7b
|
calaccess_campaign_browser/management/commands/dropcalaccesscampaignbrowser.py
|
calaccess_campaign_browser/management/commands/dropcalaccesscampaignbrowser.py
|
from django.db import connection
from calaccess_campaign_browser import models
from calaccess_campaign_browser.management.commands import CalAccessCommand
class Command(CalAccessCommand):
help = "Drops all CAL-ACCESS campaign browser database tables"
def handle(self, *args, **options):
self.header("Dropping CAL-ACCESS campaign browser database tables")
self.cursor = connection.cursor()
# Ignore MySQL "note" warnings so this can be run with DEBUG=True
self.cursor.execute("""SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0;""")
# Loop through the models and drop all the tables
model_list = [
models.Contribution,
models.Expenditure,
models.Summary,
models.Filing,
models.FilingPeriod,
models.Committee,
models.Filer,
models.Cycle,
]
sql = """DROP TABLE IF EXISTS `%s`;"""
for m in model_list:
self.log(" %s" % m.__name__)
self.cursor.execute(sql % m._meta.db_table)
# Revert database to default "note" warning behavior
self.cursor.execute("""SET SQL_NOTES=@OLD_SQL_NOTES;""")
|
from django.db import connection
from calaccess_campaign_browser import models
from calaccess_campaign_browser.management.commands import CalAccessCommand
class Command(CalAccessCommand):
help = "Drops all CAL-ACCESS campaign browser database tables"
def handle(self, *args, **options):
self.header("Dropping CAL-ACCESS campaign browser database tables")
self.cursor = connection.cursor()
# Ignore MySQL "note" warnings so this can be run with DEBUG=True
self.cursor.execute("""SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0;""")
# Loop through the models and drop all the tables
model_list = [
models.Contribution,
models.Expenditure,
models.Summary,
models.Filing,
models.FilingPeriod,
models.Committee,
models.Filer,
models.Cycle,
models.Election,
models.Office,
models.Candidate,
models.Proposition,
models.PropositionFiler,
]
sql = """DROP TABLE IF EXISTS `%s`;"""
for m in model_list:
self.log(" %s" % m.__name__)
self.cursor.execute(sql % m._meta.db_table)
# Revert database to default "note" warning behavior
self.cursor.execute("""SET SQL_NOTES=@OLD_SQL_NOTES;""")
|
Add scraper models to drop command
|
Add scraper models to drop command
|
Python
|
mit
|
california-civic-data-coalition/django-calaccess-campaign-browser,myersjustinc/django-calaccess-campaign-browser,dwillis/django-calaccess-campaign-browser,dwillis/django-calaccess-campaign-browser,california-civic-data-coalition/django-calaccess-campaign-browser,myersjustinc/django-calaccess-campaign-browser
|
from django.db import connection
from calaccess_campaign_browser import models
from calaccess_campaign_browser.management.commands import CalAccessCommand
class Command(CalAccessCommand):
help = "Drops all CAL-ACCESS campaign browser database tables"
def handle(self, *args, **options):
self.header("Dropping CAL-ACCESS campaign browser database tables")
self.cursor = connection.cursor()
# Ignore MySQL "note" warnings so this can be run with DEBUG=True
self.cursor.execute("""SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0;""")
# Loop through the models and drop all the tables
model_list = [
models.Contribution,
models.Expenditure,
models.Summary,
models.Filing,
models.FilingPeriod,
models.Committee,
models.Filer,
models.Cycle,
]
sql = """DROP TABLE IF EXISTS `%s`;"""
for m in model_list:
self.log(" %s" % m.__name__)
self.cursor.execute(sql % m._meta.db_table)
# Revert database to default "note" warning behavior
self.cursor.execute("""SET SQL_NOTES=@OLD_SQL_NOTES;""")
Add scraper models to drop command
|
from django.db import connection
from calaccess_campaign_browser import models
from calaccess_campaign_browser.management.commands import CalAccessCommand
class Command(CalAccessCommand):
help = "Drops all CAL-ACCESS campaign browser database tables"
def handle(self, *args, **options):
self.header("Dropping CAL-ACCESS campaign browser database tables")
self.cursor = connection.cursor()
# Ignore MySQL "note" warnings so this can be run with DEBUG=True
self.cursor.execute("""SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0;""")
# Loop through the models and drop all the tables
model_list = [
models.Contribution,
models.Expenditure,
models.Summary,
models.Filing,
models.FilingPeriod,
models.Committee,
models.Filer,
models.Cycle,
models.Election,
models.Office,
models.Candidate,
models.Proposition,
models.PropositionFiler,
]
sql = """DROP TABLE IF EXISTS `%s`;"""
for m in model_list:
self.log(" %s" % m.__name__)
self.cursor.execute(sql % m._meta.db_table)
# Revert database to default "note" warning behavior
self.cursor.execute("""SET SQL_NOTES=@OLD_SQL_NOTES;""")
|
<commit_before>from django.db import connection
from calaccess_campaign_browser import models
from calaccess_campaign_browser.management.commands import CalAccessCommand
class Command(CalAccessCommand):
help = "Drops all CAL-ACCESS campaign browser database tables"
def handle(self, *args, **options):
self.header("Dropping CAL-ACCESS campaign browser database tables")
self.cursor = connection.cursor()
# Ignore MySQL "note" warnings so this can be run with DEBUG=True
self.cursor.execute("""SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0;""")
# Loop through the models and drop all the tables
model_list = [
models.Contribution,
models.Expenditure,
models.Summary,
models.Filing,
models.FilingPeriod,
models.Committee,
models.Filer,
models.Cycle,
]
sql = """DROP TABLE IF EXISTS `%s`;"""
for m in model_list:
self.log(" %s" % m.__name__)
self.cursor.execute(sql % m._meta.db_table)
# Revert database to default "note" warning behavior
self.cursor.execute("""SET SQL_NOTES=@OLD_SQL_NOTES;""")
<commit_msg>Add scraper models to drop command<commit_after>
|
from django.db import connection
from calaccess_campaign_browser import models
from calaccess_campaign_browser.management.commands import CalAccessCommand
class Command(CalAccessCommand):
help = "Drops all CAL-ACCESS campaign browser database tables"
def handle(self, *args, **options):
self.header("Dropping CAL-ACCESS campaign browser database tables")
self.cursor = connection.cursor()
# Ignore MySQL "note" warnings so this can be run with DEBUG=True
self.cursor.execute("""SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0;""")
# Loop through the models and drop all the tables
model_list = [
models.Contribution,
models.Expenditure,
models.Summary,
models.Filing,
models.FilingPeriod,
models.Committee,
models.Filer,
models.Cycle,
models.Election,
models.Office,
models.Candidate,
models.Proposition,
models.PropositionFiler,
]
sql = """DROP TABLE IF EXISTS `%s`;"""
for m in model_list:
self.log(" %s" % m.__name__)
self.cursor.execute(sql % m._meta.db_table)
# Revert database to default "note" warning behavior
self.cursor.execute("""SET SQL_NOTES=@OLD_SQL_NOTES;""")
|
from django.db import connection
from calaccess_campaign_browser import models
from calaccess_campaign_browser.management.commands import CalAccessCommand
class Command(CalAccessCommand):
help = "Drops all CAL-ACCESS campaign browser database tables"
def handle(self, *args, **options):
self.header("Dropping CAL-ACCESS campaign browser database tables")
self.cursor = connection.cursor()
# Ignore MySQL "note" warnings so this can be run with DEBUG=True
self.cursor.execute("""SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0;""")
# Loop through the models and drop all the tables
model_list = [
models.Contribution,
models.Expenditure,
models.Summary,
models.Filing,
models.FilingPeriod,
models.Committee,
models.Filer,
models.Cycle,
]
sql = """DROP TABLE IF EXISTS `%s`;"""
for m in model_list:
self.log(" %s" % m.__name__)
self.cursor.execute(sql % m._meta.db_table)
# Revert database to default "note" warning behavior
self.cursor.execute("""SET SQL_NOTES=@OLD_SQL_NOTES;""")
Add scraper models to drop commandfrom django.db import connection
from calaccess_campaign_browser import models
from calaccess_campaign_browser.management.commands import CalAccessCommand
class Command(CalAccessCommand):
help = "Drops all CAL-ACCESS campaign browser database tables"
def handle(self, *args, **options):
self.header("Dropping CAL-ACCESS campaign browser database tables")
self.cursor = connection.cursor()
# Ignore MySQL "note" warnings so this can be run with DEBUG=True
self.cursor.execute("""SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0;""")
# Loop through the models and drop all the tables
model_list = [
models.Contribution,
models.Expenditure,
models.Summary,
models.Filing,
models.FilingPeriod,
models.Committee,
models.Filer,
models.Cycle,
models.Election,
models.Office,
models.Candidate,
models.Proposition,
models.PropositionFiler,
]
sql = """DROP TABLE IF EXISTS `%s`;"""
for m in model_list:
self.log(" %s" % m.__name__)
self.cursor.execute(sql % m._meta.db_table)
# Revert database to default "note" warning behavior
self.cursor.execute("""SET SQL_NOTES=@OLD_SQL_NOTES;""")
|
<commit_before>from django.db import connection
from calaccess_campaign_browser import models
from calaccess_campaign_browser.management.commands import CalAccessCommand
class Command(CalAccessCommand):
help = "Drops all CAL-ACCESS campaign browser database tables"
def handle(self, *args, **options):
self.header("Dropping CAL-ACCESS campaign browser database tables")
self.cursor = connection.cursor()
# Ignore MySQL "note" warnings so this can be run with DEBUG=True
self.cursor.execute("""SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0;""")
# Loop through the models and drop all the tables
model_list = [
models.Contribution,
models.Expenditure,
models.Summary,
models.Filing,
models.FilingPeriod,
models.Committee,
models.Filer,
models.Cycle,
]
sql = """DROP TABLE IF EXISTS `%s`;"""
for m in model_list:
self.log(" %s" % m.__name__)
self.cursor.execute(sql % m._meta.db_table)
# Revert database to default "note" warning behavior
self.cursor.execute("""SET SQL_NOTES=@OLD_SQL_NOTES;""")
<commit_msg>Add scraper models to drop command<commit_after>from django.db import connection
from calaccess_campaign_browser import models
from calaccess_campaign_browser.management.commands import CalAccessCommand
class Command(CalAccessCommand):
help = "Drops all CAL-ACCESS campaign browser database tables"
def handle(self, *args, **options):
self.header("Dropping CAL-ACCESS campaign browser database tables")
self.cursor = connection.cursor()
# Ignore MySQL "note" warnings so this can be run with DEBUG=True
self.cursor.execute("""SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0;""")
# Loop through the models and drop all the tables
model_list = [
models.Contribution,
models.Expenditure,
models.Summary,
models.Filing,
models.FilingPeriod,
models.Committee,
models.Filer,
models.Cycle,
models.Election,
models.Office,
models.Candidate,
models.Proposition,
models.PropositionFiler,
]
sql = """DROP TABLE IF EXISTS `%s`;"""
for m in model_list:
self.log(" %s" % m.__name__)
self.cursor.execute(sql % m._meta.db_table)
# Revert database to default "note" warning behavior
self.cursor.execute("""SET SQL_NOTES=@OLD_SQL_NOTES;""")
|
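
The calaccess command issues raw DROP TABLE statements keyed on each model's _meta.db_table, so the SQL tracks any table renamed via Meta. A hedged sketch of the core loop outside the management-command scaffolding; it assumes a configured Django project, a hypothetical app named myapp, and the MySQL-style backtick quoting of the original:

from django.db import connection
from myapp import models  # hypothetical app

MODEL_LIST = [models.Child, models.Parent]  # drop children before parents

def drop_tables():
    with connection.cursor() as cursor:
        for m in MODEL_LIST:
            # _meta.db_table is the table name Django actually uses,
            # whether default or overridden.
            cursor.execute('DROP TABLE IF EXISTS `%s`;' % m._meta.db_table)
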
25429b016ccd979c95da329491e95e69a4a18308
|
packages/pcl-reference-assemblies.py
|
packages/pcl-reference-assemblies.py
|
import glob
import os
import shutil
class PCLReferenceAssembliesPackage(Package):
def __init__(self):
Package.__init__(self,
name='PortableReferenceAssemblies',
version='2014-04-14',
sources=['http://storage.bos.xamarin.com/bot-provisioning/PortableReferenceAssemblies-2014-04-14.zip'])
def prep(self):
self.extract_archive(self.sources[0],
validate_only=False,
overwrite=True)
def build(self):
pass
# A bunch of shell script written inside python literals ;(
def install(self):
dest = os.path.join(self.prefix, "lib", "mono", "xbuild-frameworks", ".NETPortable")
if not os.path.exists(dest):
os.makedirs(dest)
shutil.rmtree(dest, ignore_errors=True)
pcldir = os.path.join(self.package_build_dir(), self.source_dir_name, ".NETPortable")
self.sh("rsync -abv -q %s/* %s" % (pcldir, dest))
PCLReferenceAssembliesPackage()
|
import glob
import os
import shutil
class PCLReferenceAssembliesPackage(Package):
def __init__(self):
Package.__init__(self,
name='PortableReferenceAssemblies',
version='2014-04-14',
sources=['http://storage.bos.xamarin.com/bot-provisioning/PortableReferenceAssemblies-2014-04-14.zip'])
def prep(self):
self.extract_archive(self.sources[0],
validate_only=False,
overwrite=True)
def build(self):
pass
# A bunch of shell script written inside python literals ;(
def install(self):
dest = os.path.join(self.prefix, "lib", "mono", "xbuild-frameworks", ".NETPortable")
if not os.path.exists(dest):
os.makedirs(dest)
shutil.rmtree(dest, ignore_errors=True)
pcldir = os.path.join(self.package_build_dir(), self.source_dir_name)
self.sh("rsync -abv -q %s/* %s" % (pcldir, dest))
PCLReferenceAssembliesPackage()
|
Fix the directory structure inside the source.
|
Fix the directory structure inside the source.
|
Python
|
mit
|
mono/bockbuild,BansheeMediaPlayer/bockbuild,BansheeMediaPlayer/bockbuild,BansheeMediaPlayer/bockbuild,mono/bockbuild
|
import glob
import os
import shutil
class PCLReferenceAssembliesPackage(Package):
def __init__(self):
Package.__init__(self,
name='PortableReferenceAssemblies',
version='2014-04-14',
sources=['http://storage.bos.xamarin.com/bot-provisioning/PortableReferenceAssemblies-2014-04-14.zip'])
def prep(self):
self.extract_archive(self.sources[0],
validate_only=False,
overwrite=True)
def build(self):
pass
# A bunch of shell script written inside python literals ;(
def install(self):
dest = os.path.join(self.prefix, "lib", "mono", "xbuild-frameworks", ".NETPortable")
if not os.path.exists(dest):
os.makedirs(dest)
shutil.rmtree(dest, ignore_errors=True)
pcldir = os.path.join(self.package_build_dir(), self.source_dir_name, ".NETPortable")
self.sh("rsync -abv -q %s/* %s" % (pcldir, dest))
PCLReferenceAssembliesPackage()
Fix the directory structure inside the source.
|
import glob
import os
import shutil
class PCLReferenceAssembliesPackage(Package):
def __init__(self):
Package.__init__(self,
name='PortableReferenceAssemblies',
version='2014-04-14',
sources=['http://storage.bos.xamarin.com/bot-provisioning/PortableReferenceAssemblies-2014-04-14.zip'])
def prep(self):
self.extract_archive(self.sources[0],
validate_only=False,
overwrite=True)
def build(self):
pass
# A bunch of shell script written inside python literals ;(
def install(self):
dest = os.path.join(self.prefix, "lib", "mono", "xbuild-frameworks", ".NETPortable")
if not os.path.exists(dest):
os.makedirs(dest)
shutil.rmtree(dest, ignore_errors=True)
pcldir = os.path.join(self.package_build_dir(), self.source_dir_name)
self.sh("rsync -abv -q %s/* %s" % (pcldir, dest))
PCLReferenceAssembliesPackage()
|
<commit_before>import glob
import os
import shutil
class PCLReferenceAssembliesPackage(Package):
def __init__(self):
Package.__init__(self,
name='PortableReferenceAssemblies',
version='2014-04-14',
sources=['http://storage.bos.xamarin.com/bot-provisioning/PortableReferenceAssemblies-2014-04-14.zip'])
def prep(self):
self.extract_archive(self.sources[0],
validate_only=False,
overwrite=True)
def build(self):
pass
# A bunch of shell script written inside python literals ;(
def install(self):
dest = os.path.join(self.prefix, "lib", "mono", "xbuild-frameworks", ".NETPortable")
if not os.path.exists(dest):
os.makedirs(dest)
shutil.rmtree(dest, ignore_errors=True)
pcldir = os.path.join(self.package_build_dir(), self.source_dir_name, ".NETPortable")
self.sh("rsync -abv -q %s/* %s" % (pcldir, dest))
PCLReferenceAssembliesPackage()
<commit_msg>Fix the directory structure inside the source.<commit_after>
|
import glob
import os
import shutil
class PCLReferenceAssembliesPackage(Package):
def __init__(self):
Package.__init__(self,
name='PortableReferenceAssemblies',
version='2014-04-14',
sources=['http://storage.bos.xamarin.com/bot-provisioning/PortableReferenceAssemblies-2014-04-14.zip'])
def prep(self):
self.extract_archive(self.sources[0],
validate_only=False,
overwrite=True)
def build(self):
pass
# A bunch of shell script written inside python literals ;(
def install(self):
dest = os.path.join(self.prefix, "lib", "mono", "xbuild-frameworks", ".NETPortable")
if not os.path.exists(dest):
os.makedirs(dest)
shutil.rmtree(dest, ignore_errors=True)
pcldir = os.path.join(self.package_build_dir(), self.source_dir_name)
self.sh("rsync -abv -q %s/* %s" % (pcldir, dest))
PCLReferenceAssembliesPackage()
|
import glob
import os
import shutil
class PCLReferenceAssembliesPackage(Package):
def __init__(self):
Package.__init__(self,
name='PortableReferenceAssemblies',
version='2014-04-14',
sources=['http://storage.bos.xamarin.com/bot-provisioning/PortableReferenceAssemblies-2014-04-14.zip'])
def prep(self):
self.extract_archive(self.sources[0],
validate_only=False,
overwrite=True)
def build(self):
pass
# A bunch of shell script written inside python literals ;(
def install(self):
dest = os.path.join(self.prefix, "lib", "mono", "xbuild-frameworks", ".NETPortable")
if not os.path.exists(dest):
os.makedirs(dest)
shutil.rmtree(dest, ignore_errors=True)
pcldir = os.path.join(self.package_build_dir(), self.source_dir_name, ".NETPortable")
self.sh("rsync -abv -q %s/* %s" % (pcldir, dest))
PCLReferenceAssembliesPackage()
Fix the directory structure inside the source.import glob
import os
import shutil
class PCLReferenceAssembliesPackage(Package):
def __init__(self):
Package.__init__(self,
name='PortableReferenceAssemblies',
version='2014-04-14',
sources=['http://storage.bos.xamarin.com/bot-provisioning/PortableReferenceAssemblies-2014-04-14.zip'])
def prep(self):
self.extract_archive(self.sources[0],
validate_only=False,
overwrite=True)
def build(self):
pass
# A bunch of shell script written inside python literals ;(
def install(self):
dest = os.path.join(self.prefix, "lib", "mono", "xbuild-frameworks", ".NETPortable")
if not os.path.exists(dest):
os.makedirs(dest)
shutil.rmtree(dest, ignore_errors=True)
pcldir = os.path.join(self.package_build_dir(), self.source_dir_name)
self.sh("rsync -abv -q %s/* %s" % (pcldir, dest))
PCLReferenceAssembliesPackage()
|
<commit_before>import glob
import os
import shutil
class PCLReferenceAssembliesPackage(Package):
def __init__(self):
Package.__init__(self,
name='PortableReferenceAssemblies',
version='2014-04-14',
sources=['http://storage.bos.xamarin.com/bot-provisioning/PortableReferenceAssemblies-2014-04-14.zip'])
def prep(self):
self.extract_archive(self.sources[0],
validate_only=False,
overwrite=True)
def build(self):
pass
# A bunch of shell script written inside python literals ;(
def install(self):
dest = os.path.join(self.prefix, "lib", "mono", "xbuild-frameworks", ".NETPortable")
if not os.path.exists(dest):
os.makedirs(dest)
shutil.rmtree(dest, ignore_errors=True)
pcldir = os.path.join(self.package_build_dir(), self.source_dir_name, ".NETPortable")
self.sh("rsync -abv -q %s/* %s" % (pcldir, dest))
PCLReferenceAssembliesPackage()
<commit_msg>Fix the directory structure inside the source.<commit_after>import glob
import os
import shutil
class PCLReferenceAssembliesPackage(Package):
def __init__(self):
Package.__init__(self,
name='PortableReferenceAssemblies',
version='2014-04-14',
sources=['http://storage.bos.xamarin.com/bot-provisioning/PortableReferenceAssemblies-2014-04-14.zip'])
def prep(self):
self.extract_archive(self.sources[0],
validate_only=False,
overwrite=True)
def build(self):
pass
# A bunch of shell script written inside python literals ;(
def install(self):
dest = os.path.join(self.prefix, "lib", "mono", "xbuild-frameworks", ".NETPortable")
if not os.path.exists(dest):
os.makedirs(dest)
shutil.rmtree(dest, ignore_errors=True)
pcldir = os.path.join(self.package_build_dir(), self.source_dir_name)
self.sh("rsync -abv -q %s/* %s" % (pcldir, dest))
PCLReferenceAssembliesPackage()
|
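
The pcl fix drops a doubled .NETPortable path segment: the payload evidently sits at the top of the extracted source, so appending the directory name again pointed rsync one level too deep. A small sketch of guarding against that class of layout mistake; the paths are illustrative:

import os

build_dir = '/tmp/bockbuild/PortableReferenceAssemblies-2014-04-14'  # example
dest = '/tmp/prefix/lib/mono/xbuild-frameworks/.NETPortable'

src = build_dir
nested = os.path.join(build_dir, '.NETPortable')
if os.path.isdir(nested):
    # Some archives nest the payload one level down; descend only when
    # that directory really exists instead of assuming it.
    src = nested
print('rsync -abv -q %s/* %s' % (src, dest))
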
b5bf391ca0303f877b39bed4c3266441a9b78b2b
|
src/waldur_mastermind/common/serializers.py
|
src/waldur_mastermind/common/serializers.py
|
from rest_framework import serializers
def validate_options(options, attributes):
fields = {}
for name, option in options.items():
params = {}
field_type = option.get('type', '')
field_class = serializers.CharField
if field_type == 'integer':
field_class = serializers.IntegerField
elif field_type == 'money':
field_class = serializers.IntegerField
elif field_type == 'boolean':
field_class = serializers.BooleanField
default_value = option.get('default')
if default_value:
params['default'] = default_value
else:
params['required'] = option.get('required', False)
if field_class == serializers.IntegerField:
if 'min' in option:
params['min_value'] = option.get('min')
if 'max' in option:
params['max_value'] = option.get('max')
if 'choices' in option:
field_class = serializers.ChoiceField
params['choices'] = option.get('choices')
if field_type == 'select_string_multi':
field_class = serializers.MultipleChoiceField
params['choices'] = option.get('choices')
fields[name] = field_class(**params)
serializer_class = type('AttributesSerializer', (serializers.Serializer,), fields)
serializer = serializer_class(data=attributes)
serializer.is_valid(raise_exception=True)
|
from rest_framework import serializers
class StringListSerializer(serializers.ListField):
child = serializers.CharField()
FIELD_CLASSES = {
'integer': serializers.IntegerField,
'date': serializers.DateField,
'time': serializers.TimeField,
'money': serializers.IntegerField,
'boolean': serializers.BooleanField,
'select_string': serializers.ChoiceField,
'select_string_multi': serializers.MultipleChoiceField,
'select_openstack_tenant': serializers.CharField,
'select_multiple_openstack_tenants': StringListSerializer,
'select_openstack_instance': serializers.CharField,
'select_multiple_openstack_instances': StringListSerializer,
}
def validate_options(options, attributes):
fields = {}
for name, option in options.items():
params = {}
field_type = option.get('type', '')
field_class = FIELD_CLASSES.get(field_type, serializers.CharField)
default_value = option.get('default')
if default_value:
params['default'] = default_value
else:
params['required'] = option.get('required', False)
if field_class == serializers.IntegerField:
if 'min' in option:
params['min_value'] = option.get('min')
if 'max' in option:
params['max_value'] = option.get('max')
if 'choices' in option:
params['choices'] = option['choices']
fields[name] = field_class(**params)
serializer_class = type('AttributesSerializer', (serializers.Serializer,), fields)
serializer = serializer_class(data=attributes)
serializer.is_valid(raise_exception=True)
|
Fix validation of OpenStack select fields in request-based item form
|
Fix validation of OpenStack select fields in request-based item form [WAL-4035]
|
Python
|
mit
|
opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur
|
from rest_framework import serializers
def validate_options(options, attributes):
fields = {}
for name, option in options.items():
params = {}
field_type = option.get('type', '')
field_class = serializers.CharField
if field_type == 'integer':
field_class = serializers.IntegerField
elif field_type == 'money':
field_class = serializers.IntegerField
elif field_type == 'boolean':
field_class = serializers.BooleanField
default_value = option.get('default')
if default_value:
params['default'] = default_value
else:
params['required'] = option.get('required', False)
if field_class == serializers.IntegerField:
if 'min' in option:
params['min_value'] = option.get('min')
if 'max' in option:
params['max_value'] = option.get('max')
if 'choices' in option:
field_class = serializers.ChoiceField
params['choices'] = option.get('choices')
if field_type == 'select_string_multi':
field_class = serializers.MultipleChoiceField
params['choices'] = option.get('choices')
fields[name] = field_class(**params)
serializer_class = type('AttributesSerializer', (serializers.Serializer,), fields)
serializer = serializer_class(data=attributes)
serializer.is_valid(raise_exception=True)
Fix validation of OpenStack select fields in request-based item form [WAL-4035]
|
from rest_framework import serializers
class StringListSerializer(serializers.ListField):
child = serializers.CharField()
FIELD_CLASSES = {
'integer': serializers.IntegerField,
'date': serializers.DateField,
'time': serializers.TimeField,
'money': serializers.IntegerField,
'boolean': serializers.BooleanField,
'select_string': serializers.ChoiceField,
'select_string_multi': serializers.MultipleChoiceField,
'select_openstack_tenant': serializers.CharField,
'select_multiple_openstack_tenants': StringListSerializer,
'select_openstack_instance': serializers.CharField,
'select_multiple_openstack_instances': StringListSerializer,
}
def validate_options(options, attributes):
fields = {}
for name, option in options.items():
params = {}
field_type = option.get('type', '')
field_class = FIELD_CLASSES.get(field_type, serializers.CharField)
default_value = option.get('default')
if default_value:
params['default'] = default_value
else:
params['required'] = option.get('required', False)
if field_class == serializers.IntegerField:
if 'min' in option:
params['min_value'] = option.get('min')
if 'max' in option:
params['max_value'] = option.get('max')
if 'choices' in option:
params['choices'] = option['choices']
fields[name] = field_class(**params)
serializer_class = type('AttributesSerializer', (serializers.Serializer,), fields)
serializer = serializer_class(data=attributes)
serializer.is_valid(raise_exception=True)
|
<commit_before>from rest_framework import serializers
def validate_options(options, attributes):
fields = {}
for name, option in options.items():
params = {}
field_type = option.get('type', '')
field_class = serializers.CharField
if field_type == 'integer':
field_class = serializers.IntegerField
elif field_type == 'money':
field_class = serializers.IntegerField
elif field_type == 'boolean':
field_class = serializers.BooleanField
default_value = option.get('default')
if default_value:
params['default'] = default_value
else:
params['required'] = option.get('required', False)
if field_class == serializers.IntegerField:
if 'min' in option:
params['min_value'] = option.get('min')
if 'max' in option:
params['max_value'] = option.get('max')
if 'choices' in option:
field_class = serializers.ChoiceField
params['choices'] = option.get('choices')
if field_type == 'select_string_multi':
field_class = serializers.MultipleChoiceField
params['choices'] = option.get('choices')
fields[name] = field_class(**params)
serializer_class = type('AttributesSerializer', (serializers.Serializer,), fields)
serializer = serializer_class(data=attributes)
serializer.is_valid(raise_exception=True)
<commit_msg>Fix validation of OpenStack select fields in request-based item form [WAL-4035]<commit_after>
|
from rest_framework import serializers
class StringListSerializer(serializers.ListField):
child = serializers.CharField()
FIELD_CLASSES = {
'integer': serializers.IntegerField,
'date': serializers.DateField,
'time': serializers.TimeField,
'money': serializers.IntegerField,
'boolean': serializers.BooleanField,
'select_string': serializers.ChoiceField,
'select_string_multi': serializers.MultipleChoiceField,
'select_openstack_tenant': serializers.CharField,
'select_multiple_openstack_tenants': StringListSerializer,
'select_openstack_instance': serializers.CharField,
'select_multiple_openstack_instances': StringListSerializer,
}
def validate_options(options, attributes):
fields = {}
for name, option in options.items():
params = {}
field_type = option.get('type', '')
field_class = FIELD_CLASSES.get(field_type, serializers.CharField)
default_value = option.get('default')
if default_value:
params['default'] = default_value
else:
params['required'] = option.get('required', False)
if field_class == serializers.IntegerField:
if 'min' in option:
params['min_value'] = option.get('min')
if 'max' in option:
params['max_value'] = option.get('max')
if 'choices' in option:
params['choices'] = option['choices']
fields[name] = field_class(**params)
serializer_class = type('AttributesSerializer', (serializers.Serializer,), fields)
serializer = serializer_class(data=attributes)
serializer.is_valid(raise_exception=True)
|
from rest_framework import serializers
def validate_options(options, attributes):
fields = {}
for name, option in options.items():
params = {}
field_type = option.get('type', '')
field_class = serializers.CharField
if field_type == 'integer':
field_class = serializers.IntegerField
elif field_type == 'money':
field_class = serializers.IntegerField
elif field_type == 'boolean':
field_class = serializers.BooleanField
default_value = option.get('default')
if default_value:
params['default'] = default_value
else:
params['required'] = option.get('required', False)
if field_class == serializers.IntegerField:
if 'min' in option:
params['min_value'] = option.get('min')
if 'max' in option:
params['max_value'] = option.get('max')
if 'choices' in option:
field_class = serializers.ChoiceField
params['choices'] = option.get('choices')
if field_type == 'select_string_multi':
field_class = serializers.MultipleChoiceField
params['choices'] = option.get('choices')
fields[name] = field_class(**params)
serializer_class = type('AttributesSerializer', (serializers.Serializer,), fields)
serializer = serializer_class(data=attributes)
serializer.is_valid(raise_exception=True)
Fix validation of OpenStack select fields in request-based item form [WAL-4035]from rest_framework import serializers
class StringListSerializer(serializers.ListField):
child = serializers.CharField()
FIELD_CLASSES = {
'integer': serializers.IntegerField,
'date': serializers.DateField,
'time': serializers.TimeField,
'money': serializers.IntegerField,
'boolean': serializers.BooleanField,
'select_string': serializers.ChoiceField,
'select_string_multi': serializers.MultipleChoiceField,
'select_openstack_tenant': serializers.CharField,
'select_multiple_openstack_tenants': StringListSerializer,
'select_openstack_instance': serializers.CharField,
'select_multiple_openstack_instances': StringListSerializer,
}
def validate_options(options, attributes):
fields = {}
for name, option in options.items():
params = {}
field_type = option.get('type', '')
field_class = FIELD_CLASSES.get(field_type, serializers.CharField)
default_value = option.get('default')
if default_value:
params['default'] = default_value
else:
params['required'] = option.get('required', False)
if field_class == serializers.IntegerField:
if 'min' in option:
params['min_value'] = option.get('min')
if 'max' in option:
params['max_value'] = option.get('max')
if 'choices' in option:
params['choices'] = option['choices']
fields[name] = field_class(**params)
serializer_class = type('AttributesSerializer', (serializers.Serializer,), fields)
serializer = serializer_class(data=attributes)
serializer.is_valid(raise_exception=True)
|
<commit_before>from rest_framework import serializers
def validate_options(options, attributes):
fields = {}
for name, option in options.items():
params = {}
field_type = option.get('type', '')
field_class = serializers.CharField
if field_type == 'integer':
field_class = serializers.IntegerField
elif field_type == 'money':
field_class = serializers.IntegerField
elif field_type == 'boolean':
field_class = serializers.BooleanField
default_value = option.get('default')
if default_value:
params['default'] = default_value
else:
params['required'] = option.get('required', False)
if field_class == serializers.IntegerField:
if 'min' in option:
params['min_value'] = option.get('min')
if 'max' in option:
params['max_value'] = option.get('max')
if 'choices' in option:
field_class = serializers.ChoiceField
params['choices'] = option.get('choices')
if field_type == 'select_string_multi':
field_class = serializers.MultipleChoiceField
params['choices'] = option.get('choices')
fields[name] = field_class(**params)
serializer_class = type('AttributesSerializer', (serializers.Serializer,), fields)
serializer = serializer_class(data=attributes)
serializer.is_valid(raise_exception=True)
<commit_msg>Fix validation of OpenStack select fields in request-based item form [WAL-4035]<commit_after>from rest_framework import serializers
class StringListSerializer(serializers.ListField):
child = serializers.CharField()
FIELD_CLASSES = {
'integer': serializers.IntegerField,
'date': serializers.DateField,
'time': serializers.TimeField,
'money': serializers.IntegerField,
'boolean': serializers.BooleanField,
'select_string': serializers.ChoiceField,
'select_string_multi': serializers.MultipleChoiceField,
'select_openstack_tenant': serializers.CharField,
'select_multiple_openstack_tenants': StringListSerializer,
'select_openstack_instance': serializers.CharField,
'select_multiple_openstack_instances': StringListSerializer,
}
def validate_options(options, attributes):
fields = {}
for name, option in options.items():
params = {}
field_type = option.get('type', '')
field_class = FIELD_CLASSES.get(field_type, serializers.CharField)
default_value = option.get('default')
if default_value:
params['default'] = default_value
else:
params['required'] = option.get('required', False)
if field_class == serializers.IntegerField:
if 'min' in option:
params['min_value'] = option.get('min')
if 'max' in option:
params['max_value'] = option.get('max')
if 'choices' in option:
params['choices'] = option['choices']
fields[name] = field_class(**params)
serializer_class = type('AttributesSerializer', (serializers.Serializer,), fields)
serializer = serializer_class(data=attributes)
serializer.is_valid(raise_exception=True)
|
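A minimal, dependency-free sketch of the table-driven validation idea in the record above, using only the standard library; FIELD_VALIDATORS and the option shapes are hypothetical stand-ins for the DRF field classes, not the project's actual API.

# Table-driven lookup replaces the old if/elif chain on field_type.
FIELD_VALIDATORS = {
    'integer': int,
    'boolean': bool,
}

def validate_options(options, attributes):
    cleaned = {}
    for name, option in options.items():
        # Unknown types fall back to plain strings, as CharField does above.
        validator = FIELD_VALIDATORS.get(option.get('type'), str)
        if name not in attributes:
            if option.get('required', False):
                raise ValueError('missing required attribute: %s' % name)
            continue
        cleaned[name] = validator(attributes[name])
    return cleaned

print(validate_options(
    {'cores': {'type': 'integer', 'required': True}},
    {'cores': '4'},
))  # -> {'cores': 4}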
1fb54fcb5236b8c5f33f3eb855c1085c00eeeb2c
|
src/__init__.py
|
src/__init__.py
|
from .pytesseract import ( # noqa: F401
Output,
TesseractError,
TesseractNotFoundError,
TSVNotSupported,
get_tesseract_version,
image_to_alto_xml,
image_to_boxes,
image_to_data,
image_to_osd,
image_to_pdf_or_hocr,
image_to_string,
run_and_get_output,
)
|
from .pytesseract import ( # noqa: F401
Output,
TesseractError,
TesseractNotFoundError,
ALTONotSupported,
TSVNotSupported,
get_tesseract_version,
image_to_alto_xml,
image_to_boxes,
image_to_data,
image_to_osd,
image_to_pdf_or_hocr,
image_to_string,
run_and_get_output,
)
|
Make the ALTONotSupported exception available
|
Make the ALTONotSupported exception available
|
Python
|
apache-2.0
|
madmaze/pytesseract
|
from .pytesseract import ( # noqa: F401
Output,
TesseractError,
TesseractNotFoundError,
TSVNotSupported,
get_tesseract_version,
image_to_alto_xml,
image_to_boxes,
image_to_data,
image_to_osd,
image_to_pdf_or_hocr,
image_to_string,
run_and_get_output,
)
Make the ALTONotSupported exception available
|
from .pytesseract import ( # noqa: F401
Output,
TesseractError,
TesseractNotFoundError,
ALTONotSupported,
TSVNotSupported,
get_tesseract_version,
image_to_alto_xml,
image_to_boxes,
image_to_data,
image_to_osd,
image_to_pdf_or_hocr,
image_to_string,
run_and_get_output,
)
|
<commit_before>from .pytesseract import ( # noqa: F401
Output,
TesseractError,
TesseractNotFoundError,
TSVNotSupported,
get_tesseract_version,
image_to_alto_xml,
image_to_boxes,
image_to_data,
image_to_osd,
image_to_pdf_or_hocr,
image_to_string,
run_and_get_output,
)
<commit_msg>Make the ALTONotSupported exception available<commit_after>
|
from .pytesseract import ( # noqa: F401
Output,
TesseractError,
TesseractNotFoundError,
ALTONotSupported,
TSVNotSupported,
get_tesseract_version,
image_to_alto_xml,
image_to_boxes,
image_to_data,
image_to_osd,
image_to_pdf_or_hocr,
image_to_string,
run_and_get_output,
)
|
from .pytesseract import ( # noqa: F401
Output,
TesseractError,
TesseractNotFoundError,
TSVNotSupported,
get_tesseract_version,
image_to_alto_xml,
image_to_boxes,
image_to_data,
image_to_osd,
image_to_pdf_or_hocr,
image_to_string,
run_and_get_output,
)
Make the ALTONotSupported exception availablefrom .pytesseract import ( # noqa: F401
Output,
TesseractError,
TesseractNotFoundError,
ALTONotSupported,
TSVNotSupported,
get_tesseract_version,
image_to_alto_xml,
image_to_boxes,
image_to_data,
image_to_osd,
image_to_pdf_or_hocr,
image_to_string,
run_and_get_output,
)
|
<commit_before>from .pytesseract import ( # noqa: F401
Output,
TesseractError,
TesseractNotFoundError,
TSVNotSupported,
get_tesseract_version,
image_to_alto_xml,
image_to_boxes,
image_to_data,
image_to_osd,
image_to_pdf_or_hocr,
image_to_string,
run_and_get_output,
)
<commit_msg>Make the ALTONotSupported exception available<commit_after>from .pytesseract import ( # noqa: F401
Output,
TesseractError,
TesseractNotFoundError,
ALTONotSupported,
TSVNotSupported,
get_tesseract_version,
image_to_alto_xml,
image_to_boxes,
image_to_data,
image_to_osd,
image_to_pdf_or_hocr,
image_to_string,
run_and_get_output,
)
|
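A hedged usage sketch of why the re-export matters: callers can now catch the exception from the package namespace instead of reaching into the submodule. It assumes pytesseract and Pillow are installed and that 'sample.png' exists; the trigger condition in the comment is the usual reason this exception fires, not something stated in the record.

import pytesseract
from PIL import Image

try:
    alto = pytesseract.image_to_alto_xml(Image.open('sample.png'))
except pytesseract.ALTONotSupported:
    # Typically raised when the installed tesseract predates ALTO support.
    alto = None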
f89dce3ff6d0858c5a29b96610fe4113d6200184
|
gallery/storages.py
|
gallery/storages.py
|
# coding: utf-8
from __future__ import unicode_literals
import re
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.files.storage import FileSystemStorage
from django.dispatch import receiver
from django.utils.lru_cache import lru_cache
from django.utils.module_loading import import_string
try:
from django.test.signals import setting_changed # Django ≥ 1.8
except ImportError: # pragma: no cover
from django.core.signals import setting_changed # Django < 1.8
@lru_cache()
def get_storage(name):
name = name.upper()
storage_setting = 'GALLERY_{}_STORAGE'.format(name)
dir_setting = 'GALLERY_{}_DIR'.format(name)
try:
storage_class = getattr(settings, storage_setting)
except AttributeError:
# There's a good chance that this fallback will survive for a long
# time because deprecating it would require updating all the tests.
try:
storage_dir = getattr(settings, dir_setting)
except AttributeError:
raise ImproperlyConfigured(
"Please define {} or {}".format(storage_setting, dir_setting))
else:
return FileSystemStorage(location=storage_dir)
else:
return import_string(storage_class)()
@receiver(setting_changed)
def clear_storages_cache(**kwargs):
if re.match(r'^GALLERY_[A-Z]+_(STORAGE|DIR)$', kwargs['setting']):
get_storage.cache_clear()
|
# coding: utf-8
from __future__ import unicode_literals
import re
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.files.storage import FileSystemStorage
from django.dispatch import receiver
from django.test.signals import setting_changed
from django.utils.lru_cache import lru_cache
from django.utils.module_loading import import_string
@lru_cache()
def get_storage(name):
name = name.upper()
storage_setting = 'GALLERY_{}_STORAGE'.format(name)
dir_setting = 'GALLERY_{}_DIR'.format(name)
try:
storage_class = getattr(settings, storage_setting)
except AttributeError:
# There's a good chance that this fallback will survive for a long
# time because deprecating it would require updating all the tests.
try:
storage_dir = getattr(settings, dir_setting)
except AttributeError:
raise ImproperlyConfigured(
"Please define {} or {}".format(storage_setting, dir_setting))
else:
return FileSystemStorage(location=storage_dir)
else:
return import_string(storage_class)()
@receiver(setting_changed)
def clear_storages_cache(**kwargs):
if re.match(r'^GALLERY_[A-Z]+_(STORAGE|DIR)$', kwargs['setting']):
get_storage.cache_clear()
|
Remove backwards compatibility with Django < 1.8.
|
Remove backwards compatibility with Django < 1.8.
|
Python
|
bsd-3-clause
|
aaugustin/myks-gallery,aaugustin/myks-gallery
|
# coding: utf-8
from __future__ import unicode_literals
import re
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.files.storage import FileSystemStorage
from django.dispatch import receiver
from django.utils.lru_cache import lru_cache
from django.utils.module_loading import import_string
try:
from django.test.signals import setting_changed # Django ≥ 1.8
except ImportError: # pragma: no cover
from django.core.signals import setting_changed # Django < 1.8
@lru_cache()
def get_storage(name):
name = name.upper()
storage_setting = 'GALLERY_{}_STORAGE'.format(name)
dir_setting = 'GALLERY_{}_DIR'.format(name)
try:
storage_class = getattr(settings, storage_setting)
except AttributeError:
# There's a good chance that this fallback will survive for a long
# time because deprecating it would require updating all the tests.
try:
storage_dir = getattr(settings, dir_setting)
except AttributeError:
raise ImproperlyConfigured(
"Please define {} or {}".format(storage_setting, dir_setting))
else:
return FileSystemStorage(location=storage_dir)
else:
return import_string(storage_class)()
@receiver(setting_changed)
def clear_storages_cache(**kwargs):
if re.match(r'^GALLERY_[A-Z]+_(STORAGE|DIR)$', kwargs['setting']):
get_storage.cache_clear()
Remove backwards compatibility with Django < 1.8.
|
# coding: utf-8
from __future__ import unicode_literals
import re
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.files.storage import FileSystemStorage
from django.dispatch import receiver
from django.test.signals import setting_changed
from django.utils.lru_cache import lru_cache
from django.utils.module_loading import import_string
@lru_cache()
def get_storage(name):
name = name.upper()
storage_setting = 'GALLERY_{}_STORAGE'.format(name)
dir_setting = 'GALLERY_{}_DIR'.format(name)
try:
storage_class = getattr(settings, storage_setting)
except AttributeError:
# There's a good chance that this fallback will survive for a long
# time because deprecating it would require updating all the tests.
try:
storage_dir = getattr(settings, dir_setting)
except AttributeError:
raise ImproperlyConfigured(
"Please define {} or {}".format(storage_setting, dir_setting))
else:
return FileSystemStorage(location=storage_dir)
else:
return import_string(storage_class)()
@receiver(setting_changed)
def clear_storages_cache(**kwargs):
if re.match(r'^GALLERY_[A-Z]+_(STORAGE|DIR)$', kwargs['setting']):
get_storage.cache_clear()
|
<commit_before># coding: utf-8
from __future__ import unicode_literals
import re
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.files.storage import FileSystemStorage
from django.dispatch import receiver
from django.utils.lru_cache import lru_cache
from django.utils.module_loading import import_string
try:
from django.test.signals import setting_changed # Django ≥ 1.8
except ImportError: # pragma: no cover
from django.core.signals import setting_changed # Django < 1.8
@lru_cache()
def get_storage(name):
name = name.upper()
storage_setting = 'GALLERY_{}_STORAGE'.format(name)
dir_setting = 'GALLERY_{}_DIR'.format(name)
try:
storage_class = getattr(settings, storage_setting)
except AttributeError:
# There's a good chance that this fallback will survive for a long
# time because deprecating it would require updating all the tests.
try:
storage_dir = getattr(settings, dir_setting)
except AttributeError:
raise ImproperlyConfigured(
"Please define {} or {}".format(storage_setting, dir_setting))
else:
return FileSystemStorage(location=storage_dir)
else:
return import_string(storage_class)()
@receiver(setting_changed)
def clear_storages_cache(**kwargs):
if re.match(r'^GALLERY_[A-Z]+_(STORAGE|DIR)$', kwargs['setting']):
get_storage.cache_clear()
<commit_msg>Remove backwards compatibility with Django < 1.8.<commit_after>
|
# coding: utf-8
from __future__ import unicode_literals
import re
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.files.storage import FileSystemStorage
from django.dispatch import receiver
from django.test.signals import setting_changed
from django.utils.lru_cache import lru_cache
from django.utils.module_loading import import_string
@lru_cache()
def get_storage(name):
name = name.upper()
storage_setting = 'GALLERY_{}_STORAGE'.format(name)
dir_setting = 'GALLERY_{}_DIR'.format(name)
try:
storage_class = getattr(settings, storage_setting)
except AttributeError:
# There's a good chance that this fallback will survive for a long
# time because deprecating it would require updating all the tests.
try:
storage_dir = getattr(settings, dir_setting)
except AttributeError:
raise ImproperlyConfigured(
"Please define {} or {}".format(storage_setting, dir_setting))
else:
return FileSystemStorage(location=storage_dir)
else:
return import_string(storage_class)()
@receiver(setting_changed)
def clear_storages_cache(**kwargs):
if re.match(r'^GALLERY_[A-Z]+_(STORAGE|DIR)$', kwargs['setting']):
get_storage.cache_clear()
|
# coding: utf-8
from __future__ import unicode_literals
import re
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.files.storage import FileSystemStorage
from django.dispatch import receiver
from django.utils.lru_cache import lru_cache
from django.utils.module_loading import import_string
try:
from django.test.signals import setting_changed # Django ≥ 1.8
except ImportError: # pragma: no cover
from django.core.signals import setting_changed # Django < 1.8
@lru_cache()
def get_storage(name):
name = name.upper()
storage_setting = 'GALLERY_{}_STORAGE'.format(name)
dir_setting = 'GALLERY_{}_DIR'.format(name)
try:
storage_class = getattr(settings, storage_setting)
except AttributeError:
# There's a good chance that this fallback will survive for a long
# time because deprecating it would require updating all the tests.
try:
storage_dir = getattr(settings, dir_setting)
except AttributeError:
raise ImproperlyConfigured(
"Please define {} or {}".format(storage_setting, dir_setting))
else:
return FileSystemStorage(location=storage_dir)
else:
return import_string(storage_class)()
@receiver(setting_changed)
def clear_storages_cache(**kwargs):
if re.match(r'^GALLERY_[A-Z]+_(STORAGE|DIR)$', kwargs['setting']):
get_storage.cache_clear()
Remove backwards compatibility with Django < 1.8.# coding: utf-8
from __future__ import unicode_literals
import re
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.files.storage import FileSystemStorage
from django.dispatch import receiver
from django.test.signals import setting_changed
from django.utils.lru_cache import lru_cache
from django.utils.module_loading import import_string
@lru_cache()
def get_storage(name):
name = name.upper()
storage_setting = 'GALLERY_{}_STORAGE'.format(name)
dir_setting = 'GALLERY_{}_DIR'.format(name)
try:
storage_class = getattr(settings, storage_setting)
except AttributeError:
# There's a good chance that this fallback will survive for a long
# time because deprecating it would require updating all the tests.
try:
storage_dir = getattr(settings, dir_setting)
except AttributeError:
raise ImproperlyConfigured(
"Please define {} or {}".format(storage_setting, dir_setting))
else:
return FileSystemStorage(location=storage_dir)
else:
return import_string(storage_class)()
@receiver(setting_changed)
def clear_storages_cache(**kwargs):
if re.match(r'^GALLERY_[A-Z]+_(STORAGE|DIR)$', kwargs['setting']):
get_storage.cache_clear()
|
<commit_before># coding: utf-8
from __future__ import unicode_literals
import re
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.files.storage import FileSystemStorage
from django.dispatch import receiver
from django.utils.lru_cache import lru_cache
from django.utils.module_loading import import_string
try:
from django.test.signals import setting_changed # Django ≥ 1.8
except ImportError: # pragma: no cover
from django.core.signals import setting_changed # Django < 1.8
@lru_cache()
def get_storage(name):
name = name.upper()
storage_setting = 'GALLERY_{}_STORAGE'.format(name)
dir_setting = 'GALLERY_{}_DIR'.format(name)
try:
storage_class = getattr(settings, storage_setting)
except AttributeError:
# There's a good chance that this fallback will survive for a long
# time because deprecating it would require updating all the tests.
try:
storage_dir = getattr(settings, dir_setting)
except AttributeError:
raise ImproperlyConfigured(
"Please define {} or {}".format(storage_setting, dir_setting))
else:
return FileSystemStorage(location=storage_dir)
else:
return import_string(storage_class)()
@receiver(setting_changed)
def clear_storages_cache(**kwargs):
if re.match(r'^GALLERY_[A-Z]+_(STORAGE|DIR)$', kwargs['setting']):
get_storage.cache_clear()
<commit_msg>Remove backwards compatibility with Django < 1.8.<commit_after># coding: utf-8
from __future__ import unicode_literals
import re
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.files.storage import FileSystemStorage
from django.dispatch import receiver
from django.test.signals import setting_changed
from django.utils.lru_cache import lru_cache
from django.utils.module_loading import import_string
@lru_cache()
def get_storage(name):
name = name.upper()
storage_setting = 'GALLERY_{}_STORAGE'.format(name)
dir_setting = 'GALLERY_{}_DIR'.format(name)
try:
storage_class = getattr(settings, storage_setting)
except AttributeError:
# There's a good chance that this fallback will survive for a long
# time because deprecating it would require updating all the tests.
try:
storage_dir = getattr(settings, dir_setting)
except AttributeError:
raise ImproperlyConfigured(
"Please define {} or {}".format(storage_setting, dir_setting))
else:
return FileSystemStorage(location=storage_dir)
else:
return import_string(storage_class)()
@receiver(setting_changed)
def clear_storages_cache(**kwargs):
if re.match(r'^GALLERY_[A-Z]+_(STORAGE|DIR)$', kwargs['setting']):
get_storage.cache_clear()
|
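A framework-free sketch of the cache-and-invalidate pattern in the record: lru_cache memoizes per name, and cache_clear() plays the role the setting_changed receiver plays in Django. SETTINGS and the key format are illustrative.

from functools import lru_cache

SETTINGS = {'GALLERY_PHOTO_DIR': '/srv/photos'}

@lru_cache()
def get_storage_dir(name):
    return SETTINGS['GALLERY_{}_DIR'.format(name.upper())]

print(get_storage_dir('photo'))      # computed once, then served from cache
SETTINGS['GALLERY_PHOTO_DIR'] = '/srv/other'
get_storage_dir.cache_clear()        # what clear_storages_cache() does
print(get_storage_dir('photo'))      # recomputed against the new setting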
4121dc4b67d198b7aeea16a4c46d7fc85e359190
|
presentation/models.py
|
presentation/models.py
|
from django.db import models
from model_utils.models import TimeStampedModel
from warp.users.models import User
class Presentation(TimeStampedModel):
subject = models.CharField(max_length=50)
author = models.ForeignKey(User, on_delete=models.CASCADE)
views = models.IntegerField(default=0)
markdown = models.TextField()
html = models.TextField()
|
from django.db import models
from model_utils.models import TimeStampedModel
from warp.users.models import User
class Presentation(TimeStampedModel):
subject = models.CharField(max_length=50)
author = models.ForeignKey(User, on_delete=models.CASCADE)
views = models.IntegerField(default=0)
markdown = models.TextField()
html = models.TextField()
is_public = models.BooleanField(default=True)
|
Add 'is_public' field for checking whether or not the presentation is public
|
Add 'is_public' field for checking whether or not the presentation is public
|
Python
|
mit
|
SaturDJang/warp,SaturDJang/warp,SaturDJang/warp,SaturDJang/warp
|
from django.db import models
from model_utils.models import TimeStampedModel
from warp.users.models import User
class Presentation(TimeStampedModel):
subject = models.CharField(max_length=50)
author = models.ForeignKey(User, on_delete=models.CASCADE)
views = models.IntegerField(default=0)
markdown = models.TextField()
html = models.TextField()
Add 'is_public' field for checking whether or not the presentation is public
|
from django.db import models
from model_utils.models import TimeStampedModel
from warp.users.models import User
class Presentation(TimeStampedModel):
subject = models.CharField(max_length=50)
author = models.ForeignKey(User, on_delete=models.CASCADE)
views = models.IntegerField(default=0)
markdown = models.TextField()
html = models.TextField()
is_public = models.BooleanField(default=True)
|
<commit_before>from django.db import models
from model_utils.models import TimeStampedModel
from warp.users.models import User
class Presentation(TimeStampedModel):
subject = models.CharField(max_length=50)
author = models.ForeignKey(User, on_delete=models.CASCADE)
views = models.IntegerField(default=0)
markdown = models.TextField()
html = models.TextField()
<commit_msg>Add 'is_public' field for checking whether or not the presentation is public<commit_after>
|
from django.db import models
from model_utils.models import TimeStampedModel
from warp.users.models import User
class Presentation(TimeStampedModel):
subject = models.CharField(max_length=50)
author = models.ForeignKey(User, on_delete=models.CASCADE)
views = models.IntegerField(default=0)
markdown = models.TextField()
html = models.TextField()
is_public = models.BooleanField(default=True)
|
from django.db import models
from model_utils.models import TimeStampedModel
from warp.users.models import User
class Presentation(TimeStampedModel):
subject = models.CharField(max_length=50)
author = models.ForeignKey(User, on_delete=models.CASCADE)
views = models.IntegerField(default=0)
markdown = models.TextField()
html = models.TextField()
Add 'is_public' field for checking whether or not the presentation is publicfrom django.db import models
from model_utils.models import TimeStampedModel
from warp.users.models import User
class Presentation(TimeStampedModel):
subject = models.CharField(max_length=50)
author = models.ForeignKey(User, on_delete=models.CASCADE)
views = models.IntegerField(default=0)
markdown = models.TextField()
html = models.TextField()
is_public = models.BooleanField(default=True)
|
<commit_before>from django.db import models
from model_utils.models import TimeStampedModel
from warp.users.models import User
class Presentation(TimeStampedModel):
subject = models.CharField(max_length=50)
author = models.ForeignKey(User, on_delete=models.CASCADE)
views = models.IntegerField(default=0)
markdown = models.TextField()
html = models.TextField()
<commit_msg>Add 'is_public' field for checking whether or not the presentation is public<commit_after>from django.db import models
from model_utils.models import TimeStampedModel
from warp.users.models import User
class Presentation(TimeStampedModel):
subject = models.CharField(max_length=50)
author = models.ForeignKey(User, on_delete=models.CASCADE)
views = models.IntegerField(default=0)
markdown = models.TextField()
html = models.TextField()
is_public = models.BooleanField(default=True)
|
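A framework-free analogue of the change above: adding a new flag with a default keeps every existing call site working. The dataclass and names are illustrative stand-ins for the Django model field, not the project's code.

from dataclasses import dataclass

@dataclass
class Presentation:
    subject: str
    views: int = 0
    is_public: bool = True  # mirrors models.BooleanField(default=True)

talks = [Presentation('intro'), Presentation('draft', is_public=False)]
print([t.subject for t in talks if t.is_public])  # -> ['intro']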
0b5f3dc674001c9abd1a7d7df18badfafdb825eb
|
equajson.py
|
equajson.py
|
#! /usr/bin/env python
from __future__ import print_function
import os
import sys
import json
def pretty_print(equation):
print(equation["description"]["terse"])
eqn_dict = equation["unicode-pretty-print"]
equation_text = eqn_dict["multiline"]
for line in equation_text:
print(line)
if "parameters" in eqn_dict:
print("where:")
for param_dict in eqn_dict["parameters"]:
symbol = param_dict["symbol"]
label = param_dict["label"]
print(symbol,'=',label)
def main(query):
here = sys.path[0]
json_dir = os.path.join(here, 'equajson')
for filename in os.listdir(json_dir):
if not filename.endswith('.json'):
continue
filepath = os.path.join(json_dir, filename)
with open(filepath) as json_file:
try:
equation = json.load(json_file)
except ValueError:
sys.stderr.write("Invalid JSON for file: `{}'\n".format(json_file.name))
continue # try the next file
description = equation["description"]["verbose"]
if query.lower() in description.lower():
pretty_print(equation)
print()
if __name__ == '__main__':
num_args = len(sys.argv) - 1
if num_args != 1:
sys.stderr.write("Usage: python "+sys.argv[0]+" query"+'\n')
sys.exit(1)
main(sys.argv[1])
|
#! /usr/bin/env python
from __future__ import print_function
import os
import sys
import json
def pretty_print(equation):
print(equation["description"]["terse"])
eqn_dict = equation["unicode-pretty-print"]
equation_text = eqn_dict["multiline"]
for line in equation_text:
print(line)
if "parameters" in eqn_dict:
print("where:")
for param_dict in eqn_dict["parameters"]:
symbol = param_dict["symbol"]
label = param_dict["label"]
print(symbol,'=',label)
def main(query):
here = sys.path[0]
json_dir = os.path.join(here, 'equajson')
for filename in os.listdir(json_dir):
if not filename.endswith('.json'):
continue
filepath = os.path.join(json_dir, filename)
with open(filepath) as json_file:
try:
equation = json.load(json_file)
except ValueError:
sys.stderr.write("Invalid JSON for file: `{}'\n".format(json_file.name))
continue # try the next file
description = equation["description"]["verbose"]
if query.lower() in description.lower():
pretty_print(equation)
print('-'*80)
if __name__ == '__main__':
num_args = len(sys.argv) - 1
if num_args != 1:
sys.stderr.write("Usage: python "+sys.argv[0]+" query"+'\n')
sys.exit(1)
main(sys.argv[1])
|
Add visual separator between outputs.
|
Add visual separator between outputs.
|
Python
|
mit
|
nbeaver/equajson
|
#! /usr/bin/env python
from __future__ import print_function
import os
import sys
import json
def pretty_print(equation):
print(equation["description"]["terse"])
eqn_dict = equation["unicode-pretty-print"]
equation_text = eqn_dict["multiline"]
for line in equation_text:
print(line)
if "parameters" in eqn_dict:
print("where:")
for param_dict in eqn_dict["parameters"]:
symbol = param_dict["symbol"]
label = param_dict["label"]
print(symbol,'=',label)
def main(query):
here = sys.path[0]
json_dir = os.path.join(here, 'equajson')
for filename in os.listdir(json_dir):
if not filename.endswith('.json'):
continue
filepath = os.path.join(json_dir, filename)
with open(filepath) as json_file:
try:
equation = json.load(json_file)
except ValueError:
sys.stderr.write("Invalid JSON for file: `{}'\n".format(json_file.name))
continue # try the next file
description = equation["description"]["verbose"]
if query.lower() in description.lower():
pretty_print(equation)
print()
if __name__ == '__main__':
num_args = len(sys.argv) - 1
if num_args != 1:
sys.stderr.write("Usage: python "+sys.argv[0]+" query"+'\n')
sys.exit(1)
main(sys.argv[1])
Add visual separator between outputs.
|
#! /usr/bin/env python
from __future__ import print_function
import os
import sys
import json
def pretty_print(equation):
print(equation["description"]["terse"])
eqn_dict = equation["unicode-pretty-print"]
equation_text = eqn_dict["multiline"]
for line in equation_text:
print(line)
if "parameters" in eqn_dict:
print("where:")
for param_dict in eqn_dict["parameters"]:
symbol = param_dict["symbol"]
label = param_dict["label"]
print(symbol,'=',label)
def main(query):
here = sys.path[0]
json_dir = os.path.join(here, 'equajson')
for filename in os.listdir(json_dir):
if not filename.endswith('.json'):
continue
filepath = os.path.join(json_dir, filename)
with open(filepath) as json_file:
try:
equation = json.load(json_file)
except ValueError:
sys.stderr.write("Invalid JSON for file: `{}'\n".format(json_file.name))
continue # try the next file
description = equation["description"]["verbose"]
if query.lower() in description.lower():
pretty_print(equation)
print('-'*80)
if __name__ == '__main__':
num_args = len(sys.argv) - 1
if num_args != 1:
sys.stderr.write("Usage: python "+sys.argv[0]+" query"+'\n')
sys.exit(1)
main(sys.argv[1])
|
<commit_before>#! /usr/bin/env python
from __future__ import print_function
import os
import sys
import json
def pretty_print(equation):
print(equation["description"]["terse"])
eqn_dict = equation["unicode-pretty-print"]
equation_text = eqn_dict["multiline"]
for line in equation_text:
print(line)
if "parameters" in eqn_dict:
print("where:")
for param_dict in eqn_dict["parameters"]:
symbol = param_dict["symbol"]
label = param_dict["label"]
print(symbol,'=',label)
def main(query):
here = sys.path[0]
json_dir = os.path.join(here, 'equajson')
for filename in os.listdir(json_dir):
if not filename.endswith('.json'):
continue
filepath = os.path.join(json_dir, filename)
with open(filepath) as json_file:
try:
equation = json.load(json_file)
except ValueError:
sys.stderr.write("Invalid JSON for file: `{}'\n".format(json_file.name))
continue # try the next file
description = equation["description"]["verbose"]
if query.lower() in description.lower():
pretty_print(equation)
print()
if __name__ == '__main__':
num_args = len(sys.argv) - 1
if num_args != 1:
sys.stderr.write("Usage: python "+sys.argv[0]+" query"+'\n')
sys.exit(1)
main(sys.argv[1])
<commit_msg>Add visual separator between outputs.<commit_after>
|
#! /usr/bin/env python
from __future__ import print_function
import os
import sys
import json
def pretty_print(equation):
print(equation["description"]["terse"])
eqn_dict = equation["unicode-pretty-print"]
equation_text = eqn_dict["multiline"]
for line in equation_text:
print(line)
if "parameters" in eqn_dict:
print("where:")
for param_dict in eqn_dict["parameters"]:
symbol = param_dict["symbol"]
label = param_dict["label"]
print(symbol,'=',label)
def main(query):
here = sys.path[0]
json_dir = os.path.join(here, 'equajson')
for filename in os.listdir(json_dir):
if not filename.endswith('.json'):
continue
filepath = os.path.join(json_dir, filename)
with open(filepath) as json_file:
try:
equation = json.load(json_file)
except ValueError:
sys.stderr.write("Invalid JSON for file: `{}'\n".format(json_file.name))
continue # try the next file
description = equation["description"]["verbose"]
if query.lower() in description.lower():
pretty_print(equation)
print('-'*80)
if __name__ == '__main__':
num_args = len(sys.argv) - 1
if num_args != 1:
sys.stderr.write("Usage: python "+sys.argv[0]+" query"+'\n')
sys.exit(1)
main(sys.argv[1])
|
#! /usr/bin/env python
from __future__ import print_function
import os
import sys
import json
def pretty_print(equation):
print(equation["description"]["terse"])
eqn_dict = equation["unicode-pretty-print"]
equation_text = eqn_dict["multiline"]
for line in equation_text:
print(line)
if "parameters" in eqn_dict:
print("where:")
for param_dict in eqn_dict["parameters"]:
symbol = param_dict["symbol"]
label = param_dict["label"]
print(symbol,'=',label)
def main(query):
here = sys.path[0]
json_dir = os.path.join(here, 'equajson')
for filename in os.listdir(json_dir):
if not filename.endswith('.json'):
continue
filepath = os.path.join(json_dir, filename)
with open(filepath) as json_file:
try:
equation = json.load(json_file)
except ValueError:
sys.stderr.write("Invalid JSON for file: `{}'\n".format(json_file.name))
continue # try the next file
description = equation["description"]["verbose"]
if query.lower() in description.lower():
pretty_print(equation)
print()
if __name__ == '__main__':
num_args = len(sys.argv) - 1
if num_args != 1:
sys.stderr.write("Usage: python "+sys.argv[0]+" query"+'\n')
sys.exit(1)
main(sys.argv[1])
Add visual separator between outputs.#! /usr/bin/env python
from __future__ import print_function
import os
import sys
import json
def pretty_print(equation):
print(equation["description"]["terse"])
eqn_dict = equation["unicode-pretty-print"]
equation_text = eqn_dict["multiline"]
for line in equation_text:
print(line)
if "parameters" in eqn_dict:
print("where:")
for param_dict in eqn_dict["parameters"]:
symbol = param_dict["symbol"]
label = param_dict["label"]
print(symbol,'=',label)
def main(query):
here = sys.path[0]
json_dir = os.path.join(here, 'equajson')
for filename in os.listdir(json_dir):
if not filename.endswith('.json'):
continue
filepath = os.path.join(json_dir, filename)
with open(filepath) as json_file:
try:
equation = json.load(json_file)
except ValueError:
sys.stderr.write("Invalid JSON for file: `{}'\n".format(json_file.name))
continue # try the next file
description = equation["description"]["verbose"]
if query.lower() in description.lower():
pretty_print(equation)
print('-'*80)
if __name__ == '__main__':
num_args = len(sys.argv) - 1
if num_args != 1:
sys.stderr.write("Usage: python "+sys.argv[0]+" query"+'\n')
sys.exit(1)
main(sys.argv[1])
|
<commit_before>#! /usr/bin/env python
from __future__ import print_function
import os
import sys
import json
def pretty_print(equation):
print(equation["description"]["terse"])
eqn_dict = equation["unicode-pretty-print"]
equation_text = eqn_dict["multiline"]
for line in equation_text:
print(line)
if "parameters" in eqn_dict:
print("where:")
for param_dict in eqn_dict["parameters"]:
symbol = param_dict["symbol"]
label = param_dict["label"]
print(symbol,'=',label)
def main(query):
here = sys.path[0]
json_dir = os.path.join(here, 'equajson')
for filename in os.listdir(json_dir):
if not filename.endswith('.json'):
continue
filepath = os.path.join(json_dir, filename)
with open(filepath) as json_file:
try:
equation = json.load(json_file)
except ValueError:
sys.stderr.write("Invalid JSON for file: `{}'\n".format(json_file.name))
continue # try the next file
description = equation["description"]["verbose"]
if query.lower() in description.lower():
pretty_print(equation)
print()
if __name__ == '__main__':
num_args = len(sys.argv) - 1
if num_args != 1:
sys.stderr.write("Usage: python "+sys.argv[0]+" query"+'\n')
sys.exit(1)
main(sys.argv[1])
<commit_msg>Add visual separator between outputs.<commit_after>#! /usr/bin/env python
from __future__ import print_function
import os
import sys
import json
def pretty_print(equation):
print(equation["description"]["terse"])
eqn_dict = equation["unicode-pretty-print"]
equation_text = eqn_dict["multiline"]
for line in equation_text:
print(line)
if "parameters" in eqn_dict:
print("where:")
for param_dict in eqn_dict["parameters"]:
symbol = param_dict["symbol"]
label = param_dict["label"]
print(symbol,'=',label)
def main(query):
here = sys.path[0]
json_dir = os.path.join(here, 'equajson')
for filename in os.listdir(json_dir):
if not filename.endswith('.json'):
continue
filepath = os.path.join(json_dir, filename)
with open(filepath) as json_file:
try:
equation = json.load(json_file)
except ValueError:
sys.stderr.write("Invalid JSON for file: `{}'\n".format(json_file.name))
continue # try the next file
description = equation["description"]["verbose"]
if query.lower() in description.lower():
pretty_print(equation)
print('-'*80)
if __name__ == '__main__':
num_args = len(sys.argv) - 1
if num_args != 1:
sys.stderr.write("Usage: python "+sys.argv[0]+" query"+'\n')
sys.exit(1)
main(sys.argv[1])
|
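A tiny sketch of the separator change: a fixed-width rule between entries is easier to scan than a bare blank line when each entry spans several lines. The sample entries are illustrative.

def show(entries):
    for text in entries:
        print(text)
        print('-' * 80)  # the commit's visual separator

show(['E = m*c**2', 'F = m*a'])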
6c61e1000f3f87501b6e45a2715bd26a3b83b407
|
collector/absolutefrequency.py
|
collector/absolutefrequency.py
|
from collector import ItemCollector
class ItemNumericAbsoluteFrequencyCollector(ItemCollector):
def __init__(self, previous_collector_set = None):
ItemCollector.__init__(self, previous_collector_set)
self.absolute_frequencies = {}
def collect(self, item, collector_set=None):
current_absolute_frequency = self.absolute_frequencies.get(item, 0) + 1
self.absolute_frequencies[item] = current_absolute_frequency
def get_result(self, collector_set=None):
return self.absolute_frequencies
|
import collections
from collector import ItemCollector
class ItemNumericAbsoluteFrequencyCollector(ItemCollector):
def __init__(self, previous_collector_set = None):
ItemCollector.__init__(self, previous_collector_set)
self.absolute_frequencies = collections.defaultdict(int)
def collect(self, item, collector_set=None):
self.absolute_frequencies[item] += 1
def get_result(self, collector_set=None):
return self.absolute_frequencies
|
Use defaultdict for absolute frequency collector
|
Use defaultdict for absolute frequency collector
|
Python
|
mit
|
davidfoerster/schema-matching
|
from collector import ItemCollector
class ItemNumericAbsoluteFrequencyCollector(ItemCollector):
def __init__(self, previous_collector_set = None):
ItemCollector.__init__(self, previous_collector_set)
self.absolute_frequencies = {}
def collect(self, item, collector_set=None):
current_absolute_frequency = self.absolute_frequencies.get(item, 0) + 1
self.absolute_frequencies[item] = current_absolute_frequency
def get_result(self, collector_set=None):
return self.absolute_frequencies
Use defaultdict for absolute frequency collector
|
import collections
from collector import ItemCollector
class ItemNumericAbsoluteFrequencyCollector(ItemCollector):
def __init__(self, previous_collector_set = None):
ItemCollector.__init__(self, previous_collector_set)
self.absolute_frequencies = collections.defaultdict(int)
def collect(self, item, collector_set=None):
self.absolute_frequencies[item] += 1
def get_result(self, collector_set=None):
return self.absolute_frequencies
|
<commit_before>from collector import ItemCollector
class ItemNumericAbsoluteFrequencyCollector(ItemCollector):
def __init__(self, previous_collector_set = None):
ItemCollector.__init__(self, previous_collector_set)
self.absolute_frequencies = {}
def collect(self, item, collector_set=None):
current_absolute_frequency = self.absolute_frequencies.get(item, 0) + 1
self.absolute_frequencies[item] = current_absolute_frequency
def get_result(self, collector_set=None):
return self.absolute_frequencies
<commit_msg>Use defaultdict for absolute frequency collector<commit_after>
|
import collections
from collector import ItemCollector
class ItemNumericAbsoluteFrequencyCollector(ItemCollector):
def __init__(self, previous_collector_set = None):
ItemCollector.__init__(self, previous_collector_set)
self.absolute_frequencies = collections.defaultdict(int)
def collect(self, item, collector_set=None):
self.absolute_frequencies[item] += 1
def get_result(self, collector_set=None):
return self.absolute_frequencies
|
from collector import ItemCollector
class ItemNumericAbsoluteFrequencyCollector(ItemCollector):
def __init__(self, previous_collector_set = None):
ItemCollector.__init__(self, previous_collector_set)
self.absolute_frequencies = {}
def collect(self, item, collector_set=None):
current_absolute_frequency = self.absolute_frequencies.get(item, 0) + 1
self.absolute_frequencies[item] = current_absolute_frequency
def get_result(self, collector_set=None):
return self.absolute_frequencies
Use defaultdict for absolute frequency collectorimport collections
from collector import ItemCollector
class ItemNumericAbsoluteFrequencyCollector(ItemCollector):
def __init__(self, previous_collector_set = None):
ItemCollector.__init__(self, previous_collector_set)
self.absolute_frequencies = collections.defaultdict(int)
def collect(self, item, collector_set=None):
self.absolute_frequencies[item] += 1
def get_result(self, collector_set=None):
return self.absolute_frequencies
|
<commit_before>from collector import ItemCollector
class ItemNumericAbsoluteFrequencyCollector(ItemCollector):
def __init__(self, previous_collector_set = None):
ItemCollector.__init__(self, previous_collector_set)
self.absolute_frequencies = {}
def collect(self, item, collector_set=None):
current_absolute_frequency = self.absolute_frequencies.get(item, 0) + 1
self.absolute_frequencies[item] = current_absolute_frequency
def get_result(self, collector_set=None):
return self.absolute_frequencies
<commit_msg>Use defaultdict for absolute frequency collector<commit_after>import collections
from collector import ItemCollector
class ItemNumericAbsoluteFrequencyCollector(ItemCollector):
def __init__(self, previous_collector_set = None):
ItemCollector.__init__(self, previous_collector_set)
self.absolute_frequencies = collections.defaultdict(int)
def collect(self, item, collector_set=None):
self.absolute_frequencies[item] += 1
def get_result(self, collector_set=None):
return self.absolute_frequencies
|
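A sketch of the counting idiom from the record; collections.Counter is the standard-library shortcut for the same tally and is shown here only as a cross-check.

from collections import Counter, defaultdict

items = ['a', 'b', 'a', 'c', 'a']

freq = defaultdict(int)
for item in items:
    freq[item] += 1          # no .get(item, 0) bookkeeping needed

assert freq == Counter(items)
print(dict(freq))            # -> {'a': 3, 'b': 1, 'c': 1}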
8a6ba483e88b4f5ace6e9a6773f0ad681edf92b2
|
packages/Python/lldbsuite/test/api/multiple-targets/TestMultipleTargets.py
|
packages/Python/lldbsuite/test/api/multiple-targets/TestMultipleTargets.py
|
"""Test the lldb public C++ api when creating multiple targets simultaneously."""
from __future__ import print_function
import os
import re
import subprocess
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestMultipleSimultaneousDebuggers(TestBase):
mydir = TestBase.compute_mydir(__file__)
NO_DEBUG_INFO_TESTCASE = True
@skipIfNoSBHeaders
@skipIfHostIncompatibleWithRemote
def test_multiple_debuggers(self):
env = {self.dylibPath: self.getLLDBLibraryEnvVal()}
self.driver_exe = os.path.join(os.getcwd(), "multi-target")
self.buildDriver('main.cpp', self.driver_exe)
self.addTearDownHook(lambda: os.remove(self.driver_exe))
self.signBinary(self.driver_exe)
# check_call will raise a CalledProcessError if multi-process-driver doesn't return
# exit code 0 to indicate success. We can let this exception go - the test harness
# will recognize it as a test failure.
if self.TraceOn():
print("Running test %s" % self.driver_exe)
check_call([self.driver_exe, self.driver_exe], env=env)
else:
with open(os.devnull, 'w') as fnull:
check_call([self.driver_exe, self.driver_exe],
env=env, stdout=fnull, stderr=fnull)
|
"""Test the lldb public C++ api when creating multiple targets simultaneously."""
from __future__ import print_function
import os
import re
import subprocess
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestMultipleTargets(TestBase):
mydir = TestBase.compute_mydir(__file__)
NO_DEBUG_INFO_TESTCASE = True
@skipIfNoSBHeaders
@skipIfHostIncompatibleWithRemote
def test_multiple_debuggers(self):
env = {self.dylibPath: self.getLLDBLibraryEnvVal()}
self.driver_exe = os.path.join(os.getcwd(), "multi-target")
self.buildDriver('main.cpp', self.driver_exe)
self.addTearDownHook(lambda: os.remove(self.driver_exe))
self.signBinary(self.driver_exe)
# check_call will raise a CalledProcessError if multi-process-driver doesn't return
# exit code 0 to indicate success. We can let this exception go - the test harness
# will recognize it as a test failure.
if self.TraceOn():
print("Running test %s" % self.driver_exe)
check_call([self.driver_exe, self.driver_exe], env=env)
else:
with open(os.devnull, 'w') as fnull:
check_call([self.driver_exe, self.driver_exe],
env=env, stdout=fnull, stderr=fnull)
|
Rename multiple target test so it is unique.
|
Rename multiple target test so it is unique.
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@289222 91177308-0d34-0410-b5e6-96231b3b80d8
|
Python
|
apache-2.0
|
apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,llvm-mirror/lldb,llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb,llvm-mirror/lldb
|
"""Test the lldb public C++ api when creating multiple targets simultaneously."""
from __future__ import print_function
import os
import re
import subprocess
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestMultipleSimultaneousDebuggers(TestBase):
mydir = TestBase.compute_mydir(__file__)
NO_DEBUG_INFO_TESTCASE = True
@skipIfNoSBHeaders
@skipIfHostIncompatibleWithRemote
def test_multiple_debuggers(self):
env = {self.dylibPath: self.getLLDBLibraryEnvVal()}
self.driver_exe = os.path.join(os.getcwd(), "multi-target")
self.buildDriver('main.cpp', self.driver_exe)
self.addTearDownHook(lambda: os.remove(self.driver_exe))
self.signBinary(self.driver_exe)
# check_call will raise a CalledProcessError if multi-process-driver doesn't return
# exit code 0 to indicate success. We can let this exception go - the test harness
# will recognize it as a test failure.
if self.TraceOn():
print("Running test %s" % self.driver_exe)
check_call([self.driver_exe, self.driver_exe], env=env)
else:
with open(os.devnull, 'w') as fnull:
check_call([self.driver_exe, self.driver_exe],
env=env, stdout=fnull, stderr=fnull)
Rename multiple target test so it is unique.
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@289222 91177308-0d34-0410-b5e6-96231b3b80d8
|
"""Test the lldb public C++ api when creating multiple targets simultaneously."""
from __future__ import print_function
import os
import re
import subprocess
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestMultipleTargets(TestBase):
mydir = TestBase.compute_mydir(__file__)
NO_DEBUG_INFO_TESTCASE = True
@skipIfNoSBHeaders
@skipIfHostIncompatibleWithRemote
def test_multiple_debuggers(self):
env = {self.dylibPath: self.getLLDBLibraryEnvVal()}
self.driver_exe = os.path.join(os.getcwd(), "multi-target")
self.buildDriver('main.cpp', self.driver_exe)
self.addTearDownHook(lambda: os.remove(self.driver_exe))
self.signBinary(self.driver_exe)
# check_call will raise a CalledProcessError if multi-process-driver doesn't return
# exit code 0 to indicate success. We can let this exception go - the test harness
# will recognize it as a test failure.
if self.TraceOn():
print("Running test %s" % self.driver_exe)
check_call([self.driver_exe, self.driver_exe], env=env)
else:
with open(os.devnull, 'w') as fnull:
check_call([self.driver_exe, self.driver_exe],
env=env, stdout=fnull, stderr=fnull)
|
<commit_before>"""Test the lldb public C++ api when creating multiple targets simultaneously."""
from __future__ import print_function
import os
import re
import subprocess
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestMultipleSimultaneousDebuggers(TestBase):
mydir = TestBase.compute_mydir(__file__)
NO_DEBUG_INFO_TESTCASE = True
@skipIfNoSBHeaders
@skipIfHostIncompatibleWithRemote
def test_multiple_debuggers(self):
env = {self.dylibPath: self.getLLDBLibraryEnvVal()}
self.driver_exe = os.path.join(os.getcwd(), "multi-target")
self.buildDriver('main.cpp', self.driver_exe)
self.addTearDownHook(lambda: os.remove(self.driver_exe))
self.signBinary(self.driver_exe)
# check_call will raise a CalledProcessError if multi-process-driver doesn't return
# exit code 0 to indicate success. We can let this exception go - the test harness
# will recognize it as a test failure.
if self.TraceOn():
print("Running test %s" % self.driver_exe)
check_call([self.driver_exe, self.driver_exe], env=env)
else:
with open(os.devnull, 'w') as fnull:
check_call([self.driver_exe, self.driver_exe],
env=env, stdout=fnull, stderr=fnull)
<commit_msg>Rename multiple target test so it is unique.
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@289222 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>
|
"""Test the lldb public C++ api when creating multiple targets simultaneously."""
from __future__ import print_function
import os
import re
import subprocess
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestMultipleTargets(TestBase):
mydir = TestBase.compute_mydir(__file__)
NO_DEBUG_INFO_TESTCASE = True
@skipIfNoSBHeaders
@skipIfHostIncompatibleWithRemote
def test_multiple_debuggers(self):
env = {self.dylibPath: self.getLLDBLibraryEnvVal()}
self.driver_exe = os.path.join(os.getcwd(), "multi-target")
self.buildDriver('main.cpp', self.driver_exe)
self.addTearDownHook(lambda: os.remove(self.driver_exe))
self.signBinary(self.driver_exe)
# check_call will raise a CalledProcessError if multi-process-driver doesn't return
# exit code 0 to indicate success. We can let this exception go - the test harness
# will recognize it as a test failure.
if self.TraceOn():
print("Running test %s" % self.driver_exe)
check_call([self.driver_exe, self.driver_exe], env=env)
else:
with open(os.devnull, 'w') as fnull:
check_call([self.driver_exe, self.driver_exe],
env=env, stdout=fnull, stderr=fnull)
|
"""Test the lldb public C++ api when creating multiple targets simultaneously."""
from __future__ import print_function
import os
import re
import subprocess
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestMultipleSimultaneousDebuggers(TestBase):
mydir = TestBase.compute_mydir(__file__)
NO_DEBUG_INFO_TESTCASE = True
@skipIfNoSBHeaders
@skipIfHostIncompatibleWithRemote
def test_multiple_debuggers(self):
env = {self.dylibPath: self.getLLDBLibraryEnvVal()}
self.driver_exe = os.path.join(os.getcwd(), "multi-target")
self.buildDriver('main.cpp', self.driver_exe)
self.addTearDownHook(lambda: os.remove(self.driver_exe))
self.signBinary(self.driver_exe)
# check_call will raise a CalledProcessError if multi-process-driver doesn't return
# exit code 0 to indicate success. We can let this exception go - the test harness
# will recognize it as a test failure.
if self.TraceOn():
print("Running test %s" % self.driver_exe)
check_call([self.driver_exe, self.driver_exe], env=env)
else:
with open(os.devnull, 'w') as fnull:
check_call([self.driver_exe, self.driver_exe],
env=env, stdout=fnull, stderr=fnull)
Rename multiple target test so it is unique.
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@289222 91177308-0d34-0410-b5e6-96231b3b80d8"""Test the lldb public C++ api when creating multiple targets simultaneously."""
from __future__ import print_function
import os
import re
import subprocess
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestMultipleTargets(TestBase):
mydir = TestBase.compute_mydir(__file__)
NO_DEBUG_INFO_TESTCASE = True
@skipIfNoSBHeaders
@skipIfHostIncompatibleWithRemote
def test_multiple_debuggers(self):
env = {self.dylibPath: self.getLLDBLibraryEnvVal()}
self.driver_exe = os.path.join(os.getcwd(), "multi-target")
self.buildDriver('main.cpp', self.driver_exe)
self.addTearDownHook(lambda: os.remove(self.driver_exe))
self.signBinary(self.driver_exe)
# check_call will raise a CalledProcessError if multi-process-driver doesn't return
# exit code 0 to indicate success. We can let this exception go - the test harness
# will recognize it as a test failure.
if self.TraceOn():
print("Running test %s" % self.driver_exe)
check_call([self.driver_exe, self.driver_exe], env=env)
else:
with open(os.devnull, 'w') as fnull:
check_call([self.driver_exe, self.driver_exe],
env=env, stdout=fnull, stderr=fnull)
|
<commit_before>"""Test the lldb public C++ api when creating multiple targets simultaneously."""
from __future__ import print_function
import os
import re
import subprocess
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestMultipleSimultaneousDebuggers(TestBase):
mydir = TestBase.compute_mydir(__file__)
NO_DEBUG_INFO_TESTCASE = True
@skipIfNoSBHeaders
@skipIfHostIncompatibleWithRemote
def test_multiple_debuggers(self):
env = {self.dylibPath: self.getLLDBLibraryEnvVal()}
self.driver_exe = os.path.join(os.getcwd(), "multi-target")
self.buildDriver('main.cpp', self.driver_exe)
self.addTearDownHook(lambda: os.remove(self.driver_exe))
self.signBinary(self.driver_exe)
# check_call will raise a CalledProcessError if multi-process-driver doesn't return
# exit code 0 to indicate success. We can let this exception go - the test harness
# will recognize it as a test failure.
if self.TraceOn():
print("Running test %s" % self.driver_exe)
check_call([self.driver_exe, self.driver_exe], env=env)
else:
with open(os.devnull, 'w') as fnull:
check_call([self.driver_exe, self.driver_exe],
env=env, stdout=fnull, stderr=fnull)
<commit_msg>Rename multiple target test so it is unique.
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@289222 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>"""Test the lldb public C++ api when creating multiple targets simultaneously."""
from __future__ import print_function
import os
import re
import subprocess
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestMultipleTargets(TestBase):
mydir = TestBase.compute_mydir(__file__)
NO_DEBUG_INFO_TESTCASE = True
@skipIfNoSBHeaders
@skipIfHostIncompatibleWithRemote
def test_multiple_debuggers(self):
env = {self.dylibPath: self.getLLDBLibraryEnvVal()}
self.driver_exe = os.path.join(os.getcwd(), "multi-target")
self.buildDriver('main.cpp', self.driver_exe)
self.addTearDownHook(lambda: os.remove(self.driver_exe))
self.signBinary(self.driver_exe)
# check_call will raise a CalledProcessError if multi-process-driver doesn't return
# exit code 0 to indicate success. We can let this exception go - the test harness
# will recognize it as a test failure.
if self.TraceOn():
print("Running test %s" % self.driver_exe)
check_call([self.driver_exe, self.driver_exe], env=env)
else:
with open(os.devnull, 'w') as fnull:
check_call([self.driver_exe, self.driver_exe],
env=env, stdout=fnull, stderr=fnull)
|
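A hedged sketch of the invocation pattern inside the test: run a child process with a custom env and silence its output unless tracing. '/bin/true' is an illustrative POSIX stand-in for the built driver binary.

import os
import subprocess

def run_driver(exe, trace=False, env=None):
    args = [exe, exe]  # the test passes the driver its own path as an argument
    if trace:
        subprocess.check_call(args, env=env)  # CalledProcessError on nonzero exit
    else:
        with open(os.devnull, 'w') as fnull:
            subprocess.check_call(args, env=env, stdout=fnull, stderr=fnull)

run_driver('/bin/true')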
743198c5e94471cfa68bdb8335e1d75ce4580722
|
components/archivist/archivist.py
|
components/archivist/archivist.py
|
#! /usr/bin/env python
from pika import BlockingConnection, ConnectionParameters
from psycopg2 import connect
RABBIT_MQ_HOST = '54.76.183.35'
RABBIT_MQ_PORT = 5672
POSTGRES_HOST = 'microservices.cc9uedlzx2lk.eu-west-1.rds.amazonaws.com'
POSTGRES_DATABASE = 'micro'
POSTGRES_USER = 'microservices'
POSTGRES_PASSWORD = 'microservices'
def store(ch, method, properties, body):
topic, content = method.routing_key, body
conn = connect(host=POSTGRES_HOST, database=POSTGRES_DATABASE,
user=POSTGRES_USER, password=POSTGRES_PASSWORD)
cursor = conn.cursor()
cursor.execute('INSERT INTO facts VALUES (%s, now(), %s);',
(topic, content))
conn.commit()
cursor.close()
connection.close()
print 'Recorded topic %s, content %s' % (topic, content)
connection = BlockingConnection(ConnectionParameters(host=RABBIT_MQ_HOST,
port=RABBIT_MQ_PORT))
channel = connection.channel()
channel.exchange_declare(exchange='alex2', type='topic')
result = channel.queue_declare(exclusive=True)
queue = result.method.queue
channel.queue_bind(exchange='alex2', queue=queue, routing_key='*')
channel.basic_consume(store, queue=queue, no_ack=True)
channel.start_consuming()
|
#! /usr/bin/env python

from pika import BlockingConnection, ConnectionParameters
from psycopg2 import connect

RABBIT_MQ_HOST = '54.76.183.35'
RABBIT_MQ_PORT = 5672

POSTGRES_HOST = 'microservices.cc9uedlzx2lk.eu-west-1.rds.amazonaws.com'
POSTGRES_DATABASE = 'micro'
POSTGRES_USER = 'microservices'
POSTGRES_PASSWORD = 'microservices'


def store(ch, method, properties, body):
    topic, content = method.routing_key, body
    conn = connect(host=POSTGRES_HOST, database=POSTGRES_DATABASE,
                   user=POSTGRES_USER, password=POSTGRES_PASSWORD)
    cursor = conn.cursor()
    cursor.execute('INSERT INTO facts VALUES (%s, now(), %s);',
                   (topic, content))
    conn.commit()
    cursor.close()
    conn.close()
    print 'Recorded topic %s, content %s' % (topic, content)


connection = BlockingConnection(ConnectionParameters(host=RABBIT_MQ_HOST,
                                                     port=RABBIT_MQ_PORT))
channel = connection.channel()
channel.exchange_declare(exchange='alex2', type='topic')
result = channel.queue_declare(exclusive=True)
queue = result.method.queue
channel.queue_bind(exchange='alex2', queue=queue, routing_key='*')
channel.basic_consume(store, queue=queue, no_ack=True)
channel.start_consuming()
|
Fix silly close error, wrong connection
|
Fix silly close error, wrong connection
|
Python
|
mit
|
douglassquirrel/combo,douglassquirrel/microservices-hackathon-july-2014,douglassquirrel/microservices-hackathon-july-2014,douglassquirrel/combo,douglassquirrel/microservices-hackathon-july-2014,douglassquirrel/microservices-hackathon-july-2014,douglassquirrel/combo
|
#! /usr/bin/env python
from pika import BlockingConnection, ConnectionParameters
from psycopg2 import connect
RABBIT_MQ_HOST = '54.76.183.35'
RABBIT_MQ_PORT = 5672
POSTGRES_HOST = 'microservices.cc9uedlzx2lk.eu-west-1.rds.amazonaws.com'
POSTGRES_DATABASE = 'micro'
POSTGRES_USER = 'microservices'
POSTGRES_PASSWORD = 'microservices'
def store(ch, method, properties, body):
topic, content = method.routing_key, body
conn = connect(host=POSTGRES_HOST, database=POSTGRES_DATABASE,
user=POSTGRES_USER, password=POSTGRES_PASSWORD)
cursor = conn.cursor()
cursor.execute('INSERT INTO facts VALUES (%s, now(), %s);',
(topic, content))
conn.commit()
cursor.close()
connection.close()
print 'Recorded topic %s, content %s' % (topic, content)
connection = BlockingConnection(ConnectionParameters(host=RABBIT_MQ_HOST,
port=RABBIT_MQ_PORT))
channel = connection.channel()
channel.exchange_declare(exchange='alex2', type='topic')
result = channel.queue_declare(exclusive=True)
queue = result.method.queue
channel.queue_bind(exchange='alex2', queue=queue, routing_key='*')
channel.basic_consume(store, queue=queue, no_ack=True)
channel.start_consuming()
Fix silly close error, wrong connection
|
#! /usr/bin/env python
from pika import BlockingConnection, ConnectionParameters
from psycopg2 import connect
RABBIT_MQ_HOST = '54.76.183.35'
RABBIT_MQ_PORT = 5672
POSTGRES_HOST = 'microservices.cc9uedlzx2lk.eu-west-1.rds.amazonaws.com'
POSTGRES_DATABASE = 'micro'
POSTGRES_USER = 'microservices'
POSTGRES_PASSWORD = 'microservices'
def store(ch, method, properties, body):
topic, content = method.routing_key, body
conn = connect(host=POSTGRES_HOST, database=POSTGRES_DATABASE,
user=POSTGRES_USER, password=POSTGRES_PASSWORD)
cursor = conn.cursor()
cursor.execute('INSERT INTO facts VALUES (%s, now(), %s);',
(topic, content))
conn.commit()
cursor.close()
conn.close()
print 'Recorded topic %s, content %s' % (topic, content)
connection = BlockingConnection(ConnectionParameters(host=RABBIT_MQ_HOST,
port=RABBIT_MQ_PORT))
channel = connection.channel()
channel.exchange_declare(exchange='alex2', type='topic')
result = channel.queue_declare(exclusive=True)
queue = result.method.queue
channel.queue_bind(exchange='alex2', queue=queue, routing_key='*')
channel.basic_consume(store, queue=queue, no_ack=True)
channel.start_consuming()
|
<commit_before>#! /usr/bin/env python
from pika import BlockingConnection, ConnectionParameters
from psycopg2 import connect
RABBIT_MQ_HOST = '54.76.183.35'
RABBIT_MQ_PORT = 5672
POSTGRES_HOST = 'microservices.cc9uedlzx2lk.eu-west-1.rds.amazonaws.com'
POSTGRES_DATABASE = 'micro'
POSTGRES_USER = 'microservices'
POSTGRES_PASSWORD = 'microservices'
def store(ch, method, properties, body):
topic, content = method.routing_key, body
conn = connect(host=POSTGRES_HOST, database=POSTGRES_DATABASE,
user=POSTGRES_USER, password=POSTGRES_PASSWORD)
cursor = conn.cursor()
cursor.execute('INSERT INTO facts VALUES (%s, now(), %s);',
(topic, content))
conn.commit()
cursor.close()
connection.close()
print 'Recorded topic %s, content %s' % (topic, content)
connection = BlockingConnection(ConnectionParameters(host=RABBIT_MQ_HOST,
port=RABBIT_MQ_PORT))
channel = connection.channel()
channel.exchange_declare(exchange='alex2', type='topic')
result = channel.queue_declare(exclusive=True)
queue = result.method.queue
channel.queue_bind(exchange='alex2', queue=queue, routing_key='*')
channel.basic_consume(store, queue=queue, no_ack=True)
channel.start_consuming()
<commit_msg>Fix silly close error, wrong connection<commit_after>
|
#! /usr/bin/env python
from pika import BlockingConnection, ConnectionParameters
from psycopg2 import connect
RABBIT_MQ_HOST = '54.76.183.35'
RABBIT_MQ_PORT = 5672
POSTGRES_HOST = 'microservices.cc9uedlzx2lk.eu-west-1.rds.amazonaws.com'
POSTGRES_DATABASE = 'micro'
POSTGRES_USER = 'microservices'
POSTGRES_PASSWORD = 'microservices'
def store(ch, method, properties, body):
topic, content = method.routing_key, body
conn = connect(host=POSTGRES_HOST, database=POSTGRES_DATABASE,
user=POSTGRES_USER, password=POSTGRES_PASSWORD)
cursor = conn.cursor()
cursor.execute('INSERT INTO facts VALUES (%s, now(), %s);',
(topic, content))
conn.commit()
cursor.close()
conn.close()
print 'Recorded topic %s, content %s' % (topic, content)
connection = BlockingConnection(ConnectionParameters(host=RABBIT_MQ_HOST,
port=RABBIT_MQ_PORT))
channel = connection.channel()
channel.exchange_declare(exchange='alex2', type='topic')
result = channel.queue_declare(exclusive=True)
queue = result.method.queue
channel.queue_bind(exchange='alex2', queue=queue, routing_key='*')
channel.basic_consume(store, queue=queue, no_ack=True)
channel.start_consuming()
|
#! /usr/bin/env python
from pika import BlockingConnection, ConnectionParameters
from psycopg2 import connect
RABBIT_MQ_HOST = '54.76.183.35'
RABBIT_MQ_PORT = 5672
POSTGRES_HOST = 'microservices.cc9uedlzx2lk.eu-west-1.rds.amazonaws.com'
POSTGRES_DATABASE = 'micro'
POSTGRES_USER = 'microservices'
POSTGRES_PASSWORD = 'microservices'
def store(ch, method, properties, body):
topic, content = method.routing_key, body
conn = connect(host=POSTGRES_HOST, database=POSTGRES_DATABASE,
user=POSTGRES_USER, password=POSTGRES_PASSWORD)
cursor = conn.cursor()
cursor.execute('INSERT INTO facts VALUES (%s, now(), %s);',
(topic, content))
conn.commit()
cursor.close()
connection.close()
print 'Recorded topic %s, content %s' % (topic, content)
connection = BlockingConnection(ConnectionParameters(host=RABBIT_MQ_HOST,
port=RABBIT_MQ_PORT))
channel = connection.channel()
channel.exchange_declare(exchange='alex2', type='topic')
result = channel.queue_declare(exclusive=True)
queue = result.method.queue
channel.queue_bind(exchange='alex2', queue=queue, routing_key='*')
channel.basic_consume(store, queue=queue, no_ack=True)
channel.start_consuming()
Fix silly close error, wrong connection#! /usr/bin/env python
from pika import BlockingConnection, ConnectionParameters
from psycopg2 import connect
RABBIT_MQ_HOST = '54.76.183.35'
RABBIT_MQ_PORT = 5672
POSTGRES_HOST = 'microservices.cc9uedlzx2lk.eu-west-1.rds.amazonaws.com'
POSTGRES_DATABASE = 'micro'
POSTGRES_USER = 'microservices'
POSTGRES_PASSWORD = 'microservices'
def store(ch, method, properties, body):
topic, content = method.routing_key, body
conn = connect(host=POSTGRES_HOST, database=POSTGRES_DATABASE,
user=POSTGRES_USER, password=POSTGRES_PASSWORD)
cursor = conn.cursor()
cursor.execute('INSERT INTO facts VALUES (%s, now(), %s);',
(topic, content))
conn.commit()
cursor.close()
conn.close()
print 'Recorded topic %s, content %s' % (topic, content)
connection = BlockingConnection(ConnectionParameters(host=RABBIT_MQ_HOST,
port=RABBIT_MQ_PORT))
channel = connection.channel()
channel.exchange_declare(exchange='alex2', type='topic')
result = channel.queue_declare(exclusive=True)
queue = result.method.queue
channel.queue_bind(exchange='alex2', queue=queue, routing_key='*')
channel.basic_consume(store, queue=queue, no_ack=True)
channel.start_consuming()
|
<commit_before>#! /usr/bin/env python
from pika import BlockingConnection, ConnectionParameters
from psycopg2 import connect
RABBIT_MQ_HOST = '54.76.183.35'
RABBIT_MQ_PORT = 5672
POSTGRES_HOST = 'microservices.cc9uedlzx2lk.eu-west-1.rds.amazonaws.com'
POSTGRES_DATABASE = 'micro'
POSTGRES_USER = 'microservices'
POSTGRES_PASSWORD = 'microservices'
def store(ch, method, properties, body):
topic, content = method.routing_key, body
conn = connect(host=POSTGRES_HOST, database=POSTGRES_DATABASE,
user=POSTGRES_USER, password=POSTGRES_PASSWORD)
cursor = conn.cursor()
cursor.execute('INSERT INTO facts VALUES (%s, now(), %s);',
(topic, content))
conn.commit()
cursor.close()
connection.close()
print 'Recorded topic %s, content %s' % (topic, content)
connection = BlockingConnection(ConnectionParameters(host=RABBIT_MQ_HOST,
port=RABBIT_MQ_PORT))
channel = connection.channel()
channel.exchange_declare(exchange='alex2', type='topic')
result = channel.queue_declare(exclusive=True)
queue = result.method.queue
channel.queue_bind(exchange='alex2', queue=queue, routing_key='*')
channel.basic_consume(store, queue=queue, no_ack=True)
channel.start_consuming()
<commit_msg>Fix silly close error, wrong connection<commit_after>#! /usr/bin/env python
from pika import BlockingConnection, ConnectionParameters
from psycopg2 import connect
RABBIT_MQ_HOST = '54.76.183.35'
RABBIT_MQ_PORT = 5672
POSTGRES_HOST = 'microservices.cc9uedlzx2lk.eu-west-1.rds.amazonaws.com'
POSTGRES_DATABASE = 'micro'
POSTGRES_USER = 'microservices'
POSTGRES_PASSWORD = 'microservices'
def store(ch, method, properties, body):
topic, content = method.routing_key, body
conn = connect(host=POSTGRES_HOST, database=POSTGRES_DATABASE,
user=POSTGRES_USER, password=POSTGRES_PASSWORD)
cursor = conn.cursor()
cursor.execute('INSERT INTO facts VALUES (%s, now(), %s);',
(topic, content))
conn.commit()
cursor.close()
conn.close()
print 'Recorded topic %s, content %s' % (topic, content)
connection = BlockingConnection(ConnectionParameters(host=RABBIT_MQ_HOST,
port=RABBIT_MQ_PORT))
channel = connection.channel()
channel.exchange_declare(exchange='alex2', type='topic')
result = channel.queue_declare(exclusive=True)
queue = result.method.queue
channel.queue_bind(exchange='alex2', queue=queue, routing_key='*')
channel.basic_consume(store, queue=queue, no_ack=True)
channel.start_consuming()
|
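The bug above is a classic shadowing mistake: store() built a per-call conn but closed the module-level connection that the RabbitMQ consumer depends on. A minimal sketch of how context managers rule this out — the DSN string is a placeholder, not taken from the record:

from contextlib import closing

import psycopg2

DSN = 'host=localhost dbname=micro user=micro'  # placeholder DSN, not the record's


def store_fact(topic, content):
    # closing() guarantees conn.close() runs even if execute() raises,
    # and the with-scoped name cannot collide with any outer connection.
    with closing(psycopg2.connect(DSN)) as conn:
        with conn.cursor() as cursor:
            cursor.execute('INSERT INTO facts VALUES (%s, now(), %s);',
                           (topic, content))
        conn.commit()

Scoping every database connection to its call site keeps the long-lived consumer connection untouchable from inside the handler.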
1318d0bc658d23d22452b27004c5d670f4c80d17
|
spacy/tests/conftest.py
|
spacy/tests/conftest.py
|
import pytest
import os

import spacy


@pytest.fixture(scope="session")
def EN():
    return spacy.load("en")


@pytest.fixture(scope="session")
def DE():
    return spacy.load("de")


def pytest_addoption(parser):
    parser.addoption("--models", action="store_true",
                     help="include tests that require full models")
    parser.addoption("--vectors", action="store_true",
                     help="include word vectors tests")
    parser.addoption("--slow", action="store_true",
                     help="include slow tests")


def pytest_runtest_setup(item):
    for opt in ['models', 'vectors', 'slow']:
        if opt in item.keywords and not item.config.getoption("--%s" % opt):
            pytest.skip("need --%s option to run" % opt)
|
import pytest
import os

from ..en import English
from ..de import German


@pytest.fixture(scope="session")
def EN():
    return English(path=None)


@pytest.fixture(scope="session")
def DE():
    return German(path=None)


def pytest_addoption(parser):
    parser.addoption("--models", action="store_true",
                     help="include tests that require full models")
    parser.addoption("--vectors", action="store_true",
                     help="include word vectors tests")
    parser.addoption("--slow", action="store_true",
                     help="include slow tests")


def pytest_runtest_setup(item):
    for opt in ['models', 'vectors', 'slow']:
        if opt in item.keywords and not item.config.getoption("--%s" % opt):
            pytest.skip("need --%s option to run" % opt)
|
Test with the non-loaded versions of the English and German pipelines.
|
Test with the non-loaded versions of the English and German pipelines.
|
Python
|
mit
|
raphael0202/spaCy,honnibal/spaCy,banglakit/spaCy,aikramer2/spaCy,recognai/spaCy,raphael0202/spaCy,recognai/spaCy,explosion/spaCy,spacy-io/spaCy,explosion/spaCy,oroszgy/spaCy.hu,raphael0202/spaCy,Gregory-Howard/spaCy,aikramer2/spaCy,aikramer2/spaCy,explosion/spaCy,oroszgy/spaCy.hu,aikramer2/spaCy,explosion/spaCy,spacy-io/spaCy,honnibal/spaCy,recognai/spaCy,Gregory-Howard/spaCy,spacy-io/spaCy,Gregory-Howard/spaCy,oroszgy/spaCy.hu,banglakit/spaCy,Gregory-Howard/spaCy,recognai/spaCy,explosion/spaCy,honnibal/spaCy,recognai/spaCy,recognai/spaCy,oroszgy/spaCy.hu,oroszgy/spaCy.hu,banglakit/spaCy,Gregory-Howard/spaCy,spacy-io/spaCy,spacy-io/spaCy,banglakit/spaCy,oroszgy/spaCy.hu,raphael0202/spaCy,raphael0202/spaCy,aikramer2/spaCy,Gregory-Howard/spaCy,raphael0202/spaCy,explosion/spaCy,aikramer2/spaCy,spacy-io/spaCy,banglakit/spaCy,banglakit/spaCy,honnibal/spaCy
|
import pytest
import os
import spacy
@pytest.fixture(scope="session")
def EN():
return spacy.load("en")
@pytest.fixture(scope="session")
def DE():
return spacy.load("de")
def pytest_addoption(parser):
parser.addoption("--models", action="store_true",
help="include tests that require full models")
parser.addoption("--vectors", action="store_true",
help="include word vectors tests")
parser.addoption("--slow", action="store_true",
help="include slow tests")
def pytest_runtest_setup(item):
for opt in ['models', 'vectors', 'slow']:
if opt in item.keywords and not item.config.getoption("--%s" % opt):
pytest.skip("need --%s option to run" % opt)
Test with the non-loaded versions of the English and German pipelines.
|
import pytest
import os
from ..en import English
from ..de import German
@pytest.fixture(scope="session")
def EN():
return English(path=None)
@pytest.fixture(scope="session")
def DE():
return German(path=None)
def pytest_addoption(parser):
parser.addoption("--models", action="store_true",
help="include tests that require full models")
parser.addoption("--vectors", action="store_true",
help="include word vectors tests")
parser.addoption("--slow", action="store_true",
help="include slow tests")
def pytest_runtest_setup(item):
for opt in ['models', 'vectors', 'slow']:
if opt in item.keywords and not item.config.getoption("--%s" % opt):
pytest.skip("need --%s option to run" % opt)
|
<commit_before>import pytest
import os
import spacy
@pytest.fixture(scope="session")
def EN():
return spacy.load("en")
@pytest.fixture(scope="session")
def DE():
return spacy.load("de")
def pytest_addoption(parser):
parser.addoption("--models", action="store_true",
help="include tests that require full models")
parser.addoption("--vectors", action="store_true",
help="include word vectors tests")
parser.addoption("--slow", action="store_true",
help="include slow tests")
def pytest_runtest_setup(item):
for opt in ['models', 'vectors', 'slow']:
if opt in item.keywords and not item.config.getoption("--%s" % opt):
pytest.skip("need --%s option to run" % opt)
<commit_msg>Test with the non-loaded versions of the English and German pipelines.<commit_after>
|
import pytest
import os
from ..en import English
from ..de import German
@pytest.fixture(scope="session")
def EN():
return English(path=None)
@pytest.fixture(scope="session")
def DE():
return German(path=None)
def pytest_addoption(parser):
parser.addoption("--models", action="store_true",
help="include tests that require full models")
parser.addoption("--vectors", action="store_true",
help="include word vectors tests")
parser.addoption("--slow", action="store_true",
help="include slow tests")
def pytest_runtest_setup(item):
for opt in ['models', 'vectors', 'slow']:
if opt in item.keywords and not item.config.getoption("--%s" % opt):
pytest.skip("need --%s option to run" % opt)
|
import pytest
import os
import spacy
@pytest.fixture(scope="session")
def EN():
return spacy.load("en")
@pytest.fixture(scope="session")
def DE():
return spacy.load("de")
def pytest_addoption(parser):
parser.addoption("--models", action="store_true",
help="include tests that require full models")
parser.addoption("--vectors", action="store_true",
help="include word vectors tests")
parser.addoption("--slow", action="store_true",
help="include slow tests")
def pytest_runtest_setup(item):
for opt in ['models', 'vectors', 'slow']:
if opt in item.keywords and not item.config.getoption("--%s" % opt):
pytest.skip("need --%s option to run" % opt)
Test with the non-loaded versions of the English and German pipelines.import pytest
import os
from ..en import English
from ..de import German
@pytest.fixture(scope="session")
def EN():
return English(path=None)
@pytest.fixture(scope="session")
def DE():
return German(path=None)
def pytest_addoption(parser):
parser.addoption("--models", action="store_true",
help="include tests that require full models")
parser.addoption("--vectors", action="store_true",
help="include word vectors tests")
parser.addoption("--slow", action="store_true",
help="include slow tests")
def pytest_runtest_setup(item):
for opt in ['models', 'vectors', 'slow']:
if opt in item.keywords and not item.config.getoption("--%s" % opt):
pytest.skip("need --%s option to run" % opt)
|
<commit_before>import pytest
import os
import spacy
@pytest.fixture(scope="session")
def EN():
return spacy.load("en")
@pytest.fixture(scope="session")
def DE():
return spacy.load("de")
def pytest_addoption(parser):
parser.addoption("--models", action="store_true",
help="include tests that require full models")
parser.addoption("--vectors", action="store_true",
help="include word vectors tests")
parser.addoption("--slow", action="store_true",
help="include slow tests")
def pytest_runtest_setup(item):
for opt in ['models', 'vectors', 'slow']:
if opt in item.keywords and not item.config.getoption("--%s" % opt):
pytest.skip("need --%s option to run" % opt)
<commit_msg>Test with the non-loaded versions of the English and German pipelines.<commit_after>import pytest
import os
from ..en import English
from ..de import German
@pytest.fixture(scope="session")
def EN():
return English(path=None)
@pytest.fixture(scope="session")
def DE():
return German(path=None)
def pytest_addoption(parser):
parser.addoption("--models", action="store_true",
help="include tests that require full models")
parser.addoption("--vectors", action="store_true",
help="include word vectors tests")
parser.addoption("--slow", action="store_true",
help="include slow tests")
def pytest_runtest_setup(item):
for opt in ['models', 'vectors', 'slow']:
if opt in item.keywords and not item.config.getoption("--%s" % opt):
pytest.skip("need --%s option to run" % opt)
|
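The point of this change is that English(path=None) builds the pipeline object without loading any model data, so the session fixtures work on machines with no models installed. A self-contained sketch of the same pattern, using a stand-in Pipeline class instead of the real spaCy imports:

import pytest


class Pipeline(object):
    """Stand-in for a spaCy language class; path=None skips model data."""
    def __init__(self, path=None):
        self.path = path  # nothing is read from disk when path is None


@pytest.fixture(scope='session')  # constructed once per test session
def EN():
    return Pipeline(path=None)


def test_tokenizer_only(EN):
    assert EN.path is None  # runs even with no models downloaded

Tests that genuinely need model data stay gated behind the --models flag via the pytest_runtest_setup hook shown in the record.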
07bf035221667bdd80ed8570079163d1162d0dd2
|
cartoframes/__init__.py
|
cartoframes/__init__.py
|
from ._version import __version__
from .core.cartodataframe import CartoDataFrame
from .core.logger import set_log_level
from .io.carto import read_carto, to_carto, has_table, delete_table, describe_table, \
    update_table, copy_table, create_table_from_query


__all__ = [
    '__version__',
    'CartoDataFrame',
    'read_carto',
    'to_carto',
    'has_table',
    'delete_table',
    'describe_table',
    'update_table',
    'copy_table',
    'create_table_from_query',
    'set_log_level'
]
|
from ._version import __version__
from .utils.utils import check_package
from .core.cartodataframe import CartoDataFrame
from .core.logger import set_log_level
from .io.carto import read_carto, to_carto, has_table, delete_table, describe_table, \
    update_table, copy_table, create_table_from_query


# Check installed packages versions
check_package('carto', '>=1.8.2')
check_package('pandas', '>=0.23.0')
check_package('geopandas', '>=0.6.0')


__all__ = [
    '__version__',
    'CartoDataFrame',
    'read_carto',
    'to_carto',
    'has_table',
    'delete_table',
    'describe_table',
    'update_table',
    'copy_table',
    'create_table_from_query',
    'set_log_level'
]
|
Check critical dependencies versions on runtime
|
Check critical dependencies versions on runtime
|
Python
|
bsd-3-clause
|
CartoDB/cartoframes,CartoDB/cartoframes
|
from ._version import __version__
from .core.cartodataframe import CartoDataFrame
from .core.logger import set_log_level
from .io.carto import read_carto, to_carto, has_table, delete_table, describe_table, \
update_table, copy_table, create_table_from_query
__all__ = [
'__version__',
'CartoDataFrame',
'read_carto',
'to_carto',
'has_table',
'delete_table',
'describe_table',
'update_table',
'copy_table',
'create_table_from_query',
'set_log_level'
]
Check critical dependencies versions on runtime
|
from ._version import __version__
from .utils.utils import check_package
from .core.cartodataframe import CartoDataFrame
from .core.logger import set_log_level
from .io.carto import read_carto, to_carto, has_table, delete_table, describe_table, \
update_table, copy_table, create_table_from_query
# Check installed packages versions
check_package('carto', '>=1.8.2')
check_package('pandas', '>=0.23.0')
check_package('geopandas', '>=0.6.0')
__all__ = [
'__version__',
'CartoDataFrame',
'read_carto',
'to_carto',
'has_table',
'delete_table',
'describe_table',
'update_table',
'copy_table',
'create_table_from_query',
'set_log_level'
]
|
<commit_before>from ._version import __version__
from .core.cartodataframe import CartoDataFrame
from .core.logger import set_log_level
from .io.carto import read_carto, to_carto, has_table, delete_table, describe_table, \
update_table, copy_table, create_table_from_query
__all__ = [
'__version__',
'CartoDataFrame',
'read_carto',
'to_carto',
'has_table',
'delete_table',
'describe_table',
'update_table',
'copy_table',
'create_table_from_query',
'set_log_level'
]
<commit_msg>Check critical dependencies versions on runtime<commit_after>
|
from ._version import __version__
from .utils.utils import check_package
from .core.cartodataframe import CartoDataFrame
from .core.logger import set_log_level
from .io.carto import read_carto, to_carto, has_table, delete_table, describe_table, \
update_table, copy_table, create_table_from_query
# Check installed packages versions
check_package('carto', '>=1.8.2')
check_package('pandas', '>=0.23.0')
check_package('geopandas', '>=0.6.0')
__all__ = [
'__version__',
'CartoDataFrame',
'read_carto',
'to_carto',
'has_table',
'delete_table',
'describe_table',
'update_table',
'copy_table',
'create_table_from_query',
'set_log_level'
]
|
from ._version import __version__
from .core.cartodataframe import CartoDataFrame
from .core.logger import set_log_level
from .io.carto import read_carto, to_carto, has_table, delete_table, describe_table, \
update_table, copy_table, create_table_from_query
__all__ = [
'__version__',
'CartoDataFrame',
'read_carto',
'to_carto',
'has_table',
'delete_table',
'describe_table',
'update_table',
'copy_table',
'create_table_from_query',
'set_log_level'
]
Check critical dependencies versions on runtimefrom ._version import __version__
from .utils.utils import check_package
from .core.cartodataframe import CartoDataFrame
from .core.logger import set_log_level
from .io.carto import read_carto, to_carto, has_table, delete_table, describe_table, \
update_table, copy_table, create_table_from_query
# Check installed packages versions
check_package('carto', '>=1.8.2')
check_package('pandas', '>=0.23.0')
check_package('geopandas', '>=0.6.0')
__all__ = [
'__version__',
'CartoDataFrame',
'read_carto',
'to_carto',
'has_table',
'delete_table',
'describe_table',
'update_table',
'copy_table',
'create_table_from_query',
'set_log_level'
]
|
<commit_before>from ._version import __version__
from .core.cartodataframe import CartoDataFrame
from .core.logger import set_log_level
from .io.carto import read_carto, to_carto, has_table, delete_table, describe_table, \
update_table, copy_table, create_table_from_query
__all__ = [
'__version__',
'CartoDataFrame',
'read_carto',
'to_carto',
'has_table',
'delete_table',
'describe_table',
'update_table',
'copy_table',
'create_table_from_query',
'set_log_level'
]
<commit_msg>Check critical dependencies versions on runtime<commit_after>from ._version import __version__
from .utils.utils import check_package
from .core.cartodataframe import CartoDataFrame
from .core.logger import set_log_level
from .io.carto import read_carto, to_carto, has_table, delete_table, describe_table, \
update_table, copy_table, create_table_from_query
# Check installed packages versions
check_package('carto', '>=1.8.2')
check_package('pandas', '>=0.23.0')
check_package('geopandas', '>=0.6.0')
__all__ = [
'__version__',
'CartoDataFrame',
'read_carto',
'to_carto',
'has_table',
'delete_table',
'describe_table',
'update_table',
'copy_table',
'create_table_from_query',
'set_log_level'
]
|
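check_package itself lives in .utils.utils, which the record does not show. One plausible implementation, sketched with pkg_resources — the exception type and message wording are assumptions, not the library's actual code:

import pkg_resources


def check_package(name, spec):
    """Fail at import time if a dependency violates a version spec."""
    try:
        # pkg_resources accepts requirement strings such as 'pandas>=0.23.0'
        pkg_resources.require('{0}{1}'.format(name, spec))
    except pkg_resources.VersionConflict as exc:
        raise ImportError('{0}{1} required, found {2}'.format(name, spec, exc.dist))
    except pkg_resources.DistributionNotFound:
        raise ImportError('{0}{1} is required but not installed'.format(name, spec))


check_package('pandas', '>=0.23.0')  # fails fast here, not deep inside a call

Checking versions at import time turns a confusing mid-call AttributeError into an immediate, clearly named dependency error.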
39d47ed5c0e89f41648a9bdd412b6190d274a488
|
tbapy/models.py
|
tbapy/models.py
|
class _base_model_class(dict):
    def __init__(self, json={}):
        self.update(json)
        self.update(self.__dict__)
        self.__dict__ = self

    def __repr__(self):
        return '%s(%s)' % (self.__class__.__name__, self.json())

    def json(self):
        return dict.__repr__(self)


def _model_class(class_name):
    return type(class_name, (_base_model_class,), {})


APIStatus = _model_class('Status')
Team = _model_class('Team')
Event = _model_class('Event')
Match = _model_class('Match')
Award = _model_class('Award')
District = _model_class('District')
Media = _model_class('Media')
Robot = _model_class('Robot')
Profile = _model_class('Profile')
Alliance = _model_class('Alliance')
DistrictPoints = _model_class('DistrictPoints')
Insights = _model_class('Insights')
OPRs = _model_class('OPRs')
Prediction = _model_class('Prediction')
Rankings = _model_class('Rankings')
DistrictRanking = _model_class('DistrictRanking')
Status = _model_class('TeamStatus')
|
class _base_model_class(dict):
    def __init__(self, json={}):
        self.update(json)
        self.update(self.__dict__)
        self.__dict__ = self

    def __repr__(self):
        return '%s(%s)' % (self.__class__.__name__, self.json())

    def json(self):
        return dict.__repr__(self)


def _model_class(class_name):
    return type(class_name, (_base_model_class,), {})


APIStatus = _model_class('APIStatus')
Team = _model_class('Team')
Event = _model_class('Event')
Match = _model_class('Match')
Award = _model_class('Award')
District = _model_class('District')
Media = _model_class('Media')
Robot = _model_class('Robot')
Profile = _model_class('Profile')
Alliance = _model_class('Alliance')
DistrictPoints = _model_class('DistrictPoints')
Insights = _model_class('Insights')
OPRs = _model_class('OPRs')
Prediction = _model_class('Prediction')
Rankings = _model_class('Rankings')
DistrictRanking = _model_class('DistrictRanking')
Status = _model_class('Status')
|
Clear up Status naming confusion
|
Clear up Status naming confusion
|
Python
|
mit
|
frc1418/tbapy
|
class _base_model_class(dict):
def __init__(self, json={}):
self.update(json)
self.update(self.__dict__)
self.__dict__ = self
def __repr__(self):
return '%s(%s)' % (self.__class__.__name__, self.json())
def json(self):
return dict.__repr__(self)
def _model_class(class_name):
return type(class_name, (_base_model_class,), {})
APIStatus = _model_class('Status')
Team = _model_class('Team')
Event = _model_class('Event')
Match = _model_class('Match')
Award = _model_class('Award')
District = _model_class('District')
Media = _model_class('Media')
Robot = _model_class('Robot')
Profile = _model_class('Profile')
Alliance = _model_class('Alliance')
DistrictPoints = _model_class('DistrictPoints')
Insights = _model_class('Insights')
OPRs = _model_class('OPRs')
Prediction = _model_class('Prediction')
Rankings = _model_class('Rankings')
DistrictRanking = _model_class('DistrictRanking')
Status = _model_class('TeamStatus')
Clear up Status naming confusion
|
class _base_model_class(dict):
def __init__(self, json={}):
self.update(json)
self.update(self.__dict__)
self.__dict__ = self
def __repr__(self):
return '%s(%s)' % (self.__class__.__name__, self.json())
def json(self):
return dict.__repr__(self)
def _model_class(class_name):
return type(class_name, (_base_model_class,), {})
APIStatus = _model_class('APIStatus')
Team = _model_class('Team')
Event = _model_class('Event')
Match = _model_class('Match')
Award = _model_class('Award')
District = _model_class('District')
Media = _model_class('Media')
Robot = _model_class('Robot')
Profile = _model_class('Profile')
Alliance = _model_class('Alliance')
DistrictPoints = _model_class('DistrictPoints')
Insights = _model_class('Insights')
OPRs = _model_class('OPRs')
Prediction = _model_class('Prediction')
Rankings = _model_class('Rankings')
DistrictRanking = _model_class('DistrictRanking')
Status = _model_class('Status')
|
<commit_before>class _base_model_class(dict):
def __init__(self, json={}):
self.update(json)
self.update(self.__dict__)
self.__dict__ = self
def __repr__(self):
return '%s(%s)' % (self.__class__.__name__, self.json())
def json(self):
return dict.__repr__(self)
def _model_class(class_name):
return type(class_name, (_base_model_class,), {})
APIStatus = _model_class('Status')
Team = _model_class('Team')
Event = _model_class('Event')
Match = _model_class('Match')
Award = _model_class('Award')
District = _model_class('District')
Media = _model_class('Media')
Robot = _model_class('Robot')
Profile = _model_class('Profile')
Alliance = _model_class('Alliance')
DistrictPoints = _model_class('DistrictPoints')
Insights = _model_class('Insights')
OPRs = _model_class('OPRs')
Prediction = _model_class('Prediction')
Rankings = _model_class('Rankings')
DistrictRanking = _model_class('DistrictRanking')
Status = _model_class('TeamStatus')
<commit_msg>Clear up Status naming confusion<commit_after>
|
class _base_model_class(dict):
def __init__(self, json={}):
self.update(json)
self.update(self.__dict__)
self.__dict__ = self
def __repr__(self):
return '%s(%s)' % (self.__class__.__name__, self.json())
def json(self):
return dict.__repr__(self)
def _model_class(class_name):
return type(class_name, (_base_model_class,), {})
APIStatus = _model_class('APIStatus')
Team = _model_class('Team')
Event = _model_class('Event')
Match = _model_class('Match')
Award = _model_class('Award')
District = _model_class('District')
Media = _model_class('Media')
Robot = _model_class('Robot')
Profile = _model_class('Profile')
Alliance = _model_class('Alliance')
DistrictPoints = _model_class('DistrictPoints')
Insights = _model_class('Insights')
OPRs = _model_class('OPRs')
Prediction = _model_class('Prediction')
Rankings = _model_class('Rankings')
DistrictRanking = _model_class('DistrictRanking')
Status = _model_class('Status')
|
class _base_model_class(dict):
def __init__(self, json={}):
self.update(json)
self.update(self.__dict__)
self.__dict__ = self
def __repr__(self):
return '%s(%s)' % (self.__class__.__name__, self.json())
def json(self):
return dict.__repr__(self)
def _model_class(class_name):
return type(class_name, (_base_model_class,), {})
APIStatus = _model_class('Status')
Team = _model_class('Team')
Event = _model_class('Event')
Match = _model_class('Match')
Award = _model_class('Award')
District = _model_class('District')
Media = _model_class('Media')
Robot = _model_class('Robot')
Profile = _model_class('Profile')
Alliance = _model_class('Alliance')
DistrictPoints = _model_class('DistrictPoints')
Insights = _model_class('Insights')
OPRs = _model_class('OPRs')
Prediction = _model_class('Prediction')
Rankings = _model_class('Rankings')
DistrictRanking = _model_class('DistrictRanking')
Status = _model_class('TeamStatus')
Clear up Status naming confusionclass _base_model_class(dict):
def __init__(self, json={}):
self.update(json)
self.update(self.__dict__)
self.__dict__ = self
def __repr__(self):
return '%s(%s)' % (self.__class__.__name__, self.json())
def json(self):
return dict.__repr__(self)
def _model_class(class_name):
return type(class_name, (_base_model_class,), {})
APIStatus = _model_class('APIStatus')
Team = _model_class('Team')
Event = _model_class('Event')
Match = _model_class('Match')
Award = _model_class('Award')
District = _model_class('District')
Media = _model_class('Media')
Robot = _model_class('Robot')
Profile = _model_class('Profile')
Alliance = _model_class('Alliance')
DistrictPoints = _model_class('DistrictPoints')
Insights = _model_class('Insights')
OPRs = _model_class('OPRs')
Prediction = _model_class('Prediction')
Rankings = _model_class('Rankings')
DistrictRanking = _model_class('DistrictRanking')
Status = _model_class('Status')
|
<commit_before>class _base_model_class(dict):
def __init__(self, json={}):
self.update(json)
self.update(self.__dict__)
self.__dict__ = self
def __repr__(self):
return '%s(%s)' % (self.__class__.__name__, self.json())
def json(self):
return dict.__repr__(self)
def _model_class(class_name):
return type(class_name, (_base_model_class,), {})
APIStatus = _model_class('Status')
Team = _model_class('Team')
Event = _model_class('Event')
Match = _model_class('Match')
Award = _model_class('Award')
District = _model_class('District')
Media = _model_class('Media')
Robot = _model_class('Robot')
Profile = _model_class('Profile')
Alliance = _model_class('Alliance')
DistrictPoints = _model_class('DistrictPoints')
Insights = _model_class('Insights')
OPRs = _model_class('OPRs')
Prediction = _model_class('Prediction')
Rankings = _model_class('Rankings')
DistrictRanking = _model_class('DistrictRanking')
Status = _model_class('TeamStatus')
<commit_msg>Clear up Status naming confusion<commit_after>class _base_model_class(dict):
def __init__(self, json={}):
self.update(json)
self.update(self.__dict__)
self.__dict__ = self
def __repr__(self):
return '%s(%s)' % (self.__class__.__name__, self.json())
def json(self):
return dict.__repr__(self)
def _model_class(class_name):
return type(class_name, (_base_model_class,), {})
APIStatus = _model_class('APIStatus')
Team = _model_class('Team')
Event = _model_class('Event')
Match = _model_class('Match')
Award = _model_class('Award')
District = _model_class('District')
Media = _model_class('Media')
Robot = _model_class('Robot')
Profile = _model_class('Profile')
Alliance = _model_class('Alliance')
DistrictPoints = _model_class('DistrictPoints')
Insights = _model_class('Insights')
OPRs = _model_class('OPRs')
Prediction = _model_class('Prediction')
Rankings = _model_class('Rankings')
DistrictRanking = _model_class('DistrictRanking')
Status = _model_class('Status')
|
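The _model_class factory mints dict subclasses on the fly with type(), and self.__dict__ = self makes every key double as an attribute. A quick sketch with example data (the values are illustrative, not API output):

class _base(dict):
    def __init__(self, json={}):
        self.update(json)
        self.__dict__ = self  # the instance dict *is* the dict itself


Team = type('Team', (_base,), {})  # same trick as _model_class('Team')

t = Team({'team_number': 1418, 'city': 'Falls Church'})
print(t.team_number)   # 1418 -- attribute style
print(t['city'])       # 'Falls Church' -- dict style

The rename being committed matters because type()'s first argument is the name repr() reports: before the fix, Status(...) printed as TeamStatus(...) and APIStatus(...) as Status(...).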
f338c4ff0c1ff30a3fa44182b0ce0dcbe4ae9dca
|
Mariana/regularizations.py
|
Mariana/regularizations.py
|
__all__ = ["SingleLayerRegularizer_ABC", "L1", "L2"]

class SingleLayerRegularizer_ABC(object) :
    """An abstract regularization to be applied to a layer."""

    def __init__(self, factor, *args, **kwargs) :
        self.name = self.__class__.__name__
        self.factor = factor
        self.hyperparameters = ["factor"]

    def getFormula(self, layer) :
        """Returns the expression to be added to the cost"""
        raise NotImplemented("Must be implemented in child")

class L1(SingleLayerRegularizer_ABC) :
    """
    Will add this to the cost

    .. math::

        factor * abs(Weights)
    """

    def getFormula(self, layer) :
        return self.factor * ( abs(layer.W).sum() )

class L2(SingleLayerRegularizer_ABC) :
    """
    Will add this to the cost

    .. math::

        factor * (Weights)^2
    """

    def getFormula(self, layer) :
        return self.factor * ( abs(layer.W).sum() )
|
__all__ = ["SingleLayerRegularizer_ABC", "L1", "L2"]

class SingleLayerRegularizer_ABC(object) :
    """An abstract regularization to be applied to a layer."""

    def __init__(self, factor, *args, **kwargs) :
        self.name = self.__class__.__name__
        self.factor = factor
        self.hyperparameters = ["factor"]

    def getFormula(self, layer) :
        """Returns the expression to be added to the cost"""
        raise NotImplemented("Must be implemented in child")

class L1(SingleLayerRegularizer_ABC) :
    """
    Will add this to the cost

    .. math::

        factor * abs(Weights)
    """

    def getFormula(self, layer) :
        return self.factor * ( abs(layer.W).sum() )

class L2(SingleLayerRegularizer_ABC) :
    """
    Will add this to the cost

    .. math::

        factor * (Weights)^2
    """

    def getFormula(self, layer) :
        return self.factor * ( (layer.W * layer.W).sum() )
|
Fix L2 formula that was mistakenly added as L1.
|
Fix L2 formula that was mistakenly added as L1.
|
Python
|
apache-2.0
|
tariqdaouda/Mariana,tariqdaouda/Mariana,tariqdaouda/Mariana,JonathanSeguin/Mariana
|
__all__ = ["SingleLayerRegularizer_ABC", "L1", "L2"]
class SingleLayerRegularizer_ABC(object) :
"""An abstract regularization to be applied to a layer."""
def __init__(self, factor, *args, **kwargs) :
self.name = self.__class__.__name__
self.factor = factor
self.hyperparameters = ["factor"]
def getFormula(self, layer) :
"""Returns the expression to be added to the cost"""
raise NotImplemented("Must be implemented in child")
class L1(SingleLayerRegularizer_ABC) :
"""
Will add this to the cost
.. math::
factor * abs(Weights)
"""
def getFormula(self, layer) :
return self.factor * ( abs(layer.W).sum() )
class L2(SingleLayerRegularizer_ABC) :
"""
Will add this to the cost
.. math::
factor * (Weights)^2
"""
def getFormula(self, layer) :
return self.factor * ( abs(layer.W).sum() )Fix L2 formula that was mistakenly added as L1.
|
__all__ = ["SingleLayerRegularizer_ABC", "L1", "L2"]
class SingleLayerRegularizer_ABC(object) :
"""An abstract regularization to be applied to a layer."""
def __init__(self, factor, *args, **kwargs) :
self.name = self.__class__.__name__
self.factor = factor
self.hyperparameters = ["factor"]
def getFormula(self, layer) :
"""Returns the expression to be added to the cost"""
raise NotImplemented("Must be implemented in child")
class L1(SingleLayerRegularizer_ABC) :
"""
Will add this to the cost
.. math::
factor * abs(Weights)
"""
def getFormula(self, layer) :
return self.factor * ( abs(layer.W).sum() )
class L2(SingleLayerRegularizer_ABC) :
"""
Will add this to the cost
.. math::
factor * (Weights)^2
"""
def getFormula(self, layer) :
return self.factor * ( (layer.W * layer.W).sum() )
|
<commit_before>__all__ = ["SingleLayerRegularizer_ABC", "L1", "L2"]
class SingleLayerRegularizer_ABC(object) :
"""An abstract regularization to be applied to a layer."""
def __init__(self, factor, *args, **kwargs) :
self.name = self.__class__.__name__
self.factor = factor
self.hyperparameters = ["factor"]
def getFormula(self, layer) :
"""Returns the expression to be added to the cost"""
raise NotImplemented("Must be implemented in child")
class L1(SingleLayerRegularizer_ABC) :
"""
Will add this to the cost
.. math::
factor * abs(Weights)
"""
def getFormula(self, layer) :
return self.factor * ( abs(layer.W).sum() )
class L2(SingleLayerRegularizer_ABC) :
"""
Will add this to the cost
.. math::
factor * (Weights)^2
"""
def getFormula(self, layer) :
return self.factor * ( abs(layer.W).sum() )<commit_msg>Fix L2 formula that was mistakenly added as L1.<commit_after>
|
__all__ = ["SingleLayerRegularizer_ABC", "L1", "L2"]
class SingleLayerRegularizer_ABC(object) :
"""An abstract regularization to be applied to a layer."""
def __init__(self, factor, *args, **kwargs) :
self.name = self.__class__.__name__
self.factor = factor
self.hyperparameters = ["factor"]
def getFormula(self, layer) :
"""Returns the expression to be added to the cost"""
raise NotImplemented("Must be implemented in child")
class L1(SingleLayerRegularizer_ABC) :
"""
Will add this to the cost
.. math::
factor * abs(Weights)
"""
def getFormula(self, layer) :
return self.factor * ( abs(layer.W).sum() )
class L2(SingleLayerRegularizer_ABC) :
"""
Will add this to the cost
.. math::
factor * (Weights)^2
"""
def getFormula(self, layer) :
return self.factor * ( (layer.W * layer.W).sum() )
|
__all__ = ["SingleLayerRegularizer_ABC", "L1", "L2"]
class SingleLayerRegularizer_ABC(object) :
"""An abstract regularization to be applied to a layer."""
def __init__(self, factor, *args, **kwargs) :
self.name = self.__class__.__name__
self.factor = factor
self.hyperparameters = ["factor"]
def getFormula(self, layer) :
"""Returns the expression to be added to the cost"""
raise NotImplemented("Must be implemented in child")
class L1(SingleLayerRegularizer_ABC) :
"""
Will add this to the cost
.. math::
factor * abs(Weights)
"""
def getFormula(self, layer) :
return self.factor * ( abs(layer.W).sum() )
class L2(SingleLayerRegularizer_ABC) :
"""
Will add this to the cost
.. math::
factor * (Weights)^2
"""
def getFormula(self, layer) :
return self.factor * ( abs(layer.W).sum() )Fix L2 formula that was mistakenly added as L1.__all__ = ["SingleLayerRegularizer_ABC", "L1", "L2"]
class SingleLayerRegularizer_ABC(object) :
"""An abstract regularization to be applied to a layer."""
def __init__(self, factor, *args, **kwargs) :
self.name = self.__class__.__name__
self.factor = factor
self.hyperparameters = ["factor"]
def getFormula(self, layer) :
"""Returns the expression to be added to the cost"""
raise NotImplemented("Must be implemented in child")
class L1(SingleLayerRegularizer_ABC) :
"""
Will add this to the cost
.. math::
factor * abs(Weights)
"""
def getFormula(self, layer) :
return self.factor * ( abs(layer.W).sum() )
class L2(SingleLayerRegularizer_ABC) :
"""
Will add this to the cost
.. math::
factor * (Weights)^2
"""
def getFormula(self, layer) :
return self.factor * ( (layer.W * layer.W).sum() )
|
<commit_before>__all__ = ["SingleLayerRegularizer_ABC", "L1", "L2"]
class SingleLayerRegularizer_ABC(object) :
"""An abstract regularization to be applied to a layer."""
def __init__(self, factor, *args, **kwargs) :
self.name = self.__class__.__name__
self.factor = factor
self.hyperparameters = ["factor"]
def getFormula(self, layer) :
"""Returns the expression to be added to the cost"""
raise NotImplemented("Must be implemented in child")
class L1(SingleLayerRegularizer_ABC) :
"""
Will add this to the cost
.. math::
factor * abs(Weights)
"""
def getFormula(self, layer) :
return self.factor * ( abs(layer.W).sum() )
class L2(SingleLayerRegularizer_ABC) :
"""
Will add this to the cost
.. math::
factor * (Weights)^2
"""
def getFormula(self, layer) :
return self.factor * ( abs(layer.W).sum() )<commit_msg>Fix L2 formula that was mistakenly added as L1.<commit_after>__all__ = ["SingleLayerRegularizer_ABC", "L1", "L2"]
class SingleLayerRegularizer_ABC(object) :
"""An abstract regularization to be applied to a layer."""
def __init__(self, factor, *args, **kwargs) :
self.name = self.__class__.__name__
self.factor = factor
self.hyperparameters = ["factor"]
def getFormula(self, layer) :
"""Returns the expression to be added to the cost"""
raise NotImplemented("Must be implemented in child")
class L1(SingleLayerRegularizer_ABC) :
"""
Will add this to the cost
.. math::
factor * abs(Weights)
"""
def getFormula(self, layer) :
return self.factor * ( abs(layer.W).sum() )
class L2(SingleLayerRegularizer_ABC) :
"""
Will add this to the cost
.. math::
factor * (Weights)^2
"""
def getFormula(self, layer) :
return self.factor * ( (layer.W * layer.W).sum() )
|
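A quick numeric check of the corrected formulas, written with NumPy instead of the Theano expressions the library actually builds:

import numpy as np

W = np.array([[0.5, -2.0], [1.0, 0.0]])
factor = 0.01

l1 = factor * np.abs(W).sum()   # 0.01 * 3.5  = 0.035
l2 = factor * (W * W).sum()     # 0.01 * 5.25 = 0.0525
print(l1, l2)

Before the fix both classes computed the L1 value, so switching a model from L1 to L2 regularization silently changed nothing.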
49f61f7f47bbb69236ef319dfa861ea437a0aac4
|
build_qrc.py
|
build_qrc.py
|
#!/usr/bin/env python

import os
import sys
import json


def read_conf(fname):
    if not os.path.isfile(fname):
        return {}
    with open(fname, 'r') as conf:
        return json.load(conf)


def build_qrc(resources):
    yield '<RCC>'
    yield '<qresource>'
    for d in resources:
        for root, dirs, files in os.walk(d):
            for f in files:
                yield '<file>{}</file>'.format(os.path.join(root, f))
    yield '</qresource>'
    yield '</RCC>'


def build_resources(resources, target):
    with open(target, 'w') as f:
        for line in build_qrc(resources):
            f.write(line + os.linesep)


def build(source):
    conf = read_conf(source)
    target = os.path.basename(source)
    if '.' in target:
        target = target.rsplit('.', 1)[0]
    target += '.qrc'
    build_resources(conf.get('resources', []), target)


if __name__ == '__main__':
    build(sys.argv[1] if len(sys.argv) >= 1 else 'resources.json')
|
#!/usr/bin/env python

import os
import sys
import json


def read_conf(fname):
    if not os.path.isfile(fname):
        return {}
    with open(fname, 'r') as conf:
        return json.load(conf)


def build_qrc(resources):
    yield '<RCC>'
    yield '<qresource>'
    for d in resources:
        for root, dirs, files in os.walk(d):
            dirs.sort()
            files.sort()
            for f in files:
                yield '<file>{}</file>'.format(os.path.join(root, f))
    yield '</qresource>'
    yield '</RCC>'


def build_resources(resources, target):
    with open(target, 'w') as f:
        for line in build_qrc(resources):
            f.write(line + os.linesep)


def build(source):
    conf = read_conf(source)
    target = os.path.basename(source)
    if '.' in target:
        target = target.rsplit('.', 1)[0]
    target += '.qrc'
    build_resources(conf.get('resources', []), target)


if __name__ == '__main__':
    build(sys.argv[1] if len(sys.argv) >= 1 else 'resources.json')
|
Sort qrc input file list
|
Sort qrc input file list
so that yubikey-manager-qt packages build in a reproducible way
in spite of indeterministic filesystem readdir order
See https://reproducible-builds.org/ for why this is good.
|
Python
|
bsd-2-clause
|
Yubico/yubikey-manager-qt,Yubico/yubikey-manager-qt,Yubico/yubikey-manager-qt,Yubico/yubikey-manager-qt
|
#!/usr/bin/env python
import os
import sys
import json
def read_conf(fname):
if not os.path.isfile(fname):
return {}
with open(fname, 'r') as conf:
return json.load(conf)
def build_qrc(resources):
yield '<RCC>'
yield '<qresource>'
for d in resources:
for root, dirs, files in os.walk(d):
for f in files:
yield '<file>{}</file>'.format(os.path.join(root, f))
yield '</qresource>'
yield '</RCC>'
def build_resources(resources, target):
with open(target, 'w') as f:
for line in build_qrc(resources):
f.write(line + os.linesep)
def build(source):
conf = read_conf(source)
target = os.path.basename(source)
if '.' in target:
target = target.rsplit('.', 1)[0]
target += '.qrc'
build_resources(conf.get('resources', []), target)
if __name__ == '__main__':
build(sys.argv[1] if len(sys.argv) >= 1 else 'resources.json')
Sort qrc input file list
so that yubikey-manager-qt packages build in a reproducible way
in spite of indeterministic filesystem readdir order
See https://reproducible-builds.org/ for why this is good.
|
#!/usr/bin/env python
import os
import sys
import json
def read_conf(fname):
if not os.path.isfile(fname):
return {}
with open(fname, 'r') as conf:
return json.load(conf)
def build_qrc(resources):
yield '<RCC>'
yield '<qresource>'
for d in resources:
for root, dirs, files in os.walk(d):
dirs.sort()
files.sort()
for f in files:
yield '<file>{}</file>'.format(os.path.join(root, f))
yield '</qresource>'
yield '</RCC>'
def build_resources(resources, target):
with open(target, 'w') as f:
for line in build_qrc(resources):
f.write(line + os.linesep)
def build(source):
conf = read_conf(source)
target = os.path.basename(source)
if '.' in target:
target = target.rsplit('.', 1)[0]
target += '.qrc'
build_resources(conf.get('resources', []), target)
if __name__ == '__main__':
build(sys.argv[1] if len(sys.argv) >= 1 else 'resources.json')
|
<commit_before>#!/usr/bin/env python
import os
import sys
import json
def read_conf(fname):
if not os.path.isfile(fname):
return {}
with open(fname, 'r') as conf:
return json.load(conf)
def build_qrc(resources):
yield '<RCC>'
yield '<qresource>'
for d in resources:
for root, dirs, files in os.walk(d):
for f in files:
yield '<file>{}</file>'.format(os.path.join(root, f))
yield '</qresource>'
yield '</RCC>'
def build_resources(resources, target):
with open(target, 'w') as f:
for line in build_qrc(resources):
f.write(line + os.linesep)
def build(source):
conf = read_conf(source)
target = os.path.basename(source)
if '.' in target:
target = target.rsplit('.', 1)[0]
target += '.qrc'
build_resources(conf.get('resources', []), target)
if __name__ == '__main__':
build(sys.argv[1] if len(sys.argv) >= 1 else 'resources.json')
<commit_msg>Sort qrc input file list
so that yubikey-manager-qt packages build in a reproducible way
in spite of indeterministic filesystem readdir order
See https://reproducible-builds.org/ for why this is good.<commit_after>
|
#!/usr/bin/env python
import os
import sys
import json
def read_conf(fname):
if not os.path.isfile(fname):
return {}
with open(fname, 'r') as conf:
return json.load(conf)
def build_qrc(resources):
yield '<RCC>'
yield '<qresource>'
for d in resources:
for root, dirs, files in os.walk(d):
dirs.sort()
files.sort()
for f in files:
yield '<file>{}</file>'.format(os.path.join(root, f))
yield '</qresource>'
yield '</RCC>'
def build_resources(resources, target):
with open(target, 'w') as f:
for line in build_qrc(resources):
f.write(line + os.linesep)
def build(source):
conf = read_conf(source)
target = os.path.basename(source)
if '.' in target:
target = target.rsplit('.', 1)[0]
target += '.qrc'
build_resources(conf.get('resources', []), target)
if __name__ == '__main__':
build(sys.argv[1] if len(sys.argv) >= 1 else 'resources.json')
|
#!/usr/bin/env python
import os
import sys
import json
def read_conf(fname):
if not os.path.isfile(fname):
return {}
with open(fname, 'r') as conf:
return json.load(conf)
def build_qrc(resources):
yield '<RCC>'
yield '<qresource>'
for d in resources:
for root, dirs, files in os.walk(d):
for f in files:
yield '<file>{}</file>'.format(os.path.join(root, f))
yield '</qresource>'
yield '</RCC>'
def build_resources(resources, target):
with open(target, 'w') as f:
for line in build_qrc(resources):
f.write(line + os.linesep)
def build(source):
conf = read_conf(source)
target = os.path.basename(source)
if '.' in target:
target = target.rsplit('.', 1)[0]
target += '.qrc'
build_resources(conf.get('resources', []), target)
if __name__ == '__main__':
build(sys.argv[1] if len(sys.argv) >= 1 else 'resources.json')
Sort qrc input file list
so that yubikey-manager-qt packages build in a reproducible way
in spite of indeterministic filesystem readdir order
See https://reproducible-builds.org/ for why this is good.#!/usr/bin/env python
import os
import sys
import json
def read_conf(fname):
if not os.path.isfile(fname):
return {}
with open(fname, 'r') as conf:
return json.load(conf)
def build_qrc(resources):
yield '<RCC>'
yield '<qresource>'
for d in resources:
for root, dirs, files in os.walk(d):
dirs.sort()
files.sort()
for f in files:
yield '<file>{}</file>'.format(os.path.join(root, f))
yield '</qresource>'
yield '</RCC>'
def build_resources(resources, target):
with open(target, 'w') as f:
for line in build_qrc(resources):
f.write(line + os.linesep)
def build(source):
conf = read_conf(source)
target = os.path.basename(source)
if '.' in target:
target = target.rsplit('.', 1)[0]
target += '.qrc'
build_resources(conf.get('resources', []), target)
if __name__ == '__main__':
build(sys.argv[1] if len(sys.argv) >= 1 else 'resources.json')
|
<commit_before>#!/usr/bin/env python
import os
import sys
import json
def read_conf(fname):
if not os.path.isfile(fname):
return {}
with open(fname, 'r') as conf:
return json.load(conf)
def build_qrc(resources):
yield '<RCC>'
yield '<qresource>'
for d in resources:
for root, dirs, files in os.walk(d):
for f in files:
yield '<file>{}</file>'.format(os.path.join(root, f))
yield '</qresource>'
yield '</RCC>'
def build_resources(resources, target):
with open(target, 'w') as f:
for line in build_qrc(resources):
f.write(line + os.linesep)
def build(source):
conf = read_conf(source)
target = os.path.basename(source)
if '.' in target:
target = target.rsplit('.', 1)[0]
target += '.qrc'
build_resources(conf.get('resources', []), target)
if __name__ == '__main__':
build(sys.argv[1] if len(sys.argv) >= 1 else 'resources.json')
<commit_msg>Sort qrc input file list
so that yubikey-manager-qt packages build in a reproducible way
in spite of indeterministic filesystem readdir order
See https://reproducible-builds.org/ for why this is good.<commit_after>#!/usr/bin/env python
import os
import sys
import json
def read_conf(fname):
if not os.path.isfile(fname):
return {}
with open(fname, 'r') as conf:
return json.load(conf)
def build_qrc(resources):
yield '<RCC>'
yield '<qresource>'
for d in resources:
for root, dirs, files in os.walk(d):
dirs.sort()
files.sort()
for f in files:
yield '<file>{}</file>'.format(os.path.join(root, f))
yield '</qresource>'
yield '</RCC>'
def build_resources(resources, target):
with open(target, 'w') as f:
for line in build_qrc(resources):
f.write(line + os.linesep)
def build(source):
conf = read_conf(source)
target = os.path.basename(source)
if '.' in target:
target = target.rsplit('.', 1)[0]
target += '.qrc'
build_resources(conf.get('resources', []), target)
if __name__ == '__main__':
build(sys.argv[1] if len(sys.argv) >= 1 else 'resources.json')
|
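The fix works because os.walk (in its default top-down traversal) yields dirs as a mutable list, and reordering that list in place steers the rest of the walk. A minimal demonstration:

import os

for root, dirs, files in os.walk('.'):
    dirs.sort()   # descend into subdirectories in sorted order
    files.sort()  # list each directory's files deterministically too
    for f in files:
        print(os.path.join(root, f))

Without the sorts, the .qrc contents follow raw readdir order, which differs between filesystems and defeats byte-for-byte reproducible builds.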
9209ce05cae66f99166101905f6981da04eef656
|
wake/filters.py
|
wake/filters.py
|
from datetime import datetime

from twitter_text import TwitterText


def relative_time(timestamp):
    delta = (datetime.now() - datetime.fromtimestamp(timestamp))
    delta_s = delta.days * 86400 + delta.seconds
    if delta_s < 60:
        return "less than a minute ago"
    elif delta_s < 120:
        return "about a minute ago"
    elif delta_s < (60 * 60):
        return str(delta_s / 60) + " minutes ago"
    elif delta_s < (120 * 60):
        return "about an hour ago"
    elif delta_s < (24 * 60 * 60):
        return "about " + str(delta_s / 3600) + " hours ago"
    elif delta_s < (48 * 60 * 60):
        return "1 day ago"
    else:
        return str(delta_s / 86400) + " days ago"


def tweet(text):
    return TwitterText(text).autolink.auto_link()
|
from datetime import datetime

from twitter_text import TwitterText
from flask import Markup


def relative_time(timestamp):
    delta = (datetime.now() - datetime.fromtimestamp(timestamp))
    delta_s = delta.days * 86400 + delta.seconds
    if delta_s < 60:
        return "less than a minute ago"
    elif delta_s < 120:
        return "about a minute ago"
    elif delta_s < (60 * 60):
        return str(delta_s / 60) + " minutes ago"
    elif delta_s < (120 * 60):
        return "about an hour ago"
    elif delta_s < (24 * 60 * 60):
        return "about " + str(delta_s / 3600) + " hours ago"
    elif delta_s < (48 * 60 * 60):
        return "1 day ago"
    else:
        return str(delta_s / 86400) + " days ago"


def tweet(text):
    return Markup(TwitterText(text).autolink.auto_link())
|
Mark output of tweet filter as safe by default
|
Mark output of tweet filter as safe by default
|
Python
|
bsd-3-clause
|
chromakode/wake
|
from datetime import datetime
from twitter_text import TwitterText
def relative_time(timestamp):
delta = (datetime.now() - datetime.fromtimestamp(timestamp))
delta_s = delta.days * 86400 + delta.seconds
if delta_s < 60:
return "less than a minute ago"
elif delta_s < 120:
return "about a minute ago"
elif delta_s < (60 * 60):
return str(delta_s / 60) + " minutes ago"
elif delta_s < (120 * 60):
return "about an hour ago"
elif delta_s < (24 * 60 * 60):
return "about " + str(delta_s / 3600) + " hours ago"
elif delta_s < (48 * 60 * 60):
return "1 day ago"
else:
return str(delta_s / 86400) + " days ago"
def tweet(text):
return TwitterText(text).autolink.auto_link()
Mark output of tweet filter as safe by default
|
from datetime import datetime
from twitter_text import TwitterText
from flask import Markup
def relative_time(timestamp):
delta = (datetime.now() - datetime.fromtimestamp(timestamp))
delta_s = delta.days * 86400 + delta.seconds
if delta_s < 60:
return "less than a minute ago"
elif delta_s < 120:
return "about a minute ago"
elif delta_s < (60 * 60):
return str(delta_s / 60) + " minutes ago"
elif delta_s < (120 * 60):
return "about an hour ago"
elif delta_s < (24 * 60 * 60):
return "about " + str(delta_s / 3600) + " hours ago"
elif delta_s < (48 * 60 * 60):
return "1 day ago"
else:
return str(delta_s / 86400) + " days ago"
def tweet(text):
return Markup(TwitterText(text).autolink.auto_link())
|
<commit_before>from datetime import datetime
from twitter_text import TwitterText
def relative_time(timestamp):
delta = (datetime.now() - datetime.fromtimestamp(timestamp))
delta_s = delta.days * 86400 + delta.seconds
if delta_s < 60:
return "less than a minute ago"
elif delta_s < 120:
return "about a minute ago"
elif delta_s < (60 * 60):
return str(delta_s / 60) + " minutes ago"
elif delta_s < (120 * 60):
return "about an hour ago"
elif delta_s < (24 * 60 * 60):
return "about " + str(delta_s / 3600) + " hours ago"
elif delta_s < (48 * 60 * 60):
return "1 day ago"
else:
return str(delta_s / 86400) + " days ago"
def tweet(text):
return TwitterText(text).autolink.auto_link()
<commit_msg>Mark output of tweet filter as safe by default<commit_after>
|
from datetime import datetime
from twitter_text import TwitterText
from flask import Markup
def relative_time(timestamp):
delta = (datetime.now() - datetime.fromtimestamp(timestamp))
delta_s = delta.days * 86400 + delta.seconds
if delta_s < 60:
return "less than a minute ago"
elif delta_s < 120:
return "about a minute ago"
elif delta_s < (60 * 60):
return str(delta_s / 60) + " minutes ago"
elif delta_s < (120 * 60):
return "about an hour ago"
elif delta_s < (24 * 60 * 60):
return "about " + str(delta_s / 3600) + " hours ago"
elif delta_s < (48 * 60 * 60):
return "1 day ago"
else:
return str(delta_s / 86400) + " days ago"
def tweet(text):
return Markup(TwitterText(text).autolink.auto_link())
|
from datetime import datetime
from twitter_text import TwitterText
def relative_time(timestamp):
delta = (datetime.now() - datetime.fromtimestamp(timestamp))
delta_s = delta.days * 86400 + delta.seconds
if delta_s < 60:
return "less than a minute ago"
elif delta_s < 120:
return "about a minute ago"
elif delta_s < (60 * 60):
return str(delta_s / 60) + " minutes ago"
elif delta_s < (120 * 60):
return "about an hour ago"
elif delta_s < (24 * 60 * 60):
return "about " + str(delta_s / 3600) + " hours ago"
elif delta_s < (48 * 60 * 60):
return "1 day ago"
else:
return str(delta_s / 86400) + " days ago"
def tweet(text):
return TwitterText(text).autolink.auto_link()
Mark output of tweet filter as safe by defaultfrom datetime import datetime
from twitter_text import TwitterText
from flask import Markup
def relative_time(timestamp):
delta = (datetime.now() - datetime.fromtimestamp(timestamp))
delta_s = delta.days * 86400 + delta.seconds
if delta_s < 60:
return "less than a minute ago"
elif delta_s < 120:
return "about a minute ago"
elif delta_s < (60 * 60):
return str(delta_s / 60) + " minutes ago"
elif delta_s < (120 * 60):
return "about an hour ago"
elif delta_s < (24 * 60 * 60):
return "about " + str(delta_s / 3600) + " hours ago"
elif delta_s < (48 * 60 * 60):
return "1 day ago"
else:
return str(delta_s / 86400) + " days ago"
def tweet(text):
return Markup(TwitterText(text).autolink.auto_link())
|
<commit_before>from datetime import datetime
from twitter_text import TwitterText
def relative_time(timestamp):
delta = (datetime.now() - datetime.fromtimestamp(timestamp))
delta_s = delta.days * 86400 + delta.seconds
if delta_s < 60:
return "less than a minute ago"
elif delta_s < 120:
return "about a minute ago"
elif delta_s < (60 * 60):
return str(delta_s / 60) + " minutes ago"
elif delta_s < (120 * 60):
return "about an hour ago"
elif delta_s < (24 * 60 * 60):
return "about " + str(delta_s / 3600) + " hours ago"
elif delta_s < (48 * 60 * 60):
return "1 day ago"
else:
return str(delta_s / 86400) + " days ago"
def tweet(text):
return TwitterText(text).autolink.auto_link()
<commit_msg>Mark output of tweet filter as safe by default<commit_after>from datetime import datetime
from twitter_text import TwitterText
from flask import Markup
def relative_time(timestamp):
delta = (datetime.now() - datetime.fromtimestamp(timestamp))
delta_s = delta.days * 86400 + delta.seconds
if delta_s < 60:
return "less than a minute ago"
elif delta_s < 120:
return "about a minute ago"
elif delta_s < (60 * 60):
return str(delta_s / 60) + " minutes ago"
elif delta_s < (120 * 60):
return "about an hour ago"
elif delta_s < (24 * 60 * 60):
return "about " + str(delta_s / 3600) + " hours ago"
elif delta_s < (48 * 60 * 60):
return "1 day ago"
else:
return str(delta_s / 86400) + " days ago"
def tweet(text):
return Markup(TwitterText(text).autolink.auto_link())
|
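The record above autolinks tweet text and wraps the result in `flask.Markup` so Jinja2's autoescaping does not re-escape the generated anchor tags. As a minimal sketch of how these filters might be wired into a Flask app — the `app` object and the `filters` module name are assumptions for illustration, not part of the record:

```python
# Hypothetical wiring of the record's filters into a Flask app.
# Only relative_time and tweet come from the record; the module
# name `filters` and the app object are illustrative assumptions.
from flask import Flask

import filters  # hypothetical module holding relative_time and tweet

app = Flask(__name__)

# Registering under these names lets templates write
# {{ post.created_at | relative_time }} and {{ post.body | tweet }}.
app.jinja_env.filters['relative_time'] = filters.relative_time
app.jinja_env.filters['tweet'] = filters.tweet
```

Because `tweet` now returns a `Markup` instance, Jinja2 treats its value as pre-escaped and renders the `<a>` tags instead of displaying them as literal text, while `relative_time` still returns a plain string that is escaped as usual.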
9f8d134585a423773a6122c7312c1d88c6203867
|
fastats/_version.py
|
fastats/_version.py
|
# This is the authoritative version number which should be used everywhere,
# including setup, packaging, documentation generation etc.
#
# Normally, this should be available as fastats.__version__
VERSION = '2017.1.3rc0'
|
# This is the authoritative version number which should be used everywhere,
# including setup, packaging, documentation generation etc.
#
# Normally, this should be available as fastats.__version__
VERSION = '2017.1rc0'
|
Fix version to match the current milestone
|
Fix version to match the current milestone
|
Python
|
mit
|
fastats/fastats,dwillmer/fastats
|
# This is the authoritative version number which should be used everywhere,
# including setup, packaging, documentation generation etc.
#
# Normally, this should be available as fastats.__version__
VERSION = '2017.1.3rc0'
Fix version to match the current milestone
|
# This is the authoritative version number which should be used everywhere,
# including setup, packaging, documentation generation etc.
#
# Normally, this should be available as fastats.__version__
VERSION = '2017.1rc0'
|
<commit_before># This is the authoritative version number which should be used everywhere,
# including setup, packaging, documentation generation etc.
#
# Normally, this should be available as fastats.__version__
VERSION = '2017.1.3rc0'
<commit_msg>Fix version to match the current milestone<commit_after>
|
# This is the authoritative version number which should be used everywhere,
# including setup, packaging, documentation generation etc.
#
# Normally, this should be available as fastats.__version__
VERSION = '2017.1rc0'
|
# This is the authoritative version number which should be used everywhere,
# including setup, packaging, documentation generation etc.
#
# Normally, this should be available as fastats.__version__
VERSION = '2017.1.3rc0'
Fix version to match the current milestone
# This is the authoritative version number which should be used everywhere,
# including setup, packaging, documentation generation etc.
#
# Normally, this should be available as fastats.__version__
VERSION = '2017.1rc0'
|
<commit_before># This is the authoritative version number which should be used everywhere,
# including setup, packaging, documentation generation etc.
#
# Normally, this should be available as fastats.__version__
VERSION = '2017.1.3rc0'
<commit_msg>Fix version to match the current milestone<commit_after># This is the authoritative version number which should be used everywhere,
# including setup, packaging, documentation generation etc.
#
# Normally, this should be available as fastats.__version__
VERSION = '2017.1rc0'
|
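The second record keeps a single authoritative version string in `fastats/_version.py`. One common pattern this enables — sketched below under the assumption of a conventional `setup.py`, which is not shown in the record — is importing that constant so the packaging metadata can never drift from the module:

```python
# Hypothetical setup.py that single-sources the version from the
# record's _version.py; the surrounding metadata is illustrative only.
from setuptools import setup, find_packages

from fastats._version import VERSION  # '2017.1rc0' after this commit

setup(
    name='fastats',
    version=VERSION,  # keeps setup, packaging, and docs in sync with fastats.__version__
    packages=find_packages(),
)
```

This is exactly why the comment in `_version.py` calls the constant "authoritative": every consumer reads the same value instead of hard-coding its own copy.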