commit stringlengths 40 40 | old_file stringlengths 4 150 | new_file stringlengths 4 150 | old_contents stringlengths 0 3.26k | new_contents stringlengths 1 4.43k | subject stringlengths 15 501 | message stringlengths 15 4.06k | lang stringclasses 4 values | license stringclasses 13 values | repos stringlengths 5 91.5k | diff stringlengths 0 4.35k |
|---|---|---|---|---|---|---|---|---|---|---|
7db366558418f9fb997f8688f4816a500348e5c6 | tools/pdb-files.py | tools/pdb-files.py | import os
import os.path
import sys
import zipfile
'''
Seeks for *.pdb files from current directory and all child directories.
All found pdb files are copied to 'pdb-files.zip' file with their relative file paths.
'''
fileList = []
rootdir = os.curdir
zip_file_name = "pdb-files.zip"
if os.path.isfile(zip_file_name):
print ("file '%s' already exits. Please remove the file before run this script." % (zip_file_name))
sys.exit()
print ("All pdb files are stored to: %s" %(zip_file_name))
print ("Please wait, this might take several minutes...")
count = 0
total_size = 0
for root, subFolders, files in os.walk(rootdir):
for file in files:
name, ext = os.path.splitext( file )
if ext != '.pdb':
continue
if file:
count = count +1
total_size = total_size + os.path.getsize(os.path.join(root,file))
fileList.append(os.path.join(root,file))
zout = zipfile.ZipFile(zip_file_name, "w")
for fname in fileList:
zout.write(fname)
zout.close()
print ("Totally %i pdb files was found with total size %.1f megabytes." % (count, total_size/1024/1024))
| import os
import os.path
import sys
import zipfile
'''
Seeks for *.pdb files from current directory and all child directories.
All found pdb files are copied to 'pdb-files.zip' file with their relative file paths.
'''
fileList = []
rootdir = os.getcwd()[0:-6] # strip the /tools from the end
zip_file_name = "Tundra-pdb.zip"
if os.path.isfile(zip_file_name):
print ("file '%s' already exits. Please remove the file before run this script." % (zip_file_name))
sys.exit()
print ("\nAll pdb files are stored to: %s" %(zip_file_name))
print ("Please wait, this might take several minutes...")
count = 0
total_size = 0
for root, subFolders, files in os.walk(rootdir):
for file in files:
name, ext = os.path.splitext( file )
if ext != '.pdb':
continue
if file:
count = count +1
total_size = total_size + os.path.getsize(os.path.join(root,file))
fileList.append(os.path.join(root,file))
zout = zipfile.ZipFile(zip_file_name, "w")
print "\nPacking..."
for fname in fileList:
achivefilename = fname[len(rootdir)+1:]
print " * " + achivefilename
zout.write(fname, achivefilename)
zout.close()
print ("Totally %i pdb files was found with total size %.1f megabytes." % (count, total_size/1024/1024))
| Fix py script to package all .pdb files now that its moved to tools. | Fix py script to package all .pdb files now that its moved to tools.
| Python | apache-2.0 | pharos3d/tundra,BogusCurry/tundra,realXtend/tundra,realXtend/tundra,AlphaStaxLLC/tundra,pharos3d/tundra,realXtend/tundra,jesterKing/naali,BogusCurry/tundra,realXtend/tundra,jesterKing/naali,realXtend/tundra,BogusCurry/tundra,AlphaStaxLLC/tundra,AlphaStaxLLC/tundra,pharos3d/tundra,jesterKing/naali,jesterKing/naali,AlphaStaxLLC/tundra,BogusCurry/tundra,AlphaStaxLLC/tundra,realXtend/tundra,pharos3d/tundra,AlphaStaxLLC/tundra,jesterKing/naali,BogusCurry/tundra,BogusCurry/tundra,pharos3d/tundra,pharos3d/tundra,jesterKing/naali,jesterKing/naali | ---
+++
@@ -9,13 +9,13 @@
'''
fileList = []
-rootdir = os.curdir
-zip_file_name = "pdb-files.zip"
+rootdir = os.getcwd()[0:-6] # strip the /tools from the end
+zip_file_name = "Tundra-pdb.zip"
if os.path.isfile(zip_file_name):
print ("file '%s' already exits. Please remove the file before run this script." % (zip_file_name))
sys.exit()
-print ("All pdb files are stored to: %s" %(zip_file_name))
+print ("\nAll pdb files are stored to: %s" %(zip_file_name))
print ("Please wait, this might take several minutes...")
count = 0
@@ -30,9 +30,12 @@
total_size = total_size + os.path.getsize(os.path.join(root,file))
fileList.append(os.path.join(root,file))
-zout = zipfile.ZipFile(zip_file_name, "w")
+zout = zipfile.ZipFile(zip_file_name, "w")
+print "\nPacking..."
for fname in fileList:
- zout.write(fname)
+ achivefilename = fname[len(rootdir)+1:]
+ print " * " + achivefilename
+ zout.write(fname, achivefilename)
zout.close()
print ("Totally %i pdb files was found with total size %.1f megabytes." % (count, total_size/1024/1024)) |
42be4a39b9241ff3138efa52b316070713fc552a | people/serializers.py | people/serializers.py | from rest_framework import serializers
from people.models import Customer
from people.models import InternalUser
class CustomerSerializer(serializers.ModelSerializer):
phone_number = serializers.IntegerField(validators=[lambda x: len(str(x)) == 10])
class Meta:
model = Customer
fields = '__all__'
class InternalUserSerializer(serializers.ModelSerializer):
class Meta:
model = InternalUser
fields = '__all__'
| from django.contrib.gis import serializers
from rest_framework import serializers
from people.models import Customer
from people.models import InternalUser
class CustomerSerializer(serializers.ModelSerializer):
phone_number = serializers.IntegerField()
def validate_phone_number(self, val):
if len(str(val)) != 10:
raise serializers.ValidationError('The phone number must be 10 digits long')
class Meta:
model = Customer
fields = '__all__'
class InternalUserSerializer(serializers.ModelSerializer):
class Meta:
model = InternalUser
fields = '__all__'
| Put validators in phone numbers | Put validators in phone numbers
| Python | apache-2.0 | rameshgopalakrishnan/v_excel_inventory,rameshgopalakrishnan/v_excel_inventory,rameshgopalakrishnan/v_excel_inventory | ---
+++
@@ -1,10 +1,15 @@
+from django.contrib.gis import serializers
from rest_framework import serializers
from people.models import Customer
from people.models import InternalUser
class CustomerSerializer(serializers.ModelSerializer):
- phone_number = serializers.IntegerField(validators=[lambda x: len(str(x)) == 10])
+ phone_number = serializers.IntegerField()
+
+ def validate_phone_number(self, val):
+ if len(str(val)) != 10:
+ raise serializers.ValidationError('The phone number must be 10 digits long')
class Meta:
model = Customer |
326cf5d548e9dcb231cac8d10410c0f589c545a2 | cabot/celeryconfig.py | cabot/celeryconfig.py | import os
from datetime import timedelta
BROKER_URL = os.environ['CELERY_BROKER_URL']
# Set environment variable if you want to run tests without a redis instance
CELERY_ALWAYS_EAGER = os.environ.get('CELERY_ALWAYS_EAGER', False)
CELERY_RESULT_BACKEND = os.environ.get('CELERY_RESULT_BACKEND', None)
CELERY_IMPORTS = ('cabot.cabotapp.tasks', )
CELERYBEAT_SCHEDULER = "djcelery.schedulers.DatabaseScheduler"
CELERY_TASK_SERIALIZER = "json"
CELERY_ACCEPT_CONTENT = ['json', 'msgpack', 'yaml']
CELERYD_TASK_SOFT_TIME_LIMIT = 120
CELERYD_TASK_TIME_LIMIT = 150
CELERYBEAT_SCHEDULE = {
'run-all-checks': {
'task': 'cabot.cabotapp.tasks.run_all_checks',
'schedule': timedelta(seconds=60),
},
'update-shifts': {
'task': 'cabot.cabotapp.tasks.update_shifts',
'schedule': timedelta(seconds=1800),
},
'clean-db': {
'task': 'cabot.cabotapp.tasks.clean_db',
'schedule': timedelta(seconds=60 * 60 * 24),
},
}
CELERY_TIMEZONE = 'UTC'
| import os
from datetime import timedelta
from cabot.settings_utils import environ_get_list
BROKER_URL = environ_get_list(['CELERY_BROKER_URL', 'CACHE_URL'])
# Set environment variable if you want to run tests without a redis instance
CELERY_ALWAYS_EAGER = os.environ.get('CELERY_ALWAYS_EAGER', False)
CELERY_RESULT_BACKEND = os.environ.get('CELERY_RESULT_BACKEND', None)
CELERY_IMPORTS = ('cabot.cabotapp.tasks', )
CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
CELERY_TASK_SERIALIZER = 'json'
CELERY_ACCEPT_CONTENT = ['json', 'msgpack', 'yaml']
CELERYD_TASK_SOFT_TIME_LIMIT = 120
CELERYD_TASK_TIME_LIMIT = 150
CELERYBEAT_SCHEDULE = {
'run-all-checks': {
'task': 'cabot.cabotapp.tasks.run_all_checks',
'schedule': timedelta(seconds=60),
},
'update-shifts': {
'task': 'cabot.cabotapp.tasks.update_shifts',
'schedule': timedelta(seconds=1800),
},
'clean-db': {
'task': 'cabot.cabotapp.tasks.clean_db',
'schedule': timedelta(seconds=60 * 60 * 24),
},
}
CELERY_TIMEZONE = 'UTC'
| Support CACHE_URL for the Celery broker as well. | Support CACHE_URL for the Celery broker as well.
| Python | mit | maks-us/cabot,arachnys/cabot,arachnys/cabot,maks-us/cabot,arachnys/cabot,maks-us/cabot,maks-us/cabot,arachnys/cabot | ---
+++
@@ -1,13 +1,14 @@
import os
from datetime import timedelta
+from cabot.settings_utils import environ_get_list
-BROKER_URL = os.environ['CELERY_BROKER_URL']
+BROKER_URL = environ_get_list(['CELERY_BROKER_URL', 'CACHE_URL'])
# Set environment variable if you want to run tests without a redis instance
CELERY_ALWAYS_EAGER = os.environ.get('CELERY_ALWAYS_EAGER', False)
CELERY_RESULT_BACKEND = os.environ.get('CELERY_RESULT_BACKEND', None)
CELERY_IMPORTS = ('cabot.cabotapp.tasks', )
-CELERYBEAT_SCHEDULER = "djcelery.schedulers.DatabaseScheduler"
-CELERY_TASK_SERIALIZER = "json"
+CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
+CELERY_TASK_SERIALIZER = 'json'
CELERY_ACCEPT_CONTENT = ['json', 'msgpack', 'yaml']
CELERYD_TASK_SOFT_TIME_LIMIT = 120
CELERYD_TASK_TIME_LIMIT = 150 |
ac3819cc978c83db10d4bdd151cc2db4d3c28eaf | wagtail_embed_videos/migrations/0002_collections.py | wagtail_embed_videos/migrations/0002_collections.py | # Generated by Django 2.0.1 on 2018-01-28 01:16
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import wagtail.core.models
class Migration(migrations.Migration):
dependencies = [
('wagtailcore', '0040_page_draft_title'),
('wagtail_embed_videos', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='embedvideo',
name='collection',
field=models.ForeignKey(default=wagtail.core.models.get_root_collection_id, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='wagtailcore.Collection', verbose_name='collection'),
),
migrations.AlterField(
model_name='embedvideo',
name='created_at',
field=models.DateTimeField(auto_now_add=True, verbose_name='Created'),
),
migrations.AlterField(
model_name='embedvideo',
name='uploaded_by_user',
field=models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='Uploader'),
),
]
| # Generated by Django 2.0.1 on 2018-01-28 01:16
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
from wagtail.wagtailcore.models import Collection
class Migration(migrations.Migration):
dependencies = [
('wagtailcore', '0040_page_draft_title'),
('wagtail_embed_videos', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='embedvideo',
name='collection',
field=models.ForeignKey(default=wagtail.core.models.get_root_collection_id, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='wagtailcore.Collection', verbose_name='collection'),
),
migrations.AlterField(
model_name='embedvideo',
name='created_at',
field=models.DateTimeField(auto_now_add=True, verbose_name='Created'),
),
migrations.AlterField(
model_name='embedvideo',
name='uploaded_by_user',
field=models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='Uploader'),
),
]
| Change importing in order to make Wagtail<2.0 comp | Change importing in order to make Wagtail<2.0 comp | Python | bsd-3-clause | SalahAdDin/wagtail-embedvideos,SalahAdDin/wagtail-embedvideos,SalahAdDin/wagtail-embedvideos | ---
+++
@@ -3,7 +3,7 @@
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
-import wagtail.core.models
+from wagtail.wagtailcore.models import Collection
class Migration(migrations.Migration): |
99aabf10b091df07a023dbf638cf605c01db1d74 | src/pcapi/utils/admin.py | src/pcapi/utils/admin.py | import argparse
import os
import shutil
from pcapi import get_resource
def create_skeleton(path):
if os.path.exists(path):
print 'Directory already exist'
return False
config_file = get_resource('pcapi.ini.example')
# create the folder structure
os.makedirs(os.path.join(path, 'data'))
os.makedirs(os.path.join(path, 'logs'))
project_dir = os.path.abspath(path)
# copy the config file
shutil.copyfile(config_file, os.path.join(project_dir, 'pcapi.ini'))
return True
def parse_commandline():
# main parser
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers(help='actions',
dest='action')
# runserver parser
subparsers.add_parser('runserver', help='run the pcapi server')
# create parser
create = subparsers.add_parser('create',
help='create the pcapi instance structure')
create.add_argument('path',
action='store',
help='instance path')
args = parser.parse_args()
if args.action == 'create':
if not create_skeleton(args.path):
return
elif args.action == 'runserver':
from pcapi.server import runserver
runserver()
if __name__ == '__main__':
parse_commandline()
| import argparse
import os
import shutil
from pkg_resources import resource_filename
def create_skeleton(path):
if os.path.exists(path):
print 'Directory already exist'
return False
config_file = resource_filename('pcapi', 'data/pcapi.ini.example')
# create the folder structure
os.makedirs(os.path.join(path, 'data'))
os.makedirs(os.path.join(path, 'logs'))
project_dir = os.path.abspath(path)
# copy the config file
shutil.copyfile(config_file, os.path.join(project_dir, 'pcapi.ini'))
return True
def parse_commandline():
# main parser
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers(help='actions',
dest='action')
# runserver parser
subparsers.add_parser('runserver', help='run the pcapi server')
# create parser
create = subparsers.add_parser('create',
help='create the pcapi instance structure')
create.add_argument('path',
action='store',
help='instance path')
args = parser.parse_args()
if args.action == 'create':
if not create_skeleton(args.path):
return
elif args.action == 'runserver':
from pcapi.server import runserver
runserver()
if __name__ == '__main__':
parse_commandline()
| Use the pkg api for reading the resources in the package | Use the pkg api for reading the resources in the package
Issue cobweb-eu/pcapi#18
| Python | bsd-3-clause | cobweb-eu/pcapi,xmichael/pcapi,edina/pcapi,xmichael/pcapi,cobweb-eu/pcapi,edina/pcapi | ---
+++
@@ -2,7 +2,7 @@
import os
import shutil
-from pcapi import get_resource
+from pkg_resources import resource_filename
def create_skeleton(path):
@@ -10,7 +10,7 @@
print 'Directory already exist'
return False
- config_file = get_resource('pcapi.ini.example')
+ config_file = resource_filename('pcapi', 'data/pcapi.ini.example')
# create the folder structure
os.makedirs(os.path.join(path, 'data')) |
b5f980b700707ecc611746f93b1f62650c76c451 | pgcrypto_fields/aggregates.py | pgcrypto_fields/aggregates.py | from django.db import models
class Decrypt(models.Aggregate):
"""`Decrypt` creates an alias for `DecryptFunctionSQL`.
`alias` is `{self.lookup}__decrypt` where 'decrypt' is `self.name.lower()`.
`self.lookup` is defined in `models.Aggregate.__init__`.
"""
def add_to_query(self, query, alias, col, source, is_summary):
"""Add the aggregate to the query."""
from pgcrypto_fields.sql import aggregates
klass = getattr(aggregates, self.name)
aggregate = klass(
col,
source=source,
is_summary=is_summary,
**self.extra
)
query.aggregates[alias] = aggregate
class PGPPub(Decrypt):
"""PGP public key based aggregation."""
name = 'PGPPub'
class PGPSym(Decrypt):
"""PGP symmetric key based aggregation."""
name = 'PGPSym'
| from django.db import models
from pgcrypto_fields.sql import aggregates
class Decrypt(models.Aggregate):
"""`Decrypt` creates an alias for `DecryptFunctionSQL`.
`alias` is `{self.lookup}__decrypt` where 'decrypt' is `self.name.lower()`.
`self.lookup` is defined in `models.Aggregate.__init__`.
"""
def add_to_query(self, query, alias, col, source, is_summary):
"""Add the aggregate to the query."""
klass = getattr(aggregates, self.name)
aggregate = klass(
col,
source=source,
is_summary=is_summary,
**self.extra
)
query.aggregates[alias] = aggregate
class PGPPub(Decrypt):
"""PGP public key based aggregation."""
name = 'PGPPub'
class PGPSym(Decrypt):
"""PGP symmetric key based aggregation."""
name = 'PGPSym'
| Move import to top of the file | Move import to top of the file
| Python | bsd-2-clause | incuna/django-pgcrypto-fields,atdsaa/django-pgcrypto-fields | ---
+++
@@ -1,4 +1,6 @@
from django.db import models
+
+from pgcrypto_fields.sql import aggregates
class Decrypt(models.Aggregate):
@@ -11,8 +13,6 @@
def add_to_query(self, query, alias, col, source, is_summary):
"""Add the aggregate to the query."""
- from pgcrypto_fields.sql import aggregates
-
klass = getattr(aggregates, self.name)
aggregate = klass(
col, |
f4807197cb48da72a88a0b12c950902614f4b9f6 | celery_bungiesearch/tasks/bulkdelete.py | celery_bungiesearch/tasks/bulkdelete.py | from .celerybungie import CeleryBungieTask
from bungiesearch import Bungiesearch
from bungiesearch.utils import update_index
class BulkDeleteTask(CeleryBungieTask):
def run(self, model, instances, **kwargs):
settings = Bungiesearch.BUNGIE.get('SIGNALS', {})
buffer_size = settings.get('BUFFER_SIZE', 100)
update_index(instances, model.__name__, action='delete', bulk_size=buffer_size)
| from .celerybungie import CeleryBungieTask
from bungiesearch import Bungiesearch
from bungiesearch.utils import update_index
from elasticsearch import TransportError
class BulkDeleteTask(CeleryBungieTask):
def run(self, model, instances, **kwargs):
settings = Bungiesearch.BUNGIE.get('SIGNALS', {})
buffer_size = settings.get('BUFFER_SIZE', 100)
try:
update_index(instances, model.__name__, action='delete', bulk_size=buffer_size)
except TransportError as e:
if e.status_code == 404:
return
raise
| Add error handling code to bulk delete | Add error handling code to bulk delete
| Python | mit | afrancis13/celery-bungiesearch | ---
+++
@@ -1,6 +1,8 @@
from .celerybungie import CeleryBungieTask
from bungiesearch import Bungiesearch
from bungiesearch.utils import update_index
+
+from elasticsearch import TransportError
class BulkDeleteTask(CeleryBungieTask):
@@ -8,4 +10,10 @@
def run(self, model, instances, **kwargs):
settings = Bungiesearch.BUNGIE.get('SIGNALS', {})
buffer_size = settings.get('BUFFER_SIZE', 100)
- update_index(instances, model.__name__, action='delete', bulk_size=buffer_size)
+
+ try:
+ update_index(instances, model.__name__, action='delete', bulk_size=buffer_size)
+ except TransportError as e:
+ if e.status_code == 404:
+ return
+ raise |
02bb859424301bf7697a444a50a23c8c834466ab | loldb/__main__.py | loldb/__main__.py | """
Usage:
loldb --path=<path> [options]
loldb -h | --help
loldb --version
Options:
-p, --path=<path> Location of LoL installation.
--lang=<language> Language to output [default: en_US].
-h, --help Display this message.
--version Display version number.
"""
import os
import docopt
from . import __version__
from .provider import ResourceProvider
from .champion import get_champions
from .item import get_items
def main(args):
path = args['--path']
if not os.path.isdir(path):
print('Invalid directory "%s"' % path)
provider = ResourceProvider(
lol_path=path,
language=args['--lang']
)
champions = get_champions(provider)
items = get_items(provider)
# TODO: Save output
if __name__ == '__main__':
main(docopt.docopt(__doc__, version='LoLDB v%s' % __version__))
| """
Usage:
loldb --path=<path> [options]
loldb -h | --help
loldb --version
Options:
-p, --path=<path> Location of LoL installation.
-o, --out=<path> File path to save json representation.
--lang=<language> Language to output [default: en_US].
-h, --help Display this message.
--version Display version number.
"""
import json
import os
import docopt
from . import __version__
from .provider import ResourceProvider
from .champion import get_champions
from .item import get_items
from .converter import (
format_champion,
format_item,
)
from .validate import validate_champions
class Encoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, set):
return list(o)
return super(Encoder, self).default(o)
def main(args):
path = args['--path']
if not os.path.isdir(path):
print('Invalid directory "%s"' % path)
output_path = args['--out']
provider = ResourceProvider(
lol_path=path,
language=args['--lang']
)
champions = get_champions(provider)
print('\n'.join(validate_champions(champions)))
champions = map(format_champion, champions)
items = get_items(provider)
items = dict(zip(items.keys(), map(format_item, items.values())))
output = {
'champions': champions,
'items': items,
}
output = json.dumps(output, cls=Encoder)
if output_path is not None:
with open(args['--out'], 'w') as f:
f.write(output)
else:
print(output)
if __name__ == '__main__':
main(docopt.docopt(__doc__, version='LoLDB v%s' % __version__))
| Save data to file from CLI. | Save data to file from CLI.
| Python | mit | Met48/League-of-Legends-DB | ---
+++
@@ -6,12 +6,14 @@
Options:
-p, --path=<path> Location of LoL installation.
+ -o, --out=<path> File path to save json representation.
--lang=<language> Language to output [default: en_US].
-h, --help Display this message.
--version Display version number.
"""
+import json
import os
import docopt
@@ -20,19 +22,46 @@
from .provider import ResourceProvider
from .champion import get_champions
from .item import get_items
+from .converter import (
+ format_champion,
+ format_item,
+)
+from .validate import validate_champions
+
+
+class Encoder(json.JSONEncoder):
+ def default(self, o):
+ if isinstance(o, set):
+ return list(o)
+ return super(Encoder, self).default(o)
def main(args):
path = args['--path']
if not os.path.isdir(path):
print('Invalid directory "%s"' % path)
+ output_path = args['--out']
provider = ResourceProvider(
lol_path=path,
language=args['--lang']
)
champions = get_champions(provider)
+ print('\n'.join(validate_champions(champions)))
+ champions = map(format_champion, champions)
+
items = get_items(provider)
- # TODO: Save output
+ items = dict(zip(items.keys(), map(format_item, items.values())))
+
+ output = {
+ 'champions': champions,
+ 'items': items,
+ }
+ output = json.dumps(output, cls=Encoder)
+ if output_path is not None:
+ with open(args['--out'], 'w') as f:
+ f.write(output)
+ else:
+ print(output)
if __name__ == '__main__':
main(docopt.docopt(__doc__, version='LoLDB v%s' % __version__)) |
78bbb6cbf145ee7d78c41f39b4f078d986265232 | comics/comics/pennyarcade.py | comics/comics/pennyarcade.py | from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "Penny Arcade"
language = "en"
url = "http://penny-arcade.com/"
start_date = "1998-11-18"
rights = "Mike Krahulik & Jerry Holkins"
class Crawler(CrawlerBase):
history_capable_date = "1998-11-18"
schedule = "Mo,We,Fr"
time_zone = "US/Pacific"
# Without User-Agent set, the server returns 403 Forbidden
headers = {"User-Agent": "Mozilla/4.0"}
def crawl(self, pub_date):
page_url = "http://penny-arcade.com/comic/%s" % (
pub_date.strftime("%Y/%m/%d"),
)
page = self.parse_page(page_url)
title = page.alt("#comicFrame img")
url = page.src("#comicFrame img")
return CrawlerImage(url, title)
| from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "Penny Arcade"
language = "en"
url = "http://penny-arcade.com/"
start_date = "1998-11-18"
rights = "Mike Krahulik & Jerry Holkins"
class Crawler(CrawlerBase):
history_capable_date = "1998-11-18"
schedule = "Mo,We,Fr"
time_zone = "US/Pacific"
# Without User-Agent set, the server returns 403 Forbidden
headers = {"User-Agent": "Mozilla/4.0"}
def crawl(self, pub_date):
page_url = "http://penny-arcade.com/comic/%s" % (
pub_date.strftime("%Y/%m/%d"),
)
page = self.parse_page(page_url)
# The site gives a 404 page without a real 404 code
page_title = page.text("title")
if page_title == "Penny Arcade - 404":
return
title = page.alt("#comicFrame img")
url = page.src("#comicFrame img")
return CrawlerImage(url, title)
| Check "Penny Arcade" for 404 page without 404 header | Check "Penny Arcade" for 404 page without 404 header
| Python | agpl-3.0 | jodal/comics,jodal/comics,datagutten/comics,jodal/comics,datagutten/comics,datagutten/comics,datagutten/comics,jodal/comics | ---
+++
@@ -23,6 +23,11 @@
pub_date.strftime("%Y/%m/%d"),
)
page = self.parse_page(page_url)
+ # The site gives a 404 page without a real 404 code
+ page_title = page.text("title")
+ if page_title == "Penny Arcade - 404":
+ return
+
title = page.alt("#comicFrame img")
url = page.src("#comicFrame img")
return CrawlerImage(url, title) |
39ebab1a41975bd37549129e2b915c99d153ee0a | src/bindings/pygaia/scripts/classification/balanced_sampling.py | src/bindings/pygaia/scripts/classification/balanced_sampling.py | # This script creates a balanced ground truth given an unbalanced on by applying
# random sampling. The size of the resulting classes equals to the minimum size
# among original classes.
import sys
import yaml
from random import shuffle
try:
input_gt = sys.argv[1]
balanced_gt = sys.argv[2]
except:
print 'usage:', sys.argv[0], '<input-grounttruth> <output-balanced-groundtruth>'
sys.exit()
input_gt = yaml.load(open(input_gt, 'r'))
gt = {}
for t, l in input_gt['groundTruth'].items():
gt.setdefault(l, [])
gt[l] += [t]
for label in gt:
print label, len(gt[label])
min_class_len = min(len(gt[label]) for label in gt)
print 'Minimum class length:', min_class_len
for label in gt:
shuffle(gt[label])
for track in gt[label][:min_class_len]:
input_gt['groundTruth'][track] = label
with open(balanced_gt, 'w') as f:
yaml.dump(input_gt, f)
| # This script creates a balanced ground truth given an unbalanced on by applying
# random sampling. The size of the resulting classes equals to the minimum size
# among original classes.
import sys
import yaml
from random import shuffle
try:
input_gt = sys.argv[1]
balanced_gt = sys.argv[2]
except:
print 'usage:', sys.argv[0], '<input-grounttruth> <output-balanced-groundtruth>'
sys.exit()
input_gt = yaml.load(open(input_gt, 'r'))
gt = {}
for t, l in input_gt['groundTruth'].items():
gt.setdefault(l, [])
gt[l] += [t]
for label in gt:
print label, len(gt[label])
min_class_len = min(len(gt[label]) for label in gt)
print 'Minimum class length:', min_class_len
input_gt['groundTruth'] = {}
for label in gt:
shuffle(gt[label])
for track in gt[label][:min_class_len]:
input_gt['groundTruth'][track] = label
with open(balanced_gt, 'w') as f:
yaml.dump(input_gt, f)
| Fix previous commit (balancing scripts) | Fix previous commit (balancing scripts)
| Python | agpl-3.0 | kartikgupta0909/gaia,ChristianFrisson/gaia,MTG/gaia,kartikgupta0909/gaia,kartikgupta0909/gaia,ChristianFrisson/gaia,ChristianFrisson/gaia,MTG/gaia,MTG/gaia,ChristianFrisson/gaia,kartikgupta0909/gaia,MTG/gaia | ---
+++
@@ -26,6 +26,7 @@
min_class_len = min(len(gt[label]) for label in gt)
print 'Minimum class length:', min_class_len
+input_gt['groundTruth'] = {}
for label in gt:
shuffle(gt[label])
for track in gt[label][:min_class_len]: |
5458a44ed193a7c4a37a3414e860a23dc5564c39 | github3/repos/deployment.py | github3/repos/deployment.py | # -*- coding: utf-8 -*-
from github3.models import GitHubCore
from github3.users import User
class Deployment(GitHubCore):
CUSTOM_HEADERS = {
'Accept': 'application/vnd.github.cannonball-preview+json'
}
def __init__(self, deployment, session=None):
super(Deployment, self).__init__(deployment, session)
self._api = deployment.get('url')
#: GitHub's id of this deployment
self.id = deployment.get('id')
#: SHA of the branch on GitHub
self.sha = deployment.get('sha')
#: User object representing the creator of the deployment
self.creator = deployment.get('creator')
if self.creator:
self.creator = User(self.creator, self)
#: JSON string payload of the Deployment
self.payload = deployment.get('payload')
#: Date the Deployment was created
self.created_at = deployment.get('created_at')
if self.created_at:
self.created_at = self._strptime(self.created_at)
#: Date the Deployment was updated
self.updated_at = deployment.get('updated_at')
if self.updated_at:
self.updated_at = self._strptime(self.updated_at)
#: Description of the deployment
self.description = deployment.get('description')
#: URL to get the statuses of this deployment
self.statuses_url = deployment.get('statuses_url')
| # -*- coding: utf-8 -*-
from github3.models import GitHubCore
from github3.users import User
class Deployment(GitHubCore):
CUSTOM_HEADERS = {
'Accept': 'application/vnd.github.cannonball-preview+json'
}
def __init__(self, deployment, session=None):
super(Deployment, self).__init__(deployment, session)
self._api = deployment.get('url')
#: GitHub's id of this deployment
self.id = deployment.get('id')
#: SHA of the branch on GitHub
self.sha = deployment.get('sha')
#: User object representing the creator of the deployment
self.creator = deployment.get('creator')
if self.creator:
self.creator = User(self.creator, self)
#: JSON string payload of the Deployment
self.payload = deployment.get('payload')
#: Date the Deployment was created
self.created_at = deployment.get('created_at')
if self.created_at:
self.created_at = self._strptime(self.created_at)
#: Date the Deployment was updated
self.updated_at = deployment.get('updated_at')
if self.updated_at:
self.updated_at = self._strptime(self.updated_at)
#: Description of the deployment
self.description = deployment.get('description')
#: URL to get the statuses of this deployment
self.statuses_url = deployment.get('statuses_url')
def __repr__(self):
return '<Deployment [{0} @ {1}]>'.format(self.id, self.sha)
| Add repr to Deployment class | Add repr to Deployment class
| Python | bsd-3-clause | wbrefvem/github3.py,icio/github3.py,jim-minter/github3.py,itsmemattchung/github3.py,sigmavirus24/github3.py,ueg1990/github3.py,agamdua/github3.py,balloob/github3.py,krxsky/github3.py,degustaf/github3.py,christophelec/github3.py,h4ck3rm1k3/github3.py | ---
+++
@@ -41,3 +41,6 @@
#: URL to get the statuses of this deployment
self.statuses_url = deployment.get('statuses_url')
+
+ def __repr__(self):
+ return '<Deployment [{0} @ {1}]>'.format(self.id, self.sha) |
8ee8c42cd4d4be09d47cb7ebf5941583183bb3f3 | logger/utilities.py | logger/utilities.py | #!/usr/bin/env python3
"""Small utility functions for use in various places."""
__all__ = ["pick", "is_dunder", "convert_to_od"]
import collections
def pick(arg, default):
"""Handler for default versus given argument."""
return default if arg is None else arg
def is_dunder(name):
"""Return True if a __dunder__ name, False otherwise."""
return name[:2] == name[-2:] == "__" and "_" not in (name[2:3], name[-3:-2])
def convert_to_od(mapping, order):
"""Convert mapping to an OrderedDict instance using order."""
return collections.OrderedDict([(i, mapping[i]) for i in order])
| #!/usr/bin/env python3
"""Small utility functions for use in various places."""
__all__ = ["pick", "is_dunder", "convert_to_od"]
import collections
import itertools
def pick(arg, default):
"""Handler for default versus given argument."""
return default if arg is None else arg
def is_dunder(name):
"""Return True if a __dunder__ name, False otherwise."""
return name[:2] == name[-2:] == "__" and "_" not in (name[2:3], name[-3:-2])
def convert_to_od(mapping, order):
"""Convert mapping to an OrderedDict instance using order."""
return collections.OrderedDict([(i, mapping[i]) for i in order])
def counter_to_iterable(counter):
"""Convert a counter to an iterable / iterator."""
for item in itertools.starmap(itertools.repeat, counter):
yield from item
| Add a 'counter_to_iterable' utility function | Add a 'counter_to_iterable' utility function
| Python | bsd-2-clause | Vgr255/logging | ---
+++
@@ -5,6 +5,7 @@
__all__ = ["pick", "is_dunder", "convert_to_od"]
import collections
+import itertools
def pick(arg, default):
"""Handler for default versus given argument."""
@@ -17,3 +18,8 @@
def convert_to_od(mapping, order):
"""Convert mapping to an OrderedDict instance using order."""
return collections.OrderedDict([(i, mapping[i]) for i in order])
+
+def counter_to_iterable(counter):
+ """Convert a counter to an iterable / iterator."""
+ for item in itertools.starmap(itertools.repeat, counter):
+ yield from item |
4f48fa8636000a1b780c962288bb588b2760465f | pyheufybot/utils/fileutils.py | pyheufybot/utils/fileutils.py | import codecs, os
def readFile(filePath):
try:
with open(filePath, "r") as f:
return f.read()
except Exception as e:
print "*** ERROR: An exception occurred while reading file \"{}\" ({})".format(filePath, e)
return None
def writeFile(filePath, line, append=False):
try:
action = "a+" if append else "w"
with codecs.open(filePath, action, "utf-8") as f:
f.write(line)
return True
except Exception as e:
print "*** ERROR: An exception occurred while writing file \"{}\" ({})".format(filePath, e)
def createDirs(path):
if not os.path.exists(path):
os.makedirs(path)
| import codecs, os, time
def readFile(filePath):
try:
with open(filePath, "r") as f:
return f.read()
except Exception as e:
today = time.strftime("[%H:%M:%S]")
print "{} *** ERROR: An exception occurred while reading file \"{}\" ({})".format(today, filePath, e)
return None
def writeFile(filePath, line, append=False):
try:
action = "a+" if append else "w"
with codecs.open(filePath, action, "utf-8") as f:
f.write(line)
return True
except Exception as e:
today = time.strftime("[%H:%M:%S]")
print "{} *** ERROR: An exception occurred while writing file \"{}\" ({})".format(today, filePath, e)
return False
def createDirs(path):
if not os.path.exists(path):
os.makedirs(path)
| Improve error logging in file IO | Improve error logging in file IO
| Python | mit | Heufneutje/PyHeufyBot,Heufneutje/PyHeufyBot | ---
+++
@@ -1,11 +1,12 @@
-import codecs, os
+import codecs, os, time
def readFile(filePath):
try:
with open(filePath, "r") as f:
return f.read()
except Exception as e:
- print "*** ERROR: An exception occurred while reading file \"{}\" ({})".format(filePath, e)
+ today = time.strftime("[%H:%M:%S]")
+ print "{} *** ERROR: An exception occurred while reading file \"{}\" ({})".format(today, filePath, e)
return None
def writeFile(filePath, line, append=False):
@@ -15,7 +16,9 @@
f.write(line)
return True
except Exception as e:
- print "*** ERROR: An exception occurred while writing file \"{}\" ({})".format(filePath, e)
+ today = time.strftime("[%H:%M:%S]")
+ print "{} *** ERROR: An exception occurred while writing file \"{}\" ({})".format(today, filePath, e)
+ return False
def createDirs(path):
if not os.path.exists(path): |
c2128be32df870a601224be9f7e746dbd9cb72ee | makerscience_profile/api.py | makerscience_profile/api.py | from .models import MakerScienceProfile
from tastypie.resources import ModelResource
from tastypie.authorization import DjangoAuthorization
from tastypie import fields
from tastypie.constants import ALL_WITH_RELATIONS
from dataserver.authentication import AnonymousApiKeyAuthentication
from accounts.api import ProfileResource
from scout.api import PostalAddressResource
class MakerScienceProfileResource(ModelResource):
parent = fields.OneToOneField(ProfileResource, 'parent', full=True)
location = fields.ToOneField(PostalAddressResource, 'location', null=True, blank=True, full=True)
class Meta:
queryset = MakerScienceProfile.objects.all()
allowed_methods = ['get', 'post', 'put', 'patch']
resource_name = 'makerscience/profile'
authentication = AnonymousApiKeyAuthentication()
authorization = DjangoAuthorization()
always_return_data = True
filtering = {
'parent' : ALL_WITH_RELATIONS,
}
def dehydrate(self, bundle):
bundle.data["full_name"] = "%s %s" % (bundle.obj.parent.user.first_name, bundle.obj.parent.user.last_name)
return bundle
| from .models import MakerScienceProfile
from tastypie.resources import ModelResource
from tastypie.authorization import DjangoAuthorization
from tastypie import fields
from tastypie.constants import ALL_WITH_RELATIONS
from dataserver.authentication import AnonymousApiKeyAuthentication
from accounts.api import ProfileResource
from scout.api import PostalAddressResource
from projects.api import ProjectTeamResource
class MakerScienceProfileResource(ModelResource):
parent = fields.OneToOneField(ProfileResource, 'parent', full=True)
location = fields.ToOneField(PostalAddressResource, 'location', null=True, blank=True, full=True)
teams = fields.ToManyField(ProjectTeamResource, 'parent__projectteam_set', full=True, null=True)
class Meta:
queryset = MakerScienceProfile.objects.all()
allowed_methods = ['get', 'post', 'put', 'patch']
resource_name = 'makerscience/profile'
authentication = AnonymousApiKeyAuthentication()
authorization = DjangoAuthorization()
always_return_data = True
filtering = {
'parent' : ALL_WITH_RELATIONS,
}
def dehydrate(self, bundle):
bundle.data["full_name"] = "%s %s" % (bundle.obj.parent.user.first_name, bundle.obj.parent.user.last_name)
return bundle
| Add teams field in MakerScienceProfileResource | Add teams field in MakerScienceProfileResource
| Python | agpl-3.0 | atiberghien/makerscience-server,atiberghien/makerscience-server | ---
+++
@@ -6,10 +6,13 @@
from dataserver.authentication import AnonymousApiKeyAuthentication
from accounts.api import ProfileResource
from scout.api import PostalAddressResource
+from projects.api import ProjectTeamResource
class MakerScienceProfileResource(ModelResource):
parent = fields.OneToOneField(ProfileResource, 'parent', full=True)
location = fields.ToOneField(PostalAddressResource, 'location', null=True, blank=True, full=True)
+
+ teams = fields.ToManyField(ProjectTeamResource, 'parent__projectteam_set', full=True, null=True)
class Meta:
queryset = MakerScienceProfile.objects.all() |
4189c6cc8d6e9ec83753ce2f7da39273a553196e | third_party/__init__.py | third_party/__init__.py | import os.path
import sys
# This bit of evil should inject third_party into the path for relative imports.
sys.path.append(os.path.dirname(__file__))
| import os.path
import sys
# This bit of evil should inject third_party into the path for relative imports.
sys.path.insert(1, os.path.dirname(__file__))
| Insert third_party into the second slot of sys.path rather than the last slot | Insert third_party into the second slot of sys.path rather than the last slot
| Python | apache-2.0 | catap/namebench,jimmsta/namebench-1 | ---
+++
@@ -2,4 +2,4 @@
import sys
# This bit of evil should inject third_party into the path for relative imports.
-sys.path.append(os.path.dirname(__file__))
+sys.path.insert(1, os.path.dirname(__file__)) |
0321591b9a9596c876e615ac9bacfe63e2c44b2c | midterm/problem8.py | midterm/problem8.py | # Problem 8
# 20.0 points possible (graded)
# Implement a function that meets the specifications below.
# For example, the following functions, f, g, and test code:
# def f(i):
# return i + 2
# def g(i):
# return i > 5
# L = [0, -10, 5, 6, -4]
# print(applyF_filterG(L, f, g))
# print(L)
# Should print:
# 6
# [5, 6]
def f(i):
return i + 2
def g(i):
return i > 5
def applyF_filterG(L, f, g):
"""
Assumes L is a list of integers
Assume functions f and g are defined for you.
f takes in an integer, applies a function, returns another integer
g takes in an integer, applies a Boolean function, returns either True or False
Mutates L such that, for each element i originally in L, L contains i if g(f(i)) returns True, and no other elements
Returns the largest element in the mutated L or -1 if the list is empty
"""
l = L[:]
for i in l:
if g(f(i)) is False:
L.remove(i)
if len(L) == 0:
return -1
else:
return max(L)
L = [0, -10, 5, 6, -4]
print(applyF_filterG(L, f, g))
print(L) | # Problem 8
# 20.0 points possible (graded)
# Implement a function that meets the specifications below.
# For example, the following functions, f, g, and test code:
# def f(i):
# return i + 2
# def g(i):
# return i > 5
# L = [0, -10, 5, 6, -4]
# print(applyF_filterG(L, f, g))
# print(L)
# Should print:
# 6
# [5, 6]
def f(i):
return i + 2
def g(i):
return i > 5
def applyF_filterG(L, f, g):
"""
Assumes L is a list of integers
Assume functions f and g are defined for you.
f takes in an integer, applies a function, returns another integer
g takes in an integer, applies a Boolean function, returns either True or False
Mutates L such that, for each element i originally in L, L contains i if g(f(i)) returns True, and no other elements
Returns the largest element in the mutated L or -1 if the list is empty
"""
l = L[:]
for i in l:
if not g(f(i)):
L.remove(i)
if len(L) == 0:
return -1
else:
return max(L)
L = [0, -10, 5, 6, -4]
print(applyF_filterG(L, f, g))
print(L) | Fix applyF_filterG function to pass test case | Fix applyF_filterG function to pass test case
| Python | mit | Kunal57/MIT_6.00.1x | ---
+++
@@ -34,7 +34,7 @@
"""
l = L[:]
for i in l:
- if g(f(i)) is False:
+ if not g(f(i)):
L.remove(i)
if len(L) == 0:
return -1 |
bb26d56cbce6d7f5d12bd9a2e5c428df6bf4b1d7 | fabfile.py | fabfile.py | import sys
import sh
from fabric import api as fab
sed = sh.sed.bake('-i bak -e')
TRAVIS_YAML = '.travis.yml'
REPLACE_LANGUAGE = 's/language: .*/language: {}/'
def is_dirty():
return "" != sh.git.status(porcelain=True).strip()
def release(language, message):
if is_dirty():
sys.exit("Repo must be in clean state before deploying. Please commit changes.")
sed(REPLACE_LANGUAGE.format(language), TRAVIS_YAML)
if is_dirty():
sh.git.add(TRAVIS_YAML)
sh.git.commit(m=message)
sh.git.pull(rebase=True)
sh.git.push()
@fab.task
def release_osx():
release('objective-c', "Release OS X")
@fab.task
def release_linux():
release('python', "Release Linux")
| import sys
import sh
from fabric import api as fab
sed = sh.sed.bake('-i bak -e')
TRAVIS_YAML = '.travis.yml'
REPLACE_LANGUAGE = 's/language: .*/language: {}/'
def is_dirty():
return "" != sh.git.status(porcelain=True).strip()
def release(language, message):
if is_dirty():
sys.exit("Repo must be in clean state before deploying. Please commit changes.")
sed(REPLACE_LANGUAGE.format(language), TRAVIS_YAML)
if is_dirty():
sh.git.add(TRAVIS_YAML)
sh.git.commit(m=message, allow_empty=True)
sh.git.pull(rebase=True)
sh.git.push()
@fab.task
def release_osx():
release('objective-c', "Release OS X")
@fab.task
def release_linux():
release('python', "Release Linux")
| Allow empty so we can force new build | Allow empty so we can force new build
| Python | bsd-3-clause | datamicroscopes/release,jzf2101/release,datamicroscopes/release,jzf2101/release | ---
+++
@@ -18,7 +18,7 @@
sed(REPLACE_LANGUAGE.format(language), TRAVIS_YAML)
if is_dirty():
sh.git.add(TRAVIS_YAML)
- sh.git.commit(m=message)
+ sh.git.commit(m=message, allow_empty=True)
sh.git.pull(rebase=True)
sh.git.push()
|
2201a23aa0407496402f0766d09f5df9951b9709 | models/employees.py | models/employees.py | import datetime
from openedoo.core.libs.tools import hashing_werkzeug
from openedoo_project import db
from .users import User
class Employee(User):
@staticmethod
def is_exist(self, username):
employee = self.query.get(username=username).first()
return employee
@classmethod
def get_public_list(self):
employees = self.query.with_entities(self.username,
self.fullname,
self.nip)
return employees
@classmethod
def check_records(self):
employees = self.query.limit(1).all()
return employees
@classmethod
def add(self, form={}):
if not form:
raise ValueError('Form is supplied with wrong data.')
data = {
'username': form['username'],
'fullname': form['fullname'],
'password': hashing_werkzeug(form['password']),
'nip': form['nip'],
'created': datetime.datetime.now()
}
employeeData = self(data)
db.session.add(employeeData)
return db.session.commit()
| import datetime
from openedoo.core.libs.tools import hashing_werkzeug
from openedoo_project import db
from .users import User
class Employee(User):
@classmethod
def is_exist(self, username):
employee = self.query.get(username=username).first()
return employee
@classmethod
def get_public_list(self):
employees = self.query.with_entities(self.username,
self.fullname,
self.nip)
return employees
@classmethod
def check_records(self):
employees = self.query.limit(1).all()
return employees
@classmethod
def add(self, form={}):
if not form:
raise ValueError('Form is supplied with wrong data.')
data = {
'username': form['username'],
'fullname': form['fullname'],
'password': hashing_werkzeug(form['password']),
'nip': form['nip'],
'created': datetime.datetime.now()
}
employeeData = self(data)
db.session.add(employeeData)
return db.session.commit()
| Change bad function decorator implementation | Change bad function decorator implementation
| Python | mit | openedoo/module_employee,openedoo/module_employee,openedoo/module_employee | ---
+++
@@ -6,7 +6,7 @@
class Employee(User):
- @staticmethod
+ @classmethod
def is_exist(self, username):
employee = self.query.get(username=username).first()
return employee |
5be4dec175c9e672ec5e7e011be604ad39459565 | apps/polls/admin.py | apps/polls/admin.py | from django.contrib import admin
from apps.polls.models import Poll, Choice
class ChoiceInline(admin.TabularInline):
model = Choice
extra = 3
class PollAdmin(admin.ModelAdmin):
fieldsets = [
(None, {'fields': ['question']}),
('Date information', {'fields': ['pub_date'], 'classes': ['collapse']}),
]
inlines = [ChoiceInline]
list_display = ('question', 'pub_date', 'was_published_recently')
list_filter = ['pub_date']
search_fields = ['question']
admin.site.register(Poll, PollAdmin)
admin.site.register(Choice) | from django.contrib import admin
from apps.polls.models import Poll, Choice
class ChoiceInline(admin.TabularInline):
model = Choice
extra = 3
class PollAdmin(admin.ModelAdmin):
fieldsets = [
(None, {'fields': ['question']}),
('Date information', {'fields': ['pub_date'], 'classes': ['collapse']}),
]
inlines = [ChoiceInline]
list_display = ('question', 'pub_date', 'was_published_recently')
list_filter = ['pub_date']
search_fields = ['question']
date_hierarchy = 'pub_date'
admin.site.register(Poll, PollAdmin)
admin.site.register(Choice) | Add date_hierarchy = 'pub_date' to PollAdmin | Add date_hierarchy = 'pub_date' to PollAdmin
| Python | bsd-3-clause | teracyhq/django-tutorial,datphan/teracy-tutorial | ---
+++
@@ -15,6 +15,7 @@
list_display = ('question', 'pub_date', 'was_published_recently')
list_filter = ['pub_date']
search_fields = ['question']
+ date_hierarchy = 'pub_date'
admin.site.register(Poll, PollAdmin) |
143c0188566ac07ac3fdb9e6dfca8863cc169bbb | ts3observer/observer.py | ts3observer/observer.py | '''
Created on Nov 9, 2014
@author: fechnert
'''
import yaml
import logging
import features
class Configuration(dict):
''' Read and provide the yaml config '''
def __init__(self, path):
''' Initialize the file '''
with open(path, 'r') as f:
self.update(yaml.load(f))
class Supervisor(object):
''' Guide the different features to do their work '''
def __init__(self):
''' Initialize the Config '''
self.config = Configuration('config.yml')
def execute(self):
for feature in self._import_features().values():
try:
feature.run()
except NotImplementedError:
logging.warn('Can\'t run Feature \'{}\''.format(feature.__class__.__name__))
def _get_enabled_features(self):
''' Get all features which are enabled in config '''
features = []
for feature in self.config['features']:
if self.config['features'][feature]['enable']:
features.append(feature)
return features
def _import_features(self):
''' Import only the needed features '''
feature_objects = {}
for feature in self._get_enabled_features():
feature_objects.update({
feature: getattr(features, feature)(self.config['features'][feature])
})
return feature_objects
| '''
Created on Nov 9, 2014
@author: fechnert
'''
import yaml
import logging
import features
class Configuration(dict):
''' Read and provide the yaml config '''
def __init__(self, path):
''' Initialize the file '''
with open(path, 'r') as f:
self.update(yaml.load(f))
class Supervisor(object):
''' Guide the different features to do their work '''
def __init__(self):
''' Initialize the Config '''
self.config = Configuration('config.yml')
def execute(self):
for feature in self._import_features().values():
try:
feature.run()
except NotImplementedError:
logging.warn('Can\'t run Feature \'{}\''.format(feature.__class__.__name__))
def _get_enabled_features(self):
''' Get all features which are enabled in config '''
features = []
for feature in self.config['features']:
if self.config['features'][feature]['enable']:
features.append(feature)
return features
def _import_features(self):
''' Import only the needed features '''
feature_objects = {}
for feature in self._get_enabled_features():
feature_objects.update({
feature: getattr(features, feature)(self.config['features'][feature])
})
return feature_objects
class Client(object):
''' Represents the client '''
def __init__(self, **kwargs):
''' Fill the object dynamically with client attributes got from telnet '''
for key, value in kwargs.items():
setattr(self, key, value)
class Channel(object):
''' Represents the Channel '''
def __init__(self, **kwargs):
''' Fill the object dynamically with channel attributes got from telnet '''
for key, value in kwargs.items():
setattr(self, key, value)
| Add client and channel models | Add client and channel models
| Python | mit | HWDexperte/ts3observer | ---
+++
@@ -48,3 +48,21 @@
feature: getattr(features, feature)(self.config['features'][feature])
})
return feature_objects
+
+
+class Client(object):
+ ''' Represents the client '''
+
+ def __init__(self, **kwargs):
+ ''' Fill the object dynamically with client attributes got from telnet '''
+ for key, value in kwargs.items():
+ setattr(self, key, value)
+
+
+class Channel(object):
+ ''' Represents the Channel '''
+
+ def __init__(self, **kwargs):
+ ''' Fill the object dynamically with channel attributes got from telnet '''
+ for key, value in kwargs.items():
+ setattr(self, key, value) |
1cab84d3f3726df2a7cfe4e5ad8efee81051c73e | tests/test_patched_stream.py | tests/test_patched_stream.py | import nose
import StringIO
import cle
def test_patched_stream():
stream = StringIO.StringIO('0123456789abcdef')
stream1 = cle.PatchedStream(stream, [(2, 'AA')])
stream1.seek(0)
nose.tools.assert_equal(stream1.read(), '01AA456789abcdef')
stream2 = cle.PatchedStream(stream, [(2, 'AA')])
stream2.seek(0)
nose.tools.assert_equal(stream2.read(3), '01A')
stream3 = cle.PatchedStream(stream, [(2, 'AA')])
stream3.seek(3)
nose.tools.assert_equal(stream3.read(3), 'A45')
stream4 = cle.PatchedStream(stream, [(-1, 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA')])
stream4.seek(0)
nose.tools.assert_equal(stream4.read(), 'A'*0x10)
| import nose
import StringIO
import os
import cle
tests_path = os.path.join(os.path.dirname(__file__), '..', '..', 'binaries', 'tests')
def test_patched_stream():
stream = StringIO.StringIO('0123456789abcdef')
stream1 = cle.PatchedStream(stream, [(2, 'AA')])
stream1.seek(0)
nose.tools.assert_equal(stream1.read(), '01AA456789abcdef')
stream2 = cle.PatchedStream(stream, [(2, 'AA')])
stream2.seek(0)
nose.tools.assert_equal(stream2.read(3), '01A')
stream3 = cle.PatchedStream(stream, [(2, 'AA')])
stream3.seek(3)
nose.tools.assert_equal(stream3.read(3), 'A45')
stream4 = cle.PatchedStream(stream, [(-1, 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA')])
stream4.seek(0)
nose.tools.assert_equal(stream4.read(), 'A'*0x10)
def test_malformed_sections():
ld = cle.Loader(os.path.join(tests_path, 'i386', 'oxfoo1m3'))
nose.tools.assert_equal(len(ld.main_object.segments), 1)
nose.tools.assert_equal(len(ld.main_object.sections), 0)
| Add tests for loading binaries with malformed sections | Add tests for loading binaries with malformed sections
| Python | bsd-2-clause | angr/cle | ---
+++
@@ -1,7 +1,10 @@
import nose
import StringIO
+import os
import cle
+
+tests_path = os.path.join(os.path.dirname(__file__), '..', '..', 'binaries', 'tests')
def test_patched_stream():
stream = StringIO.StringIO('0123456789abcdef')
@@ -21,3 +24,8 @@
stream4 = cle.PatchedStream(stream, [(-1, 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA')])
stream4.seek(0)
nose.tools.assert_equal(stream4.read(), 'A'*0x10)
+
+def test_malformed_sections():
+ ld = cle.Loader(os.path.join(tests_path, 'i386', 'oxfoo1m3'))
+ nose.tools.assert_equal(len(ld.main_object.segments), 1)
+ nose.tools.assert_equal(len(ld.main_object.sections), 0) |
de2a2e296ba1cb60a333fc52fef39d260e5ad4e5 | tests/basics/unary_op.py | tests/basics/unary_op.py | x = 1
print(+x)
print(-x)
print(~x)
print(not None)
print(not False)
print(not True)
print(not 0)
print(not 1)
print(not -1)
print(not ())
print(not (1,))
print(not [])
print(not [1,])
print(not {})
print(not {1:1})
| x = 1
print(+x)
print(-x)
print(~x)
print(not None)
print(not False)
print(not True)
print(not 0)
print(not 1)
print(not -1)
print(not ())
print(not (1,))
print(not [])
print(not [1,])
print(not {})
print(not {1:1})
# check user instance
class A: pass
print(not A())
# check user instances derived from builtins
class B(int): pass
print(not B())
class C(list): pass
print(not C())
| Add test for "not" of a user defined class. | tests: Add test for "not" of a user defined class.
| Python | mit | lowRISC/micropython,Timmenem/micropython,lowRISC/micropython,drrk/micropython,PappaPeppar/micropython,ernesto-g/micropython,deshipu/micropython,pozetroninc/micropython,AriZuu/micropython,Peetz0r/micropython-esp32,AriZuu/micropython,jmarcelino/pycom-micropython,redbear/micropython,micropython/micropython-esp32,turbinenreiter/micropython,tuc-osg/micropython,redbear/micropython,ganshun666/micropython,alex-robbins/micropython,henriknelson/micropython,Timmenem/micropython,MrSurly/micropython,Peetz0r/micropython-esp32,pfalcon/micropython,MrSurly/micropython-esp32,dinau/micropython,jmarcelino/pycom-micropython,adafruit/micropython,galenhz/micropython,mianos/micropython,HenrikSolver/micropython,SHA2017-badge/micropython-esp32,puuu/micropython,tuc-osg/micropython,galenhz/micropython,puuu/micropython,danicampora/micropython,misterdanb/micropython,misterdanb/micropython,dinau/micropython,dxxb/micropython,TDAbboud/micropython,SHA2017-badge/micropython-esp32,adafruit/micropython,ernesto-g/micropython,alex-march/micropython,swegener/micropython,MrSurly/micropython-esp32,lowRISC/micropython,tuc-osg/micropython,MrSurly/micropython-esp32,pramasoul/micropython,dxxb/micropython,supergis/micropython,toolmacher/micropython,infinnovation/micropython,AriZuu/micropython,chrisdearman/micropython,adamkh/micropython,praemdonck/micropython,adafruit/circuitpython,torwag/micropython,dmazzella/micropython,tralamazza/micropython,alex-march/micropython,dxxb/micropython,alex-march/micropython,bvernoux/micropython,trezor/micropython,galenhz/micropython,selste/micropython,pramasoul/micropython,pramasoul/micropython,swegener/micropython,misterdanb/micropython,martinribelotta/micropython,galenhz/micropython,EcmaXp/micropython,supergis/micropython,MrSurly/micropython-esp32,cwyark/micropython,adafruit/circuitpython,kerneltask/micropython,blazewicz/micropython,mianos/micropython,oopy/micropython,pfalcon/micropython,hiway/micropython,selste/micropython,misterdanb/micropython,mpalomer/microp
ython,jmarcelino/pycom-micropython,deshipu/micropython,hosaka/micropython,adamkh/micropython,ganshun666/micropython,adafruit/micropython,cwyark/micropython,MrSurly/micropython-esp32,MrSurly/micropython,drrk/micropython,martinribelotta/micropython,ryannathans/micropython,bvernoux/micropython,supergis/micropython,danicampora/micropython,tobbad/micropython,turbinenreiter/micropython,adamkh/micropython,pozetroninc/micropython,selste/micropython,selste/micropython,tobbad/micropython,pozetroninc/micropython,adamkh/micropython,oopy/micropython,redbear/micropython,kerneltask/micropython,trezor/micropython,pramasoul/micropython,trezor/micropython,dxxb/micropython,dinau/micropython,dmazzella/micropython,kerneltask/micropython,ryannathans/micropython,toolmacher/micropython,drrk/micropython,turbinenreiter/micropython,xhat/micropython,emfcamp/micropython,blazewicz/micropython,lowRISC/micropython,tralamazza/micropython,tuc-osg/micropython,toolmacher/micropython,hosaka/micropython,ryannathans/micropython,PappaPeppar/micropython,torwag/micropython,emfcamp/micropython,supergis/micropython,martinribelotta/micropython,ryannathans/micropython,swegener/micropython,Peetz0r/micropython-esp32,galenhz/micropython,MrSurly/micropython,TDAbboud/micropython,deshipu/micropython,pozetroninc/micropython,infinnovation/micropython,micropython/micropython-esp32,oopy/micropython,hosaka/micropython,pozetroninc/micropython,bvernoux/micropython,mpalomer/micropython,chrisdearman/micropython,tralamazza/micropython,dinau/micropython,adafruit/circuitpython,puuu/micropython,chrisdearman/micropython,TDAbboud/micropython,deshipu/micropython,deshipu/micropython,torwag/micropython,adafruit/micropython,dmazzella/micropython,xhat/micropython,micropython/micropython-esp32,infinnovation/micropython,lowRISC/micropython,xhat/micropython,mhoffma/micropython,torwag/micropython,HenrikSolver/micropython,ganshun666/micropython,matthewelse/micropython,SHA2017-badge/micropython-esp32,danicampora/micropython,praemdonck/micropy
thon,tobbad/micropython,adafruit/circuitpython,torwag/micropython,MrSurly/micropython,AriZuu/micropython,PappaPeppar/micropython,matthewelse/micropython,mpalomer/micropython,mpalomer/micropython,tobbad/micropython,redbear/micropython,Timmenem/micropython,Timmenem/micropython,praemdonck/micropython,danicampora/micropython,emfcamp/micropython,henriknelson/micropython,trezor/micropython,SHA2017-badge/micropython-esp32,ganshun666/micropython,cwyark/micropython,bvernoux/micropython,Timmenem/micropython,alex-robbins/micropython,micropython/micropython-esp32,hiway/micropython,EcmaXp/micropython,HenrikSolver/micropython,matthewelse/micropython,drrk/micropython,blazewicz/micropython,hosaka/micropython,dxxb/micropython,hiway/micropython,turbinenreiter/micropython,HenrikSolver/micropython,MrSurly/micropython,adafruit/micropython,alex-robbins/micropython,selste/micropython,ernesto-g/micropython,cwyark/micropython,emfcamp/micropython,Peetz0r/micropython-esp32,infinnovation/micropython,puuu/micropython,Peetz0r/micropython-esp32,chrisdearman/micropython,alex-march/micropython,drrk/micropython,praemdonck/micropython,HenrikSolver/micropython,martinribelotta/micropython,infinnovation/micropython,dmazzella/micropython,kerneltask/micropython,tobbad/micropython,mhoffma/micropython,matthewelse/micropython,dinau/micropython,SHA2017-badge/micropython-esp32,ernesto-g/micropython,mianos/micropython,mianos/micropython,hiway/micropython,swegener/micropython,mhoffma/micropython,henriknelson/micropython,PappaPeppar/micropython,TDAbboud/micropython,EcmaXp/micropython,cwyark/micropython,emfcamp/micropython,mhoffma/micropython,mpalomer/micropython,swegener/micropython,xhat/micropython,tralamazza/micropython,adamkh/micropython,blazewicz/micropython,henriknelson/micropython,oopy/micropython,TDAbboud/micropython,alex-robbins/micropython,toolmacher/micropython,pfalcon/micropython,trezor/micropython,EcmaXp/micropython,pfalcon/micropython,pramasoul/micropython,adafruit/circuitpython,kerneltask/micropytho
n,mhoffma/micropython,matthewelse/micropython,praemdonck/micropython,henriknelson/micropython,hiway/micropython,ganshun666/micropython,puuu/micropython,micropython/micropython-esp32,ryannathans/micropython,toolmacher/micropython,oopy/micropython,chrisdearman/micropython,xhat/micropython,bvernoux/micropython,matthewelse/micropython,redbear/micropython,jmarcelino/pycom-micropython,jmarcelino/pycom-micropython,supergis/micropython,blazewicz/micropython,pfalcon/micropython,turbinenreiter/micropython,hosaka/micropython,adafruit/circuitpython,danicampora/micropython,EcmaXp/micropython,mianos/micropython,ernesto-g/micropython,AriZuu/micropython,alex-robbins/micropython,tuc-osg/micropython,PappaPeppar/micropython,alex-march/micropython,misterdanb/micropython,martinribelotta/micropython | ---
+++
@@ -15,3 +15,13 @@
print(not [1,])
print(not {})
print(not {1:1})
+
+# check user instance
+class A: pass
+print(not A())
+
+# check user instances derived from builtins
+class B(int): pass
+print(not B())
+class C(list): pass
+print(not C()) |
781a65e709829842241a4f7f328f3bd46b6a5a53 | allmychanges/settings/development.py | allmychanges/settings/development.py | import os
from .default import * # nopep8
DEBUG = True
TEMPLATE_DEBUG = DEBUG
if DEBUG:
INSTALLED_APPS += (
'debug_toolbar',
)
# debug toolbar settings
MIDDLEWARE_CLASSES += (
'debug_toolbar.middleware.DebugToolbarMiddleware',
)
DEBUG_TOOLBAR_PANELS = (
'debug_toolbar.panels.versions.VersionsPanel',
'debug_toolbar.panels.timer.TimerPanel',
'debug_toolbar.panels.settings.SettingsPanel',
'debug_toolbar.panels.headers.HeadersPanel',
'debug_toolbar.panels.request.RequestPanel',
'debug_toolbar.panels.templates.TemplatesPanel',
'debug_toolbar.panels.sql.SQLPanel',
'debug_toolbar.panels.signals.SignalsPanel',
'debug_toolbar.panels.logging.LoggingPanel',
)
DEBUG_TOOLBAR_CONFIG = {
'INTERCEPT_REDIRECTS': False,
'SHOW_TOOLBAR_CALLBACK': 'allmychanges.utils.show_debug_toolbar'
}
METRIKA_ID = '24627125'
ANALYTICS_ID = 'UA-49927178-2'
LOG_FILENAME = '/var/log/allmychanges/django-' + CURRENT_USER + '.log'
init_logging(LOG_FILENAME)
if not os.path.exists(TEMP_DIR):
os.makedirs(TEMP_DIR)
ALLOWED_HOSTS = ['localhost', 'art.dev.allmychanges.com']
| import os
from .default import * # nopep8
DEBUG = True
TEMPLATE_DEBUG = DEBUG
if DEBUG:
INSTALLED_APPS += (
'debug_toolbar',
)
# debug toolbar settings
MIDDLEWARE_CLASSES += (
'debug_toolbar.middleware.DebugToolbarMiddleware',
)
DEBUG_TOOLBAR_PANELS = (
'debug_toolbar.panels.versions.VersionsPanel',
'debug_toolbar.panels.timer.TimerPanel',
'debug_toolbar.panels.settings.SettingsPanel',
'debug_toolbar.panels.headers.HeadersPanel',
'debug_toolbar.panels.request.RequestPanel',
'debug_toolbar.panels.templates.TemplatesPanel',
'debug_toolbar.panels.sql.SQLPanel',
'debug_toolbar.panels.signals.SignalsPanel',
'debug_toolbar.panels.logging.LoggingPanel',
)
DEBUG_TOOLBAR_CONFIG = {
'SHOW_TOOLBAR_CALLBACK': 'allmychanges.utils.show_debug_toolbar'
}
METRIKA_ID = '24627125'
ANALYTICS_ID = 'UA-49927178-2'
LOG_FILENAME = '/var/log/allmychanges/django-' + CURRENT_USER + '.log'
init_logging(LOG_FILENAME)
if not os.path.exists(TEMP_DIR):
os.makedirs(TEMP_DIR)
ALLOWED_HOSTS = ['localhost', 'art.dev.allmychanges.com']
| Remove warning from debug toolbar. | Remove warning from debug toolbar.
| Python | bsd-2-clause | AllMyChanges/allmychanges.com,AllMyChanges/allmychanges.com,AllMyChanges/allmychanges.com,AllMyChanges/allmychanges.com | ---
+++
@@ -26,7 +26,6 @@
)
DEBUG_TOOLBAR_CONFIG = {
- 'INTERCEPT_REDIRECTS': False,
'SHOW_TOOLBAR_CALLBACK': 'allmychanges.utils.show_debug_toolbar'
}
|
3bbfc62cb194c1c68ce24ffe9fa0732a0f00fd9c | test/664-raceway.py | test/664-raceway.py | # https://www.openstreetmap.org/way/28825404
assert_has_feature(
16, 10476, 25242, 'roads',
{ 'id': 28825404, 'kind': 'minor_road', 'highway': 'raceway' })
# https://www.openstreetmap.org/way/59440900
# Thunderoad Speedway Go-carts
assert_has_feature(
16, 10516, 25247, 'roads',
{ 'id': 59440900, 'kind': 'minor_road', 'highway': 'raceway' })
| # https://www.openstreetmap.org/way/28825404
assert_has_feature(
16, 10476, 25242, 'roads',
{ 'id': 28825404, 'kind': 'minor_road', 'highway': 'raceway', 'sort_key': 375 })
# https://www.openstreetmap.org/way/59440900
# Thunderoad Speedway Go-carts
assert_has_feature(
16, 10516, 25247, 'roads',
{ 'id': 59440900, 'kind': 'minor_road', 'highway': 'raceway', 'sort_key': 375 })
| Add sort_key assertion to raceway tests | Add sort_key assertion to raceway tests
| Python | mit | mapzen/vector-datasource,mapzen/vector-datasource,mapzen/vector-datasource | ---
+++
@@ -1,10 +1,10 @@
# https://www.openstreetmap.org/way/28825404
assert_has_feature(
16, 10476, 25242, 'roads',
- { 'id': 28825404, 'kind': 'minor_road', 'highway': 'raceway' })
+ { 'id': 28825404, 'kind': 'minor_road', 'highway': 'raceway', 'sort_key': 375 })
# https://www.openstreetmap.org/way/59440900
# Thunderoad Speedway Go-carts
assert_has_feature(
16, 10516, 25247, 'roads',
- { 'id': 59440900, 'kind': 'minor_road', 'highway': 'raceway' })
+ { 'id': 59440900, 'kind': 'minor_road', 'highway': 'raceway', 'sort_key': 375 }) |
1e6a424e2669441e6910d3a2803bc139df16dd51 | new_validity.py | new_validity.py | import pandas as pd
import numpy as np
import operator
from sys import argv
import os
def extract( file_name ):
with open(file_name) as f:
for i,line in enumerate(f,1):
if "SCN" in line:
return i
def main(lta_name):
os.system('ltahdr -i'+ lta_name + '> lta_file.txt')
dictionary = {}
#lta_file = str(argv[1])
skipped_rows = extract('lta_file.txt')-1
header = pd.read_csv('lta_file.txt',skiprows=skipped_rows,delimiter=r"\s+")
flux = list(set(header["OBJECT"]))
#print flux
header['Nrecs'] = header['Nrecs'].astype(float)
for i in flux :
temp = header.loc[header.OBJECT==i,'Nrecs'].values
temp = np.mean(temp)
dictionary[i]=temp
#print dictionary
source = max(dictionary.iteritems(),key=operator.itemgetter(1))[0]
return source
| import pandas as pd
import numpy as np
import operator
from sys import argv
import os
def extract( file_name ):
with open(file_name) as f:
for i,line in enumerate(f,1):
if "SCN" in line:
return i
def main():
lta_file = str(argv[1])
calibrator_list = ['3C48', '3C147', '3C286']
os.system('ltahdr -i'+ lta_file + '> lta_file.txt')
dictionary = {}
try:
skipped_rows = extract('lta_file.txt')-1
header = pd.read_csv('lta_file.txt',skiprows=skipped_rows,delimiter=r"\s+")
flux = list(set(header["OBJECT"]))
#print flux
header['Nrecs'] = header['Nrecs'].astype(float)
for i in flux :
temp = header.loc[header.OBJECT==i,'Nrecs'].values
temp = np.mean(temp)
dictionary[i]=temp
print dictionary
#Sort the list of targets according to the number of recordings
list_of_targets = [ i for i,j in sorted(dictionary.iteritems(),key=operator.itemgetter(1), reverse=True)]
source = max(list_of_targets)
for i in len(flux):
if source in calibrator_list:
continue
else:
return source
except:
pass
print main()
| Add scratch file for testing new validity | Add scratch file for testing new validity
| Python | mit | NCRA-TIFR/gadpu,NCRA-TIFR/gadpu | ---
+++
@@ -9,23 +9,36 @@
for i,line in enumerate(f,1):
if "SCN" in line:
return i
-def main(lta_name):
- os.system('ltahdr -i'+ lta_name + '> lta_file.txt')
+
+def main():
+ lta_file = str(argv[1])
+ calibrator_list = ['3C48', '3C147', '3C286']
+ os.system('ltahdr -i'+ lta_file + '> lta_file.txt')
dictionary = {}
- #lta_file = str(argv[1])
- skipped_rows = extract('lta_file.txt')-1
+ try:
+ skipped_rows = extract('lta_file.txt')-1
- header = pd.read_csv('lta_file.txt',skiprows=skipped_rows,delimiter=r"\s+")
- flux = list(set(header["OBJECT"]))
- #print flux
+ header = pd.read_csv('lta_file.txt',skiprows=skipped_rows,delimiter=r"\s+")
+ flux = list(set(header["OBJECT"]))
+ #print flux
- header['Nrecs'] = header['Nrecs'].astype(float)
+ header['Nrecs'] = header['Nrecs'].astype(float)
- for i in flux :
- temp = header.loc[header.OBJECT==i,'Nrecs'].values
- temp = np.mean(temp)
- dictionary[i]=temp
- #print dictionary
+ for i in flux :
+ temp = header.loc[header.OBJECT==i,'Nrecs'].values
+ temp = np.mean(temp)
+ dictionary[i]=temp
+ print dictionary
+
+ #Sort the list of targets according to the number of recordings
+ list_of_targets = [ i for i,j in sorted(dictionary.iteritems(),key=operator.itemgetter(1), reverse=True)]
+ source = max(list_of_targets)
+ for i in len(flux):
+ if source in calibrator_list:
+ continue
+ else:
+ return source
+ except:
+ pass
- source = max(dictionary.iteritems(),key=operator.itemgetter(1))[0]
- return source
+print main() |
3ceb8bbcc6b5b43deae31a1c64331e86555eb601 | python/ql/test/library-tests/frameworks/cryptography/test_ec.py | python/ql/test/library-tests/frameworks/cryptography/test_ec.py | # see https://cryptography.io/en/latest/hazmat/primitives/asymmetric/rsa.html
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.hazmat.primitives import hashes
from cryptography.exceptions import InvalidSignature
private_key = ec.generate_private_key(curve=ec.SECP384R1()) # $ PublicKeyGeneration keySize=384
public_key = private_key.public_key()
HASH_ALGORITHM = hashes.SHA256()
# ------------------------------------------------------------------------------
# sign/verify
# ------------------------------------------------------------------------------
print("sign/verify")
SIGNATURE_ALGORITHM = ec.ECDSA(HASH_ALGORITHM)
message = b"message"
signature = private_key.sign(
message,
signature_algorithm=SIGNATURE_ALGORITHM,
)
print("signature={}".format(signature))
print()
public_key.verify(
signature, message, signature_algorithm=SIGNATURE_ALGORITHM
)
print("Signature verified (as expected)")
try:
public_key.verify(
signature, b"other message", signature_algorithm=SIGNATURE_ALGORITHM
)
raise Exception("Signature verified (unexpected)")
except InvalidSignature:
print("Signature mismatch (as expected)")
| # see https://cryptography.io/en/latest/hazmat/primitives/asymmetric/rsa.html
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.hazmat.primitives import hashes
from cryptography.exceptions import InvalidSignature
private_key = ec.generate_private_key(curve=ec.SECP384R1()) # $ PublicKeyGeneration keySize=384
private_key = ec.generate_private_key(curve=ec.SECP384R1) # $ MISSING: PublicKeyGeneration keySize=384
public_key = private_key.public_key()
HASH_ALGORITHM = hashes.SHA256()
# ------------------------------------------------------------------------------
# sign/verify
# ------------------------------------------------------------------------------
print("sign/verify")
SIGNATURE_ALGORITHM = ec.ECDSA(HASH_ALGORITHM)
message = b"message"
signature = private_key.sign(
message,
signature_algorithm=SIGNATURE_ALGORITHM,
)
print("signature={}".format(signature))
print()
public_key.verify(
signature, message, signature_algorithm=SIGNATURE_ALGORITHM
)
print("Signature verified (as expected)")
try:
public_key.verify(
signature, b"other message", signature_algorithm=SIGNATURE_ALGORITHM
)
raise Exception("Signature verified (unexpected)")
except InvalidSignature:
print("Signature mismatch (as expected)")
| Add cryptography test for EC | Python: Add cryptography test for EC
Apparently, passing in the class (without instantiating it) is allowed
| Python | mit | github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql | ---
+++
@@ -6,6 +6,7 @@
private_key = ec.generate_private_key(curve=ec.SECP384R1()) # $ PublicKeyGeneration keySize=384
+private_key = ec.generate_private_key(curve=ec.SECP384R1) # $ MISSING: PublicKeyGeneration keySize=384
public_key = private_key.public_key()
HASH_ALGORITHM = hashes.SHA256() |
8bccbe0fdb3d6770ecbbe28528628f10988145bd | kitchen/dashboard/graphs.py | kitchen/dashboard/graphs.py | import os
import pydot
from kitchen.settings import STATIC_ROOT
def generate_node_map(nodes):
"""Generates a graphviz nodemap"""
graph = pydot.Dot(graph_type='digraph')
graph_nodes = {}
for node in nodes:
label = node['name'] + "\n" + "\n".join(
[role for role in node['role'] if not role.startswith("env")])
node_el = pydot.Node(label, style="filled", fillcolor="red")
graph_nodes[node['name']] = node_el
graph.add_node(node_el)
for node in nodes:
for attr in node.keys():
if isinstance(node[attr], dict) and 'client_roles' in node[attr]:
for client_node in nodes:
if set.intersection(set(node[attr]['client_roles']),
set(client_node['roles'])):
graph.add_edge(pydot.Edge(
graph_nodes[client_node['name']],
graph_nodes[node['name']]))
keys = graph_nodes.keys()
graph.add_edge(pydot.Edge(graph_nodes[keys[3]], graph_nodes[keys[5]]))
graph.write_png(os.path.join(STATIC_ROOT, 'img', 'node_map.png'))
| import os
import pydot
from kitchen.settings import STATIC_ROOT, REPO
def generate_node_map(nodes):
"""Generates a graphviz nodemap"""
graph = pydot.Dot(graph_type='digraph')
graph_nodes = {}
for node in nodes:
label = node['name'] + "\n" + "\n".join(
[role for role in node['role'] \
if not role.startswith(REPO['ENV_PREFIX'])])
node_el = pydot.Node(label, style="filled", fillcolor="red")
graph_nodes[node['name']] = node_el
graph.add_node(node_el)
for node in nodes:
for attr in node.keys():
if isinstance(node[attr], dict) and 'client_roles' in node[attr]:
for client_node in nodes:
if set.intersection(set(node[attr]['client_roles']),
set(client_node['roles'])):
graph.add_edge(pydot.Edge(
graph_nodes[client_node['name']],
graph_nodes[node['name']]))
keys = graph_nodes.keys()
graph.add_edge(pydot.Edge(graph_nodes[keys[3]], graph_nodes[keys[5]]))
graph.write_png(os.path.join(STATIC_ROOT, 'img', 'node_map.png'))
| Use the env prefix setting | Use the env prefix setting
| Python | apache-2.0 | edelight/kitchen,edelight/kitchen,edelight/kitchen,edelight/kitchen | ---
+++
@@ -1,7 +1,7 @@
import os
import pydot
-from kitchen.settings import STATIC_ROOT
+from kitchen.settings import STATIC_ROOT, REPO
def generate_node_map(nodes):
@@ -10,7 +10,8 @@
graph_nodes = {}
for node in nodes:
label = node['name'] + "\n" + "\n".join(
- [role for role in node['role'] if not role.startswith("env")])
+ [role for role in node['role'] \
+ if not role.startswith(REPO['ENV_PREFIX'])])
node_el = pydot.Node(label, style="filled", fillcolor="red")
graph_nodes[node['name']] = node_el
graph.add_node(node_el) |
9401ce692e8b0362e387cb5fb042f530edd2c0b3 | toolkit/models/models.py | toolkit/models/models.py | import arrow
from django.conf import settings
from django.db import models
from .mixins import ModelPermissionsMixin
class CCEModel(ModelPermissionsMixin, models.Model):
"""
Abstract base model with permissions mixin.
"""
class Meta:
abstract = True
class CCEAuditModel(CCEModel):
"""
Abstract model with fields for the user and timestamp of a row's creation
and last update.
.. note:: - Inherits from **CCEModel**
- Requires **django-cuser** package to determine current user
:tags:
django-cuser
"""
from cuser.fields import CurrentUserField
last_updated_by = CurrentUserField(
related_name='%(app_label)s_%(class)s_last_updated')
last_updated_at = models.DateTimeField(auto_now=True)
created_by = CurrentUserField(add_only=True,
related_name='%(app_label)s_%(class)s_'
'last_created')
created_at = models.DateTimeField(auto_now_add=True)
class Meta:
abstract = True
@property
def tz_last_updated_at(self):
if not hasattr(settings, 'TIME_ZONE'):
return arrow.get(self.last_updated_at).datetime
return arrow.get(self.last_updated_at, settings.TIME_ZONE).datetime
@property
def tz_created_at(self):
if not hasattr(settings, 'TIME_ZONE'):
return arrow.get(self.created_at).datetime
return arrow.get(self.created_at, settings.TIME_ZONE).datetime
| from django.db import models
from django.utils.timezone import localtime
from .mixins import ModelPermissionsMixin
class CCEModel(ModelPermissionsMixin, models.Model):
"""
Abstract base model with permissions mixin.
"""
class Meta:
abstract = True
class CCEAuditModel(CCEModel):
"""
Abstract model with fields for the user and timestamp of a row's creation
and last update.
.. note:: - Inherits from **CCEModel**
- Requires **django-cuser** package to determine current user
:tags:
django-cuser
"""
from cuser.fields import CurrentUserField
last_updated_by = CurrentUserField(related_name='%(app_label)s_%(class)s_last_updated')
last_updated_at = models.DateTimeField(auto_now=True)
created_by = CurrentUserField(add_only=True, related_name='%(app_label)s_%(class)s_last_created')
created_at = models.DateTimeField(auto_now_add=True)
class Meta:
abstract = True
@property
def tz_last_updated_at(self):
return localtime(self.last_updated_at)
@property
def tz_created_at(self):
return localtime(self.created_at)
| Update Timezone aware values for CCEAuditModel | Update Timezone aware values for CCEAuditModel
| Python | bsd-3-clause | cceit/cce-toolkit,cceit/cce-toolkit,cceit/cce-toolkit | ---
+++
@@ -1,6 +1,6 @@
-import arrow
-from django.conf import settings
from django.db import models
+from django.utils.timezone import localtime
+
from .mixins import ModelPermissionsMixin
@@ -25,12 +25,9 @@
"""
from cuser.fields import CurrentUserField
- last_updated_by = CurrentUserField(
- related_name='%(app_label)s_%(class)s_last_updated')
+ last_updated_by = CurrentUserField(related_name='%(app_label)s_%(class)s_last_updated')
last_updated_at = models.DateTimeField(auto_now=True)
- created_by = CurrentUserField(add_only=True,
- related_name='%(app_label)s_%(class)s_'
- 'last_created')
+ created_by = CurrentUserField(add_only=True, related_name='%(app_label)s_%(class)s_last_created')
created_at = models.DateTimeField(auto_now_add=True)
class Meta:
@@ -38,12 +35,8 @@
@property
def tz_last_updated_at(self):
- if not hasattr(settings, 'TIME_ZONE'):
- return arrow.get(self.last_updated_at).datetime
- return arrow.get(self.last_updated_at, settings.TIME_ZONE).datetime
+ return localtime(self.last_updated_at)
@property
def tz_created_at(self):
- if not hasattr(settings, 'TIME_ZONE'):
- return arrow.get(self.created_at).datetime
- return arrow.get(self.created_at, settings.TIME_ZONE).datetime
+ return localtime(self.created_at) |
0958ec9188bc2017be576de62911e76247cbe45f | scikits/gpu/tests/test_fbo.py | scikits/gpu/tests/test_fbo.py | from nose.tools import *
from scikits.gpu.fbo import *
from pyglet.gl import *
class TestFramebuffer(object):
def create(self, x, y, colours, dtype):
fbo = Framebuffer(x, y, bands=colours, dtype=dtype)
fbo.bind()
fbo.unbind()
fbo.delete()
def test_creation(self):
fbo = Framebuffer(64, 64)
for dtype in [gl.GL_UNSIGNED_BYTE, gl.GL_BYTE,
gl.GL_INT, gl.GL_UNSIGNED_INT,
gl.GL_FLOAT]:
for bands in [1, 2, 3, 4]:
yield self.create, 16, 16, bands, dtype
| from nose.tools import *
from scikits.gpu.fbo import *
from pyglet.gl import *
class TestFramebuffer(object):
def create(self, x, y, colours, dtype):
fbo = Framebuffer(x, y, bands=colours, dtype=dtype)
fbo.bind()
fbo.unbind()
fbo.delete()
def test_creation(self):
fbo = Framebuffer(64, 64)
for dtype in [gl.GL_UNSIGNED_BYTE, gl.GL_BYTE,
gl.GL_INT, gl.GL_UNSIGNED_INT,
gl.GL_FLOAT]:
for bands in [1, 2, 3, 4]:
yield self.create, 16, 16, bands, dtype
def test_bind_deleted(self):
fbo = Framebuffer(32, 32)
fbo.delete()
assert_raises(RuntimeError, fbo.bind)
| Test that framebuffer can't be bound after deletion. | Test that framebuffer can't be bound after deletion.
| Python | mit | certik/scikits.gpu,stefanv/scikits.gpu | ---
+++
@@ -17,3 +17,8 @@
gl.GL_FLOAT]:
for bands in [1, 2, 3, 4]:
yield self.create, 16, 16, bands, dtype
+
+ def test_bind_deleted(self):
+ fbo = Framebuffer(32, 32)
+ fbo.delete()
+ assert_raises(RuntimeError, fbo.bind) |
4eada6970d72b3863104790229286edf8d17720c | accelerator/tests/contexts/user_role_context.py | accelerator/tests/contexts/user_role_context.py | from builtins import object
from accelerator.tests.factories import (
ExpertFactory,
ProgramFactory,
ProgramRoleFactory,
ProgramRoleGrantFactory,
UserRoleFactory,
)
class UserRoleContext(object):
def __init__(self, user_role_name, program=None, user=None):
if user and not program:
self.program = user.get_profile().current_program
else:
self.program = program or ProgramFactory()
self.user = (user or
ExpertFactory(profile__current_program=self.program))
self.user_role = UserRoleFactory(name=user_role_name)
self.program_role = ProgramRoleFactory(user_role=self.user_role,
program=self.program)
self.program_role_grant = ProgramRoleGrantFactory(
person=self.user,
program_role=self.program_role)
| from builtins import object
from accelerator.tests.factories import (
ExpertFactory,
ProgramFactory,
ProgramRoleFactory,
ProgramRoleGrantFactory,
UserRoleFactory,
)
from accelerator.models import UserRole
class UserRoleContext(object):
def __init__(self, user_role_name, program=None, user=None):
if user and not program:
self.program = user.get_profile().current_program
else:
self.program = program or ProgramFactory()
self.user = (user or
ExpertFactory(profile__current_program=self.program))
self.user_role = _user_role_for_name(user_role_name)
self.program_role = ProgramRoleFactory(user_role=self.user_role,
program=self.program)
self.program_role_grant = ProgramRoleGrantFactory(
person=self.user,
program_role=self.program_role)
def _user_role_for_name(user_role_name):
return (UserRole.objects.filter(name=user_role_name).first() or
UserRoleFactory(name=user_role_name))
| Make UserRoleContext safe to use | [AC-7397] Make UserRoleContext safe to use
| Python | mit | masschallenge/django-accelerator,masschallenge/django-accelerator | ---
+++
@@ -7,6 +7,7 @@
ProgramRoleGrantFactory,
UserRoleFactory,
)
+from accelerator.models import UserRole
class UserRoleContext(object):
@@ -18,9 +19,15 @@
self.program = program or ProgramFactory()
self.user = (user or
ExpertFactory(profile__current_program=self.program))
- self.user_role = UserRoleFactory(name=user_role_name)
+ self.user_role = _user_role_for_name(user_role_name)
+
self.program_role = ProgramRoleFactory(user_role=self.user_role,
program=self.program)
self.program_role_grant = ProgramRoleGrantFactory(
person=self.user,
program_role=self.program_role)
+
+def _user_role_for_name(user_role_name):
+
+ return (UserRole.objects.filter(name=user_role_name).first() or
+ UserRoleFactory(name=user_role_name)) |
85245f55fe430bfcf4946d2501394dad813a6591 | core/modules/html_has_same_domain.py | core/modules/html_has_same_domain.py | from bs4 import BeautifulSoup as bs
from get_root_domain import get_root_domain
def html_has_same_domain(url, resp):
mod = 'html_has_same_domain'
cnt = 0
root = get_root_domain(url)
current_page = bs(resp.text, 'lxml')
for tag in current_page.find_all('a'):
if tag.get('href'):
in_url = get_root_domain(tag.get('href'))
if in_url == root:
cnt += 1
if cnt >= 1:
return "S", mod
return "U", mod
| from bs4 import BeautifulSoup as bs
from get_root_domain import get_root_domain
def html_has_same_domain(url, resp):
mod = 'html_has_same_domain'
cnt = 0
root = get_root_domain(url)
current_page = bs(resp.text, 'lxml')
for tag in current_page.find_all('a'):
if tag.get('href'):
in_url = get_root_domain(tag.get('href'))
if in_url == root:
cnt += 1
if ("naver" in tag.text.lower()):
return "P", mod
if cnt >= 1:
return "S", mod
return "U", mod
| Add logic to check for cross-site anchor tags to naver | Add logic to check for cross-site anchor tags to naver
| Python | bsd-2-clause | mjkim610/phishing-detection,jaeyung1001/phishing_site_detection | ---
+++
@@ -1,6 +1,5 @@
from bs4 import BeautifulSoup as bs
from get_root_domain import get_root_domain
-
def html_has_same_domain(url, resp):
mod = 'html_has_same_domain'
@@ -13,6 +12,8 @@
in_url = get_root_domain(tag.get('href'))
if in_url == root:
cnt += 1
+ if ("naver" in tag.text.lower()):
+ return "P", mod
if cnt >= 1:
return "S", mod |
c0455de3061ba049ad9d501b85118f8ef4cd673c | peakachulib/tmm.py | peakachulib/tmm.py | import numpy as np
import pandas as pd
from rpy2.robjects import r, pandas2ri
pandas2ri.activate()
class TMM(object):
def __init__(self, count_df):
r("suppressMessages(library(edgeR))")
self.count_df = count_df
def calc_size_factors(self, method="TMM"):
# Convert pandas dataframe to R dataframe
r_dge = r.DGEList(self.count_df)
# Calculate normalization factors
r_dge = r.calcNormFactors(r_dge, method=method)
size_factors = (np.array(r_dge.rx2('samples').rx2("lib.size")) *
np.array(r_dge.rx2("samples").rx2("norm.factors")))
# convert to pandas series
size_factors = pd.Series(size_factors, index=self.count_df.columns)
# adjust size factors so that the maximum is 1.0
size_factors = size_factors/size_factors.max()
return size_factors
| import numpy as np
import pandas as pd
from rpy2.robjects import r, pandas2ri
pandas2ri.activate()
class TMM(object):
def __init__(self, count_df):
r("suppressMessages(library(edgeR))")
self.count_df = count_df
def calc_size_factors(self):
# Convert pandas dataframe to R dataframe
r_dge = r.DGEList(self.count_df)
# Calculate normalization factors
r_dge = r.calcNormFactors(r_dge, method="TMM")
size_factors = (np.array(r_dge.rx2('samples').rx2("lib.size")) *
np.array(r_dge.rx2("samples").rx2("norm.factors")))
# convert to pandas series
size_factors = pd.Series(size_factors, index=self.count_df.columns)
# adjust size factors so that the maximum is 1.0
size_factors = size_factors/size_factors.max()
return size_factors
| Fix TMM as normalization method from edgeR package | Fix TMM as normalization method from edgeR package
| Python | isc | tbischler/PEAKachu | ---
+++
@@ -9,12 +9,12 @@
def __init__(self, count_df):
r("suppressMessages(library(edgeR))")
self.count_df = count_df
-
- def calc_size_factors(self, method="TMM"):
+
+ def calc_size_factors(self):
# Convert pandas dataframe to R dataframe
r_dge = r.DGEList(self.count_df)
# Calculate normalization factors
- r_dge = r.calcNormFactors(r_dge, method=method)
+ r_dge = r.calcNormFactors(r_dge, method="TMM")
size_factors = (np.array(r_dge.rx2('samples').rx2("lib.size")) *
np.array(r_dge.rx2("samples").rx2("norm.factors")))
# convert to pandas series |
23f709e483bc7b0dfa15da8207ddc509715ebaa0 | petlib/__init__.py | petlib/__init__.py | # The petlib version
VERSION = '0.0.25' | # The petlib version
VERSION = '0.0.26'
def run_tests():
# These are only needed in case we test
import pytest
import os.path
import glob
# List all petlib files in the directory
petlib_dir = dir = os.path.dirname(os.path.realpath(__file__))
pyfiles = glob.glob(os.path.join(petlib_dir, '*.py'))
pyfiles = " ".join(pyfiles)
# Run the test suite
print("Directory: %s" % pyfiles)
res = pytest.main("-v -x %s" % pyfiles)
print("Result: %s" % res)
# Return exit result
return res | Make a petlib.run_tests() function that tests an install | Make a petlib.run_tests() function that tests an install
| Python | bsd-2-clause | gdanezis/petlib | ---
+++
@@ -1,2 +1,21 @@
# The petlib version
-VERSION = '0.0.25'
+VERSION = '0.0.26'
+
+def run_tests():
+ # These are only needed in case we test
+ import pytest
+ import os.path
+ import glob
+
+ # List all petlib files in the directory
+ petlib_dir = dir = os.path.dirname(os.path.realpath(__file__))
+ pyfiles = glob.glob(os.path.join(petlib_dir, '*.py'))
+ pyfiles = " ".join(pyfiles)
+
+ # Run the test suite
+ print("Directory: %s" % pyfiles)
+ res = pytest.main("-v -x %s" % pyfiles)
+ print("Result: %s" % res)
+
+ # Return exit result
+ return res |
4ca953b2210c469e5d09bb03c66cbe0839959e49 | libvirt/libvirt_list_vms.py | libvirt/libvirt_list_vms.py | #!/usr/bin/python
import libvirt
import sys
conn=libvirt.open("qemu:///system")
if conn == None:
print('Failed to open connection to qemu:///system', sys.stderr)
exit(1)
#vms = conn.listDefinedDomains()
#print '\n'.join(vms)
vms = conn.listAllDomains(0)
if len(vms) != 0:
for vm in vms:
print(vm.name())
else:
print('None')
conn.close()
exit(0)
| #!/usr/bin/python
import libvirt
import sys
def getConnection():
try:
conn=libvirt.open("qemu:///system")
return conn
except libvirt.libvirtError, e:
print e.get_error_message()
sys.exit(1)
def delConnection(conn):
try:
conn.close()
except:
print get_error_message()
sys.exit(1)
def getAllDomains(conn):
vms = conn.listAllDomains(0)
if len(vms) != 0:
for vm in vms:
print(vm.name())
else:
print('None')
if __name__ == '__main__':
conn = getConnection()
getAllDomains(conn)
delConnection(conn)
| Update script list domain libvirt | Update script list domain libvirt
| Python | apache-2.0 | skylost/heap,skylost/heap,skylost/heap | ---
+++
@@ -3,19 +3,30 @@
import libvirt
import sys
-conn=libvirt.open("qemu:///system")
-if conn == None:
- print('Failed to open connection to qemu:///system', sys.stderr)
- exit(1)
+def getConnection():
+ try:
+ conn=libvirt.open("qemu:///system")
+ return conn
+ except libvirt.libvirtError, e:
+ print e.get_error_message()
+ sys.exit(1)
-#vms = conn.listDefinedDomains()
-#print '\n'.join(vms)
-vms = conn.listAllDomains(0)
-if len(vms) != 0:
+def delConnection(conn):
+ try:
+ conn.close()
+ except:
+ print get_error_message()
+ sys.exit(1)
+
+def getAllDomains(conn):
+ vms = conn.listAllDomains(0)
+ if len(vms) != 0:
for vm in vms:
- print(vm.name())
-else:
+ print(vm.name())
+ else:
print('None')
-conn.close()
-exit(0)
+if __name__ == '__main__':
+ conn = getConnection()
+ getAllDomains(conn)
+ delConnection(conn) |
ecbb73f69d6481a94c86f1e0110c39800ebc7d07 | ledctl.py | ledctl.py | from flask import Flask
app = Flask(__name__)
@app.route("/")
def home():
return "Hello World!"
| from flask import Flask, request
import pigpio
app = Flask(__name__)
#rgb 22, 27, 17
#base teal 40 97 15
GPIO_RED = 22
GPIO_GREEN = 27
GPIO_BLUE = 17
def to_PWM_dutycycle(string):
try:
i = int(string)
if i < 0:
i = 0
elif i > 255:
i = 255
return i
except ValueError:
return 0
@app.route("/")
def home():
return "Hello World!"
@app.route("/color")
def set_color():
args = request.args.to_dict()
r = to_PWM_dutycycle(args['r'])
g = to_PWM_dutycycle(args['g'])
b = to_PWM_dutycycle(args['b'])
pi = pigpio.pi()
pi.set_PWM_dutycycle(GPIO_RED, r)
pi.set_PWM_dutycycle(GPIO_GREEN, g)
pi.set_PWM_dutycycle(GPIO_BLUE, b)
return str(r) + ' ' + str(g) + ' ' + str(b)
if __name__ == "__main__":
#app.run(debug=True)
app.run(host='0.0.0.0', port=8080, debug=True)
| Add API to set leds color | Add API to set leds color
| Python | mit | ayoy/ledctl | ---
+++
@@ -1,7 +1,42 @@
-from flask import Flask
+from flask import Flask, request
+import pigpio
app = Flask(__name__)
+#rgb 22, 27, 17
+#base teal 40 97 15
+
+GPIO_RED = 22
+GPIO_GREEN = 27
+GPIO_BLUE = 17
+
+def to_PWM_dutycycle(string):
+ try:
+ i = int(string)
+ if i < 0:
+ i = 0
+ elif i > 255:
+ i = 255
+ return i
+ except ValueError:
+ return 0
@app.route("/")
def home():
return "Hello World!"
+
+@app.route("/color")
+def set_color():
+ args = request.args.to_dict()
+ r = to_PWM_dutycycle(args['r'])
+ g = to_PWM_dutycycle(args['g'])
+ b = to_PWM_dutycycle(args['b'])
+ pi = pigpio.pi()
+ pi.set_PWM_dutycycle(GPIO_RED, r)
+ pi.set_PWM_dutycycle(GPIO_GREEN, g)
+ pi.set_PWM_dutycycle(GPIO_BLUE, b)
+ return str(r) + ' ' + str(g) + ' ' + str(b)
+
+if __name__ == "__main__":
+ #app.run(debug=True)
+ app.run(host='0.0.0.0', port=8080, debug=True)
+ |
e7a771011e93660c811effb8357df035bae8f9a6 | pentai/gui/settings_screen.py | pentai/gui/settings_screen.py | from kivy.uix.screenmanager import Screen
#from kivy.properties import *
from kivy.uix.settings import SettingSpacer
from my_setting import *
import audio as a_m
class SettingsScreen(Screen):
def __init__(self, *args, **kwargs):
super(SettingsScreen, self).__init__(*args, **kwargs)
def adjust_volumes(self, *args):
a_m.adjust_volumes()
def set_confirmation_popups(self, *args):
self.app.set_confirmation_popups()
| from kivy.uix.screenmanager import Screen
#from kivy.properties import *
from kivy.uix.settings import SettingSpacer
from my_setting import *
import audio as a_m
from kivy.uix.widget import Widget
class HSpacer(Widget):
pass
class VSpacer(Widget):
pass
class SettingsScreen(Screen):
def __init__(self, *args, **kwargs):
super(SettingsScreen, self).__init__(*args, **kwargs)
def adjust_volumes(self, *args):
a_m.adjust_volumes()
def set_confirmation_popups(self, *args):
self.app.set_confirmation_popups()
| Use our own spacer widgets | Use our own spacer widgets
| Python | mit | cropleyb/pentai,cropleyb/pentai,cropleyb/pentai | ---
+++
@@ -4,6 +4,14 @@
from my_setting import *
import audio as a_m
+
+from kivy.uix.widget import Widget
+
+class HSpacer(Widget):
+ pass
+
+class VSpacer(Widget):
+ pass
class SettingsScreen(Screen):
def __init__(self, *args, **kwargs): |
e42d38f9ad3f8b5229c9618e4dd9d6b371de89c5 | test/test_am_bmi.py | test/test_am_bmi.py | import unittest
import utils
import os
import sys
import shutil
TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
sys.path.append(os.path.join(TOPDIR, 'lib'))
sys.path.append(TOPDIR)
import cryptosite.am_bmi
class Tests(unittest.TestCase):
def test_get_sas(self):
"""Test get_sas() function"""
with utils.temporary_working_directory() as tmpdir:
shutil.copy(os.path.join(TOPDIR, 'test', 'input', 'test.pdb'),
'1abc.pdb')
out = cryptosite.am_bmi.get_sas('1abc', 1.4)
self.assertEqual(len(out), 60)
if __name__ == '__main__':
unittest.main()
| import unittest
import utils
import os
import sys
import shutil
import subprocess
TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
utils.set_search_paths(TOPDIR)
import cryptosite.am_bmi
class Tests(unittest.TestCase):
def test_get_sas(self):
"""Test get_sas() function"""
with utils.temporary_working_directory() as tmpdir:
shutil.copy(os.path.join(TOPDIR, 'test', 'input', 'test.pdb'),
'1abc.pdb')
out = cryptosite.am_bmi.get_sas('1abc', 1.4)
self.assertEqual(len(out), 60)
def test_main(self):
"""Test simple complete run of am_bmi"""
with utils.temporary_working_directory() as tmpdir:
shutil.copy(os.path.join(TOPDIR, 'test', 'input',
'pm.pdb.B10010001.pdb'), '.')
with open('SnapList.txt', 'w') as fh:
fh.write("pm.pdb.B10010001.pdb -100.0\n")
fh.write("high-energy.pdb -10.0\n")
subprocess.check_call(['cryptosite', 'am_bmi'])
with open('am_features.out') as fh:
lines = sorted(fh.readlines())
self.assertEqual(len(lines), 12)
self.assertEqual(lines[0], 'ALA\t1\tA\t17.328\t12.02\t32.6\t48.0\n')
if __name__ == '__main__':
unittest.main()
| Test simple complete run of am_bmi. | Test simple complete run of am_bmi.
| Python | lgpl-2.1 | salilab/cryptosite,salilab/cryptosite,salilab/cryptosite | ---
+++
@@ -3,10 +3,10 @@
import os
import sys
import shutil
+import subprocess
TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
-sys.path.append(os.path.join(TOPDIR, 'lib'))
-sys.path.append(TOPDIR)
+utils.set_search_paths(TOPDIR)
import cryptosite.am_bmi
class Tests(unittest.TestCase):
@@ -18,5 +18,19 @@
out = cryptosite.am_bmi.get_sas('1abc', 1.4)
self.assertEqual(len(out), 60)
+ def test_main(self):
+ """Test simple complete run of am_bmi"""
+ with utils.temporary_working_directory() as tmpdir:
+ shutil.copy(os.path.join(TOPDIR, 'test', 'input',
+ 'pm.pdb.B10010001.pdb'), '.')
+ with open('SnapList.txt', 'w') as fh:
+ fh.write("pm.pdb.B10010001.pdb -100.0\n")
+ fh.write("high-energy.pdb -10.0\n")
+ subprocess.check_call(['cryptosite', 'am_bmi'])
+ with open('am_features.out') as fh:
+ lines = sorted(fh.readlines())
+ self.assertEqual(len(lines), 12)
+ self.assertEqual(lines[0], 'ALA\t1\tA\t17.328\t12.02\t32.6\t48.0\n')
+
if __name__ == '__main__':
unittest.main() |
291ae1ae359b7985f25c4d32ee31ff6ccbc6eb7d | curious/commands/__init__.py | curious/commands/__init__.py | # This file is part of curious.
#
# curious is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# curious is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with curious. If not, see <http://www.gnu.org/licenses/>.
"""
Commands helpers.
.. currentmodule:: curious.commands
.. autosummary::
:toctree: commands
manager
context
decorators
plugin
utils
exc
converters
"""
from curious.commands.context import Context
from curious.commands.decorators import command, condition
from curious.commands.manager import CommandsManager
from curious.commands.plugin import Plugin
| # This file is part of curious.
#
# curious is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# curious is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with curious. If not, see <http://www.gnu.org/licenses/>.
"""
Commands helpers.
.. currentmodule:: curious.commands
.. autosummary::
:toctree: commands
manager
context
decorators
plugin
utils
ratelimit
help
exc
converters
"""
from curious.commands.context import Context
from curious.commands.decorators import command, condition
from curious.commands.manager import CommandsManager
from curious.commands.plugin import Plugin
| Add ratelimit and help to autosummary. | Add ratelimit and help to autosummary.
Signed-off-by: Laura F. D <07c342be6e560e7f43842e2e21b774e61d85f047@veriny.tf>
| Python | mit | SunDwarf/curious | ---
+++
@@ -26,6 +26,8 @@
decorators
plugin
utils
+ ratelimit
+ help
exc
converters |
f8640410f4271b22a2836d9fe4f5d09b28c7b19c | angr/storage/memory_mixins/regioned_memory/abstract_merger_mixin.py | angr/storage/memory_mixins/regioned_memory/abstract_merger_mixin.py | import logging
from typing import Iterable, Tuple, Any
from .. import MemoryMixin
l = logging.getLogger(name=__name__)
class AbstractMergerMixin(MemoryMixin):
def _merge_values(self, values: Iterable[Tuple[Any,Any]], merged_size: int):
if self.category == 'reg' and self.state.arch.register_endness == 'Iend_LE':
should_reverse = True
elif self.state.arch.memory_endness == 'Iend_LE':
should_reverse = True
else:
should_reverse = False
values = list(values)
merged_val = values[0][0]
if should_reverse: merged_val = merged_val.reversed
for tm, _ in values[1:]:
if should_reverse: tm = tm.reversed
if self._is_uninitialized(tm):
continue
l.info("Merging %s %s...", merged_val, tm)
merged_val = merged_val.union(tm)
l.info("... Merged to %s", merged_val)
if should_reverse:
merged_val = merged_val.reversed
return merged_val
@staticmethod
def _is_uninitialized(a):
return getattr(a._model_vsa, 'uninitialized', False)
| import logging
from typing import Iterable, Tuple, Any
from .. import MemoryMixin
l = logging.getLogger(name=__name__)
class AbstractMergerMixin(MemoryMixin):
def _merge_values(self, values: Iterable[Tuple[Any,Any]], merged_size: int):
# if self.category == 'reg' and self.state.arch.register_endness == 'Iend_LE':
# should_reverse = True
# elif self.state.arch.memory_endness == 'Iend_LE':
# should_reverse = True
# else:
# should_reverse = False
values = list(values)
merged_val = values[0][0]
# if should_reverse: merged_val = merged_val.reversed
for tm, _ in values[1:]:
# if should_reverse: tm = tm.reversed
if self._is_uninitialized(tm):
continue
l.info("Merging %s %s...", merged_val, tm)
merged_val = merged_val.union(tm)
l.info("... Merged to %s", merged_val)
# if should_reverse:
# merged_val = merged_val.reversed
if not values[0][0].uninitialized and self.state.solver.backends.vsa.identical(merged_val, values[0][0]):
return None
return merged_val
@staticmethod
def _is_uninitialized(a):
return getattr(a._model_vsa, 'uninitialized', False)
| Remove reversing heuristics from merge_values for abstract memory. | Remove reversing heuristics from merge_values for abstract memory.
This is because SimMemoryObject handles endness now.
Also re-introduce the logic for dealing with uninit memory values.
| Python | bsd-2-clause | angr/angr,angr/angr,angr/angr | ---
+++
@@ -10,20 +10,20 @@
def _merge_values(self, values: Iterable[Tuple[Any,Any]], merged_size: int):
- if self.category == 'reg' and self.state.arch.register_endness == 'Iend_LE':
- should_reverse = True
- elif self.state.arch.memory_endness == 'Iend_LE':
- should_reverse = True
- else:
- should_reverse = False
+ # if self.category == 'reg' and self.state.arch.register_endness == 'Iend_LE':
+ # should_reverse = True
+ # elif self.state.arch.memory_endness == 'Iend_LE':
+ # should_reverse = True
+ # else:
+ # should_reverse = False
values = list(values)
merged_val = values[0][0]
- if should_reverse: merged_val = merged_val.reversed
+ # if should_reverse: merged_val = merged_val.reversed
for tm, _ in values[1:]:
- if should_reverse: tm = tm.reversed
+ # if should_reverse: tm = tm.reversed
if self._is_uninitialized(tm):
continue
@@ -31,8 +31,11 @@
merged_val = merged_val.union(tm)
l.info("... Merged to %s", merged_val)
- if should_reverse:
- merged_val = merged_val.reversed
+ # if should_reverse:
+ # merged_val = merged_val.reversed
+
+ if not values[0][0].uninitialized and self.state.solver.backends.vsa.identical(merged_val, values[0][0]):
+ return None
return merged_val
|
c1e9d369680e779d481aa7db17be9348d56ec29d | test_linked_list.py | test_linked_list.py | from __future__ import unicode_literals
import linked_list
# def func(x):
# return x + 1
# def tdest_answer():
# assert func(3) == 5
# init
a = linked_list.LinkedList()
def test_size():
assert a.size is 0
def test_head():
assert a.head is None
def test_init():
assert type(a) is linked_list.LinkedList
| """Pytest file for linked_list.py
Run this with the command 'py.test test_linked_list.py'
"""
from __future__ import unicode_literals
import linked_list
import copy
# init method
a = linked_list.LinkedList()
def test_init_size():
assert a.sizeOfList is 0
assert type(a.sizeOfList) is int
def test_init_head():
assert a.head is None
def test_init_type():
assert type(a) is linked_list.LinkedList
# insert method
b = copy.copy(a) # make a copy every time a change is made
b.insert(5) # so the test can handle different values
def test_insert_size():
assert b.sizeOfList is 1
def test_insert_head():
assert b.head.value is 5
def test_insert_next():
assert b.head.next_node is None
c = copy.copy(b)
c.insert(6)
def test_insert_new_size():
assert c.sizeOfList is 2
def test_insert_new_head():
assert c.head.value is 6
def test_insert_pointer():
assert c.head.next_node.value is 5
# size method
def test_size():
assert c.size() is 2
# search method
def test_search_value_in_list():
assert c.search(5).value is 5
def test_search_value_not_in_list():
assert c.search(7) is None # 7 is not in the list
# remove method
d = copy.copy(c)
d.remove(d.search(6))
def test_remove_value():
assert d.search(6) is None
def test_remove_size():
assert d.size() is 1
# display method
def test_display():
assert d.display() == (5,) # test to make sure they are equivalent
| Add comments to test file | Add comments to test file
Add comments after all tests passed
| Python | mit | jesseklein406/data-structures | ---
+++
@@ -1,27 +1,97 @@
+"""Pytest file for linked_list.py
+
+Run this with the command 'py.test test_linked_list.py'
+"""
+
+
from __future__ import unicode_literals
import linked_list
-
-# def func(x):
-# return x + 1
-
-# def tdest_answer():
-# assert func(3) == 5
+import copy
-# init
+# init method
a = linked_list.LinkedList()
-def test_size():
- assert a.size is 0
+def test_init_size():
+ assert a.sizeOfList is 0
+ assert type(a.sizeOfList) is int
-def test_head():
+def test_init_head():
assert a.head is None
-def test_init():
+def test_init_type():
assert type(a) is linked_list.LinkedList
+# insert method
+
+b = copy.copy(a) # make a copy every time a change is made
+b.insert(5) # so the test can handle different values
+
+
+def test_insert_size():
+ assert b.sizeOfList is 1
+
+
+def test_insert_head():
+ assert b.head.value is 5
+
+
+def test_insert_next():
+ assert b.head.next_node is None
+
+
+c = copy.copy(b)
+c.insert(6)
+
+
+def test_insert_new_size():
+ assert c.sizeOfList is 2
+
+
+def test_insert_new_head():
+ assert c.head.value is 6
+
+
+def test_insert_pointer():
+ assert c.head.next_node.value is 5
+
+
+# size method
+
+def test_size():
+ assert c.size() is 2
+
+
+# search method
+
+def test_search_value_in_list():
+ assert c.search(5).value is 5
+
+
+def test_search_value_not_in_list():
+ assert c.search(7) is None # 7 is not in the list
+
+
+# remove method
+
+d = copy.copy(c)
+d.remove(d.search(6))
+
+
+def test_remove_value():
+ assert d.search(6) is None
+
+
+def test_remove_size():
+ assert d.size() is 1
+
+
+# display method
+
+def test_display():
+ assert d.display() == (5,) # test to make sure they are equivalent |
7c68a78a81721ecbbda0f999576b91b803a34a3e | .circleci/get-commit-range.py | .circleci/get-commit-range.py | #!/usr/bin/env python3
import os
import argparse
from github import Github
def from_pr(project, repo, pr_number):
gh = Github()
pr = gh.get_repo(f'{project}/{repo}').get_pull(pr_number)
base = pr.base.ref
head = pr.head.ref
return f'origin/{base}...{head}'
def main():
argparser = argparse.ArgumentParser()
argparser.add_argument(
'project',
default=os.environ['CIRCLE_PROJECT_USERNAME'],
nargs='?'
)
argparser.add_argument(
'repo',
default=os.environ['CIRCLE_PROJECT_REPONAME'],
nargs='?'
)
argparser.add_argument(
'--pr-number',
type=int,
nargs='?'
)
args = argparser.parse_args()
if not args.pr_number:
pr_number = int(os.environ['CIRCLE_PR_NUMBER'])
else:
pr_number = args.pr_number
print(from_pr(args.project, args.repo, pr_number))
if __name__ == '__main__':
main() | #!/usr/bin/env python3
import os
import argparse
from github import Github
def from_pr(project, repo, pr_number):
gh = Github()
pr = gh.get_repo(f'{project}/{repo}').get_pull(pr_number)
base = pr.base.sha
head = pr.base.sha
return f'{base}...{head}'
def main():
argparser = argparse.ArgumentParser()
argparser.add_argument(
'project',
default=os.environ['CIRCLE_PROJECT_USERNAME'],
nargs='?'
)
argparser.add_argument(
'repo',
default=os.environ['CIRCLE_PROJECT_REPONAME'],
nargs='?'
)
argparser.add_argument(
'--pr-number',
type=int,
nargs='?'
)
args = argparser.parse_args()
if not args.pr_number:
pr_number = int(os.environ['CIRCLE_PR_NUMBER'])
else:
pr_number = args.pr_number
print(from_pr(args.project, args.repo, pr_number))
if __name__ == '__main__':
main() | Use SHAs for commit_range rather than refs | Use SHAs for commit_range rather than refs
Refs are local and might not always be present in the checkout.
| Python | bsd-3-clause | ryanlovett/datahub,berkeley-dsep-infra/datahub,ryanlovett/datahub,ryanlovett/datahub,berkeley-dsep-infra/datahub,berkeley-dsep-infra/datahub | ---
+++
@@ -7,9 +7,9 @@
def from_pr(project, repo, pr_number):
gh = Github()
pr = gh.get_repo(f'{project}/{repo}').get_pull(pr_number)
- base = pr.base.ref
- head = pr.head.ref
- return f'origin/{base}...{head}'
+ base = pr.base.sha
+ head = pr.base.sha
+ return f'{base}...{head}'
def main(): |
4a1fcd1981ea1993227fb568a1b744cbf38178b4 | app/DataLogger/sqlite_logger.py | app/DataLogger/sqlite_logger.py | import sqlite3
import time
class SQLiteLogger:
def __init__(self, filename="test.db"):
self.filename = filename
self.connection = None
def __enter__(self):
try:
with open(self.filename):
self.connection = sqlite3.connect(self.filename)
except IOError:
self.connection = sqlite3.connect(self.filename)
cursor = self.connection.cursor()
cursor.execute('''CREATE TABLE readings
(date real, device text, property text, value real)''')
self.connection.commit()
return self
def __exit__(self, type, value, traceback):
self.connection.close()
self.connection = None
def log(self, device, property, value, t=None):
if t is None:
t = time.time()
values = (t, device, property, value)
cursor = self.connection.cursor()
cursor.execute("INSERT INTO readings VALUES(?,?,?,?)", values)
self.connection.commit()
| import sqlite3
import time
class SQLiteLogger:
def __init__(self, filename="g2x.db"):
self.filename = filename
self.connection = None
def __enter__(self):
try:
with open(self.filename):
self.connection = sqlite3.connect(self.filename)
except IOError:
self.connection = sqlite3.connect(self.filename)
cursor = self.connection.cursor()
cursor.execute('''CREATE TABLE readings
(date real, device text, property text, value real)''')
self.connection.commit()
return self
def __exit__(self, type, value, traceback):
self.connection.close()
self.connection = None
def log(self, device, property, value, t=None):
if t is None:
t = time.time()
values = (t, device, property, value)
cursor = self.connection.cursor()
cursor.execute("INSERT INTO readings VALUES(?,?,?,?)", values)
self.connection.commit()
| Change default db name to g2x.db | Change default db name to g2x.db
| Python | mit | gizmo-cda/g2x,gizmo-cda/g2x,gizmo-cda/g2x,gizmo-cda/g2x,thelonious/g2x,thelonious/g2x | ---
+++
@@ -3,12 +3,11 @@
class SQLiteLogger:
- def __init__(self, filename="test.db"):
+ def __init__(self, filename="g2x.db"):
self.filename = filename
self.connection = None
def __enter__(self):
-
try:
with open(self.filename):
self.connection = sqlite3.connect(self.filename) |
25a0d4b8f91f1d771c215079832170cd0402d2ee | gi/overrides/__init__.py | gi/overrides/__init__.py | from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
print __path__, __name__
| from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
print(__path__, __name__)
| Fix another syntax error with newer Python versions | Fix another syntax error with newer Python versions
| Python | lgpl-2.1 | lubosz/gst-python,GStreamer/gst-python,lubosz/gst-python,GStreamer/gst-python,pexip/gst-python,GStreamer/gst-python,pexip/gst-python,lubosz/gst-python,freedesktop-unofficial-mirror/gstreamer__gst-python,pexip/gst-python,freedesktop-unofficial-mirror/gstreamer__gst-python,freedesktop-unofficial-mirror/gstreamer__gst-python | ---
+++
@@ -1,4 +1,4 @@
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
-print __path__, __name__
+print(__path__, __name__) |
93d9ae1275aa6f40f3ad4a63b6919eb3eaaf6cf8 | nimble/sources/elementary.py | nimble/sources/elementary.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from ..composition import SeekableSource
import numpy as np
class IntegerIdentitySource(SeekableSource):
"""Return the integer used as position argument."""
def __init__(self, size=np.iinfo(np.uint32).max, **kwargs):
self.parallel_possible = True
self.cached = True
self._shape = 1,
self._size = size
super(IntegerIdentitySource, self).__init__(name=u"IntegerIdentitySource", **kwargs)
def _get_data_at(self, position):
return np.array([position])
@property
def dtype(self):
return np.uint32
| # -*- coding: utf-8 -*-
from __future__ import absolute_import
from ..composition import SeekableSource
import numpy as np
class IntegerIdentitySource(SeekableSource):
"""Return the integer used as position argument."""
def __init__(self, size=np.iinfo(np.uint32).max, **kwargs):
self.parallel_possible = True
self.cached = True
self._shape = 1,
self._dtype = np.uint32
self._size = size
super(IntegerIdentitySource, self).__init__(name=u"IntegerIdentitySource", **kwargs)
def _get_data_at(self, position):
return np.array([position], dtype=self._dtype)
@property
def dtype(self):
return self._dtype
| Set identity integer source data type | Set identity integer source data type
| Python | mit | risteon/nimble | ---
+++
@@ -13,12 +13,13 @@
self.parallel_possible = True
self.cached = True
self._shape = 1,
+ self._dtype = np.uint32
self._size = size
super(IntegerIdentitySource, self).__init__(name=u"IntegerIdentitySource", **kwargs)
def _get_data_at(self, position):
- return np.array([position])
+ return np.array([position], dtype=self._dtype)
@property
def dtype(self):
- return np.uint32
+ return self._dtype |
6d507595b0e51ed4a366c3288eec808ac91e30bc | pyinfra/modules/virtualenv.py | pyinfra/modules/virtualenv.py | # pyinfra
# File: pyinfra/modules/pip.py
# Desc: manage virtualenvs
'''
Manage Python virtual environments
'''
from __future__ import unicode_literals
from pyinfra.api import operation
from pyinfra.modules import files
@operation
def virtualenv(
state, host,
path, python='python3', site_packages=False, always_copy=False,
present=True,
):
'''
Manage virtualenv.
+ python: python interpreter to use
+ site_packages: give access to the global site-packages
+ always_copy: always copy files rather than symlinking
+ present: whether the virtualenv should be installed
'''
if present is False and host.fact.directory(path):
# Ensure deletion of unwanted virtualenv
yield from files.directory(state, host, path, present=False)
elif present and not host.fact.directory(path):
# Create missing virtualenv
command = '/usr/bin/virtualenv -p {}'.format(python)
if site_packages:
command += ' --system-site-packages'
if always_copy:
command += ' --always-copy'
command += ' ' + path
yield command
| # pyinfra
# File: pyinfra/modules/pip.py
# Desc: manage virtualenvs
'''
Manage Python virtual environments
'''
from __future__ import unicode_literals
from pyinfra.api import operation
from pyinfra.modules import files
@operation
def virtualenv(
state, host,
path, python='python3', site_packages=False, always_copy=False,
present=True,
):
'''
Manage virtualenv.
+ python: python interpreter to use
+ site_packages: give access to the global site-packages
+ always_copy: always copy files rather than symlinking
+ present: whether the virtualenv should be installed
'''
if present is False and host.fact.directory(path):
# Ensure deletion of unwanted virtualenv
# no 'yield from' in python 2.7
for cmd in files.directory(state, host, path, present=False):
yield cmd
elif present and not host.fact.directory(path):
# Create missing virtualenv
command = '/usr/bin/virtualenv -p {}'.format(python)
if site_packages:
command += ' --system-site-packages'
if always_copy:
command += ' --always-copy'
command += ' ' + path
yield command
| Fix no yield from in middle ages | Fix no yield from in middle ages
| Python | mit | Fizzadar/pyinfra,Fizzadar/pyinfra | ---
+++
@@ -29,7 +29,9 @@
if present is False and host.fact.directory(path):
# Ensure deletion of unwanted virtualenv
- yield from files.directory(state, host, path, present=False)
+ # no 'yield from' in python 2.7
+ for cmd in files.directory(state, host, path, present=False):
+ yield cmd
elif present and not host.fact.directory(path):
# Create missing virtualenv |
6d567ad3eb7749692b05a7685ffbd99f74d965cd | manage.py | manage.py | import os
from flask.ext.script import Manager
from flask.ext.migrate import Migrate
from flask.ext.migrate import MigrateCommand
from flask_security.utils import encrypt_password
from service.models import *
from service import app
from service import db
from service import user_datastore
app.config.from_object(os.environ['SETTINGS'])
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
@manager.option('-e', '--email', dest='email')
@manager.option('-p', '--password', dest='password')
def create_user(email, password):
if not user_datastore.find_user(email=email):
user_datastore.create_user(email=email,
password=encrypt_password(password))
db.session.commit()
if __name__ == '__main__':
manager.run()
| import os
from flask.ext.script import Manager
from flask.ext.migrate import Migrate
from flask.ext.migrate import MigrateCommand
from flask_security.utils import encrypt_password
from service.models import *
from service import app
from service import db
from service import user_datastore
app.config.from_object(os.environ['SETTINGS'])
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
@manager.option('--email', dest='email')
@manager.option('--password', dest='password')
def create_user(email, password):
if not user_datastore.find_user(email=email):
user_datastore.create_user(email=email,
password=encrypt_password(password))
db.session.commit()
if __name__ == '__main__':
manager.run()
| Fix create user command to work locally and on heroku | Fix create user command to work locally and on heroku
| Python | mit | LandRegistry/service-frontend-alpha,LandRegistry/service-frontend-alpha,LandRegistry/service-frontend-alpha,LandRegistry/service-frontend-alpha,LandRegistry/service-frontend-alpha | ---
+++
@@ -18,8 +18,8 @@
manager.add_command('db', MigrateCommand)
-@manager.option('-e', '--email', dest='email')
-@manager.option('-p', '--password', dest='password')
+@manager.option('--email', dest='email')
+@manager.option('--password', dest='password')
def create_user(email, password):
if not user_datastore.find_user(email=email):
user_datastore.create_user(email=email, |
d1e0949533ad30e2cd3e5afccbf59d835c1b0fe3 | doc/examples/plot_entropy.py | doc/examples/plot_entropy.py | """
=======
Entropy
=======
Image entropy is a quantity which is used to describe the amount of information
coded in an image.
"""
import numpy as np
import matplotlib.pyplot as plt
from skimage import data
from skimage.filter.rank import entropy
from skimage.morphology import disk
from skimage.util import img_as_ubyte
image = img_as_ubyte(data.camera())
plt.figure(figsize=(10, 4))
plt.subplot(121)
plt.imshow(image, cmap=plt.cm.gray)
plt.title('Image')
plt.colorbar()
plt.subplot(122)
plt.imshow(entropy(image, disk(5)), cmap=plt.cm.jet)
plt.title('Entropy')
plt.colorbar()
plt.show()
| """
=======
Entropy
=======
Image entropy is a quantity which is used to describe the amount of information
coded in an image.
"""
import numpy as np
import matplotlib.pyplot as plt
from skimage import data
from skimage.filter.rank import entropy
from skimage.morphology import disk
from skimage.util import img_as_ubyte
image = img_as_ubyte(data.camera())
fig, (ax0, ax1) = plt.subplots(ncols=2, figsize=(10, 4))
img0 = ax0.imshow(image, cmap=plt.cm.gray)
ax0.set_title('Image')
ax0.axis('off')
plt.colorbar(img0, ax=ax0)
img1 = ax1.imshow(entropy(image, disk(5)), cmap=plt.cm.jet)
ax1.set_title('Entropy')
ax1.axis('off')
plt.colorbar(img1, ax=ax1)
plt.show()
| Update entropy example with improved matplotlib usage | Update entropy example with improved matplotlib usage
| Python | bsd-3-clause | dpshelio/scikit-image,ofgulban/scikit-image,SamHames/scikit-image,GaZ3ll3/scikit-image,warmspringwinds/scikit-image,oew1v07/scikit-image,WarrenWeckesser/scikits-image,ClinicalGraphics/scikit-image,chriscrosscutler/scikit-image,bsipocz/scikit-image,jwiggins/scikit-image,michaelpacer/scikit-image,almarklein/scikit-image,blink1073/scikit-image,blink1073/scikit-image,chintak/scikit-image,rjeli/scikit-image,bennlich/scikit-image,rjeli/scikit-image,Britefury/scikit-image,robintw/scikit-image,bennlich/scikit-image,vighneshbirodkar/scikit-image,ClinicalGraphics/scikit-image,chriscrosscutler/scikit-image,emon10005/scikit-image,Hiyorimi/scikit-image,paalge/scikit-image,youprofit/scikit-image,juliusbierk/scikit-image,keflavich/scikit-image,almarklein/scikit-image,pratapvardhan/scikit-image,SamHames/scikit-image,paalge/scikit-image,ajaybhat/scikit-image,youprofit/scikit-image,vighneshbirodkar/scikit-image,WarrenWeckesser/scikits-image,michaelpacer/scikit-image,ajaybhat/scikit-image,Hiyorimi/scikit-image,newville/scikit-image,warmspringwinds/scikit-image,SamHames/scikit-image,michaelaye/scikit-image,michaelaye/scikit-image,chintak/scikit-image,ofgulban/scikit-image,oew1v07/scikit-image,dpshelio/scikit-image,SamHames/scikit-image,chintak/scikit-image,chintak/scikit-image,robintw/scikit-image,Midafi/scikit-image,bsipocz/scikit-image,paalge/scikit-image,almarklein/scikit-image,keflavich/scikit-image,jwiggins/scikit-image,GaZ3ll3/scikit-image,emon10005/scikit-image,Britefury/scikit-image,pratapvardhan/scikit-image,Midafi/scikit-image,juliusbierk/scikit-image,ofgulban/scikit-image,newville/scikit-image,vighneshbirodkar/scikit-image,almarklein/scikit-image,rjeli/scikit-image | ---
+++
@@ -18,16 +18,17 @@
image = img_as_ubyte(data.camera())
-plt.figure(figsize=(10, 4))
+fig, (ax0, ax1) = plt.subplots(ncols=2, figsize=(10, 4))
-plt.subplot(121)
-plt.imshow(image, cmap=plt.cm.gray)
-plt.title('Image')
-plt.colorbar()
-plt.subplot(122)
-plt.imshow(entropy(image, disk(5)), cmap=plt.cm.jet)
-plt.title('Entropy')
-plt.colorbar()
+img0 = ax0.imshow(image, cmap=plt.cm.gray)
+ax0.set_title('Image')
+ax0.axis('off')
+plt.colorbar(img0, ax=ax0)
+
+img1 = ax1.imshow(entropy(image, disk(5)), cmap=plt.cm.jet)
+ax1.set_title('Entropy')
+ax1.axis('off')
+plt.colorbar(img1, ax=ax1)
plt.show() |
411b594c7d363f68555a97fccff92a43392d0d04 | webshop/core/util.py | webshop/core/util.py | # Copyright (C) 2010-2011 Mathijs de Bruin <mathijs@mathijsfietst.nl>
#
# This file is part of django-webshop.
#
# django-webshop is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from django.db.models import get_model
""" Several util functions for use in core functionality. """
def get_model_from_string(model):
"""
Takes a string in the form of `appname.Model`, (ie.
`basic_webshop.CartItem`) and returns the model class for it.
"""
return get_model(*model.split('.'))
| # Copyright (C) 2010-2011 Mathijs de Bruin <mathijs@mathijsfietst.nl>
#
# This file is part of django-webshop.
#
# django-webshop is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import logging
logger = logging.getLogger(__name__)
from django.db import models
""" Several util functions for use in core functionality. """
def get_model_from_string(model):
"""
Takes a string in the form of `appname.Model`, (ie.
`basic_webshop.CartItem`) and returns the model class for it.
"""
model_class = models.get_model(*model.split('.'))
assert isinstance(model_class, models.base.ModelBase), \
'%s does not refer to a known Model class.' % model
return model_class
| Make sure we have actually looked up a model in get_model_from_string. | Make sure we have actually looked up a model
in get_model_from_string.
| Python | agpl-3.0 | dokterbob/django-shopkit,dokterbob/django-shopkit | ---
+++
@@ -16,14 +16,23 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-from django.db.models import get_model
+import logging
+logger = logging.getLogger(__name__)
+
+from django.db import models
+
""" Several util functions for use in core functionality. """
+
def get_model_from_string(model):
"""
Takes a string in the form of `appname.Model`, (ie.
`basic_webshop.CartItem`) and returns the model class for it.
"""
+ model_class = models.get_model(*model.split('.'))
- return get_model(*model.split('.'))
+ assert isinstance(model_class, models.base.ModelBase), \
+ '%s does not refer to a known Model class.' % model
+
+ return model_class |
508d86ba316fd48522d73d4ae3049f96e8e73eae | dpaste/urls/dpaste.py | dpaste/urls/dpaste.py | from django.conf.urls.defaults import url, patterns
urlpatterns = patterns('dpaste.views',
url(r'^$', 'snippet_new', name='snippet_new'),
url(r'^diff/$', 'snippet_diff', name='snippet_diff'),
url(r'^history/$', 'snippet_history', name='snippet_history'),
url(r'^delete/$', 'snippet_delete', name='snippet_delete'),
url(r'^(?P<snippet_id>[a-zA-Z0-9]+)/?$', 'snippet_details', name='snippet_details'),
url(r'^(?P<snippet_id>[a-zA-Z0-9]+)/delete/$', 'snippet_delete', name='snippet_delete'),
url(r'^(?P<snippet_id>[a-zA-Z0-9]+)/gist/$', 'snippet_gist', name='snippet_gist'),
url(r'^(?P<snippet_id>[a-zA-Z0-9]+)/raw/$', 'snippet_details', {'template_name': 'dpaste/snippet_details_raw.html', 'is_raw': True}, name='snippet_details_raw'),
)
| from django.conf.urls.defaults import url, patterns
urlpatterns = patterns('dpaste.views',
url(r'^$', 'snippet_new', name='snippet_new'),
url(r'^diff/$', 'snippet_diff', name='snippet_diff'),
url(r'^history/$', 'snippet_history', name='snippet_history'),
url(r'^delete/$', 'snippet_delete', name='snippet_delete'),
url(r'^(?P<snippet_id>[a-zA-Z0-9]+)/?$', 'snippet_details', name='snippet_details'),
url(r'^(?P<snippet_id>[a-zA-Z0-9]+)/delete/$', 'snippet_delete', name='snippet_delete'),
url(r'^(?P<snippet_id>[a-zA-Z0-9]+)/gist/$', 'snippet_gist', name='snippet_gist'),
url(r'^(?P<snippet_id>[a-zA-Z0-9]+)/raw/?$', 'snippet_details', {'template_name': 'dpaste/snippet_details_raw.html', 'is_raw': True}, name='snippet_details_raw'),
)
| Allow raw snippets without trailing slash | Allow raw snippets without trailing slash
Fixes an asymmetry where both of
curl https://dpaste.de/xXxx
curl https://dpaste.de/xXxx/
work, but
curl https://dpaste.de/xXxx/raw/
fails without a trailing slash (because curl doesn't follow redirects by
default).
| Python | mit | bartTC/dpaste,rbarrois/xelpaste,bartTC/dpaste,bartTC/dpaste,rbarrois/xelpaste,SanketDG/dpaste,SanketDG/dpaste,rbarrois/xelpaste,SanketDG/dpaste | ---
+++
@@ -8,5 +8,5 @@
url(r'^(?P<snippet_id>[a-zA-Z0-9]+)/?$', 'snippet_details', name='snippet_details'),
url(r'^(?P<snippet_id>[a-zA-Z0-9]+)/delete/$', 'snippet_delete', name='snippet_delete'),
url(r'^(?P<snippet_id>[a-zA-Z0-9]+)/gist/$', 'snippet_gist', name='snippet_gist'),
- url(r'^(?P<snippet_id>[a-zA-Z0-9]+)/raw/$', 'snippet_details', {'template_name': 'dpaste/snippet_details_raw.html', 'is_raw': True}, name='snippet_details_raw'),
+ url(r'^(?P<snippet_id>[a-zA-Z0-9]+)/raw/?$', 'snippet_details', {'template_name': 'dpaste/snippet_details_raw.html', 'is_raw': True}, name='snippet_details_raw'),
) |
aa94c28835a67ca000226eb30bdbb0ef852383c5 | jshbot/configurations.py | jshbot/configurations.py | import json
from jshbot.exceptions import ConfiguredBotException, ErrorTypes
CBException = ConfiguredBotException('Configurations')
def get(bot, plugin_name, key=None, extra=None, extension='json'):
"""Gets the configuration file for the given plugin.
Keyword arguments:
key -- Gets the specified key from the config file, otherwise everything.
extra -- Looks for <plugin_name>-<extra>.<extension>
extension -- If 'json', reads the file as json, otherwise reads it as text.
"""
if extra: # Open from external configuration file
filename = '{0}/config/{1}-{2}.{3}'.format(
bot.path, plugin_name, extra, extension)
else: # Open from configuration dictionary
try:
config = bot.configurations[plugin_name]
except KeyError:
raise CBException(
"Plugin {} not found in the configurations dictionary.".format(plugin_name))
try:
if key:
return config[key]
else:
return config
except KeyError:
raise CBException("Key {} not found in the configuration file.".format(key))
try:
with open(filename, 'r') as config_file:
if extension.lower() == 'json':
return json.load(config_file)
else:
return config_file.read()
except FileNotFoundError:
raise CBException("File {} not found.".format(filename))
except Exception as e:
raise CBException("Failed to read {} properly.".format(filename), e=e)
| import json
import yaml
from jshbot.exceptions import ConfiguredBotException, ErrorTypes
CBException = ConfiguredBotException('Configurations')
def get(bot, plugin_name, key=None, extra=None, extension='yaml'):
    """Gets the configuration data for the given plugin.

    Keyword arguments:
    key -- Gets the specified key from the config file, otherwise everything.
    extra -- Looks for <plugin_name>-<extra>.<extension>
    extension -- If 'json' or 'yaml', parses the file accordingly,
        otherwise returns the raw file text.
    """
    if extra:  # Open from external configuration file
        # NOTE(review): plugin_name[:-3] drops the last 3 characters,
        # presumably a '.py' suffix -- confirm against callers.
        filename = '{0}/config/{1}-{2}.{3}'.format(bot.path, plugin_name[:-3], extra, extension)
    else:  # Open from configuration dictionary
        try:
            config = bot.configurations[plugin_name]
        except KeyError:
            raise CBException(
                "Plugin {} not found in the configurations dictionary.".format(plugin_name))
        try:
            # A falsy key (None/'') returns the whole mapping for the plugin.
            if key:
                return config[key]
            else:
                return config
        except KeyError:
            raise CBException("Key {} not found in the configuration file.".format(key))
    # Only reached in the `extra` case: parse the on-disk file.
    try:
        with open(filename, 'r') as config_file:
            if extension.lower() == 'json':
                return json.load(config_file)
            elif extension.lower() == 'yaml':
                # safe_load parses plain YAML only; bare yaml.load can
                # instantiate arbitrary Python objects from the file,
                # which is an injection risk (and is deprecated without
                # an explicit Loader).
                return yaml.safe_load(config_file)
            else:
                return config_file.read()
    except FileNotFoundError:
        raise CBException("File {} not found.".format(filename))
    except Exception as e:
        # Any other parse/read failure is wrapped, preserving the cause.
        raise CBException("Failed to read {} properly.".format(filename), e=e)
| Change default extension to yaml | Change default extension to yaml
| Python | mit | jkchen2/JshBot,jkchen2/JshBot | ---
+++
@@ -1,11 +1,12 @@
import json
+import yaml
from jshbot.exceptions import ConfiguredBotException, ErrorTypes
CBException = ConfiguredBotException('Configurations')
-def get(bot, plugin_name, key=None, extra=None, extension='json'):
+def get(bot, plugin_name, key=None, extra=None, extension='yaml'):
"""Gets the configuration file for the given plugin.
Keyword arguments:
@@ -14,8 +15,7 @@
extension -- If 'json', reads the file as json, otherwise reads it as text.
"""
if extra: # Open from external configuration file
- filename = '{0}/config/{1}-{2}.{3}'.format(
- bot.path, plugin_name, extra, extension)
+ filename = '{0}/config/{1}-{2}.{3}'.format(bot.path, plugin_name[:-3], extra, extension)
else: # Open from configuration dictionary
try:
config = bot.configurations[plugin_name]
@@ -33,6 +33,8 @@
with open(filename, 'r') as config_file:
if extension.lower() == 'json':
return json.load(config_file)
+ elif extension.lower() == 'yaml':
+ return yaml.load(config_file)
else:
return config_file.read()
except FileNotFoundError: |
67752442760221c2e53990bb5dd10f1e045d74a1 | nltk_training/information_extraction.py | nltk_training/information_extraction.py | #!/usr/bin/python
# -*- coding: UTF-8 -*-
from __future__ import division
import feedparser, os
from BeautifulSoup import BeautifulSoup
import nltk, re, pprint
from nltk import word_tokenize
# from urllib2 import Request as request
import urllib2
def ie_preprocess(document):
    """Split *document* into sentences, tokenize each, and POS-tag the tokens.

    Returns a list of sentences, each a list of (token, tag) pairs.
    """
    # The original lines ended in stray "[1]" / "[2]" subscripts (footnote
    # markers copied from the NLTK book) that indexed into the results:
    # sent_tokenize(...)[1] kept only the second sentence, and the second
    # subscript reduced the token lists to one element, breaking tagging.
    sentences = nltk.sent_tokenize(document)
    sentences = [nltk.word_tokenize(sent) for sent in sentences]
    sentences = [nltk.pos_tag(sent) for sent in sentences]
    return sentences
dir_path = os.path.dirname(os.path.realpath(__file__))
with open(dir_path+"/../bible_fulltext/Bible_French_djvu.txt") as file:
ie_preprocess(file.read().decode('utf8'))
| #!/usr/bin/python
# -*- coding: UTF-8 -*-
from __future__ import division
import feedparser, os
from BeautifulSoup import BeautifulSoup
import nltk, re, pprint
from nltk import word_tokenize
# from urllib2 import Request as request
import urllib2
def ie_preprocess(document):
    """Split *document* into sentences, tokenize each, and POS-tag the tokens.

    Returns a list of sentences, each a list of (token, tag) pairs.
    """
    sentences = nltk.sent_tokenize(document)
    sentences = [nltk.word_tokenize(sent) for sent in sentences]
    sentences = [nltk.pos_tag(sent) for sent in sentences]
    return sentences
# Locate the corpus relative to this script so it runs from any CWD.
dir_path = os.path.dirname(os.path.realpath(__file__))
with open(dir_path+"/../bible_fulltext/Bible_French_djvu_Genesis.txt") as file:
    # Python 2: file contents are bytes, so decode before NLTK processing.
    sentences = ie_preprocess(file.read().decode('utf8'))
    # Chunk grammar: a verb tag, the "TO" tag, then another verb tag
    # (e.g. "want to go").
    cp = nltk.RegexpParser('CHUNK: {<V.*> <TO> <V.*>}')
    for sent in sentences:
        #print sent
        tree = cp.parse(sent)
        # Print every subtree that matched the CHUNK grammar.
        for subtree in tree.subtrees():
            if subtree.label() == 'CHUNK': print (subtree)
| Add latest updates to script | Add latest updates to script
| Python | apache-2.0 | fullbright/gary-reporter,fullbright/gary-reporter | ---
+++
@@ -10,13 +10,19 @@
import urllib2
def ie_preprocess(document):
- sentences = nltk.sent_tokenize(document) [1]
- sentences = [nltk.word_tokenize(sent) for sent in sentences] [2]
+ sentences = nltk.sent_tokenize(document)
+ sentences = [nltk.word_tokenize(sent) for sent in sentences]
sentences = [nltk.pos_tag(sent) for sent in sentences]
return sentences
dir_path = os.path.dirname(os.path.realpath(__file__))
-with open(dir_path+"/../bible_fulltext/Bible_French_djvu.txt") as file:
- ie_preprocess(file.read().decode('utf8'))
+with open(dir_path+"/../bible_fulltext/Bible_French_djvu_Genesis.txt") as file:
+ sentences = ie_preprocess(file.read().decode('utf8'))
+ cp = nltk.RegexpParser('CHUNK: {<V.*> <TO> <V.*>}')
+ for sent in sentences:
+ #print sent
+ tree = cp.parse(sent)
+ for subtree in tree.subtrees():
+ if subtree.label() == 'CHUNK': print (subtree) |
a36adf795f370877a472fa4730a3eb31271b8b23 | subversion/bindings/swig/python/tests/run_all.py | subversion/bindings/swig/python/tests/run_all.py | import sys, os
bindir = os.path.dirname(sys.argv[0])
sys.path[0:0] = [ os.getcwd(), "%s/.libs" % os.getcwd(), \
"%s/.." % bindir, "%s/../.libs" % bindir ]
import unittest
import pool
import trac.versioncontrol.tests
# Run all tests
def suite():
    """Aggregate every Python-binding test suite into a single suite."""
    combined = unittest.TestSuite()
    for sub_suite in (pool.suite(), trac.versioncontrol.tests.suite()):
        combined.addTest(sub_suite)
    return combined
if __name__ == '__main__':
unittest.main(defaultTest='suite')
| import sys, os
bindir = os.path.dirname(sys.argv[0])
sys.path[0:0] = [ os.getcwd(), "%s/.libs" % os.getcwd(), \
"%s/.." % bindir, "%s/../.libs" % bindir ]
# OSes without RPATH support are going to have to do things here to make
# the correct shared libraries be found.
if sys.platform == 'cygwin':
    import glob
    # Three directory levels up from the tests directory is the
    # Subversion source root.
    svndir = os.path.dirname(os.path.dirname(os.path.dirname(os.getcwd())))
    libpath = os.getenv("PATH").split(":")
    # Windows resolves shared libraries via PATH, so prepend every
    # built .libs directory (SWIG glue first, then each libsvn_*).
    libpath.insert(0, "%s/libsvn_swig_py/.libs" % os.getcwd())
    for libdir in glob.glob("%s/libsvn_*" % svndir):
        libpath.insert(0, "%s/.libs" % (libdir))
    os.putenv("PATH", ":".join(libpath))
import unittest
import pool
import trac.versioncontrol.tests
# Run all tests
def suite():
"""Run all tests"""
suite = unittest.TestSuite()
suite.addTest(pool.suite())
suite.addTest(trac.versioncontrol.tests.suite());
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
| Make the Python bindings testsuite be able to find the needed shared libraries on Cygwin. Needed to compensate for Windows' complete lack of library RPATHs. | Make the Python bindings testsuite be able to find the needed shared libraries
on Cygwin. Needed to compensate for Windows' complete lack of library RPATHs.
* subversion/bindings/swig/python/tests/run_all.py: On Cygwin, manipulate $PATH
so that the relevant shared libraries are found.
| Python | apache-2.0 | jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion | ---
+++
@@ -2,6 +2,18 @@
bindir = os.path.dirname(sys.argv[0])
sys.path[0:0] = [ os.getcwd(), "%s/.libs" % os.getcwd(), \
"%s/.." % bindir, "%s/../.libs" % bindir ]
+
+# OSes without RPATH support are going to have to do things here to make
+# the correct shared libraries be found.
+if sys.platform == 'cygwin':
+ import glob
+ svndir = os.path.dirname(os.path.dirname(os.path.dirname(os.getcwd())))
+ libpath = os.getenv("PATH").split(":")
+ libpath.insert(0, "%s/libsvn_swig_py/.libs" % os.getcwd())
+ for libdir in glob.glob("%s/libsvn_*" % svndir):
+ libpath.insert(0, "%s/.libs" % (libdir))
+ os.putenv("PATH", ":".join(libpath))
+
import unittest
import pool
import trac.versioncontrol.tests |
ef6c29b6ebd8e3b536dcd63cfce683a6b69897d7 | nyuki/workflow/tasks/python_script.py | nyuki/workflow/tasks/python_script.py | import logging
from tukio.task import register
from tukio.task.holder import TaskHolder
log = logging.getLogger(__name__)
@register('python_script', 'execute')
class PythonScript(TaskHolder):

    """
    Mainly a testing task: evaluates a Python expression taken from the
    task configuration, then passes the event data through unchanged.
    """

    # Config schema: a single optional 'script' string, capped at 16 KiB.
    SCHEMA = {
        'type': 'object',
        'properties': {
            'script': {'type': 'string', 'maxLength': 16384}
        }
    }

    async def execute(self, event):
        # SECURITY: eval() executes arbitrary code from the task config;
        # presumably configs come only from trusted operators -- confirm.
        # Note eval() accepts a single expression, not statements.
        if self.config.get('script'):
            eval(self.config.get('script'))
        return event.data
| import logging
from tukio.task import register
from tukio.task.holder import TaskHolder
log = logging.getLogger(__name__)
@register('python_script', 'execute')
class PythonScript(TaskHolder):

    """
    Mainly a testing task: executes a Python snippet taken from the
    task configuration, then passes the event data through unchanged.
    """

    # Config schema: a single optional 'script' string, capped at 16 KiB.
    SCHEMA = {
        'type': 'object',
        'properties': {
            'script': {'type': 'string', 'maxLength': 16384}
        }
    }

    async def execute(self, event):
        # SECURITY: this executes arbitrary code from the task config;
        # presumably configs come only from trusted operators -- confirm.
        if self.config.get('script'):
            # Compile string into python statement (allow multi-line)
            cc = compile(self.config['script'], 'dummy', 'exec')
            # Eval the compiled code object (runs the statements)
            eval(cc)
        return event.data
| Improve script task to allow multiline | Improve script task to allow multiline
| Python | apache-2.0 | optiflows/nyuki,optiflows/nyuki,gdraynz/nyuki,gdraynz/nyuki | ---
+++
@@ -22,5 +22,8 @@
async def execute(self, event):
if self.config.get('script'):
- eval(self.config.get('script'))
+ # Compile string into python statement (allow multi-line)
+ cc = compile(self.config['script'], 'dummy', 'exec')
+ # Eval the compiled string
+ eval(cc)
return event.data |
223b58cb0f9c63543a4d23f75db4450ce93ab86d | readthedocs/builds/forms.py | readthedocs/builds/forms.py | import logging
from django import forms
from readthedocs.builds.models import VersionAlias, Version
from readthedocs.core.utils import trigger_build
from readthedocs.projects.models import Project
from readthedocs.projects.tasks import clear_artifacts
log = logging.getLogger(__name__)
class AliasForm(forms.ModelForm):
class Meta:
model = VersionAlias
fields = (
'project',
'from_slug',
'to_slug',
'largest',
)
def __init__(self, instance=None, *args, **kwargs):
super(AliasForm, self).__init__(instance=instance, *args, **kwargs)
if instance:
self.fields['project'].queryset = (Project.objects
.filter(pk=instance.project.pk))
class VersionForm(forms.ModelForm):

    """Admin form for a project Version.

    Activating a version triggers a build; deactivating an active
    version removes its built artifacts.
    """

    class Meta:
        model = Version
        fields = ['active', 'privacy_level', 'tags']

    def save(self, *args, **kwargs):
        obj = super(VersionForm, self).save(*args, **kwargs)
        if obj.active and not obj.built and not obj.uploaded:
            trigger_build(project=obj.project, version=obj)
        # ModelForm.save() must return the saved instance; the original
        # fell off the end and returned None, breaking callers using the
        # conventional ``obj = form.save()`` pattern.
        return obj

    def clean(self):
        cleaned_data = super(VersionForm, self).clean()
        if self.instance.pk is not None:  # only for an existing version
            if self.instance.active is True and cleaned_data['active'] is False:
                log.info('Removing files for version %s' % self.instance.slug)
                # NOTE(review): version_pk is passed as a one-element
                # list here -- confirm against the clear_artifacts
                # task signature.
                clear_artifacts.delay(version_pk=[self.instance.pk])
        return cleaned_data
| import logging
from django import forms
from readthedocs.builds.models import VersionAlias, Version
from readthedocs.core.utils import trigger_build
from readthedocs.projects.models import Project
from readthedocs.projects.tasks import clear_artifacts
log = logging.getLogger(__name__)
class AliasForm(forms.ModelForm):

    """Edit form for a VersionAlias."""

    class Meta:
        model = VersionAlias
        fields = (
            'project',
            'from_slug',
            'to_slug',
            'largest',
        )

    def __init__(self, instance=None, *args, **kwargs):
        super(AliasForm, self).__init__(instance=instance, *args, **kwargs)
        # When editing an existing alias, restrict the project choices
        # to the alias's own project.
        if instance:
            self.fields['project'].queryset = (Project.objects
                                               .filter(pk=instance.project.pk))
class VersionForm(forms.ModelForm):

    """Admin form for a project Version.

    Activating a version triggers a build; deactivating an active
    version removes its built artifacts and clears the built flag.
    """

    class Meta:
        model = Version
        fields = ['active', 'privacy_level', 'tags']

    def save(self, *args, **kwargs):
        obj = super(VersionForm, self).save(*args, **kwargs)
        if obj.active and not obj.built and not obj.uploaded:
            trigger_build(project=obj.project, version=obj)
        # ModelForm.save() must return the saved instance; the original
        # fell off the end and returned None, breaking callers using the
        # conventional ``obj = form.save()`` pattern.
        return obj

    def clean(self):
        cleaned_data = super(VersionForm, self).clean()
        if self.instance.pk is not None:  # only for an existing version
            if self.instance.active is True and cleaned_data['active'] is False:
                log.info('Removing files for version %s' % self.instance.slug)
                clear_artifacts.delay(version_pk=self.instance.pk)
                # Artifacts are being removed, so the version is no
                # longer considered built.
                self.instance.built = False
        return cleaned_data
| Handle built state tracking on versions | Handle built state tracking on versions | Python | mit | espdev/readthedocs.org,pombredanne/readthedocs.org,espdev/readthedocs.org,stevepiercy/readthedocs.org,rtfd/readthedocs.org,davidfischer/readthedocs.org,istresearch/readthedocs.org,davidfischer/readthedocs.org,rtfd/readthedocs.org,safwanrahman/readthedocs.org,davidfischer/readthedocs.org,rtfd/readthedocs.org,tddv/readthedocs.org,istresearch/readthedocs.org,pombredanne/readthedocs.org,stevepiercy/readthedocs.org,espdev/readthedocs.org,safwanrahman/readthedocs.org,rtfd/readthedocs.org,istresearch/readthedocs.org,safwanrahman/readthedocs.org,tddv/readthedocs.org,espdev/readthedocs.org,tddv/readthedocs.org,davidfischer/readthedocs.org,stevepiercy/readthedocs.org,pombredanne/readthedocs.org,safwanrahman/readthedocs.org,istresearch/readthedocs.org,stevepiercy/readthedocs.org,espdev/readthedocs.org | ---
+++
@@ -45,5 +45,6 @@
if self.instance.pk is not None: # new instance only
if self.instance.active is True and cleaned_data['active'] is False:
log.info('Removing files for version %s' % self.instance.slug)
- clear_artifacts.delay(version_pk=[self.instance.pk])
+ clear_artifacts.delay(version_pk=self.instance.pk)
+ self.instance.built = False
return cleaned_data |
b593c9fa9939c7fc524a2d4a1c3a7e337fe8de07 | wooey/migrations/0037_populate-jsonfield.py | wooey/migrations/0037_populate-jsonfield.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2018-03-04 23:14
from __future__ import unicode_literals
from django.db import migrations
def populate_default(apps, schema_editor):
ScriptParameter = apps.get_model('wooey', 'ScriptParameter')
for obj in ScriptParameter.objects.all():
obj.default = obj._default
obj.save()
class Migration(migrations.Migration):
dependencies = [
('wooey', '0036_add-jsonfield'),
]
operations = [
migrations.RunPython(populate_default)
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2018-03-04 23:14
from __future__ import unicode_literals
import json
from django.db import migrations
def populate_default(apps, schema_editor):
    """Copy each ScriptParameter's legacy ``_default`` into the JSON field.

    Values stored as JSON text are decoded; anything that fails to
    parse is carried over verbatim.
    """
    ScriptParameter = apps.get_model('wooey', 'ScriptParameter')
    for obj in ScriptParameter.objects.all():
        try:
            obj.default = json.loads(obj._default)
        except Exception:
            # Not valid JSON (or not a string) -- keep the raw value.
            obj.default = obj._default
        obj.save()
class Migration(migrations.Migration):
dependencies = [
('wooey', '0036_add-jsonfield'),
]
operations = [
migrations.RunPython(populate_default)
]
| Convert initial json field if possible in migration | Convert initial json field if possible in migration
| Python | bsd-3-clause | wooey/Wooey,wooey/Wooey,wooey/Wooey,wooey/Wooey | ---
+++
@@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2018-03-04 23:14
from __future__ import unicode_literals
+import json
from django.db import migrations
@@ -8,7 +9,10 @@
def populate_default(apps, schema_editor):
ScriptParameter = apps.get_model('wooey', 'ScriptParameter')
for obj in ScriptParameter.objects.all():
- obj.default = obj._default
+ try:
+ obj.default = json.loads(obj._default)
+ except Exception:
+ obj.default = obj._default
obj.save()
|
b6fff4186de098946cc1e4c0204f78936f73044f | tests/basics/tuple1.py | tests/basics/tuple1.py | # basic tuple functionality
x = (1, 2, 3 * 4)
print(x)
try:
x[0] = 4
except TypeError:
print("TypeError")
print(x)
try:
x.append(5)
except AttributeError:
print("AttributeError")
print(x[1:])
print(x[:-1])
print(x[2:3])
print(x + (10, 100, 10000))
# construction of tuple from large iterator (tests implementation detail of uPy)
print(tuple(range(20)))
# unsupported unary operation
try:
+()
except TypeError:
print('TypeError')
# unsupported type on RHS of add
try:
() + None
except TypeError:
print('TypeError')
| # basic tuple functionality
x = (1, 2, 3 * 4)
print(x)

# tuples are immutable: item assignment must raise TypeError
try:
    x[0] = 4
except TypeError:
    print("TypeError")
print(x)

# tuples provide no append method
try:
    x.append(5)
except AttributeError:
    print("AttributeError")

# slicing
print(x[1:])
print(x[:-1])
print(x[2:3])

# concatenation builds a new tuple
print(x + (10, 100, 10000))

# inplace add operator
x += (10, 11, 12)
print(x)

# construction of tuple from large iterator (tests implementation detail of uPy)
print(tuple(range(20)))

# unsupported unary operation
try:
    +()
except TypeError:
    print('TypeError')

# unsupported type on RHS of add
try:
    () + None
except TypeError:
    print('TypeError')
| Add test for tuple inplace add. | tests/basics: Add test for tuple inplace add.
| Python | mit | infinnovation/micropython,dmazzella/micropython,henriknelson/micropython,chrisdearman/micropython,deshipu/micropython,AriZuu/micropython,infinnovation/micropython,AriZuu/micropython,puuu/micropython,alex-robbins/micropython,torwag/micropython,SHA2017-badge/micropython-esp32,tralamazza/micropython,chrisdearman/micropython,deshipu/micropython,micropython/micropython-esp32,trezor/micropython,pfalcon/micropython,dmazzella/micropython,AriZuu/micropython,cwyark/micropython,kerneltask/micropython,torwag/micropython,dmazzella/micropython,selste/micropython,alex-robbins/micropython,pozetroninc/micropython,pozetroninc/micropython,swegener/micropython,tralamazza/micropython,lowRISC/micropython,pramasoul/micropython,PappaPeppar/micropython,deshipu/micropython,cwyark/micropython,lowRISC/micropython,ryannathans/micropython,MrSurly/micropython-esp32,adafruit/circuitpython,oopy/micropython,adafruit/micropython,chrisdearman/micropython,MrSurly/micropython,pramasoul/micropython,puuu/micropython,oopy/micropython,lowRISC/micropython,kerneltask/micropython,MrSurly/micropython-esp32,pozetroninc/micropython,ryannathans/micropython,infinnovation/micropython,swegener/micropython,torwag/micropython,bvernoux/micropython,selste/micropython,trezor/micropython,tobbad/micropython,blazewicz/micropython,adafruit/circuitpython,infinnovation/micropython,TDAbboud/micropython,torwag/micropython,bvernoux/micropython,SHA2017-badge/micropython-esp32,adafruit/circuitpython,HenrikSolver/micropython,AriZuu/micropython,micropython/micropython-esp32,blazewicz/micropython,cwyark/micropython,tralamazza/micropython,hiway/micropython,henriknelson/micropython,oopy/micropython,HenrikSolver/micropython,bvernoux/micropython,tobbad/micropython,hiway/micropython,deshipu/micropython,torwag/micropython,henriknelson/micropython,henriknelson/micropython,Peetz0r/micropython-esp32,kerneltask/micropython,cwyark/micropython,blazewicz/micropython,micropython/micropython-esp32,cwyark/micropython,micropython/micro
python-esp32,ryannathans/micropython,PappaPeppar/micropython,Peetz0r/micropython-esp32,PappaPeppar/micropython,kerneltask/micropython,alex-robbins/micropython,deshipu/micropython,PappaPeppar/micropython,pfalcon/micropython,tobbad/micropython,pramasoul/micropython,MrSurly/micropython-esp32,hiway/micropython,ryannathans/micropython,swegener/micropython,kerneltask/micropython,selste/micropython,HenrikSolver/micropython,Peetz0r/micropython-esp32,blazewicz/micropython,Timmenem/micropython,blazewicz/micropython,adafruit/circuitpython,Timmenem/micropython,MrSurly/micropython,Timmenem/micropython,adafruit/circuitpython,infinnovation/micropython,tralamazza/micropython,toolmacher/micropython,MrSurly/micropython,pozetroninc/micropython,adafruit/circuitpython,lowRISC/micropython,ryannathans/micropython,toolmacher/micropython,pfalcon/micropython,dmazzella/micropython,lowRISC/micropython,Peetz0r/micropython-esp32,SHA2017-badge/micropython-esp32,chrisdearman/micropython,AriZuu/micropython,adafruit/micropython,hiway/micropython,SHA2017-badge/micropython-esp32,bvernoux/micropython,TDAbboud/micropython,oopy/micropython,MrSurly/micropython-esp32,pramasoul/micropython,tobbad/micropython,tobbad/micropython,selste/micropython,adafruit/micropython,HenrikSolver/micropython,puuu/micropython,puuu/micropython,trezor/micropython,Peetz0r/micropython-esp32,PappaPeppar/micropython,chrisdearman/micropython,toolmacher/micropython,oopy/micropython,hiway/micropython,adafruit/micropython,toolmacher/micropython,alex-robbins/micropython,Timmenem/micropython,TDAbboud/micropython,alex-robbins/micropython,pfalcon/micropython,pramasoul/micropython,bvernoux/micropython,henriknelson/micropython,trezor/micropython,TDAbboud/micropython,selste/micropython,adafruit/micropython,puuu/micropython,swegener/micropython,micropython/micropython-esp32,TDAbboud/micropython,MrSurly/micropython,swegener/micropython,HenrikSolver/micropython,pozetroninc/micropython,pfalcon/micropython,MrSurly/micropython-esp32,trezor/micropyt
hon,MrSurly/micropython,Timmenem/micropython,SHA2017-badge/micropython-esp32,toolmacher/micropython | ---
+++
@@ -17,6 +17,10 @@
print(x + (10, 100, 10000))
+# inplace add operator
+x += (10, 11, 12)
+print(x)
+
# construction of tuple from large iterator (tests implementation detail of uPy)
print(tuple(range(20)))
|
c306e731fde754dc11629ff32f7d0b6afb510e81 | controllers/accounts_manager.py | controllers/accounts_manager.py | from flask_restful import Resource
class AccountsManager(Resource):
    """Placeholder accounts endpoint mapping HTTP verbs to route names."""

    def get(self):
        """GET marks the login route."""
        return dict(route="login")

    def post(self):
        """POST marks the register route."""
        return dict(route="register")
| from flask import jsonify, make_response
from flask_restful import Resource, reqparse
from app.models import User
from app.db_instance import save
from validator import validate
class AccountsManager(Resource):
    """Registration endpoint: validates input and creates user accounts."""

    def __init__(self):
        # Both fields are mandatory; reqparse responds with the given
        # help text when either is missing.
        self.parser = reqparse.RequestParser()
        self.parser.add_argument('username',
                                 type=str,
                                 required=True,
                                 help="username is required")
        self.parser.add_argument('password',
                                 type=str,
                                 required=True,
                                 help="password is required")

    def post(self):
        """Register a new user.

        Responds 400 on invalid input, 409 on duplicate username,
        201 on success.
        """
        args = self.parser.parse_args(strict=True)
        username = args.get("username")
        password = args.get("password")

        if any(arg == "" for arg in [username, password]):
            message = "username and password is required"
            status = 400
        elif not username.isalpha():
            # isalpha() also rejects digits, not only special characters.
            message = "username should not contain special characters"
            status = 400
        elif len(password) < 6:
            message = "password should be more than 6 characters"
            status = 400
        elif User.query.filter_by(username=username).first():
            message = "username already exists"
            status = 409
        else:
            # NOTE(review): the password is handed to User as-is --
            # presumably hashed inside the model; confirm.
            user = User(username, password)
            save(user)
            message = "user registered successfully"
            status = 201

        return make_response(jsonify({
            "message": message
        }), status)
| Add Register resource to handle user registration and save user data to the database | Add Register resource to handle user registration and save user data to the database
| Python | mit | brayoh/bucket-list-api | ---
+++
@@ -1,10 +1,48 @@
-from flask_restful import Resource
+from flask import jsonify, make_response
+from flask_restful import Resource, reqparse
+from app.models import User
+from app.db_instance import save
+from validator import validate
class AccountsManager(Resource):
"""docstring for AccountsManager."""
- def get(self):
- return {"route": "login"}
+ def __init__(self):
+ self.parser = reqparse.RequestParser()
+ self.parser.add_argument('username',
+ type=str,
+ required=True,
+ help="username is required")
+ self.parser.add_argument('password',
+ type=str,
+ required=True,
+ help="password is required")
def post(self):
- return {"route": "register"}
+ args = self.parser.parse_args(strict=True)
+ username = args.get("username")
+ password = args.get("password")
+
+ if any(arg == "" for arg in [username, password]):
+ message = "username and password is required"
+ status = 400
+ elif not username.isalpha():
+ message = "username should not contain special characters"
+ status = 400
+ elif len(password) < 6:
+ message = "password should be more than 6 characters"
+ status = 400
+ elif User.query.filter_by(username=username).first():
+ message = "username already exists"
+ status = 409
+ else:
+ user = User(username, password)
+ save(user)
+
+ message = "user registered successfully"
+ status = 201
+
+
+ return make_response(jsonify({
+ "message": message
+ }), status) |
ec648988b9ce5def40538004c7704739a3a9dd6e | disco_aws_automation/version.py | disco_aws_automation/version.py | """Place of record for the package version"""
__version__ = "1.1.17"
__rpm_version__ = "WILL_BE_SET_BY_RPM_BUILD"
__git_hash__ = "WILL_BE_SET_BY_EGG_BUILD"
| """Place of record for the package version"""
__version__ = "1.1.19"
# The two placeholders below are substituted by the RPM and egg build
# tooling respectively; they are not meaningful in a source checkout.
__rpm_version__ = "WILL_BE_SET_BY_RPM_BUILD"
__git_hash__ = "WILL_BE_SET_BY_EGG_BUILD"
| Return exit code on disco_deploy test and update | Return exit code on disco_deploy test and update
| Python | bsd-2-clause | amplifylitco/asiaq,amplifylitco/asiaq,amplifylitco/asiaq | ---
+++
@@ -1,5 +1,5 @@
"""Place of record for the package version"""
-__version__ = "1.1.17"
+__version__ = "1.1.19"
__rpm_version__ = "WILL_BE_SET_BY_RPM_BUILD"
__git_hash__ = "WILL_BE_SET_BY_EGG_BUILD" |
63c640f2d16b033cc8dff426768cd1c6cbaa5626 | Lib/distutils/__init__.py | Lib/distutils/__init__.py | """distutils
The main package for the Python Module Distribution Utilities. Normally
used from a setup script as
from distutils.core import setup
setup (...)
"""
# This module should be kept compatible with Python 2.1.
__revision__ = "$Id$"
import sys
# Mirror the interpreter's own version as the distutils version string.
__version__ = "%d.%d.%d" % sys.version_info[:3]
del sys  # keep the module namespace clean
| """distutils
The main package for the Python Module Distribution Utilities. Normally
used from a setup script as
from distutils.core import setup
setup (...)
"""
# This module should be kept compatible with Python 2.1.
__revision__ = "$Id$"
# Distutils version
#
# Please coordinate with Marc-Andre Lemburg <mal@egenix.com> when adding
# new features to distutils that would warrant bumping the version number.
#
# In general, major and minor version should loosely follow the Python
# version number the distutils code was shipped with.
#
# Static version string, no longer derived from sys.version_info.
__version__ = "2.5.0"
| Revert to having static version numbers again. | Revert to having static version numbers again.
| Python | mit | sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator | ---
+++
@@ -12,6 +12,12 @@
__revision__ = "$Id$"
-import sys
-__version__ = "%d.%d.%d" % sys.version_info[:3]
-del sys
+# Distutils version
+#
+# Please coordinate with Marc-Andre Lemburg <mal@egenix.com> when adding
+# new features to distutils that would warrant bumping the version number.
+#
+# In general, major and minor version should loosely follow the Python
+# version number the distutils code was shipped with.
+#
+__version__ = "2.5.0" |
61bfc6ac93db9bf11c88f549c9122ac5b498e3d6 | Lib/test/test_contains.py | Lib/test/test_contains.py | from test_support import TestFailed
class base_set:
def __init__(self, el):
self.el = el
class set(base_set):
def __contains__(self, el):
return self.el == el
class seq(base_set):
def __getitem__(self, n):
return [self.el][n]
def check(ok, *args):
if not ok:
raise TestFailed, join(map(str, args), " ")
a = base_set(1)
b = set(1)
c = seq(1)
check(1 in b, "1 not in set(1)")
check(0 not in b, "0 in set(1)")
check(1 in c, "1 not in seq(1)")
check(0 not in c, "0 in seq(1)")
try:
1 in a
check(0, "in base_set did not raise error")
except AttributeError:
pass
try:
1 not in a
check(0, "not in base_set did not raise error")
except AttributeError:
pass
| from test_support import TestFailed
class base_set:
    # Minimal holder for a single element; deliberately defines neither
    # __contains__ nor __getitem__, so 'in' has nothing to fall back on.
    def __init__(self, el):
        self.el = el

class set(base_set):
    # 'in' should use __contains__ directly.
    def __contains__(self, el):
        return self.el == el

class seq(base_set):
    # No __contains__: 'in' should fall back to iteration via __getitem__.
    def __getitem__(self, n):
        return [self.el][n]

def check(ok, *args):
    # NOTE(review): 'join' is not defined in this module -- this raise
    # would itself fail with NameError if a check ever failed (likely
    # meant string.join). Confirm before relying on the message.
    if not ok:
        raise TestFailed, join(map(str, args), " ")

a = base_set(1)
b = set(1)
c = seq(1)

check(1 in b, "1 not in set(1)")
check(0 not in b, "0 in set(1)")
check(1 in c, "1 not in seq(1)")
check(0 not in c, "0 in seq(1)")

# An instance with neither protocol must reject 'in' (AttributeError).
try:
    1 in a
    check(0, "in base_set did not raise error")
except AttributeError:
    pass

try:
    1 not in a
    check(0, "not in base_set did not raise error")
except AttributeError:
    pass

# Test char in string
check('c' in 'abc', "'c' not in 'abc'")
check('d' not in 'abc', "'d' in 'abc'")

# Non-single-character operands on the left of 'in' must raise TypeError.
try:
    '' in 'abc'
    check(0, "'' in 'abc' did not raise error")
except TypeError:
    pass

try:
    'ab' in 'abc'
    check(0, "'ab' in 'abc' did not raise error")
except TypeError:
    pass

try:
    None in 'abc'
    check(0, "None in 'abc' did not raise error")
except TypeError:
    pass
| Add tests for char in string -- including required exceptions for non-char in string. | Add tests for char in string -- including required exceptions for
non-char in string.
| Python | mit | sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator | ---
+++
@@ -39,3 +39,26 @@
check(0, "not in base_set did not raise error")
except AttributeError:
pass
+
+# Test char in string
+
+check('c' in 'abc', "'c' not in 'abc'")
+check('d' not in 'abc', "'d' in 'abc'")
+
+try:
+ '' in 'abc'
+ check(0, "'' in 'abc' did not raise error")
+except TypeError:
+ pass
+
+try:
+ 'ab' in 'abc'
+ check(0, "'ab' in 'abc' did not raise error")
+except TypeError:
+ pass
+
+try:
+ None in 'abc'
+ check(0, "None in 'abc' did not raise error")
+except TypeError:
+ pass |
850e328f024d79623256a8b38ee0f054d4210ce5 | src/constants.py | src/constants.py | #!/usr/bin/env python
TRAJECTORY = 'linear'
if TRAJECTORY == 'linear':
SIMULATION_TIME_IN_SECONDS = 80.0
elif TRAJECTORY == 'circular':
SIMULATION_TIME_IN_SECONDS = 120.0
elif TRAJECTORY == 'squared':
SIMULATION_TIME_IN_SECONDS = 160.0
DELTA_T = 0.1 # this is the sampling time
STEPS = int(SIMULATION_TIME_IN_SECONDS / DELTA_T)
# control constants
K_X = 0.90
K_Y = 0.90
K_THETA = 0.90
# PID control constants
K_P_V = 0.2
K_I_V = 1.905
K_D_V = 0.00
K_P_W = 0.45
K_I_W = 1.25
K_D_W = 0.000
| #!/usr/bin/env python
TRAJECTORY = 'linear'
CONTROLLER = 'pid'

# Total simulated time depends on which reference trajectory is tracked.
# The original if/elif chain silently left SIMULATION_TIME_IN_SECONDS
# undefined for an unknown TRAJECTORY, producing a confusing NameError
# later; a mapping plus an explicit error fails fast instead.
_SIMULATION_TIMES = {
    'linear': 80.0,
    'circular': 120.0,
    'squared': 160.0,
}
try:
    SIMULATION_TIME_IN_SECONDS = _SIMULATION_TIMES[TRAJECTORY]
except KeyError:
    raise ValueError("Unknown trajectory: {!r}".format(TRAJECTORY))

DELTA_T = 0.1  # this is the sampling time
STEPS = int(SIMULATION_TIME_IN_SECONDS / DELTA_T)

# control constants
K_X = 0.90
K_Y = 0.90
K_THETA = 0.90

# PID control constants: linear velocity (V) and angular velocity (W) loops
K_P_V = 0.2
K_I_V = 1.905
K_D_V = 0.00

K_P_W = 0.45
K_I_W = 1.25
K_D_W = 0.000
| Create constant to define a controller that will be used | Create constant to define a controller that will be used
| Python | mit | bit0001/trajectory_tracking,bit0001/trajectory_tracking | ---
+++
@@ -1,5 +1,6 @@
#!/usr/bin/env python
TRAJECTORY = 'linear'
+CONTROLLER = 'pid'
if TRAJECTORY == 'linear':
SIMULATION_TIME_IN_SECONDS = 80.0 |
279e56746984aac878d453c09437a6f6514e7342 | xpserver_web/models.py | xpserver_web/models.py | from django.db import models
from django.contrib.auth.models import User
class Profile(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE, null=True, blank=True)
activation_code = models.CharField(max_length=255, default="0000")
fcm_registration_id = models.CharField(max_length=255, blank=True)
def __str__(self):
if self.user:
return '%s - profile.' % self.user.username
else:
return 'Profile unknown user'
| from django.db import models
from django.contrib.auth.models import User
class Profile(models.Model):
    # One-to-one extension of the Django auth user; nullable so a
    # profile row can exist without an attached user.
    user = models.OneToOneField(User, on_delete=models.CASCADE, null=True, blank=True)
    # Code used to activate the account ("0000" by default).
    activation_code = models.CharField(max_length=255, default="0000")
    # Registration id set once the mobile app has registered; presumably
    # a Firebase Cloud Messaging token -- confirm.
    fcm_registration_id = models.CharField(max_length=255, blank=True)

    def __str__(self):
        """Human-readable label reflecting mobile-app registration state."""
        if self.user:
            if self.fcm_registration_id:
                return '%s - profile with activated mobile app.' % self.user.username
            else:
                return '%s - profile.' % self.user.username
        else:
            return 'Profile unknown user'
| Change str method of profile | Change str method of profile
| Python | mit | xp2017-hackergarden/server,xp2017-hackergarden/server,xp2017-hackergarden/server,xp2017-hackergarden/server | ---
+++
@@ -9,6 +9,9 @@
def __str__(self):
if self.user:
- return '%s - profile.' % self.user.username
+ if self.fcm_registration_id:
+ return '%s - profile with activated mobile app.' % self.user.username
+ else:
+ return '%s - profile.' % self.user.username
else:
return 'Profile unknown user' |
bf7174e96efeaf11c2a2c5722e16f25204a3d3b7 | scripts/cluster_importer.py | scripts/cluster_importer.py | #!/usr/bin/env python
# x COLUMN NAMES
# 0 State_Name
# 1 State_code
# 2 Lga_name
# 3 Lga_code
# 4 EA_NAME
# 5 EA_code
# 6 EAsize
# 7 Unique ID
# 8 Reserve Cluster (RC)
# 9 PRIMARY
# 10 LOCALITY NAME
import csv
import json
with open('2015_06_29_NNHS_2015_Selected EA_Final.xlsx - EA_2015.csv') as csvfile:
reader = csv.reader(csvfile, delimiter=',')
clusterfile = {}
for row in reader:
clusterfile[row[5]] = {
"cluster_name": row[4],
"second_admin_level_name": row[2],
"first_admin_level_name": row[0],
}
print json.dumps(clusterfile, indent=2, separators=(',', ': '))
| #!/usr/bin/env python
# x COLUMN NAMES
# 0 State_Name
# 1 State_code
# 2 Lga_name
# 3 Lga_code
# 4 EA_NAME
# 5 EA_code
# 6 EAsize
# 7 Unique ID
# 8 Reserve Cluster (RC)
# 9 PRIMARY
# 10 LOCALITY NAME
import csv
import json
with open('2015_06_29_NNHS_2015_Selected EA_Final.xlsx - EA_2015.csv') as csvfile:
reader = csv.reader(csvfile, delimiter=',')
clusterfile = {}
for row in reader:
clusterfile[row[7]] = {
"cluster_name": row[4],
"second_admin_level_name": row[2],
"first_admin_level_name": row[0],
}
print json.dumps(clusterfile, indent=2, separators=(',', ': '))
| Index clusters by unique ID | Index clusters by unique ID
| Python | agpl-3.0 | eHealthAfrica/nutsurv,johanneswilm/eha-nutsurv-django,eHealthAfrica/nutsurv,johanneswilm/eha-nutsurv-django,johanneswilm/eha-nutsurv-django,eHealthAfrica/nutsurv | ---
+++
@@ -21,7 +21,7 @@
reader = csv.reader(csvfile, delimiter=',')
clusterfile = {}
for row in reader:
- clusterfile[row[5]] = {
+ clusterfile[row[7]] = {
"cluster_name": row[4],
"second_admin_level_name": row[2],
"first_admin_level_name": row[0], |
ec4929175af38e56397ec8afd05b63dc12850226 | alg_dijkstra_shortest_path.py | alg_dijkstra_shortest_path.py | from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def dijkstra(weighted_graph_d, start_vertex):
"""Dijkstra algorithm for weighted graph.
Finds shortest path in a weighted graph from a particular node
to all vertices that are reachable from it.
"""
pass
def main():
weighted_graph_d = {
'u': {'v': 2, 'w': 5, 'x': 1},
'v': {'u': 2, 'w': 3, 'x': 2},
'w': {'u': 5, 'v': 3, 'x': 3, 'y': 1, 'z': 5},
'x': {'u': 1, 'v': 2, 'w': 3, 'y': 1},
'y': {'w': 1, 'x': 1, 'z': 1},
'z': {'w': 5, 'y': 1}
}
start_vertex = 'x'
print('weighted_graph_d: {}'.format(weighted_graph_d))
print('Dijkstra shortest path from {}:'.format(start_vertex))
shortest_path_d, vertex_lookup_d = dijkstra(
weighted_graph_d, start_vertex)
print('shortest_path_d: {}'.format(shortest_path_d))
print('vertex_lookup_d: {}'.format(vertex_lookup_d))
if __name__ == '__main__':
main()
| from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def dijkstra(weighted_graph_d, start_vertex):
"""Dijkstra algorithm for "weighted" graph.
Finds shortest path in a weighted graph from a particular node
to all vertices that are reachable from it.
"""
pass
def main():
weighted_graph_d = {
'u': {'v': 2, 'w': 5, 'x': 1},
'v': {'u': 2, 'w': 3, 'x': 2},
'w': {'u': 5, 'v': 3, 'x': 3, 'y': 1, 'z': 5},
'x': {'u': 1, 'v': 2, 'w': 3, 'y': 1},
'y': {'w': 1, 'x': 1, 'z': 1},
'z': {'w': 5, 'y': 1}
}
start_vertex = 'x'
print('weighted_graph_d: {}'.format(weighted_graph_d))
print('Dijkstra shortest path from {}:'.format(start_vertex))
shortest_path_d, vertex_lookup_d = dijkstra(
weighted_graph_d, start_vertex)
print('shortest_path_d: {}'.format(shortest_path_d))
print('vertex_lookup_d: {}'.format(vertex_lookup_d))
if __name__ == '__main__':
main()
| Revise doc string with highlighting "weighted" graph | Revise doc string with highlighting "weighted" graph
| Python | bsd-2-clause | bowen0701/algorithms_data_structures | ---
+++
@@ -4,7 +4,7 @@
def dijkstra(weighted_graph_d, start_vertex):
- """Dijkstra algorithm for weighted graph.
+ """Dijkstra algorithm for "weighted" graph.
Finds shortest path in a weighted graph from a particular node
to all vertices that are reachable from it. |
965e2dc74afef720055db315863e038e500fc44d | mangopaysdk/types/dto.py | mangopaysdk/types/dto.py | class Dto(object):
"""Abstract class for all DTOs (entities and their composites)."""
def GetSubObjects(self):
"""Get array with mapping which property is object and what type of object.
To be overridden in child class if has any sub objects.
return array
"""
return {}
def GetDependsObjects(self):
"""Get array with mapping which property depends on other property.
To be overridden in child class if has any dependent objects.
return array
"""
return {}
def GetReadOnlyProperties(self):
"""Get array with read only properties - not used in response.
To be overridden in child class if has any read-only properies.
return array
"""
return []
| class Dto(object):
"""Abstract class for all DTOs (entities and their composites)."""
def __str__(self):
return str(self.__to_dict())
def __to_dict(self):
data = {}
for key in dir(self):
if key.startswith("__"): continue # Skip private fields
value = getattr(self, key)
if value is None or isinstance(value,str) or \
isinstance(value,int) or isinstance(value,float):
data[key] = value
elif isinstance(value,Dto):
data[key] = value.__to_dict()
return data
def GetSubObjects(self):
"""Get array with mapping which property is object and what type of object.
To be overridden in child class if has any sub objects.
return array
"""
return {}
def GetDependsObjects(self):
"""Get array with mapping which property depends on other property.
To be overridden in child class if has any dependent objects.
return array
"""
return {}
def GetReadOnlyProperties(self):
"""Get array with read only properties - not used in response.
To be overridden in child class if has any read-only properies.
return array
"""
return []
| Add a __str__() method to Dto to make debugging easier | Add a __str__() method to Dto to make debugging easier
| Python | mit | chocopoche/mangopay2-python-sdk,Mangopay/mangopay2-python-sdk | ---
+++
@@ -1,6 +1,25 @@
class Dto(object):
"""Abstract class for all DTOs (entities and their composites)."""
-
+
+ def __str__(self):
+ return str(self.__to_dict())
+
+ def __to_dict(self):
+ data = {}
+ for key in dir(self):
+ if key.startswith("__"): continue # Skip private fields
+
+ value = getattr(self, key)
+ if value is None or isinstance(value,str) or \
+ isinstance(value,int) or isinstance(value,float):
+
+ data[key] = value
+
+ elif isinstance(value,Dto):
+ data[key] = value.__to_dict()
+
+ return data
+
def GetSubObjects(self):
"""Get array with mapping which property is object and what type of object.
To be overridden in child class if has any sub objects. |
23b08d24405badeb88461006d29426ab452a2ac4 | hooks/post_gen_project.py | hooks/post_gen_project.py | import os
import subprocess
src = os.path.join(os.getcwd(), 'src', 'utils', 'prepare-commit-msg.py')
dst = os.path.join('.git', 'hooks', 'prepare-commit-msg')
process = subprocess.call(['git', 'init'])
os.symlink(src, dst)
| import os
import subprocess
src = os.path.join(os.getcwd(), 'src', 'utils', 'prepare-commit-msg.py')
dst = os.path.join('.git', 'hooks', 'prepare-commit-msg')
subprocess.call(['git', 'init'])
os.symlink(src, dst)
subprocess.call(['git', 'add', '-A'])
subprocess.call(['git', 'commit', '-m', 'Initial commit'])
| Add inital commit to post generate hook | Add inital commit to post generate hook
| Python | mit | Empiria/matador-cookiecutter | ---
+++
@@ -4,5 +4,7 @@
src = os.path.join(os.getcwd(), 'src', 'utils', 'prepare-commit-msg.py')
dst = os.path.join('.git', 'hooks', 'prepare-commit-msg')
-process = subprocess.call(['git', 'init'])
+subprocess.call(['git', 'init'])
os.symlink(src, dst)
+subprocess.call(['git', 'add', '-A'])
+subprocess.call(['git', 'commit', '-m', 'Initial commit']) |
6a80b3c6d27ad494bbc3c9b9d67b6445b0bbfc40 | example/sp-wsgi/service_conf.py | example/sp-wsgi/service_conf.py | from saml2.assertion import Policy
HOST = '127.0.0.1'
PORT = 8087
HTTPS = False
# Which groups of entity categories to use
POLICY = Policy(
{
"default": {"entity_categories": ["swamid", "edugain"]}
}
)
# HTTPS cert information
SERVER_CERT = "pki/ssl.crt"
SERVER_KEY = "pki/ssl.pem"
CERT_CHAIN = ""
| from saml2.assertion import Policy
HOST = '127.0.0.1'
PORT = 8087
HTTPS = False
# Which groups of entity categories to use
POLICY = Policy(
{
"default": {"entity_categories": ["swamid", "edugain"]}
}
)
# HTTPS cert information
SERVER_CERT = "pki/mycert.pem"
SERVER_KEY = "pki/mykey.pem"
CERT_CHAIN = ""
| Update example HTTPS cert & key filenames. | Update example HTTPS cert & key filenames.
pki/my{cert,key}.pem are used for request payloads; set those as
the defaults for HTTPS as well. Note that HTTPS isn't necessarily
in a working state - this just gets us a bit closer.
| Python | bsd-2-clause | tpazderka/pysaml2,tpazderka/pysaml2,Runscope/pysaml2,Runscope/pysaml2 | ---
+++
@@ -12,6 +12,6 @@
)
# HTTPS cert information
-SERVER_CERT = "pki/ssl.crt"
-SERVER_KEY = "pki/ssl.pem"
+SERVER_CERT = "pki/mycert.pem"
+SERVER_KEY = "pki/mykey.pem"
CERT_CHAIN = "" |
7bc693102a5394bb73b3df2320fca5a35bebc91f | test/test_vocab.py | test/test_vocab.py | import numpy as np
import unittest
from torchtext import vocab
from collections import Counter
class TestVocab(unittest.TestCase):
def test_vocab(self):
c = Counter(['hello', 'world'])
v = vocab.Vocab(c, vectors='glove.test_twitter.27B.200d')
self.assertEqual(v.itos, ['<unk>', '<pad>', 'hello', 'world'])
vectors = v.vectors.numpy()
# The first 5 entries in each vector.
expected_glove_twitter = {
'hello': [0.34683, -0.19612, -0.34923, -0.28158, -0.75627],
'world': [0.035771, 0.62946, 0.27443, -0.36455, 0.39189],
}
for word in ['hello', 'world']:
self.assertTrue(
np.allclose(
vectors[v.stoi[word], :5], expected_glove_twitter[word]
)
)
self.assertTrue(np.allclose(vectors[v.stoi['<unk>'], :], np.zeros(200)))
if __name__ == '__main__':
unittest.main()
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from collections import Counter
import unittest
import numpy as np
from torchtext import vocab
class TestVocab(unittest.TestCase):
def test_vocab(self):
c = Counter({'hello': 4, 'world': 3, 'แแIแOแชแฎ_TแฎแญT': 5, 'freq_too_low': 2})
v = vocab.Vocab(c, min_freq=3, specials=['<pad>', '<bos>'],
vectors='glove.test_twitter.27B.200d')
self.assertEqual(v.itos, ['<unk>', '<pad>', '<bos>',
'แแIแOแชแฎ_TแฎแญT', 'hello', 'world'])
vectors = v.vectors.numpy()
# The first 5 entries in each vector.
expected_glove_twitter = {
'hello': [0.34683, -0.19612, -0.34923, -0.28158, -0.75627],
'world': [0.035771, 0.62946, 0.27443, -0.36455, 0.39189],
}
for word in ['hello', 'world']:
self.assertTrue(
np.allclose(
vectors[v.stoi[word], :5], expected_glove_twitter[word]
)
)
self.assertTrue(np.allclose(vectors[v.stoi['<unk>'], :], np.zeros(200)))
if __name__ == '__main__':
unittest.main()
| Test vocab min_freq and specials vocab args, as well as unicode input | Test vocab min_freq and specials vocab args, as well as unicode input
| Python | bsd-3-clause | pytorch/text,pytorch/text,pytorch/text,pytorch/text | ---
+++
@@ -1,16 +1,20 @@
-import numpy as np
+# -*- coding: utf-8 -*-
+from __future__ import unicode_literals
+from collections import Counter
import unittest
+import numpy as np
from torchtext import vocab
-from collections import Counter
class TestVocab(unittest.TestCase):
def test_vocab(self):
- c = Counter(['hello', 'world'])
- v = vocab.Vocab(c, vectors='glove.test_twitter.27B.200d')
+ c = Counter({'hello': 4, 'world': 3, 'แแIแOแชแฎ_TแฎแญT': 5, 'freq_too_low': 2})
+ v = vocab.Vocab(c, min_freq=3, specials=['<pad>', '<bos>'],
+ vectors='glove.test_twitter.27B.200d')
- self.assertEqual(v.itos, ['<unk>', '<pad>', 'hello', 'world'])
+ self.assertEqual(v.itos, ['<unk>', '<pad>', '<bos>',
+ 'แแIแOแชแฎ_TแฎแญT', 'hello', 'world'])
vectors = v.vectors.numpy()
# The first 5 entries in each vector. |
e1a2898f8f54eec874ebdc17ea6eb27440f62818 | opps/articles/forms.py | opps/articles/forms.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import forms
from .models import Post, Album, Link
from opps.core.widgets import OppsEditor
class PostAdminForm(forms.ModelForm):
multiupload_link = '/fileupload/image/'
class Meta:
model = Post
widgets = {'content': OppsEditor()}
class AlbumAdminForm(forms.ModelForm):
multiupload_link = '/fileupload/image/'
class Meta:
model = Album
widgets = {
'headline': OppsEditor()
}
class LinkAdminForm(forms.ModelForm):
class Meta:
model = Link
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import forms
from .models import Post, Album, Link
from opps.core.widgets import OppsEditor
class PostAdminForm(forms.ModelForm):
multiupload_link = '/fileupload/image/'
class Meta:
model = Post
widgets = {'content': OppsEditor()}
class AlbumAdminForm(forms.ModelForm):
multiupload_link = '/fileupload/image/'
class Meta:
model = Album
widgets = {
'headline': OppsEditor()
}
class LinkAdminForm(forms.ModelForm):
class Meta:
model = Link
| Fix pep8, articles form E301 expected 1 blank line, found 0 | Fix pep8, articles form E301 expected 1 blank line, found 0
| Python | mit | opps/opps,jeanmask/opps,opps/opps,YACOWS/opps,williamroot/opps,opps/opps,williamroot/opps,williamroot/opps,YACOWS/opps,jeanmask/opps,YACOWS/opps,williamroot/opps,jeanmask/opps,YACOWS/opps,opps/opps,jeanmask/opps | ---
+++
@@ -8,14 +8,18 @@
class PostAdminForm(forms.ModelForm):
+
multiupload_link = '/fileupload/image/'
+
class Meta:
model = Post
widgets = {'content': OppsEditor()}
class AlbumAdminForm(forms.ModelForm):
+
multiupload_link = '/fileupload/image/'
+
class Meta:
model = Album
widgets = { |
b56712563e4205ccbf8b98deace4197e2f250361 | movement.py | movement.py | if __name__ == "__main__":
x, y = 0, 0
steps = 0
while True:
dir = input('Your current position is %s, %s, where would you like to move to? ' % (str(x), str(y)))
directions = { 'north': (0, 1),
'south' : (0, -1),
'east' : (1, 0),
'west' : (-1, 0)}
if dir in directions:
print("You moved %s, " % dir)
x += directions[dir][0]
y += directions[dir][1]
steps += 1
elif dir == "leave":
print("Goodbye. You moved %s steps in total." % steps)
break
else:
print("You tried to move %s but that is not possible." % dir) | if __name__ == "__main__":
x, y = 0, 0
steps = 0
while True:
dir = input('Your current position is %s, %s, where would you like to move to? ' % (str(x), str(y)))
directions = { 'north': (0, 1),
'south' : (0, -1),
'east' : (1, 0),
'west' : (-1, 0)}
abb_directions = {'n': (0, 1),
's' : (0, -1),
'e' : (1, 0),
'w' : (-1, 0)}
long_directions = {'n' : 'north', 's' : 'south', 'e' : 'east', 'w' : 'west'}
dir = dir.lower().replace(" ", "")
if dir in directions:
print("You moved %s. " % dir)
x += directions[dir][0]
y += directions[dir][1]
steps += 1
elif dir in abb_directions:
print("You moved %s. " % long_directions[dir])
x += abb_directions[dir][0]
y += abb_directions[dir][1]
steps += 1
elif dir == "leave":
print("Goodbye. You moved %s steps in total." % steps)
break
else:
print("You tried to move %s but that is not possible." % dir) | Add abbreviations and space handling | Add abbreviations and space handling
| Python | mit | mewturn/Python | ---
+++
@@ -4,17 +4,31 @@
while True:
dir = input('Your current position is %s, %s, where would you like to move to? ' % (str(x), str(y)))
- directions = { 'north': (0, 1),
+ directions = { 'north': (0, 1),
'south' : (0, -1),
'east' : (1, 0),
'west' : (-1, 0)}
-
+
+ abb_directions = {'n': (0, 1),
+ 's' : (0, -1),
+ 'e' : (1, 0),
+ 'w' : (-1, 0)}
+
+ long_directions = {'n' : 'north', 's' : 'south', 'e' : 'east', 'w' : 'west'}
+
+ dir = dir.lower().replace(" ", "")
if dir in directions:
- print("You moved %s, " % dir)
+ print("You moved %s. " % dir)
x += directions[dir][0]
y += directions[dir][1]
steps += 1
-
+
+ elif dir in abb_directions:
+ print("You moved %s. " % long_directions[dir])
+ x += abb_directions[dir][0]
+ y += abb_directions[dir][1]
+ steps += 1
+
elif dir == "leave":
print("Goodbye. You moved %s steps in total." % steps)
break |
c797481691f44f6741d2aa8491c7a112674ddaab | neb/node.py | neb/node.py | from neb.api import TrinityResource
from neb.relationship import Relationship
from neb.statistic import NodeStatistic
class Node(TrinityResource):
def create(self, node_id, **kwargs):
params = dict(id=node_id, node=kwargs)
return self.post(self._node_path(), payload=params)
def connect(self, to, type, **kwargs):
return Relationship().create(start=self.id, to=to, type=type, **kwargs)
def statistic(self, stat):
return NodeStatistic().calculate(node_id=self.id, stat=stat)
@staticmethod
def _node_path(node_id=None):
if node_id:
path = 'node/%s' % node_id
else:
path = 'node'
return path
def request(self, *args, **kwargs):
response = super(Node, self).request(*args, **kwargs)
return Node(data=response)
| from neb.api import TrinityResource
from neb.relationship import Relationship
from neb.statistic import NodeStatistic
class Node(TrinityResource):
def create(self, node_id, **kwargs):
params = dict(id=node_id, node=kwargs)
return self.post(self._node_path(), payload=params)
def connect(self, to, type, **kwargs):
if isinstance(to, Node):
to = to.id
return Relationship().create(start=self.id, to=to, type=type, **kwargs)
def statistic(self, stat):
return NodeStatistic().calculate(node_id=self.id, stat=stat)
@staticmethod
def _node_path(node_id=None):
if node_id:
path = 'node/%s' % node_id
else:
path = 'node'
return path
def request(self, *args, **kwargs):
response = super(Node, self).request(*args, **kwargs)
return Node(data=response)
| Allow Node to Node connection. | Allow Node to Node connection.
| Python | mit | peplin/neb | ---
+++
@@ -8,6 +8,8 @@
return self.post(self._node_path(), payload=params)
def connect(self, to, type, **kwargs):
+ if isinstance(to, Node):
+ to = to.id
return Relationship().create(start=self.id, to=to, type=type, **kwargs)
def statistic(self, stat): |
f1d2d809dbf77133ef10b59fafc98f5658779bbe | malaffinity/exceptions.py | malaffinity/exceptions.py | """malaffinity exceptions."""
class MALRateLimitExceededError(Exception): # noqa: D204, D205, D400
"""
Raised when MAL's blocking your request, because you're going over their
rate limit of one request every two seconds. Slow down and try again.
"""
pass
class MALAffinityException(Exception): # noqa: D204
"""Base class for MALAffinity exceptions."""
pass
class NoAffinityError(MALAffinityException): # noqa: D204, D205, D400
"""
Raised when either the shared rated anime between the base user
and another user is less than 10, the user does not have any rated
anime, or the standard deviation of either users' scores is zero.
"""
pass
class InvalidUsernameError(MALAffinityException): # noqa: D204
"""Raised when username specified does not exist."""
pass
| """malaffinity exceptions."""
class MALRateLimitExceededError(Exception): # noqa: D204, D205, D400
"""
Raised when MAL's blocking your request, because you're going over their
rate limit of one request every two seconds. Slow down and try again.
"""
pass
class MALAffinityException(Exception): # noqa: D204
"""Base class for MALAffinity exceptions."""
pass
class NoAffinityError(MALAffinityException): # noqa: D204, D205, D400
"""
Raised when either the shared rated anime between the base user
and another user is less than 11, the user does not have any rated
anime, or the standard deviation of either users' scores is zero.
"""
pass
class InvalidUsernameError(MALAffinityException): # noqa: D204
"""Raised when username specified does not exist."""
pass
| Correct incorrect information in `NoAffinityError` docstring | Correct incorrect information in `NoAffinityError` docstring
Incorrectly stated that the minimum number of shared, rated anime needed to calculate affinity was 10, when it's actually 11
| Python | mit | erkghlerngm44/malaffinity | ---
+++
@@ -17,7 +17,7 @@
class NoAffinityError(MALAffinityException): # noqa: D204, D205, D400
"""
Raised when either the shared rated anime between the base user
- and another user is less than 10, the user does not have any rated
+ and another user is less than 11, the user does not have any rated
anime, or the standard deviation of either users' scores is zero.
"""
pass |
41cebb59f673453499fd92996fc9aa1a1311f1e2 | odbc2csv.py | odbc2csv.py | import pypyodbc
import csv
conn = pypyodbc.connect("DSN=")
cur = conn.cursor()
tables = []
cur.execute("select * from sys.tables")
for row in cur.fetchall():
tables.append(row[0])
for table in tables:
cur.execute("select * from %s" % table)
column_names = []
for d in cur.description:
column_names.append(d[0])
file = open("%s.csv" % table, "w")
writer = csv.writer(file)
writer.writerow(column_names)
for row in cur.fetchall():
writer.writerow(row)
file.close()
| import pypyodbc
import csv
conn = pypyodbc.connect("DSN=")
cur = conn.cursor()
tables = []
cur.execute("select * from sys.tables")
for row in cur.fetchall():
tables.append(row[0])
for table in tables:
cur.execute("select * from %s" % table)
column_names = []
for d in cur.description:
column_names.append(d[0])
file = open("%s.csv" % table, "wb")
writer = csv.writer(file)
writer.writerow(column_names)
for row in cur.fetchall():
writer.writerow(row)
file.close()
| Write binary for CSV file. | Write binary for CSV file. | Python | isc | wablair/misc_scripts,wablair/misc_scripts,wablair/misc_scripts,wablair/misc_scripts | ---
+++
@@ -19,7 +19,7 @@
for d in cur.description:
column_names.append(d[0])
- file = open("%s.csv" % table, "w")
+ file = open("%s.csv" % table, "wb")
writer = csv.writer(file)
writer.writerow(column_names)
|
9443ba9d5cccde590aa07b2d7c74a7a4ea90fe6d | opps/urls.py | opps/urls.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, url, include
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns(
'',
#url(r'^admin/images/mass/', include('opps.images.urls',
# namespace='images', app_name='images')),
url(r'^admin/', include(admin.site.urls)),
url(r'^redactor/', include('redactor.urls')),
url(r'^sitemap', include('opps.sitemaps.urls')),
url(r'^page/', include('opps.flatpages.urls', namespace='pages',
app_name='pages')),
url(r'^', include('opps.articles.urls', namespace='articles',
app_name='articles')),
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, url, include
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^admin/', include(admin.site.urls)),
url(r'^redactor/', include('redactor.urls')),
url(r'^sitemap', include('opps.sitemaps.urls')),
url(r'^page/', include('opps.flatpages.urls', namespace='pages',
app_name='pages')),
url(r'^', include('opps.articles.urls', namespace='articles',
app_name='articles')),
)
| Remove url images mass, not used | Remove url images mass, not used
| Python | mit | YACOWS/opps,jeanmask/opps,jeanmask/opps,williamroot/opps,YACOWS/opps,williamroot/opps,jeanmask/opps,YACOWS/opps,opps/opps,opps/opps,williamroot/opps,williamroot/opps,opps/opps,YACOWS/opps,jeanmask/opps,opps/opps | ---
+++
@@ -8,8 +8,6 @@
urlpatterns = patterns(
'',
- #url(r'^admin/images/mass/', include('opps.images.urls',
- # namespace='images', app_name='images')),
url(r'^admin/', include(admin.site.urls)),
url(r'^redactor/', include('redactor.urls')), |
3112ff56e43d91d7e1bcff747dff5d434316897b | alerts/donations/currencymap.py | alerts/donations/currencymap.py | # -*- coding: utf-8 -*-
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
SYMBOLS = {
'USD': '$',
'AUD': 'A$',
'EUR': 'โฌ',
'CAD': 'C$'
}
| # -*- coding: utf-8 -*-
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
SYMBOLS = {
'USD': u'$',
'AUD': u'A$',
'EUR': u'โฌ',
'CAD': u'C$'
}
| Use unicode for currency symbols. | Use unicode for currency symbols.
| Python | apache-2.0 | google/mirandum,google/mirandum,google/mirandum,google/mirandum | ---
+++
@@ -13,8 +13,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
SYMBOLS = {
- 'USD': '$',
- 'AUD': 'A$',
- 'EUR': 'โฌ',
- 'CAD': 'C$'
+ 'USD': u'$',
+ 'AUD': u'A$',
+ 'EUR': u'โฌ',
+ 'CAD': u'C$'
} |
37c63e6ea5c14a0c7aae11581ae32f24eaaa9641 | test/layers_test.py | test/layers_test.py | import theanets
import numpy as np
class TestLayer:
def test_build(self):
layer = theanets.layers.build('feedforward', nin=2, nout=4)
assert isinstance(layer, theanets.layers.Layer)
class TestFeedforward:
def test_create(self):
l = theanets.layers.Feedforward(nin=2, nout=4)
assert l.reset() == 12
class TestTied:
def test_create(self):
l0 = theanets.layers.Feedforward(nin=2, nout=4)
l = theanets.layers.Tied(partner=l0)
assert l.reset() == 2
class TestClassifier:
def test_create(self):
l = theanets.layers.Classifier(nin=2, nout=4)
assert l.reset() == 12
class TestRecurrent:
def test_create(self):
l = theanets.layers.Recurrent(nin=2, nout=4)
assert l.reset() == 28
class TestMRNN:
def test_create(self):
l = theanets.layers.MRNN(nin=2, nout=4, factors=3)
assert l.reset() == 42
class TestLSTM:
def test_create(self):
l = theanets.layers.LSTM(nin=2, nout=4)
assert l.reset() == 124
| import theanets
import numpy as np
class TestLayer:
def test_build(self):
layer = theanets.layers.build('feedforward', nin=2, nout=4)
assert isinstance(layer, theanets.layers.Layer)
class TestFeedforward:
def test_create(self):
l = theanets.layers.Feedforward(nin=2, nout=4)
assert l.reset() == 12
class TestTied:
def test_create(self):
l0 = theanets.layers.Feedforward(nin=2, nout=4)
l = theanets.layers.Tied(partner=l0)
assert l.reset() == 2
class TestClassifier:
def test_create(self):
l = theanets.layers.Classifier(nin=2, nout=4)
assert l.reset() == 12
class TestRNN:
def test_create(self):
l = theanets.layers.RNN(nin=2, nout=4)
assert l.reset() == 28
class TestMRNN:
def test_create(self):
l = theanets.layers.MRNN(nin=2, nout=4, factors=3)
assert l.reset() == 42
class TestLSTM:
def test_create(self):
l = theanets.layers.LSTM(nin=2, nout=4)
assert l.reset() == 124
| Update layers test for RNN change. | Update layers test for RNN change.
| Python | mit | devdoer/theanets,chrinide/theanets,lmjohns3/theanets | ---
+++
@@ -24,9 +24,9 @@
l = theanets.layers.Classifier(nin=2, nout=4)
assert l.reset() == 12
-class TestRecurrent:
+class TestRNN:
def test_create(self):
- l = theanets.layers.Recurrent(nin=2, nout=4)
+ l = theanets.layers.RNN(nin=2, nout=4)
assert l.reset() == 28
class TestMRNN: |
9f952d2b060b19500f9c056ced4092d5ddc9902f | Code/Checking_threshold.py | Code/Checking_threshold.py |
def checking_threshold(a, b, avg_heart_rate):
"""checking for Tachycardia or Bradycardia
:param a: int variable, lower bound bpm
:param b: int variable, upper bound bpm
:param avg_heart_rate: array, bpm
:return: The condition string
"""
# Checks if the the heart rate is lesser or greater than the threshold
if avg_heart_rate <= a:
output = "Bradycardia"
return output
elif avg_heart_rate >= b:
output = "Tachycardia"
return output
else:
output = "Normal Heart Rate"
return output
# a=int(input("Enter the Bradycardia Threshold"))
# b=int(input("Enter the Tachycardia Threshold"))
# avg_heart_rate=72
# Checking_Threshold(a,b,avg_heart_rate)
|
def checking_threshold(a, b, avg_heart_rate):
"""
checking for Tachycardia or Bradycardia
:param a: int variable, lower bound bpm
:param b: int variable, upper bound bpm
:param avg_heart_rate: array, bpm
:return: The condition string
"""
# Checks if the the heart rate is lesser or greater than the threshold
if avg_heart_rate <= a:
output = "Bradycardia"
return output
elif avg_heart_rate >= b:
output = "Tachycardia"
return output
else:
output = "Normal Heart Rate"
return output
# a=int(input("Enter the Bradycardia Threshold"))
# b=int(input("Enter the Tachycardia Threshold"))
# avg_heart_rate=72
# Checking_Threshold(a,b,avg_heart_rate)
| Update Checking Threshold with pep8 syntax | Update Checking Threshold with pep8 syntax | Python | mit | MounikaVanka/bme590hrm,MounikaVanka/bme590hrm | ---
+++
@@ -1,6 +1,7 @@
def checking_threshold(a, b, avg_heart_rate):
- """checking for Tachycardia or Bradycardia
+ """
+ checking for Tachycardia or Bradycardia
:param a: int variable, lower bound bpm
:param b: int variable, upper bound bpm
:param avg_heart_rate: array, bpm
@@ -24,5 +25,3 @@
# b=int(input("Enter the Tachycardia Threshold"))
# avg_heart_rate=72
# Checking_Threshold(a,b,avg_heart_rate)
-
- |
5568b4674c647c979e223837d905302fd59eb546 | HARK/ConsumptionSaving/tests/test_SmallOpenEconomy.py | HARK/ConsumptionSaving/tests/test_SmallOpenEconomy.py | import copy
from HARK import distributeParams
from HARK.ConsumptionSaving.ConsAggShockModel import AggShockConsumerType, SmallOpenEconomy, init_cobb_douglas
from HARK.distribution import Uniform
import numpy as np
import unittest
class testSmallOpenEconomy(unittest.TestCase):
def test_small_open(self):
agent = AggShockConsumerType()
agent.AgentCount = 100 # Very low number of agents for the sake of speed
agent.cycles = 0
# Make agents heterogeneous in their discount factor
agents = distributeParams(agent,
'DiscFac',
3,
Uniform(bot=.90, top=.94) # Impatient agents
)
# Make an economy with those agents living in it
small_economy = SmallOpenEconomy(
agents=agents,
Rfree = 0.2,
wRte = 0.2,
KtoLnow = 1,
**copy.copy(init_cobb_douglas)
)
small_economy.act_T = 400 # Short simulation history
small_economy.max_loops = 3 # Give up quickly for the sake of time
small_economy.makeAggShkHist() # Simulate a history of aggregate shocks
small_economy.verbose = False # Turn off printed messages
# Give data about the economy to all the agents in it
for this_type in small_economy.agents:
this_type.getEconomyData(small_economy)
small_economy.solve()
| import copy
from HARK import distributeParams
from HARK.ConsumptionSaving.ConsAggShockModel import AggShockConsumerType, SmallOpenEconomy, init_cobb_douglas
from HARK.distribution import Uniform
import numpy as np
import unittest
class testSmallOpenEconomy(unittest.TestCase):
def test_small_open(self):
agent = AggShockConsumerType()
agent.AgentCount = 100 # Very low number of agents for the sake of speed
agent.cycles = 0
# Make agents heterogeneous in their discount factor
agents = distributeParams(agent,
'DiscFac',
3,
Uniform(bot=.90, top=.94) # Impatient agents
)
# Make an economy with those agents living in it
small_economy = SmallOpenEconomy(
agents=agents,
Rfree = 1.03,
wRte = 1.0,
KtoLnow = 1.0,
**copy.copy(init_cobb_douglas)
)
small_economy.act_T = 400 # Short simulation history
small_economy.max_loops = 3 # Give up quickly for the sake of time
small_economy.makeAggShkHist() # Simulate a history of aggregate shocks
small_economy.verbose = False # Turn off printed messages
# Give data about the economy to all the agents in it
for this_type in small_economy.agents:
this_type.getEconomyData(small_economy)
small_economy.solve()
| Change Rfree and wRte in SmallOpenEconomy test | Change Rfree and wRte in SmallOpenEconomy test
Interest factor was set to 20%; changing it to a more reasonable 1.03 fixed the weird interaction with the new nan_bool functionality.
| Python | apache-2.0 | econ-ark/HARK,econ-ark/HARK | ---
+++
@@ -22,9 +22,9 @@
# Make an economy with those agents living in it
small_economy = SmallOpenEconomy(
agents=agents,
- Rfree = 0.2,
- wRte = 0.2,
- KtoLnow = 1,
+ Rfree = 1.03,
+ wRte = 1.0,
+ KtoLnow = 1.0,
**copy.copy(init_cobb_douglas)
)
|
27ee536137a98a317f2cfbb2010fa5fe31037e99 | txircd/modules/cmd_user.py | txircd/modules/cmd_user.py | from twisted.words.protocols import irc
from txircd.modbase import Command
class UserCommand(Command):
def onUse(self, user, params):
if user.registered == 0:
self.sendMessage(irc.ERR_ALREADYREGISTRED, ":Unauthorized command (already registered)")
return
if params and len(params) < 4:
user.sendMessage(irc.ERR_NEEDMOREPARAMS, "USER", ":Not enough parameters")
if not user.username:
user.registered -= 1
user.username = filter(lambda x: x in string.ascii_letters + string.digits + "-_", params[0])[:12]
if not user.username:
user.registered += 1
user.sendMessage(irc.ERR_NEEDMOREPARAMS, "USER", ":Your username is not valid")
return
user.realname = params[3]
if user.registered == 0:
user.register()
def Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn():
return {
"commands": {
"USER": UserCommand()
}
}
def cleanup():
del self.ircd.commands["USER"] | from twisted.words.protocols import irc
from txircd.modbase import Command
class UserCommand(Command):
def onUse(self, user, data):
if not user.username:
user.registered -= 1
user.username = data["ident"]
user.realname = data["gecos"]
if user.registered == 0:
user.register()
def processParams(self, user, params):
if user.registered == 0:
user.sendMessage(irc.ERR_ALREADYREGISTRED, ":Unauthorized command (already registered)")
return {}
if params and len(params) < 4:
user.sendMessage(irc.ERR_NEEDMOREPARAMS, "USER", ":Not enough parameters")
return {}
ident = filter(lambda x: x in string.ascii_letters + string.digits + "-_", params[0])[:12]
if not ident:
user.sendMessage(irc.ERR_NEEDMOREPARAMS, "USER", ":Your username is not valid")
return {}
return {
"user": user,
"ident": ident,
"gecos": params[3]
}
def Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn():
return {
"commands": {
"USER": UserCommand()
}
}
def cleanup():
del self.ircd.commands["USER"] | Update the USER command to take advantage of core capabilities as well | Update the USER command to take advantage of core capabilities as well
| Python | bsd-3-clause | DesertBus/txircd,Heufneutje/txircd,ElementalAlchemist/txircd | ---
+++
@@ -2,22 +2,30 @@
from txircd.modbase import Command
class UserCommand(Command):
- def onUse(self, user, params):
+ def onUse(self, user, data):
+ if not user.username:
+ user.registered -= 1
+ user.username = data["ident"]
+ user.realname = data["gecos"]
if user.registered == 0:
- self.sendMessage(irc.ERR_ALREADYREGISTRED, ":Unauthorized command (already registered)")
- return
+ user.register()
+
+ def processParams(self, user, params):
+ if user.registered == 0:
+ user.sendMessage(irc.ERR_ALREADYREGISTRED, ":Unauthorized command (already registered)")
+ return {}
if params and len(params) < 4:
user.sendMessage(irc.ERR_NEEDMOREPARAMS, "USER", ":Not enough parameters")
- if not user.username:
- user.registered -= 1
- user.username = filter(lambda x: x in string.ascii_letters + string.digits + "-_", params[0])[:12]
- if not user.username:
- user.registered += 1
+ return {}
+ ident = filter(lambda x: x in string.ascii_letters + string.digits + "-_", params[0])[:12]
+ if not ident:
user.sendMessage(irc.ERR_NEEDMOREPARAMS, "USER", ":Your username is not valid")
- return
- user.realname = params[3]
- if user.registered == 0:
- user.register()
+ return {}
+ return {
+ "user": user,
+ "ident": ident,
+ "gecos": params[3]
+ }
def Spawner(object):
def __init__(self, ircd): |
939319ddece1925c8c3152f4437b4848749b85b3 | config/fuzz_pox_mesh.py | config/fuzz_pox_mesh.py | from experiment_config_lib import ControllerConfig
from sts.topology import MeshTopology
from sts.control_flow import Fuzzer
from sts.input_traces.input_logger import InputLogger
from sts.invariant_checker import InvariantChecker
from sts.simulation_state import SimulationConfig
# Use POX as our controller
command_line = "./pox.py --verbose --no-cli openflow.of_01 --address=../sts_socket_pipe sts.syncproto.pox_syncer samples.topo forwarding.l2_multi messenger.messenger samples.nommessenger"
controllers = [ControllerConfig(command_line, address="sts_socket_pipe", cwd="pox", sync="tcp:localhost:18899")]
topology_class = MeshTopology
topology_params = "num_switches=2"
dataplane_trace = "dataplane_traces/ping_pong_same_subnet.trace"
simulation_config = SimulationConfig(controller_configs=controllers,
topology_class=topology_class,
topology_params=topology_params,
dataplane_trace=dataplane_trace)
control_flow = Fuzzer(simulation_config, check_interval=1, halt_on_violation=True,
input_logger=InputLogger(),
invariant_check=InvariantChecker.check_liveness)
| from experiment_config_lib import ControllerConfig
from sts.topology import MeshTopology
from sts.control_flow import Fuzzer
from sts.input_traces.input_logger import InputLogger
from sts.invariant_checker import InvariantChecker
from sts.simulation_state import SimulationConfig
# Use POX as our controller
command_line = ('''./pox.py --verbose --no-cli sts.syncproto.pox_syncer '''
'''samples.topo forwarding.l2_multi '''
'''sts.util.socket_mux.pox_monkeypatcher '''
'''openflow.of_01 --address=../sts_socket_pipe''')
controllers = [ControllerConfig(command_line, address="sts_socket_pipe", cwd="pox", sync="tcp:localhost:18899")]
topology_class = MeshTopology
topology_params = "num_switches=2"
dataplane_trace = "dataplane_traces/ping_pong_same_subnet.trace"
simulation_config = SimulationConfig(controller_configs=controllers,
topology_class=topology_class,
topology_params=topology_params,
dataplane_trace=dataplane_trace,
monkey_patch_select=True)
control_flow = Fuzzer(simulation_config, check_interval=1, halt_on_violation=True,
input_logger=InputLogger(),
invariant_check=InvariantChecker.check_liveness)
| Add a config that exercises the Multiplexed socketS | Add a config that exercises the Multiplexed socketS
| Python | apache-2.0 | ucb-sts/sts,ucb-sts/sts,jmiserez/sts,jmiserez/sts | ---
+++
@@ -6,7 +6,10 @@
from sts.simulation_state import SimulationConfig
# Use POX as our controller
-command_line = "./pox.py --verbose --no-cli openflow.of_01 --address=../sts_socket_pipe sts.syncproto.pox_syncer samples.topo forwarding.l2_multi messenger.messenger samples.nommessenger"
+command_line = ('''./pox.py --verbose --no-cli sts.syncproto.pox_syncer '''
+ '''samples.topo forwarding.l2_multi '''
+ '''sts.util.socket_mux.pox_monkeypatcher '''
+ '''openflow.of_01 --address=../sts_socket_pipe''')
controllers = [ControllerConfig(command_line, address="sts_socket_pipe", cwd="pox", sync="tcp:localhost:18899")]
topology_class = MeshTopology
@@ -16,7 +19,8 @@
simulation_config = SimulationConfig(controller_configs=controllers,
topology_class=topology_class,
topology_params=topology_params,
- dataplane_trace=dataplane_trace)
+ dataplane_trace=dataplane_trace,
+ monkey_patch_select=True)
control_flow = Fuzzer(simulation_config, check_interval=1, halt_on_violation=True,
input_logger=InputLogger(), |
7ed304238a3c30c9dfa9e2dc03c53ec068d78a80 | pipenv/environments.py | pipenv/environments.py | import os
# Prevent invalid shebangs with Homebrew-installed Python:
# https://bugs.python.org/issue22490
os.environ.pop('__PYVENV_LAUNCHER__', None)
# Shell compatibility mode, for mis-configured shells.
PIPENV_SHELL_COMPAT = os.environ.get('PIPENV_SHELL_COMPAT')
# Create the virtualenv in the project, isntead of with pew.
PIPENV_VENV_IN_PROJECT = os.environ.get('PIPENV_VENV_IN_PROJECT')
# No color mode, for unfun people.
PIPENV_COLORBLIND = os.environ.get('PIPENV_COLORBLIND')
# Disable spinner for better test and deploy logs (for the unworthy).
PIPENV_NOSPIN = os.environ.get('PIPENV_NOSPIN')
# User-configuraable max-depth for Pipfile searching.
# Note: +1 because of a temporary bug in Pipenv.
PIPENV_MAX_DEPTH = int(os.environ.get('PIPENV_MAX_DEPTH', 3)) + 1
# Use shell compatibility mode when using venv in project mode.
if PIPENV_VENV_IN_PROJECT:
PIPENV_SHELL_COMPAT = True
| import os
# Prevent invalid shebangs with Homebrew-installed Python:
# https://bugs.python.org/issue22490
os.environ.pop('__PYVENV_LAUNCHER__', None)
# Shell compatibility mode, for mis-configured shells.
PIPENV_SHELL_COMPAT = os.environ.get('PIPENV_SHELL_COMPAT')
# Create the virtualenv in the project, isntead of with pew.
PIPENV_VENV_IN_PROJECT = os.environ.get('PIPENV_VENV_IN_PROJECT')
# No color mode, for unfun people.
PIPENV_COLORBLIND = os.environ.get('PIPENV_COLORBLIND')
# Disable spinner for better test and deploy logs (for the unworthy).
PIPENV_NOSPIN = os.environ.get('PIPENV_NOSPIN')
# User-configuraable max-depth for Pipfile searching.
# Note: +1 because of a temporary bug in Pipenv.
PIPENV_MAX_DEPTH = int(os.environ.get('PIPENV_MAX_DEPTH', '3')) + 1
# Use shell compatibility mode when using venv in project mode.
if PIPENV_VENV_IN_PROJECT:
PIPENV_SHELL_COMPAT = True
| Use string as a default value for PIPENV_MAX_DEPTH | Use string as a default value for PIPENV_MAX_DEPTH
| Python | mit | adrianliaw/pipenv,nateprewitt/pipenv,kennethreitz/pipenv | ---
+++
@@ -20,7 +20,7 @@
# User-configuraable max-depth for Pipfile searching.
# Note: +1 because of a temporary bug in Pipenv.
-PIPENV_MAX_DEPTH = int(os.environ.get('PIPENV_MAX_DEPTH', 3)) + 1
+PIPENV_MAX_DEPTH = int(os.environ.get('PIPENV_MAX_DEPTH', '3')) + 1
# Use shell compatibility mode when using venv in project mode.
if PIPENV_VENV_IN_PROJECT: |
dc05182f04dcebf61d368fe9f834b37d75b59bfd | Lib/fontmake/errors.py | Lib/fontmake/errors.py | class FontmakeError(Exception):
"""Base class for all fontmake exceptions."""
pass
class TTFAError(FontmakeError):
def __init__(self, exitcode):
self.exitcode = exitcode
def __str__(self):
return "ttfautohint command failed: error " + str(self.exitcode)
| import os
class FontmakeError(Exception):
"""Base class for all fontmake exceptions.
This exception is intended to be chained to the original exception. The
main purpose is to provide a source file trail that points to where the
explosion came from.
"""
def __init__(self, msg, source_file):
self.msg = msg
self.source_trail = [source_file]
def __str__(self):
trail = " -> ".join(
f"'{str(os.path.relpath(s))}'"
for s in reversed(self.source_trail)
if s is not None
)
cause = str(self.__cause__) if self.__cause__ is not None else None
message = ""
if trail:
message = f"In {trail}: "
message += f"{self.msg}"
if cause:
message += f": {cause}"
return message
class TTFAError(FontmakeError):
def __init__(self, exitcode, source_file):
self.exitcode = exitcode
self.source_trail = source_file
def __str__(self):
return (
f"ttfautohint failed for '{str(os.path.relpath(self.source_trail))}': "
f"error code {str(self.exitcode)}."
)
| Add source trail logic to FontmakeError and partly TTFAError | Add source trail logic to FontmakeError and partly TTFAError
| Python | apache-2.0 | googlei18n/fontmake,googlei18n/fontmake,googlefonts/fontmake,googlefonts/fontmake | ---
+++
@@ -1,12 +1,43 @@
+import os
+
+
class FontmakeError(Exception):
- """Base class for all fontmake exceptions."""
+ """Base class for all fontmake exceptions.
- pass
+ This exception is intended to be chained to the original exception. The
+ main purpose is to provide a source file trail that points to where the
+ explosion came from.
+ """
+
+ def __init__(self, msg, source_file):
+ self.msg = msg
+ self.source_trail = [source_file]
+
+ def __str__(self):
+ trail = " -> ".join(
+ f"'{str(os.path.relpath(s))}'"
+ for s in reversed(self.source_trail)
+ if s is not None
+ )
+ cause = str(self.__cause__) if self.__cause__ is not None else None
+
+ message = ""
+ if trail:
+ message = f"In {trail}: "
+ message += f"{self.msg}"
+ if cause:
+ message += f": {cause}"
+
+ return message
class TTFAError(FontmakeError):
- def __init__(self, exitcode):
+ def __init__(self, exitcode, source_file):
self.exitcode = exitcode
+ self.source_trail = source_file
def __str__(self):
- return "ttfautohint command failed: error " + str(self.exitcode)
+ return (
+ f"ttfautohint failed for '{str(os.path.relpath(self.source_trail))}': "
+ f"error code {str(self.exitcode)}."
+ ) |
232db259f2c202e60692563ec05b456b5158449e | django_replicated/router.py | django_replicated/router.py | # -*- coding:utf-8 -*-
import random
from django.db.utils import DEFAULT_DB_ALIAS
from django.conf import settings
class ReplicationRouter(object):
def __init__(self):
self.state_stack = ['master']
self._state_change_enabled = True
def set_state_change(self, enabled):
self._state_change_enabled = enabled
def state(self):
'''
Current state of routing: 'master' or 'slave'.
'''
return self.state_stack[-1]
def use_state(self, state):
'''
Switches router into a new state. Requires a paired call
to 'revert' for reverting to previous state.
'''
if not self._state_change_enabled:
state = self.state()
self.state_stack.append(state)
return self
def revert(self):
'''
Reverts wrapper state to a previous value after calling
'use_state'.
'''
self.state_stack.pop()
def db_for_write(self, model, **hints):
return DEFAULT_DB_ALIAS
def db_for_read(self, model, **hints):
if self.state() == 'master':
return self.db_for_write(model, **hints)
slaves = getattr(settings, 'DATABASE_SLAVES', [DEFAULT_DB_ALIAS])
return random.choice(slaves)
| # -*- coding:utf-8 -*-
import random
from django.db import connections
from django.db.utils import DEFAULT_DB_ALIAS
from django.conf import settings
def is_alive(db):
try:
if db.connection is not None and hasattr(db.connection, 'ping'):
db.connection.ping()
else:
db.cursor()
return True
except StandardError:
return False
class ReplicationRouter(object):
def __init__(self):
self.state_stack = ['master']
self._state_change_enabled = True
def set_state_change(self, enabled):
self._state_change_enabled = enabled
def state(self):
'''
Current state of routing: 'master' or 'slave'.
'''
return self.state_stack[-1]
def use_state(self, state):
'''
Switches router into a new state. Requires a paired call
to 'revert' for reverting to previous state.
'''
if not self._state_change_enabled:
state = self.state()
self.state_stack.append(state)
return self
def revert(self):
'''
Reverts wrapper state to a previous value after calling
'use_state'.
'''
self.state_stack.pop()
def db_for_write(self, model, **hints):
return DEFAULT_DB_ALIAS
def db_for_read(self, model, **hints):
if self.state() == 'master':
return self.db_for_write(model, **hints)
slaves = getattr(settings, 'DATABASE_SLAVES', [DEFAULT_DB_ALIAS])
random.shuffle(slaves)
for slave in slaves:
if is_alive(connections[slave]):
return slave
else:
return DEFAULT_DB_ALIAS
| Check if slaves are alive and fallback to other slaves and eventually to master. | Check if slaves are alive and fallback to other slaves and eventually to master.
| Python | bsd-3-clause | dmirain/django_replicated,Zunonia/django_replicated,lavr/django_replicated | ---
+++
@@ -1,8 +1,20 @@
# -*- coding:utf-8 -*-
import random
+from django.db import connections
from django.db.utils import DEFAULT_DB_ALIAS
from django.conf import settings
+
+
+def is_alive(db):
+ try:
+ if db.connection is not None and hasattr(db.connection, 'ping'):
+ db.connection.ping()
+ else:
+ db.cursor()
+ return True
+ except StandardError:
+ return False
class ReplicationRouter(object):
@@ -43,4 +55,9 @@
if self.state() == 'master':
return self.db_for_write(model, **hints)
slaves = getattr(settings, 'DATABASE_SLAVES', [DEFAULT_DB_ALIAS])
- return random.choice(slaves)
+ random.shuffle(slaves)
+ for slave in slaves:
+ if is_alive(connections[slave]):
+ return slave
+ else:
+ return DEFAULT_DB_ALIAS |
ae9392137c66832e2e4fa0a51938aad2e6fdb8a4 | django_q/__init__.py | django_q/__init__.py | import os
import sys
import django
myPath = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, myPath)
VERSION = (0, 9, 2)
default_app_config = 'django_q.apps.DjangoQConfig'
# root imports will slowly be deprecated.
# please import from the relevant sub modules
if django.VERSION[:2] < (1, 9):
from .tasks import async, schedule, result, result_group, fetch, fetch_group, count_group, delete_group, queue_size
from .models import Task, Schedule, Success, Failure
from .cluster import Cluster
from .status import Stat
from .brokers import get_broker
__all__ = ['conf', 'cluster', 'models', 'tasks']
| # import os
# import sys
import django
# myPath = os.path.dirname(os.path.abspath(__file__))
# sys.path.insert(0, myPath)
VERSION = (0, 9, 2)
default_app_config = 'django_q.apps.DjangoQConfig'
# root imports will slowly be deprecated.
# please import from the relevant sub modules
if django.VERSION[:2] < (1, 9):
from .tasks import async, schedule, result, result_group, fetch, fetch_group, count_group, delete_group, queue_size
from .models import Task, Schedule, Success, Failure
from .cluster import Cluster
from .status import Stat
from .brokers import get_broker
__all__ = ['conf', 'cluster', 'models', 'tasks']
| Change path location of django q | Change path location of django q
| Python | mit | Koed00/django-q | ---
+++
@@ -1,9 +1,9 @@
-import os
-import sys
+# import os
+# import sys
import django
-myPath = os.path.dirname(os.path.abspath(__file__))
-sys.path.insert(0, myPath)
+# myPath = os.path.dirname(os.path.abspath(__file__))
+# sys.path.insert(0, myPath)
VERSION = (0, 9, 2)
|
c13dbbc35faf567cb7a10ccacb1fcd070c8773c1 | llvmlite/binding/common.py | llvmlite/binding/common.py | import atexit
def _encode_string(s):
encoded = s.encode('latin1')
return encoded
def _decode_string(b):
return b.decode('latin1')
_encode_string.__doc__ = """Encode a string for use by LLVM."""
_decode_string.__doc__ = """Decode a LLVM character (byte)string."""
_shutting_down = [False]
def _at_shutdown():
_shutting_down[0] = True
atexit.register(_at_shutdown)
def _is_shutting_down(_shutting_down=_shutting_down):
"""
Whether the interpreter is currently shutting down.
For use in finalizers, __del__ methods, and similar; it is advised
to early bind this function rather than look it up when calling it,
since at shutdown module globals may be cleared.
"""
return _shutting_down[0]
| import atexit
def _encode_string(s):
encoded = s.encode('utf-8')
return encoded
def _decode_string(b):
return b.decode('utf-8')
_encode_string.__doc__ = """Encode a string for use by LLVM."""
_decode_string.__doc__ = """Decode a LLVM character (byte)string."""
_shutting_down = [False]
def _at_shutdown():
_shutting_down[0] = True
atexit.register(_at_shutdown)
def _is_shutting_down(_shutting_down=_shutting_down):
"""
Whether the interpreter is currently shutting down.
For use in finalizers, __del__ methods, and similar; it is advised
to early bind this function rather than look it up when calling it,
since at shutdown module globals may be cleared.
"""
return _shutting_down[0]
| Switch encoding to UTF-8 from latin1 | Switch encoding to UTF-8 from latin1
This change was originally made in PR #53, but may no longer be required
(and may cause issues with comments in IR that use non-latin1
characters).
| Python | bsd-2-clause | numba/llvmlite,numba/llvmlite,numba/llvmlite,numba/llvmlite | ---
+++
@@ -2,12 +2,12 @@
def _encode_string(s):
- encoded = s.encode('latin1')
+ encoded = s.encode('utf-8')
return encoded
def _decode_string(b):
- return b.decode('latin1')
+ return b.decode('utf-8')
_encode_string.__doc__ = """Encode a string for use by LLVM.""" |
d6bc297b71c9cb2bce45bdcd20f99f9fe642cf01 | plotting.py | plotting.py | #!/usr/bin/env python
""" Set of helper function and variables for plotting.
This module provides a set of functions and variables that will be useful for
plotting.
"""
class ColorMarker:
def __init__(self):
# A list of colors
self._colors = ['k', 'b', 'g', 'c', 'm', 'y']
# A list of markers
self._markers = ['o', 's', '^', 'D', 'd', 'h', 'x', '*', '+', 'v', '<', '>', '1', '2', '3', '4', '8', 'p', 'H']
def get_colors(self):
""" Get a set of color/marker combinations.
:rtype: list of tuple
:returns: A list of tuples containing color|marker pairs. There are a total
of 114 combinations. Red and white are not used in this color scheme.
Red is reserved for coloring points beyond a threshold, and white does not
show up on white backgrounds.
"""
comb = list()
for i in self._markers:
for j in self._colors:
comb.append((j, i))
return comb
| #!/usr/bin/env python
""" Set of helper function and variables for plotting.
This module provides a set of functions and variables that will be useful for
plotting.
"""
class ColorMarker:
def __init__(self):
# A list of colors
self._colors = ['k', 'b', 'g', 'c', 'm', 'y']
# A list of markers
self._markers = ['o', 's', '^', 'D', 'd', 'h', 'x', '*', '+', 'v', '<', '>', '1', '2', '3', '4', '8', 'p', 'H']
def get_colors(self):
""" Get a set of color/marker combinations.
:rtype: list of tuple
:returns: A list of tuples containing color|marker pairs. There are a total
of 114 combinations. Red and white are not used in this color scheme.
Red is reserved for coloring points beyond a threshold, and white does not
show up on white backgrounds.
"""
comb = list()
for marker in self._markers:
for color in self._colors:
comb.append((color, marker))
return comb
| Fix naming to folliwng naming conventions for mclab | Fix naming to folliwng naming conventions for mclab
| Python | mit | secimTools/SECIMTools,secimTools/SECIMTools,secimTools/SECIMTools | ---
+++
@@ -25,8 +25,8 @@
"""
comb = list()
- for i in self._markers:
- for j in self._colors:
- comb.append((j, i))
+ for marker in self._markers:
+ for color in self._colors:
+ comb.append((color, marker))
return comb |
a670b598f4416b0e99acd7442e5a51295a5daaa3 | tests/test_utils.py | tests/test_utils.py | import os
import time
import unittest
from helpers.utils import sigchld_handler, sigterm_handler, sleep
def nop(*args, **kwargs):
pass
def os_waitpid(a, b):
return (0, 0)
def time_sleep(_):
sigchld_handler(None, None)
class TestUtils(unittest.TestCase):
def __init__(self, method_name='runTest'):
self.setUp = self.set_up
self.tearDown = self.tear_down
super(TestUtils, self).__init__(method_name)
def set_up(self):
self.time_sleep = time.sleep
time.sleep = nop
def tear_down(self):
time.sleep = self.time_sleep
def test_sigterm_handler(self):
self.assertRaises(SystemExit, sigterm_handler, None, None)
def test_sigchld_handler(self):
sigchld_handler(None, None)
os.waitpid = os_waitpid
sigchld_handler(None, None)
def test_sleep(self):
time.sleep = time_sleep
sleep(0.01)
| import os
import time
import unittest
from helpers.utils import reap_children, sigchld_handler, sigterm_handler, sleep
def nop(*args, **kwargs):
pass
def os_waitpid(a, b):
return (0, 0)
def time_sleep(_):
sigchld_handler(None, None)
class TestUtils(unittest.TestCase):
def __init__(self, method_name='runTest'):
self.setUp = self.set_up
self.tearDown = self.tear_down
super(TestUtils, self).__init__(method_name)
def set_up(self):
self.time_sleep = time.sleep
time.sleep = nop
def tear_down(self):
time.sleep = self.time_sleep
def test_sigterm_handler(self):
self.assertRaises(SystemExit, sigterm_handler, None, None)
def test_reap_children(self):
reap_children()
os.waitpid = os_waitpid
sigchld_handler(None, None)
reap_children()
def test_sleep(self):
time.sleep = time_sleep
sleep(0.01)
| Implement unit test for reap_children function | Implement unit test for reap_children function
| Python | mit | jinty/patroni,sean-/patroni,jinty/patroni,pgexperts/patroni,sean-/patroni,zalando/patroni,pgexperts/patroni,zalando/patroni | ---
+++
@@ -2,7 +2,7 @@
import time
import unittest
-from helpers.utils import sigchld_handler, sigterm_handler, sleep
+from helpers.utils import reap_children, sigchld_handler, sigterm_handler, sleep
def nop(*args, **kwargs):
@@ -34,10 +34,11 @@
def test_sigterm_handler(self):
self.assertRaises(SystemExit, sigterm_handler, None, None)
- def test_sigchld_handler(self):
- sigchld_handler(None, None)
+ def test_reap_children(self):
+ reap_children()
os.waitpid = os_waitpid
sigchld_handler(None, None)
+ reap_children()
def test_sleep(self):
time.sleep = time_sleep |
d0901a36de4d7ef71bf615131f48e6333d93c2b0 | tests/project/settings.py | tests/project/settings.py | from os.path import dirname, join, abspath
BASE_DIR = dirname(abspath(__file__))
INSTALLED_APPS = [
'django.contrib.staticfiles',
'markitup',
]
TEMPLATE_DIRS = [join(BASE_DIR, 'templates')]
ROOT_URLCONF = 'tests.project.urls'
MARKITUP_FILTER = ('markdown.markdown', {'safe_mode': True})
MARKITUP_SET = 'markitup/sets/markdown/' # Default includes trailing slash so that others know it's a directory
DEBUG = True
STATICFILES_DIRS = [join(BASE_DIR, 'static')]
STATIC_URL = '/static/'
SECRET_KEY = 'secret'
ALLOWED_HOSTS = ['localhost']
| from os.path import dirname, join, abspath
BASE_DIR = dirname(abspath(__file__))
INSTALLED_APPS = [
'django.contrib.staticfiles',
'markitup',
]
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
join(BASE_DIR, 'templates'),
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.contrib.auth.context_processors.auth',
'django.template.context_processors.debug',
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.static',
'django.template.context_processors.tz',
'django.contrib.messages.context_processors.messages',
],
},
},
]
ROOT_URLCONF = 'tests.project.urls'
MARKITUP_FILTER = ('markdown.markdown', {'safe_mode': True})
MARKITUP_SET = 'markitup/sets/markdown/' # Default includes trailing slash so that others know it's a directory
DEBUG = True
STATICFILES_DIRS = [join(BASE_DIR, 'static')]
STATIC_URL = '/static/'
SECRET_KEY = 'secret'
ALLOWED_HOSTS = ['localhost']
| Use TEMPLATES setting in tests. | Use TEMPLATES setting in tests.
| Python | bsd-3-clause | zsiciarz/django-markitup,carljm/django-markitup,zsiciarz/django-markitup,carljm/django-markitup,carljm/django-markitup,zsiciarz/django-markitup | ---
+++
@@ -7,7 +7,26 @@
'markitup',
]
-TEMPLATE_DIRS = [join(BASE_DIR, 'templates')]
+TEMPLATES = [
+ {
+ 'BACKEND': 'django.template.backends.django.DjangoTemplates',
+ 'DIRS': [
+ join(BASE_DIR, 'templates'),
+ ],
+ 'APP_DIRS': True,
+ 'OPTIONS': {
+ 'context_processors': [
+ 'django.contrib.auth.context_processors.auth',
+ 'django.template.context_processors.debug',
+ 'django.template.context_processors.i18n',
+ 'django.template.context_processors.media',
+ 'django.template.context_processors.static',
+ 'django.template.context_processors.tz',
+ 'django.contrib.messages.context_processors.messages',
+ ],
+ },
+ },
+]
ROOT_URLCONF = 'tests.project.urls'
|
778632bc28d39bc697cae445f8ed4c33689f8d82 | rest/messages/generate-twiml-sms-voice/example-1.py | rest/messages/generate-twiml-sms-voice/example-1.py | from flask import Flask, request, redirect
import twilio.twiml
app = Flask(__name__)
@app.route("/", methods=['GET', 'POST'])
def hello_monkey():
"""Respond to incoming calls with a simple text message."""
resp = twilio.twiml.Response()
resp.say("Hello! You will get an SMS message soon.")
resp.sms("This is the ship that made the Kessel Run in fourteen parsecs?")
return str(resp)
if __name__ == "__main__":
app.run(debug=True) | from flask import Flask, request
from twilio import twiml
app = Flask(__name__)
@app.route("/voice", methods=['GET', 'POST'])
def voice():
"""Respond to incoming phone calls with a text message."""
# Start our TwiML response
resp = twiml.Response()
# Read a message aloud to the caller
resp.say("Hello! You will get an SMS message soon.")
# Also tell Twilio to send a text message to the caller
resp.sms("This is the ship that made the Kessel Run in fourteen parsecs?")
return str(resp)
if __name__ == "__main__":
app.run(debug=True)
| Clean up Send SMS and MMS Python example | Clean up Send SMS and MMS Python example
| Python | mit | TwilioDevEd/api-snippets,teoreteetik/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,teoreteetik/api-snippets,teoreteetik/api-snippets,teoreteetik/api-snippets,teoreteetik/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,teoreteetik/api-snippets,teoreteetik/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,teoreteetik/api-snippets,teoreteetik/api-snippets,TwilioDevEd/api-snippets | ---
+++
@@ -1,15 +1,21 @@
-from flask import Flask, request, redirect
-import twilio.twiml
+from flask import Flask, request
+from twilio import twiml
+
app = Flask(__name__)
-@app.route("/", methods=['GET', 'POST'])
-def hello_monkey():
- """Respond to incoming calls with a simple text message."""
+@app.route("/voice", methods=['GET', 'POST'])
+def voice():
+ """Respond to incoming phone calls with a text message."""
+ # Start our TwiML response
+ resp = twiml.Response()
- resp = twilio.twiml.Response()
+ # Read a message aloud to the caller
resp.say("Hello! You will get an SMS message soon.")
+
+ # Also tell Twilio to send a text message to the caller
resp.sms("This is the ship that made the Kessel Run in fourteen parsecs?")
+
return str(resp)
if __name__ == "__main__": |
1065f63e29c9b31f55ed1986c409fc85f1aa26e3 | linter.py | linter.py | #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Aparajita Fishman
# Copyright (c) 2013 Aparajita Fishman
#
# Project: https://github.com/SublimeLinter/SublimeLinter-contrib-json
# License: MIT
#
"""This module exports the JSON plugin linter class."""
import json
from SublimeLinter.lint import Linter
class JSON(Linter):
"""Provides an interface to json.loads()."""
language = 'json'
cmd = None
regex = r'^(?P<message>.+):\s*line (?P<line>\d+) column (?P<col>\d+)'
def run(self, cmd, code):
"""Attempt to parse code as JSON, return '' if it succeeds, the error message if it fails."""
try:
json.loads(code)
return ''
except ValueError as err:
return str(err)
| #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Aparajita Fishman
# Copyright (c) 2013 Aparajita Fishman
#
# Project: https://github.com/SublimeLinter/SublimeLinter-contrib-json
# License: MIT
#
"""This module exports the JSON plugin linter class."""
import json
from SublimeLinter.lint import Linter
class JSON(Linter):
"""Provides an interface to json.loads()."""
syntax = 'json'
cmd = None
regex = r'^(?P<message>.+):\s*line (?P<line>\d+) column (?P<col>\d+)'
def run(self, cmd, code):
"""Attempt to parse code as JSON, return '' if it succeeds, the error message if it fails."""
try:
json.loads(code)
return ''
except ValueError as err:
return str(err)
| Change 'language' to 'syntax', that is more precise terminology. | Change 'language' to 'syntax', that is more precise terminology.
| Python | mit | SublimeLinter/SublimeLinter-json | ---
+++
@@ -20,7 +20,7 @@
"""Provides an interface to json.loads()."""
- language = 'json'
+ syntax = 'json'
cmd = None
regex = r'^(?P<message>.+):\s*line (?P<line>\d+) column (?P<col>\d+)'
|
8c19b6dafa599dc284bb8ef740aa0426d9246dc6 | linter.py | linter.py | #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Bruno JJE
# Copyright (c) 2015 Bruno JJE
#
# License: MIT
#
"""This module exports the Ghdl plugin class."""
from SublimeLinter.lint import Linter
class Ghdl(Linter):
"""Provides an interface to ghdl."""
syntax = 'vhdl'
cmd = 'ghdl -a @'
version_re = r'GHDL (?P<version>\d+\.\d+)'
version_requirement = '>= 0.31'
tempfile_suffix = '-'
# Here is a sample ghdl error output:
# ----8<------------
# filtre8.vhd:35:3: object class keyword such as 'variable' is expected
# ----8<------------
regex = (
r"^(?P<path>.*)(?P<error>:)(?P<line>[0-9]+):(?P<col>[0-9]+)"
r": (?P<message>.*)"
)
def split_match(self, match):
"""
Extract and return values from match.
We override this method to prefix the error message with the
linter name.
"""
match, line, col, error, warning, message, near = super().split_match(match)
# Not sure the filename check is required, but we do it
# anyway just in case...
if match and match.group('path') != self.filename:
match = None
if match:
message = '[ghdl] ' + message
return match, line, col, error, warning, message, near
| #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Bruno JJE
# Copyright (c) 2015 Bruno JJE
#
# License: MIT
#
"""This module exports the Ghdl plugin class."""
from SublimeLinter.lint import Linter
class Ghdl(Linter):
"""Provides an interface to ghdl."""
syntax = 'vhdl'
cmd = 'ghdl -a @'
version_re = r'GHDL (?P<version>\d+\.\d+)'
version_requirement = '>= 0.31'
tempfile_suffix = 'vhd'
# Here is a sample ghdl error output:
# ----8<------------
# filtre8.vhd:35:3: object class keyword such as 'variable' is expected
# ----8<------------
regex = (
r"^(?P<path>.*)(?P<error>:)(?P<line>[0-9]+):(?P<col>[0-9]+)"
r": (?P<message>.*)"
)
def split_match(self, match):
"""
Extract and return values from match.
We override this method to prefix the error message with the
linter name.
"""
match, line, col, error, warning, message, near = super().split_match(match)
if match:
message = '[ghdl] ' + message
return match, line, col, error, warning, message, near
| Change 'tempfile_suffix' and remove filename check. | Change 'tempfile_suffix' and remove filename check.
| Python | mit | BrunoJJE/SublimeLinter-contrib-ghdl | ---
+++
@@ -21,7 +21,7 @@
cmd = 'ghdl -a @'
version_re = r'GHDL (?P<version>\d+\.\d+)'
version_requirement = '>= 0.31'
- tempfile_suffix = '-'
+ tempfile_suffix = 'vhd'
# Here is a sample ghdl error output:
# ----8<------------
@@ -44,11 +44,6 @@
match, line, col, error, warning, message, near = super().split_match(match)
- # Not sure the filename check is required, but we do it
- # anyway just in case...
- if match and match.group('path') != self.filename:
- match = None
-
if match:
message = '[ghdl] ' + message
|
3738df68d89e8eb0743378ecb89659e44cbb999d | troposphere/qldb.py | troposphere/qldb.py | # Copyright (c) 2012-2019, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 6.1.0
from . import AWSObject
from troposphere import Tags
from .validators import boolean
class Ledger(AWSObject):
resource_type = "AWS::QLDB::Ledger"
props = {
'DeletionProtection': (boolean, False),
'Name': (basestring, False),
'PermissionsMode': (basestring, True),
'Tags': (Tags, False),
}
| # Copyright (c) 2012-2019, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 16.1.0
from . import AWSObject
from . import AWSProperty
from troposphere import Tags
from .validators import boolean
class Ledger(AWSObject):
resource_type = "AWS::QLDB::Ledger"
props = {
'DeletionProtection': (boolean, False),
'Name': (basestring, False),
'PermissionsMode': (basestring, True),
'Tags': (Tags, False),
}
class KinesisConfiguration(AWSProperty):
props = {
'AggregationEnabled': (boolean, False),
'StreamArn': (basestring, False),
}
class Stream(AWSObject):
resource_type = "AWS::QLDB::Stream"
props = {
'ExclusiveEndTime': (basestring, False),
'InclusiveStartTime': (basestring, True),
'KinesisConfiguration': (KinesisConfiguration, True),
'LedgerName': (basestring, True),
'RoleArn': (basestring, True),
'StreamName': (basestring, True),
'Tags': (Tags, False),
}
| Add AWS::QLDB::Stream per 2020-07-08 update | Add AWS::QLDB::Stream per 2020-07-08 update
| Python | bsd-2-clause | cloudtools/troposphere,cloudtools/troposphere | ---
+++
@@ -4,10 +4,11 @@
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
-# Resource specification version: 6.1.0
+# Resource specification version: 16.1.0
from . import AWSObject
+from . import AWSProperty
from troposphere import Tags
from .validators import boolean
@@ -21,3 +22,24 @@
'PermissionsMode': (basestring, True),
'Tags': (Tags, False),
}
+
+
+class KinesisConfiguration(AWSProperty):
+ props = {
+ 'AggregationEnabled': (boolean, False),
+ 'StreamArn': (basestring, False),
+ }
+
+
+class Stream(AWSObject):
+ resource_type = "AWS::QLDB::Stream"
+
+ props = {
+ 'ExclusiveEndTime': (basestring, False),
+ 'InclusiveStartTime': (basestring, True),
+ 'KinesisConfiguration': (KinesisConfiguration, True),
+ 'LedgerName': (basestring, True),
+ 'RoleArn': (basestring, True),
+ 'StreamName': (basestring, True),
+ 'Tags': (Tags, False),
+ } |
96439cb26a09158f112541025a6c2901b983eae9 | tests/test_pay_onetime.py | tests/test_pay_onetime.py | # -*- coding: utf-8 -*-
def test_pay_onetime(iamport):
# Without 'card_number'
payload_notEnough = {
'merchant_uid': 'qwer1234',
'amount': 5000,
'expiry': '2019-03',
'birth': '500203',
'pwd_2digit': '19'
}
try:
iamport.pay_onetime(**payload_notEnough)
except KeyError as e:
assert "Essential parameter is missing!: card_number" in str(e)
payload_full = {
'merchant_uid': 'qwer1234',
'amount': 5000,
'card_number': '4092-0230-1234-1234',
'expiry': '2019-03',
'birth': '500203',
'pwd_2digit': '19'
}
try:
iamport.pay_onetime(**payload_full)
except iamport.ResponseError as e:
assert e.code == -1
assert u'์นด๋์ ๋ณด ์ธ์ฆ์ ์คํจํ์์ต๋๋ค.' in e.message
| # -*- coding: utf-8 -*-
import string, random
def test_pay_onetime(iamport):
merchant_uid = ''.join(
random.choice(string.ascii_uppercase + string.digits)
for _ in range(10)
)
# Without 'card_number'
payload_not_enough = {
'merchant_uid': merchant_uid,
'amount': 5000,
'expiry': '2019-03',
'birth': '500203',
'pwd_2digit': '19'
}
try:
iamport.pay_onetime(**payload_not_enough)
except KeyError as e:
assert "Essential parameter is missing!: card_number" in str(e)
merchant_uid = ''.join(
random.choice(string.ascii_uppercase + string.digits)
for _ in range(10)
)
payload_full = {
'merchant_uid': merchant_uid,
'amount': 5000,
'card_number': '4092-0230-1234-1234',
'expiry': '2019-03',
'birth': '500203',
'pwd_2digit': '19'
}
try:
iamport.pay_onetime(**payload_full)
except iamport.ResponseError as e:
assert e.code == -1
assert u'์นด๋์ ๋ณด ์ธ์ฆ์ ์คํจํ์์ต๋๋ค.' in e.message
| Add random merchant_uid for continous testing | Add random merchant_uid for continous testing
| Python | mit | iamport/iamport-rest-client-python | ---
+++
@@ -1,10 +1,16 @@
# -*- coding: utf-8 -*-
+import string, random
def test_pay_onetime(iamport):
+ merchant_uid = ''.join(
+ random.choice(string.ascii_uppercase + string.digits)
+ for _ in range(10)
+ )
+
# Without 'card_number'
- payload_notEnough = {
- 'merchant_uid': 'qwer1234',
+ payload_not_enough = {
+ 'merchant_uid': merchant_uid,
'amount': 5000,
'expiry': '2019-03',
'birth': '500203',
@@ -12,12 +18,17 @@
}
try:
- iamport.pay_onetime(**payload_notEnough)
+ iamport.pay_onetime(**payload_not_enough)
except KeyError as e:
assert "Essential parameter is missing!: card_number" in str(e)
+ merchant_uid = ''.join(
+ random.choice(string.ascii_uppercase + string.digits)
+ for _ in range(10)
+ )
+
payload_full = {
- 'merchant_uid': 'qwer1234',
+ 'merchant_uid': merchant_uid,
'amount': 5000,
'card_number': '4092-0230-1234-1234',
'expiry': '2019-03', |
55ff308a538b80796b10d12d9acd1f1b84010d17 | bluebottle/common/management/commands/makemessages.py | bluebottle/common/management/commands/makemessages.py | import json
import tempfile
from django.core.management.commands.makemessages import Command as BaseCommand
from bluebottle.clients.utils import get_currencies
class Command(BaseCommand):
""" Extend the makemessages to include some of the fixtures """
fixtures = [
('bb_projects', 'project_data.json'),
('bb_tasks', 'skills.json'),
('geo', 'geo_data.json'),
]
def handle(self, *args, **kwargs):
with tempfile.NamedTemporaryFile(dir='bluebottle', suffix='.py') as temp:
for app, file in self.fixtures:
with open('bluebottle/{}/fixtures/{}'.format(app, file)) as fixture_file:
strings = [
fixture['fields']['name'].encode('utf-8')
for fixture
in json.load(fixture_file)
]
for string in strings:
temp.write('gettext("{}")\n'.format(string))
for currency in get_currencies():
temp.write('gettext("{}")\n'.format(currency['name']))
temp.flush()
return super(Command, self).handle(*args, **kwargs)
| import json
import os
from django.core.management.commands.makemessages import Command as BaseCommand
from bluebottle.clients.utils import get_currencies
class Command(BaseCommand):
""" Extend the makemessages to include some of the fixtures """
fixtures = [
('bb_projects', 'project_data.json'),
('bb_tasks', 'skills.json'),
('geo', 'geo_data.json'),
]
def handle(self, *args, **kwargs):
with open('bluebottle/fixtures.py', 'w') as temp:
for app, file in self.fixtures:
with open('bluebottle/{}/fixtures/{}'.format(app, file)) as fixture_file:
strings = [
fixture['fields']['name'].encode('utf-8')
for fixture
in json.load(fixture_file)
]
for string in strings:
temp.write('gettext("{}")\n'.format(string))
for currency in get_currencies():
temp.write('gettext("{}")\n'.format(currency['name']))
temp.flush()
super(Command, self).handle(*args, **kwargs)
os.unlink('bluebottle/fixtures.py')
| Make sure we always use the same filename for the fixtures translations. This way the translations do not contain accidental changes. | Make sure we always use the same filename for the fixtures translations.
This way the translations do not contain accidental changes.
| Python | bsd-3-clause | onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle | ---
+++
@@ -1,5 +1,5 @@
import json
-import tempfile
+import os
from django.core.management.commands.makemessages import Command as BaseCommand
@@ -16,7 +16,7 @@
]
def handle(self, *args, **kwargs):
- with tempfile.NamedTemporaryFile(dir='bluebottle', suffix='.py') as temp:
+ with open('bluebottle/fixtures.py', 'w') as temp:
for app, file in self.fixtures:
with open('bluebottle/{}/fixtures/{}'.format(app, file)) as fixture_file:
strings = [
@@ -33,4 +33,6 @@
temp.flush()
- return super(Command, self).handle(*args, **kwargs)
+ super(Command, self).handle(*args, **kwargs)
+
+ os.unlink('bluebottle/fixtures.py') |
b4ef0f107ca8fefbe556babb00f31c7b88019d50 | pydarkstar/__init__.py | pydarkstar/__init__.py | __version__ = 0.1
import pydarkstar.logutils
import logging
pydarkstar.logutils.setError()
try:
import sqlalchemy
except ImportError as e:
logging.exception(e.__class__.__name__)
logging.error('pip install sqlalchemy')
exit(-1)
try:
import pymysql
except ImportError as e:
logging.exception(e.__class__.__name__)
logging.error('pip install pymysql')
exit(-1)
try:
import bs4
except ImportError as e:
logging.exception(e.__class__.__name__)
logging.error('pip install beautifulsoup4')
exit(-1)
import scrub | __version__ = 0.1
import pydarkstar.logutils
import logging
pydarkstar.logutils.setError()
try:
import sqlalchemy
except ImportError as e:
logging.exception(e.__class__.__name__)
logging.error('pip install sqlalchemy')
exit(-1)
try:
import pymysql
except ImportError as e:
logging.exception(e.__class__.__name__)
logging.error('pip install pymysql')
exit(-1)
try:
import bs4
except ImportError as e:
logging.exception(e.__class__.__name__)
logging.error('pip install beautifulsoup4')
exit(-1) | Revert "Change imports to relative." | Revert "Change imports to relative."
This reverts commit 9d0990249b7e0e46e38a665cb8c32a1ee435c291.
| Python | mit | LegionXI/pydarkstar,AdamGagorik/pydarkstar | ---
+++
@@ -25,5 +25,3 @@
logging.exception(e.__class__.__name__)
logging.error('pip install beautifulsoup4')
exit(-1)
-
-import scrub |
a1a29908edfe67ad7ee046435f2485e0c6f95943 | pyoracc/atf/atffile.py | pyoracc/atf/atffile.py | from .atflex import AtfLexer
from .atfyacc import AtfParser
from mako.template import Template
class AtfFile(object):
template = Template("${text.serialize()}")
def __init__(self, content):
self.content = content
if content[-1] != '\n':
content += "\n"
lexer = AtfLexer().lexer
parser = AtfParser().parser
self.text = parser.parse(content, lexer=lexer)
def __str__(self):
return AtfFile.template.render_unicode(**vars(self))
def serialize(self):
return AtfFile.template.render_unicode(**vars(self))
| from .atflex import AtfLexer
from .atfyacc import AtfParser
from mako.template import Template
class AtfFile(object):
template = Template("${text.serialize()}")
def __init__(self, content):
self.content = content
if content[-1] != '\n':
content += "\n"
lexer = AtfLexer().lexer
parser = AtfParser().parser
self.text = parser.parse(content, lexer=lexer)
def __str__(self):
return AtfFile.template.render_unicode(**vars(self))
def serialize(self):
return AtfFile.template.render_unicode(**vars(self))
def _debug_lex_and_yac_file(file):
import codecs
text = codecs.open(file, encoding='utf-8-sig').read()
from pyoracc.atf.atffile import AtfLexer
lexer = AtfLexer().lexer
lexer.input(text)
for tok in lexer:
print(tok)
print("Lexed file")
exer = AtfLexer().lexer
parser = AtfParser().parser
parser.parse(text, lexer=lexer)
print("Parsed file")
| Add handy private debug and print method | Add handy private debug and print method
| Python | mit | UCL/pyoracc | ---
+++
@@ -20,3 +20,18 @@
def serialize(self):
return AtfFile.template.render_unicode(**vars(self))
+
+
+def _debug_lex_and_yac_file(file):
+ import codecs
+ text = codecs.open(file, encoding='utf-8-sig').read()
+ from pyoracc.atf.atffile import AtfLexer
+ lexer = AtfLexer().lexer
+ lexer.input(text)
+ for tok in lexer:
+ print(tok)
+ print("Lexed file")
+ exer = AtfLexer().lexer
+ parser = AtfParser().parser
+ parser.parse(text, lexer=lexer)
+ print("Parsed file") |
b220aea07d233a608505ecd73f977a6920e867e0 | python/luck-balance.py | python/luck-balance.py | #!/bin/python3
import math
import os
import random
import re
import sys
def max_luck_balance(contests, num_can_lose):
"""
Returns a single integer denoting the maximum amount of luck Lena can have
after all the contests.
"""
balance = 0
unimportant_contests = [contest for contest in contests if contest[1] == 0]
for contest_luck, _is_important in unimportant_contests:
balance += contest_luck
important_contests = sorted([contest for contest in contests if contest[1] == 1], reverse=True)
contests_to_win = (important_contests)[:num_can_lose]
contests_to_lose = (important_contests)[num_can_lose:]
for contest_luck, _is_important in contests_to_win:
balance += contest_luck
for contest_luck, _is_important in contests_to_lose:
balance -= contest_luck
return balance
if __name__ == '__main__':
num_contests, num_can_lose = map(int, input().split())
contests = []
for _ in range(num_contests):
contests.append(tuple(map(int, input().rstrip().split())))
result = max_luck_balance(contests, num_can_lose)
print(result)
| #!/bin/python3
import math
import os
import random
import re
import sys
def max_luck_balance(contests, num_can_lose):
"""
Returns a single integer denoting the maximum amount of luck Lena can have
after all the contests.
"""
balance = 0
# We can lose all unimportant contests.
unimportant_contests = [contest for contest in contests if contest[1] == 0]
for contest_luck, _is_important in unimportant_contests:
balance += contest_luck
# Sort the important contests in descending order of luck balance.
important_contests = sorted([contest for contest in contests if contest[1] == 1], reverse=True)
# We want to lose as many of the high balance contests as possible.
contests_to_lose = (important_contests)[:num_can_lose]
# We must win the remaining contests.
contests_to_win = (important_contests)[num_can_lose:]
for contest_luck, _is_important in contests_to_lose:
balance += contest_luck
for contest_luck, _is_important in contests_to_win:
balance -= contest_luck
return balance
if __name__ == '__main__':
num_contests, num_can_lose = map(int, input().split())
contests = []
for _ in range(num_contests):
contests.append(tuple(map(int, input().rstrip().split())))
result = max_luck_balance(contests, num_can_lose)
print(result)
| Add dev comments and fix variable naming | Add dev comments and fix variable naming
| Python | mit | rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank | ---
+++
@@ -12,19 +12,25 @@
after all the contests.
"""
balance = 0
+
+ # We can lose all unimportant contests.
unimportant_contests = [contest for contest in contests if contest[1] == 0]
for contest_luck, _is_important in unimportant_contests:
balance += contest_luck
+ # Sort the important contests in descending order of luck balance.
important_contests = sorted([contest for contest in contests if contest[1] == 1], reverse=True)
- contests_to_win = (important_contests)[:num_can_lose]
- contests_to_lose = (important_contests)[num_can_lose:]
+ # We want to lose as many of the high balance contests as possible.
+ contests_to_lose = (important_contests)[:num_can_lose]
+
+ # We must win the remaining contests.
+ contests_to_win = (important_contests)[num_can_lose:]
+
+ for contest_luck, _is_important in contests_to_lose:
+ balance += contest_luck
for contest_luck, _is_important in contests_to_win:
- balance += contest_luck
-
- for contest_luck, _is_important in contests_to_lose:
balance -= contest_luck
return balance |
29aeca4df24c84cecd48f0893da94624dab0e1c7 | manage.py | manage.py | import os
from app import create_app
from flask.ext.script import Manager
app = create_app(os.getenv('FLASK_CONFIG') or 'default')
manager = Manager(app)
if __name__ == '__main__':
manager.run()
| import os
from app import create_app, db
from app.models import User
from flask.ext.script import Manager
app = create_app(os.getenv('FLASK_CONFIG') or 'default')
manager = Manager(app)
@manager.command
def adduser(email, username, admin=False):
""" Register a new user"""
from getpass import getpass
password = getpass()
password2 = getpass(prompt = 'Confirm: ')
if password != password2:
import sys
sys.exit("Error: Passwords do not match!")
db.create_all()
user = User(email=email, username=username, password=password, is_admin=admin)
db.session.add(user)
db.session.commit()
print('User {0} was registered successfully!'.format(username))
if __name__ == '__main__':
manager.run()
| Add a custom script command to add a user to the database | Add a custom script command to add a user to the database
| Python | mit | finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is | ---
+++
@@ -1,11 +1,31 @@
import os
-from app import create_app
+from app import create_app, db
+from app.models import User
from flask.ext.script import Manager
app = create_app(os.getenv('FLASK_CONFIG') or 'default')
manager = Manager(app)
+@manager.command
+def adduser(email, username, admin=False):
+ """ Register a new user"""
+ from getpass import getpass
+ password = getpass()
+ password2 = getpass(prompt = 'Confirm: ')
+
+ if password != password2:
+ import sys
+ sys.exit("Error: Passwords do not match!")
+
+ db.create_all()
+
+ user = User(email=email, username=username, password=password, is_admin=admin)
+ db.session.add(user)
+ db.session.commit()
+
+ print('User {0} was registered successfully!'.format(username))
+
if __name__ == '__main__':
manager.run() |
4ee589cd8fd7e60606524e26a3b69e202242b75c | meinberlin/apps/servicekonto/apps.py | meinberlin/apps/servicekonto/apps.py | from allauth.socialaccount import providers
from django.apps import AppConfig
from .provider import ServiceKontoProvider
class Config(AppConfig):
name = 'meinberlin.apps.servicekonto'
label = 'meinberlin_servicekonto'
def ready(self):
providers.registry.register(ServiceKontoProvider)
| from allauth.socialaccount import providers
from django.apps import AppConfig
class Config(AppConfig):
name = 'meinberlin.apps.servicekonto'
label = 'meinberlin_servicekonto'
def ready(self):
from .provider import ServiceKontoProvider
providers.registry.register(ServiceKontoProvider)
| Fix servicekonto import to be lazy on ready | Fix servicekonto import to be lazy on ready
| Python | agpl-3.0 | liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin | ---
+++
@@ -1,7 +1,5 @@
from allauth.socialaccount import providers
from django.apps import AppConfig
-
-from .provider import ServiceKontoProvider
class Config(AppConfig):
@@ -9,4 +7,5 @@
label = 'meinberlin_servicekonto'
def ready(self):
+ from .provider import ServiceKontoProvider
providers.registry.register(ServiceKontoProvider) |
aa780dc20583882c03fe1e3cd37f57c3cf9c7f17 | taiga/projects/migrations/0006_auto_20141029_1040.py | taiga/projects/migrations/0006_auto_20141029_1040.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def update_total_milestones(apps, schema_editor):
Project = apps.get_model("projects", "Project")
qs = Project.objects.filter(total_milestones__isnull=True)
qs.update(total_milestones=0)
class Migration(migrations.Migration):
dependencies = [
('projects', '0005_membership_invitation_extra_text'),
]
operations = [
migrations.RunPython(update_total_milestones),
migrations.AlterField(
model_name='project',
name='total_milestones',
field=models.IntegerField(verbose_name='total of milestones', default=0),
),
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def update_total_milestones(apps, schema_editor):
Project = apps.get_model("projects", "Project")
qs = Project.objects.filter(total_milestones__isnull=True)
qs.update(total_milestones=0)
class Migration(migrations.Migration):
dependencies = [
('projects', '0005_membership_invitation_extra_text'),
]
operations = [
migrations.RunPython(update_total_milestones),
migrations.AlterField(
model_name='project',
name='total_milestones',
field=models.IntegerField(null=False, blank=False, default=0, verbose_name='total of milestones'),
),
]
| Add missing parameters (seems bug on django 1.7.x) | Add missing parameters (seems bug on django 1.7.x)
| Python | agpl-3.0 | xdevelsistemas/taiga-back-community,astronaut1712/taiga-back,astagi/taiga-back,Tigerwhit4/taiga-back,gam-phon/taiga-back,CMLL/taiga-back,dayatz/taiga-back,dycodedev/taiga-back,joshisa/taiga-back,joshisa/taiga-back,rajiteh/taiga-back,19kestier/taiga-back,Zaneh-/bearded-tribble-back,CoolCloud/taiga-back,coopsource/taiga-back,coopsource/taiga-back,forging2012/taiga-back,19kestier/taiga-back,CMLL/taiga-back,obimod/taiga-back,obimod/taiga-back,EvgeneOskin/taiga-back,EvgeneOskin/taiga-back,obimod/taiga-back,gam-phon/taiga-back,rajiteh/taiga-back,Tigerwhit4/taiga-back,seanchen/taiga-back,taigaio/taiga-back,obimod/taiga-back,xdevelsistemas/taiga-back-community,Tigerwhit4/taiga-back,gauravjns/taiga-back,Rademade/taiga-back,Zaneh-/bearded-tribble-back,Rademade/taiga-back,dycodedev/taiga-back,WALR/taiga-back,Zaneh-/bearded-tribble-back,joshisa/taiga-back,astronaut1712/taiga-back,bdang2012/taiga-back-casting,CoolCloud/taiga-back,gauravjns/taiga-back,astronaut1712/taiga-back,joshisa/taiga-back,jeffdwyatt/taiga-back,coopsource/taiga-back,EvgeneOskin/taiga-back,19kestier/taiga-back,crr0004/taiga-back,EvgeneOskin/taiga-back,astronaut1712/taiga-back,frt-arch/taiga-back,WALR/taiga-back,CoolCloud/taiga-back,forging2012/taiga-back,dycodedev/taiga-back,dycodedev/taiga-back,xdevelsistemas/taiga-back-community,frt-arch/taiga-back,rajiteh/taiga-back,rajiteh/taiga-back,coopsource/taiga-back,crr0004/taiga-back,Rademade/taiga-back,WALR/taiga-back,crr0004/taiga-back,jeffdwyatt/taiga-back,taigaio/taiga-back,dayatz/taiga-back,astagi/taiga-back,bdang2012/taiga-back-casting,WALR/taiga-back,Rademade/taiga-back,dayatz/taiga-back,gauravjns/taiga-back,seanchen/taiga-back,Tigerwhit4/taiga-back,jeffdwyatt/taiga-back,forging2012/taiga-back,astagi/taiga-back,CMLL/taiga-back,CMLL/taiga-back,astagi/taiga-back,bdang2012/taiga-back-casting,frt-arch/taiga-back,Rademade/taiga-back,crr0004/taiga-back,gam-phon/taiga-back,jeffdwyatt/taiga-back,CoolCloud/taiga-back,forging2012/taiga-back,bdang
2012/taiga-back-casting,seanchen/taiga-back,seanchen/taiga-back,gauravjns/taiga-back,taigaio/taiga-back,gam-phon/taiga-back | ---
+++
@@ -19,6 +19,6 @@
migrations.AlterField(
model_name='project',
name='total_milestones',
- field=models.IntegerField(verbose_name='total of milestones', default=0),
+ field=models.IntegerField(null=False, blank=False, default=0, verbose_name='total of milestones'),
),
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.