commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4b3ef2ffd1d3759f6109a8cec022326735d5a48a
|
lmgtfy/helpers.py
|
lmgtfy/helpers.py
|
from datetime import datetime, timedelta
from crispy_forms.layout import Submit
from lmgtfy.models import Domain, DomainSearch, TLD
from lmgtfy.tasks import search_bing_task
class CleanSubmitButton(Submit):
field_classes = 'btn btn-default'
# def search_yahoo(domain):
# domain_db_record, _created = Domain.objects.get_or_create(name=domain)
# # currently we are do not allow to search the same domain more than once per day
# recently_searched = DomainSearch.objects.filter(
# created_at__gte=datetime.now()-timedelta(days=1),
# domain=domain_db_record
# ).count()
# if recently_searched:
# return False
# else:
# search_yahoo_task.apply_async(kwargs={'domain': domain})
# return True
def search_bing(domain):
domain_db_record, _created = Domain.objects.get_or_create(name=domain)
# currently we are do not allow to search the same domain more than once per day
recently_searched = DomainSearch.objects.filter(
created_at__gte=datetime.now()-timedelta(days=1),
domain=domain_db_record
).count()
if recently_searched:
return False
else:
domain_search_record = DomainSearch.objects.create(domain=domain_db_record)
search_bing_task.apply_async(kwargs={'domain_search_record': domain_search_record})
return True
def check_valid_tld(domain):
allowed_tlds = TLD.objects.all().values_list('name', flat=True)
for tld in allowed_tlds:
if domain.endswith(tld):
return True
return False
|
from datetime import datetime, timedelta
from crispy_forms.layout import Submit
from lmgtfy.models import Domain, DomainSearch, TLD
from lmgtfy.tasks import search_bing_task
class CleanSubmitButton(Submit):
field_classes = 'btn btn-default'
def search_bing(domain):
domain_db_record, _created = Domain.objects.get_or_create(name=domain)
# Bing does not allow us to search the same domain more than once per day.
recently_searched = DomainSearch.objects.filter(
created_at__gte=datetime.now()-timedelta(days=1),
domain=domain_db_record
).count()
if recently_searched:
return False
else:
domain_search_record = DomainSearch.objects.create(domain=domain_db_record)
search_bing_task.apply_async(kwargs={'domain_search_record': domain_search_record})
return True
def check_valid_tld(domain):
allowed_tlds = TLD.objects.all().values_list('name', flat=True)
for tld in allowed_tlds:
if domain.endswith(tld):
return True
return False
|
Remove unused Yahoo code, correct comment text
|
Remove unused Yahoo code, correct comment text
|
Python
|
mit
|
opendata/lmgtdfy,opendata/lmgtdfy,todrobbins/lmgtdfy,todrobbins/lmgtdfy
|
from datetime import datetime, timedelta
from crispy_forms.layout import Submit
from lmgtfy.models import Domain, DomainSearch, TLD
from lmgtfy.tasks import search_bing_task
class CleanSubmitButton(Submit):
field_classes = 'btn btn-default'
# def search_yahoo(domain):
# domain_db_record, _created = Domain.objects.get_or_create(name=domain)
# # currently we are do not allow to search the same domain more than once per day
# recently_searched = DomainSearch.objects.filter(
# created_at__gte=datetime.now()-timedelta(days=1),
# domain=domain_db_record
# ).count()
# if recently_searched:
# return False
# else:
# search_yahoo_task.apply_async(kwargs={'domain': domain})
# return True
def search_bing(domain):
domain_db_record, _created = Domain.objects.get_or_create(name=domain)
# currently we are do not allow to search the same domain more than once per day
recently_searched = DomainSearch.objects.filter(
created_at__gte=datetime.now()-timedelta(days=1),
domain=domain_db_record
).count()
if recently_searched:
return False
else:
domain_search_record = DomainSearch.objects.create(domain=domain_db_record)
search_bing_task.apply_async(kwargs={'domain_search_record': domain_search_record})
return True
def check_valid_tld(domain):
allowed_tlds = TLD.objects.all().values_list('name', flat=True)
for tld in allowed_tlds:
if domain.endswith(tld):
return True
return False
Remove unused Yahoo code, correct comment text
|
from datetime import datetime, timedelta
from crispy_forms.layout import Submit
from lmgtfy.models import Domain, DomainSearch, TLD
from lmgtfy.tasks import search_bing_task
class CleanSubmitButton(Submit):
field_classes = 'btn btn-default'
def search_bing(domain):
domain_db_record, _created = Domain.objects.get_or_create(name=domain)
# Bing does not allow us to search the same domain more than once per day.
recently_searched = DomainSearch.objects.filter(
created_at__gte=datetime.now()-timedelta(days=1),
domain=domain_db_record
).count()
if recently_searched:
return False
else:
domain_search_record = DomainSearch.objects.create(domain=domain_db_record)
search_bing_task.apply_async(kwargs={'domain_search_record': domain_search_record})
return True
def check_valid_tld(domain):
allowed_tlds = TLD.objects.all().values_list('name', flat=True)
for tld in allowed_tlds:
if domain.endswith(tld):
return True
return False
|
<commit_before>from datetime import datetime, timedelta
from crispy_forms.layout import Submit
from lmgtfy.models import Domain, DomainSearch, TLD
from lmgtfy.tasks import search_bing_task
class CleanSubmitButton(Submit):
field_classes = 'btn btn-default'
# def search_yahoo(domain):
# domain_db_record, _created = Domain.objects.get_or_create(name=domain)
# # currently we are do not allow to search the same domain more than once per day
# recently_searched = DomainSearch.objects.filter(
# created_at__gte=datetime.now()-timedelta(days=1),
# domain=domain_db_record
# ).count()
# if recently_searched:
# return False
# else:
# search_yahoo_task.apply_async(kwargs={'domain': domain})
# return True
def search_bing(domain):
domain_db_record, _created = Domain.objects.get_or_create(name=domain)
# currently we are do not allow to search the same domain more than once per day
recently_searched = DomainSearch.objects.filter(
created_at__gte=datetime.now()-timedelta(days=1),
domain=domain_db_record
).count()
if recently_searched:
return False
else:
domain_search_record = DomainSearch.objects.create(domain=domain_db_record)
search_bing_task.apply_async(kwargs={'domain_search_record': domain_search_record})
return True
def check_valid_tld(domain):
allowed_tlds = TLD.objects.all().values_list('name', flat=True)
for tld in allowed_tlds:
if domain.endswith(tld):
return True
return False
<commit_msg>Remove unused Yahoo code, correct comment text<commit_after>
|
from datetime import datetime, timedelta
from crispy_forms.layout import Submit
from lmgtfy.models import Domain, DomainSearch, TLD
from lmgtfy.tasks import search_bing_task
class CleanSubmitButton(Submit):
field_classes = 'btn btn-default'
def search_bing(domain):
domain_db_record, _created = Domain.objects.get_or_create(name=domain)
# Bing does not allow us to search the same domain more than once per day.
recently_searched = DomainSearch.objects.filter(
created_at__gte=datetime.now()-timedelta(days=1),
domain=domain_db_record
).count()
if recently_searched:
return False
else:
domain_search_record = DomainSearch.objects.create(domain=domain_db_record)
search_bing_task.apply_async(kwargs={'domain_search_record': domain_search_record})
return True
def check_valid_tld(domain):
allowed_tlds = TLD.objects.all().values_list('name', flat=True)
for tld in allowed_tlds:
if domain.endswith(tld):
return True
return False
|
from datetime import datetime, timedelta
from crispy_forms.layout import Submit
from lmgtfy.models import Domain, DomainSearch, TLD
from lmgtfy.tasks import search_bing_task
class CleanSubmitButton(Submit):
field_classes = 'btn btn-default'
# def search_yahoo(domain):
# domain_db_record, _created = Domain.objects.get_or_create(name=domain)
# # currently we are do not allow to search the same domain more than once per day
# recently_searched = DomainSearch.objects.filter(
# created_at__gte=datetime.now()-timedelta(days=1),
# domain=domain_db_record
# ).count()
# if recently_searched:
# return False
# else:
# search_yahoo_task.apply_async(kwargs={'domain': domain})
# return True
def search_bing(domain):
domain_db_record, _created = Domain.objects.get_or_create(name=domain)
# currently we are do not allow to search the same domain more than once per day
recently_searched = DomainSearch.objects.filter(
created_at__gte=datetime.now()-timedelta(days=1),
domain=domain_db_record
).count()
if recently_searched:
return False
else:
domain_search_record = DomainSearch.objects.create(domain=domain_db_record)
search_bing_task.apply_async(kwargs={'domain_search_record': domain_search_record})
return True
def check_valid_tld(domain):
allowed_tlds = TLD.objects.all().values_list('name', flat=True)
for tld in allowed_tlds:
if domain.endswith(tld):
return True
return False
Remove unused Yahoo code, correct comment textfrom datetime import datetime, timedelta
from crispy_forms.layout import Submit
from lmgtfy.models import Domain, DomainSearch, TLD
from lmgtfy.tasks import search_bing_task
class CleanSubmitButton(Submit):
field_classes = 'btn btn-default'
def search_bing(domain):
domain_db_record, _created = Domain.objects.get_or_create(name=domain)
# Bing does not allow us to search the same domain more than once per day.
recently_searched = DomainSearch.objects.filter(
created_at__gte=datetime.now()-timedelta(days=1),
domain=domain_db_record
).count()
if recently_searched:
return False
else:
domain_search_record = DomainSearch.objects.create(domain=domain_db_record)
search_bing_task.apply_async(kwargs={'domain_search_record': domain_search_record})
return True
def check_valid_tld(domain):
allowed_tlds = TLD.objects.all().values_list('name', flat=True)
for tld in allowed_tlds:
if domain.endswith(tld):
return True
return False
|
<commit_before>from datetime import datetime, timedelta
from crispy_forms.layout import Submit
from lmgtfy.models import Domain, DomainSearch, TLD
from lmgtfy.tasks import search_bing_task
class CleanSubmitButton(Submit):
field_classes = 'btn btn-default'
# def search_yahoo(domain):
# domain_db_record, _created = Domain.objects.get_or_create(name=domain)
# # currently we are do not allow to search the same domain more than once per day
# recently_searched = DomainSearch.objects.filter(
# created_at__gte=datetime.now()-timedelta(days=1),
# domain=domain_db_record
# ).count()
# if recently_searched:
# return False
# else:
# search_yahoo_task.apply_async(kwargs={'domain': domain})
# return True
def search_bing(domain):
domain_db_record, _created = Domain.objects.get_or_create(name=domain)
# currently we are do not allow to search the same domain more than once per day
recently_searched = DomainSearch.objects.filter(
created_at__gte=datetime.now()-timedelta(days=1),
domain=domain_db_record
).count()
if recently_searched:
return False
else:
domain_search_record = DomainSearch.objects.create(domain=domain_db_record)
search_bing_task.apply_async(kwargs={'domain_search_record': domain_search_record})
return True
def check_valid_tld(domain):
allowed_tlds = TLD.objects.all().values_list('name', flat=True)
for tld in allowed_tlds:
if domain.endswith(tld):
return True
return False
<commit_msg>Remove unused Yahoo code, correct comment text<commit_after>from datetime import datetime, timedelta
from crispy_forms.layout import Submit
from lmgtfy.models import Domain, DomainSearch, TLD
from lmgtfy.tasks import search_bing_task
class CleanSubmitButton(Submit):
field_classes = 'btn btn-default'
def search_bing(domain):
domain_db_record, _created = Domain.objects.get_or_create(name=domain)
# Bing does not allow us to search the same domain more than once per day.
recently_searched = DomainSearch.objects.filter(
created_at__gte=datetime.now()-timedelta(days=1),
domain=domain_db_record
).count()
if recently_searched:
return False
else:
domain_search_record = DomainSearch.objects.create(domain=domain_db_record)
search_bing_task.apply_async(kwargs={'domain_search_record': domain_search_record})
return True
def check_valid_tld(domain):
allowed_tlds = TLD.objects.all().values_list('name', flat=True)
for tld in allowed_tlds:
if domain.endswith(tld):
return True
return False
|
410ac0cb59114714b39b7fbd2d7ef9357702067d
|
opps/core/models/published.py
|
opps/core/models/published.py
|
#!/usr/bin/env python
from django.db import models
from django.utils.translation import ugettext_lazy as _
from datetime import datetime
class PublishedMnager(models.Manager):
def all_published(self):
return super(PublisherMnager, self).get_query_set().filter(
date_available__lte=datetime.now(), published=True)
class Published(models.Model):
date_available = models.DateTimeField(_(u"Date available"),
default=datetime.now, null=True)
published = models.BooleanField(_(u"Published"), default=False)
objects = PublishedMnager()
class Meta:
abstract = True
def is_published(self):
return self.published and \
self.date_available.replace(tzinfo=None) <= datetime.now()
|
#!/usr/bin/env python
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
from datetime import datetime
class PublishedMnager(models.Manager):
def all_published(self):
return super(PublisherMnager, self).get_query_set().filter(
date_available__lte=datetime.now(), published=True)
class Published(models.Model):
user = models.ForeignKey(User, related_name='users')
date_available = models.DateTimeField(_(u"Date available"),
default=datetime.now, null=True)
published = models.BooleanField(_(u"Published"), default=False)
objects = PublishedMnager()
class Meta:
abstract = True
def is_published(self):
return self.published and \
self.date_available.replace(tzinfo=None) <= datetime.now()
|
Add user models on Published class models
|
Add user models on Published class models
|
Python
|
mit
|
williamroot/opps,opps/opps,YACOWS/opps,opps/opps,jeanmask/opps,jeanmask/opps,williamroot/opps,williamroot/opps,YACOWS/opps,jeanmask/opps,williamroot/opps,opps/opps,YACOWS/opps,YACOWS/opps,jeanmask/opps,opps/opps
|
#!/usr/bin/env python
from django.db import models
from django.utils.translation import ugettext_lazy as _
from datetime import datetime
class PublishedMnager(models.Manager):
def all_published(self):
return super(PublisherMnager, self).get_query_set().filter(
date_available__lte=datetime.now(), published=True)
class Published(models.Model):
date_available = models.DateTimeField(_(u"Date available"),
default=datetime.now, null=True)
published = models.BooleanField(_(u"Published"), default=False)
objects = PublishedMnager()
class Meta:
abstract = True
def is_published(self):
return self.published and \
self.date_available.replace(tzinfo=None) <= datetime.now()
Add user models on Published class models
|
#!/usr/bin/env python
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
from datetime import datetime
class PublishedMnager(models.Manager):
def all_published(self):
return super(PublisherMnager, self).get_query_set().filter(
date_available__lte=datetime.now(), published=True)
class Published(models.Model):
user = models.ForeignKey(User, related_name='users')
date_available = models.DateTimeField(_(u"Date available"),
default=datetime.now, null=True)
published = models.BooleanField(_(u"Published"), default=False)
objects = PublishedMnager()
class Meta:
abstract = True
def is_published(self):
return self.published and \
self.date_available.replace(tzinfo=None) <= datetime.now()
|
<commit_before>#!/usr/bin/env python
from django.db import models
from django.utils.translation import ugettext_lazy as _
from datetime import datetime
class PublishedMnager(models.Manager):
def all_published(self):
return super(PublisherMnager, self).get_query_set().filter(
date_available__lte=datetime.now(), published=True)
class Published(models.Model):
date_available = models.DateTimeField(_(u"Date available"),
default=datetime.now, null=True)
published = models.BooleanField(_(u"Published"), default=False)
objects = PublishedMnager()
class Meta:
abstract = True
def is_published(self):
return self.published and \
self.date_available.replace(tzinfo=None) <= datetime.now()
<commit_msg>Add user models on Published class models<commit_after>
|
#!/usr/bin/env python
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
from datetime import datetime
class PublishedMnager(models.Manager):
def all_published(self):
return super(PublisherMnager, self).get_query_set().filter(
date_available__lte=datetime.now(), published=True)
class Published(models.Model):
user = models.ForeignKey(User, related_name='users')
date_available = models.DateTimeField(_(u"Date available"),
default=datetime.now, null=True)
published = models.BooleanField(_(u"Published"), default=False)
objects = PublishedMnager()
class Meta:
abstract = True
def is_published(self):
return self.published and \
self.date_available.replace(tzinfo=None) <= datetime.now()
|
#!/usr/bin/env python
from django.db import models
from django.utils.translation import ugettext_lazy as _
from datetime import datetime
class PublishedMnager(models.Manager):
def all_published(self):
return super(PublisherMnager, self).get_query_set().filter(
date_available__lte=datetime.now(), published=True)
class Published(models.Model):
date_available = models.DateTimeField(_(u"Date available"),
default=datetime.now, null=True)
published = models.BooleanField(_(u"Published"), default=False)
objects = PublishedMnager()
class Meta:
abstract = True
def is_published(self):
return self.published and \
self.date_available.replace(tzinfo=None) <= datetime.now()
Add user models on Published class models#!/usr/bin/env python
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
from datetime import datetime
class PublishedMnager(models.Manager):
def all_published(self):
return super(PublisherMnager, self).get_query_set().filter(
date_available__lte=datetime.now(), published=True)
class Published(models.Model):
user = models.ForeignKey(User, related_name='users')
date_available = models.DateTimeField(_(u"Date available"),
default=datetime.now, null=True)
published = models.BooleanField(_(u"Published"), default=False)
objects = PublishedMnager()
class Meta:
abstract = True
def is_published(self):
return self.published and \
self.date_available.replace(tzinfo=None) <= datetime.now()
|
<commit_before>#!/usr/bin/env python
from django.db import models
from django.utils.translation import ugettext_lazy as _
from datetime import datetime
class PublishedMnager(models.Manager):
def all_published(self):
return super(PublisherMnager, self).get_query_set().filter(
date_available__lte=datetime.now(), published=True)
class Published(models.Model):
date_available = models.DateTimeField(_(u"Date available"),
default=datetime.now, null=True)
published = models.BooleanField(_(u"Published"), default=False)
objects = PublishedMnager()
class Meta:
abstract = True
def is_published(self):
return self.published and \
self.date_available.replace(tzinfo=None) <= datetime.now()
<commit_msg>Add user models on Published class models<commit_after>#!/usr/bin/env python
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
from datetime import datetime
class PublishedMnager(models.Manager):
def all_published(self):
return super(PublisherMnager, self).get_query_set().filter(
date_available__lte=datetime.now(), published=True)
class Published(models.Model):
user = models.ForeignKey(User, related_name='users')
date_available = models.DateTimeField(_(u"Date available"),
default=datetime.now, null=True)
published = models.BooleanField(_(u"Published"), default=False)
objects = PublishedMnager()
class Meta:
abstract = True
def is_published(self):
return self.published and \
self.date_available.replace(tzinfo=None) <= datetime.now()
|
8cd68fb795295b3a26d30f48f5529389b5ebd4b0
|
readcsv.py
|
readcsv.py
|
# -*- coding: utf-8 -*-
"""
Created on Sat Jul 26 12:04:49 2014
@author: pgao
"""
"""
Read the CSV
NOTE: Manually edited csv file twice to match FIELD NAME format
"""
import csv
datareader = csv.DictReader(open("C:/Users/pgao/Documents/DATA_FIELD_DESCRIPTORS.csv"))
data = []
entry = {}
current_table = ""
for line in datareader:
new_table_number = line['TABLE NUMBER']
if new_table_number != current_table:
entry = {}
current_table = new_table_number
entry['Matrix Number'] = line['TABLE NUMBER']
entry['File Name'] = line['SEGMENT']
next_line = datareader.next()
entry['Universe'] = (next_line['FIELD NAME'][9:].lstrip())
try:
entry['Name'] = line['FIELD NAME'][:line['FIELD NAME'].index('[')-1]
entry['Cell Count'] = line['FIELD NAME'][line['FIELD NAME'].index('[')+1]
except ValueError:
print line
data.append(entry)
#Write the tsv file
datawriter = csv.DictWriter(open("C:/Users/pgao/Documents/SF1.tsv", "w"),
['File Name', 'Matrix Number',
'Cell Count', 'Name', 'Universe'],
delimiter = '\t',
lineterminator='\n')
datawriter.writeheader()
datawriter.writerows(data)
|
# -*- coding: utf-8 -*-
"""
Created on Sat Jul 26 12:04:49 2014
@author: pgao
"""
"""
Read the CSV
NOTE: Manually edited csv file twice to match FIELD NAME format
"""
import csv
datareader = csv.DictReader(open("sf1_data_field_descriptors_2010.csv"))
data = []
entry = None
current_table = ""
for line in datareader:
new_table_number = line['TABLE NUMBER']
if new_table_number != current_table:
# save the old one
if entry != None:
data.append(entry)
entry = {}
current_table = new_table_number
entry['Matrix Number'] = line['TABLE NUMBER']
entry['File Name'] = line['SEGMENT']
next_line = datareader.next()
entry['Universe'] = (next_line['FIELD NAME'][9:].lstrip())
entry['Name'] = line['FIELD NAME'][:line['FIELD NAME'].index('[')-1]
entry['Cell Count'] = 0
# Increment the cell count iff there's actually data, rather than this being a descriptive row
if len(line['FIELD CODE']) > 0:
entry['Cell Count'] += 1
# Write the tsv file
datawriter = csv.DictWriter(open("sf1_2010.tsv", "w"),
['File Name', 'Matrix Number',
'Cell Count', 'Name', 'Universe'],
dialect = 'excel-tab'
)
datawriter.writeheader()
datawriter.writerows(data)
|
Fix bug with matrices with >= 10 columns
|
Fix bug with matrices with >= 10 columns
|
Python
|
isc
|
ikding/census-tools,ikding/census-tools
|
# -*- coding: utf-8 -*-
"""
Created on Sat Jul 26 12:04:49 2014
@author: pgao
"""
"""
Read the CSV
NOTE: Manually edited csv file twice to match FIELD NAME format
"""
import csv
datareader = csv.DictReader(open("C:/Users/pgao/Documents/DATA_FIELD_DESCRIPTORS.csv"))
data = []
entry = {}
current_table = ""
for line in datareader:
new_table_number = line['TABLE NUMBER']
if new_table_number != current_table:
entry = {}
current_table = new_table_number
entry['Matrix Number'] = line['TABLE NUMBER']
entry['File Name'] = line['SEGMENT']
next_line = datareader.next()
entry['Universe'] = (next_line['FIELD NAME'][9:].lstrip())
try:
entry['Name'] = line['FIELD NAME'][:line['FIELD NAME'].index('[')-1]
entry['Cell Count'] = line['FIELD NAME'][line['FIELD NAME'].index('[')+1]
except ValueError:
print line
data.append(entry)
#Write the tsv file
datawriter = csv.DictWriter(open("C:/Users/pgao/Documents/SF1.tsv", "w"),
['File Name', 'Matrix Number',
'Cell Count', 'Name', 'Universe'],
delimiter = '\t',
lineterminator='\n')
datawriter.writeheader()
datawriter.writerows(data)Fix bug with matrices with >= 10 columns
|
# -*- coding: utf-8 -*-
"""
Created on Sat Jul 26 12:04:49 2014
@author: pgao
"""
"""
Read the CSV
NOTE: Manually edited csv file twice to match FIELD NAME format
"""
import csv
datareader = csv.DictReader(open("sf1_data_field_descriptors_2010.csv"))
data = []
entry = None
current_table = ""
for line in datareader:
new_table_number = line['TABLE NUMBER']
if new_table_number != current_table:
# save the old one
if entry != None:
data.append(entry)
entry = {}
current_table = new_table_number
entry['Matrix Number'] = line['TABLE NUMBER']
entry['File Name'] = line['SEGMENT']
next_line = datareader.next()
entry['Universe'] = (next_line['FIELD NAME'][9:].lstrip())
entry['Name'] = line['FIELD NAME'][:line['FIELD NAME'].index('[')-1]
entry['Cell Count'] = 0
# Increment the cell count iff there's actually data, rather than this being a descriptive row
if len(line['FIELD CODE']) > 0:
entry['Cell Count'] += 1
# Write the tsv file
datawriter = csv.DictWriter(open("sf1_2010.tsv", "w"),
['File Name', 'Matrix Number',
'Cell Count', 'Name', 'Universe'],
dialect = 'excel-tab'
)
datawriter.writeheader()
datawriter.writerows(data)
|
<commit_before># -*- coding: utf-8 -*-
"""
Created on Sat Jul 26 12:04:49 2014
@author: pgao
"""
"""
Read the CSV
NOTE: Manually edited csv file twice to match FIELD NAME format
"""
import csv
datareader = csv.DictReader(open("C:/Users/pgao/Documents/DATA_FIELD_DESCRIPTORS.csv"))
data = []
entry = {}
current_table = ""
for line in datareader:
new_table_number = line['TABLE NUMBER']
if new_table_number != current_table:
entry = {}
current_table = new_table_number
entry['Matrix Number'] = line['TABLE NUMBER']
entry['File Name'] = line['SEGMENT']
next_line = datareader.next()
entry['Universe'] = (next_line['FIELD NAME'][9:].lstrip())
try:
entry['Name'] = line['FIELD NAME'][:line['FIELD NAME'].index('[')-1]
entry['Cell Count'] = line['FIELD NAME'][line['FIELD NAME'].index('[')+1]
except ValueError:
print line
data.append(entry)
#Write the tsv file
datawriter = csv.DictWriter(open("C:/Users/pgao/Documents/SF1.tsv", "w"),
['File Name', 'Matrix Number',
'Cell Count', 'Name', 'Universe'],
delimiter = '\t',
lineterminator='\n')
datawriter.writeheader()
datawriter.writerows(data)<commit_msg>Fix bug with matrices with >= 10 columns<commit_after>
|
# -*- coding: utf-8 -*-
"""
Created on Sat Jul 26 12:04:49 2014
@author: pgao
"""
"""
Read the CSV
NOTE: Manually edited csv file twice to match FIELD NAME format
"""
import csv
datareader = csv.DictReader(open("sf1_data_field_descriptors_2010.csv"))
data = []
entry = None
current_table = ""
for line in datareader:
new_table_number = line['TABLE NUMBER']
if new_table_number != current_table:
# save the old one
if entry != None:
data.append(entry)
entry = {}
current_table = new_table_number
entry['Matrix Number'] = line['TABLE NUMBER']
entry['File Name'] = line['SEGMENT']
next_line = datareader.next()
entry['Universe'] = (next_line['FIELD NAME'][9:].lstrip())
entry['Name'] = line['FIELD NAME'][:line['FIELD NAME'].index('[')-1]
entry['Cell Count'] = 0
# Increment the cell count iff there's actually data, rather than this being a descriptive row
if len(line['FIELD CODE']) > 0:
entry['Cell Count'] += 1
# Write the tsv file
datawriter = csv.DictWriter(open("sf1_2010.tsv", "w"),
['File Name', 'Matrix Number',
'Cell Count', 'Name', 'Universe'],
dialect = 'excel-tab'
)
datawriter.writeheader()
datawriter.writerows(data)
|
# -*- coding: utf-8 -*-
"""
Created on Sat Jul 26 12:04:49 2014
@author: pgao
"""
"""
Read the CSV
NOTE: Manually edited csv file twice to match FIELD NAME format
"""
import csv
datareader = csv.DictReader(open("C:/Users/pgao/Documents/DATA_FIELD_DESCRIPTORS.csv"))
data = []
entry = {}
current_table = ""
for line in datareader:
new_table_number = line['TABLE NUMBER']
if new_table_number != current_table:
entry = {}
current_table = new_table_number
entry['Matrix Number'] = line['TABLE NUMBER']
entry['File Name'] = line['SEGMENT']
next_line = datareader.next()
entry['Universe'] = (next_line['FIELD NAME'][9:].lstrip())
try:
entry['Name'] = line['FIELD NAME'][:line['FIELD NAME'].index('[')-1]
entry['Cell Count'] = line['FIELD NAME'][line['FIELD NAME'].index('[')+1]
except ValueError:
print line
data.append(entry)
#Write the tsv file
datawriter = csv.DictWriter(open("C:/Users/pgao/Documents/SF1.tsv", "w"),
['File Name', 'Matrix Number',
'Cell Count', 'Name', 'Universe'],
delimiter = '\t',
lineterminator='\n')
datawriter.writeheader()
datawriter.writerows(data)Fix bug with matrices with >= 10 columns# -*- coding: utf-8 -*-
"""
Created on Sat Jul 26 12:04:49 2014
@author: pgao
"""
"""
Read the CSV
NOTE: Manually edited csv file twice to match FIELD NAME format
"""
import csv
datareader = csv.DictReader(open("sf1_data_field_descriptors_2010.csv"))
data = []
entry = None
current_table = ""
for line in datareader:
new_table_number = line['TABLE NUMBER']
if new_table_number != current_table:
# save the old one
if entry != None:
data.append(entry)
entry = {}
current_table = new_table_number
entry['Matrix Number'] = line['TABLE NUMBER']
entry['File Name'] = line['SEGMENT']
next_line = datareader.next()
entry['Universe'] = (next_line['FIELD NAME'][9:].lstrip())
entry['Name'] = line['FIELD NAME'][:line['FIELD NAME'].index('[')-1]
entry['Cell Count'] = 0
# Increment the cell count iff there's actually data, rather than this being a descriptive row
if len(line['FIELD CODE']) > 0:
entry['Cell Count'] += 1
# Write the tsv file
datawriter = csv.DictWriter(open("sf1_2010.tsv", "w"),
['File Name', 'Matrix Number',
'Cell Count', 'Name', 'Universe'],
dialect = 'excel-tab'
)
datawriter.writeheader()
datawriter.writerows(data)
|
<commit_before># -*- coding: utf-8 -*-
"""
Created on Sat Jul 26 12:04:49 2014
@author: pgao
"""
"""
Read the CSV
NOTE: Manually edited csv file twice to match FIELD NAME format
"""
import csv
datareader = csv.DictReader(open("C:/Users/pgao/Documents/DATA_FIELD_DESCRIPTORS.csv"))
data = []
entry = {}
current_table = ""
for line in datareader:
new_table_number = line['TABLE NUMBER']
if new_table_number != current_table:
entry = {}
current_table = new_table_number
entry['Matrix Number'] = line['TABLE NUMBER']
entry['File Name'] = line['SEGMENT']
next_line = datareader.next()
entry['Universe'] = (next_line['FIELD NAME'][9:].lstrip())
try:
entry['Name'] = line['FIELD NAME'][:line['FIELD NAME'].index('[')-1]
entry['Cell Count'] = line['FIELD NAME'][line['FIELD NAME'].index('[')+1]
except ValueError:
print line
data.append(entry)
#Write the tsv file
datawriter = csv.DictWriter(open("C:/Users/pgao/Documents/SF1.tsv", "w"),
['File Name', 'Matrix Number',
'Cell Count', 'Name', 'Universe'],
delimiter = '\t',
lineterminator='\n')
datawriter.writeheader()
datawriter.writerows(data)<commit_msg>Fix bug with matrices with >= 10 columns<commit_after># -*- coding: utf-8 -*-
"""
Created on Sat Jul 26 12:04:49 2014
@author: pgao
"""
"""
Read the CSV
NOTE: Manually edited csv file twice to match FIELD NAME format
"""
import csv
datareader = csv.DictReader(open("sf1_data_field_descriptors_2010.csv"))
data = []
entry = None
current_table = ""
for line in datareader:
new_table_number = line['TABLE NUMBER']
if new_table_number != current_table:
# save the old one
if entry != None:
data.append(entry)
entry = {}
current_table = new_table_number
entry['Matrix Number'] = line['TABLE NUMBER']
entry['File Name'] = line['SEGMENT']
next_line = datareader.next()
entry['Universe'] = (next_line['FIELD NAME'][9:].lstrip())
entry['Name'] = line['FIELD NAME'][:line['FIELD NAME'].index('[')-1]
entry['Cell Count'] = 0
# Increment the cell count iff there's actually data, rather than this being a descriptive row
if len(line['FIELD CODE']) > 0:
entry['Cell Count'] += 1
# Write the tsv file
datawriter = csv.DictWriter(open("sf1_2010.tsv", "w"),
['File Name', 'Matrix Number',
'Cell Count', 'Name', 'Universe'],
dialect = 'excel-tab'
)
datawriter.writeheader()
datawriter.writerows(data)
|
bddc39150ab9ace8dcb71dc1d0ab7623986fabcd
|
bibliopixel/animation/mixer.py
|
bibliopixel/animation/mixer.py
|
import copy
from . import parallel
from .. util import color_list
class Mixer(parallel.Parallel):
def __init__(self, *args, levels=None, master=1, **kwds):
self.master = master
super().__init__(*args, **kwds)
self.mixer = color_list.Mixer(
self.color_list,
[a.color_list for a in self.animations],
levels)
def step(self, amt=1):
super().step(amt)
self.mixer.clear()
self.mixer.mix(self.master)
|
import copy
from . import parallel
from .. util import color_list
class Mixer(parallel.Parallel):
def __init__(self, *args, levels=None, master=1, **kwds):
self.master = master
super().__init__(*args, **kwds)
self.mixer = color_list.Mixer(
self.color_list,
[a.color_list for a in self.animations],
levels)
self.levels = self.mixer.levels
def step(self, amt=1):
super().step(amt)
self.mixer.clear()
self.mixer.mix(self.master)
|
Copy `levels` up into the Mixer animation for addressing convenience
|
Copy `levels` up into the Mixer animation for addressing convenience
|
Python
|
mit
|
ManiacalLabs/BiblioPixel,rec/BiblioPixel,rec/BiblioPixel,ManiacalLabs/BiblioPixel,rec/BiblioPixel,ManiacalLabs/BiblioPixel,ManiacalLabs/BiblioPixel,rec/BiblioPixel
|
import copy
from . import parallel
from .. util import color_list
class Mixer(parallel.Parallel):
def __init__(self, *args, levels=None, master=1, **kwds):
self.master = master
super().__init__(*args, **kwds)
self.mixer = color_list.Mixer(
self.color_list,
[a.color_list for a in self.animations],
levels)
def step(self, amt=1):
super().step(amt)
self.mixer.clear()
self.mixer.mix(self.master)
Copy `levels` up into the Mixer animation for addressing convenience
|
import copy
from . import parallel
from .. util import color_list
class Mixer(parallel.Parallel):
def __init__(self, *args, levels=None, master=1, **kwds):
self.master = master
super().__init__(*args, **kwds)
self.mixer = color_list.Mixer(
self.color_list,
[a.color_list for a in self.animations],
levels)
self.levels = self.mixer.levels
def step(self, amt=1):
super().step(amt)
self.mixer.clear()
self.mixer.mix(self.master)
|
<commit_before>import copy
from . import parallel
from .. util import color_list
class Mixer(parallel.Parallel):
def __init__(self, *args, levels=None, master=1, **kwds):
self.master = master
super().__init__(*args, **kwds)
self.mixer = color_list.Mixer(
self.color_list,
[a.color_list for a in self.animations],
levels)
def step(self, amt=1):
super().step(amt)
self.mixer.clear()
self.mixer.mix(self.master)
<commit_msg>Copy `levels` up into the Mixer animation for addressing convenience<commit_after>
|
import copy
from . import parallel
from .. util import color_list
class Mixer(parallel.Parallel):
def __init__(self, *args, levels=None, master=1, **kwds):
self.master = master
super().__init__(*args, **kwds)
self.mixer = color_list.Mixer(
self.color_list,
[a.color_list for a in self.animations],
levels)
self.levels = self.mixer.levels
def step(self, amt=1):
super().step(amt)
self.mixer.clear()
self.mixer.mix(self.master)
|
import copy
from . import parallel
from .. util import color_list
class Mixer(parallel.Parallel):
def __init__(self, *args, levels=None, master=1, **kwds):
self.master = master
super().__init__(*args, **kwds)
self.mixer = color_list.Mixer(
self.color_list,
[a.color_list for a in self.animations],
levels)
def step(self, amt=1):
super().step(amt)
self.mixer.clear()
self.mixer.mix(self.master)
Copy `levels` up into the Mixer animation for addressing convenienceimport copy
from . import parallel
from .. util import color_list
class Mixer(parallel.Parallel):
def __init__(self, *args, levels=None, master=1, **kwds):
self.master = master
super().__init__(*args, **kwds)
self.mixer = color_list.Mixer(
self.color_list,
[a.color_list for a in self.animations],
levels)
self.levels = self.mixer.levels
def step(self, amt=1):
super().step(amt)
self.mixer.clear()
self.mixer.mix(self.master)
|
<commit_before>import copy
from . import parallel
from .. util import color_list
class Mixer(parallel.Parallel):
def __init__(self, *args, levels=None, master=1, **kwds):
self.master = master
super().__init__(*args, **kwds)
self.mixer = color_list.Mixer(
self.color_list,
[a.color_list for a in self.animations],
levels)
def step(self, amt=1):
super().step(amt)
self.mixer.clear()
self.mixer.mix(self.master)
<commit_msg>Copy `levels` up into the Mixer animation for addressing convenience<commit_after>import copy
from . import parallel
from .. util import color_list
class Mixer(parallel.Parallel):
def __init__(self, *args, levels=None, master=1, **kwds):
self.master = master
super().__init__(*args, **kwds)
self.mixer = color_list.Mixer(
self.color_list,
[a.color_list for a in self.animations],
levels)
self.levels = self.mixer.levels
def step(self, amt=1):
super().step(amt)
self.mixer.clear()
self.mixer.mix(self.master)
|
942ccea9445789423b3ea5131dcbd42c5a509797
|
conans/model/conan_generator.py
|
conans/model/conan_generator.py
|
from conans.util.files import save
from conans.errors import ConanException
from abc import ABCMeta, abstractproperty
class Generator(object):
__metaclass__ = ABCMeta
def __init__(self, deps_build_info, build_info):
self._deps_build_info = deps_build_info
self._build_info = build_info
@abstractproperty
def deps_build_info(self):
return self._deps_build_info
@abstractproperty
def build_info(self):
return self._build_info
@abstractproperty
def content(self):
raise NotImplementedError()
@abstractproperty
def filename(self):
raise NotImplementedError()
class GeneratorManager(object):
def __init__(self):
self._known_generators = {}
def add(self, name, generator_class):
if name in self._known_generators:
raise ConanException()
elif not issubclass(generator_class, Generator):
raise ConanException()
else:
self._known_generators[name] = generator_class
def remove(self, name):
if name in self._known_generators:
del self._known_generators[name]
@property
def available(self):
return self._known_generators.keys()
def __contains__(self, key):
return key in self._known_generators
def __getitem__(self, key):
return self._known_generators[key]
|
from conans.util.files import save
from conans.errors import ConanException
from abc import ABCMeta, abstractproperty
class Generator(object):
__metaclass__ = ABCMeta
def __init__(self, deps_build_info, build_info):
self._deps_build_info = deps_build_info
self._build_info = build_info
@property
def deps_build_info(self):
return self._deps_build_info
@property
def build_info(self):
return self._build_info
@abstractproperty
def content(self):
raise NotImplementedError()
@abstractproperty
def filename(self):
raise NotImplementedError()
class GeneratorManager(object):
def __init__(self):
self._known_generators = {}
def add(self, name, generator_class):
if name in self._known_generators:
raise ConanException()
elif not issubclass(generator_class, Generator):
raise ConanException()
else:
self._known_generators[name] = generator_class
def remove(self, name):
if name in self._known_generators:
del self._known_generators[name]
@property
def available(self):
return self._known_generators.keys()
def __contains__(self, key):
return key in self._known_generators
def __getitem__(self, key):
return self._known_generators[key]
|
Fix bug associated with bad ABC implementation.
|
Fix bug associated with bad ABC implementation.
|
Python
|
mit
|
tivek/conan,mropert/conan,memsharded/conan,dragly/conan,conan-io/conan,dragly/conan,Xaltotun/conan,tivek/conan,mropert/conan,memsharded/conan,luckielordie/conan,Xaltotun/conan,memsharded/conan,conan-io/conan,lasote/conan,birsoyo/conan,conan-io/conan,luckielordie/conan,lasote/conan,birsoyo/conan,memsharded/conan
|
from conans.util.files import save
from conans.errors import ConanException
from abc import ABCMeta, abstractproperty
class Generator(object):
__metaclass__ = ABCMeta
def __init__(self, deps_build_info, build_info):
self._deps_build_info = deps_build_info
self._build_info = build_info
@abstractproperty
def deps_build_info(self):
return self._deps_build_info
@abstractproperty
def build_info(self):
return self._build_info
@abstractproperty
def content(self):
raise NotImplementedError()
@abstractproperty
def filename(self):
raise NotImplementedError()
class GeneratorManager(object):
def __init__(self):
self._known_generators = {}
def add(self, name, generator_class):
if name in self._known_generators:
raise ConanException()
elif not issubclass(generator_class, Generator):
raise ConanException()
else:
self._known_generators[name] = generator_class
def remove(self, name):
if name in self._known_generators:
del self._known_generators[name]
@property
def available(self):
return self._known_generators.keys()
def __contains__(self, key):
return key in self._known_generators
def __getitem__(self, key):
return self._known_generators[key]
Fix bug associated with bad ABC implementation.
|
from conans.util.files import save
from conans.errors import ConanException
from abc import ABCMeta, abstractproperty
class Generator(object):
__metaclass__ = ABCMeta
def __init__(self, deps_build_info, build_info):
self._deps_build_info = deps_build_info
self._build_info = build_info
@property
def deps_build_info(self):
return self._deps_build_info
@property
def build_info(self):
return self._build_info
@abstractproperty
def content(self):
raise NotImplementedError()
@abstractproperty
def filename(self):
raise NotImplementedError()
class GeneratorManager(object):
def __init__(self):
self._known_generators = {}
def add(self, name, generator_class):
if name in self._known_generators:
raise ConanException()
elif not issubclass(generator_class, Generator):
raise ConanException()
else:
self._known_generators[name] = generator_class
def remove(self, name):
if name in self._known_generators:
del self._known_generators[name]
@property
def available(self):
return self._known_generators.keys()
def __contains__(self, key):
return key in self._known_generators
def __getitem__(self, key):
return self._known_generators[key]
|
<commit_before>from conans.util.files import save
from conans.errors import ConanException
from abc import ABCMeta, abstractproperty
class Generator(object):
__metaclass__ = ABCMeta
def __init__(self, deps_build_info, build_info):
self._deps_build_info = deps_build_info
self._build_info = build_info
@abstractproperty
def deps_build_info(self):
return self._deps_build_info
@abstractproperty
def build_info(self):
return self._build_info
@abstractproperty
def content(self):
raise NotImplementedError()
@abstractproperty
def filename(self):
raise NotImplementedError()
class GeneratorManager(object):
def __init__(self):
self._known_generators = {}
def add(self, name, generator_class):
if name in self._known_generators:
raise ConanException()
elif not issubclass(generator_class, Generator):
raise ConanException()
else:
self._known_generators[name] = generator_class
def remove(self, name):
if name in self._known_generators:
del self._known_generators[name]
@property
def available(self):
return self._known_generators.keys()
def __contains__(self, key):
return key in self._known_generators
def __getitem__(self, key):
return self._known_generators[key]
<commit_msg>Fix bug associated with bad ABC implementation.<commit_after>
|
from conans.util.files import save
from conans.errors import ConanException
from abc import ABCMeta, abstractproperty
class Generator(object):
__metaclass__ = ABCMeta
def __init__(self, deps_build_info, build_info):
self._deps_build_info = deps_build_info
self._build_info = build_info
@property
def deps_build_info(self):
return self._deps_build_info
@property
def build_info(self):
return self._build_info
@abstractproperty
def content(self):
raise NotImplementedError()
@abstractproperty
def filename(self):
raise NotImplementedError()
class GeneratorManager(object):
def __init__(self):
self._known_generators = {}
def add(self, name, generator_class):
if name in self._known_generators:
raise ConanException()
elif not issubclass(generator_class, Generator):
raise ConanException()
else:
self._known_generators[name] = generator_class
def remove(self, name):
if name in self._known_generators:
del self._known_generators[name]
@property
def available(self):
return self._known_generators.keys()
def __contains__(self, key):
return key in self._known_generators
def __getitem__(self, key):
return self._known_generators[key]
|
from conans.util.files import save
from conans.errors import ConanException
from abc import ABCMeta, abstractproperty
class Generator(object):
__metaclass__ = ABCMeta
def __init__(self, deps_build_info, build_info):
self._deps_build_info = deps_build_info
self._build_info = build_info
@abstractproperty
def deps_build_info(self):
return self._deps_build_info
@abstractproperty
def build_info(self):
return self._build_info
@abstractproperty
def content(self):
raise NotImplementedError()
@abstractproperty
def filename(self):
raise NotImplementedError()
class GeneratorManager(object):
def __init__(self):
self._known_generators = {}
def add(self, name, generator_class):
if name in self._known_generators:
raise ConanException()
elif not issubclass(generator_class, Generator):
raise ConanException()
else:
self._known_generators[name] = generator_class
def remove(self, name):
if name in self._known_generators:
del self._known_generators[name]
@property
def available(self):
return self._known_generators.keys()
def __contains__(self, key):
return key in self._known_generators
def __getitem__(self, key):
return self._known_generators[key]
Fix bug associated with bad ABC implementation.from conans.util.files import save
from conans.errors import ConanException
from abc import ABCMeta, abstractproperty
class Generator(object):
__metaclass__ = ABCMeta
def __init__(self, deps_build_info, build_info):
self._deps_build_info = deps_build_info
self._build_info = build_info
@property
def deps_build_info(self):
return self._deps_build_info
@property
def build_info(self):
return self._build_info
@abstractproperty
def content(self):
raise NotImplementedError()
@abstractproperty
def filename(self):
raise NotImplementedError()
class GeneratorManager(object):
def __init__(self):
self._known_generators = {}
def add(self, name, generator_class):
if name in self._known_generators:
raise ConanException()
elif not issubclass(generator_class, Generator):
raise ConanException()
else:
self._known_generators[name] = generator_class
def remove(self, name):
if name in self._known_generators:
del self._known_generators[name]
@property
def available(self):
return self._known_generators.keys()
def __contains__(self, key):
return key in self._known_generators
def __getitem__(self, key):
return self._known_generators[key]
|
<commit_before>from conans.util.files import save
from conans.errors import ConanException
from abc import ABCMeta, abstractproperty
class Generator(object):
__metaclass__ = ABCMeta
def __init__(self, deps_build_info, build_info):
self._deps_build_info = deps_build_info
self._build_info = build_info
@abstractproperty
def deps_build_info(self):
return self._deps_build_info
@abstractproperty
def build_info(self):
return self._build_info
@abstractproperty
def content(self):
raise NotImplementedError()
@abstractproperty
def filename(self):
raise NotImplementedError()
class GeneratorManager(object):
def __init__(self):
self._known_generators = {}
def add(self, name, generator_class):
if name in self._known_generators:
raise ConanException()
elif not issubclass(generator_class, Generator):
raise ConanException()
else:
self._known_generators[name] = generator_class
def remove(self, name):
if name in self._known_generators:
del self._known_generators[name]
@property
def available(self):
return self._known_generators.keys()
def __contains__(self, key):
return key in self._known_generators
def __getitem__(self, key):
return self._known_generators[key]
<commit_msg>Fix bug associated with bad ABC implementation.<commit_after>from conans.util.files import save
from conans.errors import ConanException
from abc import ABCMeta, abstractproperty
class Generator(object):
__metaclass__ = ABCMeta
def __init__(self, deps_build_info, build_info):
self._deps_build_info = deps_build_info
self._build_info = build_info
@property
def deps_build_info(self):
return self._deps_build_info
@property
def build_info(self):
return self._build_info
@abstractproperty
def content(self):
raise NotImplementedError()
@abstractproperty
def filename(self):
raise NotImplementedError()
class GeneratorManager(object):
def __init__(self):
self._known_generators = {}
def add(self, name, generator_class):
if name in self._known_generators:
raise ConanException()
elif not issubclass(generator_class, Generator):
raise ConanException()
else:
self._known_generators[name] = generator_class
def remove(self, name):
if name in self._known_generators:
del self._known_generators[name]
@property
def available(self):
return self._known_generators.keys()
def __contains__(self, key):
return key in self._known_generators
def __getitem__(self, key):
return self._known_generators[key]
|
2bcf80e71ffc75796ef7d3667f61e57a884e5c5b
|
angr/__init__.py
|
angr/__init__.py
|
""" Angr module """
# pylint: disable=wildcard-import
import logging
logging.getLogger("angr").addHandler(logging.NullHandler())
from .project import *
from .functionmanager import *
from .variableseekr import *
from .regmap import *
from .path import *
from .errors import *
from .surveyor import *
from .service import *
from .analyses import *
from .analysis import *
from .tablespecs import *
from . import surveyors
from .blade import Blade
from .simos import SimOS
from .path_group import PathGroup
from .surveyors.caller import Callable
from .log import Loggers
loggers = Loggers()
|
""" Angr module """
# pylint: disable=wildcard-import
import logging
logging.getLogger("angr").addHandler(logging.NullHandler())
from .project import *
from .functionmanager import *
from .variableseekr import *
from .regmap import *
from .path import *
from .errors import *
from .surveyor import *
from .service import *
from .analyses import *
from .analysis import *
from .tablespecs import *
from . import surveyors
from .blade import Blade
from .simos import SimOS
from .path_group import PathGroup
from .surveyors.caller import Callable
from .log import Loggers
loggers = Loggers(logging.ERROR)
|
Make default logging level ERROR
|
Make default logging level ERROR
|
Python
|
bsd-2-clause
|
tyb0807/angr,axt/angr,chubbymaggie/angr,haylesr/angr,schieb/angr,chubbymaggie/angr,angr/angr,f-prettyland/angr,haylesr/angr,tyb0807/angr,axt/angr,angr/angr,angr/angr,schieb/angr,iamahuman/angr,chubbymaggie/angr,iamahuman/angr,schieb/angr,tyb0807/angr,iamahuman/angr,axt/angr,f-prettyland/angr,f-prettyland/angr
|
""" Angr module """
# pylint: disable=wildcard-import
import logging
logging.getLogger("angr").addHandler(logging.NullHandler())
from .project import *
from .functionmanager import *
from .variableseekr import *
from .regmap import *
from .path import *
from .errors import *
from .surveyor import *
from .service import *
from .analyses import *
from .analysis import *
from .tablespecs import *
from . import surveyors
from .blade import Blade
from .simos import SimOS
from .path_group import PathGroup
from .surveyors.caller import Callable
from .log import Loggers
loggers = Loggers()
Make default logging level ERROR
|
""" Angr module """
# pylint: disable=wildcard-import
import logging
logging.getLogger("angr").addHandler(logging.NullHandler())
from .project import *
from .functionmanager import *
from .variableseekr import *
from .regmap import *
from .path import *
from .errors import *
from .surveyor import *
from .service import *
from .analyses import *
from .analysis import *
from .tablespecs import *
from . import surveyors
from .blade import Blade
from .simos import SimOS
from .path_group import PathGroup
from .surveyors.caller import Callable
from .log import Loggers
loggers = Loggers(logging.ERROR)
|
<commit_before>""" Angr module """
# pylint: disable=wildcard-import
import logging
logging.getLogger("angr").addHandler(logging.NullHandler())
from .project import *
from .functionmanager import *
from .variableseekr import *
from .regmap import *
from .path import *
from .errors import *
from .surveyor import *
from .service import *
from .analyses import *
from .analysis import *
from .tablespecs import *
from . import surveyors
from .blade import Blade
from .simos import SimOS
from .path_group import PathGroup
from .surveyors.caller import Callable
from .log import Loggers
loggers = Loggers()
<commit_msg>Make default logging level ERROR<commit_after>
|
""" Angr module """
# pylint: disable=wildcard-import
import logging
logging.getLogger("angr").addHandler(logging.NullHandler())
from .project import *
from .functionmanager import *
from .variableseekr import *
from .regmap import *
from .path import *
from .errors import *
from .surveyor import *
from .service import *
from .analyses import *
from .analysis import *
from .tablespecs import *
from . import surveyors
from .blade import Blade
from .simos import SimOS
from .path_group import PathGroup
from .surveyors.caller import Callable
from .log import Loggers
loggers = Loggers(logging.ERROR)
|
""" Angr module """
# pylint: disable=wildcard-import
import logging
logging.getLogger("angr").addHandler(logging.NullHandler())
from .project import *
from .functionmanager import *
from .variableseekr import *
from .regmap import *
from .path import *
from .errors import *
from .surveyor import *
from .service import *
from .analyses import *
from .analysis import *
from .tablespecs import *
from . import surveyors
from .blade import Blade
from .simos import SimOS
from .path_group import PathGroup
from .surveyors.caller import Callable
from .log import Loggers
loggers = Loggers()
Make default logging level ERROR""" Angr module """
# pylint: disable=wildcard-import
import logging
logging.getLogger("angr").addHandler(logging.NullHandler())
from .project import *
from .functionmanager import *
from .variableseekr import *
from .regmap import *
from .path import *
from .errors import *
from .surveyor import *
from .service import *
from .analyses import *
from .analysis import *
from .tablespecs import *
from . import surveyors
from .blade import Blade
from .simos import SimOS
from .path_group import PathGroup
from .surveyors.caller import Callable
from .log import Loggers
loggers = Loggers(logging.ERROR)
|
<commit_before>""" Angr module """
# pylint: disable=wildcard-import
import logging
logging.getLogger("angr").addHandler(logging.NullHandler())
from .project import *
from .functionmanager import *
from .variableseekr import *
from .regmap import *
from .path import *
from .errors import *
from .surveyor import *
from .service import *
from .analyses import *
from .analysis import *
from .tablespecs import *
from . import surveyors
from .blade import Blade
from .simos import SimOS
from .path_group import PathGroup
from .surveyors.caller import Callable
from .log import Loggers
loggers = Loggers()
<commit_msg>Make default logging level ERROR<commit_after>""" Angr module """
# pylint: disable=wildcard-import
import logging
logging.getLogger("angr").addHandler(logging.NullHandler())
from .project import *
from .functionmanager import *
from .variableseekr import *
from .regmap import *
from .path import *
from .errors import *
from .surveyor import *
from .service import *
from .analyses import *
from .analysis import *
from .tablespecs import *
from . import surveyors
from .blade import Blade
from .simos import SimOS
from .path_group import PathGroup
from .surveyors.caller import Callable
from .log import Loggers
loggers = Loggers(logging.ERROR)
|
96ddcc42b7da307433a3dce99aad1b2cdeff58cd
|
app/app/views.py
|
app/app/views.py
|
from pyramid.response import Response
from pyramid.view import view_config
from sqlalchemy.exc import DBAPIError
from .models import DBSession, PathAndRow_Model, SceneList_Model
@view_config(route_name='index', renderer='templates/index.jinja2')
def index(request):
'''Index page.'''
lat = float(request.params.get('lat', 47.614848))
lng = float(request.params.get('lng', -122.3359059))
scenes = SceneList_Model.scenelist(PathAndRow_Model.pathandrow(lat, lng))
return {'scenes': scenes}
@view_config(route_name='submit', renderer='json')
def submit(request):
'''Accept a post request.'''
return {}
|
from pyramid.response import Response
from pyramid.view import view_config
from sqlalchemy.exc import DBAPIError
from .models import DBSession, PathAndRow_Model, SceneList_Model
from sqs import *
import os
@view_config(route_name='index', renderer='templates/index.jinja2')
def index(request):
'''Index page.'''
lat = float(request.params.get('lat', 47.614848))
lng = float(request.params.get('lng', -122.3359059))
scenes = SceneList_Model.scenelist(PathAndRow_Model.pathandrow(lat, lng))
return {'scenes': scenes}
@view_config(route_name='submit', renderer='json')
def submit(request):
'''Accept a post request.'''
conn = make_connection(aws_access_key_id=os.environ['AWS_ACCESS_KEY_ID'],
aws_secret_access_key=os.environ['AWS_SECRET_ACCESS_KEY'])
jobs_queue = get_queue('landsat_jobs_queue', conn)
message = build_job_message(job_id=1, email='test@test.com',
scene_id=request.params.get(scene_id)
band_1=4, band_2=3, band_3=2)
enqueue_message(message, jobs_queue)
return None
|
Add sqs to submit view
|
Add sqs to submit view
|
Python
|
mit
|
recombinators/snapsat,recombinators/snapsat,recombinators/snapsat
|
from pyramid.response import Response
from pyramid.view import view_config
from sqlalchemy.exc import DBAPIError
from .models import DBSession, PathAndRow_Model, SceneList_Model
@view_config(route_name='index', renderer='templates/index.jinja2')
def index(request):
'''Index page.'''
lat = float(request.params.get('lat', 47.614848))
lng = float(request.params.get('lng', -122.3359059))
scenes = SceneList_Model.scenelist(PathAndRow_Model.pathandrow(lat, lng))
return {'scenes': scenes}
@view_config(route_name='submit', renderer='json')
def submit(request):
'''Accept a post request.'''
return {}
Add sqs to submit view
|
from pyramid.response import Response
from pyramid.view import view_config
from sqlalchemy.exc import DBAPIError
from .models import DBSession, PathAndRow_Model, SceneList_Model
from sqs import *
import os
@view_config(route_name='index', renderer='templates/index.jinja2')
def index(request):
'''Index page.'''
lat = float(request.params.get('lat', 47.614848))
lng = float(request.params.get('lng', -122.3359059))
scenes = SceneList_Model.scenelist(PathAndRow_Model.pathandrow(lat, lng))
return {'scenes': scenes}
@view_config(route_name='submit', renderer='json')
def submit(request):
'''Accept a post request.'''
conn = make_connection(aws_access_key_id=os.environ['AWS_ACCESS_KEY_ID'],
aws_secret_access_key=os.environ['AWS_SECRET_ACCESS_KEY'])
jobs_queue = get_queue('landsat_jobs_queue', conn)
message = build_job_message(job_id=1, email='test@test.com',
scene_id=request.params.get(scene_id)
band_1=4, band_2=3, band_3=2)
enqueue_message(message, jobs_queue)
return None
|
<commit_before>from pyramid.response import Response
from pyramid.view import view_config
from sqlalchemy.exc import DBAPIError
from .models import DBSession, PathAndRow_Model, SceneList_Model
@view_config(route_name='index', renderer='templates/index.jinja2')
def index(request):
'''Index page.'''
lat = float(request.params.get('lat', 47.614848))
lng = float(request.params.get('lng', -122.3359059))
scenes = SceneList_Model.scenelist(PathAndRow_Model.pathandrow(lat, lng))
return {'scenes': scenes}
@view_config(route_name='submit', renderer='json')
def submit(request):
'''Accept a post request.'''
return {}
<commit_msg>Add sqs to submit view<commit_after>
|
from pyramid.response import Response
from pyramid.view import view_config
from sqlalchemy.exc import DBAPIError
from .models import DBSession, PathAndRow_Model, SceneList_Model
from sqs import *
import os
@view_config(route_name='index', renderer='templates/index.jinja2')
def index(request):
'''Index page.'''
lat = float(request.params.get('lat', 47.614848))
lng = float(request.params.get('lng', -122.3359059))
scenes = SceneList_Model.scenelist(PathAndRow_Model.pathandrow(lat, lng))
return {'scenes': scenes}
@view_config(route_name='submit', renderer='json')
def submit(request):
'''Accept a post request.'''
conn = make_connection(aws_access_key_id=os.environ['AWS_ACCESS_KEY_ID'],
aws_secret_access_key=os.environ['AWS_SECRET_ACCESS_KEY'])
jobs_queue = get_queue('landsat_jobs_queue', conn)
message = build_job_message(job_id=1, email='test@test.com',
scene_id=request.params.get(scene_id)
band_1=4, band_2=3, band_3=2)
enqueue_message(message, jobs_queue)
return None
|
from pyramid.response import Response
from pyramid.view import view_config
from sqlalchemy.exc import DBAPIError
from .models import DBSession, PathAndRow_Model, SceneList_Model
@view_config(route_name='index', renderer='templates/index.jinja2')
def index(request):
'''Index page.'''
lat = float(request.params.get('lat', 47.614848))
lng = float(request.params.get('lng', -122.3359059))
scenes = SceneList_Model.scenelist(PathAndRow_Model.pathandrow(lat, lng))
return {'scenes': scenes}
@view_config(route_name='submit', renderer='json')
def submit(request):
'''Accept a post request.'''
return {}
Add sqs to submit viewfrom pyramid.response import Response
from pyramid.view import view_config
from sqlalchemy.exc import DBAPIError
from .models import DBSession, PathAndRow_Model, SceneList_Model
from sqs import *
import os
@view_config(route_name='index', renderer='templates/index.jinja2')
def index(request):
'''Index page.'''
lat = float(request.params.get('lat', 47.614848))
lng = float(request.params.get('lng', -122.3359059))
scenes = SceneList_Model.scenelist(PathAndRow_Model.pathandrow(lat, lng))
return {'scenes': scenes}
@view_config(route_name='submit', renderer='json')
def submit(request):
'''Accept a post request.'''
conn = make_connection(aws_access_key_id=os.environ['AWS_ACCESS_KEY_ID'],
aws_secret_access_key=os.environ['AWS_SECRET_ACCESS_KEY'])
jobs_queue = get_queue('landsat_jobs_queue', conn)
message = build_job_message(job_id=1, email='test@test.com',
scene_id=request.params.get(scene_id)
band_1=4, band_2=3, band_3=2)
enqueue_message(message, jobs_queue)
return None
|
<commit_before>from pyramid.response import Response
from pyramid.view import view_config
from sqlalchemy.exc import DBAPIError
from .models import DBSession, PathAndRow_Model, SceneList_Model
@view_config(route_name='index', renderer='templates/index.jinja2')
def index(request):
'''Index page.'''
lat = float(request.params.get('lat', 47.614848))
lng = float(request.params.get('lng', -122.3359059))
scenes = SceneList_Model.scenelist(PathAndRow_Model.pathandrow(lat, lng))
return {'scenes': scenes}
@view_config(route_name='submit', renderer='json')
def submit(request):
'''Accept a post request.'''
return {}
<commit_msg>Add sqs to submit view<commit_after>from pyramid.response import Response
from pyramid.view import view_config
from sqlalchemy.exc import DBAPIError
from .models import DBSession, PathAndRow_Model, SceneList_Model
from sqs import *
import os
@view_config(route_name='index', renderer='templates/index.jinja2')
def index(request):
'''Index page.'''
lat = float(request.params.get('lat', 47.614848))
lng = float(request.params.get('lng', -122.3359059))
scenes = SceneList_Model.scenelist(PathAndRow_Model.pathandrow(lat, lng))
return {'scenes': scenes}
@view_config(route_name='submit', renderer='json')
def submit(request):
'''Accept a post request.'''
conn = make_connection(aws_access_key_id=os.environ['AWS_ACCESS_KEY_ID'],
aws_secret_access_key=os.environ['AWS_SECRET_ACCESS_KEY'])
jobs_queue = get_queue('landsat_jobs_queue', conn)
message = build_job_message(job_id=1, email='test@test.com',
scene_id=request.params.get(scene_id)
band_1=4, band_2=3, band_3=2)
enqueue_message(message, jobs_queue)
return None
|
d7a864a0df2723657d7ff5b02c7568042d49093f
|
oonib/deck/api.py
|
oonib/deck/api.py
|
from cyclone import web
from oonib.deck import handlers
from oonib import config
deckAPI = [
(r"/deck", handlers.DeckListHandler),
(r"/deck/([a-z0-9]{40})$", handlers.DeckDescHandler),
(r"/deck/([a-z0-9]{40})/yaml$", web.StaticFileHandler, {"path":
config.main.deck_dir}),
]
|
from cyclone import web
from oonib.deck import handlers
from oonib import config
deckAPI = [
(r"/deck", handlers.DeckListHandler),
(r"/deck/([a-z0-9]{64})$", handlers.DeckDescHandler),
(r"/deck/([a-z0-9]{64})/yaml$", web.StaticFileHandler, {"path":
config.main.deck_dir}),
]
|
Use sha256 as per oonib.md spec
|
Use sha256 as per oonib.md spec
|
Python
|
bsd-2-clause
|
dstufft/ooni-backend,dstufft/ooni-backend,DoNotUseThisCodeJUSTFORKS/ooni-backend,DoNotUseThisCodeJUSTFORKS/ooni-backend
|
from cyclone import web
from oonib.deck import handlers
from oonib import config
deckAPI = [
(r"/deck", handlers.DeckListHandler),
(r"/deck/([a-z0-9]{40})$", handlers.DeckDescHandler),
(r"/deck/([a-z0-9]{40})/yaml$", web.StaticFileHandler, {"path":
config.main.deck_dir}),
]
Use sha256 as per oonib.md spec
|
from cyclone import web
from oonib.deck import handlers
from oonib import config
deckAPI = [
(r"/deck", handlers.DeckListHandler),
(r"/deck/([a-z0-9]{64})$", handlers.DeckDescHandler),
(r"/deck/([a-z0-9]{64})/yaml$", web.StaticFileHandler, {"path":
config.main.deck_dir}),
]
|
<commit_before>from cyclone import web
from oonib.deck import handlers
from oonib import config
deckAPI = [
(r"/deck", handlers.DeckListHandler),
(r"/deck/([a-z0-9]{40})$", handlers.DeckDescHandler),
(r"/deck/([a-z0-9]{40})/yaml$", web.StaticFileHandler, {"path":
config.main.deck_dir}),
]
<commit_msg>Use sha256 as per oonib.md spec<commit_after>
|
from cyclone import web
from oonib.deck import handlers
from oonib import config
deckAPI = [
(r"/deck", handlers.DeckListHandler),
(r"/deck/([a-z0-9]{64})$", handlers.DeckDescHandler),
(r"/deck/([a-z0-9]{64})/yaml$", web.StaticFileHandler, {"path":
config.main.deck_dir}),
]
|
from cyclone import web
from oonib.deck import handlers
from oonib import config
deckAPI = [
(r"/deck", handlers.DeckListHandler),
(r"/deck/([a-z0-9]{40})$", handlers.DeckDescHandler),
(r"/deck/([a-z0-9]{40})/yaml$", web.StaticFileHandler, {"path":
config.main.deck_dir}),
]
Use sha256 as per oonib.md specfrom cyclone import web
from oonib.deck import handlers
from oonib import config
deckAPI = [
(r"/deck", handlers.DeckListHandler),
(r"/deck/([a-z0-9]{64})$", handlers.DeckDescHandler),
(r"/deck/([a-z0-9]{64})/yaml$", web.StaticFileHandler, {"path":
config.main.deck_dir}),
]
|
<commit_before>from cyclone import web
from oonib.deck import handlers
from oonib import config
deckAPI = [
(r"/deck", handlers.DeckListHandler),
(r"/deck/([a-z0-9]{40})$", handlers.DeckDescHandler),
(r"/deck/([a-z0-9]{40})/yaml$", web.StaticFileHandler, {"path":
config.main.deck_dir}),
]
<commit_msg>Use sha256 as per oonib.md spec<commit_after>from cyclone import web
from oonib.deck import handlers
from oonib import config
deckAPI = [
(r"/deck", handlers.DeckListHandler),
(r"/deck/([a-z0-9]{64})$", handlers.DeckDescHandler),
(r"/deck/([a-z0-9]{64})/yaml$", web.StaticFileHandler, {"path":
config.main.deck_dir}),
]
|
1d0585dcb1caaec8b9fbcc7eb8c4c31e6a382af4
|
models/ras_220_genes/batch_doi_lookup.py
|
models/ras_220_genes/batch_doi_lookup.py
|
import csv
from indra.literature import pubmed_client, crossref_client
doi_cache = {}
with open('doi_cache.txt') as f:
csvreader = csv.reader(f, delimiter='\t')
for row in csvreader:
doi_cache[row[0]] = row[1]
with open('missing_dois.txt') as f:
missing_dois = [line.strip('\n') for line in f.readlines()]
for counter, ref in enumerate(missing_dois):
if doi_cache.get(ref):
print "Already got", ref
continue
title = pubmed_client.get_title(ref)
if not title:
print "No title, skipping", ref
continue
doi = crossref_client.doi_query(title)
doi_cache[ref] = doi
print "%d: %s --> %s" % (counter, ref, doi)
if counter % 100 == 0:
with open('doi_cache_%.5d.txt' % counter, 'w') as f:
print "Writing to doi cache"
csvwriter = csv.writer(f, delimiter='\t')
for k, v in doi_cache.iteritems():
csvwriter.writerow((k, v))
|
import csv
from indra.literature import pubmed_client, crossref_client
doi_cache = {}
with open('doi_cache.txt') as f:
csvreader = csv.reader(f, delimiter='\t')
for row in csvreader:
doi_cache[row[0]] = row[1]
with open('missing_dois.txt') as f:
missing_dois = [line.strip('\n') for line in f.readlines()]
def save(doi_cache, counter):
with open('doi_cache_%.5d.txt' % counter, 'w') as f:
print "Writing to doi cache"
csvwriter = csv.writer(f, delimiter='\t')
for k, v in doi_cache.iteritems():
csvwriter.writerow((k, v))
for counter, ref in enumerate(missing_dois):
if doi_cache.get(ref):
print "Already got", ref
continue
title = pubmed_client.get_title(ref)
if not title:
print "No title, skipping", ref
continue
doi = crossref_client.doi_query(title)
doi_cache[ref] = doi
print "%d: %s --> %s" % (counter, ref, doi)
if counter % 100 == 0:
save(doi_cache, counter)
save(doi_cache, counter)
|
Add final save to batch lookup
|
Add final save to batch lookup
|
Python
|
bsd-2-clause
|
sorgerlab/indra,sorgerlab/indra,johnbachman/indra,jmuhlich/indra,johnbachman/indra,sorgerlab/belpy,jmuhlich/indra,johnbachman/belpy,jmuhlich/indra,pvtodorov/indra,sorgerlab/indra,pvtodorov/indra,sorgerlab/belpy,pvtodorov/indra,johnbachman/belpy,bgyori/indra,johnbachman/indra,pvtodorov/indra,bgyori/indra,sorgerlab/belpy,johnbachman/belpy,bgyori/indra
|
import csv
from indra.literature import pubmed_client, crossref_client
doi_cache = {}
with open('doi_cache.txt') as f:
csvreader = csv.reader(f, delimiter='\t')
for row in csvreader:
doi_cache[row[0]] = row[1]
with open('missing_dois.txt') as f:
missing_dois = [line.strip('\n') for line in f.readlines()]
for counter, ref in enumerate(missing_dois):
if doi_cache.get(ref):
print "Already got", ref
continue
title = pubmed_client.get_title(ref)
if not title:
print "No title, skipping", ref
continue
doi = crossref_client.doi_query(title)
doi_cache[ref] = doi
print "%d: %s --> %s" % (counter, ref, doi)
if counter % 100 == 0:
with open('doi_cache_%.5d.txt' % counter, 'w') as f:
print "Writing to doi cache"
csvwriter = csv.writer(f, delimiter='\t')
for k, v in doi_cache.iteritems():
csvwriter.writerow((k, v))
Add final save to batch lookup
|
import csv
from indra.literature import pubmed_client, crossref_client
doi_cache = {}
with open('doi_cache.txt') as f:
csvreader = csv.reader(f, delimiter='\t')
for row in csvreader:
doi_cache[row[0]] = row[1]
with open('missing_dois.txt') as f:
missing_dois = [line.strip('\n') for line in f.readlines()]
def save(doi_cache, counter):
with open('doi_cache_%.5d.txt' % counter, 'w') as f:
print "Writing to doi cache"
csvwriter = csv.writer(f, delimiter='\t')
for k, v in doi_cache.iteritems():
csvwriter.writerow((k, v))
for counter, ref in enumerate(missing_dois):
if doi_cache.get(ref):
print "Already got", ref
continue
title = pubmed_client.get_title(ref)
if not title:
print "No title, skipping", ref
continue
doi = crossref_client.doi_query(title)
doi_cache[ref] = doi
print "%d: %s --> %s" % (counter, ref, doi)
if counter % 100 == 0:
save(doi_cache, counter)
save(doi_cache, counter)
|
<commit_before>import csv
from indra.literature import pubmed_client, crossref_client
doi_cache = {}
with open('doi_cache.txt') as f:
csvreader = csv.reader(f, delimiter='\t')
for row in csvreader:
doi_cache[row[0]] = row[1]
with open('missing_dois.txt') as f:
missing_dois = [line.strip('\n') for line in f.readlines()]
for counter, ref in enumerate(missing_dois):
if doi_cache.get(ref):
print "Already got", ref
continue
title = pubmed_client.get_title(ref)
if not title:
print "No title, skipping", ref
continue
doi = crossref_client.doi_query(title)
doi_cache[ref] = doi
print "%d: %s --> %s" % (counter, ref, doi)
if counter % 100 == 0:
with open('doi_cache_%.5d.txt' % counter, 'w') as f:
print "Writing to doi cache"
csvwriter = csv.writer(f, delimiter='\t')
for k, v in doi_cache.iteritems():
csvwriter.writerow((k, v))
<commit_msg>Add final save to batch lookup<commit_after>
|
import csv
from indra.literature import pubmed_client, crossref_client
doi_cache = {}
with open('doi_cache.txt') as f:
csvreader = csv.reader(f, delimiter='\t')
for row in csvreader:
doi_cache[row[0]] = row[1]
with open('missing_dois.txt') as f:
missing_dois = [line.strip('\n') for line in f.readlines()]
def save(doi_cache, counter):
with open('doi_cache_%.5d.txt' % counter, 'w') as f:
print "Writing to doi cache"
csvwriter = csv.writer(f, delimiter='\t')
for k, v in doi_cache.iteritems():
csvwriter.writerow((k, v))
for counter, ref in enumerate(missing_dois):
if doi_cache.get(ref):
print "Already got", ref
continue
title = pubmed_client.get_title(ref)
if not title:
print "No title, skipping", ref
continue
doi = crossref_client.doi_query(title)
doi_cache[ref] = doi
print "%d: %s --> %s" % (counter, ref, doi)
if counter % 100 == 0:
save(doi_cache, counter)
save(doi_cache, counter)
|
import csv
from indra.literature import pubmed_client, crossref_client
doi_cache = {}
with open('doi_cache.txt') as f:
csvreader = csv.reader(f, delimiter='\t')
for row in csvreader:
doi_cache[row[0]] = row[1]
with open('missing_dois.txt') as f:
missing_dois = [line.strip('\n') for line in f.readlines()]
for counter, ref in enumerate(missing_dois):
if doi_cache.get(ref):
print "Already got", ref
continue
title = pubmed_client.get_title(ref)
if not title:
print "No title, skipping", ref
continue
doi = crossref_client.doi_query(title)
doi_cache[ref] = doi
print "%d: %s --> %s" % (counter, ref, doi)
if counter % 100 == 0:
with open('doi_cache_%.5d.txt' % counter, 'w') as f:
print "Writing to doi cache"
csvwriter = csv.writer(f, delimiter='\t')
for k, v in doi_cache.iteritems():
csvwriter.writerow((k, v))
Add final save to batch lookupimport csv
from indra.literature import pubmed_client, crossref_client
doi_cache = {}
with open('doi_cache.txt') as f:
csvreader = csv.reader(f, delimiter='\t')
for row in csvreader:
doi_cache[row[0]] = row[1]
with open('missing_dois.txt') as f:
missing_dois = [line.strip('\n') for line in f.readlines()]
def save(doi_cache, counter):
with open('doi_cache_%.5d.txt' % counter, 'w') as f:
print "Writing to doi cache"
csvwriter = csv.writer(f, delimiter='\t')
for k, v in doi_cache.iteritems():
csvwriter.writerow((k, v))
for counter, ref in enumerate(missing_dois):
if doi_cache.get(ref):
print "Already got", ref
continue
title = pubmed_client.get_title(ref)
if not title:
print "No title, skipping", ref
continue
doi = crossref_client.doi_query(title)
doi_cache[ref] = doi
print "%d: %s --> %s" % (counter, ref, doi)
if counter % 100 == 0:
save(doi_cache, counter)
save(doi_cache, counter)
|
<commit_before>import csv
from indra.literature import pubmed_client, crossref_client
doi_cache = {}
with open('doi_cache.txt') as f:
csvreader = csv.reader(f, delimiter='\t')
for row in csvreader:
doi_cache[row[0]] = row[1]
with open('missing_dois.txt') as f:
missing_dois = [line.strip('\n') for line in f.readlines()]
for counter, ref in enumerate(missing_dois):
if doi_cache.get(ref):
print "Already got", ref
continue
title = pubmed_client.get_title(ref)
if not title:
print "No title, skipping", ref
continue
doi = crossref_client.doi_query(title)
doi_cache[ref] = doi
print "%d: %s --> %s" % (counter, ref, doi)
if counter % 100 == 0:
with open('doi_cache_%.5d.txt' % counter, 'w') as f:
print "Writing to doi cache"
csvwriter = csv.writer(f, delimiter='\t')
for k, v in doi_cache.iteritems():
csvwriter.writerow((k, v))
<commit_msg>Add final save to batch lookup<commit_after>import csv
from indra.literature import pubmed_client, crossref_client
doi_cache = {}
with open('doi_cache.txt') as f:
csvreader = csv.reader(f, delimiter='\t')
for row in csvreader:
doi_cache[row[0]] = row[1]
with open('missing_dois.txt') as f:
missing_dois = [line.strip('\n') for line in f.readlines()]
def save(doi_cache, counter):
with open('doi_cache_%.5d.txt' % counter, 'w') as f:
print "Writing to doi cache"
csvwriter = csv.writer(f, delimiter='\t')
for k, v in doi_cache.iteritems():
csvwriter.writerow((k, v))
for counter, ref in enumerate(missing_dois):
if doi_cache.get(ref):
print "Already got", ref
continue
title = pubmed_client.get_title(ref)
if not title:
print "No title, skipping", ref
continue
doi = crossref_client.doi_query(title)
doi_cache[ref] = doi
print "%d: %s --> %s" % (counter, ref, doi)
if counter % 100 == 0:
save(doi_cache, counter)
save(doi_cache, counter)
|
44ef3ed8a0a8edaae69178b141ba54f8112cedbc
|
tests/test_opensimplex.py
|
tests/test_opensimplex.py
|
import gzip
import json
import unittest
from opensimplex import OpenSimplex
class TestOpensimplex(unittest.TestCase):
def load_samples(self):
for line in gzip.open("tests/samples.json.gz"):
yield json.loads(line)
def test_samples(self):
simplex = OpenSimplex(seed=0)
for s in self.load_samples():
if len(s) == 3:
expected = s[2]
actual = simplex.noise2d(s[0], s[1])
elif len(s) == 4:
expected = s[3]
actual = simplex.noise3d(s[0], s[1], s[2])
elif len(s) == 5:
expected = s[4]
actual = simplex.noise4d(s[0], s[1], s[2], s[3])
else:
self.fail("Unexpected sample size: " + str(len(s)))
self.assertEqual(expected, actual)
if __name__ == "__main__":
unittest.main()
|
import gzip
import json
import unittest
from opensimplex import OpenSimplex
class TestOpensimplex(unittest.TestCase):
def load_samples(self):
for line in gzip.open("tests/samples.json.gz"):
# Python3: need to decode the line as it's a bytes object and json
# will only work on strings!
# TODO BUG: it will also take about 14 seconds to run the tests now! wtf
yield json.loads(line.decode("utf-8"))
def test_samples(self):
simplex = OpenSimplex(seed=0)
for s in self.load_samples():
if len(s) == 3:
expected = s[2]
actual = simplex.noise2d(s[0], s[1])
elif len(s) == 4:
expected = s[3]
actual = simplex.noise3d(s[0], s[1], s[2])
elif len(s) == 5:
expected = s[4]
actual = simplex.noise4d(s[0], s[1], s[2], s[3])
else:
self.fail("Unexpected sample size: " + str(len(s)))
self.assertEqual(expected, actual)
if __name__ == "__main__":
unittest.main()
|
Fix failing tests on py3k.
|
Fix failing tests on py3k.
|
Python
|
mit
|
lmas/opensimplex
|
import gzip
import json
import unittest
from opensimplex import OpenSimplex
class TestOpensimplex(unittest.TestCase):
def load_samples(self):
for line in gzip.open("tests/samples.json.gz"):
yield json.loads(line)
def test_samples(self):
simplex = OpenSimplex(seed=0)
for s in self.load_samples():
if len(s) == 3:
expected = s[2]
actual = simplex.noise2d(s[0], s[1])
elif len(s) == 4:
expected = s[3]
actual = simplex.noise3d(s[0], s[1], s[2])
elif len(s) == 5:
expected = s[4]
actual = simplex.noise4d(s[0], s[1], s[2], s[3])
else:
self.fail("Unexpected sample size: " + str(len(s)))
self.assertEqual(expected, actual)
if __name__ == "__main__":
unittest.main()
Fix failing tests on py3k.
|
import gzip
import json
import unittest
from opensimplex import OpenSimplex
class TestOpensimplex(unittest.TestCase):
def load_samples(self):
for line in gzip.open("tests/samples.json.gz"):
# Python3: need to decode the line as it's a bytes object and json
# will only work on strings!
# TODO BUG: it will also take about 14 seconds to run the tests now! wtf
yield json.loads(line.decode("utf-8"))
def test_samples(self):
simplex = OpenSimplex(seed=0)
for s in self.load_samples():
if len(s) == 3:
expected = s[2]
actual = simplex.noise2d(s[0], s[1])
elif len(s) == 4:
expected = s[3]
actual = simplex.noise3d(s[0], s[1], s[2])
elif len(s) == 5:
expected = s[4]
actual = simplex.noise4d(s[0], s[1], s[2], s[3])
else:
self.fail("Unexpected sample size: " + str(len(s)))
self.assertEqual(expected, actual)
if __name__ == "__main__":
unittest.main()
|
<commit_before>
import gzip
import json
import unittest
from opensimplex import OpenSimplex
class TestOpensimplex(unittest.TestCase):
def load_samples(self):
for line in gzip.open("tests/samples.json.gz"):
yield json.loads(line)
def test_samples(self):
simplex = OpenSimplex(seed=0)
for s in self.load_samples():
if len(s) == 3:
expected = s[2]
actual = simplex.noise2d(s[0], s[1])
elif len(s) == 4:
expected = s[3]
actual = simplex.noise3d(s[0], s[1], s[2])
elif len(s) == 5:
expected = s[4]
actual = simplex.noise4d(s[0], s[1], s[2], s[3])
else:
self.fail("Unexpected sample size: " + str(len(s)))
self.assertEqual(expected, actual)
if __name__ == "__main__":
unittest.main()
<commit_msg>Fix failing tests on py3k.<commit_after>
|
import gzip
import json
import unittest
from opensimplex import OpenSimplex
class TestOpensimplex(unittest.TestCase):
def load_samples(self):
for line in gzip.open("tests/samples.json.gz"):
# Python3: need to decode the line as it's a bytes object and json
# will only work on strings!
# TODO BUG: it will also take about 14 seconds to run the tests now! wtf
yield json.loads(line.decode("utf-8"))
def test_samples(self):
simplex = OpenSimplex(seed=0)
for s in self.load_samples():
if len(s) == 3:
expected = s[2]
actual = simplex.noise2d(s[0], s[1])
elif len(s) == 4:
expected = s[3]
actual = simplex.noise3d(s[0], s[1], s[2])
elif len(s) == 5:
expected = s[4]
actual = simplex.noise4d(s[0], s[1], s[2], s[3])
else:
self.fail("Unexpected sample size: " + str(len(s)))
self.assertEqual(expected, actual)
if __name__ == "__main__":
unittest.main()
|
import gzip
import json
import unittest
from opensimplex import OpenSimplex
class TestOpensimplex(unittest.TestCase):
def load_samples(self):
for line in gzip.open("tests/samples.json.gz"):
yield json.loads(line)
def test_samples(self):
simplex = OpenSimplex(seed=0)
for s in self.load_samples():
if len(s) == 3:
expected = s[2]
actual = simplex.noise2d(s[0], s[1])
elif len(s) == 4:
expected = s[3]
actual = simplex.noise3d(s[0], s[1], s[2])
elif len(s) == 5:
expected = s[4]
actual = simplex.noise4d(s[0], s[1], s[2], s[3])
else:
self.fail("Unexpected sample size: " + str(len(s)))
self.assertEqual(expected, actual)
if __name__ == "__main__":
unittest.main()
Fix failing tests on py3k.
import gzip
import json
import unittest
from opensimplex import OpenSimplex
class TestOpensimplex(unittest.TestCase):
def load_samples(self):
for line in gzip.open("tests/samples.json.gz"):
# Python3: need to decode the line as it's a bytes object and json
# will only work on strings!
# TODO BUG: it will also take about 14 seconds to run the tests now! wtf
yield json.loads(line.decode("utf-8"))
def test_samples(self):
simplex = OpenSimplex(seed=0)
for s in self.load_samples():
if len(s) == 3:
expected = s[2]
actual = simplex.noise2d(s[0], s[1])
elif len(s) == 4:
expected = s[3]
actual = simplex.noise3d(s[0], s[1], s[2])
elif len(s) == 5:
expected = s[4]
actual = simplex.noise4d(s[0], s[1], s[2], s[3])
else:
self.fail("Unexpected sample size: " + str(len(s)))
self.assertEqual(expected, actual)
if __name__ == "__main__":
unittest.main()
|
<commit_before>
import gzip
import json
import unittest
from opensimplex import OpenSimplex
class TestOpensimplex(unittest.TestCase):
def load_samples(self):
for line in gzip.open("tests/samples.json.gz"):
yield json.loads(line)
def test_samples(self):
simplex = OpenSimplex(seed=0)
for s in self.load_samples():
if len(s) == 3:
expected = s[2]
actual = simplex.noise2d(s[0], s[1])
elif len(s) == 4:
expected = s[3]
actual = simplex.noise3d(s[0], s[1], s[2])
elif len(s) == 5:
expected = s[4]
actual = simplex.noise4d(s[0], s[1], s[2], s[3])
else:
self.fail("Unexpected sample size: " + str(len(s)))
self.assertEqual(expected, actual)
if __name__ == "__main__":
unittest.main()
<commit_msg>Fix failing tests on py3k.<commit_after>
import gzip
import json
import unittest
from opensimplex import OpenSimplex
class TestOpensimplex(unittest.TestCase):
def load_samples(self):
for line in gzip.open("tests/samples.json.gz"):
# Python3: need to decode the line as it's a bytes object and json
# will only work on strings!
# TODO BUG: it will also take about 14 seconds to run the tests now! wtf
yield json.loads(line.decode("utf-8"))
def test_samples(self):
simplex = OpenSimplex(seed=0)
for s in self.load_samples():
if len(s) == 3:
expected = s[2]
actual = simplex.noise2d(s[0], s[1])
elif len(s) == 4:
expected = s[3]
actual = simplex.noise3d(s[0], s[1], s[2])
elif len(s) == 5:
expected = s[4]
actual = simplex.noise4d(s[0], s[1], s[2], s[3])
else:
self.fail("Unexpected sample size: " + str(len(s)))
self.assertEqual(expected, actual)
if __name__ == "__main__":
unittest.main()
|
61fb5216d12eb1a604a4a0f913eefc6006caf534
|
greatbigcrane/project/views.py
|
greatbigcrane/project/views.py
|
from django.shortcuts import render_to_response
from django.views.generic.list_detail import object_list
from django.views.generic.create_update import create_object
from project.models import Project
def index(request):
'''We should move this to a different app'''
return render_to_response('index.html')
def list_projects(request):
projects = Project.objects.all()
return object_list(request, projects, template_name="project/project_list.html",
template_object_name="project")
def view_project(request, project_id):
from django.http import HttpResponse
return HttpResponse("not implemented")
def add_project(request):
return create_object(request, model=Project)
from django.http import HttpResponse
return HttpResponse("not implemented")
|
from django.shortcuts import render_to_response
from django.views.generic.list_detail import object_list
from django.views.generic.create_update import create_object
from project.models import Project
def index(request):
'''We should move this to a different app'''
return render_to_response('index.html')
def list_projects(request):
projects = Project.objects.all()
return object_list(request, projects, template_name="project/project_list.html",
template_object_name="project")
def view_project(request, project_id):
from django.http import HttpResponse
return HttpResponse("not implemented")
def add_project(request):
return create_object(request, model=Project)
|
Drop unused code after return.
|
Drop unused code after return.
|
Python
|
apache-2.0
|
pnomolos/greatbigcrane,pnomolos/greatbigcrane
|
from django.shortcuts import render_to_response
from django.views.generic.list_detail import object_list
from django.views.generic.create_update import create_object
from project.models import Project
def index(request):
'''We should move this to a different app'''
return render_to_response('index.html')
def list_projects(request):
projects = Project.objects.all()
return object_list(request, projects, template_name="project/project_list.html",
template_object_name="project")
def view_project(request, project_id):
from django.http import HttpResponse
return HttpResponse("not implemented")
def add_project(request):
return create_object(request, model=Project)
from django.http import HttpResponse
return HttpResponse("not implemented")
Drop unused code after return.
|
from django.shortcuts import render_to_response
from django.views.generic.list_detail import object_list
from django.views.generic.create_update import create_object
from project.models import Project
def index(request):
'''We should move this to a different app'''
return render_to_response('index.html')
def list_projects(request):
projects = Project.objects.all()
return object_list(request, projects, template_name="project/project_list.html",
template_object_name="project")
def view_project(request, project_id):
from django.http import HttpResponse
return HttpResponse("not implemented")
def add_project(request):
return create_object(request, model=Project)
|
<commit_before>from django.shortcuts import render_to_response
from django.views.generic.list_detail import object_list
from django.views.generic.create_update import create_object
from project.models import Project
def index(request):
'''We should move this to a different app'''
return render_to_response('index.html')
def list_projects(request):
projects = Project.objects.all()
return object_list(request, projects, template_name="project/project_list.html",
template_object_name="project")
def view_project(request, project_id):
from django.http import HttpResponse
return HttpResponse("not implemented")
def add_project(request):
return create_object(request, model=Project)
from django.http import HttpResponse
return HttpResponse("not implemented")
<commit_msg>Drop unused code after return.<commit_after>
|
from django.shortcuts import render_to_response
from django.views.generic.list_detail import object_list
from django.views.generic.create_update import create_object
from project.models import Project
def index(request):
'''We should move this to a different app'''
return render_to_response('index.html')
def list_projects(request):
projects = Project.objects.all()
return object_list(request, projects, template_name="project/project_list.html",
template_object_name="project")
def view_project(request, project_id):
from django.http import HttpResponse
return HttpResponse("not implemented")
def add_project(request):
return create_object(request, model=Project)
|
from django.shortcuts import render_to_response
from django.views.generic.list_detail import object_list
from django.views.generic.create_update import create_object
from project.models import Project
def index(request):
'''We should move this to a different app'''
return render_to_response('index.html')
def list_projects(request):
projects = Project.objects.all()
return object_list(request, projects, template_name="project/project_list.html",
template_object_name="project")
def view_project(request, project_id):
from django.http import HttpResponse
return HttpResponse("not implemented")
def add_project(request):
return create_object(request, model=Project)
from django.http import HttpResponse
return HttpResponse("not implemented")
Drop unused code after return.from django.shortcuts import render_to_response
from django.views.generic.list_detail import object_list
from django.views.generic.create_update import create_object
from project.models import Project
def index(request):
'''We should move this to a different app'''
return render_to_response('index.html')
def list_projects(request):
projects = Project.objects.all()
return object_list(request, projects, template_name="project/project_list.html",
template_object_name="project")
def view_project(request, project_id):
from django.http import HttpResponse
return HttpResponse("not implemented")
def add_project(request):
return create_object(request, model=Project)
|
<commit_before>from django.shortcuts import render_to_response
from django.views.generic.list_detail import object_list
from django.views.generic.create_update import create_object
from project.models import Project
def index(request):
'''We should move this to a different app'''
return render_to_response('index.html')
def list_projects(request):
projects = Project.objects.all()
return object_list(request, projects, template_name="project/project_list.html",
template_object_name="project")
def view_project(request, project_id):
from django.http import HttpResponse
return HttpResponse("not implemented")
def add_project(request):
return create_object(request, model=Project)
from django.http import HttpResponse
return HttpResponse("not implemented")
<commit_msg>Drop unused code after return.<commit_after>from django.shortcuts import render_to_response
from django.views.generic.list_detail import object_list
from django.views.generic.create_update import create_object
from project.models import Project
def index(request):
'''We should move this to a different app'''
return render_to_response('index.html')
def list_projects(request):
projects = Project.objects.all()
return object_list(request, projects, template_name="project/project_list.html",
template_object_name="project")
def view_project(request, project_id):
from django.http import HttpResponse
return HttpResponse("not implemented")
def add_project(request):
return create_object(request, model=Project)
|
7040228388a7ddf5ca379a8382372698e343aa3e
|
praw/models/reddit/modmail.py
|
praw/models/reddit/modmail.py
|
"""Provide models for new modmail."""
from ...const import API_PATH
from .base import RedditBase
class ModmailConversation(RedditBase):
"""A class for modmail conversations."""
STR_FIELD = 'id'
def __init__(self, reddit, id=None, # pylint: disable=redefined-builtin
_data=None):
"""Construct an instance of the ModmailConversation object."""
super(ModmailConversation, self).__init__(reddit, _data)
if id is not None:
self.id = id # pylint: disable=invalid-name
def _info_path(self):
return API_PATH['modmail_conversation'].format(id=self.id)
|
"""Provide models for new modmail."""
from ...const import API_PATH
from .base import RedditBase
class ModmailConversation(RedditBase):
"""A class for modmail conversations."""
STR_FIELD = 'id'
def __init__(self, reddit, id=None, # pylint: disable=redefined-builtin
_data=None):
"""Construct an instance of the ModmailConversation object."""
super(ModmailConversation, self).__init__(reddit, _data)
self.id = id # pylint: disable=invalid-name
def _info_path(self):
return API_PATH['modmail_conversation'].format(id=self.id)
|
Remove unncessary check for ModmailConversation id
|
Remove unncessary check for ModmailConversation id
|
Python
|
bsd-2-clause
|
praw-dev/praw,13steinj/praw,gschizas/praw,13steinj/praw,darthkedrik/praw,darthkedrik/praw,leviroth/praw,gschizas/praw,leviroth/praw,praw-dev/praw
|
"""Provide models for new modmail."""
from ...const import API_PATH
from .base import RedditBase
class ModmailConversation(RedditBase):
"""A class for modmail conversations."""
STR_FIELD = 'id'
def __init__(self, reddit, id=None, # pylint: disable=redefined-builtin
_data=None):
"""Construct an instance of the ModmailConversation object."""
super(ModmailConversation, self).__init__(reddit, _data)
if id is not None:
self.id = id # pylint: disable=invalid-name
def _info_path(self):
return API_PATH['modmail_conversation'].format(id=self.id)
Remove unncessary check for ModmailConversation id
|
"""Provide models for new modmail."""
from ...const import API_PATH
from .base import RedditBase
class ModmailConversation(RedditBase):
"""A class for modmail conversations."""
STR_FIELD = 'id'
def __init__(self, reddit, id=None, # pylint: disable=redefined-builtin
_data=None):
"""Construct an instance of the ModmailConversation object."""
super(ModmailConversation, self).__init__(reddit, _data)
self.id = id # pylint: disable=invalid-name
def _info_path(self):
return API_PATH['modmail_conversation'].format(id=self.id)
|
<commit_before>"""Provide models for new modmail."""
from ...const import API_PATH
from .base import RedditBase
class ModmailConversation(RedditBase):
"""A class for modmail conversations."""
STR_FIELD = 'id'
def __init__(self, reddit, id=None, # pylint: disable=redefined-builtin
_data=None):
"""Construct an instance of the ModmailConversation object."""
super(ModmailConversation, self).__init__(reddit, _data)
if id is not None:
self.id = id # pylint: disable=invalid-name
def _info_path(self):
return API_PATH['modmail_conversation'].format(id=self.id)
<commit_msg>Remove unncessary check for ModmailConversation id<commit_after>
|
"""Provide models for new modmail."""
from ...const import API_PATH
from .base import RedditBase
class ModmailConversation(RedditBase):
"""A class for modmail conversations."""
STR_FIELD = 'id'
def __init__(self, reddit, id=None, # pylint: disable=redefined-builtin
_data=None):
"""Construct an instance of the ModmailConversation object."""
super(ModmailConversation, self).__init__(reddit, _data)
self.id = id # pylint: disable=invalid-name
def _info_path(self):
return API_PATH['modmail_conversation'].format(id=self.id)
|
"""Provide models for new modmail."""
from ...const import API_PATH
from .base import RedditBase
class ModmailConversation(RedditBase):
"""A class for modmail conversations."""
STR_FIELD = 'id'
def __init__(self, reddit, id=None, # pylint: disable=redefined-builtin
_data=None):
"""Construct an instance of the ModmailConversation object."""
super(ModmailConversation, self).__init__(reddit, _data)
if id is not None:
self.id = id # pylint: disable=invalid-name
def _info_path(self):
return API_PATH['modmail_conversation'].format(id=self.id)
Remove unncessary check for ModmailConversation id"""Provide models for new modmail."""
from ...const import API_PATH
from .base import RedditBase
class ModmailConversation(RedditBase):
"""A class for modmail conversations."""
STR_FIELD = 'id'
def __init__(self, reddit, id=None, # pylint: disable=redefined-builtin
_data=None):
"""Construct an instance of the ModmailConversation object."""
super(ModmailConversation, self).__init__(reddit, _data)
self.id = id # pylint: disable=invalid-name
def _info_path(self):
return API_PATH['modmail_conversation'].format(id=self.id)
|
<commit_before>"""Provide models for new modmail."""
from ...const import API_PATH
from .base import RedditBase
class ModmailConversation(RedditBase):
"""A class for modmail conversations."""
STR_FIELD = 'id'
def __init__(self, reddit, id=None, # pylint: disable=redefined-builtin
_data=None):
"""Construct an instance of the ModmailConversation object."""
super(ModmailConversation, self).__init__(reddit, _data)
if id is not None:
self.id = id # pylint: disable=invalid-name
def _info_path(self):
return API_PATH['modmail_conversation'].format(id=self.id)
<commit_msg>Remove unncessary check for ModmailConversation id<commit_after>"""Provide models for new modmail."""
from ...const import API_PATH
from .base import RedditBase
class ModmailConversation(RedditBase):
"""A class for modmail conversations."""
STR_FIELD = 'id'
def __init__(self, reddit, id=None, # pylint: disable=redefined-builtin
_data=None):
"""Construct an instance of the ModmailConversation object."""
super(ModmailConversation, self).__init__(reddit, _data)
self.id = id # pylint: disable=invalid-name
def _info_path(self):
return API_PATH['modmail_conversation'].format(id=self.id)
|
34a96c9824bef5d735f521b303fe9f9755b431ee
|
dataportal/utils/diagnostics.py
|
dataportal/utils/diagnostics.py
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from collections import OrderedDict
import importlib
import six
def watermark():
"""
Give the version of each of the dependencies -- useful for bug reports.
Returns
-------
result : dict
mapping the name of each package to its version string or, if an
optional dependency is not installed, None
"""
packages = ['six', 'numpy', 'scipy', 'matplotlib', 'pandas', 'pims',
'pyyaml', 'metadatastore', 'filestore',
'channelarchiver', 'bubblegum']
result = OrderedDict()
for package_name in packages:
try:
package = importlib.import_module(package_name)
except ImportError:
result[package_name] = None
else:
version = package.__version__
# enaml provides its version differently
try:
import enaml
except ImportError:
result['enaml'] = None
else:
from enaml.version import version_info
result['enaml'] = _make_version_string(version_info)
# ...as does Python
version_info = sys.version_info
result['python'] = _make_version_string(version_info)
return result
def _make_version_string(version_info):
version_string = '.'.join(map(str, [version_info[0], version_info[1],
version_info[2]]))
return version_string
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from collections import OrderedDict
import importlib
import sys
import six
def watermark():
"""
Give the version of each of the dependencies -- useful for bug reports.
Returns
-------
result : dict
mapping the name of each package to its version string or, if an
optional dependency is not installed, None
"""
packages = ['six', 'numpy', 'scipy', 'matplotlib', 'pandas', 'pims',
'pyyaml', 'metadatastore', 'filestore',
'channelarchiver', 'bubblegum']
result = OrderedDict()
for package_name in packages:
try:
package = importlib.import_module(package_name)
except ImportError:
result[package_name] = None
else:
try:
version = package.__version__
except AttributeError as err:
version = "FAILED TO DETECT: {0}".format(err)
result[package_name] = version
# enaml provides its version differently
try:
import enaml
except ImportError:
result['enaml'] = None
else:
from enaml.version import version_info
result['enaml'] = _make_version_string(version_info)
# ...as does Python
version_info = sys.version_info
result['python'] = _make_version_string(version_info)
return result
def _make_version_string(version_info):
version_string = '.'.join(map(str, [version_info[0], version_info[1],
version_info[2]]))
return version_string
|
Make watermark robust if __version__ attribute is missing.
|
FIX: Make watermark robust if __version__ attribute is missing.
|
Python
|
bsd-3-clause
|
danielballan/dataportal,danielballan/dataportal,danielballan/datamuxer,tacaswell/dataportal,tacaswell/dataportal,ericdill/datamuxer,NSLS-II/dataportal,NSLS-II/datamuxer,ericdill/datamuxer,NSLS-II/dataportal,ericdill/databroker,danielballan/datamuxer,ericdill/databroker
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from collections import OrderedDict
import importlib
import six
def watermark():
"""
Give the version of each of the dependencies -- useful for bug reports.
Returns
-------
result : dict
mapping the name of each package to its version string or, if an
optional dependency is not installed, None
"""
packages = ['six', 'numpy', 'scipy', 'matplotlib', 'pandas', 'pims',
'pyyaml', 'metadatastore', 'filestore',
'channelarchiver', 'bubblegum']
result = OrderedDict()
for package_name in packages:
try:
package = importlib.import_module(package_name)
except ImportError:
result[package_name] = None
else:
version = package.__version__
# enaml provides its version differently
try:
import enaml
except ImportError:
result['enaml'] = None
else:
from enaml.version import version_info
result['enaml'] = _make_version_string(version_info)
# ...as does Python
version_info = sys.version_info
result['python'] = _make_version_string(version_info)
return result
def _make_version_string(version_info):
version_string = '.'.join(map(str, [version_info[0], version_info[1],
version_info[2]]))
return version_string
FIX: Make watermark robust if __version__ attribute is missing.
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from collections import OrderedDict
import importlib
import sys
import six
def watermark():
"""
Give the version of each of the dependencies -- useful for bug reports.
Returns
-------
result : dict
mapping the name of each package to its version string or, if an
optional dependency is not installed, None
"""
packages = ['six', 'numpy', 'scipy', 'matplotlib', 'pandas', 'pims',
'pyyaml', 'metadatastore', 'filestore',
'channelarchiver', 'bubblegum']
result = OrderedDict()
for package_name in packages:
try:
package = importlib.import_module(package_name)
except ImportError:
result[package_name] = None
else:
try:
version = package.__version__
except AttributeError as err:
version = "FAILED TO DETECT: {0}".format(err)
result[package_name] = version
# enaml provides its version differently
try:
import enaml
except ImportError:
result['enaml'] = None
else:
from enaml.version import version_info
result['enaml'] = _make_version_string(version_info)
# ...as does Python
version_info = sys.version_info
result['python'] = _make_version_string(version_info)
return result
def _make_version_string(version_info):
version_string = '.'.join(map(str, [version_info[0], version_info[1],
version_info[2]]))
return version_string
|
<commit_before>from __future__ import (absolute_import, division, print_function,
unicode_literals)
from collections import OrderedDict
import importlib
import six
def watermark():
"""
Give the version of each of the dependencies -- useful for bug reports.
Returns
-------
result : dict
mapping the name of each package to its version string or, if an
optional dependency is not installed, None
"""
packages = ['six', 'numpy', 'scipy', 'matplotlib', 'pandas', 'pims',
'pyyaml', 'metadatastore', 'filestore',
'channelarchiver', 'bubblegum']
result = OrderedDict()
for package_name in packages:
try:
package = importlib.import_module(package_name)
except ImportError:
result[package_name] = None
else:
version = package.__version__
# enaml provides its version differently
try:
import enaml
except ImportError:
result['enaml'] = None
else:
from enaml.version import version_info
result['enaml'] = _make_version_string(version_info)
# ...as does Python
version_info = sys.version_info
result['python'] = _make_version_string(version_info)
return result
def _make_version_string(version_info):
version_string = '.'.join(map(str, [version_info[0], version_info[1],
version_info[2]]))
return version_string
<commit_msg>FIX: Make watermark robust if __version__ attribute is missing.<commit_after>
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from collections import OrderedDict
import importlib
import sys
import six
def watermark():
"""
Give the version of each of the dependencies -- useful for bug reports.
Returns
-------
result : dict
mapping the name of each package to its version string or, if an
optional dependency is not installed, None
"""
packages = ['six', 'numpy', 'scipy', 'matplotlib', 'pandas', 'pims',
'pyyaml', 'metadatastore', 'filestore',
'channelarchiver', 'bubblegum']
result = OrderedDict()
for package_name in packages:
try:
package = importlib.import_module(package_name)
except ImportError:
result[package_name] = None
else:
try:
version = package.__version__
except AttributeError as err:
version = "FAILED TO DETECT: {0}".format(err)
result[package_name] = version
# enaml provides its version differently
try:
import enaml
except ImportError:
result['enaml'] = None
else:
from enaml.version import version_info
result['enaml'] = _make_version_string(version_info)
# ...as does Python
version_info = sys.version_info
result['python'] = _make_version_string(version_info)
return result
def _make_version_string(version_info):
version_string = '.'.join(map(str, [version_info[0], version_info[1],
version_info[2]]))
return version_string
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from collections import OrderedDict
import importlib
import six
def watermark():
"""
Give the version of each of the dependencies -- useful for bug reports.
Returns
-------
result : dict
mapping the name of each package to its version string or, if an
optional dependency is not installed, None
"""
packages = ['six', 'numpy', 'scipy', 'matplotlib', 'pandas', 'pims',
'pyyaml', 'metadatastore', 'filestore',
'channelarchiver', 'bubblegum']
result = OrderedDict()
for package_name in packages:
try:
package = importlib.import_module(package_name)
except ImportError:
result[package_name] = None
else:
version = package.__version__
# enaml provides its version differently
try:
import enaml
except ImportError:
result['enaml'] = None
else:
from enaml.version import version_info
result['enaml'] = _make_version_string(version_info)
# ...as does Python
version_info = sys.version_info
result['python'] = _make_version_string(version_info)
return result
def _make_version_string(version_info):
version_string = '.'.join(map(str, [version_info[0], version_info[1],
version_info[2]]))
return version_string
FIX: Make watermark robust if __version__ attribute is missing.from __future__ import (absolute_import, division, print_function,
unicode_literals)
from collections import OrderedDict
import importlib
import sys
import six
def watermark():
"""
Give the version of each of the dependencies -- useful for bug reports.
Returns
-------
result : dict
mapping the name of each package to its version string or, if an
optional dependency is not installed, None
"""
packages = ['six', 'numpy', 'scipy', 'matplotlib', 'pandas', 'pims',
'pyyaml', 'metadatastore', 'filestore',
'channelarchiver', 'bubblegum']
result = OrderedDict()
for package_name in packages:
try:
package = importlib.import_module(package_name)
except ImportError:
result[package_name] = None
else:
try:
version = package.__version__
except AttributeError as err:
version = "FAILED TO DETECT: {0}".format(err)
result[package_name] = version
# enaml provides its version differently
try:
import enaml
except ImportError:
result['enaml'] = None
else:
from enaml.version import version_info
result['enaml'] = _make_version_string(version_info)
# ...as does Python
version_info = sys.version_info
result['python'] = _make_version_string(version_info)
return result
def _make_version_string(version_info):
version_string = '.'.join(map(str, [version_info[0], version_info[1],
version_info[2]]))
return version_string
|
<commit_before>from __future__ import (absolute_import, division, print_function,
unicode_literals)
from collections import OrderedDict
import importlib
import six
def watermark():
"""
Give the version of each of the dependencies -- useful for bug reports.
Returns
-------
result : dict
mapping the name of each package to its version string or, if an
optional dependency is not installed, None
"""
packages = ['six', 'numpy', 'scipy', 'matplotlib', 'pandas', 'pims',
'pyyaml', 'metadatastore', 'filestore',
'channelarchiver', 'bubblegum']
result = OrderedDict()
for package_name in packages:
try:
package = importlib.import_module(package_name)
except ImportError:
result[package_name] = None
else:
version = package.__version__
# enaml provides its version differently
try:
import enaml
except ImportError:
result['enaml'] = None
else:
from enaml.version import version_info
result['enaml'] = _make_version_string(version_info)
# ...as does Python
version_info = sys.version_info
result['python'] = _make_version_string(version_info)
return result
def _make_version_string(version_info):
version_string = '.'.join(map(str, [version_info[0], version_info[1],
version_info[2]]))
return version_string
<commit_msg>FIX: Make watermark robust if __version__ attribute is missing.<commit_after>from __future__ import (absolute_import, division, print_function,
unicode_literals)
from collections import OrderedDict
import importlib
import sys
import six
def watermark():
"""
Give the version of each of the dependencies -- useful for bug reports.
Returns
-------
result : dict
mapping the name of each package to its version string or, if an
optional dependency is not installed, None
"""
packages = ['six', 'numpy', 'scipy', 'matplotlib', 'pandas', 'pims',
'pyyaml', 'metadatastore', 'filestore',
'channelarchiver', 'bubblegum']
result = OrderedDict()
for package_name in packages:
try:
package = importlib.import_module(package_name)
except ImportError:
result[package_name] = None
else:
try:
version = package.__version__
except AttributeError as err:
version = "FAILED TO DETECT: {0}".format(err)
result[package_name] = version
# enaml provides its version differently
try:
import enaml
except ImportError:
result['enaml'] = None
else:
from enaml.version import version_info
result['enaml'] = _make_version_string(version_info)
# ...as does Python
version_info = sys.version_info
result['python'] = _make_version_string(version_info)
return result
def _make_version_string(version_info):
version_string = '.'.join(map(str, [version_info[0], version_info[1],
version_info[2]]))
return version_string
|
f5356198f30002b4b4d26fba424a4bdd546a26e5
|
keeper/api_v1/errorhandlers.py
|
keeper/api_v1/errorhandlers.py
|
"""Error handling functions.
Flask calls these functions when different HTTP error codes or Python
exceptions are emitted. These handlers provide a JSON response rather
than the default HMTL header response.
"""
from flask import jsonify
from ..exceptions import ValidationError
from . import api
@api.errorhandler(ValidationError)
def bad_request(e):
"""Handler for ValidationError exceptions."""
response = jsonify({'status': 400, 'error': 'bad request',
'message': e.args[0]})
response.status_code = 400
return response
@api.app_errorhandler(404)
def not_found(e):
"""App-wide handler for HTTP 404 errors."""
response = jsonify({'status': 404, 'error': 'not found',
'message': 'invalid resource URI'})
response.status_code = 404
return response
@api.errorhandler(405)
def method_not_supported(e):
"""Handler for HTTP 405 exceptions."""
response = jsonify({'status': 405, 'error': 'method not supported',
'message': 'the method is not supported'})
response.status_code = 405
return response
@api.app_errorhandler(500)
def internal_server_error(e):
"""App-wide handler for HTTP 500 errors."""
response = jsonify({'status': 500, 'error': 'internal server error',
'message': e.args[0]})
response.status_code = 500
return response
|
"""Error handling functions.
Flask calls these functions when different HTTP error codes or Python
exceptions are emitted. These handlers provide a JSON response rather
than the default HMTL header response.
"""
from flask import jsonify
import structlog
from ..exceptions import ValidationError
from . import api
@api.errorhandler(ValidationError)
def bad_request(e):
"""Handler for ValidationError exceptions."""
response = jsonify({'status': 400, 'error': 'bad request',
'message': e.args[0]})
response.status_code = 400
return response
@api.app_errorhandler(404)
def not_found(e):
"""App-wide handler for HTTP 404 errors."""
response = jsonify({'status': 404, 'error': 'not found',
'message': 'invalid resource URI'})
response.status_code = 404
return response
@api.errorhandler(405)
def method_not_supported(e):
"""Handler for HTTP 405 exceptions."""
response = jsonify({'status': 405, 'error': 'method not supported',
'message': 'the method is not supported'})
response.status_code = 405
return response
@api.app_errorhandler(500)
def internal_server_error(e):
"""App-wide handler for HTTP 500 errors."""
logger = structlog.get_logger()
logger.error(status=500, message=e.args[0])
response = jsonify({'status': 500, 'error': 'internal server error',
'message': e.args[0]})
response.status_code = 500
return response
|
Add an error logger to the 500 handler
|
Add an error logger to the 500 handler
|
Python
|
mit
|
lsst-sqre/ltd-keeper,lsst-sqre/ltd-keeper
|
"""Error handling functions.
Flask calls these functions when different HTTP error codes or Python
exceptions are emitted. These handlers provide a JSON response rather
than the default HMTL header response.
"""
from flask import jsonify
from ..exceptions import ValidationError
from . import api
@api.errorhandler(ValidationError)
def bad_request(e):
"""Handler for ValidationError exceptions."""
response = jsonify({'status': 400, 'error': 'bad request',
'message': e.args[0]})
response.status_code = 400
return response
@api.app_errorhandler(404)
def not_found(e):
"""App-wide handler for HTTP 404 errors."""
response = jsonify({'status': 404, 'error': 'not found',
'message': 'invalid resource URI'})
response.status_code = 404
return response
@api.errorhandler(405)
def method_not_supported(e):
"""Handler for HTTP 405 exceptions."""
response = jsonify({'status': 405, 'error': 'method not supported',
'message': 'the method is not supported'})
response.status_code = 405
return response
@api.app_errorhandler(500)
def internal_server_error(e):
"""App-wide handler for HTTP 500 errors."""
response = jsonify({'status': 500, 'error': 'internal server error',
'message': e.args[0]})
response.status_code = 500
return response
Add an error logger to the 500 handler
|
"""Error handling functions.
Flask calls these functions when different HTTP error codes or Python
exceptions are emitted. These handlers provide a JSON response rather
than the default HMTL header response.
"""
from flask import jsonify
import structlog
from ..exceptions import ValidationError
from . import api
@api.errorhandler(ValidationError)
def bad_request(e):
"""Handler for ValidationError exceptions."""
response = jsonify({'status': 400, 'error': 'bad request',
'message': e.args[0]})
response.status_code = 400
return response
@api.app_errorhandler(404)
def not_found(e):
"""App-wide handler for HTTP 404 errors."""
response = jsonify({'status': 404, 'error': 'not found',
'message': 'invalid resource URI'})
response.status_code = 404
return response
@api.errorhandler(405)
def method_not_supported(e):
"""Handler for HTTP 405 exceptions."""
response = jsonify({'status': 405, 'error': 'method not supported',
'message': 'the method is not supported'})
response.status_code = 405
return response
@api.app_errorhandler(500)
def internal_server_error(e):
"""App-wide handler for HTTP 500 errors."""
logger = structlog.get_logger()
logger.error(status=500, message=e.args[0])
response = jsonify({'status': 500, 'error': 'internal server error',
'message': e.args[0]})
response.status_code = 500
return response
|
<commit_before>"""Error handling functions.
Flask calls these functions when different HTTP error codes or Python
exceptions are emitted. These handlers provide a JSON response rather
than the default HMTL header response.
"""
from flask import jsonify
from ..exceptions import ValidationError
from . import api
@api.errorhandler(ValidationError)
def bad_request(e):
"""Handler for ValidationError exceptions."""
response = jsonify({'status': 400, 'error': 'bad request',
'message': e.args[0]})
response.status_code = 400
return response
@api.app_errorhandler(404)
def not_found(e):
"""App-wide handler for HTTP 404 errors."""
response = jsonify({'status': 404, 'error': 'not found',
'message': 'invalid resource URI'})
response.status_code = 404
return response
@api.errorhandler(405)
def method_not_supported(e):
"""Handler for HTTP 405 exceptions."""
response = jsonify({'status': 405, 'error': 'method not supported',
'message': 'the method is not supported'})
response.status_code = 405
return response
@api.app_errorhandler(500)
def internal_server_error(e):
"""App-wide handler for HTTP 500 errors."""
response = jsonify({'status': 500, 'error': 'internal server error',
'message': e.args[0]})
response.status_code = 500
return response
<commit_msg>Add an error logger to the 500 handler<commit_after>
|
"""Error handling functions.
Flask calls these functions when different HTTP error codes or Python
exceptions are emitted. These handlers provide a JSON response rather
than the default HMTL header response.
"""
from flask import jsonify
import structlog
from ..exceptions import ValidationError
from . import api
@api.errorhandler(ValidationError)
def bad_request(e):
"""Handler for ValidationError exceptions."""
response = jsonify({'status': 400, 'error': 'bad request',
'message': e.args[0]})
response.status_code = 400
return response
@api.app_errorhandler(404)
def not_found(e):
"""App-wide handler for HTTP 404 errors."""
response = jsonify({'status': 404, 'error': 'not found',
'message': 'invalid resource URI'})
response.status_code = 404
return response
@api.errorhandler(405)
def method_not_supported(e):
"""Handler for HTTP 405 exceptions."""
response = jsonify({'status': 405, 'error': 'method not supported',
'message': 'the method is not supported'})
response.status_code = 405
return response
@api.app_errorhandler(500)
def internal_server_error(e):
"""App-wide handler for HTTP 500 errors."""
logger = structlog.get_logger()
logger.error(status=500, message=e.args[0])
response = jsonify({'status': 500, 'error': 'internal server error',
'message': e.args[0]})
response.status_code = 500
return response
|
"""Error handling functions.
Flask calls these functions when different HTTP error codes or Python
exceptions are emitted. These handlers provide a JSON response rather
than the default HMTL header response.
"""
from flask import jsonify
from ..exceptions import ValidationError
from . import api
@api.errorhandler(ValidationError)
def bad_request(e):
"""Handler for ValidationError exceptions."""
response = jsonify({'status': 400, 'error': 'bad request',
'message': e.args[0]})
response.status_code = 400
return response
@api.app_errorhandler(404)
def not_found(e):
"""App-wide handler for HTTP 404 errors."""
response = jsonify({'status': 404, 'error': 'not found',
'message': 'invalid resource URI'})
response.status_code = 404
return response
@api.errorhandler(405)
def method_not_supported(e):
"""Handler for HTTP 405 exceptions."""
response = jsonify({'status': 405, 'error': 'method not supported',
'message': 'the method is not supported'})
response.status_code = 405
return response
@api.app_errorhandler(500)
def internal_server_error(e):
"""App-wide handler for HTTP 500 errors."""
response = jsonify({'status': 500, 'error': 'internal server error',
'message': e.args[0]})
response.status_code = 500
return response
Add an error logger to the 500 handler"""Error handling functions.
Flask calls these functions when different HTTP error codes or Python
exceptions are emitted. These handlers provide a JSON response rather
than the default HMTL header response.
"""
from flask import jsonify
import structlog
from ..exceptions import ValidationError
from . import api
@api.errorhandler(ValidationError)
def bad_request(e):
"""Handler for ValidationError exceptions."""
response = jsonify({'status': 400, 'error': 'bad request',
'message': e.args[0]})
response.status_code = 400
return response
@api.app_errorhandler(404)
def not_found(e):
"""App-wide handler for HTTP 404 errors."""
response = jsonify({'status': 404, 'error': 'not found',
'message': 'invalid resource URI'})
response.status_code = 404
return response
@api.errorhandler(405)
def method_not_supported(e):
"""Handler for HTTP 405 exceptions."""
response = jsonify({'status': 405, 'error': 'method not supported',
'message': 'the method is not supported'})
response.status_code = 405
return response
@api.app_errorhandler(500)
def internal_server_error(e):
"""App-wide handler for HTTP 500 errors."""
logger = structlog.get_logger()
logger.error(status=500, message=e.args[0])
response = jsonify({'status': 500, 'error': 'internal server error',
'message': e.args[0]})
response.status_code = 500
return response
|
<commit_before>"""Error handling functions.
Flask calls these functions when different HTTP error codes or Python
exceptions are emitted. These handlers provide a JSON response rather
than the default HMTL header response.
"""
from flask import jsonify
from ..exceptions import ValidationError
from . import api
@api.errorhandler(ValidationError)
def bad_request(e):
"""Handler for ValidationError exceptions."""
response = jsonify({'status': 400, 'error': 'bad request',
'message': e.args[0]})
response.status_code = 400
return response
@api.app_errorhandler(404)
def not_found(e):
"""App-wide handler for HTTP 404 errors."""
response = jsonify({'status': 404, 'error': 'not found',
'message': 'invalid resource URI'})
response.status_code = 404
return response
@api.errorhandler(405)
def method_not_supported(e):
"""Handler for HTTP 405 exceptions."""
response = jsonify({'status': 405, 'error': 'method not supported',
'message': 'the method is not supported'})
response.status_code = 405
return response
@api.app_errorhandler(500)
def internal_server_error(e):
"""App-wide handler for HTTP 500 errors."""
response = jsonify({'status': 500, 'error': 'internal server error',
'message': e.args[0]})
response.status_code = 500
return response
<commit_msg>Add an error logger to the 500 handler<commit_after>"""Error handling functions.
Flask calls these functions when different HTTP error codes or Python
exceptions are emitted. These handlers provide a JSON response rather
than the default HMTL header response.
"""
from flask import jsonify
import structlog
from ..exceptions import ValidationError
from . import api
@api.errorhandler(ValidationError)
def bad_request(e):
"""Handler for ValidationError exceptions."""
response = jsonify({'status': 400, 'error': 'bad request',
'message': e.args[0]})
response.status_code = 400
return response
@api.app_errorhandler(404)
def not_found(e):
"""App-wide handler for HTTP 404 errors."""
response = jsonify({'status': 404, 'error': 'not found',
'message': 'invalid resource URI'})
response.status_code = 404
return response
@api.errorhandler(405)
def method_not_supported(e):
"""Handler for HTTP 405 exceptions."""
response = jsonify({'status': 405, 'error': 'method not supported',
'message': 'the method is not supported'})
response.status_code = 405
return response
@api.app_errorhandler(500)
def internal_server_error(e):
"""App-wide handler for HTTP 500 errors."""
logger = structlog.get_logger()
logger.error(status=500, message=e.args[0])
response = jsonify({'status': 500, 'error': 'internal server error',
'message': e.args[0]})
response.status_code = 500
return response
|
4ac4caa0f803394693227be0b225d7a1dcab6611
|
sasview/__init__.py
|
sasview/__init__.py
|
__version__ = "4.0b1"
__build__ = "GIT_COMMIT"
try:
import logging
import subprocess
import os
import platform
FNULL = open(os.devnull, 'w')
if platform.system() == "Windows":
args = ['git', 'describe', '--tags']
else:
args = ['git describe --tags']
git_revision = subprocess.check_output(args,
#git_revision = subprocess.check_output(['pwd'],
stderr=FNULL,
shell=True)
__build__ = str(git_revision).strip()
except subprocess.CalledProcessError as cpe:
logging.warning("Error while determining build number\n Using command:\n %s \n Output:\n %s"% (cpe.cmd,cpe.output))
|
__version__ = "4.0b1"
__build__ = "GIT_COMMIT"
try:
import logging
import subprocess
import os
import platform
FNULL = open(os.devnull, 'w')
if platform.system() == "Windows":
args = ['git', 'describe', '--tags']
else:
args = ['git describe --tags']
git_revision = subprocess.check_output(args,
stderr=FNULL,
shell=True)
__build__ = str(git_revision).strip()
except subprocess.CalledProcessError as cpe:
logging.warning("Error while determining build number\n Using command:\n %s \n Output:\n %s"% (cpe.cmd,cpe.output))
|
Clean up un-needed commented line after jkrzywon fixed subprocess bad behaviour
|
Clean up un-needed commented line after jkrzywon fixed subprocess bad behaviour
|
Python
|
bsd-3-clause
|
lewisodriscoll/sasview,SasView/sasview,lewisodriscoll/sasview,lewisodriscoll/sasview,SasView/sasview,SasView/sasview,lewisodriscoll/sasview,lewisodriscoll/sasview,SasView/sasview,SasView/sasview,SasView/sasview
|
__version__ = "4.0b1"
__build__ = "GIT_COMMIT"
try:
import logging
import subprocess
import os
import platform
FNULL = open(os.devnull, 'w')
if platform.system() == "Windows":
args = ['git', 'describe', '--tags']
else:
args = ['git describe --tags']
git_revision = subprocess.check_output(args,
#git_revision = subprocess.check_output(['pwd'],
stderr=FNULL,
shell=True)
__build__ = str(git_revision).strip()
except subprocess.CalledProcessError as cpe:
logging.warning("Error while determining build number\n Using command:\n %s \n Output:\n %s"% (cpe.cmd,cpe.output))
Clean up un-needed commented line after jkrzywon fixed subprocess bad behaviour
|
__version__ = "4.0b1"
__build__ = "GIT_COMMIT"
try:
import logging
import subprocess
import os
import platform
FNULL = open(os.devnull, 'w')
if platform.system() == "Windows":
args = ['git', 'describe', '--tags']
else:
args = ['git describe --tags']
git_revision = subprocess.check_output(args,
stderr=FNULL,
shell=True)
__build__ = str(git_revision).strip()
except subprocess.CalledProcessError as cpe:
logging.warning("Error while determining build number\n Using command:\n %s \n Output:\n %s"% (cpe.cmd,cpe.output))
|
<commit_before>__version__ = "4.0b1"
__build__ = "GIT_COMMIT"
try:
import logging
import subprocess
import os
import platform
FNULL = open(os.devnull, 'w')
if platform.system() == "Windows":
args = ['git', 'describe', '--tags']
else:
args = ['git describe --tags']
git_revision = subprocess.check_output(args,
#git_revision = subprocess.check_output(['pwd'],
stderr=FNULL,
shell=True)
__build__ = str(git_revision).strip()
except subprocess.CalledProcessError as cpe:
logging.warning("Error while determining build number\n Using command:\n %s \n Output:\n %s"% (cpe.cmd,cpe.output))
<commit_msg>Clean up un-needed commented line after jkrzywon fixed subprocess bad behaviour<commit_after>
|
__version__ = "4.0b1"
__build__ = "GIT_COMMIT"
try:
import logging
import subprocess
import os
import platform
FNULL = open(os.devnull, 'w')
if platform.system() == "Windows":
args = ['git', 'describe', '--tags']
else:
args = ['git describe --tags']
git_revision = subprocess.check_output(args,
stderr=FNULL,
shell=True)
__build__ = str(git_revision).strip()
except subprocess.CalledProcessError as cpe:
logging.warning("Error while determining build number\n Using command:\n %s \n Output:\n %s"% (cpe.cmd,cpe.output))
|
__version__ = "4.0b1"
__build__ = "GIT_COMMIT"
try:
import logging
import subprocess
import os
import platform
FNULL = open(os.devnull, 'w')
if platform.system() == "Windows":
args = ['git', 'describe', '--tags']
else:
args = ['git describe --tags']
git_revision = subprocess.check_output(args,
#git_revision = subprocess.check_output(['pwd'],
stderr=FNULL,
shell=True)
__build__ = str(git_revision).strip()
except subprocess.CalledProcessError as cpe:
logging.warning("Error while determining build number\n Using command:\n %s \n Output:\n %s"% (cpe.cmd,cpe.output))
Clean up un-needed commented line after jkrzywon fixed subprocess bad behaviour__version__ = "4.0b1"
__build__ = "GIT_COMMIT"
try:
import logging
import subprocess
import os
import platform
FNULL = open(os.devnull, 'w')
if platform.system() == "Windows":
args = ['git', 'describe', '--tags']
else:
args = ['git describe --tags']
git_revision = subprocess.check_output(args,
stderr=FNULL,
shell=True)
__build__ = str(git_revision).strip()
except subprocess.CalledProcessError as cpe:
logging.warning("Error while determining build number\n Using command:\n %s \n Output:\n %s"% (cpe.cmd,cpe.output))
|
<commit_before>__version__ = "4.0b1"
__build__ = "GIT_COMMIT"
try:
import logging
import subprocess
import os
import platform
FNULL = open(os.devnull, 'w')
if platform.system() == "Windows":
args = ['git', 'describe', '--tags']
else:
args = ['git describe --tags']
git_revision = subprocess.check_output(args,
#git_revision = subprocess.check_output(['pwd'],
stderr=FNULL,
shell=True)
__build__ = str(git_revision).strip()
except subprocess.CalledProcessError as cpe:
logging.warning("Error while determining build number\n Using command:\n %s \n Output:\n %s"% (cpe.cmd,cpe.output))
<commit_msg>Clean up un-needed commented line after jkrzywon fixed subprocess bad behaviour<commit_after>__version__ = "4.0b1"
__build__ = "GIT_COMMIT"
try:
import logging
import subprocess
import os
import platform
FNULL = open(os.devnull, 'w')
if platform.system() == "Windows":
args = ['git', 'describe', '--tags']
else:
args = ['git describe --tags']
git_revision = subprocess.check_output(args,
stderr=FNULL,
shell=True)
__build__ = str(git_revision).strip()
except subprocess.CalledProcessError as cpe:
logging.warning("Error while determining build number\n Using command:\n %s \n Output:\n %s"% (cpe.cmd,cpe.output))
|
05a0e498d6b75ce3790831cd2278478d38531741
|
csunplugged/resources/utils/get_options_html.py
|
csunplugged/resources/utils/get_options_html.py
|
"""Module for getting HTML form elements for list of ResourceParameters."""
from django.conf import settings
from lxml import etree
from django.utils.translation import ugettext as _
def get_options_html(options, local_options, request_parameters=None):
"""Return HTML string of form elements for given options.
Args:
options (list): List of ResourceParameters options.
local_options (list): List of ResourceParameters local options.
request_parameters (QueryDict): Request QueryDict for resource form.
Returns:
HTML string
"""
html_elements = []
for parameter in options.values():
html_elements.append(parameter.html_element(request_parameters))
if settings.DEBUG:
html_elements.append(etree.Element("hr"))
h3 = etree.Element("h3")
h3.text = _("Local Generation Only")
html_elements.append(h3)
for parameter in local_options.values():
html_elements.append(parameter.html_element(request_parameters))
html_string = ""
for html_elem in html_elements:
html_string += etree.tostring(html_elem, pretty_print=True, encoding='utf-8').decode('utf-8')
return html_string
|
"""Module for getting HTML form elements for list of ResourceParameters."""
from django.conf import settings
from lxml import etree
def get_options_html(options, local_options, request_parameters=None):
"""Return HTML string of form elements for given options.
Args:
options (list): List of ResourceParameters options.
local_options (list): List of ResourceParameters local options.
request_parameters (QueryDict): Request QueryDict for resource form.
Returns:
HTML string
"""
html_elements = []
for parameter in options.values():
html_elements.append(parameter.html_element(request_parameters))
if settings.DEBUG:
html_elements.append(etree.Element("hr"))
h3 = etree.Element("h3")
h3.text = "Local Generation Only"
html_elements.append(h3)
for parameter in local_options.values():
html_elements.append(parameter.html_element(request_parameters))
html_string = ""
for html_elem in html_elements:
html_string += etree.tostring(html_elem, pretty_print=True, encoding='utf-8').decode('utf-8')
return html_string
|
Remove translation of development string
|
Remove translation of development string
|
Python
|
mit
|
uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged
|
"""Module for getting HTML form elements for list of ResourceParameters."""
from django.conf import settings
from lxml import etree
from django.utils.translation import ugettext as _
def get_options_html(options, local_options, request_parameters=None):
"""Return HTML string of form elements for given options.
Args:
options (list): List of ResourceParameters options.
local_options (list): List of ResourceParameters local options.
request_parameters (QueryDict): Request QueryDict for resource form.
Returns:
HTML string
"""
html_elements = []
for parameter in options.values():
html_elements.append(parameter.html_element(request_parameters))
if settings.DEBUG:
html_elements.append(etree.Element("hr"))
h3 = etree.Element("h3")
h3.text = _("Local Generation Only")
html_elements.append(h3)
for parameter in local_options.values():
html_elements.append(parameter.html_element(request_parameters))
html_string = ""
for html_elem in html_elements:
html_string += etree.tostring(html_elem, pretty_print=True, encoding='utf-8').decode('utf-8')
return html_string
Remove translation of development string
|
"""Module for getting HTML form elements for list of ResourceParameters."""
from django.conf import settings
from lxml import etree
def get_options_html(options, local_options, request_parameters=None):
"""Return HTML string of form elements for given options.
Args:
options (list): List of ResourceParameters options.
local_options (list): List of ResourceParameters local options.
request_parameters (QueryDict): Request QueryDict for resource form.
Returns:
HTML string
"""
html_elements = []
for parameter in options.values():
html_elements.append(parameter.html_element(request_parameters))
if settings.DEBUG:
html_elements.append(etree.Element("hr"))
h3 = etree.Element("h3")
h3.text = "Local Generation Only"
html_elements.append(h3)
for parameter in local_options.values():
html_elements.append(parameter.html_element(request_parameters))
html_string = ""
for html_elem in html_elements:
html_string += etree.tostring(html_elem, pretty_print=True, encoding='utf-8').decode('utf-8')
return html_string
|
<commit_before>"""Module for getting HTML form elements for list of ResourceParameters."""
from django.conf import settings
from lxml import etree
from django.utils.translation import ugettext as _
def get_options_html(options, local_options, request_parameters=None):
"""Return HTML string of form elements for given options.
Args:
options (list): List of ResourceParameters options.
local_options (list): List of ResourceParameters local options.
request_parameters (QueryDict): Request QueryDict for resource form.
Returns:
HTML string
"""
html_elements = []
for parameter in options.values():
html_elements.append(parameter.html_element(request_parameters))
if settings.DEBUG:
html_elements.append(etree.Element("hr"))
h3 = etree.Element("h3")
h3.text = _("Local Generation Only")
html_elements.append(h3)
for parameter in local_options.values():
html_elements.append(parameter.html_element(request_parameters))
html_string = ""
for html_elem in html_elements:
html_string += etree.tostring(html_elem, pretty_print=True, encoding='utf-8').decode('utf-8')
return html_string
<commit_msg>Remove translation of development string<commit_after>
|
"""Module for getting HTML form elements for list of ResourceParameters."""
from django.conf import settings
from lxml import etree
def get_options_html(options, local_options, request_parameters=None):
"""Return HTML string of form elements for given options.
Args:
options (list): List of ResourceParameters options.
local_options (list): List of ResourceParameters local options.
request_parameters (QueryDict): Request QueryDict for resource form.
Returns:
HTML string
"""
html_elements = []
for parameter in options.values():
html_elements.append(parameter.html_element(request_parameters))
if settings.DEBUG:
html_elements.append(etree.Element("hr"))
h3 = etree.Element("h3")
h3.text = "Local Generation Only"
html_elements.append(h3)
for parameter in local_options.values():
html_elements.append(parameter.html_element(request_parameters))
html_string = ""
for html_elem in html_elements:
html_string += etree.tostring(html_elem, pretty_print=True, encoding='utf-8').decode('utf-8')
return html_string
|
"""Module for getting HTML form elements for list of ResourceParameters."""
from django.conf import settings
from lxml import etree
from django.utils.translation import ugettext as _
def get_options_html(options, local_options, request_parameters=None):
"""Return HTML string of form elements for given options.
Args:
options (list): List of ResourceParameters options.
local_options (list): List of ResourceParameters local options.
request_parameters (QueryDict): Request QueryDict for resource form.
Returns:
HTML string
"""
html_elements = []
for parameter in options.values():
html_elements.append(parameter.html_element(request_parameters))
if settings.DEBUG:
html_elements.append(etree.Element("hr"))
h3 = etree.Element("h3")
h3.text = _("Local Generation Only")
html_elements.append(h3)
for parameter in local_options.values():
html_elements.append(parameter.html_element(request_parameters))
html_string = ""
for html_elem in html_elements:
html_string += etree.tostring(html_elem, pretty_print=True, encoding='utf-8').decode('utf-8')
return html_string
Remove translation of development string"""Module for getting HTML form elements for list of ResourceParameters."""
from django.conf import settings
from lxml import etree
def get_options_html(options, local_options, request_parameters=None):
"""Return HTML string of form elements for given options.
Args:
options (list): List of ResourceParameters options.
local_options (list): List of ResourceParameters local options.
request_parameters (QueryDict): Request QueryDict for resource form.
Returns:
HTML string
"""
html_elements = []
for parameter in options.values():
html_elements.append(parameter.html_element(request_parameters))
if settings.DEBUG:
html_elements.append(etree.Element("hr"))
h3 = etree.Element("h3")
h3.text = "Local Generation Only"
html_elements.append(h3)
for parameter in local_options.values():
html_elements.append(parameter.html_element(request_parameters))
html_string = ""
for html_elem in html_elements:
html_string += etree.tostring(html_elem, pretty_print=True, encoding='utf-8').decode('utf-8')
return html_string
|
<commit_before>"""Module for getting HTML form elements for list of ResourceParameters."""
from django.conf import settings
from lxml import etree
from django.utils.translation import ugettext as _
def get_options_html(options, local_options, request_parameters=None):
"""Return HTML string of form elements for given options.
Args:
options (list): List of ResourceParameters options.
local_options (list): List of ResourceParameters local options.
request_parameters (QueryDict): Request QueryDict for resource form.
Returns:
HTML string
"""
html_elements = []
for parameter in options.values():
html_elements.append(parameter.html_element(request_parameters))
if settings.DEBUG:
html_elements.append(etree.Element("hr"))
h3 = etree.Element("h3")
h3.text = _("Local Generation Only")
html_elements.append(h3)
for parameter in local_options.values():
html_elements.append(parameter.html_element(request_parameters))
html_string = ""
for html_elem in html_elements:
html_string += etree.tostring(html_elem, pretty_print=True, encoding='utf-8').decode('utf-8')
return html_string
<commit_msg>Remove translation of development string<commit_after>"""Module for getting HTML form elements for list of ResourceParameters."""
from django.conf import settings
from lxml import etree
def get_options_html(options, local_options, request_parameters=None):
"""Return HTML string of form elements for given options.
Args:
options (list): List of ResourceParameters options.
local_options (list): List of ResourceParameters local options.
request_parameters (QueryDict): Request QueryDict for resource form.
Returns:
HTML string
"""
html_elements = []
for parameter in options.values():
html_elements.append(parameter.html_element(request_parameters))
if settings.DEBUG:
html_elements.append(etree.Element("hr"))
h3 = etree.Element("h3")
h3.text = "Local Generation Only"
html_elements.append(h3)
for parameter in local_options.values():
html_elements.append(parameter.html_element(request_parameters))
html_string = ""
for html_elem in html_elements:
html_string += etree.tostring(html_elem, pretty_print=True, encoding='utf-8').decode('utf-8')
return html_string
|
06fd79674eeb82cd1e6cabca1e513f97ccf48cbf
|
avocado/forms.py
|
avocado/forms.py
|
from django import forms
from django.db import models
from avocado.models import DataField
class DataFieldAdminForm(forms.ModelForm):
def clean_app_name(self):
app_name = self.cleaned_data['app_name']
if models.get_app(app_name) is None:
raise forms.ValidationError('The app "{0}" could not be found'.format(app_name))
return app_name
def clean(self):
cleaned_data = super(DataFieldAdminForm, self).clean()
instance = super(DataFieldAdminForm, self).save(commit=False)
model_name = cleaned_data['model_name']
if instance.model is None:
del cleaned_data['model_name']
msg = 'The model "{0}" could not be found'.format(model_name)
self._errors['model_name'] = self.error_class([msg])
# test `field_name'
field_name = cleaned_data['field_name']
if instance.field is None:
del cleaned_data['field_name']
msg = 'The model "{0}" does not have a field named "{1}"'.format(model_name, field_name)
self._errors['field_name'] = self.error_class([msg])
return cleaned_data
class Meta(object):
model = DataField
|
from django import forms
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from avocado.models import DataField
class DataFieldAdminForm(forms.ModelForm):
def clean_app_name(self):
app_name = self.cleaned_data.get('app_name')
try:
models.get_app(app_name)
except ImproperlyConfigured:
raise forms.ValidationError('The app "{}" could not be found'.format(app_name))
return app_name
def clean(self):
cleaned_data = self.cleaned_data
app_name = self.cleaned_data.get('app_name')
model_name = cleaned_data.get('model_name')
field_name = cleaned_data.get('field_name')
model = models.get_model(app_name, model_name)
if model is None:
del cleaned_data['model_name']
msg = 'The model "{}" could not be found in the app "{}"'.format(model_name, app_name)
self._errors['model_name'] = self.error_class([msg])
elif not model._meta.get_field_by_name(field_name):
del cleaned_data['field_name']
msg = 'The model "{}" does not have a field named "{}"'.format(model_name, field_name)
self._errors['field_name'] = self.error_class([msg])
return cleaned_data
class Meta(object):
model = DataField
|
Fix DataField admin validation for identifiers
|
Fix DataField admin validation for identifiers
|
Python
|
bsd-2-clause
|
murphyke/avocado,murphyke/avocado,murphyke/avocado,murphyke/avocado
|
from django import forms
from django.db import models
from avocado.models import DataField
class DataFieldAdminForm(forms.ModelForm):
def clean_app_name(self):
app_name = self.cleaned_data['app_name']
if models.get_app(app_name) is None:
raise forms.ValidationError('The app "{0}" could not be found'.format(app_name))
return app_name
def clean(self):
cleaned_data = super(DataFieldAdminForm, self).clean()
instance = super(DataFieldAdminForm, self).save(commit=False)
model_name = cleaned_data['model_name']
if instance.model is None:
del cleaned_data['model_name']
msg = 'The model "{0}" could not be found'.format(model_name)
self._errors['model_name'] = self.error_class([msg])
# test `field_name'
field_name = cleaned_data['field_name']
if instance.field is None:
del cleaned_data['field_name']
msg = 'The model "{0}" does not have a field named "{1}"'.format(model_name, field_name)
self._errors['field_name'] = self.error_class([msg])
return cleaned_data
class Meta(object):
model = DataField
Fix DataField admin validation for identifiers
|
from django import forms
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from avocado.models import DataField
class DataFieldAdminForm(forms.ModelForm):
def clean_app_name(self):
app_name = self.cleaned_data.get('app_name')
try:
models.get_app(app_name)
except ImproperlyConfigured:
raise forms.ValidationError('The app "{}" could not be found'.format(app_name))
return app_name
def clean(self):
cleaned_data = self.cleaned_data
app_name = self.cleaned_data.get('app_name')
model_name = cleaned_data.get('model_name')
field_name = cleaned_data.get('field_name')
model = models.get_model(app_name, model_name)
if model is None:
del cleaned_data['model_name']
msg = 'The model "{}" could not be found in the app "{}"'.format(model_name, app_name)
self._errors['model_name'] = self.error_class([msg])
elif not model._meta.get_field_by_name(field_name):
del cleaned_data['field_name']
msg = 'The model "{}" does not have a field named "{}"'.format(model_name, field_name)
self._errors['field_name'] = self.error_class([msg])
return cleaned_data
class Meta(object):
model = DataField
|
<commit_before>from django import forms
from django.db import models
from avocado.models import DataField
class DataFieldAdminForm(forms.ModelForm):
def clean_app_name(self):
app_name = self.cleaned_data['app_name']
if models.get_app(app_name) is None:
raise forms.ValidationError('The app "{0}" could not be found'.format(app_name))
return app_name
def clean(self):
cleaned_data = super(DataFieldAdminForm, self).clean()
instance = super(DataFieldAdminForm, self).save(commit=False)
model_name = cleaned_data['model_name']
if instance.model is None:
del cleaned_data['model_name']
msg = 'The model "{0}" could not be found'.format(model_name)
self._errors['model_name'] = self.error_class([msg])
# test `field_name'
field_name = cleaned_data['field_name']
if instance.field is None:
del cleaned_data['field_name']
msg = 'The model "{0}" does not have a field named "{1}"'.format(model_name, field_name)
self._errors['field_name'] = self.error_class([msg])
return cleaned_data
class Meta(object):
model = DataField
<commit_msg>Fix DataField admin validation for identifiers<commit_after>
|
from django import forms
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from avocado.models import DataField
class DataFieldAdminForm(forms.ModelForm):
def clean_app_name(self):
app_name = self.cleaned_data.get('app_name')
try:
models.get_app(app_name)
except ImproperlyConfigured:
raise forms.ValidationError('The app "{}" could not be found'.format(app_name))
return app_name
def clean(self):
cleaned_data = self.cleaned_data
app_name = self.cleaned_data.get('app_name')
model_name = cleaned_data.get('model_name')
field_name = cleaned_data.get('field_name')
model = models.get_model(app_name, model_name)
if model is None:
del cleaned_data['model_name']
msg = 'The model "{}" could not be found in the app "{}"'.format(model_name, app_name)
self._errors['model_name'] = self.error_class([msg])
elif not model._meta.get_field_by_name(field_name):
del cleaned_data['field_name']
msg = 'The model "{}" does not have a field named "{}"'.format(model_name, field_name)
self._errors['field_name'] = self.error_class([msg])
return cleaned_data
class Meta(object):
model = DataField
|
from django import forms
from django.db import models
from avocado.models import DataField
class DataFieldAdminForm(forms.ModelForm):
def clean_app_name(self):
app_name = self.cleaned_data['app_name']
if models.get_app(app_name) is None:
raise forms.ValidationError('The app "{0}" could not be found'.format(app_name))
return app_name
def clean(self):
cleaned_data = super(DataFieldAdminForm, self).clean()
instance = super(DataFieldAdminForm, self).save(commit=False)
model_name = cleaned_data['model_name']
if instance.model is None:
del cleaned_data['model_name']
msg = 'The model "{0}" could not be found'.format(model_name)
self._errors['model_name'] = self.error_class([msg])
# test `field_name'
field_name = cleaned_data['field_name']
if instance.field is None:
del cleaned_data['field_name']
msg = 'The model "{0}" does not have a field named "{1}"'.format(model_name, field_name)
self._errors['field_name'] = self.error_class([msg])
return cleaned_data
class Meta(object):
model = DataField
Fix DataField admin validation for identifiersfrom django import forms
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from avocado.models import DataField
class DataFieldAdminForm(forms.ModelForm):
def clean_app_name(self):
app_name = self.cleaned_data.get('app_name')
try:
models.get_app(app_name)
except ImproperlyConfigured:
raise forms.ValidationError('The app "{}" could not be found'.format(app_name))
return app_name
def clean(self):
cleaned_data = self.cleaned_data
app_name = self.cleaned_data.get('app_name')
model_name = cleaned_data.get('model_name')
field_name = cleaned_data.get('field_name')
model = models.get_model(app_name, model_name)
if model is None:
del cleaned_data['model_name']
msg = 'The model "{}" could not be found in the app "{}"'.format(model_name, app_name)
self._errors['model_name'] = self.error_class([msg])
elif not model._meta.get_field_by_name(field_name):
del cleaned_data['field_name']
msg = 'The model "{}" does not have a field named "{}"'.format(model_name, field_name)
self._errors['field_name'] = self.error_class([msg])
return cleaned_data
class Meta(object):
model = DataField
|
<commit_before>from django import forms
from django.db import models
from avocado.models import DataField
class DataFieldAdminForm(forms.ModelForm):
def clean_app_name(self):
app_name = self.cleaned_data['app_name']
if models.get_app(app_name) is None:
raise forms.ValidationError('The app "{0}" could not be found'.format(app_name))
return app_name
def clean(self):
cleaned_data = super(DataFieldAdminForm, self).clean()
instance = super(DataFieldAdminForm, self).save(commit=False)
model_name = cleaned_data['model_name']
if instance.model is None:
del cleaned_data['model_name']
msg = 'The model "{0}" could not be found'.format(model_name)
self._errors['model_name'] = self.error_class([msg])
# test `field_name'
field_name = cleaned_data['field_name']
if instance.field is None:
del cleaned_data['field_name']
msg = 'The model "{0}" does not have a field named "{1}"'.format(model_name, field_name)
self._errors['field_name'] = self.error_class([msg])
return cleaned_data
class Meta(object):
model = DataField
<commit_msg>Fix DataField admin validation for identifiers<commit_after>from django import forms
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from avocado.models import DataField
class DataFieldAdminForm(forms.ModelForm):
def clean_app_name(self):
app_name = self.cleaned_data.get('app_name')
try:
models.get_app(app_name)
except ImproperlyConfigured:
raise forms.ValidationError('The app "{}" could not be found'.format(app_name))
return app_name
def clean(self):
cleaned_data = self.cleaned_data
app_name = self.cleaned_data.get('app_name')
model_name = cleaned_data.get('model_name')
field_name = cleaned_data.get('field_name')
model = models.get_model(app_name, model_name)
if model is None:
del cleaned_data['model_name']
msg = 'The model "{}" could not be found in the app "{}"'.format(model_name, app_name)
self._errors['model_name'] = self.error_class([msg])
elif not model._meta.get_field_by_name(field_name):
del cleaned_data['field_name']
msg = 'The model "{}" does not have a field named "{}"'.format(model_name, field_name)
self._errors['field_name'] = self.error_class([msg])
return cleaned_data
class Meta(object):
model = DataField
|
ea4746f6b809c0c3b2a6931bc863121c07ee2c9a
|
lib/plugins/method/__init__.py
|
lib/plugins/method/__init__.py
|
from yapsy.IPlugin import IPlugin
from lib.methods import BaseMethod
class IMethodPlugin(BaseMethod, IPlugin):
def __init__(self):
pass
def setNameAndFactory(self, name, factory):
self.methodName = name
self.factory = factory
|
from yapsy.IPlugin import IPlugin
from lib.methods import BaseMethod
class IMethodPlugin(BaseMethod, IPlugin):
def __init__(self):
pass
def setNameAndFactory(self, name, factory):
self.methodName = name
self.factory = factory
@staticmethod
def supports(methodName):
raise NotImplementedError
|
Make supports method throw NotImplementedError so that methods failing to implement it does not fail silently
|
Make supports method throw NotImplementedError so that methods failing to implement it does not fail silently
|
Python
|
mit
|
factorial-io/fabalicious,factorial-io/fabalicious
|
from yapsy.IPlugin import IPlugin
from lib.methods import BaseMethod
class IMethodPlugin(BaseMethod, IPlugin):
def __init__(self):
pass
def setNameAndFactory(self, name, factory):
self.methodName = name
self.factory = factory
Make supports method throw NotImplementedError so that methods failing to implement it does not fail silently
|
from yapsy.IPlugin import IPlugin
from lib.methods import BaseMethod
class IMethodPlugin(BaseMethod, IPlugin):
def __init__(self):
pass
def setNameAndFactory(self, name, factory):
self.methodName = name
self.factory = factory
@staticmethod
def supports(methodName):
raise NotImplementedError
|
<commit_before>from yapsy.IPlugin import IPlugin
from lib.methods import BaseMethod
class IMethodPlugin(BaseMethod, IPlugin):
def __init__(self):
pass
def setNameAndFactory(self, name, factory):
self.methodName = name
self.factory = factory
<commit_msg>Make supports method throw NotImplementedError so that methods failing to implement it does not fail silently<commit_after>
|
from yapsy.IPlugin import IPlugin
from lib.methods import BaseMethod
class IMethodPlugin(BaseMethod, IPlugin):
def __init__(self):
pass
def setNameAndFactory(self, name, factory):
self.methodName = name
self.factory = factory
@staticmethod
def supports(methodName):
raise NotImplementedError
|
from yapsy.IPlugin import IPlugin
from lib.methods import BaseMethod
class IMethodPlugin(BaseMethod, IPlugin):
def __init__(self):
pass
def setNameAndFactory(self, name, factory):
self.methodName = name
self.factory = factory
Make supports method throw NotImplementedError so that methods failing to implement it does not fail silentlyfrom yapsy.IPlugin import IPlugin
from lib.methods import BaseMethod
class IMethodPlugin(BaseMethod, IPlugin):
def __init__(self):
pass
def setNameAndFactory(self, name, factory):
self.methodName = name
self.factory = factory
@staticmethod
def supports(methodName):
raise NotImplementedError
|
<commit_before>from yapsy.IPlugin import IPlugin
from lib.methods import BaseMethod
class IMethodPlugin(BaseMethod, IPlugin):
def __init__(self):
pass
def setNameAndFactory(self, name, factory):
self.methodName = name
self.factory = factory
<commit_msg>Make supports method throw NotImplementedError so that methods failing to implement it does not fail silently<commit_after>from yapsy.IPlugin import IPlugin
from lib.methods import BaseMethod
class IMethodPlugin(BaseMethod, IPlugin):
def __init__(self):
pass
def setNameAndFactory(self, name, factory):
self.methodName = name
self.factory = factory
@staticmethod
def supports(methodName):
raise NotImplementedError
|
5dd37364a9616d42291b8841552bdd24aebd908a
|
selective_search.py
|
selective_search.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy
import segment
def calc_adjacency_matrix(label_img, n_region):
adjacency = numpy.diag([1] * n_region)
h, w = label_img.shape[0], label_img.shape[1]
for y in range(h):
for x in range(w):
here = label_img[y, x]
if y < h - 1:
b = label_img[y + 1, x]
adjacency[here, b] = adjacency[b, here] = 1
if x < w - 1:
r = label_img[y, x + 1]
adjacency[here, r] = adjacency[r, here] = 1
return adjacency
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy
import segment
def calc_adjacency_matrix(label_img, n_region):
A = numpy.diag([1] * n_region)
h, w = label_img.shape[0], label_img.shape[1]
for y in range(h):
for x in range(w):
here = label_img[y, x]
if y < h - 1:
b = label_img[y + 1, x]
A[here, b] = A[b, here] = 1
if x < w - 1:
r = label_img[y, x + 1]
A[here, r] = A[r, here] = 1
return A
|
Change name of local variable
|
Change name of local variable
|
Python
|
mit
|
belltailjp/selective_search_py,belltailjp/selective_search_py,gamer13/selective_search_py,BradNeuberg/selective_search_py,gamer13/selective_search_py,BradNeuberg/selective_search_py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy
import segment
def calc_adjacency_matrix(label_img, n_region):
adjacency = numpy.diag([1] * n_region)
h, w = label_img.shape[0], label_img.shape[1]
for y in range(h):
for x in range(w):
here = label_img[y, x]
if y < h - 1:
b = label_img[y + 1, x]
adjacency[here, b] = adjacency[b, here] = 1
if x < w - 1:
r = label_img[y, x + 1]
adjacency[here, r] = adjacency[r, here] = 1
return adjacency
Change name of local variable
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy
import segment
def calc_adjacency_matrix(label_img, n_region):
A = numpy.diag([1] * n_region)
h, w = label_img.shape[0], label_img.shape[1]
for y in range(h):
for x in range(w):
here = label_img[y, x]
if y < h - 1:
b = label_img[y + 1, x]
A[here, b] = A[b, here] = 1
if x < w - 1:
r = label_img[y, x + 1]
A[here, r] = A[r, here] = 1
return A
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy
import segment
def calc_adjacency_matrix(label_img, n_region):
adjacency = numpy.diag([1] * n_region)
h, w = label_img.shape[0], label_img.shape[1]
for y in range(h):
for x in range(w):
here = label_img[y, x]
if y < h - 1:
b = label_img[y + 1, x]
adjacency[here, b] = adjacency[b, here] = 1
if x < w - 1:
r = label_img[y, x + 1]
adjacency[here, r] = adjacency[r, here] = 1
return adjacency
<commit_msg>Change name of local variable<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy
import segment
def calc_adjacency_matrix(label_img, n_region):
A = numpy.diag([1] * n_region)
h, w = label_img.shape[0], label_img.shape[1]
for y in range(h):
for x in range(w):
here = label_img[y, x]
if y < h - 1:
b = label_img[y + 1, x]
A[here, b] = A[b, here] = 1
if x < w - 1:
r = label_img[y, x + 1]
A[here, r] = A[r, here] = 1
return A
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy
import segment
def calc_adjacency_matrix(label_img, n_region):
adjacency = numpy.diag([1] * n_region)
h, w = label_img.shape[0], label_img.shape[1]
for y in range(h):
for x in range(w):
here = label_img[y, x]
if y < h - 1:
b = label_img[y + 1, x]
adjacency[here, b] = adjacency[b, here] = 1
if x < w - 1:
r = label_img[y, x + 1]
adjacency[here, r] = adjacency[r, here] = 1
return adjacency
Change name of local variable#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy
import segment
def calc_adjacency_matrix(label_img, n_region):
A = numpy.diag([1] * n_region)
h, w = label_img.shape[0], label_img.shape[1]
for y in range(h):
for x in range(w):
here = label_img[y, x]
if y < h - 1:
b = label_img[y + 1, x]
A[here, b] = A[b, here] = 1
if x < w - 1:
r = label_img[y, x + 1]
A[here, r] = A[r, here] = 1
return A
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy
import segment
def calc_adjacency_matrix(label_img, n_region):
adjacency = numpy.diag([1] * n_region)
h, w = label_img.shape[0], label_img.shape[1]
for y in range(h):
for x in range(w):
here = label_img[y, x]
if y < h - 1:
b = label_img[y + 1, x]
adjacency[here, b] = adjacency[b, here] = 1
if x < w - 1:
r = label_img[y, x + 1]
adjacency[here, r] = adjacency[r, here] = 1
return adjacency
<commit_msg>Change name of local variable<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy
import segment
def calc_adjacency_matrix(label_img, n_region):
A = numpy.diag([1] * n_region)
h, w = label_img.shape[0], label_img.shape[1]
for y in range(h):
for x in range(w):
here = label_img[y, x]
if y < h - 1:
b = label_img[y + 1, x]
A[here, b] = A[b, here] = 1
if x < w - 1:
r = label_img[y, x + 1]
A[here, r] = A[r, here] = 1
return A
|
761e7f1c5134267994cec907bd29da613b225a82
|
django/website/logframe/tests/test_admin.py
|
django/website/logframe/tests/test_admin.py
|
from mock import Mock
from ..admin import RatingAdmin, SubIndicatorAdmin
from ..models import colors, Rating, SubIndicator
def test_sub_indicator_admin_rsult_returns_indicator_result():
sub_indicator = Mock(indicator=Mock(result='result'))
admin = SubIndicatorAdmin(SubIndicator, None)
assert sub_indicator.indicator.result == admin.result(sub_indicator)
def test_rating_admin_colored_name_returns_name_for_colours():
obj = Mock(color=colors[0][0])
admin = RatingAdmin(Rating, None)
assert '<span class="rating-list-item {0}">{1}</span>'.format(colors[0][0], colors[0][1]) == admin.colored_name(obj)
|
from mock import Mock
from ..admin import RatingAdmin, SubIndicatorAdmin
from ..models import colors, Rating, SubIndicator
def test_sub_indicator_admin_result_returns_indicator_result():
sub_indicator = Mock(indicator=Mock(result='result'))
admin = SubIndicatorAdmin(SubIndicator, None)
assert sub_indicator.indicator.result == admin.result(sub_indicator)
def test_rating_admin_colored_name_returns_name_for_colours():
obj = Mock(color=colors[0][0])
admin = RatingAdmin(Rating, None)
assert '<span class="rating-list-item {0}">{1}</span>'.format(colors[0][0], colors[0][1]) == admin.colored_name(obj)
|
Rename test to get rid of typo
|
Rename test to get rid of typo
|
Python
|
agpl-3.0
|
aptivate/kashana,daniell/kashana,daniell/kashana,daniell/kashana,aptivate/kashana,aptivate/alfie,daniell/kashana,aptivate/alfie,aptivate/kashana,aptivate/alfie,aptivate/alfie,aptivate/kashana
|
from mock import Mock
from ..admin import RatingAdmin, SubIndicatorAdmin
from ..models import colors, Rating, SubIndicator
def test_sub_indicator_admin_rsult_returns_indicator_result():
sub_indicator = Mock(indicator=Mock(result='result'))
admin = SubIndicatorAdmin(SubIndicator, None)
assert sub_indicator.indicator.result == admin.result(sub_indicator)
def test_rating_admin_colored_name_returns_name_for_colours():
obj = Mock(color=colors[0][0])
admin = RatingAdmin(Rating, None)
assert '<span class="rating-list-item {0}">{1}</span>'.format(colors[0][0], colors[0][1]) == admin.colored_name(obj)
Rename test to get rid of typo
|
from mock import Mock
from ..admin import RatingAdmin, SubIndicatorAdmin
from ..models import colors, Rating, SubIndicator
def test_sub_indicator_admin_result_returns_indicator_result():
sub_indicator = Mock(indicator=Mock(result='result'))
admin = SubIndicatorAdmin(SubIndicator, None)
assert sub_indicator.indicator.result == admin.result(sub_indicator)
def test_rating_admin_colored_name_returns_name_for_colours():
obj = Mock(color=colors[0][0])
admin = RatingAdmin(Rating, None)
assert '<span class="rating-list-item {0}">{1}</span>'.format(colors[0][0], colors[0][1]) == admin.colored_name(obj)
|
<commit_before>from mock import Mock
from ..admin import RatingAdmin, SubIndicatorAdmin
from ..models import colors, Rating, SubIndicator
def test_sub_indicator_admin_rsult_returns_indicator_result():
sub_indicator = Mock(indicator=Mock(result='result'))
admin = SubIndicatorAdmin(SubIndicator, None)
assert sub_indicator.indicator.result == admin.result(sub_indicator)
def test_rating_admin_colored_name_returns_name_for_colours():
obj = Mock(color=colors[0][0])
admin = RatingAdmin(Rating, None)
assert '<span class="rating-list-item {0}">{1}</span>'.format(colors[0][0], colors[0][1]) == admin.colored_name(obj)
<commit_msg>Rename test to get rid of typo<commit_after>
|
from mock import Mock
from ..admin import RatingAdmin, SubIndicatorAdmin
from ..models import colors, Rating, SubIndicator
def test_sub_indicator_admin_result_returns_indicator_result():
sub_indicator = Mock(indicator=Mock(result='result'))
admin = SubIndicatorAdmin(SubIndicator, None)
assert sub_indicator.indicator.result == admin.result(sub_indicator)
def test_rating_admin_colored_name_returns_name_for_colours():
obj = Mock(color=colors[0][0])
admin = RatingAdmin(Rating, None)
assert '<span class="rating-list-item {0}">{1}</span>'.format(colors[0][0], colors[0][1]) == admin.colored_name(obj)
|
from mock import Mock
from ..admin import RatingAdmin, SubIndicatorAdmin
from ..models import colors, Rating, SubIndicator
def test_sub_indicator_admin_rsult_returns_indicator_result():
sub_indicator = Mock(indicator=Mock(result='result'))
admin = SubIndicatorAdmin(SubIndicator, None)
assert sub_indicator.indicator.result == admin.result(sub_indicator)
def test_rating_admin_colored_name_returns_name_for_colours():
obj = Mock(color=colors[0][0])
admin = RatingAdmin(Rating, None)
assert '<span class="rating-list-item {0}">{1}</span>'.format(colors[0][0], colors[0][1]) == admin.colored_name(obj)
Rename test to get rid of typofrom mock import Mock
from ..admin import RatingAdmin, SubIndicatorAdmin
from ..models import colors, Rating, SubIndicator
def test_sub_indicator_admin_result_returns_indicator_result():
sub_indicator = Mock(indicator=Mock(result='result'))
admin = SubIndicatorAdmin(SubIndicator, None)
assert sub_indicator.indicator.result == admin.result(sub_indicator)
def test_rating_admin_colored_name_returns_name_for_colours():
obj = Mock(color=colors[0][0])
admin = RatingAdmin(Rating, None)
assert '<span class="rating-list-item {0}">{1}</span>'.format(colors[0][0], colors[0][1]) == admin.colored_name(obj)
|
<commit_before>from mock import Mock
from ..admin import RatingAdmin, SubIndicatorAdmin
from ..models import colors, Rating, SubIndicator
def test_sub_indicator_admin_rsult_returns_indicator_result():
sub_indicator = Mock(indicator=Mock(result='result'))
admin = SubIndicatorAdmin(SubIndicator, None)
assert sub_indicator.indicator.result == admin.result(sub_indicator)
def test_rating_admin_colored_name_returns_name_for_colours():
obj = Mock(color=colors[0][0])
admin = RatingAdmin(Rating, None)
assert '<span class="rating-list-item {0}">{1}</span>'.format(colors[0][0], colors[0][1]) == admin.colored_name(obj)
<commit_msg>Rename test to get rid of typo<commit_after>from mock import Mock
from ..admin import RatingAdmin, SubIndicatorAdmin
from ..models import colors, Rating, SubIndicator
def test_sub_indicator_admin_result_returns_indicator_result():
sub_indicator = Mock(indicator=Mock(result='result'))
admin = SubIndicatorAdmin(SubIndicator, None)
assert sub_indicator.indicator.result == admin.result(sub_indicator)
def test_rating_admin_colored_name_returns_name_for_colours():
obj = Mock(color=colors[0][0])
admin = RatingAdmin(Rating, None)
assert '<span class="rating-list-item {0}">{1}</span>'.format(colors[0][0], colors[0][1]) == admin.colored_name(obj)
|
f36a1bb6c9229615d1cc498c02fb7df066e7cd1c
|
app/main/views/_templates.py
|
app/main/views/_templates.py
|
templates = [
{
'type': 'sms',
'name': 'Confirmation',
'body': 'Lasting power of attorney: We’ve received your application. Applications take between 8 and 10 weeks to process.' # noqa
},
{
'type': 'sms',
'name': 'Reminder',
'body': 'Vehicle tax: Your vehicle tax for ((registration number)) expires on ((date)). Tax your vehicle at www.gov.uk/vehicle-tax' # noqa
},
{
'type': 'sms',
'name': 'Warning',
'body': 'Vehicle tax: Your vehicle tax for ((registration number)) has expired. Tax your vehicle at www.gov.uk/vehicle-tax' # noqa
},
{
'type': 'email',
'name': 'Application alert 06/2016',
'subject': 'Your lasting power of attorney application',
'body': """Dear ((name)),
When you’ve made your lasting power of attorney (LPA), you need to register it \
with the Office of the Public Guardian (OPG).
You can apply to register your LPA yourself if you’re able to make your own decisions.
Your attorney can also register it for you. You’ll be told if they do and you can \
object to the registration.
It takes between 8 and 10 weeks to register an LPA if there are no mistakes in the application.
"""
},
{
'type': 'sms',
'name': 'Air quality alert',
'body': 'Air pollution levels will be ((level)) in ((region)) tomorrow.'
},
]
|
templates = [
{
'type': 'sms',
'name': 'Confirmation with details Jan 2016',
'body': '((name)), we’ve received your ((thing)). We’ll contact you again within 1 week.'
},
{
'type': 'sms',
'name': 'Confirmation Jan 2016',
'body': 'We’ve received your payment. We’ll contact you again within 1 week.'
}
]
|
Make SMS templates plausible for hack day
|
Make SMS templates plausible for hack day
This commit replaces the previous SMS templates.
I’ve written a couple of new ones which are plausible for developers on the
hack day:
- one with placeholders
- one without
|
Python
|
mit
|
alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin,gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin
|
templates = [
{
'type': 'sms',
'name': 'Confirmation',
'body': 'Lasting power of attorney: We’ve received your application. Applications take between 8 and 10 weeks to process.' # noqa
},
{
'type': 'sms',
'name': 'Reminder',
'body': 'Vehicle tax: Your vehicle tax for ((registration number)) expires on ((date)). Tax your vehicle at www.gov.uk/vehicle-tax' # noqa
},
{
'type': 'sms',
'name': 'Warning',
'body': 'Vehicle tax: Your vehicle tax for ((registration number)) has expired. Tax your vehicle at www.gov.uk/vehicle-tax' # noqa
},
{
'type': 'email',
'name': 'Application alert 06/2016',
'subject': 'Your lasting power of attorney application',
'body': """Dear ((name)),
When you’ve made your lasting power of attorney (LPA), you need to register it \
with the Office of the Public Guardian (OPG).
You can apply to register your LPA yourself if you’re able to make your own decisions.
Your attorney can also register it for you. You’ll be told if they do and you can \
object to the registration.
It takes between 8 and 10 weeks to register an LPA if there are no mistakes in the application.
"""
},
{
'type': 'sms',
'name': 'Air quality alert',
'body': 'Air pollution levels will be ((level)) in ((region)) tomorrow.'
},
]
Make SMS templates plausible for hack day
This commit replaces the previous SMS templates.
I’ve written a couple of new ones which are plausible for developers on the
hack day:
- one with placeholders
- one without
|
templates = [
{
'type': 'sms',
'name': 'Confirmation with details Jan 2016',
'body': '((name)), we’ve received your ((thing)). We’ll contact you again within 1 week.'
},
{
'type': 'sms',
'name': 'Confirmation Jan 2016',
'body': 'We’ve received your payment. We’ll contact you again within 1 week.'
}
]
|
<commit_before>templates = [
{
'type': 'sms',
'name': 'Confirmation',
'body': 'Lasting power of attorney: We’ve received your application. Applications take between 8 and 10 weeks to process.' # noqa
},
{
'type': 'sms',
'name': 'Reminder',
'body': 'Vehicle tax: Your vehicle tax for ((registration number)) expires on ((date)). Tax your vehicle at www.gov.uk/vehicle-tax' # noqa
},
{
'type': 'sms',
'name': 'Warning',
'body': 'Vehicle tax: Your vehicle tax for ((registration number)) has expired. Tax your vehicle at www.gov.uk/vehicle-tax' # noqa
},
{
'type': 'email',
'name': 'Application alert 06/2016',
'subject': 'Your lasting power of attorney application',
'body': """Dear ((name)),
When you’ve made your lasting power of attorney (LPA), you need to register it \
with the Office of the Public Guardian (OPG).
You can apply to register your LPA yourself if you’re able to make your own decisions.
Your attorney can also register it for you. You’ll be told if they do and you can \
object to the registration.
It takes between 8 and 10 weeks to register an LPA if there are no mistakes in the application.
"""
},
{
'type': 'sms',
'name': 'Air quality alert',
'body': 'Air pollution levels will be ((level)) in ((region)) tomorrow.'
},
]
<commit_msg>Make SMS templates plausible for hack day
This commit replaces the previous SMS templates.
I’ve written a couple of new ones which are plausible for developers on the
hack day:
- one with placeholders
- one without<commit_after>
|
templates = [
{
'type': 'sms',
'name': 'Confirmation with details Jan 2016',
'body': '((name)), we’ve received your ((thing)). We’ll contact you again within 1 week.'
},
{
'type': 'sms',
'name': 'Confirmation Jan 2016',
'body': 'We’ve received your payment. We’ll contact you again within 1 week.'
}
]
|
templates = [
{
'type': 'sms',
'name': 'Confirmation',
'body': 'Lasting power of attorney: We’ve received your application. Applications take between 8 and 10 weeks to process.' # noqa
},
{
'type': 'sms',
'name': 'Reminder',
'body': 'Vehicle tax: Your vehicle tax for ((registration number)) expires on ((date)). Tax your vehicle at www.gov.uk/vehicle-tax' # noqa
},
{
'type': 'sms',
'name': 'Warning',
'body': 'Vehicle tax: Your vehicle tax for ((registration number)) has expired. Tax your vehicle at www.gov.uk/vehicle-tax' # noqa
},
{
'type': 'email',
'name': 'Application alert 06/2016',
'subject': 'Your lasting power of attorney application',
'body': """Dear ((name)),
When you’ve made your lasting power of attorney (LPA), you need to register it \
with the Office of the Public Guardian (OPG).
You can apply to register your LPA yourself if you’re able to make your own decisions.
Your attorney can also register it for you. You’ll be told if they do and you can \
object to the registration.
It takes between 8 and 10 weeks to register an LPA if there are no mistakes in the application.
"""
},
{
'type': 'sms',
'name': 'Air quality alert',
'body': 'Air pollution levels will be ((level)) in ((region)) tomorrow.'
},
]
Make SMS templates plausible for hack day
This commit replaces the previous SMS templates.
I’ve written a couple of new ones which are plausible for developers on the
hack day:
- one with placeholders
- one withouttemplates = [
{
'type': 'sms',
'name': 'Confirmation with details Jan 2016',
'body': '((name)), we’ve received your ((thing)). We’ll contact you again within 1 week.'
},
{
'type': 'sms',
'name': 'Confirmation Jan 2016',
'body': 'We’ve received your payment. We’ll contact you again within 1 week.'
}
]
|
<commit_before>templates = [
{
'type': 'sms',
'name': 'Confirmation',
'body': 'Lasting power of attorney: We’ve received your application. Applications take between 8 and 10 weeks to process.' # noqa
},
{
'type': 'sms',
'name': 'Reminder',
'body': 'Vehicle tax: Your vehicle tax for ((registration number)) expires on ((date)). Tax your vehicle at www.gov.uk/vehicle-tax' # noqa
},
{
'type': 'sms',
'name': 'Warning',
'body': 'Vehicle tax: Your vehicle tax for ((registration number)) has expired. Tax your vehicle at www.gov.uk/vehicle-tax' # noqa
},
{
'type': 'email',
'name': 'Application alert 06/2016',
'subject': 'Your lasting power of attorney application',
'body': """Dear ((name)),
When you’ve made your lasting power of attorney (LPA), you need to register it \
with the Office of the Public Guardian (OPG).
You can apply to register your LPA yourself if you’re able to make your own decisions.
Your attorney can also register it for you. You’ll be told if they do and you can \
object to the registration.
It takes between 8 and 10 weeks to register an LPA if there are no mistakes in the application.
"""
},
{
'type': 'sms',
'name': 'Air quality alert',
'body': 'Air pollution levels will be ((level)) in ((region)) tomorrow.'
},
]
<commit_msg>Make SMS templates plausible for hack day
This commit replaces the previous SMS templates.
I’ve written a couple of new ones which are plausible for developers on the
hack day:
- one with placeholders
- one without<commit_after>templates = [
{
'type': 'sms',
'name': 'Confirmation with details Jan 2016',
'body': '((name)), we’ve received your ((thing)). We’ll contact you again within 1 week.'
},
{
'type': 'sms',
'name': 'Confirmation Jan 2016',
'body': 'We’ve received your payment. We’ll contact you again within 1 week.'
}
]
|
78e24093f314821d7818f31574dbe521c0ae5fef
|
sharepa/__init__.py
|
sharepa/__init__.py
|
from sharepa.search import ShareSearch, basic_search # noqa
from sharepa.analysis import bucket_to_dataframe, merge_dataframes # noqa
|
from sharepa.search import ShareSearch, basic_search # noqa
from sharepa.analysis import bucket_to_dataframe, merge_dataframes # noqa
def source_counts():
return bucket_to_dataframe(
'total_source_counts',
basic_search.execute().aggregations.sourceAgg.buckets
)
|
Make it so that source_counts is only executed on purpose
|
Make it so that source_counts is only executed on purpose
|
Python
|
mit
|
erinspace/sharepa,CenterForOpenScience/sharepa,fabianvf/sharepa,samanehsan/sharepa
|
from sharepa.search import ShareSearch, basic_search # noqa
from sharepa.analysis import bucket_to_dataframe, merge_dataframes # noqa
Make it so that source_counts is only executed on purpose
|
from sharepa.search import ShareSearch, basic_search # noqa
from sharepa.analysis import bucket_to_dataframe, merge_dataframes # noqa
def source_counts():
return bucket_to_dataframe(
'total_source_counts',
basic_search.execute().aggregations.sourceAgg.buckets
)
|
<commit_before>from sharepa.search import ShareSearch, basic_search # noqa
from sharepa.analysis import bucket_to_dataframe, merge_dataframes # noqa
<commit_msg>Make it so that source_counts is only executed on purpose<commit_after>
|
from sharepa.search import ShareSearch, basic_search # noqa
from sharepa.analysis import bucket_to_dataframe, merge_dataframes # noqa
def source_counts():
return bucket_to_dataframe(
'total_source_counts',
basic_search.execute().aggregations.sourceAgg.buckets
)
|
from sharepa.search import ShareSearch, basic_search # noqa
from sharepa.analysis import bucket_to_dataframe, merge_dataframes # noqa
Make it so that source_counts is only executed on purposefrom sharepa.search import ShareSearch, basic_search # noqa
from sharepa.analysis import bucket_to_dataframe, merge_dataframes # noqa
def source_counts():
return bucket_to_dataframe(
'total_source_counts',
basic_search.execute().aggregations.sourceAgg.buckets
)
|
<commit_before>from sharepa.search import ShareSearch, basic_search # noqa
from sharepa.analysis import bucket_to_dataframe, merge_dataframes # noqa
<commit_msg>Make it so that source_counts is only executed on purpose<commit_after>from sharepa.search import ShareSearch, basic_search # noqa
from sharepa.analysis import bucket_to_dataframe, merge_dataframes # noqa
def source_counts():
return bucket_to_dataframe(
'total_source_counts',
basic_search.execute().aggregations.sourceAgg.buckets
)
|
f8eb09c5ffca469b72e51db783c667f83ca56237
|
server/users-microservice/src/config/database.py
|
server/users-microservice/src/config/database.py
|
dbDevURI = 'mysql://usersmicroserviceuser:M@+=mm26xt22$%)(@localhost:3306/usersmicroservice'
dbURI = 'mysql://root:root@mysql:3306/usersmicroservice'
|
dbDevURI = 'mysql://localroot:localroot@localhost:3306/usersmicroservice'
dbURI = 'mysql://root:root@mysql:3306/usersmicroservice'
|
Create a local root user for users micro service
|
Create a local root user for users micro service
|
Python
|
mit
|
Madmous/madClones,Madmous/Trello-Clone,Madmous/madClones,Madmous/Trello-Clone,Madmous/madClones,Madmous/madClones,Madmous/Trello-Clone
|
dbDevURI = 'mysql://usersmicroserviceuser:M@+=mm26xt22$%)(@localhost:3306/usersmicroservice'
dbURI = 'mysql://root:root@mysql:3306/usersmicroservice'Create a local root user for users micro service
|
dbDevURI = 'mysql://localroot:localroot@localhost:3306/usersmicroservice'
dbURI = 'mysql://root:root@mysql:3306/usersmicroservice'
|
<commit_before>dbDevURI = 'mysql://usersmicroserviceuser:M@+=mm26xt22$%)(@localhost:3306/usersmicroservice'
dbURI = 'mysql://root:root@mysql:3306/usersmicroservice'<commit_msg>Create a local root user for users micro service<commit_after>
|
dbDevURI = 'mysql://localroot:localroot@localhost:3306/usersmicroservice'
dbURI = 'mysql://root:root@mysql:3306/usersmicroservice'
|
dbDevURI = 'mysql://usersmicroserviceuser:M@+=mm26xt22$%)(@localhost:3306/usersmicroservice'
dbURI = 'mysql://root:root@mysql:3306/usersmicroservice'Create a local root user for users micro servicedbDevURI = 'mysql://localroot:localroot@localhost:3306/usersmicroservice'
dbURI = 'mysql://root:root@mysql:3306/usersmicroservice'
|
<commit_before>dbDevURI = 'mysql://usersmicroserviceuser:M@+=mm26xt22$%)(@localhost:3306/usersmicroservice'
dbURI = 'mysql://root:root@mysql:3306/usersmicroservice'<commit_msg>Create a local root user for users micro service<commit_after>dbDevURI = 'mysql://localroot:localroot@localhost:3306/usersmicroservice'
dbURI = 'mysql://root:root@mysql:3306/usersmicroservice'
|
98bf3fd5d278f68f22ba0b5c1f98eb8970455255
|
teardown_tests.py
|
teardown_tests.py
|
#!/usr/bin/env python
import os
import shutil
import sys
if not os.environ.get("TEST_NOTEBOOKS"):
sys.exit(0)
for each in list(sys.argv[1:]) + [
"data.tif",
"data.h5",
"data_trim.h5",
"data_dn.h5",
"data_reg.h5",
"data_sub.h5",
"data_f_f0.h5",
"data_wt.h5",
"data_norm.h5",
"data_dict.h5",
"data_post.h5",
"data_traces.h5",
"data_rois.h5",
"data_proj.h5",
"data.zarr",
"data_trim.zarr",
"data_dn.zarr",
"data_reg.zarr",
"data_sub.zarr",
"data_f_f0.zarr",
"data_wt.zarr",
"data_norm.zarr",
"data_dict.zarr",
"data_post.zarr",
"data_traces.zarr",
"data_rois.zarr",
"data_proj.zarr",
"data_proj.html"]:
if os.path.isfile(each):
os.remove(each)
elif os.path.isdir(each):
shutil.rmtree(each)
|
#!/usr/bin/env python
import os
import shutil
import sys
if not os.environ.get("TEST_NOTEBOOKS"):
sys.exit(0)
for each in list(sys.argv[1:]) + [
"data.tif",
"data.h5",
"data_traces.h5",
"data_rois.h5",
"data.zarr",
"data_trim.zarr",
"data_dn.zarr",
"data_reg.zarr",
"data_sub.zarr",
"data_f_f0.zarr",
"data_wt.zarr",
"data_norm.zarr",
"data_dict.zarr",
"data_post.zarr",
"data_traces.zarr",
"data_rois.zarr",
"data_proj.zarr",
"data_proj.html",
"dask-worker-space"]:
if os.path.isfile(each):
os.remove(each)
elif os.path.isdir(each):
shutil.rmtree(each)
|
Update test files to remove in teardown script
|
Update test files to remove in teardown script
Drops HDF5 files that are no longer generated during routine testing
from teardown. Also adds Dask's workspace directory for cleanup.
|
Python
|
apache-2.0
|
DudLab/nanshe_workflow,nanshe-org/nanshe_workflow
|
#!/usr/bin/env python
import os
import shutil
import sys
if not os.environ.get("TEST_NOTEBOOKS"):
sys.exit(0)
for each in list(sys.argv[1:]) + [
"data.tif",
"data.h5",
"data_trim.h5",
"data_dn.h5",
"data_reg.h5",
"data_sub.h5",
"data_f_f0.h5",
"data_wt.h5",
"data_norm.h5",
"data_dict.h5",
"data_post.h5",
"data_traces.h5",
"data_rois.h5",
"data_proj.h5",
"data.zarr",
"data_trim.zarr",
"data_dn.zarr",
"data_reg.zarr",
"data_sub.zarr",
"data_f_f0.zarr",
"data_wt.zarr",
"data_norm.zarr",
"data_dict.zarr",
"data_post.zarr",
"data_traces.zarr",
"data_rois.zarr",
"data_proj.zarr",
"data_proj.html"]:
if os.path.isfile(each):
os.remove(each)
elif os.path.isdir(each):
shutil.rmtree(each)
Update test files to remove in teardown script
Drops HDF5 files that are no longer generated during routine testing
from teardown. Also adds Dask's workspace directory for cleanup.
|
#!/usr/bin/env python
import os
import shutil
import sys
if not os.environ.get("TEST_NOTEBOOKS"):
sys.exit(0)
for each in list(sys.argv[1:]) + [
"data.tif",
"data.h5",
"data_traces.h5",
"data_rois.h5",
"data.zarr",
"data_trim.zarr",
"data_dn.zarr",
"data_reg.zarr",
"data_sub.zarr",
"data_f_f0.zarr",
"data_wt.zarr",
"data_norm.zarr",
"data_dict.zarr",
"data_post.zarr",
"data_traces.zarr",
"data_rois.zarr",
"data_proj.zarr",
"data_proj.html",
"dask-worker-space"]:
if os.path.isfile(each):
os.remove(each)
elif os.path.isdir(each):
shutil.rmtree(each)
|
<commit_before>#!/usr/bin/env python
import os
import shutil
import sys
if not os.environ.get("TEST_NOTEBOOKS"):
sys.exit(0)
for each in list(sys.argv[1:]) + [
"data.tif",
"data.h5",
"data_trim.h5",
"data_dn.h5",
"data_reg.h5",
"data_sub.h5",
"data_f_f0.h5",
"data_wt.h5",
"data_norm.h5",
"data_dict.h5",
"data_post.h5",
"data_traces.h5",
"data_rois.h5",
"data_proj.h5",
"data.zarr",
"data_trim.zarr",
"data_dn.zarr",
"data_reg.zarr",
"data_sub.zarr",
"data_f_f0.zarr",
"data_wt.zarr",
"data_norm.zarr",
"data_dict.zarr",
"data_post.zarr",
"data_traces.zarr",
"data_rois.zarr",
"data_proj.zarr",
"data_proj.html"]:
if os.path.isfile(each):
os.remove(each)
elif os.path.isdir(each):
shutil.rmtree(each)
<commit_msg>Update test files to remove in teardown script
Drops HDF5 files that are no longer generated during routine testing
from teardown. Also adds Dask's workspace directory for cleanup.<commit_after>
|
#!/usr/bin/env python
import os
import shutil
import sys
if not os.environ.get("TEST_NOTEBOOKS"):
sys.exit(0)
for each in list(sys.argv[1:]) + [
"data.tif",
"data.h5",
"data_traces.h5",
"data_rois.h5",
"data.zarr",
"data_trim.zarr",
"data_dn.zarr",
"data_reg.zarr",
"data_sub.zarr",
"data_f_f0.zarr",
"data_wt.zarr",
"data_norm.zarr",
"data_dict.zarr",
"data_post.zarr",
"data_traces.zarr",
"data_rois.zarr",
"data_proj.zarr",
"data_proj.html",
"dask-worker-space"]:
if os.path.isfile(each):
os.remove(each)
elif os.path.isdir(each):
shutil.rmtree(each)
|
#!/usr/bin/env python
import os
import shutil
import sys
if not os.environ.get("TEST_NOTEBOOKS"):
sys.exit(0)
for each in list(sys.argv[1:]) + [
"data.tif",
"data.h5",
"data_trim.h5",
"data_dn.h5",
"data_reg.h5",
"data_sub.h5",
"data_f_f0.h5",
"data_wt.h5",
"data_norm.h5",
"data_dict.h5",
"data_post.h5",
"data_traces.h5",
"data_rois.h5",
"data_proj.h5",
"data.zarr",
"data_trim.zarr",
"data_dn.zarr",
"data_reg.zarr",
"data_sub.zarr",
"data_f_f0.zarr",
"data_wt.zarr",
"data_norm.zarr",
"data_dict.zarr",
"data_post.zarr",
"data_traces.zarr",
"data_rois.zarr",
"data_proj.zarr",
"data_proj.html"]:
if os.path.isfile(each):
os.remove(each)
elif os.path.isdir(each):
shutil.rmtree(each)
Update test files to remove in teardown script
Drops HDF5 files that are no longer generated during routine testing
from teardown. Also adds Dask's workspace directory for cleanup.#!/usr/bin/env python
import os
import shutil
import sys
if not os.environ.get("TEST_NOTEBOOKS"):
sys.exit(0)
for each in list(sys.argv[1:]) + [
"data.tif",
"data.h5",
"data_traces.h5",
"data_rois.h5",
"data.zarr",
"data_trim.zarr",
"data_dn.zarr",
"data_reg.zarr",
"data_sub.zarr",
"data_f_f0.zarr",
"data_wt.zarr",
"data_norm.zarr",
"data_dict.zarr",
"data_post.zarr",
"data_traces.zarr",
"data_rois.zarr",
"data_proj.zarr",
"data_proj.html",
"dask-worker-space"]:
if os.path.isfile(each):
os.remove(each)
elif os.path.isdir(each):
shutil.rmtree(each)
|
<commit_before>#!/usr/bin/env python
import os
import shutil
import sys
if not os.environ.get("TEST_NOTEBOOKS"):
sys.exit(0)
for each in list(sys.argv[1:]) + [
"data.tif",
"data.h5",
"data_trim.h5",
"data_dn.h5",
"data_reg.h5",
"data_sub.h5",
"data_f_f0.h5",
"data_wt.h5",
"data_norm.h5",
"data_dict.h5",
"data_post.h5",
"data_traces.h5",
"data_rois.h5",
"data_proj.h5",
"data.zarr",
"data_trim.zarr",
"data_dn.zarr",
"data_reg.zarr",
"data_sub.zarr",
"data_f_f0.zarr",
"data_wt.zarr",
"data_norm.zarr",
"data_dict.zarr",
"data_post.zarr",
"data_traces.zarr",
"data_rois.zarr",
"data_proj.zarr",
"data_proj.html"]:
if os.path.isfile(each):
os.remove(each)
elif os.path.isdir(each):
shutil.rmtree(each)
<commit_msg>Update test files to remove in teardown script
Drops HDF5 files that are no longer generated during routine testing
from teardown. Also adds Dask's workspace directory for cleanup.<commit_after>#!/usr/bin/env python
import os
import shutil
import sys
if not os.environ.get("TEST_NOTEBOOKS"):
sys.exit(0)
for each in list(sys.argv[1:]) + [
"data.tif",
"data.h5",
"data_traces.h5",
"data_rois.h5",
"data.zarr",
"data_trim.zarr",
"data_dn.zarr",
"data_reg.zarr",
"data_sub.zarr",
"data_f_f0.zarr",
"data_wt.zarr",
"data_norm.zarr",
"data_dict.zarr",
"data_post.zarr",
"data_traces.zarr",
"data_rois.zarr",
"data_proj.zarr",
"data_proj.html",
"dask-worker-space"]:
if os.path.isfile(each):
os.remove(each)
elif os.path.isdir(each):
shutil.rmtree(each)
|
fa1a2bf7d46be4a8f018595e806ced0fc5a7e4c8
|
contrib/internal/build-i18n.py
|
contrib/internal/build-i18n.py
|
#!/usr/bin/env python
from __future__ import unicode_literals
import os
import sys
from django.core.management.commands.compilemessages import compile_messages
from djblets.util.filesystem import is_exe_in_path
if __name__ == '__main__':
if not is_exe_in_path('msgfmt'):
raise RuntimeError('Could not find the "msgfmt" binary.')
cwd = os.getcwd()
os.chdir(os.path.realpath('reviewboard'))
compile_messages(sys.stdout)
os.chdir(cwd)
|
#!/usr/bin/env python
from __future__ import unicode_literals
import os
import sys
import django
from django.core.management import call_command
import reviewboard
if __name__ == '__main__':
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'reviewboard.settings')
if hasattr(django, 'setup'):
# Django >= 1.7
django.setup()
os.chdir(os.path.dirname(reviewboard.__file__))
sys.exit(call_command('compilemessages', interactive=False, verbosity=2))
|
Fix building localization files on modern versions of Django.
|
Fix building localization files on modern versions of Django.
Our build-i18n script, which wraps the building of localization files,
had a failed import on modern versions of Django. We previously were
attempting to import and run the `compilemessages` command class itself.
Now we're doing it correctly by telling Django to invoke the command,
letting it handle the proper lookup and execution itself.
Testing Done:
Successfully built packages against Django 1.11.
Reviewed at https://reviews.reviewboard.org/r/10744/
|
Python
|
mit
|
chipx86/reviewboard,chipx86/reviewboard,reviewboard/reviewboard,reviewboard/reviewboard,chipx86/reviewboard,reviewboard/reviewboard,reviewboard/reviewboard,chipx86/reviewboard
|
#!/usr/bin/env python
from __future__ import unicode_literals
import os
import sys
from django.core.management.commands.compilemessages import compile_messages
from djblets.util.filesystem import is_exe_in_path
if __name__ == '__main__':
if not is_exe_in_path('msgfmt'):
raise RuntimeError('Could not find the "msgfmt" binary.')
cwd = os.getcwd()
os.chdir(os.path.realpath('reviewboard'))
compile_messages(sys.stdout)
os.chdir(cwd)
Fix building localization files on modern versions of Django.
Our build-i18n script, which wraps the building of localization files,
had a failed import on modern versions of Django. We previously were
attempting to import and run the `compilemessages` command class itself.
Now we're doing it correctly by telling Django to invoke the command,
letting it handle the proper lookup and execution itself.
Testing Done:
Successfully built packages against Django 1.11.
Reviewed at https://reviews.reviewboard.org/r/10744/
|
#!/usr/bin/env python
from __future__ import unicode_literals
import os
import sys
import django
from django.core.management import call_command
import reviewboard
if __name__ == '__main__':
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'reviewboard.settings')
if hasattr(django, 'setup'):
# Django >= 1.7
django.setup()
os.chdir(os.path.dirname(reviewboard.__file__))
sys.exit(call_command('compilemessages', interactive=False, verbosity=2))
|
<commit_before>#!/usr/bin/env python
from __future__ import unicode_literals
import os
import sys
from django.core.management.commands.compilemessages import compile_messages
from djblets.util.filesystem import is_exe_in_path
if __name__ == '__main__':
if not is_exe_in_path('msgfmt'):
raise RuntimeError('Could not find the "msgfmt" binary.')
cwd = os.getcwd()
os.chdir(os.path.realpath('reviewboard'))
compile_messages(sys.stdout)
os.chdir(cwd)
<commit_msg>Fix building localization files on modern versions of Django.
Our build-i18n script, which wraps the building of localization files,
had a failed import on modern versions of Django. We previously were
attempting to import and run the `compilemessages` command class itself.
Now we're doing it correctly by telling Django to invoke the command,
letting it handle the proper lookup and execution itself.
Testing Done:
Successfully built packages against Django 1.11.
Reviewed at https://reviews.reviewboard.org/r/10744/<commit_after>
|
#!/usr/bin/env python
from __future__ import unicode_literals
import os
import sys
import django
from django.core.management import call_command
import reviewboard
if __name__ == '__main__':
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'reviewboard.settings')
if hasattr(django, 'setup'):
# Django >= 1.7
django.setup()
os.chdir(os.path.dirname(reviewboard.__file__))
sys.exit(call_command('compilemessages', interactive=False, verbosity=2))
|
#!/usr/bin/env python
from __future__ import unicode_literals
import os
import sys
from django.core.management.commands.compilemessages import compile_messages
from djblets.util.filesystem import is_exe_in_path
if __name__ == '__main__':
if not is_exe_in_path('msgfmt'):
raise RuntimeError('Could not find the "msgfmt" binary.')
cwd = os.getcwd()
os.chdir(os.path.realpath('reviewboard'))
compile_messages(sys.stdout)
os.chdir(cwd)
Fix building localization files on modern versions of Django.
Our build-i18n script, which wraps the building of localization files,
had a failed import on modern versions of Django. We previously were
attempting to import and run the `compilemessages` command class itself.
Now we're doing it correctly by telling Django to invoke the command,
letting it handle the proper lookup and execution itself.
Testing Done:
Successfully built packages against Django 1.11.
Reviewed at https://reviews.reviewboard.org/r/10744/#!/usr/bin/env python
from __future__ import unicode_literals
import os
import sys
import django
from django.core.management import call_command
import reviewboard
if __name__ == '__main__':
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'reviewboard.settings')
if hasattr(django, 'setup'):
# Django >= 1.7
django.setup()
os.chdir(os.path.dirname(reviewboard.__file__))
sys.exit(call_command('compilemessages', interactive=False, verbosity=2))
|
<commit_before>#!/usr/bin/env python
from __future__ import unicode_literals
import os
import sys
from django.core.management.commands.compilemessages import compile_messages
from djblets.util.filesystem import is_exe_in_path
if __name__ == '__main__':
if not is_exe_in_path('msgfmt'):
raise RuntimeError('Could not find the "msgfmt" binary.')
cwd = os.getcwd()
os.chdir(os.path.realpath('reviewboard'))
compile_messages(sys.stdout)
os.chdir(cwd)
<commit_msg>Fix building localization files on modern versions of Django.
Our build-i18n script, which wraps the building of localization files,
had a failed import on modern versions of Django. We previously were
attempting to import and run the `compilemessages` command class itself.
Now we're doing it correctly by telling Django to invoke the command,
letting it handle the proper lookup and execution itself.
Testing Done:
Successfully built packages against Django 1.11.
Reviewed at https://reviews.reviewboard.org/r/10744/<commit_after>#!/usr/bin/env python
from __future__ import unicode_literals
import os
import sys
import django
from django.core.management import call_command
import reviewboard
if __name__ == '__main__':
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'reviewboard.settings')
if hasattr(django, 'setup'):
# Django >= 1.7
django.setup()
os.chdir(os.path.dirname(reviewboard.__file__))
sys.exit(call_command('compilemessages', interactive=False, verbosity=2))
|
4514c5c5644796413c01f6132b3b6afece73ce01
|
txircd/modules/cmode_s.py
|
txircd/modules/cmode_s.py
|
from txircd.modbase import Mode
class SecretMode(Mode):
def listOutput(self, command, data):
if command != "LIST":
return data
cdata = data["cdata"]
if "s" in cdata["channel"].mode and cdata["channel"].name not in data["user"].channels:
data["cdata"] = {}
# other +s stuff is hiding in other modules.
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.mode_s = None
def spawn(self):
self.mode_s = SecretMode()
return {
"modes": {
"cns": self.mode_s
},
"actions": {
"commandextra": [self.mode_s.listOutput]
}
}
def cleanup(self):
self.ircd.removeMode("cns")
self.ircd.actions["commandextra"].remove(self.mode_s.listOutput)
|
from txircd.modbase import Mode
class SecretMode(Mode):
def listOutput(self, command, data):
if command != "LIST":
return data
cdata = data["cdata"]
if "s" in cdata["channel"].mode and cdata["channel"].name not in data["user"].channels:
data["cdata"].clear()
# other +s stuff is hiding in other modules.
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.mode_s = None
def spawn(self):
self.mode_s = SecretMode()
return {
"modes": {
"cns": self.mode_s
},
"actions": {
"commandextra": [self.mode_s.listOutput]
}
}
def cleanup(self):
self.ircd.removeMode("cns")
self.ircd.actions["commandextra"].remove(self.mode_s.listOutput)
|
Make +s actually definitely clear the cdata dictionary
|
Make +s actually definitely clear the cdata dictionary
|
Python
|
bsd-3-clause
|
ElementalAlchemist/txircd,Heufneutje/txircd,DesertBus/txircd
|
from txircd.modbase import Mode
class SecretMode(Mode):
def listOutput(self, command, data):
if command != "LIST":
return data
cdata = data["cdata"]
if "s" in cdata["channel"].mode and cdata["channel"].name not in data["user"].channels:
data["cdata"] = {}
# other +s stuff is hiding in other modules.
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.mode_s = None
def spawn(self):
self.mode_s = SecretMode()
return {
"modes": {
"cns": self.mode_s
},
"actions": {
"commandextra": [self.mode_s.listOutput]
}
}
def cleanup(self):
self.ircd.removeMode("cns")
self.ircd.actions["commandextra"].remove(self.mode_s.listOutput)Make +s actually definitely clear the cdata dictionary
|
from txircd.modbase import Mode
class SecretMode(Mode):
def listOutput(self, command, data):
if command != "LIST":
return data
cdata = data["cdata"]
if "s" in cdata["channel"].mode and cdata["channel"].name not in data["user"].channels:
data["cdata"].clear()
# other +s stuff is hiding in other modules.
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.mode_s = None
def spawn(self):
self.mode_s = SecretMode()
return {
"modes": {
"cns": self.mode_s
},
"actions": {
"commandextra": [self.mode_s.listOutput]
}
}
def cleanup(self):
self.ircd.removeMode("cns")
self.ircd.actions["commandextra"].remove(self.mode_s.listOutput)
|
<commit_before>from txircd.modbase import Mode
class SecretMode(Mode):
def listOutput(self, command, data):
if command != "LIST":
return data
cdata = data["cdata"]
if "s" in cdata["channel"].mode and cdata["channel"].name not in data["user"].channels:
data["cdata"] = {}
# other +s stuff is hiding in other modules.
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.mode_s = None
def spawn(self):
self.mode_s = SecretMode()
return {
"modes": {
"cns": self.mode_s
},
"actions": {
"commandextra": [self.mode_s.listOutput]
}
}
def cleanup(self):
self.ircd.removeMode("cns")
self.ircd.actions["commandextra"].remove(self.mode_s.listOutput)<commit_msg>Make +s actually definitely clear the cdata dictionary<commit_after>
|
from txircd.modbase import Mode
class SecretMode(Mode):
def listOutput(self, command, data):
if command != "LIST":
return data
cdata = data["cdata"]
if "s" in cdata["channel"].mode and cdata["channel"].name not in data["user"].channels:
data["cdata"].clear()
# other +s stuff is hiding in other modules.
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.mode_s = None
def spawn(self):
self.mode_s = SecretMode()
return {
"modes": {
"cns": self.mode_s
},
"actions": {
"commandextra": [self.mode_s.listOutput]
}
}
def cleanup(self):
self.ircd.removeMode("cns")
self.ircd.actions["commandextra"].remove(self.mode_s.listOutput)
|
from txircd.modbase import Mode
class SecretMode(Mode):
def listOutput(self, command, data):
if command != "LIST":
return data
cdata = data["cdata"]
if "s" in cdata["channel"].mode and cdata["channel"].name not in data["user"].channels:
data["cdata"] = {}
# other +s stuff is hiding in other modules.
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.mode_s = None
def spawn(self):
self.mode_s = SecretMode()
return {
"modes": {
"cns": self.mode_s
},
"actions": {
"commandextra": [self.mode_s.listOutput]
}
}
def cleanup(self):
self.ircd.removeMode("cns")
self.ircd.actions["commandextra"].remove(self.mode_s.listOutput)Make +s actually definitely clear the cdata dictionaryfrom txircd.modbase import Mode
class SecretMode(Mode):
def listOutput(self, command, data):
if command != "LIST":
return data
cdata = data["cdata"]
if "s" in cdata["channel"].mode and cdata["channel"].name not in data["user"].channels:
data["cdata"].clear()
# other +s stuff is hiding in other modules.
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.mode_s = None
def spawn(self):
self.mode_s = SecretMode()
return {
"modes": {
"cns": self.mode_s
},
"actions": {
"commandextra": [self.mode_s.listOutput]
}
}
def cleanup(self):
self.ircd.removeMode("cns")
self.ircd.actions["commandextra"].remove(self.mode_s.listOutput)
|
<commit_before>from txircd.modbase import Mode
class SecretMode(Mode):
def listOutput(self, command, data):
if command != "LIST":
return data
cdata = data["cdata"]
if "s" in cdata["channel"].mode and cdata["channel"].name not in data["user"].channels:
data["cdata"] = {}
# other +s stuff is hiding in other modules.
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.mode_s = None
def spawn(self):
self.mode_s = SecretMode()
return {
"modes": {
"cns": self.mode_s
},
"actions": {
"commandextra": [self.mode_s.listOutput]
}
}
def cleanup(self):
self.ircd.removeMode("cns")
self.ircd.actions["commandextra"].remove(self.mode_s.listOutput)<commit_msg>Make +s actually definitely clear the cdata dictionary<commit_after>from txircd.modbase import Mode
class SecretMode(Mode):
def listOutput(self, command, data):
if command != "LIST":
return data
cdata = data["cdata"]
if "s" in cdata["channel"].mode and cdata["channel"].name not in data["user"].channels:
data["cdata"].clear()
# other +s stuff is hiding in other modules.
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.mode_s = None
def spawn(self):
self.mode_s = SecretMode()
return {
"modes": {
"cns": self.mode_s
},
"actions": {
"commandextra": [self.mode_s.listOutput]
}
}
def cleanup(self):
self.ircd.removeMode("cns")
self.ircd.actions["commandextra"].remove(self.mode_s.listOutput)
|
7ea131b0c906c8da66f050e5833ded02f8acb495
|
user_messages/managers.py
|
user_messages/managers.py
|
from django.db.models import Manager
class ThreadManager(Manager):
def inbox(self, user):
return self.filter(userthread__user=user, userthread__deleted=False)
def unread(self, user):
return self.filter(userthread__user=user, userthread__deleted=False, userthread__unread=True)
class MessageManager(Manager):
def new_reply(self, thread, user, content):
msg = self.create(thread=thread, sender=user, content=content)
thread.userthread_set.exclude(user=user).update(deleted=False, unread=True)
return msg
def new_message(self, from_user, to_users, subject, content):
from user_messages.models import Thread
thread = Thread.objects.create(subject=subject)
for user in to_users:
thread.userthread_set.create(user=user, deleted=False, unread=True)
thread.userthread_set.create(user=from_user, deleted=True, unread=False)
return self.create(thread=thread, sender=from_user, content=content)
|
from django.db.models import Manager
from user_messages.signals import message_sent
class ThreadManager(Manager):
def inbox(self, user):
return self.filter(userthread__user=user, userthread__deleted=False)
def unread(self, user):
return self.filter(userthread__user=user, userthread__deleted=False, userthread__unread=True)
class MessageManager(Manager):
def new_reply(self, thread, user, content):
msg = self.create(thread=thread, sender=user, content=content)
thread.userthread_set.exclude(user=user).update(deleted=False, unread=True)
message_sent.send(sender=self.model, message=msg, thread=thread)
return msg
def new_message(self, from_user, to_users, subject, content):
from user_messages.models import Thread
thread = Thread.objects.create(subject=subject)
for user in to_users:
thread.userthread_set.create(user=user, deleted=False, unread=True)
thread.userthread_set.create(user=from_user, deleted=True, unread=False)
msg = self.create(thread=thread, sender=from_user, content=content)
message_sent.send(sender=self.model, message=msg, thread=thread)
return msg
|
Send a signal on each message that is sent to allow for external customization.
|
Send a signal on each message that is sent to allow for external customization.
|
Python
|
mit
|
arthur-wsw/pinax-messages,eldarion/user_messages,eldarion/user_messages,pinax/pinax-messages,pinax/pinax-messages,arthur-wsw/pinax-messages
|
from django.db.models import Manager
class ThreadManager(Manager):
def inbox(self, user):
return self.filter(userthread__user=user, userthread__deleted=False)
def unread(self, user):
return self.filter(userthread__user=user, userthread__deleted=False, userthread__unread=True)
class MessageManager(Manager):
def new_reply(self, thread, user, content):
msg = self.create(thread=thread, sender=user, content=content)
thread.userthread_set.exclude(user=user).update(deleted=False, unread=True)
return msg
def new_message(self, from_user, to_users, subject, content):
from user_messages.models import Thread
thread = Thread.objects.create(subject=subject)
for user in to_users:
thread.userthread_set.create(user=user, deleted=False, unread=True)
thread.userthread_set.create(user=from_user, deleted=True, unread=False)
return self.create(thread=thread, sender=from_user, content=content)
Send a signal on each message that is sent to allow for external customization.
|
from django.db.models import Manager
from user_messages.signals import message_sent
class ThreadManager(Manager):
def inbox(self, user):
return self.filter(userthread__user=user, userthread__deleted=False)
def unread(self, user):
return self.filter(userthread__user=user, userthread__deleted=False, userthread__unread=True)
class MessageManager(Manager):
def new_reply(self, thread, user, content):
msg = self.create(thread=thread, sender=user, content=content)
thread.userthread_set.exclude(user=user).update(deleted=False, unread=True)
message_sent.send(sender=self.model, message=msg, thread=thread)
return msg
def new_message(self, from_user, to_users, subject, content):
from user_messages.models import Thread
thread = Thread.objects.create(subject=subject)
for user in to_users:
thread.userthread_set.create(user=user, deleted=False, unread=True)
thread.userthread_set.create(user=from_user, deleted=True, unread=False)
msg = self.create(thread=thread, sender=from_user, content=content)
message_sent.send(sender=self.model, message=msg, thread=thread)
return msg
|
<commit_before>from django.db.models import Manager
class ThreadManager(Manager):
def inbox(self, user):
return self.filter(userthread__user=user, userthread__deleted=False)
def unread(self, user):
return self.filter(userthread__user=user, userthread__deleted=False, userthread__unread=True)
class MessageManager(Manager):
def new_reply(self, thread, user, content):
msg = self.create(thread=thread, sender=user, content=content)
thread.userthread_set.exclude(user=user).update(deleted=False, unread=True)
return msg
def new_message(self, from_user, to_users, subject, content):
from user_messages.models import Thread
thread = Thread.objects.create(subject=subject)
for user in to_users:
thread.userthread_set.create(user=user, deleted=False, unread=True)
thread.userthread_set.create(user=from_user, deleted=True, unread=False)
return self.create(thread=thread, sender=from_user, content=content)
<commit_msg>Send a signal on each message that is sent to allow for external customization.<commit_after>
|
from django.db.models import Manager
from user_messages.signals import message_sent
class ThreadManager(Manager):
def inbox(self, user):
return self.filter(userthread__user=user, userthread__deleted=False)
def unread(self, user):
return self.filter(userthread__user=user, userthread__deleted=False, userthread__unread=True)
class MessageManager(Manager):
def new_reply(self, thread, user, content):
msg = self.create(thread=thread, sender=user, content=content)
thread.userthread_set.exclude(user=user).update(deleted=False, unread=True)
message_sent.send(sender=self.model, message=msg, thread=thread)
return msg
def new_message(self, from_user, to_users, subject, content):
from user_messages.models import Thread
thread = Thread.objects.create(subject=subject)
for user in to_users:
thread.userthread_set.create(user=user, deleted=False, unread=True)
thread.userthread_set.create(user=from_user, deleted=True, unread=False)
msg = self.create(thread=thread, sender=from_user, content=content)
message_sent.send(sender=self.model, message=msg, thread=thread)
return msg
|
from django.db.models import Manager
class ThreadManager(Manager):
def inbox(self, user):
return self.filter(userthread__user=user, userthread__deleted=False)
def unread(self, user):
return self.filter(userthread__user=user, userthread__deleted=False, userthread__unread=True)
class MessageManager(Manager):
def new_reply(self, thread, user, content):
msg = self.create(thread=thread, sender=user, content=content)
thread.userthread_set.exclude(user=user).update(deleted=False, unread=True)
return msg
def new_message(self, from_user, to_users, subject, content):
from user_messages.models import Thread
thread = Thread.objects.create(subject=subject)
for user in to_users:
thread.userthread_set.create(user=user, deleted=False, unread=True)
thread.userthread_set.create(user=from_user, deleted=True, unread=False)
return self.create(thread=thread, sender=from_user, content=content)
Send a signal on each message that is sent to allow for external customization.from django.db.models import Manager
from user_messages.signals import message_sent
class ThreadManager(Manager):
def inbox(self, user):
return self.filter(userthread__user=user, userthread__deleted=False)
def unread(self, user):
return self.filter(userthread__user=user, userthread__deleted=False, userthread__unread=True)
class MessageManager(Manager):
def new_reply(self, thread, user, content):
msg = self.create(thread=thread, sender=user, content=content)
thread.userthread_set.exclude(user=user).update(deleted=False, unread=True)
message_sent.send(sender=self.model, message=msg, thread=thread)
return msg
def new_message(self, from_user, to_users, subject, content):
from user_messages.models import Thread
thread = Thread.objects.create(subject=subject)
for user in to_users:
thread.userthread_set.create(user=user, deleted=False, unread=True)
thread.userthread_set.create(user=from_user, deleted=True, unread=False)
msg = self.create(thread=thread, sender=from_user, content=content)
message_sent.send(sender=self.model, message=msg, thread=thread)
return msg
|
<commit_before>from django.db.models import Manager
class ThreadManager(Manager):
def inbox(self, user):
return self.filter(userthread__user=user, userthread__deleted=False)
def unread(self, user):
return self.filter(userthread__user=user, userthread__deleted=False, userthread__unread=True)
class MessageManager(Manager):
def new_reply(self, thread, user, content):
msg = self.create(thread=thread, sender=user, content=content)
thread.userthread_set.exclude(user=user).update(deleted=False, unread=True)
return msg
def new_message(self, from_user, to_users, subject, content):
from user_messages.models import Thread
thread = Thread.objects.create(subject=subject)
for user in to_users:
thread.userthread_set.create(user=user, deleted=False, unread=True)
thread.userthread_set.create(user=from_user, deleted=True, unread=False)
return self.create(thread=thread, sender=from_user, content=content)
<commit_msg>Send a signal on each message that is sent to allow for external customization.<commit_after>from django.db.models import Manager
from user_messages.signals import message_sent
class ThreadManager(Manager):
def inbox(self, user):
return self.filter(userthread__user=user, userthread__deleted=False)
def unread(self, user):
return self.filter(userthread__user=user, userthread__deleted=False, userthread__unread=True)
class MessageManager(Manager):
def new_reply(self, thread, user, content):
msg = self.create(thread=thread, sender=user, content=content)
thread.userthread_set.exclude(user=user).update(deleted=False, unread=True)
message_sent.send(sender=self.model, message=msg, thread=thread)
return msg
def new_message(self, from_user, to_users, subject, content):
from user_messages.models import Thread
thread = Thread.objects.create(subject=subject)
for user in to_users:
thread.userthread_set.create(user=user, deleted=False, unread=True)
thread.userthread_set.create(user=from_user, deleted=True, unread=False)
msg = self.create(thread=thread, sender=from_user, content=content)
message_sent.send(sender=self.model, message=msg, thread=thread)
return msg
|
5e1815f094f40b527406a07ea1ce751ee0b074a6
|
tests/__init__.py
|
tests/__init__.py
|
tests = (
'parse_token',
'variable_fields',
'filters',
'blockextend',
'template',
)
|
tests = (
'parse_token',
'variable_fields',
'filters',
'default_filters',
'blockextend',
'template',
)
|
Add defaults filters tests into all tests list
|
Add defaults filters tests into all tests list
|
Python
|
bsd-3-clause
|
GrAndSE/lighty-template,GrAndSE/lighty
|
tests = (
'parse_token',
'variable_fields',
'filters',
'blockextend',
'template',
)
Add defaults filters tests into all tests list
|
tests = (
'parse_token',
'variable_fields',
'filters',
'default_filters',
'blockextend',
'template',
)
|
<commit_before>tests = (
'parse_token',
'variable_fields',
'filters',
'blockextend',
'template',
)
<commit_msg>Add defaults filters tests into all tests list<commit_after>
|
tests = (
'parse_token',
'variable_fields',
'filters',
'default_filters',
'blockextend',
'template',
)
|
tests = (
'parse_token',
'variable_fields',
'filters',
'blockextend',
'template',
)
Add defaults filters tests into all tests listtests = (
'parse_token',
'variable_fields',
'filters',
'default_filters',
'blockextend',
'template',
)
|
<commit_before>tests = (
'parse_token',
'variable_fields',
'filters',
'blockextend',
'template',
)
<commit_msg>Add defaults filters tests into all tests list<commit_after>tests = (
'parse_token',
'variable_fields',
'filters',
'default_filters',
'blockextend',
'template',
)
|
7fef5ea08c88b59513ff4fce59ccfbd7862122d8
|
salt/beacons/proxy_example.py
|
salt/beacons/proxy_example.py
|
# -*- coding: utf-8 -*-
'''
Example beacon to use with salt-proxy
.. code-block:: yaml
beacons:
proxy_example:
foo: bar
'''
# Import Python libs
from __future__ import absolute_import
# Important: If used with salt-proxy
# this is required for the beacon to load!!!
__proxyenabled__ = ['*']
__virtualname__ = 'proxy_example'
import logging
log = logging.getLogger(__name__)
def __virtual__():
'''
Trivially let the beacon load for the test example.
For a production beacon we should probably have some expression here.
'''
return True
def validate(config):
'''
Validate the beacon configuration
'''
if not isinstance(config, dict):
log.info('Configuration for rest_example beacon must be a dictionary.')
return False
return True
def beacon(config):
'''
Called several times each second
https://docs.saltstack.com/en/latest/topics/beacons/#the-beacon-function
.. code-block:: yaml
beacons:
proxy_example:
foo: bar
'''
# TBD
# Call rest.py and return the result
ret = [{'foo': config['foo']}]
return ret
|
# -*- coding: utf-8 -*-
'''
Example beacon to use with salt-proxy
.. code-block:: yaml
beacons:
proxy_example:
endpoint: beacon
'''
# Import Python libs
from __future__ import absolute_import
import logging
# Import salt libs
import salt.utils.http
# Important: If used with salt-proxy
# this is required for the beacon to load!!!
__proxyenabled__ = ['*']
__virtualname__ = 'proxy_example'
log = logging.getLogger(__name__)
def __virtual__():
'''
Trivially let the beacon load for the test example.
For a production beacon we should probably have some expression here.
'''
return True
def validate(config):
'''
Validate the beacon configuration
'''
if not isinstance(config, dict):
log.info('Configuration for rest_example beacon must be a dictionary.')
return False
return True
def beacon(config):
'''
Called several times each second
https://docs.saltstack.com/en/latest/topics/beacons/#the-beacon-function
.. code-block:: yaml
beacons:
proxy_example:
endpoint: beacon
'''
# Important!!!
# Although this toy example makes an HTTP call
# to get beacon information
# please be advised that doing CPU or IO intensive
# operations in this method will cause the beacon loop
# to block.
beacon_url = '{}{}'.format(__opts__['proxy']['url'],
config['endpoint'])
r = salt.utils.http.query(beacon_url,
decode_type='json',
decode=True)
return [r['dict']]
|
Make a call to beacon end point
|
Make a call to beacon end point
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
# -*- coding: utf-8 -*-
'''
Example beacon to use with salt-proxy
.. code-block:: yaml
beacons:
proxy_example:
foo: bar
'''
# Import Python libs
from __future__ import absolute_import
# Important: If used with salt-proxy
# this is required for the beacon to load!!!
__proxyenabled__ = ['*']
__virtualname__ = 'proxy_example'
import logging
log = logging.getLogger(__name__)
def __virtual__():
'''
Trivially let the beacon load for the test example.
For a production beacon we should probably have some expression here.
'''
return True
def validate(config):
'''
Validate the beacon configuration
'''
if not isinstance(config, dict):
log.info('Configuration for rest_example beacon must be a dictionary.')
return False
return True
def beacon(config):
'''
Called several times each second
https://docs.saltstack.com/en/latest/topics/beacons/#the-beacon-function
.. code-block:: yaml
beacons:
proxy_example:
foo: bar
'''
# TBD
# Call rest.py and return the result
ret = [{'foo': config['foo']}]
return ret
Make a call to beacon end point
|
# -*- coding: utf-8 -*-
'''
Example beacon to use with salt-proxy
.. code-block:: yaml
beacons:
proxy_example:
endpoint: beacon
'''
# Import Python libs
from __future__ import absolute_import
import logging
# Import salt libs
import salt.utils.http
# Important: If used with salt-proxy
# this is required for the beacon to load!!!
__proxyenabled__ = ['*']
__virtualname__ = 'proxy_example'
log = logging.getLogger(__name__)
def __virtual__():
'''
Trivially let the beacon load for the test example.
For a production beacon we should probably have some expression here.
'''
return True
def validate(config):
'''
Validate the beacon configuration
'''
if not isinstance(config, dict):
log.info('Configuration for rest_example beacon must be a dictionary.')
return False
return True
def beacon(config):
'''
Called several times each second
https://docs.saltstack.com/en/latest/topics/beacons/#the-beacon-function
.. code-block:: yaml
beacons:
proxy_example:
endpoint: beacon
'''
# Important!!!
# Although this toy example makes an HTTP call
# to get beacon information
# please be advised that doing CPU or IO intensive
# operations in this method will cause the beacon loop
# to block.
beacon_url = '{}{}'.format(__opts__['proxy']['url'],
config['endpoint'])
r = salt.utils.http.query(beacon_url,
decode_type='json',
decode=True)
return [r['dict']]
|
<commit_before># -*- coding: utf-8 -*-
'''
Example beacon to use with salt-proxy
.. code-block:: yaml
beacons:
proxy_example:
foo: bar
'''
# Import Python libs
from __future__ import absolute_import
# Important: If used with salt-proxy
# this is required for the beacon to load!!!
__proxyenabled__ = ['*']
__virtualname__ = 'proxy_example'
import logging
log = logging.getLogger(__name__)
def __virtual__():
'''
Trivially let the beacon load for the test example.
For a production beacon we should probably have some expression here.
'''
return True
def validate(config):
'''
Validate the beacon configuration
'''
if not isinstance(config, dict):
log.info('Configuration for rest_example beacon must be a dictionary.')
return False
return True
def beacon(config):
'''
Called several times each second
https://docs.saltstack.com/en/latest/topics/beacons/#the-beacon-function
.. code-block:: yaml
beacons:
proxy_example:
foo: bar
'''
# TBD
# Call rest.py and return the result
ret = [{'foo': config['foo']}]
return ret
<commit_msg>Make a call to beacon end point<commit_after>
|
# -*- coding: utf-8 -*-
'''
Example beacon to use with salt-proxy
.. code-block:: yaml
beacons:
proxy_example:
endpoint: beacon
'''
# Import Python libs
from __future__ import absolute_import
import logging
# Import salt libs
import salt.utils.http
# Important: If used with salt-proxy
# this is required for the beacon to load!!!
__proxyenabled__ = ['*']
__virtualname__ = 'proxy_example'
log = logging.getLogger(__name__)
def __virtual__():
'''
Trivially let the beacon load for the test example.
For a production beacon we should probably have some expression here.
'''
return True
def validate(config):
'''
Validate the beacon configuration
'''
if not isinstance(config, dict):
log.info('Configuration for rest_example beacon must be a dictionary.')
return False
return True
def beacon(config):
'''
Called several times each second
https://docs.saltstack.com/en/latest/topics/beacons/#the-beacon-function
.. code-block:: yaml
beacons:
proxy_example:
endpoint: beacon
'''
# Important!!!
# Although this toy example makes an HTTP call
# to get beacon information
# please be advised that doing CPU or IO intensive
# operations in this method will cause the beacon loop
# to block.
beacon_url = '{}{}'.format(__opts__['proxy']['url'],
config['endpoint'])
r = salt.utils.http.query(beacon_url,
decode_type='json',
decode=True)
return [r['dict']]
|
# -*- coding: utf-8 -*-
'''
Example beacon to use with salt-proxy
.. code-block:: yaml
beacons:
proxy_example:
foo: bar
'''
# Import Python libs
from __future__ import absolute_import
# Important: If used with salt-proxy
# this is required for the beacon to load!!!
__proxyenabled__ = ['*']
__virtualname__ = 'proxy_example'
import logging
log = logging.getLogger(__name__)
def __virtual__():
'''
Trivially let the beacon load for the test example.
For a production beacon we should probably have some expression here.
'''
return True
def validate(config):
'''
Validate the beacon configuration
'''
if not isinstance(config, dict):
log.info('Configuration for rest_example beacon must be a dictionary.')
return False
return True
def beacon(config):
'''
Called several times each second
https://docs.saltstack.com/en/latest/topics/beacons/#the-beacon-function
.. code-block:: yaml
beacons:
proxy_example:
foo: bar
'''
# TBD
# Call rest.py and return the result
ret = [{'foo': config['foo']}]
return ret
Make a call to beacon end point# -*- coding: utf-8 -*-
'''
Example beacon to use with salt-proxy
.. code-block:: yaml
beacons:
proxy_example:
endpoint: beacon
'''
# Import Python libs
from __future__ import absolute_import
import logging
# Import salt libs
import salt.utils.http
# Important: If used with salt-proxy
# this is required for the beacon to load!!!
__proxyenabled__ = ['*']
__virtualname__ = 'proxy_example'
log = logging.getLogger(__name__)
def __virtual__():
'''
Trivially let the beacon load for the test example.
For a production beacon we should probably have some expression here.
'''
return True
def validate(config):
'''
Validate the beacon configuration
'''
if not isinstance(config, dict):
log.info('Configuration for rest_example beacon must be a dictionary.')
return False
return True
def beacon(config):
'''
Called several times each second
https://docs.saltstack.com/en/latest/topics/beacons/#the-beacon-function
.. code-block:: yaml
beacons:
proxy_example:
endpoint: beacon
'''
# Important!!!
# Although this toy example makes an HTTP call
# to get beacon information
# please be advised that doing CPU or IO intensive
# operations in this method will cause the beacon loop
# to block.
beacon_url = '{}{}'.format(__opts__['proxy']['url'],
config['endpoint'])
r = salt.utils.http.query(beacon_url,
decode_type='json',
decode=True)
return [r['dict']]
|
<commit_before># -*- coding: utf-8 -*-
'''
Example beacon to use with salt-proxy
.. code-block:: yaml
beacons:
proxy_example:
foo: bar
'''
# Import Python libs
from __future__ import absolute_import
# Important: If used with salt-proxy
# this is required for the beacon to load!!!
__proxyenabled__ = ['*']
__virtualname__ = 'proxy_example'
import logging
log = logging.getLogger(__name__)
def __virtual__():
'''
Trivially let the beacon load for the test example.
For a production beacon we should probably have some expression here.
'''
return True
def validate(config):
'''
Validate the beacon configuration
'''
if not isinstance(config, dict):
log.info('Configuration for rest_example beacon must be a dictionary.')
return False
return True
def beacon(config):
'''
Called several times each second
https://docs.saltstack.com/en/latest/topics/beacons/#the-beacon-function
.. code-block:: yaml
beacons:
proxy_example:
foo: bar
'''
# TBD
# Call rest.py and return the result
ret = [{'foo': config['foo']}]
return ret
<commit_msg>Make a call to beacon end point<commit_after># -*- coding: utf-8 -*-
'''
Example beacon to use with salt-proxy
.. code-block:: yaml
beacons:
proxy_example:
endpoint: beacon
'''
# Import Python libs
from __future__ import absolute_import
import logging
# Import salt libs
import salt.utils.http
# Important: If used with salt-proxy
# this is required for the beacon to load!!!
__proxyenabled__ = ['*']
__virtualname__ = 'proxy_example'
log = logging.getLogger(__name__)
def __virtual__():
'''
Trivially let the beacon load for the test example.
For a production beacon we should probably have some expression here.
'''
return True
def validate(config):
'''
Validate the beacon configuration
'''
if not isinstance(config, dict):
log.info('Configuration for rest_example beacon must be a dictionary.')
return False
return True
def beacon(config):
'''
Called several times each second
https://docs.saltstack.com/en/latest/topics/beacons/#the-beacon-function
.. code-block:: yaml
beacons:
proxy_example:
endpoint: beacon
'''
# Important!!!
# Although this toy example makes an HTTP call
# to get beacon information
# please be advised that doing CPU or IO intensive
# operations in this method will cause the beacon loop
# to block.
beacon_url = '{}{}'.format(__opts__['proxy']['url'],
config['endpoint'])
r = salt.utils.http.query(beacon_url,
decode_type='json',
decode=True)
return [r['dict']]
|
7bd3b5a9f1664bd153c934d75f245a605aadee2a
|
ovp_core/models/cause.py
|
ovp_core/models/cause.py
|
from django.db import models
class Cause(models.Model):
name = models.CharField('name', max_length=100)
def __str__(self):
return self.name
class Meta:
app_label = 'ovp_core'
verbose_name = 'cause'
verbose_name_plural = 'causes'
|
import vinaigrette
from django.db import models
class Cause(models.Model):
name = models.CharField('name', max_length=100)
def __str__(self):
return self.name
class Meta:
app_label = 'ovp_core'
verbose_name = 'cause'
verbose_name_plural = 'causes'
vinaigrette.register(Cause, ['name'])
|
Apply django-vinaigrette to Cause model
|
Apply django-vinaigrette to Cause model
|
Python
|
agpl-3.0
|
OpenVolunteeringPlatform/django-ovp-core,OpenVolunteeringPlatform/django-ovp-core
|
from django.db import models
class Cause(models.Model):
name = models.CharField('name', max_length=100)
def __str__(self):
return self.name
class Meta:
app_label = 'ovp_core'
verbose_name = 'cause'
verbose_name_plural = 'causes'
Apply django-vinaigrette to Cause model
|
import vinaigrette
from django.db import models
class Cause(models.Model):
name = models.CharField('name', max_length=100)
def __str__(self):
return self.name
class Meta:
app_label = 'ovp_core'
verbose_name = 'cause'
verbose_name_plural = 'causes'
vinaigrette.register(Cause, ['name'])
|
<commit_before>from django.db import models
class Cause(models.Model):
name = models.CharField('name', max_length=100)
def __str__(self):
return self.name
class Meta:
app_label = 'ovp_core'
verbose_name = 'cause'
verbose_name_plural = 'causes'
<commit_msg>Apply django-vinaigrette to Cause model<commit_after>
|
import vinaigrette
from django.db import models
class Cause(models.Model):
name = models.CharField('name', max_length=100)
def __str__(self):
return self.name
class Meta:
app_label = 'ovp_core'
verbose_name = 'cause'
verbose_name_plural = 'causes'
vinaigrette.register(Cause, ['name'])
|
from django.db import models
class Cause(models.Model):
name = models.CharField('name', max_length=100)
def __str__(self):
return self.name
class Meta:
app_label = 'ovp_core'
verbose_name = 'cause'
verbose_name_plural = 'causes'
Apply django-vinaigrette to Cause modelimport vinaigrette
from django.db import models
class Cause(models.Model):
name = models.CharField('name', max_length=100)
def __str__(self):
return self.name
class Meta:
app_label = 'ovp_core'
verbose_name = 'cause'
verbose_name_plural = 'causes'
vinaigrette.register(Cause, ['name'])
|
<commit_before>from django.db import models
class Cause(models.Model):
name = models.CharField('name', max_length=100)
def __str__(self):
return self.name
class Meta:
app_label = 'ovp_core'
verbose_name = 'cause'
verbose_name_plural = 'causes'
<commit_msg>Apply django-vinaigrette to Cause model<commit_after>import vinaigrette
from django.db import models
class Cause(models.Model):
name = models.CharField('name', max_length=100)
def __str__(self):
return self.name
class Meta:
app_label = 'ovp_core'
verbose_name = 'cause'
verbose_name_plural = 'causes'
vinaigrette.register(Cause, ['name'])
|
3c616f21b962218fafd17fcf4a7c673e49566636
|
ph_unfolder/analysis/functions.py
|
ph_unfolder/analysis/functions.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division,
print_function, unicode_literals)
__author__ = "Yuji Ikeda"
import numpy as np
def lorentzian(x, position, width):
return 1.0 / (np.pi * width * (1.0 + ((x - position) / width) ** 2))
def lorentzian_unnormalized(x, position, width, norm):
return norm * lorentzian(position, width)
def gaussian(x, position, width):
sigma = width / np.sqrt(2.0 * np.log(2.0))
tmp = np.exp(- (x - position) ** 2 / (2.0 * sigma ** 2))
return 1.0 / np.sqrt(2.0 * np.pi) / sigma * tmp
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division,
print_function, unicode_literals)
__author__ = "Yuji Ikeda"
import numpy as np
def lorentzian(x, position, width):
return 1.0 / (np.pi * width * (1.0 + ((x - position) / width) ** 2))
def lorentzian_unnormalized(x, position, width, norm):
return norm * lorentzian(x, position, width)
def gaussian(x, position, width):
sigma = width / np.sqrt(2.0 * np.log(2.0))
tmp = np.exp(- (x - position) ** 2 / (2.0 * sigma ** 2))
return 1.0 / np.sqrt(2.0 * np.pi) / sigma * tmp
|
Fix to give x to lorentzian in lorentzian_unnormalized
|
Fix to give x to lorentzian in lorentzian_unnormalized
|
Python
|
mit
|
yuzie007/upho,yuzie007/ph_unfolder
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division,
print_function, unicode_literals)
__author__ = "Yuji Ikeda"
import numpy as np
def lorentzian(x, position, width):
return 1.0 / (np.pi * width * (1.0 + ((x - position) / width) ** 2))
def lorentzian_unnormalized(x, position, width, norm):
return norm * lorentzian(position, width)
def gaussian(x, position, width):
sigma = width / np.sqrt(2.0 * np.log(2.0))
tmp = np.exp(- (x - position) ** 2 / (2.0 * sigma ** 2))
return 1.0 / np.sqrt(2.0 * np.pi) / sigma * tmp
Fix to give x to lorentzian in lorentzian_unnormalized
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division,
print_function, unicode_literals)
__author__ = "Yuji Ikeda"
import numpy as np
def lorentzian(x, position, width):
return 1.0 / (np.pi * width * (1.0 + ((x - position) / width) ** 2))
def lorentzian_unnormalized(x, position, width, norm):
return norm * lorentzian(x, position, width)
def gaussian(x, position, width):
sigma = width / np.sqrt(2.0 * np.log(2.0))
tmp = np.exp(- (x - position) ** 2 / (2.0 * sigma ** 2))
return 1.0 / np.sqrt(2.0 * np.pi) / sigma * tmp
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division,
print_function, unicode_literals)
__author__ = "Yuji Ikeda"
import numpy as np
def lorentzian(x, position, width):
return 1.0 / (np.pi * width * (1.0 + ((x - position) / width) ** 2))
def lorentzian_unnormalized(x, position, width, norm):
return norm * lorentzian(position, width)
def gaussian(x, position, width):
sigma = width / np.sqrt(2.0 * np.log(2.0))
tmp = np.exp(- (x - position) ** 2 / (2.0 * sigma ** 2))
return 1.0 / np.sqrt(2.0 * np.pi) / sigma * tmp
<commit_msg>Fix to give x to lorentzian in lorentzian_unnormalized<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division,
print_function, unicode_literals)
__author__ = "Yuji Ikeda"
import numpy as np
def lorentzian(x, position, width):
return 1.0 / (np.pi * width * (1.0 + ((x - position) / width) ** 2))
def lorentzian_unnormalized(x, position, width, norm):
return norm * lorentzian(x, position, width)
def gaussian(x, position, width):
sigma = width / np.sqrt(2.0 * np.log(2.0))
tmp = np.exp(- (x - position) ** 2 / (2.0 * sigma ** 2))
return 1.0 / np.sqrt(2.0 * np.pi) / sigma * tmp
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division,
print_function, unicode_literals)
__author__ = "Yuji Ikeda"
import numpy as np
def lorentzian(x, position, width):
return 1.0 / (np.pi * width * (1.0 + ((x - position) / width) ** 2))
def lorentzian_unnormalized(x, position, width, norm):
return norm * lorentzian(position, width)
def gaussian(x, position, width):
sigma = width / np.sqrt(2.0 * np.log(2.0))
tmp = np.exp(- (x - position) ** 2 / (2.0 * sigma ** 2))
return 1.0 / np.sqrt(2.0 * np.pi) / sigma * tmp
Fix to give x to lorentzian in lorentzian_unnormalized#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division,
print_function, unicode_literals)
__author__ = "Yuji Ikeda"
import numpy as np
def lorentzian(x, position, width):
return 1.0 / (np.pi * width * (1.0 + ((x - position) / width) ** 2))
def lorentzian_unnormalized(x, position, width, norm):
return norm * lorentzian(x, position, width)
def gaussian(x, position, width):
sigma = width / np.sqrt(2.0 * np.log(2.0))
tmp = np.exp(- (x - position) ** 2 / (2.0 * sigma ** 2))
return 1.0 / np.sqrt(2.0 * np.pi) / sigma * tmp
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division,
print_function, unicode_literals)
__author__ = "Yuji Ikeda"
import numpy as np
def lorentzian(x, position, width):
return 1.0 / (np.pi * width * (1.0 + ((x - position) / width) ** 2))
def lorentzian_unnormalized(x, position, width, norm):
return norm * lorentzian(position, width)
def gaussian(x, position, width):
sigma = width / np.sqrt(2.0 * np.log(2.0))
tmp = np.exp(- (x - position) ** 2 / (2.0 * sigma ** 2))
return 1.0 / np.sqrt(2.0 * np.pi) / sigma * tmp
<commit_msg>Fix to give x to lorentzian in lorentzian_unnormalized<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division,
print_function, unicode_literals)
__author__ = "Yuji Ikeda"
import numpy as np
def lorentzian(x, position, width):
return 1.0 / (np.pi * width * (1.0 + ((x - position) / width) ** 2))
def lorentzian_unnormalized(x, position, width, norm):
return norm * lorentzian(x, position, width)
def gaussian(x, position, width):
sigma = width / np.sqrt(2.0 * np.log(2.0))
tmp = np.exp(- (x - position) ** 2 / (2.0 * sigma ** 2))
return 1.0 / np.sqrt(2.0 * np.pi) / sigma * tmp
|
f643e1931ce8e0c4d11db4d8b9eb9ac75b683a80
|
tests/test_credentials.py
|
tests/test_credentials.py
|
from pyutrack import Credentials
from tests import PyutrackTest
class CredentialsTests(PyutrackTest):
def test_empty(self):
c = Credentials('root')
self.assertIsNone(c.password)
self.assertIsNone(c.cookies)
|
import json
import keyring
from pyutrack import Credentials
from tests import PyutrackTest
class CredentialsTests(PyutrackTest):
def test_empty(self):
c = Credentials('root')
self.assertIsNone(c.password)
self.assertIsNone(c.cookies)
def test_persistence(self):
c = Credentials('root', 'passwd', {"key": "value"})
c.persist()
self.assertEqual(
keyring.get_password(Credentials.KEYRING_PASSWORD, 'root'), 'passwd'
)
self.assertEqual(
json.loads(keyring.get_password(Credentials.KEYRING_COOKIE, 'root')),
{"key": "value"}
)
|
Add test for credential persistence
|
Add test for credential persistence
|
Python
|
mit
|
alisaifee/pyutrack,alisaifee/pyutrack
|
from pyutrack import Credentials
from tests import PyutrackTest
class CredentialsTests(PyutrackTest):
def test_empty(self):
c = Credentials('root')
self.assertIsNone(c.password)
self.assertIsNone(c.cookies)
Add test for credential persistence
|
import json
import keyring
from pyutrack import Credentials
from tests import PyutrackTest
class CredentialsTests(PyutrackTest):
def test_empty(self):
c = Credentials('root')
self.assertIsNone(c.password)
self.assertIsNone(c.cookies)
def test_persistence(self):
c = Credentials('root', 'passwd', {"key": "value"})
c.persist()
self.assertEqual(
keyring.get_password(Credentials.KEYRING_PASSWORD, 'root'), 'passwd'
)
self.assertEqual(
json.loads(keyring.get_password(Credentials.KEYRING_COOKIE, 'root')),
{"key": "value"}
)
|
<commit_before>from pyutrack import Credentials
from tests import PyutrackTest
class CredentialsTests(PyutrackTest):
def test_empty(self):
c = Credentials('root')
self.assertIsNone(c.password)
self.assertIsNone(c.cookies)
<commit_msg>Add test for credential persistence<commit_after>
|
import json
import keyring
from pyutrack import Credentials
from tests import PyutrackTest
class CredentialsTests(PyutrackTest):
def test_empty(self):
c = Credentials('root')
self.assertIsNone(c.password)
self.assertIsNone(c.cookies)
def test_persistence(self):
c = Credentials('root', 'passwd', {"key": "value"})
c.persist()
self.assertEqual(
keyring.get_password(Credentials.KEYRING_PASSWORD, 'root'), 'passwd'
)
self.assertEqual(
json.loads(keyring.get_password(Credentials.KEYRING_COOKIE, 'root')),
{"key": "value"}
)
|
from pyutrack import Credentials
from tests import PyutrackTest
class CredentialsTests(PyutrackTest):
def test_empty(self):
c = Credentials('root')
self.assertIsNone(c.password)
self.assertIsNone(c.cookies)
Add test for credential persistenceimport json
import keyring
from pyutrack import Credentials
from tests import PyutrackTest
class CredentialsTests(PyutrackTest):
def test_empty(self):
c = Credentials('root')
self.assertIsNone(c.password)
self.assertIsNone(c.cookies)
def test_persistence(self):
c = Credentials('root', 'passwd', {"key": "value"})
c.persist()
self.assertEqual(
keyring.get_password(Credentials.KEYRING_PASSWORD, 'root'), 'passwd'
)
self.assertEqual(
json.loads(keyring.get_password(Credentials.KEYRING_COOKIE, 'root')),
{"key": "value"}
)
|
<commit_before>from pyutrack import Credentials
from tests import PyutrackTest
class CredentialsTests(PyutrackTest):
def test_empty(self):
c = Credentials('root')
self.assertIsNone(c.password)
self.assertIsNone(c.cookies)
<commit_msg>Add test for credential persistence<commit_after>import json
import keyring
from pyutrack import Credentials
from tests import PyutrackTest
class CredentialsTests(PyutrackTest):
def test_empty(self):
c = Credentials('root')
self.assertIsNone(c.password)
self.assertIsNone(c.cookies)
def test_persistence(self):
c = Credentials('root', 'passwd', {"key": "value"})
c.persist()
self.assertEqual(
keyring.get_password(Credentials.KEYRING_PASSWORD, 'root'), 'passwd'
)
self.assertEqual(
json.loads(keyring.get_password(Credentials.KEYRING_COOKIE, 'root')),
{"key": "value"}
)
|
cf748e2bc4f28a11c79555f2e6c3d1f89d027709
|
tests/test_memory_leak.py
|
tests/test_memory_leak.py
|
import resource
import pytest
from .models import TestModel as DirtyMixinModel
pytestmark = pytest.mark.django_db
def test_rss_usage():
DirtyMixinModel()
rss_1 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
for _ in range(1000):
DirtyMixinModel()
rss_2 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
assert rss_2 == rss_1, 'There is a memory leak!'
|
import gc
import resource
import pytest
from .models import TestModel as DirtyMixinModel
pytestmark = pytest.mark.django_db
def test_rss_usage():
DirtyMixinModel()
gc.collect()
rss_1 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
for _ in range(1000):
DirtyMixinModel()
gc.collect()
rss_2 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
assert rss_2 == rss_1, 'There is a memory leak!'
|
Call gc.collect() before measuring memory usage.
|
Call gc.collect() before measuring memory usage.
|
Python
|
bsd-3-clause
|
romgar/django-dirtyfields,smn/django-dirtyfields
|
import resource
import pytest
from .models import TestModel as DirtyMixinModel
pytestmark = pytest.mark.django_db
def test_rss_usage():
DirtyMixinModel()
rss_1 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
for _ in range(1000):
DirtyMixinModel()
rss_2 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
assert rss_2 == rss_1, 'There is a memory leak!'
Call gc.collect() before measuring memory usage.
|
import gc
import resource
import pytest
from .models import TestModel as DirtyMixinModel
pytestmark = pytest.mark.django_db
def test_rss_usage():
DirtyMixinModel()
gc.collect()
rss_1 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
for _ in range(1000):
DirtyMixinModel()
gc.collect()
rss_2 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
assert rss_2 == rss_1, 'There is a memory leak!'
|
<commit_before>import resource
import pytest
from .models import TestModel as DirtyMixinModel
pytestmark = pytest.mark.django_db
def test_rss_usage():
DirtyMixinModel()
rss_1 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
for _ in range(1000):
DirtyMixinModel()
rss_2 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
assert rss_2 == rss_1, 'There is a memory leak!'
<commit_msg>Call gc.collect() before measuring memory usage.<commit_after>
|
import gc
import resource
import pytest
from .models import TestModel as DirtyMixinModel
pytestmark = pytest.mark.django_db
def test_rss_usage():
DirtyMixinModel()
gc.collect()
rss_1 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
for _ in range(1000):
DirtyMixinModel()
gc.collect()
rss_2 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
assert rss_2 == rss_1, 'There is a memory leak!'
|
import resource
import pytest
from .models import TestModel as DirtyMixinModel
pytestmark = pytest.mark.django_db
def test_rss_usage():
DirtyMixinModel()
rss_1 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
for _ in range(1000):
DirtyMixinModel()
rss_2 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
assert rss_2 == rss_1, 'There is a memory leak!'
Call gc.collect() before measuring memory usage.import gc
import resource
import pytest
from .models import TestModel as DirtyMixinModel
pytestmark = pytest.mark.django_db
def test_rss_usage():
DirtyMixinModel()
gc.collect()
rss_1 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
for _ in range(1000):
DirtyMixinModel()
gc.collect()
rss_2 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
assert rss_2 == rss_1, 'There is a memory leak!'
|
<commit_before>import resource
import pytest
from .models import TestModel as DirtyMixinModel
pytestmark = pytest.mark.django_db
def test_rss_usage():
DirtyMixinModel()
rss_1 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
for _ in range(1000):
DirtyMixinModel()
rss_2 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
assert rss_2 == rss_1, 'There is a memory leak!'
<commit_msg>Call gc.collect() before measuring memory usage.<commit_after>import gc
import resource
import pytest
from .models import TestModel as DirtyMixinModel
pytestmark = pytest.mark.django_db
def test_rss_usage():
DirtyMixinModel()
gc.collect()
rss_1 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
for _ in range(1000):
DirtyMixinModel()
gc.collect()
rss_2 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
assert rss_2 == rss_1, 'There is a memory leak!'
|
f15e2f9457fdd6ccc32ac2d8bd8d2c361f219e97
|
tests/test_bib.py
|
tests/test_bib.py
|
"""
Testing module for bib.py
"""
import pytest
from bibtextomd.bib import main, reorder
|
"""
Testing module for bib.py
"""
import pytest
from bibtextomd.bib import main, reorder
class TestReorder():
def test_single_author_good(self):
names = 'Author, First A.'
n = reorder(names, 'F.A. Author')
assert n == '**F.A. Author**'
def test_two_authors_good(self):
names = 'Author, First A. and Name, Second N.'
n = reorder(names, 'F.A. Author')
assert n == '**F.A. Author** and S.N. Name'
def test_three_authors_good(self):
names = 'Author, First A. and Name, Second N. and Name, Unicode C.'
n = reorder(names, 'F.A. Author')
assert n == '**F.A. Author**, S.N. Name, and U.C. Name'
def test_unicode_good(self):
names = 'Namé, Unicode C.'
n = reorder(names, 'U.C. Namé')
assert n == '**U.C. Namé**'
def test_missing_name(self):
names = 'Author, First A.'
with pytest.warns(UserWarning):
reorder(names, 'Missing Author')
def test_no_highlighted_name(self):
names = 'Author, First A.'
n = reorder(names, None)
assert n == 'F.A. Author'
def test_hyphenated_name(self):
names = 'Name, Hypen-Ated'
n = reorder(names, None)
assert n == 'H.A. Name'
|
Add several tests for the name highlighting function
|
Add several tests for the name highlighting function
|
Python
|
mit
|
bryanwweber/bibtextomd
|
"""
Testing module for bib.py
"""
import pytest
from bibtextomd.bib import main, reorder
Add several tests for the name highlighting function
|
"""
Testing module for bib.py
"""
import pytest
from bibtextomd.bib import main, reorder
class TestReorder():
def test_single_author_good(self):
names = 'Author, First A.'
n = reorder(names, 'F.A. Author')
assert n == '**F.A. Author**'
def test_two_authors_good(self):
names = 'Author, First A. and Name, Second N.'
n = reorder(names, 'F.A. Author')
assert n == '**F.A. Author** and S.N. Name'
def test_three_authors_good(self):
names = 'Author, First A. and Name, Second N. and Name, Unicode C.'
n = reorder(names, 'F.A. Author')
assert n == '**F.A. Author**, S.N. Name, and U.C. Name'
def test_unicode_good(self):
names = 'Namé, Unicode C.'
n = reorder(names, 'U.C. Namé')
assert n == '**U.C. Namé**'
def test_missing_name(self):
names = 'Author, First A.'
with pytest.warns(UserWarning):
reorder(names, 'Missing Author')
def test_no_highlighted_name(self):
names = 'Author, First A.'
n = reorder(names, None)
assert n == 'F.A. Author'
def test_hyphenated_name(self):
names = 'Name, Hypen-Ated'
n = reorder(names, None)
assert n == 'H.A. Name'
|
<commit_before>"""
Testing module for bib.py
"""
import pytest
from bibtextomd.bib import main, reorder
<commit_msg>Add several tests for the name highlighting function<commit_after>
|
"""
Testing module for bib.py
"""
import pytest
from bibtextomd.bib import main, reorder
class TestReorder():
def test_single_author_good(self):
names = 'Author, First A.'
n = reorder(names, 'F.A. Author')
assert n == '**F.A. Author**'
def test_two_authors_good(self):
names = 'Author, First A. and Name, Second N.'
n = reorder(names, 'F.A. Author')
assert n == '**F.A. Author** and S.N. Name'
def test_three_authors_good(self):
names = 'Author, First A. and Name, Second N. and Name, Unicode C.'
n = reorder(names, 'F.A. Author')
assert n == '**F.A. Author**, S.N. Name, and U.C. Name'
def test_unicode_good(self):
names = 'Namé, Unicode C.'
n = reorder(names, 'U.C. Namé')
assert n == '**U.C. Namé**'
def test_missing_name(self):
names = 'Author, First A.'
with pytest.warns(UserWarning):
reorder(names, 'Missing Author')
def test_no_highlighted_name(self):
names = 'Author, First A.'
n = reorder(names, None)
assert n == 'F.A. Author'
def test_hyphenated_name(self):
names = 'Name, Hypen-Ated'
n = reorder(names, None)
assert n == 'H.A. Name'
|
"""
Testing module for bib.py
"""
import pytest
from bibtextomd.bib import main, reorder
Add several tests for the name highlighting function"""
Testing module for bib.py
"""
import pytest
from bibtextomd.bib import main, reorder
class TestReorder():
def test_single_author_good(self):
names = 'Author, First A.'
n = reorder(names, 'F.A. Author')
assert n == '**F.A. Author**'
def test_two_authors_good(self):
names = 'Author, First A. and Name, Second N.'
n = reorder(names, 'F.A. Author')
assert n == '**F.A. Author** and S.N. Name'
def test_three_authors_good(self):
names = 'Author, First A. and Name, Second N. and Name, Unicode C.'
n = reorder(names, 'F.A. Author')
assert n == '**F.A. Author**, S.N. Name, and U.C. Name'
def test_unicode_good(self):
names = 'Namé, Unicode C.'
n = reorder(names, 'U.C. Namé')
assert n == '**U.C. Namé**'
def test_missing_name(self):
names = 'Author, First A.'
with pytest.warns(UserWarning):
reorder(names, 'Missing Author')
def test_no_highlighted_name(self):
names = 'Author, First A.'
n = reorder(names, None)
assert n == 'F.A. Author'
def test_hyphenated_name(self):
names = 'Name, Hypen-Ated'
n = reorder(names, None)
assert n == 'H.A. Name'
|
<commit_before>"""
Testing module for bib.py
"""
import pytest
from bibtextomd.bib import main, reorder
<commit_msg>Add several tests for the name highlighting function<commit_after>"""
Testing module for bib.py
"""
import pytest
from bibtextomd.bib import main, reorder
class TestReorder():
def test_single_author_good(self):
names = 'Author, First A.'
n = reorder(names, 'F.A. Author')
assert n == '**F.A. Author**'
def test_two_authors_good(self):
names = 'Author, First A. and Name, Second N.'
n = reorder(names, 'F.A. Author')
assert n == '**F.A. Author** and S.N. Name'
def test_three_authors_good(self):
names = 'Author, First A. and Name, Second N. and Name, Unicode C.'
n = reorder(names, 'F.A. Author')
assert n == '**F.A. Author**, S.N. Name, and U.C. Name'
def test_unicode_good(self):
names = 'Namé, Unicode C.'
n = reorder(names, 'U.C. Namé')
assert n == '**U.C. Namé**'
def test_missing_name(self):
names = 'Author, First A.'
with pytest.warns(UserWarning):
reorder(names, 'Missing Author')
def test_no_highlighted_name(self):
names = 'Author, First A.'
n = reorder(names, None)
assert n == 'F.A. Author'
def test_hyphenated_name(self):
names = 'Name, Hypen-Ated'
n = reorder(names, None)
assert n == 'H.A. Name'
|
71646a47c1d9e47c4920fefe754b648c270eace4
|
tests/test_cli.py
|
tests/test_cli.py
|
import os
import shutil
from click.testing import CliRunner
from gypsy.scripts.cli import cli
from conftest import DATA_DIR
def remove_path_if_exists(*paths):
for path in paths:
try:
if os.path.isdir(path):
shutil.rmtree(path)
else:
os.remove(path)
except OSError:
pass
def test_prep():
input_data_path = os.path.join(DATA_DIR, 'raw_standtable.csv')
expected_output_path = os.path.splitext(input_data_path)[0] + '_prepped.csv'
remove_path_if_exists(expected_output_path)
runner = CliRunner()
result = runner.invoke(cli, ['prep', input_data_path])
assert result.exit_code == 0
assert result.output == ""
assert os.path.exists(expected_output_path)
def test_simulate():
data_path = os.path.join(DATA_DIR, 'raw_standtable_prepped.csv')
expected_output_path = os.path.join(os.getcwd(), 'gypsy-output')
remove_path_if_exists(expected_output_path)
runner = CliRunner()
result = runner.invoke(cli, ['simulate', data_path])
assert result.exit_code == 0
assert result.output == ""
assert os.path.exists(expected_output_path)
|
import os
import shutil
from click.testing import CliRunner
from gypsy.scripts.cli import cli
from conftest import DATA_DIR
def remove_path_if_exists(*paths): #pylint: disable=missing-docstring
for path in paths:
try:
if os.path.isdir(path):
shutil.rmtree(path)
else:
os.remove(path)
except OSError:
pass
def test_prep():
input_data_path = os.path.join(DATA_DIR, 'raw_standtable.csv')
expected_output_path = os.path.splitext(input_data_path)[0] + '_prepped.csv'
remove_path_if_exists(expected_output_path)
runner = CliRunner()
result = runner.invoke(cli, ['prep', input_data_path])
assert result.exit_code == 0
assert result.output == ""
assert os.path.exists(expected_output_path)
def test_simulate():
data_path = os.path.join(DATA_DIR, 'raw_standtable_prepped.csv')
expected_output_path = os.path.join(os.getcwd(), 'gypsy-output')
remove_path_if_exists(expected_output_path)
runner = CliRunner()
result = runner.invoke(cli, ['simulate', data_path])
assert result.exit_code == 0
assert result.output == ""
assert os.path.exists(expected_output_path)
def test_cli_config_file(): #pylint: disable=missing-docstring
runner = CliRunner()
result = runner.invoke(cli, ['--config-file', 'configfilepath.txt'])
assert result.exit_code == 0
|
Add test that cli accepts config file
|
Add test that cli accepts config file
|
Python
|
mit
|
tesera/pygypsy,tesera/pygypsy
|
import os
import shutil
from click.testing import CliRunner
from gypsy.scripts.cli import cli
from conftest import DATA_DIR
def remove_path_if_exists(*paths):
for path in paths:
try:
if os.path.isdir(path):
shutil.rmtree(path)
else:
os.remove(path)
except OSError:
pass
def test_prep():
input_data_path = os.path.join(DATA_DIR, 'raw_standtable.csv')
expected_output_path = os.path.splitext(input_data_path)[0] + '_prepped.csv'
remove_path_if_exists(expected_output_path)
runner = CliRunner()
result = runner.invoke(cli, ['prep', input_data_path])
assert result.exit_code == 0
assert result.output == ""
assert os.path.exists(expected_output_path)
def test_simulate():
data_path = os.path.join(DATA_DIR, 'raw_standtable_prepped.csv')
expected_output_path = os.path.join(os.getcwd(), 'gypsy-output')
remove_path_if_exists(expected_output_path)
runner = CliRunner()
result = runner.invoke(cli, ['simulate', data_path])
assert result.exit_code == 0
assert result.output == ""
assert os.path.exists(expected_output_path)
Add test that cli accepts config file
|
import os
import shutil
from click.testing import CliRunner
from gypsy.scripts.cli import cli
from conftest import DATA_DIR
def remove_path_if_exists(*paths): #pylint: disable=missing-docstring
for path in paths:
try:
if os.path.isdir(path):
shutil.rmtree(path)
else:
os.remove(path)
except OSError:
pass
def test_prep():
input_data_path = os.path.join(DATA_DIR, 'raw_standtable.csv')
expected_output_path = os.path.splitext(input_data_path)[0] + '_prepped.csv'
remove_path_if_exists(expected_output_path)
runner = CliRunner()
result = runner.invoke(cli, ['prep', input_data_path])
assert result.exit_code == 0
assert result.output == ""
assert os.path.exists(expected_output_path)
def test_simulate():
data_path = os.path.join(DATA_DIR, 'raw_standtable_prepped.csv')
expected_output_path = os.path.join(os.getcwd(), 'gypsy-output')
remove_path_if_exists(expected_output_path)
runner = CliRunner()
result = runner.invoke(cli, ['simulate', data_path])
assert result.exit_code == 0
assert result.output == ""
assert os.path.exists(expected_output_path)
def test_cli_config_file(): #pylint: disable=missing-docstring
runner = CliRunner()
result = runner.invoke(cli, ['--config-file', 'configfilepath.txt'])
assert result.exit_code == 0
|
<commit_before>import os
import shutil
from click.testing import CliRunner
from gypsy.scripts.cli import cli
from conftest import DATA_DIR
def remove_path_if_exists(*paths):
for path in paths:
try:
if os.path.isdir(path):
shutil.rmtree(path)
else:
os.remove(path)
except OSError:
pass
def test_prep():
input_data_path = os.path.join(DATA_DIR, 'raw_standtable.csv')
expected_output_path = os.path.splitext(input_data_path)[0] + '_prepped.csv'
remove_path_if_exists(expected_output_path)
runner = CliRunner()
result = runner.invoke(cli, ['prep', input_data_path])
assert result.exit_code == 0
assert result.output == ""
assert os.path.exists(expected_output_path)
def test_simulate():
data_path = os.path.join(DATA_DIR, 'raw_standtable_prepped.csv')
expected_output_path = os.path.join(os.getcwd(), 'gypsy-output')
remove_path_if_exists(expected_output_path)
runner = CliRunner()
result = runner.invoke(cli, ['simulate', data_path])
assert result.exit_code == 0
assert result.output == ""
assert os.path.exists(expected_output_path)
<commit_msg>Add test that cli accepts config file<commit_after>
|
import os
import shutil
from click.testing import CliRunner
from gypsy.scripts.cli import cli
from conftest import DATA_DIR
def remove_path_if_exists(*paths): #pylint: disable=missing-docstring
for path in paths:
try:
if os.path.isdir(path):
shutil.rmtree(path)
else:
os.remove(path)
except OSError:
pass
def test_prep():
input_data_path = os.path.join(DATA_DIR, 'raw_standtable.csv')
expected_output_path = os.path.splitext(input_data_path)[0] + '_prepped.csv'
remove_path_if_exists(expected_output_path)
runner = CliRunner()
result = runner.invoke(cli, ['prep', input_data_path])
assert result.exit_code == 0
assert result.output == ""
assert os.path.exists(expected_output_path)
def test_simulate():
data_path = os.path.join(DATA_DIR, 'raw_standtable_prepped.csv')
expected_output_path = os.path.join(os.getcwd(), 'gypsy-output')
remove_path_if_exists(expected_output_path)
runner = CliRunner()
result = runner.invoke(cli, ['simulate', data_path])
assert result.exit_code == 0
assert result.output == ""
assert os.path.exists(expected_output_path)
def test_cli_config_file(): #pylint: disable=missing-docstring
runner = CliRunner()
result = runner.invoke(cli, ['--config-file', 'configfilepath.txt'])
assert result.exit_code == 0
|
import os
import shutil
from click.testing import CliRunner
from gypsy.scripts.cli import cli
from conftest import DATA_DIR
def remove_path_if_exists(*paths):
for path in paths:
try:
if os.path.isdir(path):
shutil.rmtree(path)
else:
os.remove(path)
except OSError:
pass
def test_prep():
input_data_path = os.path.join(DATA_DIR, 'raw_standtable.csv')
expected_output_path = os.path.splitext(input_data_path)[0] + '_prepped.csv'
remove_path_if_exists(expected_output_path)
runner = CliRunner()
result = runner.invoke(cli, ['prep', input_data_path])
assert result.exit_code == 0
assert result.output == ""
assert os.path.exists(expected_output_path)
def test_simulate():
data_path = os.path.join(DATA_DIR, 'raw_standtable_prepped.csv')
expected_output_path = os.path.join(os.getcwd(), 'gypsy-output')
remove_path_if_exists(expected_output_path)
runner = CliRunner()
result = runner.invoke(cli, ['simulate', data_path])
assert result.exit_code == 0
assert result.output == ""
assert os.path.exists(expected_output_path)
Add test that cli accepts config fileimport os
import shutil
from click.testing import CliRunner
from gypsy.scripts.cli import cli
from conftest import DATA_DIR
def remove_path_if_exists(*paths): #pylint: disable=missing-docstring
for path in paths:
try:
if os.path.isdir(path):
shutil.rmtree(path)
else:
os.remove(path)
except OSError:
pass
def test_prep():
input_data_path = os.path.join(DATA_DIR, 'raw_standtable.csv')
expected_output_path = os.path.splitext(input_data_path)[0] + '_prepped.csv'
remove_path_if_exists(expected_output_path)
runner = CliRunner()
result = runner.invoke(cli, ['prep', input_data_path])
assert result.exit_code == 0
assert result.output == ""
assert os.path.exists(expected_output_path)
def test_simulate():
data_path = os.path.join(DATA_DIR, 'raw_standtable_prepped.csv')
expected_output_path = os.path.join(os.getcwd(), 'gypsy-output')
remove_path_if_exists(expected_output_path)
runner = CliRunner()
result = runner.invoke(cli, ['simulate', data_path])
assert result.exit_code == 0
assert result.output == ""
assert os.path.exists(expected_output_path)
def test_cli_config_file(): #pylint: disable=missing-docstring
runner = CliRunner()
result = runner.invoke(cli, ['--config-file', 'configfilepath.txt'])
assert result.exit_code == 0
|
<commit_before>import os
import shutil
from click.testing import CliRunner
from gypsy.scripts.cli import cli
from conftest import DATA_DIR
def remove_path_if_exists(*paths):
for path in paths:
try:
if os.path.isdir(path):
shutil.rmtree(path)
else:
os.remove(path)
except OSError:
pass
def test_prep():
input_data_path = os.path.join(DATA_DIR, 'raw_standtable.csv')
expected_output_path = os.path.splitext(input_data_path)[0] + '_prepped.csv'
remove_path_if_exists(expected_output_path)
runner = CliRunner()
result = runner.invoke(cli, ['prep', input_data_path])
assert result.exit_code == 0
assert result.output == ""
assert os.path.exists(expected_output_path)
def test_simulate():
data_path = os.path.join(DATA_DIR, 'raw_standtable_prepped.csv')
expected_output_path = os.path.join(os.getcwd(), 'gypsy-output')
remove_path_if_exists(expected_output_path)
runner = CliRunner()
result = runner.invoke(cli, ['simulate', data_path])
assert result.exit_code == 0
assert result.output == ""
assert os.path.exists(expected_output_path)
<commit_msg>Add test that cli accepts config file<commit_after>import os
import shutil
from click.testing import CliRunner
from gypsy.scripts.cli import cli
from conftest import DATA_DIR
def remove_path_if_exists(*paths): #pylint: disable=missing-docstring
for path in paths:
try:
if os.path.isdir(path):
shutil.rmtree(path)
else:
os.remove(path)
except OSError:
pass
def test_prep():
input_data_path = os.path.join(DATA_DIR, 'raw_standtable.csv')
expected_output_path = os.path.splitext(input_data_path)[0] + '_prepped.csv'
remove_path_if_exists(expected_output_path)
runner = CliRunner()
result = runner.invoke(cli, ['prep', input_data_path])
assert result.exit_code == 0
assert result.output == ""
assert os.path.exists(expected_output_path)
def test_simulate():
data_path = os.path.join(DATA_DIR, 'raw_standtable_prepped.csv')
expected_output_path = os.path.join(os.getcwd(), 'gypsy-output')
remove_path_if_exists(expected_output_path)
runner = CliRunner()
result = runner.invoke(cli, ['simulate', data_path])
assert result.exit_code == 0
assert result.output == ""
assert os.path.exists(expected_output_path)
def test_cli_config_file(): #pylint: disable=missing-docstring
runner = CliRunner()
result = runner.invoke(cli, ['--config-file', 'configfilepath.txt'])
assert result.exit_code == 0
|
f56689c862629da9e0a71918a6585796430a407e
|
__init__.py
|
__init__.py
|
# The version of Review Board.
#
# This is in the format of:
#
# (Major, Minor, Micro, alpha/beta/rc/final, Release Number, Released)
#
VERSION = (1, 1, 0, 'alpha', 1, False)
def get_version_string():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
if VERSION[3] == 'rc':
version += ' RC%s' % VERSION[4]
else:
version += ' %s %s' % (release_tag, VERSION[4])
if not is_release():
version += " (dev)"
return version
def get_package_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
version += '%s%s' % (VERSION[3], VERSION[4])
return version
def is_release():
return VERSION[5]
|
# The version of Review Board.
#
# This is in the format of:
#
# (Major, Minor, Micro, alpha/beta/rc/final, Release Number, Released)
#
VERSION = (1, 1, 0, 'alpha', 1, False)
def get_version_string():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
if VERSION[3] == 'rc':
version += ' RC%s' % VERSION[4]
else:
version += ' %s %s' % (VERSION[3], VERSION[4])
if not is_release():
version += " (dev)"
return version
def get_package_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
version += '%s%s' % (VERSION[3], VERSION[4])
return version
def is_release():
return VERSION[5]
|
Fix the version string generation when not a final or RC release.
|
Fix the version string generation when not a final or RC release.
|
Python
|
mit
|
brennie/reviewboard,1tush/reviewboard,chazy/reviewboard,bkochendorfer/reviewboard,bkochendorfer/reviewboard,KnowNo/reviewboard,bkochendorfer/reviewboard,chazy/reviewboard,chazy/reviewboard,1tush/reviewboard,KnowNo/reviewboard,reviewboard/reviewboard,KnowNo/reviewboard,Khan/reviewboard,chazy/reviewboard,chipx86/reviewboard,davidt/reviewboard,beol/reviewboard,chipx86/reviewboard,atagar/ReviewBoard,beol/reviewboard,bkochendorfer/reviewboard,brennie/reviewboard,brennie/reviewboard,custode/reviewboard,atagar/ReviewBoard,Khan/reviewboard,chipx86/reviewboard,Khan/reviewboard,sgallagher/reviewboard,reviewboard/reviewboard,1tush/reviewboard,1tush/reviewboard,davidt/reviewboard,1tush/reviewboard,reviewboard/reviewboard,Khan/reviewboard,davidt/reviewboard,Khan/reviewboard,atagar/ReviewBoard,chipx86/reviewboard,sgallagher/reviewboard,chazy/reviewboard,chazy/reviewboard,chazy/reviewboard,atagar/ReviewBoard,custode/reviewboard,chazy/reviewboard,atagar/ReviewBoard,sgallagher/reviewboard,davidt/reviewboard,atagar/ReviewBoard,1tush/reviewboard,reviewboard/reviewboard,beol/reviewboard,Khan/reviewboard,chazy/reviewboard,Khan/reviewboard,custode/reviewboard,beol/reviewboard,1tush/reviewboard,custode/reviewboard,sgallagher/reviewboard,Khan/reviewboard,Khan/reviewboard,1tush/reviewboard,1tush/reviewboard,brennie/reviewboard,atagar/ReviewBoard,atagar/ReviewBoard,atagar/ReviewBoard,KnowNo/reviewboard
|
# The version of Review Board.
#
# This is in the format of:
#
# (Major, Minor, Micro, alpha/beta/rc/final, Release Number, Released)
#
VERSION = (1, 1, 0, 'alpha', 1, False)
def get_version_string():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
if VERSION[3] == 'rc':
version += ' RC%s' % VERSION[4]
else:
version += ' %s %s' % (release_tag, VERSION[4])
if not is_release():
version += " (dev)"
return version
def get_package_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
version += '%s%s' % (VERSION[3], VERSION[4])
return version
def is_release():
return VERSION[5]
Fix the version string generation when not a final or RC release.
|
# The version of Review Board.
#
# This is in the format of:
#
# (Major, Minor, Micro, alpha/beta/rc/final, Release Number, Released)
#
VERSION = (1, 1, 0, 'alpha', 1, False)
def get_version_string():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
if VERSION[3] == 'rc':
version += ' RC%s' % VERSION[4]
else:
version += ' %s %s' % (VERSION[3], VERSION[4])
if not is_release():
version += " (dev)"
return version
def get_package_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
version += '%s%s' % (VERSION[3], VERSION[4])
return version
def is_release():
return VERSION[5]
|
<commit_before># The version of Review Board.
#
# This is in the format of:
#
# (Major, Minor, Micro, alpha/beta/rc/final, Release Number, Released)
#
VERSION = (1, 1, 0, 'alpha', 1, False)
def get_version_string():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
if VERSION[3] == 'rc':
version += ' RC%s' % VERSION[4]
else:
version += ' %s %s' % (release_tag, VERSION[4])
if not is_release():
version += " (dev)"
return version
def get_package_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
version += '%s%s' % (VERSION[3], VERSION[4])
return version
def is_release():
return VERSION[5]
<commit_msg>Fix the version string generation when not a final or RC release.<commit_after>
|
# The version of Review Board.
#
# This is in the format of:
#
# (Major, Minor, Micro, alpha/beta/rc/final, Release Number, Released)
#
VERSION = (1, 1, 0, 'alpha', 1, False)
def get_version_string():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
if VERSION[3] == 'rc':
version += ' RC%s' % VERSION[4]
else:
version += ' %s %s' % (VERSION[3], VERSION[4])
if not is_release():
version += " (dev)"
return version
def get_package_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
version += '%s%s' % (VERSION[3], VERSION[4])
return version
def is_release():
return VERSION[5]
|
# The version of Review Board.
#
# This is in the format of:
#
# (Major, Minor, Micro, alpha/beta/rc/final, Release Number, Released)
#
VERSION = (1, 1, 0, 'alpha', 1, False)
def get_version_string():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
if VERSION[3] == 'rc':
version += ' RC%s' % VERSION[4]
else:
version += ' %s %s' % (release_tag, VERSION[4])
if not is_release():
version += " (dev)"
return version
def get_package_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
version += '%s%s' % (VERSION[3], VERSION[4])
return version
def is_release():
return VERSION[5]
Fix the version string generation when not a final or RC release.# The version of Review Board.
#
# This is in the format of:
#
# (Major, Minor, Micro, alpha/beta/rc/final, Release Number, Released)
#
VERSION = (1, 1, 0, 'alpha', 1, False)
def get_version_string():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
if VERSION[3] == 'rc':
version += ' RC%s' % VERSION[4]
else:
version += ' %s %s' % (VERSION[3], VERSION[4])
if not is_release():
version += " (dev)"
return version
def get_package_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
version += '%s%s' % (VERSION[3], VERSION[4])
return version
def is_release():
return VERSION[5]
|
<commit_before># The version of Review Board.
#
# This is in the format of:
#
# (Major, Minor, Micro, alpha/beta/rc/final, Release Number, Released)
#
VERSION = (1, 1, 0, 'alpha', 1, False)
def get_version_string():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
if VERSION[3] == 'rc':
version += ' RC%s' % VERSION[4]
else:
version += ' %s %s' % (release_tag, VERSION[4])
if not is_release():
version += " (dev)"
return version
def get_package_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
version += '%s%s' % (VERSION[3], VERSION[4])
return version
def is_release():
return VERSION[5]
<commit_msg>Fix the version string generation when not a final or RC release.<commit_after># The version of Review Board.
#
# This is in the format of:
#
# (Major, Minor, Micro, alpha/beta/rc/final, Release Number, Released)
#
VERSION = (1, 1, 0, 'alpha', 1, False)
def get_version_string():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
if VERSION[3] == 'rc':
version += ' RC%s' % VERSION[4]
else:
version += ' %s %s' % (VERSION[3], VERSION[4])
if not is_release():
version += " (dev)"
return version
def get_package_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
version += '%s%s' % (VERSION[3], VERSION[4])
return version
def is_release():
return VERSION[5]
|
a7ed3ae2aedcb74a351b850dfa035adda08ea05b
|
__init__.py
|
__init__.py
|
#Shoopdawoop
from . import CuraEngineBackend
from UM.Preferences import Preferences
def getMetaData():
return { "name": "CuraEngine Backend", "type": "Backend" }
def register(app):
Preferences.addPreference("BackendLocation","../PinkUnicornEngine/CuraEngine")
return CuraEngineBackend.CuraEngineBackend()
|
#Shoopdawoop
from . import CuraEngineBackend
from UM.Preferences import Preferences
def getMetaData():
return {
'type': 'backend',
'plugin': {
'name': "CuraEngine Backend"
}
}
def register(app):
Preferences.addPreference("BackendLocation","../PinkUnicornEngine/CuraEngine")
return CuraEngineBackend.CuraEngineBackend()
|
Update plugin metadata to the new format
|
Update plugin metadata to the new format
|
Python
|
agpl-3.0
|
ad1217/Cura,derekhe/Cura,hmflash/Cura,fieldOfView/Cura,totalretribution/Cura,senttech/Cura,bq/Ultimaker-Cura,DeskboxBrazil/Cura,totalretribution/Cura,quillford/Cura,ynotstartups/Wanhao,lo0ol/Ultimaker-Cura,Curahelper/Cura,hmflash/Cura,derekhe/Cura,quillford/Cura,lo0ol/Ultimaker-Cura,fxtentacle/Cura,ad1217/Cura,DeskboxBrazil/Cura,fieldOfView/Cura,ynotstartups/Wanhao,fxtentacle/Cura,senttech/Cura,markwal/Cura,markwal/Cura,bq/Ultimaker-Cura,Curahelper/Cura
|
#Shoopdawoop
from . import CuraEngineBackend
from UM.Preferences import Preferences
def getMetaData():
return { "name": "CuraEngine Backend", "type": "Backend" }
def register(app):
Preferences.addPreference("BackendLocation","../PinkUnicornEngine/CuraEngine")
return CuraEngineBackend.CuraEngineBackend()
Update plugin metadata to the new format
|
#Shoopdawoop
from . import CuraEngineBackend
from UM.Preferences import Preferences
def getMetaData():
return {
'type': 'backend',
'plugin': {
'name': "CuraEngine Backend"
}
}
def register(app):
Preferences.addPreference("BackendLocation","../PinkUnicornEngine/CuraEngine")
return CuraEngineBackend.CuraEngineBackend()
|
<commit_before>#Shoopdawoop
from . import CuraEngineBackend
from UM.Preferences import Preferences
def getMetaData():
return { "name": "CuraEngine Backend", "type": "Backend" }
def register(app):
Preferences.addPreference("BackendLocation","../PinkUnicornEngine/CuraEngine")
return CuraEngineBackend.CuraEngineBackend()
<commit_msg>Update plugin metadata to the new format<commit_after>
|
#Shoopdawoop
from . import CuraEngineBackend
from UM.Preferences import Preferences
def getMetaData():
return {
'type': 'backend',
'plugin': {
'name': "CuraEngine Backend"
}
}
def register(app):
Preferences.addPreference("BackendLocation","../PinkUnicornEngine/CuraEngine")
return CuraEngineBackend.CuraEngineBackend()
|
#Shoopdawoop
from . import CuraEngineBackend
from UM.Preferences import Preferences
def getMetaData():
return { "name": "CuraEngine Backend", "type": "Backend" }
def register(app):
Preferences.addPreference("BackendLocation","../PinkUnicornEngine/CuraEngine")
return CuraEngineBackend.CuraEngineBackend()
Update plugin metadata to the new format#Shoopdawoop
from . import CuraEngineBackend
from UM.Preferences import Preferences
def getMetaData():
return {
'type': 'backend',
'plugin': {
'name': "CuraEngine Backend"
}
}
def register(app):
Preferences.addPreference("BackendLocation","../PinkUnicornEngine/CuraEngine")
return CuraEngineBackend.CuraEngineBackend()
|
<commit_before>#Shoopdawoop
from . import CuraEngineBackend
from UM.Preferences import Preferences
def getMetaData():
return { "name": "CuraEngine Backend", "type": "Backend" }
def register(app):
Preferences.addPreference("BackendLocation","../PinkUnicornEngine/CuraEngine")
return CuraEngineBackend.CuraEngineBackend()
<commit_msg>Update plugin metadata to the new format<commit_after>#Shoopdawoop
from . import CuraEngineBackend
from UM.Preferences import Preferences
def getMetaData():
return {
'type': 'backend',
'plugin': {
'name': "CuraEngine Backend"
}
}
def register(app):
Preferences.addPreference("BackendLocation","../PinkUnicornEngine/CuraEngine")
return CuraEngineBackend.CuraEngineBackend()
|
71c2891b14beb8c83bccfeda952b0b15edc65256
|
setuptools/tests/test_test.py
|
setuptools/tests/test_test.py
|
import pytest
from jaraco import path
from setuptools.command.test import test
from setuptools.dist import Distribution
from .textwrap import DALS
@pytest.fixture
def quiet_log():
# Running some of the other tests will automatically
# change the log level to info, messing our output.
import distutils.log
distutils.log.set_verbosity(0)
@pytest.mark.usefixtures('tmpdir_cwd', 'quiet_log')
def test_tests_are_run_once(capfd):
params = dict(
name='foo',
packages=['dummy'],
)
files = {
'setup.py':
'from setuptools import setup; setup('
+ ','.join(f'{name}={params[name]!r}' for name in params)
+ ')',
'dummy': {
'__init__.py': '',
'test_dummy.py': DALS(
"""
import unittest
class TestTest(unittest.TestCase):
def test_test(self):
print('Foo')
"""
),
},
}
path.build(files)
dist = Distribution(params)
dist.script_name = 'setup.py'
cmd = test(dist)
cmd.ensure_finalized()
cmd.run()
out, err = capfd.readouterr()
assert out == 'Foo\n'
|
import pytest
from jaraco import path
from setuptools.command.test import test
from setuptools.dist import Distribution
from .textwrap import DALS
@pytest.mark.usefixtures('tmpdir_cwd')
def test_tests_are_run_once(capfd):
params = dict(
name='foo',
packages=['dummy'],
)
files = {
'setup.py':
'from setuptools import setup; setup('
+ ','.join(f'{name}={params[name]!r}' for name in params)
+ ')',
'dummy': {
'__init__.py': '',
'test_dummy.py': DALS(
"""
import unittest
class TestTest(unittest.TestCase):
def test_test(self):
print('Foo')
"""
),
},
}
path.build(files)
dist = Distribution(params)
dist.script_name = 'setup.py'
cmd = test(dist)
cmd.ensure_finalized()
cmd.run()
out, err = capfd.readouterr()
assert out.endswith('Foo\n')
assert len(out.split('Foo')) == 2
|
Remove reliance on distutils log verbosity.
|
Remove reliance on distutils log verbosity.
|
Python
|
mit
|
pypa/setuptools,pypa/setuptools,pypa/setuptools
|
import pytest
from jaraco import path
from setuptools.command.test import test
from setuptools.dist import Distribution
from .textwrap import DALS
@pytest.fixture
def quiet_log():
# Running some of the other tests will automatically
# change the log level to info, messing our output.
import distutils.log
distutils.log.set_verbosity(0)
@pytest.mark.usefixtures('tmpdir_cwd', 'quiet_log')
def test_tests_are_run_once(capfd):
params = dict(
name='foo',
packages=['dummy'],
)
files = {
'setup.py':
'from setuptools import setup; setup('
+ ','.join(f'{name}={params[name]!r}' for name in params)
+ ')',
'dummy': {
'__init__.py': '',
'test_dummy.py': DALS(
"""
import unittest
class TestTest(unittest.TestCase):
def test_test(self):
print('Foo')
"""
),
},
}
path.build(files)
dist = Distribution(params)
dist.script_name = 'setup.py'
cmd = test(dist)
cmd.ensure_finalized()
cmd.run()
out, err = capfd.readouterr()
assert out == 'Foo\n'
Remove reliance on distutils log verbosity.
|
import pytest
from jaraco import path
from setuptools.command.test import test
from setuptools.dist import Distribution
from .textwrap import DALS
@pytest.mark.usefixtures('tmpdir_cwd')
def test_tests_are_run_once(capfd):
params = dict(
name='foo',
packages=['dummy'],
)
files = {
'setup.py':
'from setuptools import setup; setup('
+ ','.join(f'{name}={params[name]!r}' for name in params)
+ ')',
'dummy': {
'__init__.py': '',
'test_dummy.py': DALS(
"""
import unittest
class TestTest(unittest.TestCase):
def test_test(self):
print('Foo')
"""
),
},
}
path.build(files)
dist = Distribution(params)
dist.script_name = 'setup.py'
cmd = test(dist)
cmd.ensure_finalized()
cmd.run()
out, err = capfd.readouterr()
assert out.endswith('Foo\n')
assert len(out.split('Foo')) == 2
|
<commit_before>import pytest
from jaraco import path
from setuptools.command.test import test
from setuptools.dist import Distribution
from .textwrap import DALS
@pytest.fixture
def quiet_log():
# Running some of the other tests will automatically
# change the log level to info, messing our output.
import distutils.log
distutils.log.set_verbosity(0)
@pytest.mark.usefixtures('tmpdir_cwd', 'quiet_log')
def test_tests_are_run_once(capfd):
params = dict(
name='foo',
packages=['dummy'],
)
files = {
'setup.py':
'from setuptools import setup; setup('
+ ','.join(f'{name}={params[name]!r}' for name in params)
+ ')',
'dummy': {
'__init__.py': '',
'test_dummy.py': DALS(
"""
import unittest
class TestTest(unittest.TestCase):
def test_test(self):
print('Foo')
"""
),
},
}
path.build(files)
dist = Distribution(params)
dist.script_name = 'setup.py'
cmd = test(dist)
cmd.ensure_finalized()
cmd.run()
out, err = capfd.readouterr()
assert out == 'Foo\n'
<commit_msg>Remove reliance on distutils log verbosity.<commit_after>
|
import pytest
from jaraco import path
from setuptools.command.test import test
from setuptools.dist import Distribution
from .textwrap import DALS
@pytest.mark.usefixtures('tmpdir_cwd')
def test_tests_are_run_once(capfd):
params = dict(
name='foo',
packages=['dummy'],
)
files = {
'setup.py':
'from setuptools import setup; setup('
+ ','.join(f'{name}={params[name]!r}' for name in params)
+ ')',
'dummy': {
'__init__.py': '',
'test_dummy.py': DALS(
"""
import unittest
class TestTest(unittest.TestCase):
def test_test(self):
print('Foo')
"""
),
},
}
path.build(files)
dist = Distribution(params)
dist.script_name = 'setup.py'
cmd = test(dist)
cmd.ensure_finalized()
cmd.run()
out, err = capfd.readouterr()
assert out.endswith('Foo\n')
assert len(out.split('Foo')) == 2
|
import pytest
from jaraco import path
from setuptools.command.test import test
from setuptools.dist import Distribution
from .textwrap import DALS
@pytest.fixture
def quiet_log():
# Running some of the other tests will automatically
# change the log level to info, messing our output.
import distutils.log
distutils.log.set_verbosity(0)
@pytest.mark.usefixtures('tmpdir_cwd', 'quiet_log')
def test_tests_are_run_once(capfd):
params = dict(
name='foo',
packages=['dummy'],
)
files = {
'setup.py':
'from setuptools import setup; setup('
+ ','.join(f'{name}={params[name]!r}' for name in params)
+ ')',
'dummy': {
'__init__.py': '',
'test_dummy.py': DALS(
"""
import unittest
class TestTest(unittest.TestCase):
def test_test(self):
print('Foo')
"""
),
},
}
path.build(files)
dist = Distribution(params)
dist.script_name = 'setup.py'
cmd = test(dist)
cmd.ensure_finalized()
cmd.run()
out, err = capfd.readouterr()
assert out == 'Foo\n'
Remove reliance on distutils log verbosity.import pytest
from jaraco import path
from setuptools.command.test import test
from setuptools.dist import Distribution
from .textwrap import DALS
@pytest.mark.usefixtures('tmpdir_cwd')
def test_tests_are_run_once(capfd):
params = dict(
name='foo',
packages=['dummy'],
)
files = {
'setup.py':
'from setuptools import setup; setup('
+ ','.join(f'{name}={params[name]!r}' for name in params)
+ ')',
'dummy': {
'__init__.py': '',
'test_dummy.py': DALS(
"""
import unittest
class TestTest(unittest.TestCase):
def test_test(self):
print('Foo')
"""
),
},
}
path.build(files)
dist = Distribution(params)
dist.script_name = 'setup.py'
cmd = test(dist)
cmd.ensure_finalized()
cmd.run()
out, err = capfd.readouterr()
assert out.endswith('Foo\n')
assert len(out.split('Foo')) == 2
|
<commit_before>import pytest
from jaraco import path
from setuptools.command.test import test
from setuptools.dist import Distribution
from .textwrap import DALS
@pytest.fixture
def quiet_log():
# Running some of the other tests will automatically
# change the log level to info, messing our output.
import distutils.log
distutils.log.set_verbosity(0)
@pytest.mark.usefixtures('tmpdir_cwd', 'quiet_log')
def test_tests_are_run_once(capfd):
params = dict(
name='foo',
packages=['dummy'],
)
files = {
'setup.py':
'from setuptools import setup; setup('
+ ','.join(f'{name}={params[name]!r}' for name in params)
+ ')',
'dummy': {
'__init__.py': '',
'test_dummy.py': DALS(
"""
import unittest
class TestTest(unittest.TestCase):
def test_test(self):
print('Foo')
"""
),
},
}
path.build(files)
dist = Distribution(params)
dist.script_name = 'setup.py'
cmd = test(dist)
cmd.ensure_finalized()
cmd.run()
out, err = capfd.readouterr()
assert out == 'Foo\n'
<commit_msg>Remove reliance on distutils log verbosity.<commit_after>import pytest
from jaraco import path
from setuptools.command.test import test
from setuptools.dist import Distribution
from .textwrap import DALS
@pytest.mark.usefixtures('tmpdir_cwd')
def test_tests_are_run_once(capfd):
params = dict(
name='foo',
packages=['dummy'],
)
files = {
'setup.py':
'from setuptools import setup; setup('
+ ','.join(f'{name}={params[name]!r}' for name in params)
+ ')',
'dummy': {
'__init__.py': '',
'test_dummy.py': DALS(
"""
import unittest
class TestTest(unittest.TestCase):
def test_test(self):
print('Foo')
"""
),
},
}
path.build(files)
dist = Distribution(params)
dist.script_name = 'setup.py'
cmd = test(dist)
cmd.ensure_finalized()
cmd.run()
out, err = capfd.readouterr()
assert out.endswith('Foo\n')
assert len(out.split('Foo')) == 2
|
694a87344abf57cd642111c31ccdea31af687fe3
|
src/sleepy/conf/development.py
|
src/sleepy/conf/development.py
|
from sleepy.conf.base import *
DEBUG = True
TEMPLATE_DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'sleepy_dev',
}
}
SOCIAL_AUTH_GOOGLE_OAUTH2_KEY = '' # noqa
SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = ''
SOCIAL_AUTH_TWITTER_KEY = ''
SOCIAL_AUTH_TWITTER_SECRET = ''
SOCIAL_AUTH_FACEBOOK_KEY = ''
SOCIAL_AUTH_FACEBOOK_SECRET = ''
SOCIAL_AUTH_FACEBOOK_SCOPE = ['email']
SOCIAL_AUTH_GITHUB_KEY = ''
SOCIAL_AUTH_GITHUB_SECRET = ''
LOGGING['loggers']['root']['level'] = 'DEBUG'
LOGGING['loggers']['celery']['level'] = 'DEBUG'
LOGGING['loggers']['sleepy']['level'] = 'DEBUG'
|
from sleepy.conf.base import *
DEBUG = True
TEMPLATE_DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'sleepy_dev',
}
}
SOCIAL_AUTH_GOOGLE_OAUTH2_KEY = '' # noqa
SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = ''
SOCIAL_AUTH_TWITTER_KEY = ''
SOCIAL_AUTH_TWITTER_SECRET = ''
SOCIAL_AUTH_FACEBOOK_KEY = ''
SOCIAL_AUTH_FACEBOOK_SECRET = ''
SOCIAL_AUTH_FACEBOOK_SCOPE = ['email']
SOCIAL_AUTH_GITHUB_KEY = ''
SOCIAL_AUTH_GITHUB_SECRET = ''
LOGGING['loggers']['root']['level'] = 'DEBUG'
LOGGING['loggers']['celery']['level'] = 'DEBUG'
LOGGING['loggers']['sleepy']['level'] = 'DEBUG'
SESSION_COOKIE_SECURE = False
CSRF_COOKIE_SECURE = False
|
Disable secure cookies and csrf for dev
|
Disable secure cookies and csrf for dev
|
Python
|
bsd-3-clause
|
YouNeedToSleep/sleepy,YouNeedToSleep/sleepy,YouNeedToSleep/sleepy
|
from sleepy.conf.base import *
DEBUG = True
TEMPLATE_DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'sleepy_dev',
}
}
SOCIAL_AUTH_GOOGLE_OAUTH2_KEY = '' # noqa
SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = ''
SOCIAL_AUTH_TWITTER_KEY = ''
SOCIAL_AUTH_TWITTER_SECRET = ''
SOCIAL_AUTH_FACEBOOK_KEY = ''
SOCIAL_AUTH_FACEBOOK_SECRET = ''
SOCIAL_AUTH_FACEBOOK_SCOPE = ['email']
SOCIAL_AUTH_GITHUB_KEY = ''
SOCIAL_AUTH_GITHUB_SECRET = ''
LOGGING['loggers']['root']['level'] = 'DEBUG'
LOGGING['loggers']['celery']['level'] = 'DEBUG'
LOGGING['loggers']['sleepy']['level'] = 'DEBUG'
Disable secure cookies and csrf for dev
|
from sleepy.conf.base import *
DEBUG = True
TEMPLATE_DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'sleepy_dev',
}
}
SOCIAL_AUTH_GOOGLE_OAUTH2_KEY = '' # noqa
SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = ''
SOCIAL_AUTH_TWITTER_KEY = ''
SOCIAL_AUTH_TWITTER_SECRET = ''
SOCIAL_AUTH_FACEBOOK_KEY = ''
SOCIAL_AUTH_FACEBOOK_SECRET = ''
SOCIAL_AUTH_FACEBOOK_SCOPE = ['email']
SOCIAL_AUTH_GITHUB_KEY = ''
SOCIAL_AUTH_GITHUB_SECRET = ''
LOGGING['loggers']['root']['level'] = 'DEBUG'
LOGGING['loggers']['celery']['level'] = 'DEBUG'
LOGGING['loggers']['sleepy']['level'] = 'DEBUG'
SESSION_COOKIE_SECURE = False
CSRF_COOKIE_SECURE = False
|
<commit_before>from sleepy.conf.base import *
DEBUG = True
TEMPLATE_DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'sleepy_dev',
}
}
SOCIAL_AUTH_GOOGLE_OAUTH2_KEY = '' # noqa
SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = ''
SOCIAL_AUTH_TWITTER_KEY = ''
SOCIAL_AUTH_TWITTER_SECRET = ''
SOCIAL_AUTH_FACEBOOK_KEY = ''
SOCIAL_AUTH_FACEBOOK_SECRET = ''
SOCIAL_AUTH_FACEBOOK_SCOPE = ['email']
SOCIAL_AUTH_GITHUB_KEY = ''
SOCIAL_AUTH_GITHUB_SECRET = ''
LOGGING['loggers']['root']['level'] = 'DEBUG'
LOGGING['loggers']['celery']['level'] = 'DEBUG'
LOGGING['loggers']['sleepy']['level'] = 'DEBUG'
<commit_msg>Disable secure cookies and csrf for dev<commit_after>
|
from sleepy.conf.base import *
DEBUG = True
TEMPLATE_DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'sleepy_dev',
}
}
SOCIAL_AUTH_GOOGLE_OAUTH2_KEY = '' # noqa
SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = ''
SOCIAL_AUTH_TWITTER_KEY = ''
SOCIAL_AUTH_TWITTER_SECRET = ''
SOCIAL_AUTH_FACEBOOK_KEY = ''
SOCIAL_AUTH_FACEBOOK_SECRET = ''
SOCIAL_AUTH_FACEBOOK_SCOPE = ['email']
SOCIAL_AUTH_GITHUB_KEY = ''
SOCIAL_AUTH_GITHUB_SECRET = ''
LOGGING['loggers']['root']['level'] = 'DEBUG'
LOGGING['loggers']['celery']['level'] = 'DEBUG'
LOGGING['loggers']['sleepy']['level'] = 'DEBUG'
SESSION_COOKIE_SECURE = False
CSRF_COOKIE_SECURE = False
|
from sleepy.conf.base import *
DEBUG = True
TEMPLATE_DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'sleepy_dev',
}
}
SOCIAL_AUTH_GOOGLE_OAUTH2_KEY = '' # noqa
SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = ''
SOCIAL_AUTH_TWITTER_KEY = ''
SOCIAL_AUTH_TWITTER_SECRET = ''
SOCIAL_AUTH_FACEBOOK_KEY = ''
SOCIAL_AUTH_FACEBOOK_SECRET = ''
SOCIAL_AUTH_FACEBOOK_SCOPE = ['email']
SOCIAL_AUTH_GITHUB_KEY = ''
SOCIAL_AUTH_GITHUB_SECRET = ''
LOGGING['loggers']['root']['level'] = 'DEBUG'
LOGGING['loggers']['celery']['level'] = 'DEBUG'
LOGGING['loggers']['sleepy']['level'] = 'DEBUG'
Disable secure cookies and csrf for devfrom sleepy.conf.base import *
DEBUG = True
TEMPLATE_DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'sleepy_dev',
}
}
SOCIAL_AUTH_GOOGLE_OAUTH2_KEY = '' # noqa
SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = ''
SOCIAL_AUTH_TWITTER_KEY = ''
SOCIAL_AUTH_TWITTER_SECRET = ''
SOCIAL_AUTH_FACEBOOK_KEY = ''
SOCIAL_AUTH_FACEBOOK_SECRET = ''
SOCIAL_AUTH_FACEBOOK_SCOPE = ['email']
SOCIAL_AUTH_GITHUB_KEY = ''
SOCIAL_AUTH_GITHUB_SECRET = ''
LOGGING['loggers']['root']['level'] = 'DEBUG'
LOGGING['loggers']['celery']['level'] = 'DEBUG'
LOGGING['loggers']['sleepy']['level'] = 'DEBUG'
SESSION_COOKIE_SECURE = False
CSRF_COOKIE_SECURE = False
|
<commit_before>from sleepy.conf.base import *
DEBUG = True
TEMPLATE_DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'sleepy_dev',
}
}
SOCIAL_AUTH_GOOGLE_OAUTH2_KEY = '' # noqa
SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = ''
SOCIAL_AUTH_TWITTER_KEY = ''
SOCIAL_AUTH_TWITTER_SECRET = ''
SOCIAL_AUTH_FACEBOOK_KEY = ''
SOCIAL_AUTH_FACEBOOK_SECRET = ''
SOCIAL_AUTH_FACEBOOK_SCOPE = ['email']
SOCIAL_AUTH_GITHUB_KEY = ''
SOCIAL_AUTH_GITHUB_SECRET = ''
LOGGING['loggers']['root']['level'] = 'DEBUG'
LOGGING['loggers']['celery']['level'] = 'DEBUG'
LOGGING['loggers']['sleepy']['level'] = 'DEBUG'
<commit_msg>Disable secure cookies and csrf for dev<commit_after>from sleepy.conf.base import *
DEBUG = True
TEMPLATE_DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'sleepy_dev',
}
}
SOCIAL_AUTH_GOOGLE_OAUTH2_KEY = '' # noqa
SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = ''
SOCIAL_AUTH_TWITTER_KEY = ''
SOCIAL_AUTH_TWITTER_SECRET = ''
SOCIAL_AUTH_FACEBOOK_KEY = ''
SOCIAL_AUTH_FACEBOOK_SECRET = ''
SOCIAL_AUTH_FACEBOOK_SCOPE = ['email']
SOCIAL_AUTH_GITHUB_KEY = ''
SOCIAL_AUTH_GITHUB_SECRET = ''
LOGGING['loggers']['root']['level'] = 'DEBUG'
LOGGING['loggers']['celery']['level'] = 'DEBUG'
LOGGING['loggers']['sleepy']['level'] = 'DEBUG'
SESSION_COOKIE_SECURE = False
CSRF_COOKIE_SECURE = False
|
0eca2340a4d38b542659505ba386a23129f8ac0b
|
google/cloud/speech/__init__.py
|
google/cloud/speech/__init__.py
|
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Cloud Speech API wrapper."""
from google.cloud.speech.client import Client
from google.cloud.speech.connection import Connection
|
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Cloud Speech API wrapper."""
from google.cloud.speech.client import Client
from google.cloud.speech.connection import Connection
from google.cloud.speech.encoding import Encoding
|
Make Encoding accessible from speech.Encoding.
|
Make Encoding accessible from speech.Encoding.
|
Python
|
apache-2.0
|
googleapis/python-speech,googleapis/python-speech
|
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Cloud Speech API wrapper."""
from google.cloud.speech.client import Client
from google.cloud.speech.connection import Connection
Make Encoding accessible from speech.Encoding.
|
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Cloud Speech API wrapper."""
from google.cloud.speech.client import Client
from google.cloud.speech.connection import Connection
from google.cloud.speech.encoding import Encoding
|
<commit_before># Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Cloud Speech API wrapper."""
from google.cloud.speech.client import Client
from google.cloud.speech.connection import Connection
<commit_msg>Make Encoding accessible from speech.Encoding.<commit_after>
|
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Cloud Speech API wrapper."""
from google.cloud.speech.client import Client
from google.cloud.speech.connection import Connection
from google.cloud.speech.encoding import Encoding
|
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Cloud Speech API wrapper."""
from google.cloud.speech.client import Client
from google.cloud.speech.connection import Connection
Make Encoding accessible from speech.Encoding.# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Cloud Speech API wrapper."""
from google.cloud.speech.client import Client
from google.cloud.speech.connection import Connection
from google.cloud.speech.encoding import Encoding
|
<commit_before># Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Cloud Speech API wrapper."""
from google.cloud.speech.client import Client
from google.cloud.speech.connection import Connection
<commit_msg>Make Encoding accessible from speech.Encoding.<commit_after># Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Cloud Speech API wrapper."""
from google.cloud.speech.client import Client
from google.cloud.speech.connection import Connection
from google.cloud.speech.encoding import Encoding
|
ecacafa1c104c319c0abd92ec965c7d5a4c01786
|
trac/db/tests/__init__.py
|
trac/db/tests/__init__.py
|
import unittest
from trac.db.tests import api
from trac.db.tests import postgres_test
from trac.db.tests import backup
from trac.db.tests.functional import functionalSuite
def suite():
suite = unittest.TestSuite()
suite.addTest(api.suite())
suite.addTest(postgres_test.suite())
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
import unittest
from trac.db.tests import api
from trac.db.tests import postgres_test
from trac.db.tests.functional import functionalSuite
def suite():
suite = unittest.TestSuite()
suite.addTest(api.suite())
suite.addTest(postgres_test.suite())
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
Remove backup from tests - it moved to functional
|
Remove backup from tests - it moved to functional
git-svn-id: aefd11945137af0f31499d1cc9b63b54ed7ebb20@8041 af82e41b-90c4-0310-8c96-b1721e28e2e2
|
Python
|
bsd-3-clause
|
jun66j5/trac-ja,netjunki/trac-Pygit2,walty8/trac,walty8/trac,walty8/trac,netjunki/trac-Pygit2,jun66j5/trac-ja,jun66j5/trac-ja,walty8/trac,jun66j5/trac-ja,netjunki/trac-Pygit2
|
import unittest
from trac.db.tests import api
from trac.db.tests import postgres_test
from trac.db.tests import backup
from trac.db.tests.functional import functionalSuite
def suite():
suite = unittest.TestSuite()
suite.addTest(api.suite())
suite.addTest(postgres_test.suite())
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
Remove backup from tests - it moved to functional
git-svn-id: aefd11945137af0f31499d1cc9b63b54ed7ebb20@8041 af82e41b-90c4-0310-8c96-b1721e28e2e2
|
import unittest
from trac.db.tests import api
from trac.db.tests import postgres_test
from trac.db.tests.functional import functionalSuite
def suite():
suite = unittest.TestSuite()
suite.addTest(api.suite())
suite.addTest(postgres_test.suite())
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
<commit_before>import unittest
from trac.db.tests import api
from trac.db.tests import postgres_test
from trac.db.tests import backup
from trac.db.tests.functional import functionalSuite
def suite():
suite = unittest.TestSuite()
suite.addTest(api.suite())
suite.addTest(postgres_test.suite())
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
<commit_msg>Remove backup from tests - it moved to functional
git-svn-id: aefd11945137af0f31499d1cc9b63b54ed7ebb20@8041 af82e41b-90c4-0310-8c96-b1721e28e2e2<commit_after>
|
import unittest
from trac.db.tests import api
from trac.db.tests import postgres_test
from trac.db.tests.functional import functionalSuite
def suite():
suite = unittest.TestSuite()
suite.addTest(api.suite())
suite.addTest(postgres_test.suite())
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
import unittest
from trac.db.tests import api
from trac.db.tests import postgres_test
from trac.db.tests import backup
from trac.db.tests.functional import functionalSuite
def suite():
suite = unittest.TestSuite()
suite.addTest(api.suite())
suite.addTest(postgres_test.suite())
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
Remove backup from tests - it moved to functional
git-svn-id: aefd11945137af0f31499d1cc9b63b54ed7ebb20@8041 af82e41b-90c4-0310-8c96-b1721e28e2e2import unittest
from trac.db.tests import api
from trac.db.tests import postgres_test
from trac.db.tests.functional import functionalSuite
def suite():
suite = unittest.TestSuite()
suite.addTest(api.suite())
suite.addTest(postgres_test.suite())
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
<commit_before>import unittest
from trac.db.tests import api
from trac.db.tests import postgres_test
from trac.db.tests import backup
from trac.db.tests.functional import functionalSuite
def suite():
suite = unittest.TestSuite()
suite.addTest(api.suite())
suite.addTest(postgres_test.suite())
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
<commit_msg>Remove backup from tests - it moved to functional
git-svn-id: aefd11945137af0f31499d1cc9b63b54ed7ebb20@8041 af82e41b-90c4-0310-8c96-b1721e28e2e2<commit_after>import unittest
from trac.db.tests import api
from trac.db.tests import postgres_test
from trac.db.tests.functional import functionalSuite
def suite():
suite = unittest.TestSuite()
suite.addTest(api.suite())
suite.addTest(postgres_test.suite())
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
fa495f9f2f887533f870ddedef3a1aea0a699419
|
oscar/management/commands/oscar_fork_statics.py
|
oscar/management/commands/oscar_fork_statics.py
|
import logging
import os
import shutil
from django.db.models import get_model
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
ProductAlert = get_model('customer', 'ProductAlert')
logger = logging.getLogger(__name__)
class Command(BaseCommand):
"""
Copy Oscar's statics into local project so they can be used as a base for
styling a new site.
"""
args = '<destination folder>'
help = "Copy Oscar's static files"
def handle(self, *args, **options):
# Determine where to copy to
folder = args[0] if args else 'static'
if not folder.startswith('/'):
destination = os.path.join(os.getcwd(), folder)
else:
destination = folder
if os.path.exists(destination):
raise CommandError(
"The folder %s already exists - aborting!" % destination)
source = os.path.realpath(
os.path.join(os.path.dirname(__file__), '../../static'))
print "Copying Oscar's static files to %s" % (source, destination)
shutil.copytree(source, destination)
# Check if this new folder is in STATICFILES_DIRS
if destination not in settings.STATICFILES_DIRS:
print ("You need to add %s to STATICFILES_DIRS in order for your "
"local overrides to be picked up") % destination
|
import logging
import os
import shutil
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
logger = logging.getLogger(__name__)
class Command(BaseCommand):
"""
Copy Oscar's statics into local project so they can be used as a base for
styling a new site.
"""
args = '<destination folder>'
help = "Copy Oscar's static files"
def handle(self, *args, **options):
# Determine where to copy to
folder = args[0] if args else 'static'
if not folder.startswith('/'):
destination = os.path.join(os.getcwd(), folder)
else:
destination = folder
if os.path.exists(destination):
raise CommandError(
"The folder %s already exists - aborting!" % destination)
source = os.path.realpath(
os.path.join(os.path.dirname(__file__), '../../static'))
print "Copying Oscar's static files to %s" % (destination,)
shutil.copytree(source, destination)
# Check if this new folder is in STATICFILES_DIRS
if destination not in settings.STATICFILES_DIRS:
print ("You need to add %s to STATICFILES_DIRS in order for your "
"local overrides to be picked up") % destination
|
Fix string formatting bug in fork_statics man. command
|
Fix string formatting bug in fork_statics man. command
|
Python
|
bsd-3-clause
|
amirrpp/django-oscar,kapt/django-oscar,ademuk/django-oscar,nickpack/django-oscar,eddiep1101/django-oscar,bschuon/django-oscar,vovanbo/django-oscar,pasqualguerrero/django-oscar,Jannes123/django-oscar,elliotthill/django-oscar,taedori81/django-oscar,marcoantoniooliveira/labweb,sonofatailor/django-oscar,ahmetdaglarbas/e-commerce,ademuk/django-oscar,okfish/django-oscar,manevant/django-oscar,mexeniz/django-oscar,ademuk/django-oscar,jinnykoo/christmas,QLGu/django-oscar,bnprk/django-oscar,bschuon/django-oscar,ka7eh/django-oscar,WadeYuChen/django-oscar,john-parton/django-oscar,binarydud/django-oscar,WillisXChen/django-oscar,kapari/django-oscar,lijoantony/django-oscar,thechampanurag/django-oscar,sasha0/django-oscar,anentropic/django-oscar,anentropic/django-oscar,taedori81/django-oscar,kapt/django-oscar,eddiep1101/django-oscar,john-parton/django-oscar,MatthewWilkes/django-oscar,rocopartners/django-oscar,pdonadeo/django-oscar,elliotthill/django-oscar,django-oscar/django-oscar,jinnykoo/wuyisj.com,pasqualguerrero/django-oscar,solarissmoke/django-oscar,mexeniz/django-oscar,elliotthill/django-oscar,ka7eh/django-oscar,manevant/django-oscar,solarissmoke/django-oscar,rocopartners/django-oscar,kapari/django-oscar,jinnykoo/wuyisj.com,nfletton/django-oscar,makielab/django-oscar,bschuon/django-oscar,dongguangming/django-oscar,sonofatailor/django-oscar,ahmetdaglarbas/e-commerce,MatthewWilkes/django-oscar,dongguangming/django-oscar,saadatqadri/django-oscar,machtfit/django-oscar,eddiep1101/django-oscar,dongguangming/django-oscar,nickpack/django-oscar,jmt4/django-oscar,Jannes123/django-oscar,amirrpp/django-oscar,binarydud/django-oscar,lijoantony/django-oscar,WadeYuChen/django-oscar,itbabu/django-oscar,QLGu/django-oscar,michaelkuty/django-oscar,jlmadurga/django-oscar,bschuon/django-oscar,okfish/django-oscar,bnprk/django-oscar,vovanbo/django-oscar,faratro/django-oscar,sasha0/django-oscar,faratro/django-oscar,solarissmoke/django-oscar,monikasulik/django-oscar,Jannes123/django-oscar,nfletton/djan
go-oscar,manevant/django-oscar,josesanch/django-oscar,eddiep1101/django-oscar,mexeniz/django-oscar,adamend/django-oscar,jinnykoo/wuyisj,lijoantony/django-oscar,jinnykoo/wuyisj.com,jlmadurga/django-oscar,ademuk/django-oscar,jinnykoo/christmas,nickpack/django-oscar,makielab/django-oscar,dongguangming/django-oscar,itbabu/django-oscar,Jannes123/django-oscar,pdonadeo/django-oscar,django-oscar/django-oscar,vovanbo/django-oscar,josesanch/django-oscar,Idematica/django-oscar,thechampanurag/django-oscar,adamend/django-oscar,jinnykoo/wuyisj,bnprk/django-oscar,ahmetdaglarbas/e-commerce,pdonadeo/django-oscar,WillisXChen/django-oscar,nfletton/django-oscar,michaelkuty/django-oscar,marcoantoniooliveira/labweb,DrOctogon/unwash_ecom,monikasulik/django-oscar,django-oscar/django-oscar,amirrpp/django-oscar,WillisXChen/django-oscar,ka7eh/django-oscar,sonofatailor/django-oscar,manevant/django-oscar,WillisXChen/django-oscar,MatthewWilkes/django-oscar,okfish/django-oscar,jinnykoo/wuyisj,lijoantony/django-oscar,saadatqadri/django-oscar,WillisXChen/django-oscar,spartonia/django-oscar,nfletton/django-oscar,taedori81/django-oscar,Bogh/django-oscar,john-parton/django-oscar,binarydud/django-oscar,spartonia/django-oscar,machtfit/django-oscar,Idematica/django-oscar,makielab/django-oscar,jmt4/django-oscar,anentropic/django-oscar,sonofatailor/django-oscar,jlmadurga/django-oscar,spartonia/django-oscar,itbabu/django-oscar,spartonia/django-oscar,QLGu/django-oscar,binarydud/django-oscar,kapari/django-oscar,DrOctogon/unwash_ecom,rocopartners/django-oscar,DrOctogon/unwash_ecom,jinnykoo/christmas,ka7eh/django-oscar,pasqualguerrero/django-oscar,jmt4/django-oscar,michaelkuty/django-oscar,pasqualguerrero/django-oscar,Bogh/django-oscar,taedori81/django-oscar,Idematica/django-oscar,jinnykoo/wuyisj.com,MatthewWilkes/django-oscar,jinnykoo/wuyisj,adamend/django-oscar,Bogh/django-oscar,solarissmoke/django-oscar,kapt/django-oscar,WillisXChen/django-oscar,saadatqadri/django-oscar,michaelkuty/django-oscar,Bogh/django-o
scar,pdonadeo/django-oscar,faratro/django-oscar,mexeniz/django-oscar,kapari/django-oscar,thechampanurag/django-oscar,monikasulik/django-oscar,itbabu/django-oscar,bnprk/django-oscar,nickpack/django-oscar,sasha0/django-oscar,saadatqadri/django-oscar,josesanch/django-oscar,WadeYuChen/django-oscar,sasha0/django-oscar,vovanbo/django-oscar,monikasulik/django-oscar,jmt4/django-oscar,adamend/django-oscar,john-parton/django-oscar,thechampanurag/django-oscar,machtfit/django-oscar,marcoantoniooliveira/labweb,jlmadurga/django-oscar,QLGu/django-oscar,django-oscar/django-oscar,rocopartners/django-oscar,makielab/django-oscar,faratro/django-oscar,okfish/django-oscar,anentropic/django-oscar,WadeYuChen/django-oscar,marcoantoniooliveira/labweb,amirrpp/django-oscar,ahmetdaglarbas/e-commerce
|
import logging
import os
import shutil
from django.db.models import get_model
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
ProductAlert = get_model('customer', 'ProductAlert')
logger = logging.getLogger(__name__)
class Command(BaseCommand):
"""
Copy Oscar's statics into local project so they can be used as a base for
styling a new site.
"""
args = '<destination folder>'
help = "Copy Oscar's static files"
def handle(self, *args, **options):
# Determine where to copy to
folder = args[0] if args else 'static'
if not folder.startswith('/'):
destination = os.path.join(os.getcwd(), folder)
else:
destination = folder
if os.path.exists(destination):
raise CommandError(
"The folder %s already exists - aborting!" % destination)
source = os.path.realpath(
os.path.join(os.path.dirname(__file__), '../../static'))
print "Copying Oscar's static files to %s" % (source, destination)
shutil.copytree(source, destination)
# Check if this new folder is in STATICFILES_DIRS
if destination not in settings.STATICFILES_DIRS:
print ("You need to add %s to STATICFILES_DIRS in order for your "
"local overrides to be picked up") % destination
Fix string formatting bug in fork_statics man. command
|
import logging
import os
import shutil
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
logger = logging.getLogger(__name__)
class Command(BaseCommand):
"""
Copy Oscar's statics into local project so they can be used as a base for
styling a new site.
"""
args = '<destination folder>'
help = "Copy Oscar's static files"
def handle(self, *args, **options):
# Determine where to copy to
folder = args[0] if args else 'static'
if not folder.startswith('/'):
destination = os.path.join(os.getcwd(), folder)
else:
destination = folder
if os.path.exists(destination):
raise CommandError(
"The folder %s already exists - aborting!" % destination)
source = os.path.realpath(
os.path.join(os.path.dirname(__file__), '../../static'))
print "Copying Oscar's static files to %s" % (destination,)
shutil.copytree(source, destination)
# Check if this new folder is in STATICFILES_DIRS
if destination not in settings.STATICFILES_DIRS:
print ("You need to add %s to STATICFILES_DIRS in order for your "
"local overrides to be picked up") % destination
|
<commit_before>import logging
import os
import shutil
from django.db.models import get_model
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
ProductAlert = get_model('customer', 'ProductAlert')
logger = logging.getLogger(__name__)
class Command(BaseCommand):
"""
Copy Oscar's statics into local project so they can be used as a base for
styling a new site.
"""
args = '<destination folder>'
help = "Copy Oscar's static files"
def handle(self, *args, **options):
# Determine where to copy to
folder = args[0] if args else 'static'
if not folder.startswith('/'):
destination = os.path.join(os.getcwd(), folder)
else:
destination = folder
if os.path.exists(destination):
raise CommandError(
"The folder %s already exists - aborting!" % destination)
source = os.path.realpath(
os.path.join(os.path.dirname(__file__), '../../static'))
print "Copying Oscar's static files to %s" % (source, destination)
shutil.copytree(source, destination)
# Check if this new folder is in STATICFILES_DIRS
if destination not in settings.STATICFILES_DIRS:
print ("You need to add %s to STATICFILES_DIRS in order for your "
"local overrides to be picked up") % destination
<commit_msg>Fix string formatting bug in fork_statics man. command<commit_after>
|
import logging
import os
import shutil
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
logger = logging.getLogger(__name__)
class Command(BaseCommand):
"""
Copy Oscar's statics into local project so they can be used as a base for
styling a new site.
"""
args = '<destination folder>'
help = "Copy Oscar's static files"
def handle(self, *args, **options):
# Determine where to copy to
folder = args[0] if args else 'static'
if not folder.startswith('/'):
destination = os.path.join(os.getcwd(), folder)
else:
destination = folder
if os.path.exists(destination):
raise CommandError(
"The folder %s already exists - aborting!" % destination)
source = os.path.realpath(
os.path.join(os.path.dirname(__file__), '../../static'))
print "Copying Oscar's static files to %s" % (destination,)
shutil.copytree(source, destination)
# Check if this new folder is in STATICFILES_DIRS
if destination not in settings.STATICFILES_DIRS:
print ("You need to add %s to STATICFILES_DIRS in order for your "
"local overrides to be picked up") % destination
|
import logging
import os
import shutil
from django.db.models import get_model
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
ProductAlert = get_model('customer', 'ProductAlert')
logger = logging.getLogger(__name__)
class Command(BaseCommand):
"""
Copy Oscar's statics into local project so they can be used as a base for
styling a new site.
"""
args = '<destination folder>'
help = "Copy Oscar's static files"
def handle(self, *args, **options):
# Determine where to copy to
folder = args[0] if args else 'static'
if not folder.startswith('/'):
destination = os.path.join(os.getcwd(), folder)
else:
destination = folder
if os.path.exists(destination):
raise CommandError(
"The folder %s already exists - aborting!" % destination)
source = os.path.realpath(
os.path.join(os.path.dirname(__file__), '../../static'))
print "Copying Oscar's static files to %s" % (source, destination)
shutil.copytree(source, destination)
# Check if this new folder is in STATICFILES_DIRS
if destination not in settings.STATICFILES_DIRS:
print ("You need to add %s to STATICFILES_DIRS in order for your "
"local overrides to be picked up") % destination
Fix string formatting bug in fork_statics man. commandimport logging
import os
import shutil
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
logger = logging.getLogger(__name__)
class Command(BaseCommand):
"""
Copy Oscar's statics into local project so they can be used as a base for
styling a new site.
"""
args = '<destination folder>'
help = "Copy Oscar's static files"
def handle(self, *args, **options):
# Determine where to copy to
folder = args[0] if args else 'static'
if not folder.startswith('/'):
destination = os.path.join(os.getcwd(), folder)
else:
destination = folder
if os.path.exists(destination):
raise CommandError(
"The folder %s already exists - aborting!" % destination)
source = os.path.realpath(
os.path.join(os.path.dirname(__file__), '../../static'))
print "Copying Oscar's static files to %s" % (destination,)
shutil.copytree(source, destination)
# Check if this new folder is in STATICFILES_DIRS
if destination not in settings.STATICFILES_DIRS:
print ("You need to add %s to STATICFILES_DIRS in order for your "
"local overrides to be picked up") % destination
|
<commit_before>import logging
import os
import shutil
from django.db.models import get_model
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
ProductAlert = get_model('customer', 'ProductAlert')
logger = logging.getLogger(__name__)
class Command(BaseCommand):
    """
    Copy Oscar's statics into local project so they can be used as a base for
    styling a new site.
    """
    # Usage and help strings shown by Django's management-command framework.
    args = '<destination folder>'
    help = "Copy Oscar's static files"

    def handle(self, *args, **options):
        # Determine where to copy to; defaults to ./static under the cwd.
        folder = args[0] if args else 'static'
        if not folder.startswith('/'):
            # Relative destinations are resolved against the working directory.
            destination = os.path.join(os.getcwd(), folder)
        else:
            destination = folder
        if os.path.exists(destination):
            # Refuse to clobber an existing folder.
            raise CommandError(
                "The folder %s already exists - aborting!" % destination)
        # Oscar's bundled static files live two levels above this module.
        source = os.path.realpath(
            os.path.join(os.path.dirname(__file__), '../../static'))
        # BUG(review): one %s placeholder but a two-item tuple — this line
        # raises TypeError at runtime (fixed in the follow-up commit).
        print "Copying Oscar's static files to %s" % (source, destination)
        shutil.copytree(source, destination)
        # Check if this new folder is in STATICFILES_DIRS
        if destination not in settings.STATICFILES_DIRS:
            print ("You need to add %s to STATICFILES_DIRS in order for your "
                   "local overrides to be picked up") % destination
<commit_msg>Fix string formatting bug in fork_statics man. command<commit_after>import logging
import os
import shutil
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
logger = logging.getLogger(__name__)
class Command(BaseCommand):
    """
    Copy Oscar's statics into local project so they can be used as a base for
    styling a new site.
    """
    # Usage and help strings shown by Django's management-command framework.
    args = '<destination folder>'
    help = "Copy Oscar's static files"

    def handle(self, *args, **options):
        # Determine where to copy to; defaults to ./static under the cwd.
        folder = args[0] if args else 'static'
        if not folder.startswith('/'):
            # Relative destinations are resolved against the working directory.
            destination = os.path.join(os.getcwd(), folder)
        else:
            destination = folder
        if os.path.exists(destination):
            # Refuse to clobber an existing folder.
            raise CommandError(
                "The folder %s already exists - aborting!" % destination)
        # Oscar's bundled static files live two levels above this module.
        source = os.path.realpath(
            os.path.join(os.path.dirname(__file__), '../../static'))
        # Single-element tuple matches the single %s placeholder (this is the
        # string-formatting fix the commit message refers to).
        print "Copying Oscar's static files to %s" % (destination,)
        shutil.copytree(source, destination)
        # Check if this new folder is in STATICFILES_DIRS
        if destination not in settings.STATICFILES_DIRS:
            print ("You need to add %s to STATICFILES_DIRS in order for your "
                   "local overrides to be picked up") % destination
|
99c7c87d1b84b70962250e362dcfff75e77cb3fe
|
txircd/modules/cmode_k.py
|
txircd/modules/cmode_k.py
|
from twisted.words.protocols import irc
from txircd.modbase import Mode
class PasswordMode(Mode):
    """Channel mode +k: require a matching key (password) to JOIN."""
    def commandPermission(self, user, cmd, data):
        # Only JOIN commands are filtered; everything else passes through.
        if cmd != "JOIN":
            return data
        channels = data["targetchan"]
        keys = data["keys"]
        removeChannels = []
        # BUG(review): Python lists have no .enumerate() method — this raises
        # AttributeError; it should be the builtin enumerate(channels).
        for index, chan in channels.enumerate():
            if "k" in chan.mode and chan.mode["k"] != keys[index]:
                removeChannels.append(chan)
                user.sendMessage(irc.ERR_BADCHANNELKEY, chan.name, ":Cannot join channel (Incorrect channel key)")
        for chan in removeChannels:
            index = channels.index(chan) # We need to do this anyway to eliminate the effects of shifting when removing earlier elements
            channels.pop(index)
            keys.pop(index)
        # Hand the filtered channel/key lists back to the JOIN pipeline.
        data["targetchan"] = channels
        data["keys"] = keys
        return data
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"modes": {
"cuk": PasswordMode()
}
}
def cleanup(self):
self.ircd.removeMode("cuk")
|
from twisted.words.protocols import irc
from txircd.modbase import Mode
class PasswordMode(Mode):
    """Channel mode +k: require a matching key (password) to JOIN."""

    def checkUnset(self, user, target, param):
        """Only allow unsetting +k when the supplied parameter matches the set key."""
        if param == target.mode["k"]:
            return True
        return False

    def commandPermission(self, user, cmd, data):
        """Strip from a JOIN any channel whose supplied key does not match."""
        if cmd != "JOIN":
            return data
        channels = data["targetchan"]
        keys = data["keys"]
        removeChannels = []
        # FIX: lists have no .enumerate() method (AttributeError); use the
        # builtin enumerate() to pair each channel with its supplied key.
        for index, chan in enumerate(channels):
            if "k" in chan.mode and chan.mode["k"] != keys[index]:
                removeChannels.append(chan)
                user.sendMessage(irc.ERR_BADCHANNELKEY, chan.name, ":Cannot join channel (Incorrect channel key)")
        for chan in removeChannels:
            index = channels.index(chan) # We need to do this anyway to eliminate the effects of shifting when removing earlier elements
            channels.pop(index)
            keys.pop(index)
        data["targetchan"] = channels
        data["keys"] = keys
        return data
class Spawner(object):
    """Module entry point: registers and unregisters channel mode +k."""

    def __init__(self, ircd):
        self.ircd = ircd

    def spawn(self):
        # Register the key mode under the channel-mode identifier "cuk".
        return {"modes": {"cuk": PasswordMode()}}

    def cleanup(self):
        self.ircd.removeMode("cuk")
|
Check that the password parameter when unsetting mode k matches the password that is set
|
Check that the password parameter when unsetting mode k matches the password that is set
|
Python
|
bsd-3-clause
|
Heufneutje/txircd,ElementalAlchemist/txircd,DesertBus/txircd
|
from twisted.words.protocols import irc
from txircd.modbase import Mode
class PasswordMode(Mode):
def commandPermission(self, user, cmd, data):
if cmd != "JOIN":
return data
channels = data["targetchan"]
keys = data["keys"]
removeChannels = []
for index, chan in channels.enumerate():
if "k" in chan.mode and chan.mode["k"] != keys[index]:
removeChannels.append(chan)
user.sendMessage(irc.ERR_BADCHANNELKEY, chan.name, ":Cannot join channel (Incorrect channel key)")
for chan in removeChannels:
index = channels.index(chan) # We need to do this anyway to eliminate the effects of shifting when removing earlier elements
channels.pop(index)
keys.pop(index)
data["targetchan"] = channels
data["keys"] = keys
return data
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"modes": {
"cuk": PasswordMode()
}
}
def cleanup(self):
self.ircd.removeMode("cuk")
Check that the password parameter when unsetting mode k matches the password that is set
|
from twisted.words.protocols import irc
from txircd.modbase import Mode
class PasswordMode(Mode):
def checkUnset(self, user, target, param):
if param == target.mode["k"]:
return True
return False
def commandPermission(self, user, cmd, data):
if cmd != "JOIN":
return data
channels = data["targetchan"]
keys = data["keys"]
removeChannels = []
for index, chan in channels.enumerate():
if "k" in chan.mode and chan.mode["k"] != keys[index]:
removeChannels.append(chan)
user.sendMessage(irc.ERR_BADCHANNELKEY, chan.name, ":Cannot join channel (Incorrect channel key)")
for chan in removeChannels:
index = channels.index(chan) # We need to do this anyway to eliminate the effects of shifting when removing earlier elements
channels.pop(index)
keys.pop(index)
data["targetchan"] = channels
data["keys"] = keys
return data
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"modes": {
"cuk": PasswordMode()
}
}
def cleanup(self):
self.ircd.removeMode("cuk")
|
<commit_before>from twisted.words.protocols import irc
from txircd.modbase import Mode
class PasswordMode(Mode):
def commandPermission(self, user, cmd, data):
if cmd != "JOIN":
return data
channels = data["targetchan"]
keys = data["keys"]
removeChannels = []
for index, chan in channels.enumerate():
if "k" in chan.mode and chan.mode["k"] != keys[index]:
removeChannels.append(chan)
user.sendMessage(irc.ERR_BADCHANNELKEY, chan.name, ":Cannot join channel (Incorrect channel key)")
for chan in removeChannels:
index = channels.index(chan) # We need to do this anyway to eliminate the effects of shifting when removing earlier elements
channels.pop(index)
keys.pop(index)
data["targetchan"] = channels
data["keys"] = keys
return data
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"modes": {
"cuk": PasswordMode()
}
}
def cleanup(self):
self.ircd.removeMode("cuk")<commit_msg>Check that the password parameter when unsetting mode k matches the password that is set<commit_after>
|
from twisted.words.protocols import irc
from txircd.modbase import Mode
class PasswordMode(Mode):
def checkUnset(self, user, target, param):
if param == target.mode["k"]:
return True
return False
def commandPermission(self, user, cmd, data):
if cmd != "JOIN":
return data
channels = data["targetchan"]
keys = data["keys"]
removeChannels = []
for index, chan in channels.enumerate():
if "k" in chan.mode and chan.mode["k"] != keys[index]:
removeChannels.append(chan)
user.sendMessage(irc.ERR_BADCHANNELKEY, chan.name, ":Cannot join channel (Incorrect channel key)")
for chan in removeChannels:
index = channels.index(chan) # We need to do this anyway to eliminate the effects of shifting when removing earlier elements
channels.pop(index)
keys.pop(index)
data["targetchan"] = channels
data["keys"] = keys
return data
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"modes": {
"cuk": PasswordMode()
}
}
def cleanup(self):
self.ircd.removeMode("cuk")
|
from twisted.words.protocols import irc
from txircd.modbase import Mode
class PasswordMode(Mode):
def commandPermission(self, user, cmd, data):
if cmd != "JOIN":
return data
channels = data["targetchan"]
keys = data["keys"]
removeChannels = []
for index, chan in channels.enumerate():
if "k" in chan.mode and chan.mode["k"] != keys[index]:
removeChannels.append(chan)
user.sendMessage(irc.ERR_BADCHANNELKEY, chan.name, ":Cannot join channel (Incorrect channel key)")
for chan in removeChannels:
index = channels.index(chan) # We need to do this anyway to eliminate the effects of shifting when removing earlier elements
channels.pop(index)
keys.pop(index)
data["targetchan"] = channels
data["keys"] = keys
return data
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"modes": {
"cuk": PasswordMode()
}
}
def cleanup(self):
self.ircd.removeMode("cuk")Check that the password parameter when unsetting mode k matches the password that is setfrom twisted.words.protocols import irc
from txircd.modbase import Mode
class PasswordMode(Mode):
def checkUnset(self, user, target, param):
if param == target.mode["k"]:
return True
return False
def commandPermission(self, user, cmd, data):
if cmd != "JOIN":
return data
channels = data["targetchan"]
keys = data["keys"]
removeChannels = []
for index, chan in channels.enumerate():
if "k" in chan.mode and chan.mode["k"] != keys[index]:
removeChannels.append(chan)
user.sendMessage(irc.ERR_BADCHANNELKEY, chan.name, ":Cannot join channel (Incorrect channel key)")
for chan in removeChannels:
index = channels.index(chan) # We need to do this anyway to eliminate the effects of shifting when removing earlier elements
channels.pop(index)
keys.pop(index)
data["targetchan"] = channels
data["keys"] = keys
return data
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"modes": {
"cuk": PasswordMode()
}
}
def cleanup(self):
self.ircd.removeMode("cuk")
|
<commit_before>from twisted.words.protocols import irc
from txircd.modbase import Mode
class PasswordMode(Mode):
def commandPermission(self, user, cmd, data):
if cmd != "JOIN":
return data
channels = data["targetchan"]
keys = data["keys"]
removeChannels = []
for index, chan in channels.enumerate():
if "k" in chan.mode and chan.mode["k"] != keys[index]:
removeChannels.append(chan)
user.sendMessage(irc.ERR_BADCHANNELKEY, chan.name, ":Cannot join channel (Incorrect channel key)")
for chan in removeChannels:
index = channels.index(chan) # We need to do this anyway to eliminate the effects of shifting when removing earlier elements
channels.pop(index)
keys.pop(index)
data["targetchan"] = channels
data["keys"] = keys
return data
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"modes": {
"cuk": PasswordMode()
}
}
def cleanup(self):
self.ircd.removeMode("cuk")<commit_msg>Check that the password parameter when unsetting mode k matches the password that is set<commit_after>from twisted.words.protocols import irc
from txircd.modbase import Mode
class PasswordMode(Mode):
def checkUnset(self, user, target, param):
if param == target.mode["k"]:
return True
return False
def commandPermission(self, user, cmd, data):
if cmd != "JOIN":
return data
channels = data["targetchan"]
keys = data["keys"]
removeChannels = []
for index, chan in channels.enumerate():
if "k" in chan.mode and chan.mode["k"] != keys[index]:
removeChannels.append(chan)
user.sendMessage(irc.ERR_BADCHANNELKEY, chan.name, ":Cannot join channel (Incorrect channel key)")
for chan in removeChannels:
index = channels.index(chan) # We need to do this anyway to eliminate the effects of shifting when removing earlier elements
channels.pop(index)
keys.pop(index)
data["targetchan"] = channels
data["keys"] = keys
return data
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"modes": {
"cuk": PasswordMode()
}
}
def cleanup(self):
self.ircd.removeMode("cuk")
|
f325b02c66810cff9e3ace8b31e7f3a7b410342f
|
awx/wsgi.py
|
awx/wsgi.py
|
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.
import logging
# NOTE(review): importing Django's WSGI machinery before prepare_env() runs
# means Django settings may not yet be configured — the follow-up commit
# ("Fix import error by calling prepare_env first") reorders exactly this.
from django.core.wsgi import get_wsgi_application
from awx import prepare_env
from awx import __version__ as tower_version

"""
WSGI config for AWX project.

It exposes the WSGI callable as a module-level variable named ``application``.

For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""

# Prepare the AWX environment.
prepare_env()

logger = logging.getLogger('awx.main.models.jobs')

try:
    # NOTE(review): fd is never closed; a `with open(...)` block would release
    # the descriptor even when the read or comparison raises.
    fd = open("/var/lib/awx/.tower_version", "r")
    if fd.read().strip() != tower_version:
        raise Exception()
except Exception:
    logger.error("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")
    raise Exception("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")

# Return the default Django WSGI application.
application = get_wsgi_application()
|
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.
import logging

from awx import __version__ as tower_version

# Prepare the AWX environment.
# This must happen before any Django import so that settings are configured
# (the reason this commit moved prepare_env() above get_wsgi_application).
from awx import prepare_env
prepare_env()

from django.core.wsgi import get_wsgi_application

"""
WSGI config for AWX project.

It exposes the WSGI callable as a module-level variable named ``application``.

For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""

logger = logging.getLogger('awx.main.models.jobs')

try:
    # FIX: use a context manager so the version file is always closed
    # (the previous code leaked the file descriptor).
    with open("/var/lib/awx/.tower_version", "r") as fd:
        if fd.read().strip() != tower_version:
            raise Exception()
except Exception:
    logger.error("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")
    raise Exception("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")

# Return the default Django WSGI application.
application = get_wsgi_application()
|
Fix import error by calling prepare_env first
|
Fix import error by calling prepare_env first
|
Python
|
apache-2.0
|
wwitzel3/awx,wwitzel3/awx,wwitzel3/awx,snahelou/awx,snahelou/awx,snahelou/awx,wwitzel3/awx,snahelou/awx
|
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.
import logging
from django.core.wsgi import get_wsgi_application
from awx import prepare_env
from awx import __version__ as tower_version
"""
WSGI config for AWX project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
# Prepare the AWX environment.
prepare_env()
logger = logging.getLogger('awx.main.models.jobs')
try:
fd = open("/var/lib/awx/.tower_version", "r")
if fd.read().strip() != tower_version:
raise Exception()
except Exception:
logger.error("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")
raise Exception("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")
# Return the default Django WSGI application.
application = get_wsgi_application()
Fix import error by calling prepare_env first
|
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.
import logging
from awx import __version__ as tower_version
# Prepare the AWX environment.
from awx import prepare_env
prepare_env()
from django.core.wsgi import get_wsgi_application
"""
WSGI config for AWX project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
logger = logging.getLogger('awx.main.models.jobs')
try:
fd = open("/var/lib/awx/.tower_version", "r")
if fd.read().strip() != tower_version:
raise Exception()
except Exception:
logger.error("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")
raise Exception("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")
# Return the default Django WSGI application.
application = get_wsgi_application()
|
<commit_before># Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.
import logging
from django.core.wsgi import get_wsgi_application
from awx import prepare_env
from awx import __version__ as tower_version
"""
WSGI config for AWX project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
# Prepare the AWX environment.
prepare_env()
logger = logging.getLogger('awx.main.models.jobs')
try:
fd = open("/var/lib/awx/.tower_version", "r")
if fd.read().strip() != tower_version:
raise Exception()
except Exception:
logger.error("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")
raise Exception("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")
# Return the default Django WSGI application.
application = get_wsgi_application()
<commit_msg>Fix import error by calling prepare_env first<commit_after>
|
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.
import logging
from awx import __version__ as tower_version
# Prepare the AWX environment.
from awx import prepare_env
prepare_env()
from django.core.wsgi import get_wsgi_application
"""
WSGI config for AWX project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
logger = logging.getLogger('awx.main.models.jobs')
try:
fd = open("/var/lib/awx/.tower_version", "r")
if fd.read().strip() != tower_version:
raise Exception()
except Exception:
logger.error("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")
raise Exception("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")
# Return the default Django WSGI application.
application = get_wsgi_application()
|
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.
import logging
from django.core.wsgi import get_wsgi_application
from awx import prepare_env
from awx import __version__ as tower_version
"""
WSGI config for AWX project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
# Prepare the AWX environment.
prepare_env()
logger = logging.getLogger('awx.main.models.jobs')
try:
fd = open("/var/lib/awx/.tower_version", "r")
if fd.read().strip() != tower_version:
raise Exception()
except Exception:
logger.error("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")
raise Exception("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")
# Return the default Django WSGI application.
application = get_wsgi_application()
Fix import error by calling prepare_env first
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.
import logging
from awx import __version__ as tower_version
# Prepare the AWX environment.
from awx import prepare_env
prepare_env()
from django.core.wsgi import get_wsgi_application
"""
WSGI config for AWX project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
logger = logging.getLogger('awx.main.models.jobs')
try:
fd = open("/var/lib/awx/.tower_version", "r")
if fd.read().strip() != tower_version:
raise Exception()
except Exception:
logger.error("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")
raise Exception("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")
# Return the default Django WSGI application.
application = get_wsgi_application()
|
<commit_before># Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.
import logging
from django.core.wsgi import get_wsgi_application
from awx import prepare_env
from awx import __version__ as tower_version
"""
WSGI config for AWX project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
# Prepare the AWX environment.
prepare_env()
logger = logging.getLogger('awx.main.models.jobs')
try:
fd = open("/var/lib/awx/.tower_version", "r")
if fd.read().strip() != tower_version:
raise Exception()
except Exception:
logger.error("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")
raise Exception("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")
# Return the default Django WSGI application.
application = get_wsgi_application()
<commit_msg>Fix import error by calling prepare_env first<commit_after># Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.
import logging
from awx import __version__ as tower_version
# Prepare the AWX environment.
from awx import prepare_env
prepare_env()
from django.core.wsgi import get_wsgi_application
"""
WSGI config for AWX project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
logger = logging.getLogger('awx.main.models.jobs')
try:
fd = open("/var/lib/awx/.tower_version", "r")
if fd.read().strip() != tower_version:
raise Exception()
except Exception:
logger.error("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")
raise Exception("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")
# Return the default Django WSGI application.
application = get_wsgi_application()
|
907d8cdd8aa7f27ffb4783155b2621b7903c322b
|
osgtest/tests/test_82_cvmfs.py
|
osgtest/tests/test_82_cvmfs.py
|
import os
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.osgunittest as osgunittest
import unittest
class TestStopCvmfs(osgunittest.OSGTestCase):
    def test_01_stop_cvmfs(self):
        """Stop the CVMFS client and restore the config files setup modified."""
        core.skip_ok_unless_installed('cvmfs')
        # NOTE(review): this compares the literal list ['cvmfs.started-server']
        # to False, which is always False, so the skip can never trigger;
        # presumably it should consult core.state — verify intent.
        self.skip_ok_if(['cvmfs.started-server'] == False, 'did not start server')
        # CVMFS before 2.1 ships an init script; 2.1+ uses cvmfs_config.
        if core.state['cvmfs.version'] < ('2', '1'):
            command = ('service', 'cvmfs', 'stop')
        else:
            command = ('cvmfs_config', 'umount')
        stdout, _, fail = core.check_system(command, 'Stop Cvmfs server')
        self.assert_(stdout.find('FAILED') == -1, fail)
        # Put back the config files that were backed up under the "cvmfs" tag.
        files.restore("/etc/fuse.conf","cvmfs")
        files.restore("/etc/auto.master","cvmfs")
        files.restore("/etc/cvmfs/default.local","cvmfs")
        files.restore("/etc/cvmfs/domain.d/cern.ch.local","cvmfs")
|
import os
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.osgunittest as osgunittest
import unittest
class TestStopCvmfs(osgunittest.OSGTestCase):
    def test_01_stop_cvmfs(self):
        """Stop the CVMFS client and restore the config files setup modified."""
        core.skip_ok_unless_installed('cvmfs')
        # FIX: the original compared the literal list ['cvmfs.started-server']
        # to False, which is always False, so the skip never fired. Consult
        # the recorded state instead.
        self.skip_ok_if(core.state.get('cvmfs.started-server') == False, 'did not start server')
        # CVMFS before 2.1 ships an init script; 2.1+ uses cvmfs_config.
        if core.state['cvmfs.version'] < ('2', '1'):
            command = ('service', 'cvmfs', 'stop')
        else:
            command = ('cvmfs_config', 'umount')
        stdout, _, fail = core.check_system(command, 'Stop Cvmfs server')
        self.assert_(stdout.find('FAILED') == -1, fail)
        # Restart autofs to bring network filesystems back (specifically
        # homedirs on el5 fermicloud vms)
        if core.state['cvmfs.version'] >= ('2', '1'):
            stdout, _, fail = core.check_system(('service', 'autofs', 'restart'), 'Restart autofs')
            self.assert_(stdout.find('FAILED') == -1, fail)
        # Put back the config files that were backed up under the "cvmfs" tag.
        files.restore("/etc/fuse.conf","cvmfs")
        files.restore("/etc/auto.master","cvmfs")
        files.restore("/etc/cvmfs/default.local","cvmfs")
        files.restore("/etc/cvmfs/domain.d/cern.ch.local","cvmfs")
|
Make cvmfs test cleanup restart autofs so fermicloud sl5 homedirs don't go away.
|
Make cvmfs test cleanup restart autofs so fermicloud sl5 homedirs don't go away.
git-svn-id: 884a03e47e2adb735d896e55bb5ad6bc3421ba19@17187 4e558342-562e-0410-864c-e07659590f8c
|
Python
|
apache-2.0
|
efajardo/osg-test,efajardo/osg-test
|
import os
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.osgunittest as osgunittest
import unittest
class TestStopCvmfs(osgunittest.OSGTestCase):
def test_01_stop_cvmfs(self):
core.skip_ok_unless_installed('cvmfs')
self.skip_ok_if(['cvmfs.started-server'] == False, 'did not start server')
if core.state['cvmfs.version'] < ('2', '1'):
command = ('service', 'cvmfs', 'stop')
else:
command = ('cvmfs_config', 'umount')
stdout, _, fail = core.check_system(command, 'Stop Cvmfs server')
self.assert_(stdout.find('FAILED') == -1, fail)
files.restore("/etc/fuse.conf","cvmfs")
files.restore("/etc/auto.master","cvmfs")
files.restore("/etc/cvmfs/default.local","cvmfs")
files.restore("/etc/cvmfs/domain.d/cern.ch.local","cvmfs")
Make cvmfs test cleanup restart autofs so fermicloud sl5 homedirs don't go away.
git-svn-id: 884a03e47e2adb735d896e55bb5ad6bc3421ba19@17187 4e558342-562e-0410-864c-e07659590f8c
|
import os
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.osgunittest as osgunittest
import unittest
class TestStopCvmfs(osgunittest.OSGTestCase):
def test_01_stop_cvmfs(self):
core.skip_ok_unless_installed('cvmfs')
self.skip_ok_if(['cvmfs.started-server'] == False, 'did not start server')
if core.state['cvmfs.version'] < ('2', '1'):
command = ('service', 'cvmfs', 'stop')
else:
command = ('cvmfs_config', 'umount')
stdout, _, fail = core.check_system(command, 'Stop Cvmfs server')
self.assert_(stdout.find('FAILED') == -1, fail)
# Restart autofs to bring network filesystems back (specifically
# homedirs on el5 fermicloud vms)
if core.state['cvmfs.version'] >= ('2', '1'):
stdout, _, fail = core.check_system(('service', 'autofs', 'restart'), 'Restart autofs')
self.assert_(stdout.find('FAILED') == -1, fail)
files.restore("/etc/fuse.conf","cvmfs")
files.restore("/etc/auto.master","cvmfs")
files.restore("/etc/cvmfs/default.local","cvmfs")
files.restore("/etc/cvmfs/domain.d/cern.ch.local","cvmfs")
|
<commit_before>import os
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.osgunittest as osgunittest
import unittest
class TestStopCvmfs(osgunittest.OSGTestCase):
def test_01_stop_cvmfs(self):
core.skip_ok_unless_installed('cvmfs')
self.skip_ok_if(['cvmfs.started-server'] == False, 'did not start server')
if core.state['cvmfs.version'] < ('2', '1'):
command = ('service', 'cvmfs', 'stop')
else:
command = ('cvmfs_config', 'umount')
stdout, _, fail = core.check_system(command, 'Stop Cvmfs server')
self.assert_(stdout.find('FAILED') == -1, fail)
files.restore("/etc/fuse.conf","cvmfs")
files.restore("/etc/auto.master","cvmfs")
files.restore("/etc/cvmfs/default.local","cvmfs")
files.restore("/etc/cvmfs/domain.d/cern.ch.local","cvmfs")
<commit_msg>Make cvmfs test cleanup restart autofs so fermicloud sl5 homedirs don't go away.
git-svn-id: 884a03e47e2adb735d896e55bb5ad6bc3421ba19@17187 4e558342-562e-0410-864c-e07659590f8c<commit_after>
|
import os
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.osgunittest as osgunittest
import unittest
class TestStopCvmfs(osgunittest.OSGTestCase):
def test_01_stop_cvmfs(self):
core.skip_ok_unless_installed('cvmfs')
self.skip_ok_if(['cvmfs.started-server'] == False, 'did not start server')
if core.state['cvmfs.version'] < ('2', '1'):
command = ('service', 'cvmfs', 'stop')
else:
command = ('cvmfs_config', 'umount')
stdout, _, fail = core.check_system(command, 'Stop Cvmfs server')
self.assert_(stdout.find('FAILED') == -1, fail)
# Restart autofs to bring network filesystems back (specifically
# homedirs on el5 fermicloud vms)
if core.state['cvmfs.version'] >= ('2', '1'):
stdout, _, fail = core.check_system(('service', 'autofs', 'restart'), 'Restart autofs')
self.assert_(stdout.find('FAILED') == -1, fail)
files.restore("/etc/fuse.conf","cvmfs")
files.restore("/etc/auto.master","cvmfs")
files.restore("/etc/cvmfs/default.local","cvmfs")
files.restore("/etc/cvmfs/domain.d/cern.ch.local","cvmfs")
|
import os
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.osgunittest as osgunittest
import unittest
class TestStopCvmfs(osgunittest.OSGTestCase):
def test_01_stop_cvmfs(self):
core.skip_ok_unless_installed('cvmfs')
self.skip_ok_if(['cvmfs.started-server'] == False, 'did not start server')
if core.state['cvmfs.version'] < ('2', '1'):
command = ('service', 'cvmfs', 'stop')
else:
command = ('cvmfs_config', 'umount')
stdout, _, fail = core.check_system(command, 'Stop Cvmfs server')
self.assert_(stdout.find('FAILED') == -1, fail)
files.restore("/etc/fuse.conf","cvmfs")
files.restore("/etc/auto.master","cvmfs")
files.restore("/etc/cvmfs/default.local","cvmfs")
files.restore("/etc/cvmfs/domain.d/cern.ch.local","cvmfs")
Make cvmfs test cleanup restart autofs so fermicloud sl5 homedirs don't go away.
git-svn-id: 884a03e47e2adb735d896e55bb5ad6bc3421ba19@17187 4e558342-562e-0410-864c-e07659590f8c
import os
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.osgunittest as osgunittest
import unittest
class TestStopCvmfs(osgunittest.OSGTestCase):
def test_01_stop_cvmfs(self):
core.skip_ok_unless_installed('cvmfs')
self.skip_ok_if(['cvmfs.started-server'] == False, 'did not start server')
if core.state['cvmfs.version'] < ('2', '1'):
command = ('service', 'cvmfs', 'stop')
else:
command = ('cvmfs_config', 'umount')
stdout, _, fail = core.check_system(command, 'Stop Cvmfs server')
self.assert_(stdout.find('FAILED') == -1, fail)
# Restart autofs to bring network filesystems back (specifically
# homedirs on el5 fermicloud vms)
if core.state['cvmfs.version'] >= ('2', '1'):
stdout, _, fail = core.check_system(('service', 'autofs', 'restart'), 'Restart autofs')
self.assert_(stdout.find('FAILED') == -1, fail)
files.restore("/etc/fuse.conf","cvmfs")
files.restore("/etc/auto.master","cvmfs")
files.restore("/etc/cvmfs/default.local","cvmfs")
files.restore("/etc/cvmfs/domain.d/cern.ch.local","cvmfs")
|
<commit_before>import os
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.osgunittest as osgunittest
import unittest
class TestStopCvmfs(osgunittest.OSGTestCase):
def test_01_stop_cvmfs(self):
core.skip_ok_unless_installed('cvmfs')
self.skip_ok_if(['cvmfs.started-server'] == False, 'did not start server')
if core.state['cvmfs.version'] < ('2', '1'):
command = ('service', 'cvmfs', 'stop')
else:
command = ('cvmfs_config', 'umount')
stdout, _, fail = core.check_system(command, 'Stop Cvmfs server')
self.assert_(stdout.find('FAILED') == -1, fail)
files.restore("/etc/fuse.conf","cvmfs")
files.restore("/etc/auto.master","cvmfs")
files.restore("/etc/cvmfs/default.local","cvmfs")
files.restore("/etc/cvmfs/domain.d/cern.ch.local","cvmfs")
<commit_msg>Make cvmfs test cleanup restart autofs so fermicloud sl5 homedirs don't go away.
git-svn-id: 884a03e47e2adb735d896e55bb5ad6bc3421ba19@17187 4e558342-562e-0410-864c-e07659590f8c<commit_after>import os
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.osgunittest as osgunittest
import unittest
class TestStopCvmfs(osgunittest.OSGTestCase):
def test_01_stop_cvmfs(self):
core.skip_ok_unless_installed('cvmfs')
self.skip_ok_if(['cvmfs.started-server'] == False, 'did not start server')
if core.state['cvmfs.version'] < ('2', '1'):
command = ('service', 'cvmfs', 'stop')
else:
command = ('cvmfs_config', 'umount')
stdout, _, fail = core.check_system(command, 'Stop Cvmfs server')
self.assert_(stdout.find('FAILED') == -1, fail)
# Restart autofs to bring network filesystems back (specifically
# homedirs on el5 fermicloud vms)
if core.state['cvmfs.version'] >= ('2', '1'):
stdout, _, fail = core.check_system(('service', 'autofs', 'restart'), 'Restart autofs')
self.assert_(stdout.find('FAILED') == -1, fail)
files.restore("/etc/fuse.conf","cvmfs")
files.restore("/etc/auto.master","cvmfs")
files.restore("/etc/cvmfs/default.local","cvmfs")
files.restore("/etc/cvmfs/domain.d/cern.ch.local","cvmfs")
|
a75a725af141762b25a77522b43d6e241643baa6
|
medical_insurance/models/medical_patient.py
|
medical_insurance/models/medical_patient.py
|
# -*- coding: utf-8 -*-
# © 2015-TODAY LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import fields, models
class MedicalPatient(models.Model):
_inherit = 'medical.patient'
insurance_plan_ids = fields.Many2many(
string='Insurance Plans',
comodel_name='medical.insurance.plan',
help='Past & Present Insurance Plans',
)
|
# -*- coding: utf-8 -*-
# © 2015-TODAY LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import fields, models
class MedicalPatient(models.Model):
_inherit = 'medical.patient'
insurance_plan_ids = fields.One2many(
string='Insurance Plans',
comodel_name='medical.insurance.plan',
inverse_name='patient_id',
help='Past & Present Insurance Plans',
)
|
Fix incorrect relation in medical_insurance
|
Fix incorrect relation in medical_insurance
|
Python
|
agpl-3.0
|
laslabs/vertical-medical,laslabs/vertical-medical
|
# -*- coding: utf-8 -*-
# © 2015-TODAY LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import fields, models
class MedicalPatient(models.Model):
_inherit = 'medical.patient'
insurance_plan_ids = fields.Many2many(
string='Insurance Plans',
comodel_name='medical.insurance.plan',
help='Past & Present Insurance Plans',
)
Fix incorrect relation in medical_insurance
|
# -*- coding: utf-8 -*-
# © 2015-TODAY LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import fields, models
class MedicalPatient(models.Model):
_inherit = 'medical.patient'
insurance_plan_ids = fields.One2many(
string='Insurance Plans',
comodel_name='medical.insurance.plan',
inverse_name='patient_id',
help='Past & Present Insurance Plans',
)
|
<commit_before># -*- coding: utf-8 -*-
# © 2015-TODAY LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import fields, models
class MedicalPatient(models.Model):
_inherit = 'medical.patient'
insurance_plan_ids = fields.Many2many(
string='Insurance Plans',
comodel_name='medical.insurance.plan',
help='Past & Present Insurance Plans',
)
<commit_msg>Fix incorrect relation in medical_insurance<commit_after>
|
# -*- coding: utf-8 -*-
# © 2015-TODAY LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import fields, models
class MedicalPatient(models.Model):
_inherit = 'medical.patient'
insurance_plan_ids = fields.One2many(
string='Insurance Plans',
comodel_name='medical.insurance.plan',
inverse_name='patient_id',
help='Past & Present Insurance Plans',
)
|
# -*- coding: utf-8 -*-
# © 2015-TODAY LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import fields, models
class MedicalPatient(models.Model):
_inherit = 'medical.patient'
insurance_plan_ids = fields.Many2many(
string='Insurance Plans',
comodel_name='medical.insurance.plan',
help='Past & Present Insurance Plans',
)
Fix incorrect relation in medical_insurance# -*- coding: utf-8 -*-
# © 2015-TODAY LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import fields, models
class MedicalPatient(models.Model):
_inherit = 'medical.patient'
insurance_plan_ids = fields.One2many(
string='Insurance Plans',
comodel_name='medical.insurance.plan',
inverse_name='patient_id',
help='Past & Present Insurance Plans',
)
|
<commit_before># -*- coding: utf-8 -*-
# © 2015-TODAY LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import fields, models
class MedicalPatient(models.Model):
_inherit = 'medical.patient'
insurance_plan_ids = fields.Many2many(
string='Insurance Plans',
comodel_name='medical.insurance.plan',
help='Past & Present Insurance Plans',
)
<commit_msg>Fix incorrect relation in medical_insurance<commit_after># -*- coding: utf-8 -*-
# © 2015-TODAY LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import fields, models
class MedicalPatient(models.Model):
_inherit = 'medical.patient'
insurance_plan_ids = fields.One2many(
string='Insurance Plans',
comodel_name='medical.insurance.plan',
inverse_name='patient_id',
help='Past & Present Insurance Plans',
)
|
178fe53c907b8857865236d9066d7f6f3cf3d9a7
|
scripts/authentication/editors.py
|
scripts/authentication/editors.py
|
#!/usr/bin/env python3
"""Manually add editors users"""
import redis
import json
import uuid
import datetime
if __name__ == "__main__":
conn = redis.Redis(decode_responses=True)
group_uid = ""
for guid in conn.hgetall("groups"):
group_details = json.loads(conn.hget("groups", guid))
if group_details.get("name") == "editors":
group_uid = guid
break
if not group_uid:
group_uid = str(uuid.uuid4())
timestamp = datetime.datetime.utcnow().strftime('%b %d %Y %I:%M%p')
conn.hset(
"groups",
group_uid,
json.dumps(
{
"name": "editors",
"admins": [],
"members": ["8ad942fe-490d-453e-bd37-56f252e41603"],
"changed_timestamp": timestamp,
}))
for resource in conn.hgetall("resources"):
_resource = json.loads(conn.hget("resources", resource))
_resource["default_mask"] = {
'data': 'view',
'metadata': 'view',
'admin': 'not-admin',
}
_resource["group_masks"] = {
group_uid: {
'metadata': 'edit',
'data': 'edit',
'admin': 'edit-admins',
}}
conn.hset("resources", resource, json.dumps(_resource))
print("Done adding editor's group to resources!")
|
#!/usr/bin/env python3
"""Manually add editors users"""
import redis
import json
import uuid
import datetime
if __name__ == "__main__":
conn = redis.Redis(decode_responses=True)
group_uid = ""
for guid in conn.hgetall("groups"):
group_details = json.loads(conn.hget("groups", guid))
if group_details.get("name") == "editors":
group_uid = guid
break
if not group_uid:
group_uid = str(uuid.uuid4())
timestamp = datetime.datetime.utcnow().strftime('%b %d %Y %I:%M%p')
conn.hset(
"groups",
group_uid,
json.dumps(
{
"name": "editors",
"admins": [],
"members": ["8ad942fe-490d-453e-bd37-56f252e41603"],
"created_timestamp": timestamp,
"changed_timestamp": timestamp,
}))
for resource in conn.hgetall("resources"):
_resource = json.loads(conn.hget("resources", resource))
_resource["default_mask"] = {
'data': 'view',
'metadata': 'view',
'admin': 'not-admin',
}
_resource["group_masks"] = {
group_uid: {
'metadata': 'edit',
'data': 'edit',
'admin': 'edit-admins',
}}
conn.hset("resources", resource, json.dumps(_resource))
print("Done adding editor's group to resources!")
|
Add created_timestamp when creating a new group
|
Add created_timestamp when creating a new group
|
Python
|
agpl-3.0
|
genenetwork/genenetwork2,pjotrp/genenetwork2,genenetwork/genenetwork2,pjotrp/genenetwork2,pjotrp/genenetwork2,pjotrp/genenetwork2,pjotrp/genenetwork2,genenetwork/genenetwork2,genenetwork/genenetwork2
|
#!/usr/bin/env python3
"""Manually add editors users"""
import redis
import json
import uuid
import datetime
if __name__ == "__main__":
conn = redis.Redis(decode_responses=True)
group_uid = ""
for guid in conn.hgetall("groups"):
group_details = json.loads(conn.hget("groups", guid))
if group_details.get("name") == "editors":
group_uid = guid
break
if not group_uid:
group_uid = str(uuid.uuid4())
timestamp = datetime.datetime.utcnow().strftime('%b %d %Y %I:%M%p')
conn.hset(
"groups",
group_uid,
json.dumps(
{
"name": "editors",
"admins": [],
"members": ["8ad942fe-490d-453e-bd37-56f252e41603"],
"changed_timestamp": timestamp,
}))
for resource in conn.hgetall("resources"):
_resource = json.loads(conn.hget("resources", resource))
_resource["default_mask"] = {
'data': 'view',
'metadata': 'view',
'admin': 'not-admin',
}
_resource["group_masks"] = {
group_uid: {
'metadata': 'edit',
'data': 'edit',
'admin': 'edit-admins',
}}
conn.hset("resources", resource, json.dumps(_resource))
print("Done adding editor's group to resources!")
Add created_timestamp when creating a new group
|
#!/usr/bin/env python3
"""Manually add editors users"""
import redis
import json
import uuid
import datetime
if __name__ == "__main__":
conn = redis.Redis(decode_responses=True)
group_uid = ""
for guid in conn.hgetall("groups"):
group_details = json.loads(conn.hget("groups", guid))
if group_details.get("name") == "editors":
group_uid = guid
break
if not group_uid:
group_uid = str(uuid.uuid4())
timestamp = datetime.datetime.utcnow().strftime('%b %d %Y %I:%M%p')
conn.hset(
"groups",
group_uid,
json.dumps(
{
"name": "editors",
"admins": [],
"members": ["8ad942fe-490d-453e-bd37-56f252e41603"],
"created_timestamp": timestamp,
"changed_timestamp": timestamp,
}))
for resource in conn.hgetall("resources"):
_resource = json.loads(conn.hget("resources", resource))
_resource["default_mask"] = {
'data': 'view',
'metadata': 'view',
'admin': 'not-admin',
}
_resource["group_masks"] = {
group_uid: {
'metadata': 'edit',
'data': 'edit',
'admin': 'edit-admins',
}}
conn.hset("resources", resource, json.dumps(_resource))
print("Done adding editor's group to resources!")
|
<commit_before>#!/usr/bin/env python3
"""Manually add editors users"""
import redis
import json
import uuid
import datetime
if __name__ == "__main__":
conn = redis.Redis(decode_responses=True)
group_uid = ""
for guid in conn.hgetall("groups"):
group_details = json.loads(conn.hget("groups", guid))
if group_details.get("name") == "editors":
group_uid = guid
break
if not group_uid:
group_uid = str(uuid.uuid4())
timestamp = datetime.datetime.utcnow().strftime('%b %d %Y %I:%M%p')
conn.hset(
"groups",
group_uid,
json.dumps(
{
"name": "editors",
"admins": [],
"members": ["8ad942fe-490d-453e-bd37-56f252e41603"],
"changed_timestamp": timestamp,
}))
for resource in conn.hgetall("resources"):
_resource = json.loads(conn.hget("resources", resource))
_resource["default_mask"] = {
'data': 'view',
'metadata': 'view',
'admin': 'not-admin',
}
_resource["group_masks"] = {
group_uid: {
'metadata': 'edit',
'data': 'edit',
'admin': 'edit-admins',
}}
conn.hset("resources", resource, json.dumps(_resource))
print("Done adding editor's group to resources!")
<commit_msg>Add created_timestamp when creating a new group<commit_after>
|
#!/usr/bin/env python3
"""Manually add editors users"""
import redis
import json
import uuid
import datetime
if __name__ == "__main__":
conn = redis.Redis(decode_responses=True)
group_uid = ""
for guid in conn.hgetall("groups"):
group_details = json.loads(conn.hget("groups", guid))
if group_details.get("name") == "editors":
group_uid = guid
break
if not group_uid:
group_uid = str(uuid.uuid4())
timestamp = datetime.datetime.utcnow().strftime('%b %d %Y %I:%M%p')
conn.hset(
"groups",
group_uid,
json.dumps(
{
"name": "editors",
"admins": [],
"members": ["8ad942fe-490d-453e-bd37-56f252e41603"],
"created_timestamp": timestamp,
"changed_timestamp": timestamp,
}))
for resource in conn.hgetall("resources"):
_resource = json.loads(conn.hget("resources", resource))
_resource["default_mask"] = {
'data': 'view',
'metadata': 'view',
'admin': 'not-admin',
}
_resource["group_masks"] = {
group_uid: {
'metadata': 'edit',
'data': 'edit',
'admin': 'edit-admins',
}}
conn.hset("resources", resource, json.dumps(_resource))
print("Done adding editor's group to resources!")
|
#!/usr/bin/env python3
"""Manually add editors users"""
import redis
import json
import uuid
import datetime
if __name__ == "__main__":
conn = redis.Redis(decode_responses=True)
group_uid = ""
for guid in conn.hgetall("groups"):
group_details = json.loads(conn.hget("groups", guid))
if group_details.get("name") == "editors":
group_uid = guid
break
if not group_uid:
group_uid = str(uuid.uuid4())
timestamp = datetime.datetime.utcnow().strftime('%b %d %Y %I:%M%p')
conn.hset(
"groups",
group_uid,
json.dumps(
{
"name": "editors",
"admins": [],
"members": ["8ad942fe-490d-453e-bd37-56f252e41603"],
"changed_timestamp": timestamp,
}))
for resource in conn.hgetall("resources"):
_resource = json.loads(conn.hget("resources", resource))
_resource["default_mask"] = {
'data': 'view',
'metadata': 'view',
'admin': 'not-admin',
}
_resource["group_masks"] = {
group_uid: {
'metadata': 'edit',
'data': 'edit',
'admin': 'edit-admins',
}}
conn.hset("resources", resource, json.dumps(_resource))
print("Done adding editor's group to resources!")
Add created_timestamp when creating a new group#!/usr/bin/env python3
"""Manually add editors users"""
import redis
import json
import uuid
import datetime
if __name__ == "__main__":
conn = redis.Redis(decode_responses=True)
group_uid = ""
for guid in conn.hgetall("groups"):
group_details = json.loads(conn.hget("groups", guid))
if group_details.get("name") == "editors":
group_uid = guid
break
if not group_uid:
group_uid = str(uuid.uuid4())
timestamp = datetime.datetime.utcnow().strftime('%b %d %Y %I:%M%p')
conn.hset(
"groups",
group_uid,
json.dumps(
{
"name": "editors",
"admins": [],
"members": ["8ad942fe-490d-453e-bd37-56f252e41603"],
"created_timestamp": timestamp,
"changed_timestamp": timestamp,
}))
for resource in conn.hgetall("resources"):
_resource = json.loads(conn.hget("resources", resource))
_resource["default_mask"] = {
'data': 'view',
'metadata': 'view',
'admin': 'not-admin',
}
_resource["group_masks"] = {
group_uid: {
'metadata': 'edit',
'data': 'edit',
'admin': 'edit-admins',
}}
conn.hset("resources", resource, json.dumps(_resource))
print("Done adding editor's group to resources!")
|
<commit_before>#!/usr/bin/env python3
"""Manually add editors users"""
import redis
import json
import uuid
import datetime
if __name__ == "__main__":
conn = redis.Redis(decode_responses=True)
group_uid = ""
for guid in conn.hgetall("groups"):
group_details = json.loads(conn.hget("groups", guid))
if group_details.get("name") == "editors":
group_uid = guid
break
if not group_uid:
group_uid = str(uuid.uuid4())
timestamp = datetime.datetime.utcnow().strftime('%b %d %Y %I:%M%p')
conn.hset(
"groups",
group_uid,
json.dumps(
{
"name": "editors",
"admins": [],
"members": ["8ad942fe-490d-453e-bd37-56f252e41603"],
"changed_timestamp": timestamp,
}))
for resource in conn.hgetall("resources"):
_resource = json.loads(conn.hget("resources", resource))
_resource["default_mask"] = {
'data': 'view',
'metadata': 'view',
'admin': 'not-admin',
}
_resource["group_masks"] = {
group_uid: {
'metadata': 'edit',
'data': 'edit',
'admin': 'edit-admins',
}}
conn.hset("resources", resource, json.dumps(_resource))
print("Done adding editor's group to resources!")
<commit_msg>Add created_timestamp when creating a new group<commit_after>#!/usr/bin/env python3
"""Manually add editors users"""
import redis
import json
import uuid
import datetime
if __name__ == "__main__":
conn = redis.Redis(decode_responses=True)
group_uid = ""
for guid in conn.hgetall("groups"):
group_details = json.loads(conn.hget("groups", guid))
if group_details.get("name") == "editors":
group_uid = guid
break
if not group_uid:
group_uid = str(uuid.uuid4())
timestamp = datetime.datetime.utcnow().strftime('%b %d %Y %I:%M%p')
conn.hset(
"groups",
group_uid,
json.dumps(
{
"name": "editors",
"admins": [],
"members": ["8ad942fe-490d-453e-bd37-56f252e41603"],
"created_timestamp": timestamp,
"changed_timestamp": timestamp,
}))
for resource in conn.hgetall("resources"):
_resource = json.loads(conn.hget("resources", resource))
_resource["default_mask"] = {
'data': 'view',
'metadata': 'view',
'admin': 'not-admin',
}
_resource["group_masks"] = {
group_uid: {
'metadata': 'edit',
'data': 'edit',
'admin': 'edit-admins',
}}
conn.hset("resources", resource, json.dumps(_resource))
print("Done adding editor's group to resources!")
|
92b9b33a13b093d1a9bf6ac22b000405a3403234
|
chandra_aca/tests/test_residuals.py
|
chandra_aca/tests/test_residuals.py
|
import numpy as np
from ..centroid_resid import get_obs_slot_residuals
def test_multi_ai():
# obsid 15175 has two aspect intervals
dyags, dzags = get_obs_slot_residuals(15175, 4)
assert np.all(np.abs(dyags) < 3)
assert np.all(np.abs(dzags) < 6)
def test_obc_centroids():
dyags, dzags = get_obs_slot_residuals(15175, 5, centroid_source='obc')
assert np.all(np.abs(dyags) < 6)
assert np.all(np.abs(dzags) < 3)
def test_obc():
dyags, dzags = get_obs_slot_residuals(15175, 6, att_source='obc', centroid_source='obc')
assert np.all(np.abs(dyags) < 4.5)
assert np.all(np.abs(dzags) < 5.5)
|
import numpy as np
from ..centroid_resid import get_obs_slot_residuals
def test_multi_ai():
# obsid 15175 has two aspect intervals
dyags, yt, dzags, zt = get_obs_slot_residuals(15175, 4)
assert np.all(np.abs(dyags) < 3)
assert np.all(np.abs(dzags) < 6)
def test_obc_centroids():
dyags, yt, dzags, zt = get_obs_slot_residuals(15175, 5, centroid_source='obc')
assert np.all(np.abs(dyags) < 6)
assert np.all(np.abs(dzags) < 3)
def test_obc():
dyags, yt, dzags, zt = get_obs_slot_residuals(15175, 6, att_source='obc', centroid_source='obc')
assert np.all(np.abs(dyags) < 4.5)
assert np.all(np.abs(dzags) < 5.5)
|
Update tests using helper method for new API
|
Update tests using helper method for new API
|
Python
|
bsd-2-clause
|
sot/chandra_aca,sot/chandra_aca
|
import numpy as np
from ..centroid_resid import get_obs_slot_residuals
def test_multi_ai():
# obsid 15175 has two aspect intervals
dyags, dzags = get_obs_slot_residuals(15175, 4)
assert np.all(np.abs(dyags) < 3)
assert np.all(np.abs(dzags) < 6)
def test_obc_centroids():
dyags, dzags = get_obs_slot_residuals(15175, 5, centroid_source='obc')
assert np.all(np.abs(dyags) < 6)
assert np.all(np.abs(dzags) < 3)
def test_obc():
dyags, dzags = get_obs_slot_residuals(15175, 6, att_source='obc', centroid_source='obc')
assert np.all(np.abs(dyags) < 4.5)
assert np.all(np.abs(dzags) < 5.5)
Update tests using helper method for new API
|
import numpy as np
from ..centroid_resid import get_obs_slot_residuals
def test_multi_ai():
# obsid 15175 has two aspect intervals
dyags, yt, dzags, zt = get_obs_slot_residuals(15175, 4)
assert np.all(np.abs(dyags) < 3)
assert np.all(np.abs(dzags) < 6)
def test_obc_centroids():
dyags, yt, dzags, zt = get_obs_slot_residuals(15175, 5, centroid_source='obc')
assert np.all(np.abs(dyags) < 6)
assert np.all(np.abs(dzags) < 3)
def test_obc():
dyags, yt, dzags, zt = get_obs_slot_residuals(15175, 6, att_source='obc', centroid_source='obc')
assert np.all(np.abs(dyags) < 4.5)
assert np.all(np.abs(dzags) < 5.5)
|
<commit_before>import numpy as np
from ..centroid_resid import get_obs_slot_residuals
def test_multi_ai():
# obsid 15175 has two aspect intervals
dyags, dzags = get_obs_slot_residuals(15175, 4)
assert np.all(np.abs(dyags) < 3)
assert np.all(np.abs(dzags) < 6)
def test_obc_centroids():
dyags, dzags = get_obs_slot_residuals(15175, 5, centroid_source='obc')
assert np.all(np.abs(dyags) < 6)
assert np.all(np.abs(dzags) < 3)
def test_obc():
dyags, dzags = get_obs_slot_residuals(15175, 6, att_source='obc', centroid_source='obc')
assert np.all(np.abs(dyags) < 4.5)
assert np.all(np.abs(dzags) < 5.5)
<commit_msg>Update tests using helper method for new API<commit_after>
|
import numpy as np
from ..centroid_resid import get_obs_slot_residuals
def test_multi_ai():
# obsid 15175 has two aspect intervals
dyags, yt, dzags, zt = get_obs_slot_residuals(15175, 4)
assert np.all(np.abs(dyags) < 3)
assert np.all(np.abs(dzags) < 6)
def test_obc_centroids():
dyags, yt, dzags, zt = get_obs_slot_residuals(15175, 5, centroid_source='obc')
assert np.all(np.abs(dyags) < 6)
assert np.all(np.abs(dzags) < 3)
def test_obc():
dyags, yt, dzags, zt = get_obs_slot_residuals(15175, 6, att_source='obc', centroid_source='obc')
assert np.all(np.abs(dyags) < 4.5)
assert np.all(np.abs(dzags) < 5.5)
|
import numpy as np
from ..centroid_resid import get_obs_slot_residuals
def test_multi_ai():
# obsid 15175 has two aspect intervals
dyags, dzags = get_obs_slot_residuals(15175, 4)
assert np.all(np.abs(dyags) < 3)
assert np.all(np.abs(dzags) < 6)
def test_obc_centroids():
dyags, dzags = get_obs_slot_residuals(15175, 5, centroid_source='obc')
assert np.all(np.abs(dyags) < 6)
assert np.all(np.abs(dzags) < 3)
def test_obc():
dyags, dzags = get_obs_slot_residuals(15175, 6, att_source='obc', centroid_source='obc')
assert np.all(np.abs(dyags) < 4.5)
assert np.all(np.abs(dzags) < 5.5)
Update tests using helper method for new APIimport numpy as np
from ..centroid_resid import get_obs_slot_residuals
def test_multi_ai():
# obsid 15175 has two aspect intervals
dyags, yt, dzags, zt = get_obs_slot_residuals(15175, 4)
assert np.all(np.abs(dyags) < 3)
assert np.all(np.abs(dzags) < 6)
def test_obc_centroids():
dyags, yt, dzags, zt = get_obs_slot_residuals(15175, 5, centroid_source='obc')
assert np.all(np.abs(dyags) < 6)
assert np.all(np.abs(dzags) < 3)
def test_obc():
dyags, yt, dzags, zt = get_obs_slot_residuals(15175, 6, att_source='obc', centroid_source='obc')
assert np.all(np.abs(dyags) < 4.5)
assert np.all(np.abs(dzags) < 5.5)
|
<commit_before>import numpy as np
from ..centroid_resid import get_obs_slot_residuals
def test_multi_ai():
# obsid 15175 has two aspect intervals
dyags, dzags = get_obs_slot_residuals(15175, 4)
assert np.all(np.abs(dyags) < 3)
assert np.all(np.abs(dzags) < 6)
def test_obc_centroids():
dyags, dzags = get_obs_slot_residuals(15175, 5, centroid_source='obc')
assert np.all(np.abs(dyags) < 6)
assert np.all(np.abs(dzags) < 3)
def test_obc():
dyags, dzags = get_obs_slot_residuals(15175, 6, att_source='obc', centroid_source='obc')
assert np.all(np.abs(dyags) < 4.5)
assert np.all(np.abs(dzags) < 5.5)
<commit_msg>Update tests using helper method for new API<commit_after>import numpy as np
from ..centroid_resid import get_obs_slot_residuals
def test_multi_ai():
# obsid 15175 has two aspect intervals
dyags, yt, dzags, zt = get_obs_slot_residuals(15175, 4)
assert np.all(np.abs(dyags) < 3)
assert np.all(np.abs(dzags) < 6)
def test_obc_centroids():
dyags, yt, dzags, zt = get_obs_slot_residuals(15175, 5, centroid_source='obc')
assert np.all(np.abs(dyags) < 6)
assert np.all(np.abs(dzags) < 3)
def test_obc():
dyags, yt, dzags, zt = get_obs_slot_residuals(15175, 6, att_source='obc', centroid_source='obc')
assert np.all(np.abs(dyags) < 4.5)
assert np.all(np.abs(dzags) < 5.5)
|
7f688164ea3d17038758f36c7d640409c3fedbcf
|
python-pscheduler/pscheduler/tests/psas_test.py
|
python-pscheduler/pscheduler/tests/psas_test.py
|
#!/usr/bin/env python3
"""
test for the Psas module.
"""
import unittest
from base_test import PschedTestBase
from pscheduler.psas import as_bulk_resolve
class TestPsas(PschedTestBase):
"""
Psas tests.
"""
def test_bulk_resolve(self):
"""Bulk resolve test"""
ips = [
'8.8.8.8',
'2607:f8b0:4002:c06::67',
'198.6.1.1',
'this-is-not-valid',
]
ret = as_bulk_resolve(ips)
# Do these only if it looks like anything worked at all.
# Otherwise, we probably don't have a network connection.
if [key for key in ret if ret[key] is not None]:
assert(ret.get('this-is-not-valid') is None)
self.assertEqual(
ret.get('8.8.8.8')[0],
15169, 'GOOGLE, US')
self.assertEqual(
ret.get('2607:f8b0:4002:c06::67')[0],
15169)
self.assertEqual(
ret.get('198.6.1.1')[0],
701)
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/env python3
"""
test for the Psas module.
"""
import unittest
from base_test import PschedTestBase
from pscheduler.psas import as_bulk_resolve
class TestPsas(PschedTestBase):
"""
Psas tests.
"""
def test_bulk_resolve(self):
"""Bulk resolve test"""
ips = [
'8.8.8.8',
'2607:f8b0:4002:c06::67',
'198.6.1.1',
'this-is-not-valid',
]
ret = as_bulk_resolve(ips)
# Do these only if it looks like anything worked at all.
# Otherwise, we probably don't have a network connection.
assert(ret.get('this-is-not-valid') is None)
# TODO: These aren't going to be stable forever.
if False:
if [key for key in ret if ret[key] is not None]:
self.assertEqual(
ret.get('8.8.8.8')[0],
15169, 'GOOGLE, US')
self.assertEqual(
ret.get('2607:f8b0:4002:c06::67')[0],
15169)
self.assertEqual(
ret.get('198.6.1.1')[0],
701)
if __name__ == '__main__':
unittest.main()
|
Disable tests that aren't network-stable.
|
Disable tests that aren't network-stable.
|
Python
|
apache-2.0
|
perfsonar/pscheduler,perfsonar/pscheduler,perfsonar/pscheduler,perfsonar/pscheduler
|
#!/usr/bin/env python3
"""
test for the Psas module.
"""
import unittest
from base_test import PschedTestBase
from pscheduler.psas import as_bulk_resolve
class TestPsas(PschedTestBase):
"""
Psas tests.
"""
def test_bulk_resolve(self):
"""Bulk resolve test"""
ips = [
'8.8.8.8',
'2607:f8b0:4002:c06::67',
'198.6.1.1',
'this-is-not-valid',
]
ret = as_bulk_resolve(ips)
# Do these only if it looks like anything worked at all.
# Otherwise, we probably don't have a network connection.
if [key for key in ret if ret[key] is not None]:
assert(ret.get('this-is-not-valid') is None)
self.assertEqual(
ret.get('8.8.8.8')[0],
15169, 'GOOGLE, US')
self.assertEqual(
ret.get('2607:f8b0:4002:c06::67')[0],
15169)
self.assertEqual(
ret.get('198.6.1.1')[0],
701)
if __name__ == '__main__':
unittest.main()
Disable tests that aren't network-stable.
|
#!/usr/bin/env python3
"""
test for the Psas module.
"""
import unittest
from base_test import PschedTestBase
from pscheduler.psas import as_bulk_resolve
class TestPsas(PschedTestBase):
"""
Psas tests.
"""
def test_bulk_resolve(self):
"""Bulk resolve test"""
ips = [
'8.8.8.8',
'2607:f8b0:4002:c06::67',
'198.6.1.1',
'this-is-not-valid',
]
ret = as_bulk_resolve(ips)
# Do these only if it looks like anything worked at all.
# Otherwise, we probably don't have a network connection.
assert(ret.get('this-is-not-valid') is None)
# TODO: These aren't going to be stable forever.
if False:
if [key for key in ret if ret[key] is not None]:
self.assertEqual(
ret.get('8.8.8.8')[0],
15169, 'GOOGLE, US')
self.assertEqual(
ret.get('2607:f8b0:4002:c06::67')[0],
15169)
self.assertEqual(
ret.get('198.6.1.1')[0],
701)
if __name__ == '__main__':
unittest.main()
|
<commit_before>#!/usr/bin/env python3
"""
test for the Psas module.
"""
import unittest
from base_test import PschedTestBase
from pscheduler.psas import as_bulk_resolve
class TestPsas(PschedTestBase):
"""
Psas tests.
"""
def test_bulk_resolve(self):
"""Bulk resolve test"""
ips = [
'8.8.8.8',
'2607:f8b0:4002:c06::67',
'198.6.1.1',
'this-is-not-valid',
]
ret = as_bulk_resolve(ips)
# Do these only if it looks like anything worked at all.
# Otherwise, we probably don't have a network connection.
if [key for key in ret if ret[key] is not None]:
assert(ret.get('this-is-not-valid') is None)
self.assertEqual(
ret.get('8.8.8.8')[0],
15169, 'GOOGLE, US')
self.assertEqual(
ret.get('2607:f8b0:4002:c06::67')[0],
15169)
self.assertEqual(
ret.get('198.6.1.1')[0],
701)
if __name__ == '__main__':
unittest.main()
<commit_msg>Disable tests that aren't network-stable.<commit_after>
|
#!/usr/bin/env python3
"""
test for the Psas module.
"""
import unittest
from base_test import PschedTestBase
from pscheduler.psas import as_bulk_resolve
class TestPsas(PschedTestBase):
"""
Psas tests.
"""
def test_bulk_resolve(self):
"""Bulk resolve test"""
ips = [
'8.8.8.8',
'2607:f8b0:4002:c06::67',
'198.6.1.1',
'this-is-not-valid',
]
ret = as_bulk_resolve(ips)
# Do these only if it looks like anything worked at all.
# Otherwise, we probably don't have a network connection.
assert(ret.get('this-is-not-valid') is None)
# TODO: These aren't going to be stable forever.
if False:
if [key for key in ret if ret[key] is not None]:
self.assertEqual(
ret.get('8.8.8.8')[0],
15169, 'GOOGLE, US')
self.assertEqual(
ret.get('2607:f8b0:4002:c06::67')[0],
15169)
self.assertEqual(
ret.get('198.6.1.1')[0],
701)
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/env python3
"""
test for the Psas module.
"""
import unittest
from base_test import PschedTestBase
from pscheduler.psas import as_bulk_resolve
class TestPsas(PschedTestBase):
"""
Psas tests.
"""
def test_bulk_resolve(self):
"""Bulk resolve test"""
ips = [
'8.8.8.8',
'2607:f8b0:4002:c06::67',
'198.6.1.1',
'this-is-not-valid',
]
ret = as_bulk_resolve(ips)
# Do these only if it looks like anything worked at all.
# Otherwise, we probably don't have a network connection.
if [key for key in ret if ret[key] is not None]:
assert(ret.get('this-is-not-valid') is None)
self.assertEqual(
ret.get('8.8.8.8')[0],
15169, 'GOOGLE, US')
self.assertEqual(
ret.get('2607:f8b0:4002:c06::67')[0],
15169)
self.assertEqual(
ret.get('198.6.1.1')[0],
701)
if __name__ == '__main__':
unittest.main()
Disable tests that aren't network-stable.#!/usr/bin/env python3
"""
test for the Psas module.
"""
import unittest
from base_test import PschedTestBase
from pscheduler.psas import as_bulk_resolve
class TestPsas(PschedTestBase):
"""
Psas tests.
"""
def test_bulk_resolve(self):
"""Bulk resolve test"""
ips = [
'8.8.8.8',
'2607:f8b0:4002:c06::67',
'198.6.1.1',
'this-is-not-valid',
]
ret = as_bulk_resolve(ips)
# Do these only if it looks like anything worked at all.
# Otherwise, we probably don't have a network connection.
assert(ret.get('this-is-not-valid') is None)
# TODO: These aren't going to be stable forever.
if False:
if [key for key in ret if ret[key] is not None]:
self.assertEqual(
ret.get('8.8.8.8')[0],
15169, 'GOOGLE, US')
self.assertEqual(
ret.get('2607:f8b0:4002:c06::67')[0],
15169)
self.assertEqual(
ret.get('198.6.1.1')[0],
701)
if __name__ == '__main__':
unittest.main()
|
<commit_before>#!/usr/bin/env python3
"""
test for the Psas module.
"""
import unittest
from base_test import PschedTestBase
from pscheduler.psas import as_bulk_resolve
class TestPsas(PschedTestBase):
"""
Psas tests.
"""
def test_bulk_resolve(self):
"""Bulk resolve test"""
ips = [
'8.8.8.8',
'2607:f8b0:4002:c06::67',
'198.6.1.1',
'this-is-not-valid',
]
ret = as_bulk_resolve(ips)
# Do these only if it looks like anything worked at all.
# Otherwise, we probably don't have a network connection.
if [key for key in ret if ret[key] is not None]:
assert(ret.get('this-is-not-valid') is None)
self.assertEqual(
ret.get('8.8.8.8')[0],
15169, 'GOOGLE, US')
self.assertEqual(
ret.get('2607:f8b0:4002:c06::67')[0],
15169)
self.assertEqual(
ret.get('198.6.1.1')[0],
701)
if __name__ == '__main__':
unittest.main()
<commit_msg>Disable tests that aren't network-stable.<commit_after>#!/usr/bin/env python3
"""
test for the Psas module.
"""
import unittest
from base_test import PschedTestBase
from pscheduler.psas import as_bulk_resolve
class TestPsas(PschedTestBase):
"""
Psas tests.
"""
def test_bulk_resolve(self):
"""Bulk resolve test"""
ips = [
'8.8.8.8',
'2607:f8b0:4002:c06::67',
'198.6.1.1',
'this-is-not-valid',
]
ret = as_bulk_resolve(ips)
# Do these only if it looks like anything worked at all.
# Otherwise, we probably don't have a network connection.
assert(ret.get('this-is-not-valid') is None)
# TODO: These aren't going to be stable forever.
if False:
if [key for key in ret if ret[key] is not None]:
self.assertEqual(
ret.get('8.8.8.8')[0],
15169, 'GOOGLE, US')
self.assertEqual(
ret.get('2607:f8b0:4002:c06::67')[0],
15169)
self.assertEqual(
ret.get('198.6.1.1')[0],
701)
if __name__ == '__main__':
unittest.main()
|
93d9de9e004896407214c5b67e64cb050bfaa63c
|
curious/utils.py
|
curious/utils.py
|
import time
# for development/debugging
def report_time(f):
def wrap(*args, **kwargs):
t = time.time()
r = f(*args, **kwargs)
print '%s.%s: %.4f' % (f.__module__, f.func_name, time.time()-t)
return r
return wrap
|
from functools import wraps
import time
# for development/debugging
def report_time(f):
@wraps(f)
def wrap(*args, **kwargs):
t = time.time()
r = f(*args, **kwargs)
print '%s.%s: %.4f' % (f.__module__, f.func_name, time.time()-t)
return r
return wrap
|
Add functools wraps to report_time decorator
|
Add functools wraps to report_time decorator
|
Python
|
mit
|
benjiec/curious,benjiec/curious,benjiec/curious
|
import time
# for development/debugging
def report_time(f):
def wrap(*args, **kwargs):
t = time.time()
r = f(*args, **kwargs)
print '%s.%s: %.4f' % (f.__module__, f.func_name, time.time()-t)
return r
return wrap
Add functools wraps to report_time decorator
|
from functools import wraps
import time
# for development/debugging
def report_time(f):
@wraps(f)
def wrap(*args, **kwargs):
t = time.time()
r = f(*args, **kwargs)
print '%s.%s: %.4f' % (f.__module__, f.func_name, time.time()-t)
return r
return wrap
|
<commit_before>import time
# for development/debugging
def report_time(f):
def wrap(*args, **kwargs):
t = time.time()
r = f(*args, **kwargs)
print '%s.%s: %.4f' % (f.__module__, f.func_name, time.time()-t)
return r
return wrap
<commit_msg>Add functools wraps to report_time decorator<commit_after>
|
from functools import wraps
import time
# for development/debugging
def report_time(f):
@wraps(f)
def wrap(*args, **kwargs):
t = time.time()
r = f(*args, **kwargs)
print '%s.%s: %.4f' % (f.__module__, f.func_name, time.time()-t)
return r
return wrap
|
import time
# for development/debugging
def report_time(f):
def wrap(*args, **kwargs):
t = time.time()
r = f(*args, **kwargs)
print '%s.%s: %.4f' % (f.__module__, f.func_name, time.time()-t)
return r
return wrap
Add functools wraps to report_time decoratorfrom functools import wraps
import time
# for development/debugging
def report_time(f):
@wraps(f)
def wrap(*args, **kwargs):
t = time.time()
r = f(*args, **kwargs)
print '%s.%s: %.4f' % (f.__module__, f.func_name, time.time()-t)
return r
return wrap
|
<commit_before>import time
# for development/debugging
def report_time(f):
def wrap(*args, **kwargs):
t = time.time()
r = f(*args, **kwargs)
print '%s.%s: %.4f' % (f.__module__, f.func_name, time.time()-t)
return r
return wrap
<commit_msg>Add functools wraps to report_time decorator<commit_after>from functools import wraps
import time
# for development/debugging
def report_time(f):
@wraps(f)
def wrap(*args, **kwargs):
t = time.time()
r = f(*args, **kwargs)
print '%s.%s: %.4f' % (f.__module__, f.func_name, time.time()-t)
return r
return wrap
|
562ec360635dc07a5814293e08f80497677f87bd
|
dashboard/tasks.py
|
dashboard/tasks.py
|
import json
import requests
from bitcoinmonitor.celeryconfig import app
from channels import Group
app.conf.beat_schedule = {
'add-every-30-seconds': {
'task': 'dashboard.tasks.get_bitcoin_price',
'schedule': 6.0,
'args': ("dale",)
},
}
@app.task
def get_bitcoin_price(arg):
last_price = requests.get("https://bittrex.com/api/v1.1/public/getticker?market=USDT-BTC").json().get("result").get("Last")
Group('btc-price').send({'text': json.dumps({
'last_price': last_price
})})
|
import json
import requests
from coinpricemonitor.celeryconfig import app
from channels import Group
app.conf.beat_schedule = {
'add-every-30-seconds': {
'task': 'dashboard.tasks.get_bitcoin_price',
'schedule': 6.0,
'args': ("dale",)
},
}
@app.task
def get_bitcoin_price(arg):
last_price = requests.get("https://bittrex.com/api/v1.1/public/getticker?market=USDT-BTC").json().get("result").get("Last")
Group('btc-price').send({'text': json.dumps({
'last_price': last_price
})})
|
Change the import for app
|
Change the import for app
|
Python
|
mit
|
alessandroHenrique/coinpricemonitor,alessandroHenrique/coinpricemonitor,alessandroHenrique/coinpricemonitor
|
import json
import requests
from bitcoinmonitor.celeryconfig import app
from channels import Group
app.conf.beat_schedule = {
'add-every-30-seconds': {
'task': 'dashboard.tasks.get_bitcoin_price',
'schedule': 6.0,
'args': ("dale",)
},
}
@app.task
def get_bitcoin_price(arg):
last_price = requests.get("https://bittrex.com/api/v1.1/public/getticker?market=USDT-BTC").json().get("result").get("Last")
Group('btc-price').send({'text': json.dumps({
'last_price': last_price
})})
Change the import for app
|
import json
import requests
from coinpricemonitor.celeryconfig import app
from channels import Group
app.conf.beat_schedule = {
'add-every-30-seconds': {
'task': 'dashboard.tasks.get_bitcoin_price',
'schedule': 6.0,
'args': ("dale",)
},
}
@app.task
def get_bitcoin_price(arg):
last_price = requests.get("https://bittrex.com/api/v1.1/public/getticker?market=USDT-BTC").json().get("result").get("Last")
Group('btc-price').send({'text': json.dumps({
'last_price': last_price
})})
|
<commit_before>import json
import requests
from bitcoinmonitor.celeryconfig import app
from channels import Group
app.conf.beat_schedule = {
'add-every-30-seconds': {
'task': 'dashboard.tasks.get_bitcoin_price',
'schedule': 6.0,
'args': ("dale",)
},
}
@app.task
def get_bitcoin_price(arg):
last_price = requests.get("https://bittrex.com/api/v1.1/public/getticker?market=USDT-BTC").json().get("result").get("Last")
Group('btc-price').send({'text': json.dumps({
'last_price': last_price
})})
<commit_msg>Change the import for app<commit_after>
|
import json
import requests
from coinpricemonitor.celeryconfig import app
from channels import Group
app.conf.beat_schedule = {
'add-every-30-seconds': {
'task': 'dashboard.tasks.get_bitcoin_price',
'schedule': 6.0,
'args': ("dale",)
},
}
@app.task
def get_bitcoin_price(arg):
last_price = requests.get("https://bittrex.com/api/v1.1/public/getticker?market=USDT-BTC").json().get("result").get("Last")
Group('btc-price').send({'text': json.dumps({
'last_price': last_price
})})
|
import json
import requests
from bitcoinmonitor.celeryconfig import app
from channels import Group
app.conf.beat_schedule = {
'add-every-30-seconds': {
'task': 'dashboard.tasks.get_bitcoin_price',
'schedule': 6.0,
'args': ("dale",)
},
}
@app.task
def get_bitcoin_price(arg):
last_price = requests.get("https://bittrex.com/api/v1.1/public/getticker?market=USDT-BTC").json().get("result").get("Last")
Group('btc-price').send({'text': json.dumps({
'last_price': last_price
})})
Change the import for appimport json
import requests
from coinpricemonitor.celeryconfig import app
from channels import Group
app.conf.beat_schedule = {
'add-every-30-seconds': {
'task': 'dashboard.tasks.get_bitcoin_price',
'schedule': 6.0,
'args': ("dale",)
},
}
@app.task
def get_bitcoin_price(arg):
last_price = requests.get("https://bittrex.com/api/v1.1/public/getticker?market=USDT-BTC").json().get("result").get("Last")
Group('btc-price').send({'text': json.dumps({
'last_price': last_price
})})
|
<commit_before>import json
import requests
from bitcoinmonitor.celeryconfig import app
from channels import Group
app.conf.beat_schedule = {
'add-every-30-seconds': {
'task': 'dashboard.tasks.get_bitcoin_price',
'schedule': 6.0,
'args': ("dale",)
},
}
@app.task
def get_bitcoin_price(arg):
last_price = requests.get("https://bittrex.com/api/v1.1/public/getticker?market=USDT-BTC").json().get("result").get("Last")
Group('btc-price').send({'text': json.dumps({
'last_price': last_price
})})
<commit_msg>Change the import for app<commit_after>import json
import requests
from coinpricemonitor.celeryconfig import app
from channels import Group
app.conf.beat_schedule = {
'add-every-30-seconds': {
'task': 'dashboard.tasks.get_bitcoin_price',
'schedule': 6.0,
'args': ("dale",)
},
}
@app.task
def get_bitcoin_price(arg):
last_price = requests.get("https://bittrex.com/api/v1.1/public/getticker?market=USDT-BTC").json().get("result").get("Last")
Group('btc-price').send({'text': json.dumps({
'last_price': last_price
})})
|
ac4ba28e3b3c221f378edb2d0abbc8cc77a8eae4
|
src/libmv/multiview/panography_coeffs.py
|
src/libmv/multiview/panography_coeffs.py
|
import sympy
f2, a12, a1, a2, b12, b1, b2 = sympy.symbols('f2 a12 a1 a2 b12 b1 b2')
# Equation 12 from the brown paper; see panography.h
equation_12 = ((a12 + f2)**2 * (b1 + f2) * (b2 + f2) -
(b12 + f2)**2 * (a1 + f2) * (a2 + f2))
d = equation_12.as_poly(f2).as_dict()
print ' // Coefficients in ascending powers of alpha, i.e. P[N]*x^N.'
print ' double P[4] = {'
for i in range(4):
print ' ', sympy.printing.ccode(d[(i,)])
print ' }'
|
# Minimal Solutions for Panoramic Stitching. M. Brown, R. Hartley and D. Nister.
# International Conference on Computer Vision and Pattern Recognition
# (CVPR2007). Minneapolis, June 2007.
import sympy
f2, a12, a1, a2, b12, b1, b2 = sympy.symbols('f2 a12 a1 a2 b12 b1 b2')
# Equation 12 from the brown paper; see panography.h
equation_12 = ((a12 + f2)**2 * (b1 + f2) * (b2 + f2) -
(b12 + f2)**2 * (a1 + f2) * (a2 + f2))
d = equation_12.as_poly(f2).as_dict()
print ' // Coefficients in ascending powers of alpha, i.e. P[N]*x^N.'
print ' double P[4] = {'
for i in range(4):
print ' ', sympy.printing.ccode(d[(i,)])
print ' }'
# As long as Python code do not return the correct value
# I prefer use "Xcas Computer Algebra System"
# http://www-fourier.ujf-grenoble.fr/~parisse/giac.html
# Solution for the focal length of a pair of images from a rotating camera.
# comment(" Define the base equation that share the Focal length 'f' ")
# FocalSolver:=(a12+f)^2 / (( a1+f)*(a2+f)) - (b12+f)^2 / ((b1+f)*(b2+f));
# comment(" Solve equation=0 with the unknow 'f' ")
# solve(FocalSolver=0,f);
|
Add xcas source code to obtain panography shared Focal polynomial solver.
|
Add xcas source code to obtain panography shared Focal polynomial solver.
|
Python
|
mit
|
leoujz/libmv,Shinohara-Takayuki/libmv,tanmengwen/libmv,KangKyungSoo/libmv,jackyspeed/libmv,Ashwinning/libmv,Shinohara-Takayuki/libmv,jackyspeed/libmv,manjunathnarayana/libmv,manjunathnarayana/libmv,KangKyungSoo/libmv,leoujz/libmv,hjm168168/libmv,jackyspeed/libmv,Shinohara-Takayuki/libmv,sanyaade-g2g-repos/libmv,tanmengwen/libmv,guivi01/libmv,Danath/libmv,manjunathnarayana/libmv,sanyaade-g2g-repos/libmv,Ashwinning/libmv,tanmengwen/libmv,jackyspeed/libmv,hjm168168/libmv,sanyaade-g2g-repos/libmv,pombreda/libmv,hjm168168/libmv,KangKyungSoo/libmv,KangKyungSoo/libmv,Ashwinning/libmv,Danath/libmv,tanmengwen/libmv,guivi01/libmv,guivi01/libmv,pombreda/libmv,Danath/libmv,sanyaade-g2g-repos/libmv,pombreda/libmv,hjm168168/libmv,manjunathnarayana/libmv,leoujz/libmv,leoujz/libmv,Ashwinning/libmv,Shinohara-Takayuki/libmv,guivi01/libmv,Danath/libmv,pombreda/libmv
|
import sympy
f2, a12, a1, a2, b12, b1, b2 = sympy.symbols('f2 a12 a1 a2 b12 b1 b2')
# Equation 12 from the brown paper; see panography.h
equation_12 = ((a12 + f2)**2 * (b1 + f2) * (b2 + f2) -
(b12 + f2)**2 * (a1 + f2) * (a2 + f2))
d = equation_12.as_poly(f2).as_dict()
print ' // Coefficients in ascending powers of alpha, i.e. P[N]*x^N.'
print ' double P[4] = {'
for i in range(4):
print ' ', sympy.printing.ccode(d[(i,)])
print ' }'
Add xcas source code to obtain panography shared Focal polynomial solver.
|
# Minimal Solutions for Panoramic Stitching. M. Brown, R. Hartley and D. Nister.
# International Conference on Computer Vision and Pattern Recognition
# (CVPR2007). Minneapolis, June 2007.
import sympy
f2, a12, a1, a2, b12, b1, b2 = sympy.symbols('f2 a12 a1 a2 b12 b1 b2')
# Equation 12 from the brown paper; see panography.h
equation_12 = ((a12 + f2)**2 * (b1 + f2) * (b2 + f2) -
(b12 + f2)**2 * (a1 + f2) * (a2 + f2))
d = equation_12.as_poly(f2).as_dict()
print ' // Coefficients in ascending powers of alpha, i.e. P[N]*x^N.'
print ' double P[4] = {'
for i in range(4):
print ' ', sympy.printing.ccode(d[(i,)])
print ' }'
# As long as Python code do not return the correct value
# I prefer use "Xcas Computer Algebra System"
# http://www-fourier.ujf-grenoble.fr/~parisse/giac.html
# Solution for the focal length of a pair of images from a rotating camera.
# comment(" Define the base equation that share the Focal length 'f' ")
# FocalSolver:=(a12+f)^2 / (( a1+f)*(a2+f)) - (b12+f)^2 / ((b1+f)*(b2+f));
# comment(" Solve equation=0 with the unknow 'f' ")
# solve(FocalSolver=0,f);
|
<commit_before>import sympy
f2, a12, a1, a2, b12, b1, b2 = sympy.symbols('f2 a12 a1 a2 b12 b1 b2')
# Equation 12 from the brown paper; see panography.h
equation_12 = ((a12 + f2)**2 * (b1 + f2) * (b2 + f2) -
(b12 + f2)**2 * (a1 + f2) * (a2 + f2))
d = equation_12.as_poly(f2).as_dict()
print ' // Coefficients in ascending powers of alpha, i.e. P[N]*x^N.'
print ' double P[4] = {'
for i in range(4):
print ' ', sympy.printing.ccode(d[(i,)])
print ' }'
<commit_msg>Add xcas source code to obtain panography shared Focal polynomial solver.<commit_after>
|
# Minimal Solutions for Panoramic Stitching. M. Brown, R. Hartley and D. Nister.
# International Conference on Computer Vision and Pattern Recognition
# (CVPR2007). Minneapolis, June 2007.
import sympy
f2, a12, a1, a2, b12, b1, b2 = sympy.symbols('f2 a12 a1 a2 b12 b1 b2')
# Equation 12 from the brown paper; see panography.h
equation_12 = ((a12 + f2)**2 * (b1 + f2) * (b2 + f2) -
(b12 + f2)**2 * (a1 + f2) * (a2 + f2))
d = equation_12.as_poly(f2).as_dict()
print ' // Coefficients in ascending powers of alpha, i.e. P[N]*x^N.'
print ' double P[4] = {'
for i in range(4):
print ' ', sympy.printing.ccode(d[(i,)])
print ' }'
# As long as Python code do not return the correct value
# I prefer use "Xcas Computer Algebra System"
# http://www-fourier.ujf-grenoble.fr/~parisse/giac.html
# Solution for the focal length of a pair of images from a rotating camera.
# comment(" Define the base equation that share the Focal length 'f' ")
# FocalSolver:=(a12+f)^2 / (( a1+f)*(a2+f)) - (b12+f)^2 / ((b1+f)*(b2+f));
# comment(" Solve equation=0 with the unknow 'f' ")
# solve(FocalSolver=0,f);
|
import sympy
f2, a12, a1, a2, b12, b1, b2 = sympy.symbols('f2 a12 a1 a2 b12 b1 b2')
# Equation 12 from the brown paper; see panography.h
equation_12 = ((a12 + f2)**2 * (b1 + f2) * (b2 + f2) -
(b12 + f2)**2 * (a1 + f2) * (a2 + f2))
d = equation_12.as_poly(f2).as_dict()
print ' // Coefficients in ascending powers of alpha, i.e. P[N]*x^N.'
print ' double P[4] = {'
for i in range(4):
print ' ', sympy.printing.ccode(d[(i,)])
print ' }'
Add xcas source code to obtain panography shared Focal polynomial solver.# Minimal Solutions for Panoramic Stitching. M. Brown, R. Hartley and D. Nister.
# International Conference on Computer Vision and Pattern Recognition
# (CVPR2007). Minneapolis, June 2007.
import sympy
f2, a12, a1, a2, b12, b1, b2 = sympy.symbols('f2 a12 a1 a2 b12 b1 b2')
# Equation 12 from the brown paper; see panography.h
equation_12 = ((a12 + f2)**2 * (b1 + f2) * (b2 + f2) -
(b12 + f2)**2 * (a1 + f2) * (a2 + f2))
d = equation_12.as_poly(f2).as_dict()
print ' // Coefficients in ascending powers of alpha, i.e. P[N]*x^N.'
print ' double P[4] = {'
for i in range(4):
print ' ', sympy.printing.ccode(d[(i,)])
print ' }'
# As long as Python code do not return the correct value
# I prefer use "Xcas Computer Algebra System"
# http://www-fourier.ujf-grenoble.fr/~parisse/giac.html
# Solution for the focal length of a pair of images from a rotating camera.
# comment(" Define the base equation that share the Focal length 'f' ")
# FocalSolver:=(a12+f)^2 / (( a1+f)*(a2+f)) - (b12+f)^2 / ((b1+f)*(b2+f));
# comment(" Solve equation=0 with the unknow 'f' ")
# solve(FocalSolver=0,f);
|
<commit_before>import sympy
f2, a12, a1, a2, b12, b1, b2 = sympy.symbols('f2 a12 a1 a2 b12 b1 b2')
# Equation 12 from the brown paper; see panography.h
equation_12 = ((a12 + f2)**2 * (b1 + f2) * (b2 + f2) -
(b12 + f2)**2 * (a1 + f2) * (a2 + f2))
d = equation_12.as_poly(f2).as_dict()
print ' // Coefficients in ascending powers of alpha, i.e. P[N]*x^N.'
print ' double P[4] = {'
for i in range(4):
print ' ', sympy.printing.ccode(d[(i,)])
print ' }'
<commit_msg>Add xcas source code to obtain panography shared Focal polynomial solver.<commit_after># Minimal Solutions for Panoramic Stitching. M. Brown, R. Hartley and D. Nister.
# International Conference on Computer Vision and Pattern Recognition
# (CVPR2007). Minneapolis, June 2007.
import sympy
f2, a12, a1, a2, b12, b1, b2 = sympy.symbols('f2 a12 a1 a2 b12 b1 b2')
# Equation 12 from the brown paper; see panography.h
equation_12 = ((a12 + f2)**2 * (b1 + f2) * (b2 + f2) -
(b12 + f2)**2 * (a1 + f2) * (a2 + f2))
d = equation_12.as_poly(f2).as_dict()
print ' // Coefficients in ascending powers of alpha, i.e. P[N]*x^N.'
print ' double P[4] = {'
for i in range(4):
print ' ', sympy.printing.ccode(d[(i,)])
print ' }'
# As long as Python code do not return the correct value
# I prefer use "Xcas Computer Algebra System"
# http://www-fourier.ujf-grenoble.fr/~parisse/giac.html
# Solution for the focal length of a pair of images from a rotating camera.
# comment(" Define the base equation that share the Focal length 'f' ")
# FocalSolver:=(a12+f)^2 / (( a1+f)*(a2+f)) - (b12+f)^2 / ((b1+f)*(b2+f));
# comment(" Solve equation=0 with the unknow 'f' ")
# solve(FocalSolver=0,f);
|
7e5d8eb0d6eabb427d7e9bd02bac3ee7b90d228d
|
src/config.py
|
src/config.py
|
import urllib
import urllib.request
proxies = [
False,
False
]
|
import urllib
import urllib.request
from pprint import pprint
proxies = [
'',
''
]
_tested_proxies = False
def test_proxies():
global _tested_proxies
if _tested_proxies:
return
_tested_proxies = {}
def _testproxy(proxyid):
if proxyid=='':
return True
if _tested_proxies.get(proxyid) is not None:
return _tested_proxies.get(proxyid)
print("Pretesting proxy",proxyid)
proxy = urllib.request.ProxyHandler( {'http': proxyid , 'https': proxyid } )
opener = urllib.request.build_opener(proxy)
#urllib.request.install_opener(opener)
try:
opened = opener.open('http://example.com')
if not opened:
_tested_proxies[proxyid] = False
return False
assert(opened.read().find(b"Example Domain")>-1)
except urllib.error.URLError as e:
try:
opened = opener.open('http://google.com')
if not opened:
_tested_proxies[proxyid] = False
return False
except urllib.error.URLError as e:
print("Proxy error",proxyid,e)
_tested_proxies[proxyid] = False
return False
_tested_proxies[proxyid] = True
return True
proxies[:] = [tup for tup in proxies if _testproxy(tup)]
_tested_proxies = True
|
Test proxies before using them.
|
Test proxies before using them.
|
Python
|
mit
|
koivunen/whoisabusetool
|
import urllib
import urllib.request
proxies = [
False,
False
]Test proxies before using them.
|
import urllib
import urllib.request
from pprint import pprint
proxies = [
'',
''
]
_tested_proxies = False
def test_proxies():
global _tested_proxies
if _tested_proxies:
return
_tested_proxies = {}
def _testproxy(proxyid):
if proxyid=='':
return True
if _tested_proxies.get(proxyid) is not None:
return _tested_proxies.get(proxyid)
print("Pretesting proxy",proxyid)
proxy = urllib.request.ProxyHandler( {'http': proxyid , 'https': proxyid } )
opener = urllib.request.build_opener(proxy)
#urllib.request.install_opener(opener)
try:
opened = opener.open('http://example.com')
if not opened:
_tested_proxies[proxyid] = False
return False
assert(opened.read().find(b"Example Domain")>-1)
except urllib.error.URLError as e:
try:
opened = opener.open('http://google.com')
if not opened:
_tested_proxies[proxyid] = False
return False
except urllib.error.URLError as e:
print("Proxy error",proxyid,e)
_tested_proxies[proxyid] = False
return False
_tested_proxies[proxyid] = True
return True
proxies[:] = [tup for tup in proxies if _testproxy(tup)]
_tested_proxies = True
|
<commit_before>
import urllib
import urllib.request
proxies = [
False,
False
]<commit_msg>Test proxies before using them.<commit_after>
|
import urllib
import urllib.request
from pprint import pprint
proxies = [
'',
''
]
_tested_proxies = False
def test_proxies():
global _tested_proxies
if _tested_proxies:
return
_tested_proxies = {}
def _testproxy(proxyid):
if proxyid=='':
return True
if _tested_proxies.get(proxyid) is not None:
return _tested_proxies.get(proxyid)
print("Pretesting proxy",proxyid)
proxy = urllib.request.ProxyHandler( {'http': proxyid , 'https': proxyid } )
opener = urllib.request.build_opener(proxy)
#urllib.request.install_opener(opener)
try:
opened = opener.open('http://example.com')
if not opened:
_tested_proxies[proxyid] = False
return False
assert(opened.read().find(b"Example Domain")>-1)
except urllib.error.URLError as e:
try:
opened = opener.open('http://google.com')
if not opened:
_tested_proxies[proxyid] = False
return False
except urllib.error.URLError as e:
print("Proxy error",proxyid,e)
_tested_proxies[proxyid] = False
return False
_tested_proxies[proxyid] = True
return True
proxies[:] = [tup for tup in proxies if _testproxy(tup)]
_tested_proxies = True
|
import urllib
import urllib.request
proxies = [
False,
False
]Test proxies before using them.
import urllib
import urllib.request
from pprint import pprint
proxies = [
'',
''
]
_tested_proxies = False
def test_proxies():
global _tested_proxies
if _tested_proxies:
return
_tested_proxies = {}
def _testproxy(proxyid):
if proxyid=='':
return True
if _tested_proxies.get(proxyid) is not None:
return _tested_proxies.get(proxyid)
print("Pretesting proxy",proxyid)
proxy = urllib.request.ProxyHandler( {'http': proxyid , 'https': proxyid } )
opener = urllib.request.build_opener(proxy)
#urllib.request.install_opener(opener)
try:
opened = opener.open('http://example.com')
if not opened:
_tested_proxies[proxyid] = False
return False
assert(opened.read().find(b"Example Domain")>-1)
except urllib.error.URLError as e:
try:
opened = opener.open('http://google.com')
if not opened:
_tested_proxies[proxyid] = False
return False
except urllib.error.URLError as e:
print("Proxy error",proxyid,e)
_tested_proxies[proxyid] = False
return False
_tested_proxies[proxyid] = True
return True
proxies[:] = [tup for tup in proxies if _testproxy(tup)]
_tested_proxies = True
|
<commit_before>
import urllib
import urllib.request
proxies = [
False,
False
]<commit_msg>Test proxies before using them.<commit_after>
import urllib
import urllib.request
from pprint import pprint
proxies = [
'',
''
]
_tested_proxies = False
def test_proxies():
global _tested_proxies
if _tested_proxies:
return
_tested_proxies = {}
def _testproxy(proxyid):
if proxyid=='':
return True
if _tested_proxies.get(proxyid) is not None:
return _tested_proxies.get(proxyid)
print("Pretesting proxy",proxyid)
proxy = urllib.request.ProxyHandler( {'http': proxyid , 'https': proxyid } )
opener = urllib.request.build_opener(proxy)
#urllib.request.install_opener(opener)
try:
opened = opener.open('http://example.com')
if not opened:
_tested_proxies[proxyid] = False
return False
assert(opened.read().find(b"Example Domain")>-1)
except urllib.error.URLError as e:
try:
opened = opener.open('http://google.com')
if not opened:
_tested_proxies[proxyid] = False
return False
except urllib.error.URLError as e:
print("Proxy error",proxyid,e)
_tested_proxies[proxyid] = False
return False
_tested_proxies[proxyid] = True
return True
proxies[:] = [tup for tup in proxies if _testproxy(tup)]
_tested_proxies = True
|
c99f5ffe9e23eeeb8f168c54d5f4b419e553d3b3
|
wizd/gnotifier.py
|
wizd/gnotifier.py
|
import pyinotify
import gobject
class GNotifier(pyinotify.Notifier):
"""
A notifier that can be attached to a mainloop
"""
def __init__(self, watch_manager, default_proc_fun=ProcessEvent()):
"""
Initialization.
@param watch_manager: Watch Manager.
@type watch_manager: WatchManager instance
@param default_proc_fun: Default processing method.
@type default_proc_fun: instance of ProcessEvent
"""
pyinotify.Notifier.__init__(self, watch_manager, default_proc_fun)
self._handler = gobject.io_add_watch(self._fd, gobject.IO_IN, self._process_io)
def _process_io(self):
self.read_events()
self.process_events()
def stop(self):
gobject.source_remove(self._handler)
pyinotify.Notifier.stop(self)
|
import pyinotify
import gobject
class GNotifier(pyinotify.Notifier):
"""
A notifier that can be attached to a mainloop
"""
def __init__(self, watch_manager, default_proc_fun=pyinotify.ProcessEvent()):
"""
Initialization.
@param watch_manager: Watch Manager.
@type watch_manager: WatchManager instance
@param default_proc_fun: Default processing method.
@type default_proc_fun: instance of ProcessEvent
"""
pyinotify.Notifier.__init__(self, watch_manager, default_proc_fun)
self._handler = gobject.io_add_watch(self._fd, gobject.IO_IN, self._process_io)
def _process_io(self, foo, bar):
self.read_events()
self.process_events()
return True
def stop(self):
gobject.source_remove(self._handler)
pyinotify.Notifier.stop(self)
if __name__ == "__main__":
import sys
wm = pyinotify.WatchManager()
n = GNotifier(wm)
if len(sys.argv) > 1:
name = sys.argv[1]
else:
name = "/tmp"
wm.add_watch(name, pyinotify.EventsCodes.ALL_EVENTS, rec=True, auto_add=True)
mainloop = gobject.MainLoop()
try:
mainloop.run()
except KeyboardInterrupt:
pass
|
Add test for GNotifier, fix up bugs
|
Add test for GNotifier, fix up bugs
|
Python
|
lgpl-2.1
|
wizbit-archive/wizbit,wizbit-archive/wizbit
|
import pyinotify
import gobject
class GNotifier(pyinotify.Notifier):
"""
A notifier that can be attached to a mainloop
"""
def __init__(self, watch_manager, default_proc_fun=ProcessEvent()):
"""
Initialization.
@param watch_manager: Watch Manager.
@type watch_manager: WatchManager instance
@param default_proc_fun: Default processing method.
@type default_proc_fun: instance of ProcessEvent
"""
pyinotify.Notifier.__init__(self, watch_manager, default_proc_fun)
self._handler = gobject.io_add_watch(self._fd, gobject.IO_IN, self._process_io)
def _process_io(self):
self.read_events()
self.process_events()
def stop(self):
gobject.source_remove(self._handler)
pyinotify.Notifier.stop(self)
Add test for GNotifier, fix up bugs
|
import pyinotify
import gobject
class GNotifier(pyinotify.Notifier):
"""
A notifier that can be attached to a mainloop
"""
def __init__(self, watch_manager, default_proc_fun=pyinotify.ProcessEvent()):
"""
Initialization.
@param watch_manager: Watch Manager.
@type watch_manager: WatchManager instance
@param default_proc_fun: Default processing method.
@type default_proc_fun: instance of ProcessEvent
"""
pyinotify.Notifier.__init__(self, watch_manager, default_proc_fun)
self._handler = gobject.io_add_watch(self._fd, gobject.IO_IN, self._process_io)
def _process_io(self, foo, bar):
self.read_events()
self.process_events()
return True
def stop(self):
gobject.source_remove(self._handler)
pyinotify.Notifier.stop(self)
if __name__ == "__main__":
import sys
wm = pyinotify.WatchManager()
n = GNotifier(wm)
if len(sys.argv) > 1:
name = sys.argv[1]
else:
name = "/tmp"
wm.add_watch(name, pyinotify.EventsCodes.ALL_EVENTS, rec=True, auto_add=True)
mainloop = gobject.MainLoop()
try:
mainloop.run()
except KeyboardInterrupt:
pass
|
<commit_before>import pyinotify
import gobject
class GNotifier(pyinotify.Notifier):
"""
A notifier that can be attached to a mainloop
"""
def __init__(self, watch_manager, default_proc_fun=ProcessEvent()):
"""
Initialization.
@param watch_manager: Watch Manager.
@type watch_manager: WatchManager instance
@param default_proc_fun: Default processing method.
@type default_proc_fun: instance of ProcessEvent
"""
pyinotify.Notifier.__init__(self, watch_manager, default_proc_fun)
self._handler = gobject.io_add_watch(self._fd, gobject.IO_IN, self._process_io)
def _process_io(self):
self.read_events()
self.process_events()
def stop(self):
gobject.source_remove(self._handler)
pyinotify.Notifier.stop(self)
<commit_msg>Add test for GNotifier, fix up bugs<commit_after>
|
import pyinotify
import gobject
class GNotifier(pyinotify.Notifier):
"""
A notifier that can be attached to a mainloop
"""
def __init__(self, watch_manager, default_proc_fun=pyinotify.ProcessEvent()):
"""
Initialization.
@param watch_manager: Watch Manager.
@type watch_manager: WatchManager instance
@param default_proc_fun: Default processing method.
@type default_proc_fun: instance of ProcessEvent
"""
pyinotify.Notifier.__init__(self, watch_manager, default_proc_fun)
self._handler = gobject.io_add_watch(self._fd, gobject.IO_IN, self._process_io)
def _process_io(self, foo, bar):
self.read_events()
self.process_events()
return True
def stop(self):
gobject.source_remove(self._handler)
pyinotify.Notifier.stop(self)
if __name__ == "__main__":
import sys
wm = pyinotify.WatchManager()
n = GNotifier(wm)
if len(sys.argv) > 1:
name = sys.argv[1]
else:
name = "/tmp"
wm.add_watch(name, pyinotify.EventsCodes.ALL_EVENTS, rec=True, auto_add=True)
mainloop = gobject.MainLoop()
try:
mainloop.run()
except KeyboardInterrupt:
pass
|
import pyinotify
import gobject
class GNotifier(pyinotify.Notifier):
"""
A notifier that can be attached to a mainloop
"""
def __init__(self, watch_manager, default_proc_fun=ProcessEvent()):
"""
Initialization.
@param watch_manager: Watch Manager.
@type watch_manager: WatchManager instance
@param default_proc_fun: Default processing method.
@type default_proc_fun: instance of ProcessEvent
"""
pyinotify.Notifier.__init__(self, watch_manager, default_proc_fun)
self._handler = gobject.io_add_watch(self._fd, gobject.IO_IN, self._process_io)
def _process_io(self):
self.read_events()
self.process_events()
def stop(self):
gobject.source_remove(self._handler)
pyinotify.Notifier.stop(self)
Add test for GNotifier, fix up bugsimport pyinotify
import gobject
class GNotifier(pyinotify.Notifier):
"""
A notifier that can be attached to a mainloop
"""
def __init__(self, watch_manager, default_proc_fun=pyinotify.ProcessEvent()):
"""
Initialization.
@param watch_manager: Watch Manager.
@type watch_manager: WatchManager instance
@param default_proc_fun: Default processing method.
@type default_proc_fun: instance of ProcessEvent
"""
pyinotify.Notifier.__init__(self, watch_manager, default_proc_fun)
self._handler = gobject.io_add_watch(self._fd, gobject.IO_IN, self._process_io)
def _process_io(self, foo, bar):
self.read_events()
self.process_events()
return True
def stop(self):
gobject.source_remove(self._handler)
pyinotify.Notifier.stop(self)
if __name__ == "__main__":
import sys
wm = pyinotify.WatchManager()
n = GNotifier(wm)
if len(sys.argv) > 1:
name = sys.argv[1]
else:
name = "/tmp"
wm.add_watch(name, pyinotify.EventsCodes.ALL_EVENTS, rec=True, auto_add=True)
mainloop = gobject.MainLoop()
try:
mainloop.run()
except KeyboardInterrupt:
pass
|
<commit_before>import pyinotify
import gobject
class GNotifier(pyinotify.Notifier):
"""
A notifier that can be attached to a mainloop
"""
def __init__(self, watch_manager, default_proc_fun=ProcessEvent()):
"""
Initialization.
@param watch_manager: Watch Manager.
@type watch_manager: WatchManager instance
@param default_proc_fun: Default processing method.
@type default_proc_fun: instance of ProcessEvent
"""
pyinotify.Notifier.__init__(self, watch_manager, default_proc_fun)
self._handler = gobject.io_add_watch(self._fd, gobject.IO_IN, self._process_io)
def _process_io(self):
self.read_events()
self.process_events()
def stop(self):
gobject.source_remove(self._handler)
pyinotify.Notifier.stop(self)
<commit_msg>Add test for GNotifier, fix up bugs<commit_after>import pyinotify
import gobject
class GNotifier(pyinotify.Notifier):
"""
A notifier that can be attached to a mainloop
"""
def __init__(self, watch_manager, default_proc_fun=pyinotify.ProcessEvent()):
"""
Initialization.
@param watch_manager: Watch Manager.
@type watch_manager: WatchManager instance
@param default_proc_fun: Default processing method.
@type default_proc_fun: instance of ProcessEvent
"""
pyinotify.Notifier.__init__(self, watch_manager, default_proc_fun)
self._handler = gobject.io_add_watch(self._fd, gobject.IO_IN, self._process_io)
def _process_io(self, foo, bar):
self.read_events()
self.process_events()
return True
def stop(self):
gobject.source_remove(self._handler)
pyinotify.Notifier.stop(self)
if __name__ == "__main__":
import sys
wm = pyinotify.WatchManager()
n = GNotifier(wm)
if len(sys.argv) > 1:
name = sys.argv[1]
else:
name = "/tmp"
wm.add_watch(name, pyinotify.EventsCodes.ALL_EVENTS, rec=True, auto_add=True)
mainloop = gobject.MainLoop()
try:
mainloop.run()
except KeyboardInterrupt:
pass
|
aaa352e63092e44c7b4decec0942c915051f5e2e
|
cypher/cypher.py
|
cypher/cypher.py
|
import argparse
from .util import identify
parser = argparse.ArgumentParser(
prog="cypher",
description="A source code identification tool."
)
parser.add_argument(
"src",
nargs="?",
help="Path to unknown source code."
)
parser.add_argument(
"-v",
"--verbose",
action="store_true",
help="Return all scores."
)
parser.add_argument(
"-f",
"--file",
action="store_true",
help="Indicates the the source is being passed as a file."
)
args = vars(parser.parse_args())
def main():
print(identify(args["src"], is_file=args["file"], verbose=args["verbose"]))
|
import argparse
from .util import identify
parser = argparse.ArgumentParser(
prog="cypher",
description="A source code identification tool."
)
parser.add_argument(
"src",
nargs=1,
help="Path to unknown source code."
)
parser.add_argument(
"-v",
"--verbose",
action="store_true",
help="Return all scores."
)
parser.add_argument(
"-f",
"--file",
action="store_true",
help="Indicates the the source is being passed as a file."
)
args = vars(parser.parse_args())
def main():
print(identify(args["src"], is_file=args["file"], verbose=args["verbose"]))
|
Change nargs from ? to 1
|
Change nargs from ? to 1
|
Python
|
mit
|
jdkato/codetype,jdkato/codetype
|
import argparse
from .util import identify
parser = argparse.ArgumentParser(
prog="cypher",
description="A source code identification tool."
)
parser.add_argument(
"src",
nargs="?",
help="Path to unknown source code."
)
parser.add_argument(
"-v",
"--verbose",
action="store_true",
help="Return all scores."
)
parser.add_argument(
"-f",
"--file",
action="store_true",
help="Indicates the the source is being passed as a file."
)
args = vars(parser.parse_args())
def main():
print(identify(args["src"], is_file=args["file"], verbose=args["verbose"]))
Change nargs from ? to 1
|
import argparse
from .util import identify
parser = argparse.ArgumentParser(
prog="cypher",
description="A source code identification tool."
)
parser.add_argument(
"src",
nargs=1,
help="Path to unknown source code."
)
parser.add_argument(
"-v",
"--verbose",
action="store_true",
help="Return all scores."
)
parser.add_argument(
"-f",
"--file",
action="store_true",
help="Indicates the the source is being passed as a file."
)
args = vars(parser.parse_args())
def main():
print(identify(args["src"], is_file=args["file"], verbose=args["verbose"]))
|
<commit_before>import argparse
from .util import identify
parser = argparse.ArgumentParser(
prog="cypher",
description="A source code identification tool."
)
parser.add_argument(
"src",
nargs="?",
help="Path to unknown source code."
)
parser.add_argument(
"-v",
"--verbose",
action="store_true",
help="Return all scores."
)
parser.add_argument(
"-f",
"--file",
action="store_true",
help="Indicates the the source is being passed as a file."
)
args = vars(parser.parse_args())
def main():
print(identify(args["src"], is_file=args["file"], verbose=args["verbose"]))
<commit_msg>Change nargs from ? to 1<commit_after>
|
import argparse
from .util import identify
parser = argparse.ArgumentParser(
prog="cypher",
description="A source code identification tool."
)
parser.add_argument(
"src",
nargs=1,
help="Path to unknown source code."
)
parser.add_argument(
"-v",
"--verbose",
action="store_true",
help="Return all scores."
)
parser.add_argument(
"-f",
"--file",
action="store_true",
help="Indicates the the source is being passed as a file."
)
args = vars(parser.parse_args())
def main():
print(identify(args["src"], is_file=args["file"], verbose=args["verbose"]))
|
import argparse
from .util import identify
parser = argparse.ArgumentParser(
prog="cypher",
description="A source code identification tool."
)
parser.add_argument(
"src",
nargs="?",
help="Path to unknown source code."
)
parser.add_argument(
"-v",
"--verbose",
action="store_true",
help="Return all scores."
)
parser.add_argument(
"-f",
"--file",
action="store_true",
help="Indicates the the source is being passed as a file."
)
args = vars(parser.parse_args())
def main():
print(identify(args["src"], is_file=args["file"], verbose=args["verbose"]))
Change nargs from ? to 1import argparse
from .util import identify
parser = argparse.ArgumentParser(
prog="cypher",
description="A source code identification tool."
)
parser.add_argument(
"src",
nargs=1,
help="Path to unknown source code."
)
parser.add_argument(
"-v",
"--verbose",
action="store_true",
help="Return all scores."
)
parser.add_argument(
"-f",
"--file",
action="store_true",
help="Indicates the the source is being passed as a file."
)
args = vars(parser.parse_args())
def main():
print(identify(args["src"], is_file=args["file"], verbose=args["verbose"]))
|
<commit_before>import argparse
from .util import identify
parser = argparse.ArgumentParser(
prog="cypher",
description="A source code identification tool."
)
parser.add_argument(
"src",
nargs="?",
help="Path to unknown source code."
)
parser.add_argument(
"-v",
"--verbose",
action="store_true",
help="Return all scores."
)
parser.add_argument(
"-f",
"--file",
action="store_true",
help="Indicates the the source is being passed as a file."
)
args = vars(parser.parse_args())
def main():
print(identify(args["src"], is_file=args["file"], verbose=args["verbose"]))
<commit_msg>Change nargs from ? to 1<commit_after>import argparse
from .util import identify
parser = argparse.ArgumentParser(
prog="cypher",
description="A source code identification tool."
)
parser.add_argument(
"src",
nargs=1,
help="Path to unknown source code."
)
parser.add_argument(
"-v",
"--verbose",
action="store_true",
help="Return all scores."
)
parser.add_argument(
"-f",
"--file",
action="store_true",
help="Indicates the the source is being passed as a file."
)
args = vars(parser.parse_args())
def main():
print(identify(args["src"], is_file=args["file"], verbose=args["verbose"]))
|
6adc4b650ef0de41110b6038634139c2d0bb33c3
|
pymanopt/manifolds/__init__.py
|
pymanopt/manifolds/__init__.py
|
from .grassmann import Grassmann
from .sphere import Sphere
from .stiefel import Stiefel
from .psd import PSDFixedRank, PSDFixedRankComplex, Elliptope, PositiveDefinite
from .oblique import Oblique
from .euclidean import Euclidean
from .product import Product
__all__ = ["Grassmann", "Sphere", "Stiefel", "PSDFixedRank",
"PSDFixedRankComplex", "Elliptope", "Oblique",
"Euclidean", "Product"]
|
from .grassmann import Grassmann
from .sphere import Sphere
from .stiefel import Stiefel
from .psd import PSDFixedRank, PSDFixedRankComplex, Elliptope, PositiveDefinite
from .oblique import Oblique
from .euclidean import Euclidean
from .product import Product
__all__ = ["Grassmann", "Sphere", "Stiefel", "PSDFixedRank",
"PSDFixedRankComplex", "Elliptope", "PositiveDefinite", "Oblique",
"Euclidean", "Product"]
|
Add Positive definite to manifolds init __all__
|
Add Positive definite to manifolds init __all__
Signed-off-by: Jamie Townsend <712d3bf917252432b8abdc41edb77e32fa2cc414@gmail.com>
|
Python
|
bsd-3-clause
|
nkoep/pymanopt,tingelst/pymanopt,j-towns/pymanopt,nkoep/pymanopt,pymanopt/pymanopt,nkoep/pymanopt,pymanopt/pymanopt
|
from .grassmann import Grassmann
from .sphere import Sphere
from .stiefel import Stiefel
from .psd import PSDFixedRank, PSDFixedRankComplex, Elliptope, PositiveDefinite
from .oblique import Oblique
from .euclidean import Euclidean
from .product import Product
__all__ = ["Grassmann", "Sphere", "Stiefel", "PSDFixedRank",
"PSDFixedRankComplex", "Elliptope", "Oblique",
"Euclidean", "Product"]
Add Positive definite to manifolds init __all__
Signed-off-by: Jamie Townsend <712d3bf917252432b8abdc41edb77e32fa2cc414@gmail.com>
|
from .grassmann import Grassmann
from .sphere import Sphere
from .stiefel import Stiefel
from .psd import PSDFixedRank, PSDFixedRankComplex, Elliptope, PositiveDefinite
from .oblique import Oblique
from .euclidean import Euclidean
from .product import Product
__all__ = ["Grassmann", "Sphere", "Stiefel", "PSDFixedRank",
"PSDFixedRankComplex", "Elliptope", "PositiveDefinite", "Oblique",
"Euclidean", "Product"]
|
<commit_before>from .grassmann import Grassmann
from .sphere import Sphere
from .stiefel import Stiefel
from .psd import PSDFixedRank, PSDFixedRankComplex, Elliptope, PositiveDefinite
from .oblique import Oblique
from .euclidean import Euclidean
from .product import Product
__all__ = ["Grassmann", "Sphere", "Stiefel", "PSDFixedRank",
"PSDFixedRankComplex", "Elliptope", "Oblique",
"Euclidean", "Product"]
<commit_msg>Add Positive definite to manifolds init __all__
Signed-off-by: Jamie Townsend <712d3bf917252432b8abdc41edb77e32fa2cc414@gmail.com><commit_after>
|
from .grassmann import Grassmann
from .sphere import Sphere
from .stiefel import Stiefel
from .psd import PSDFixedRank, PSDFixedRankComplex, Elliptope, PositiveDefinite
from .oblique import Oblique
from .euclidean import Euclidean
from .product import Product
__all__ = ["Grassmann", "Sphere", "Stiefel", "PSDFixedRank",
"PSDFixedRankComplex", "Elliptope", "PositiveDefinite", "Oblique",
"Euclidean", "Product"]
|
from .grassmann import Grassmann
from .sphere import Sphere
from .stiefel import Stiefel
from .psd import PSDFixedRank, PSDFixedRankComplex, Elliptope, PositiveDefinite
from .oblique import Oblique
from .euclidean import Euclidean
from .product import Product
__all__ = ["Grassmann", "Sphere", "Stiefel", "PSDFixedRank",
"PSDFixedRankComplex", "Elliptope", "Oblique",
"Euclidean", "Product"]
Add Positive definite to manifolds init __all__
Signed-off-by: Jamie Townsend <712d3bf917252432b8abdc41edb77e32fa2cc414@gmail.com>from .grassmann import Grassmann
from .sphere import Sphere
from .stiefel import Stiefel
from .psd import PSDFixedRank, PSDFixedRankComplex, Elliptope, PositiveDefinite
from .oblique import Oblique
from .euclidean import Euclidean
from .product import Product
__all__ = ["Grassmann", "Sphere", "Stiefel", "PSDFixedRank",
"PSDFixedRankComplex", "Elliptope", "PositiveDefinite", "Oblique",
"Euclidean", "Product"]
|
<commit_before>from .grassmann import Grassmann
from .sphere import Sphere
from .stiefel import Stiefel
from .psd import PSDFixedRank, PSDFixedRankComplex, Elliptope, PositiveDefinite
from .oblique import Oblique
from .euclidean import Euclidean
from .product import Product
__all__ = ["Grassmann", "Sphere", "Stiefel", "PSDFixedRank",
"PSDFixedRankComplex", "Elliptope", "Oblique",
"Euclidean", "Product"]
<commit_msg>Add Positive definite to manifolds init __all__
Signed-off-by: Jamie Townsend <712d3bf917252432b8abdc41edb77e32fa2cc414@gmail.com><commit_after>from .grassmann import Grassmann
from .sphere import Sphere
from .stiefel import Stiefel
from .psd import PSDFixedRank, PSDFixedRankComplex, Elliptope, PositiveDefinite
from .oblique import Oblique
from .euclidean import Euclidean
from .product import Product
__all__ = ["Grassmann", "Sphere", "Stiefel", "PSDFixedRank",
"PSDFixedRankComplex", "Elliptope", "PositiveDefinite", "Oblique",
"Euclidean", "Product"]
|
d2f02fa4171cb490df87a4426c78ffc37560c5d6
|
equadratures/distributions/__init__.py
|
equadratures/distributions/__init__.py
|
import equadratures.distributions.template
import equadratures.distributions.gaussian
import equadratures.distributions.truncated_gaussian
import equadratures.distributions.chebyshev
import equadratures.distributions.cauchy
import equadratures.distributions.chisquared
import equadratures.distributions.beta
import equadratures.distributions.gamma
import equadratures.distributions.rayleigh
import equadratures.distributions.uniform
import equadratures.distributions.weibull
import equadratures.distributions.chi
import equadratures.distributions.pareto
import equadratures.distributions.gumbel
import equadratures.distributions.studentst
import equadratures.distributions.lognormal
|
import equadratures.distributions.template
import equadratures.distributions.gaussian
import equadratures.distributions.truncated_gaussian
import equadratures.distributions.chebyshev
import equadratures.distributions.cauchy
import equadratures.distributions.chisquared
import equadratures.distributions.beta
import equadratures.distributions.gamma
import equadratures.distributions.rayleigh
import equadratures.distributions.uniform
import equadratures.distributions.triangular
import equadratures.distributions.weibull
import equadratures.distributions.chi
import equadratures.distributions.pareto
import equadratures.distributions.gumbel
import equadratures.distributions.studentst
import equadratures.distributions.lognormal
|
Add tri distribution import to init.
|
Add tri distribution import to init.
|
Python
|
lgpl-2.1
|
Effective-Quadratures/Effective-Quadratures
|
import equadratures.distributions.template
import equadratures.distributions.gaussian
import equadratures.distributions.truncated_gaussian
import equadratures.distributions.chebyshev
import equadratures.distributions.cauchy
import equadratures.distributions.chisquared
import equadratures.distributions.beta
import equadratures.distributions.gamma
import equadratures.distributions.rayleigh
import equadratures.distributions.uniform
import equadratures.distributions.weibull
import equadratures.distributions.chi
import equadratures.distributions.pareto
import equadratures.distributions.gumbel
import equadratures.distributions.studentst
import equadratures.distributions.lognormalAdd tri distribution import to init.
|
import equadratures.distributions.template
import equadratures.distributions.gaussian
import equadratures.distributions.truncated_gaussian
import equadratures.distributions.chebyshev
import equadratures.distributions.cauchy
import equadratures.distributions.chisquared
import equadratures.distributions.beta
import equadratures.distributions.gamma
import equadratures.distributions.rayleigh
import equadratures.distributions.uniform
import equadratures.distributions.triangular
import equadratures.distributions.weibull
import equadratures.distributions.chi
import equadratures.distributions.pareto
import equadratures.distributions.gumbel
import equadratures.distributions.studentst
import equadratures.distributions.lognormal
|
<commit_before>import equadratures.distributions.template
import equadratures.distributions.gaussian
import equadratures.distributions.truncated_gaussian
import equadratures.distributions.chebyshev
import equadratures.distributions.cauchy
import equadratures.distributions.chisquared
import equadratures.distributions.beta
import equadratures.distributions.gamma
import equadratures.distributions.rayleigh
import equadratures.distributions.uniform
import equadratures.distributions.weibull
import equadratures.distributions.chi
import equadratures.distributions.pareto
import equadratures.distributions.gumbel
import equadratures.distributions.studentst
import equadratures.distributions.lognormal<commit_msg>Add tri distribution import to init.<commit_after>
|
import equadratures.distributions.template
import equadratures.distributions.gaussian
import equadratures.distributions.truncated_gaussian
import equadratures.distributions.chebyshev
import equadratures.distributions.cauchy
import equadratures.distributions.chisquared
import equadratures.distributions.beta
import equadratures.distributions.gamma
import equadratures.distributions.rayleigh
import equadratures.distributions.uniform
import equadratures.distributions.triangular
import equadratures.distributions.weibull
import equadratures.distributions.chi
import equadratures.distributions.pareto
import equadratures.distributions.gumbel
import equadratures.distributions.studentst
import equadratures.distributions.lognormal
|
import equadratures.distributions.template
import equadratures.distributions.gaussian
import equadratures.distributions.truncated_gaussian
import equadratures.distributions.chebyshev
import equadratures.distributions.cauchy
import equadratures.distributions.chisquared
import equadratures.distributions.beta
import equadratures.distributions.gamma
import equadratures.distributions.rayleigh
import equadratures.distributions.uniform
import equadratures.distributions.weibull
import equadratures.distributions.chi
import equadratures.distributions.pareto
import equadratures.distributions.gumbel
import equadratures.distributions.studentst
import equadratures.distributions.lognormalAdd tri distribution import to init.import equadratures.distributions.template
import equadratures.distributions.gaussian
import equadratures.distributions.truncated_gaussian
import equadratures.distributions.chebyshev
import equadratures.distributions.cauchy
import equadratures.distributions.chisquared
import equadratures.distributions.beta
import equadratures.distributions.gamma
import equadratures.distributions.rayleigh
import equadratures.distributions.uniform
import equadratures.distributions.triangular
import equadratures.distributions.weibull
import equadratures.distributions.chi
import equadratures.distributions.pareto
import equadratures.distributions.gumbel
import equadratures.distributions.studentst
import equadratures.distributions.lognormal
|
<commit_before>import equadratures.distributions.template
import equadratures.distributions.gaussian
import equadratures.distributions.truncated_gaussian
import equadratures.distributions.chebyshev
import equadratures.distributions.cauchy
import equadratures.distributions.chisquared
import equadratures.distributions.beta
import equadratures.distributions.gamma
import equadratures.distributions.rayleigh
import equadratures.distributions.uniform
import equadratures.distributions.weibull
import equadratures.distributions.chi
import equadratures.distributions.pareto
import equadratures.distributions.gumbel
import equadratures.distributions.studentst
import equadratures.distributions.lognormal<commit_msg>Add tri distribution import to init.<commit_after>import equadratures.distributions.template
import equadratures.distributions.gaussian
import equadratures.distributions.truncated_gaussian
import equadratures.distributions.chebyshev
import equadratures.distributions.cauchy
import equadratures.distributions.chisquared
import equadratures.distributions.beta
import equadratures.distributions.gamma
import equadratures.distributions.rayleigh
import equadratures.distributions.uniform
import equadratures.distributions.triangular
import equadratures.distributions.weibull
import equadratures.distributions.chi
import equadratures.distributions.pareto
import equadratures.distributions.gumbel
import equadratures.distributions.studentst
import equadratures.distributions.lognormal
|
7ec92591324717cfdefc8531549654f146e8b15c
|
test/unit/test_id_iterators.py
|
test/unit/test_id_iterators.py
|
from unittest import TestCase, main
import re
from uuid import UUID
from jsonrpcclient.id_iterators import hex_iterator, uuid_iterator, \
random_iterator
class TestHexIterator(TestCase):
def test(self):
i = hex_iterator()
self.assertEqual('1', next(i))
i = hex_iterator(9)
self.assertEqual('9', next(i))
self.assertEqual('a', next(i))
class TestUUIDIterator(TestCase):
def test(self):
i = uuid_iterator()
# Raise ValueError if badly formed hexadecimal UUID string
UUID(next(i), version=4)
class TestRandomIterator(TestCase):
def test(self):
i = random_iterator()
self.assertTrue(re.match('^[0-9,a-z]{8}$', next(i)))
|
from unittest import TestCase, main
import re
from uuid import UUID
from jsonrpcclient.id_iterators import hex_iterator, uuid_iterator, \
random_iterator
class TestHexIterator(TestCase):
def test(self):
i = hex_iterator()
self.assertEqual('1', next(i))
i = hex_iterator(9)
self.assertEqual('9', next(i))
self.assertEqual('a', next(i))
class TestUUIDIterator(TestCase):
def test(self):
i = uuid_iterator()
# Raise ValueError if badly formed hexadecimal UUID string
UUID(next(i), version=4)
class TestRandomIterator(TestCase):
def test(self):
i = random_iterator()
self.assertTrue(re.match('^[0-9,a-z]{8}$', next(i)))
if __name__ == '__main__':
main()
|
Add statements to run id_iterators tests
|
Add statements to run id_iterators tests
Closes #8
|
Python
|
mit
|
bcb/jsonrpcclient
|
from unittest import TestCase, main
import re
from uuid import UUID
from jsonrpcclient.id_iterators import hex_iterator, uuid_iterator, \
random_iterator
class TestHexIterator(TestCase):
def test(self):
i = hex_iterator()
self.assertEqual('1', next(i))
i = hex_iterator(9)
self.assertEqual('9', next(i))
self.assertEqual('a', next(i))
class TestUUIDIterator(TestCase):
def test(self):
i = uuid_iterator()
# Raise ValueError if badly formed hexadecimal UUID string
UUID(next(i), version=4)
class TestRandomIterator(TestCase):
def test(self):
i = random_iterator()
self.assertTrue(re.match('^[0-9,a-z]{8}$', next(i)))
Add statements to run id_iterators tests
Closes #8
|
from unittest import TestCase, main
import re
from uuid import UUID
from jsonrpcclient.id_iterators import hex_iterator, uuid_iterator, \
random_iterator
class TestHexIterator(TestCase):
def test(self):
i = hex_iterator()
self.assertEqual('1', next(i))
i = hex_iterator(9)
self.assertEqual('9', next(i))
self.assertEqual('a', next(i))
class TestUUIDIterator(TestCase):
def test(self):
i = uuid_iterator()
# Raise ValueError if badly formed hexadecimal UUID string
UUID(next(i), version=4)
class TestRandomIterator(TestCase):
def test(self):
i = random_iterator()
self.assertTrue(re.match('^[0-9,a-z]{8}$', next(i)))
if __name__ == '__main__':
main()
|
<commit_before>from unittest import TestCase, main
import re
from uuid import UUID
from jsonrpcclient.id_iterators import hex_iterator, uuid_iterator, \
random_iterator
class TestHexIterator(TestCase):
def test(self):
i = hex_iterator()
self.assertEqual('1', next(i))
i = hex_iterator(9)
self.assertEqual('9', next(i))
self.assertEqual('a', next(i))
class TestUUIDIterator(TestCase):
def test(self):
i = uuid_iterator()
# Raise ValueError if badly formed hexadecimal UUID string
UUID(next(i), version=4)
class TestRandomIterator(TestCase):
def test(self):
i = random_iterator()
self.assertTrue(re.match('^[0-9,a-z]{8}$', next(i)))
<commit_msg>Add statements to run id_iterators tests
Closes #8<commit_after>
|
from unittest import TestCase, main
import re
from uuid import UUID
from jsonrpcclient.id_iterators import hex_iterator, uuid_iterator, \
random_iterator
class TestHexIterator(TestCase):
def test(self):
i = hex_iterator()
self.assertEqual('1', next(i))
i = hex_iterator(9)
self.assertEqual('9', next(i))
self.assertEqual('a', next(i))
class TestUUIDIterator(TestCase):
def test(self):
i = uuid_iterator()
# Raise ValueError if badly formed hexadecimal UUID string
UUID(next(i), version=4)
class TestRandomIterator(TestCase):
def test(self):
i = random_iterator()
self.assertTrue(re.match('^[0-9,a-z]{8}$', next(i)))
if __name__ == '__main__':
main()
|
from unittest import TestCase, main
import re
from uuid import UUID
from jsonrpcclient.id_iterators import hex_iterator, uuid_iterator, \
random_iterator
class TestHexIterator(TestCase):
def test(self):
i = hex_iterator()
self.assertEqual('1', next(i))
i = hex_iterator(9)
self.assertEqual('9', next(i))
self.assertEqual('a', next(i))
class TestUUIDIterator(TestCase):
def test(self):
i = uuid_iterator()
# Raise ValueError if badly formed hexadecimal UUID string
UUID(next(i), version=4)
class TestRandomIterator(TestCase):
def test(self):
i = random_iterator()
self.assertTrue(re.match('^[0-9,a-z]{8}$', next(i)))
Add statements to run id_iterators tests
Closes #8from unittest import TestCase, main
import re
from uuid import UUID
from jsonrpcclient.id_iterators import hex_iterator, uuid_iterator, \
random_iterator
class TestHexIterator(TestCase):
def test(self):
i = hex_iterator()
self.assertEqual('1', next(i))
i = hex_iterator(9)
self.assertEqual('9', next(i))
self.assertEqual('a', next(i))
class TestUUIDIterator(TestCase):
def test(self):
i = uuid_iterator()
# Raise ValueError if badly formed hexadecimal UUID string
UUID(next(i), version=4)
class TestRandomIterator(TestCase):
def test(self):
i = random_iterator()
self.assertTrue(re.match('^[0-9,a-z]{8}$', next(i)))
if __name__ == '__main__':
main()
|
<commit_before>from unittest import TestCase, main
import re
from uuid import UUID
from jsonrpcclient.id_iterators import hex_iterator, uuid_iterator, \
random_iterator
class TestHexIterator(TestCase):
def test(self):
i = hex_iterator()
self.assertEqual('1', next(i))
i = hex_iterator(9)
self.assertEqual('9', next(i))
self.assertEqual('a', next(i))
class TestUUIDIterator(TestCase):
def test(self):
i = uuid_iterator()
# Raise ValueError if badly formed hexadecimal UUID string
UUID(next(i), version=4)
class TestRandomIterator(TestCase):
def test(self):
i = random_iterator()
self.assertTrue(re.match('^[0-9,a-z]{8}$', next(i)))
<commit_msg>Add statements to run id_iterators tests
Closes #8<commit_after>from unittest import TestCase, main
import re
from uuid import UUID
from jsonrpcclient.id_iterators import hex_iterator, uuid_iterator, \
random_iterator
class TestHexIterator(TestCase):
def test(self):
i = hex_iterator()
self.assertEqual('1', next(i))
i = hex_iterator(9)
self.assertEqual('9', next(i))
self.assertEqual('a', next(i))
class TestUUIDIterator(TestCase):
def test(self):
i = uuid_iterator()
# Raise ValueError if badly formed hexadecimal UUID string
UUID(next(i), version=4)
class TestRandomIterator(TestCase):
def test(self):
i = random_iterator()
self.assertTrue(re.match('^[0-9,a-z]{8}$', next(i)))
if __name__ == '__main__':
main()
|
094c3e428644631c78837def24ac65ba4c84b1c7
|
pythainlp/tokenize/__init__.py
|
pythainlp/tokenize/__init__.py
|
# -*- coding: utf-8 -*-
"""
Thai tokenizers
"""
from pythainlp.corpus import thai_syllables, thai_words
from pythainlp.util.trie import Trie
DEFAULT_WORD_TOKENIZE_ENGINE = "newmm"
DEFAULT_SENT_TOKENIZE_ENGINE = "crfcut"
DEFAULT_SUBWORD_TOKENIZE_ENGINE = "tcc"
DEFAULT_SYLLABLE_TOKENIZE_ENGINE = "dict"
DEFAULT_WORD_DICT_TRIE = Trie(thai_words())
DEFAULT_SYLLABLE_DICT_TRIE = Trie(thai_syllables())
DEFAULT_DICT_TRIE = DEFAULT_WORD_DICT_TRIE
from pythainlp.tokenize.tokenize import (
Tokenizer,
sent_tokenize,
subword_tokenize,
syllable_tokenize,
word_tokenize,
)
__all__ = [
"Tokenizer",
"sent_tokenize",
"subword_tokenize",
"syllable_tokenize",
"word_tokenize",
]
|
# -*- coding: utf-8 -*-
"""
Thai tokenizers
"""
from pythainlp.corpus import thai_syllables, thai_words
from pythainlp.util.trie import Trie
DEFAULT_WORD_TOKENIZE_ENGINE = "newmm"
DEFAULT_SENT_TOKENIZE_ENGINE = "crfcut"
DEFAULT_SUBWORD_TOKENIZE_ENGINE = "tcc"
DEFAULT_SYLLABLE_TOKENIZE_ENGINE = "dict"
DEFAULT_WORD_DICT_TRIE = Trie(thai_words())
DEFAULT_SYLLABLE_DICT_TRIE = Trie(thai_syllables())
DEFAULT_DICT_TRIE = DEFAULT_WORD_DICT_TRIE
from pythainlp.tokenize.tokenize import (
Tokenizer,
sent_tokenize,
subword_tokenize,
syllable_tokenize,
word_tokenize,
)
__all__ = [
"Tokenizer",
"Trie",
"sent_tokenize",
"subword_tokenize",
"syllable_tokenize",
"word_tokenize",
]
|
Make pythainlp.util.Trie still accessibl through pythainlp.tokenize.Trie (will deprecate later)
|
Make pythainlp.util.Trie still accessibl through pythainlp.tokenize.Trie (will deprecate later)
|
Python
|
apache-2.0
|
PyThaiNLP/pythainlp
|
# -*- coding: utf-8 -*-
"""
Thai tokenizers
"""
from pythainlp.corpus import thai_syllables, thai_words
from pythainlp.util.trie import Trie
DEFAULT_WORD_TOKENIZE_ENGINE = "newmm"
DEFAULT_SENT_TOKENIZE_ENGINE = "crfcut"
DEFAULT_SUBWORD_TOKENIZE_ENGINE = "tcc"
DEFAULT_SYLLABLE_TOKENIZE_ENGINE = "dict"
DEFAULT_WORD_DICT_TRIE = Trie(thai_words())
DEFAULT_SYLLABLE_DICT_TRIE = Trie(thai_syllables())
DEFAULT_DICT_TRIE = DEFAULT_WORD_DICT_TRIE
from pythainlp.tokenize.tokenize import (
Tokenizer,
sent_tokenize,
subword_tokenize,
syllable_tokenize,
word_tokenize,
)
__all__ = [
"Tokenizer",
"sent_tokenize",
"subword_tokenize",
"syllable_tokenize",
"word_tokenize",
]
Make pythainlp.util.Trie still accessibl through pythainlp.tokenize.Trie (will deprecate later)
|
# -*- coding: utf-8 -*-
"""
Thai tokenizers
"""
from pythainlp.corpus import thai_syllables, thai_words
from pythainlp.util.trie import Trie
DEFAULT_WORD_TOKENIZE_ENGINE = "newmm"
DEFAULT_SENT_TOKENIZE_ENGINE = "crfcut"
DEFAULT_SUBWORD_TOKENIZE_ENGINE = "tcc"
DEFAULT_SYLLABLE_TOKENIZE_ENGINE = "dict"
DEFAULT_WORD_DICT_TRIE = Trie(thai_words())
DEFAULT_SYLLABLE_DICT_TRIE = Trie(thai_syllables())
DEFAULT_DICT_TRIE = DEFAULT_WORD_DICT_TRIE
from pythainlp.tokenize.tokenize import (
Tokenizer,
sent_tokenize,
subword_tokenize,
syllable_tokenize,
word_tokenize,
)
__all__ = [
"Tokenizer",
"Trie",
"sent_tokenize",
"subword_tokenize",
"syllable_tokenize",
"word_tokenize",
]
|
<commit_before># -*- coding: utf-8 -*-
"""
Thai tokenizers
"""
from pythainlp.corpus import thai_syllables, thai_words
from pythainlp.util.trie import Trie
DEFAULT_WORD_TOKENIZE_ENGINE = "newmm"
DEFAULT_SENT_TOKENIZE_ENGINE = "crfcut"
DEFAULT_SUBWORD_TOKENIZE_ENGINE = "tcc"
DEFAULT_SYLLABLE_TOKENIZE_ENGINE = "dict"
DEFAULT_WORD_DICT_TRIE = Trie(thai_words())
DEFAULT_SYLLABLE_DICT_TRIE = Trie(thai_syllables())
DEFAULT_DICT_TRIE = DEFAULT_WORD_DICT_TRIE
from pythainlp.tokenize.tokenize import (
Tokenizer,
sent_tokenize,
subword_tokenize,
syllable_tokenize,
word_tokenize,
)
__all__ = [
"Tokenizer",
"sent_tokenize",
"subword_tokenize",
"syllable_tokenize",
"word_tokenize",
]
<commit_msg>Make pythainlp.util.Trie still accessibl through pythainlp.tokenize.Trie (will deprecate later)<commit_after>
|
# -*- coding: utf-8 -*-
"""
Thai tokenizers
"""
from pythainlp.corpus import thai_syllables, thai_words
from pythainlp.util.trie import Trie
DEFAULT_WORD_TOKENIZE_ENGINE = "newmm"
DEFAULT_SENT_TOKENIZE_ENGINE = "crfcut"
DEFAULT_SUBWORD_TOKENIZE_ENGINE = "tcc"
DEFAULT_SYLLABLE_TOKENIZE_ENGINE = "dict"
DEFAULT_WORD_DICT_TRIE = Trie(thai_words())
DEFAULT_SYLLABLE_DICT_TRIE = Trie(thai_syllables())
DEFAULT_DICT_TRIE = DEFAULT_WORD_DICT_TRIE
from pythainlp.tokenize.tokenize import (
Tokenizer,
sent_tokenize,
subword_tokenize,
syllable_tokenize,
word_tokenize,
)
__all__ = [
"Tokenizer",
"Trie",
"sent_tokenize",
"subword_tokenize",
"syllable_tokenize",
"word_tokenize",
]
|
# -*- coding: utf-8 -*-
"""
Thai tokenizers
"""
from pythainlp.corpus import thai_syllables, thai_words
from pythainlp.util.trie import Trie
DEFAULT_WORD_TOKENIZE_ENGINE = "newmm"
DEFAULT_SENT_TOKENIZE_ENGINE = "crfcut"
DEFAULT_SUBWORD_TOKENIZE_ENGINE = "tcc"
DEFAULT_SYLLABLE_TOKENIZE_ENGINE = "dict"
DEFAULT_WORD_DICT_TRIE = Trie(thai_words())
DEFAULT_SYLLABLE_DICT_TRIE = Trie(thai_syllables())
DEFAULT_DICT_TRIE = DEFAULT_WORD_DICT_TRIE
from pythainlp.tokenize.tokenize import (
Tokenizer,
sent_tokenize,
subword_tokenize,
syllable_tokenize,
word_tokenize,
)
__all__ = [
"Tokenizer",
"sent_tokenize",
"subword_tokenize",
"syllable_tokenize",
"word_tokenize",
]
Make pythainlp.util.Trie still accessibl through pythainlp.tokenize.Trie (will deprecate later)# -*- coding: utf-8 -*-
"""
Thai tokenizers
"""
from pythainlp.corpus import thai_syllables, thai_words
from pythainlp.util.trie import Trie
DEFAULT_WORD_TOKENIZE_ENGINE = "newmm"
DEFAULT_SENT_TOKENIZE_ENGINE = "crfcut"
DEFAULT_SUBWORD_TOKENIZE_ENGINE = "tcc"
DEFAULT_SYLLABLE_TOKENIZE_ENGINE = "dict"
DEFAULT_WORD_DICT_TRIE = Trie(thai_words())
DEFAULT_SYLLABLE_DICT_TRIE = Trie(thai_syllables())
DEFAULT_DICT_TRIE = DEFAULT_WORD_DICT_TRIE
from pythainlp.tokenize.tokenize import (
Tokenizer,
sent_tokenize,
subword_tokenize,
syllable_tokenize,
word_tokenize,
)
__all__ = [
"Tokenizer",
"Trie",
"sent_tokenize",
"subword_tokenize",
"syllable_tokenize",
"word_tokenize",
]
|
<commit_before># -*- coding: utf-8 -*-
"""
Thai tokenizers
"""
from pythainlp.corpus import thai_syllables, thai_words
from pythainlp.util.trie import Trie
DEFAULT_WORD_TOKENIZE_ENGINE = "newmm"
DEFAULT_SENT_TOKENIZE_ENGINE = "crfcut"
DEFAULT_SUBWORD_TOKENIZE_ENGINE = "tcc"
DEFAULT_SYLLABLE_TOKENIZE_ENGINE = "dict"
DEFAULT_WORD_DICT_TRIE = Trie(thai_words())
DEFAULT_SYLLABLE_DICT_TRIE = Trie(thai_syllables())
DEFAULT_DICT_TRIE = DEFAULT_WORD_DICT_TRIE
from pythainlp.tokenize.tokenize import (
Tokenizer,
sent_tokenize,
subword_tokenize,
syllable_tokenize,
word_tokenize,
)
__all__ = [
"Tokenizer",
"sent_tokenize",
"subword_tokenize",
"syllable_tokenize",
"word_tokenize",
]
<commit_msg>Make pythainlp.util.Trie still accessibl through pythainlp.tokenize.Trie (will deprecate later)<commit_after># -*- coding: utf-8 -*-
"""
Thai tokenizers
"""
from pythainlp.corpus import thai_syllables, thai_words
from pythainlp.util.trie import Trie
DEFAULT_WORD_TOKENIZE_ENGINE = "newmm"
DEFAULT_SENT_TOKENIZE_ENGINE = "crfcut"
DEFAULT_SUBWORD_TOKENIZE_ENGINE = "tcc"
DEFAULT_SYLLABLE_TOKENIZE_ENGINE = "dict"
DEFAULT_WORD_DICT_TRIE = Trie(thai_words())
DEFAULT_SYLLABLE_DICT_TRIE = Trie(thai_syllables())
DEFAULT_DICT_TRIE = DEFAULT_WORD_DICT_TRIE
from pythainlp.tokenize.tokenize import (
Tokenizer,
sent_tokenize,
subword_tokenize,
syllable_tokenize,
word_tokenize,
)
__all__ = [
"Tokenizer",
"Trie",
"sent_tokenize",
"subword_tokenize",
"syllable_tokenize",
"word_tokenize",
]
|
d72eb62bc0afe1b37c675babed8373bd536de73c
|
python/challenges/plusMinus.py
|
python/challenges/plusMinus.py
|
"""
Problem Statement:
Given an array of integers, calculate which fraction of the elements are positive, negative, and zeroes, respectively. Print the decimal value of each fraction.
Input Format:
The first line, N, is the size of the array.
The second line contains N space-separated integers describing the array of numbers (A1,A2,A3,⋯,AN).
Output Format:
Print each value on its own line with the fraction of positive numbers first, negative numbers second, and zeroes third.
"""
|
import unittest
"""
Problem Statement:
Given an array of integers, calculate which fraction of the elements are positive, negative, and zeroes, respectively. Print the decimal value of each fraction.
Input Format:
The first line, N, is the size of the array.
The second line contains N space-separated integers describing the array of numbers (A1,A2,A3,⋯,AN).
Output Format:
Print each value on its own line with the fraction of positive numbers first, negative numbers second, and zeroes third.
There are 3 positive numbers, 2 negative numbers, and 1 zero in the array.
The fraction of the positive numbers, negative numbers and zeroes are 36=0.500000, 26=0.333333 and 16=0.166667, respectively.
"""
def plusMinus(arr):
def roundToPrecision(num):
return round(num / n, 6)
n = len(arr)
pos, neg, zer = 0, 0, 0
for item in arr:
if item == 0:
zer += 1
elif item > 0:
pos += 1
elif item < 0:
neg += 1
results = []
for result in [pos, neg, zer]:
results.append(roundToPrecision(result))
return results
class TestPlusMinus(unittest.TestCase):
def test_plus_minus(self):
arr = [-4, 3, -9, 0, 4, 1]
self.assertEqual(plusMinus(arr), [0.500000, 0.333333, 0.166667])
if __name__ == '__main__':
unittest.main()
|
Create way to compute ratios of each number type
|
Create way to compute ratios of each number type
|
Python
|
mit
|
markthethomas/algorithms,markthethomas/algorithms,markthethomas/algorithms,markthethomas/algorithms
|
"""
Problem Statement:
Given an array of integers, calculate which fraction of the elements are positive, negative, and zeroes, respectively. Print the decimal value of each fraction.
Input Format:
The first line, N, is the size of the array.
The second line contains N space-separated integers describing the array of numbers (A1,A2,A3,⋯,AN).
Output Format:
Print each value on its own line with the fraction of positive numbers first, negative numbers second, and zeroes third.
"""Create way to compute ratios of each number type
|
import unittest
"""
Problem Statement:
Given an array of integers, calculate which fraction of the elements are positive, negative, and zeroes, respectively. Print the decimal value of each fraction.
Input Format:
The first line, N, is the size of the array.
The second line contains N space-separated integers describing the array of numbers (A1,A2,A3,⋯,AN).
Output Format:
Print each value on its own line with the fraction of positive numbers first, negative numbers second, and zeroes third.
There are 3 positive numbers, 2 negative numbers, and 1 zero in the array.
The fraction of the positive numbers, negative numbers and zeroes are 36=0.500000, 26=0.333333 and 16=0.166667, respectively.
"""
def plusMinus(arr):
def roundToPrecision(num):
return round(num / n, 6)
n = len(arr)
pos, neg, zer = 0, 0, 0
for item in arr:
if item == 0:
zer += 1
elif item > 0:
pos += 1
elif item < 0:
neg += 1
results = []
for result in [pos, neg, zer]:
results.append(roundToPrecision(result))
return results
class TestPlusMinus(unittest.TestCase):
def test_plus_minus(self):
arr = [-4, 3, -9, 0, 4, 1]
self.assertEqual(plusMinus(arr), [0.500000, 0.333333, 0.166667])
if __name__ == '__main__':
unittest.main()
|
<commit_before>"""
Problem Statement:
Given an array of integers, calculate which fraction of the elements are positive, negative, and zeroes, respectively. Print the decimal value of each fraction.
Input Format:
The first line, N, is the size of the array.
The second line contains N space-separated integers describing the array of numbers (A1,A2,A3,⋯,AN).
Output Format:
Print each value on its own line with the fraction of positive numbers first, negative numbers second, and zeroes third.
"""<commit_msg>Create way to compute ratios of each number type<commit_after>
|
import unittest
"""
Problem Statement:
Given an array of integers, calculate which fraction of the elements are positive, negative, and zeroes, respectively. Print the decimal value of each fraction.
Input Format:
The first line, N, is the size of the array.
The second line contains N space-separated integers describing the array of numbers (A1,A2,A3,⋯,AN).
Output Format:
Print each value on its own line with the fraction of positive numbers first, negative numbers second, and zeroes third.
There are 3 positive numbers, 2 negative numbers, and 1 zero in the array.
The fraction of the positive numbers, negative numbers and zeroes are 36=0.500000, 26=0.333333 and 16=0.166667, respectively.
"""
def plusMinus(arr):
def roundToPrecision(num):
return round(num / n, 6)
n = len(arr)
pos, neg, zer = 0, 0, 0
for item in arr:
if item == 0:
zer += 1
elif item > 0:
pos += 1
elif item < 0:
neg += 1
results = []
for result in [pos, neg, zer]:
results.append(roundToPrecision(result))
return results
class TestPlusMinus(unittest.TestCase):
def test_plus_minus(self):
arr = [-4, 3, -9, 0, 4, 1]
self.assertEqual(plusMinus(arr), [0.500000, 0.333333, 0.166667])
if __name__ == '__main__':
unittest.main()
|
"""
Problem Statement:
Given an array of integers, calculate which fraction of the elements are positive, negative, and zeroes, respectively. Print the decimal value of each fraction.
Input Format:
The first line, N, is the size of the array.
The second line contains N space-separated integers describing the array of numbers (A1,A2,A3,⋯,AN).
Output Format:
Print each value on its own line with the fraction of positive numbers first, negative numbers second, and zeroes third.
"""Create way to compute ratios of each number typeimport unittest
"""
Problem Statement:
Given an array of integers, calculate which fraction of the elements are positive, negative, and zeroes, respectively. Print the decimal value of each fraction.
Input Format:
The first line, N, is the size of the array.
The second line contains N space-separated integers describing the array of numbers (A1,A2,A3,⋯,AN).
Output Format:
Print each value on its own line with the fraction of positive numbers first, negative numbers second, and zeroes third.
There are 3 positive numbers, 2 negative numbers, and 1 zero in the array.
The fraction of the positive numbers, negative numbers and zeroes are 36=0.500000, 26=0.333333 and 16=0.166667, respectively.
"""
def plusMinus(arr):
def roundToPrecision(num):
return round(num / n, 6)
n = len(arr)
pos, neg, zer = 0, 0, 0
for item in arr:
if item == 0:
zer += 1
elif item > 0:
pos += 1
elif item < 0:
neg += 1
results = []
for result in [pos, neg, zer]:
results.append(roundToPrecision(result))
return results
class TestPlusMinus(unittest.TestCase):
def test_plus_minus(self):
arr = [-4, 3, -9, 0, 4, 1]
self.assertEqual(plusMinus(arr), [0.500000, 0.333333, 0.166667])
if __name__ == '__main__':
unittest.main()
|
<commit_before>"""
Problem Statement:
Given an array of integers, calculate which fraction of the elements are positive, negative, and zeroes, respectively. Print the decimal value of each fraction.
Input Format:
The first line, N, is the size of the array.
The second line contains N space-separated integers describing the array of numbers (A1,A2,A3,⋯,AN).
Output Format:
Print each value on its own line with the fraction of positive numbers first, negative numbers second, and zeroes third.
"""<commit_msg>Create way to compute ratios of each number type<commit_after>import unittest
"""
Problem Statement:
Given an array of integers, calculate which fraction of the elements are positive, negative, and zeroes, respectively. Print the decimal value of each fraction.
Input Format:
The first line, N, is the size of the array.
The second line contains N space-separated integers describing the array of numbers (A1,A2,A3,⋯,AN).
Output Format:
Print each value on its own line with the fraction of positive numbers first, negative numbers second, and zeroes third.
There are 3 positive numbers, 2 negative numbers, and 1 zero in the array.
The fraction of the positive numbers, negative numbers and zeroes are 36=0.500000, 26=0.333333 and 16=0.166667, respectively.
"""
def plusMinus(arr):
def roundToPrecision(num):
return round(num / n, 6)
n = len(arr)
pos, neg, zer = 0, 0, 0
for item in arr:
if item == 0:
zer += 1
elif item > 0:
pos += 1
elif item < 0:
neg += 1
results = []
for result in [pos, neg, zer]:
results.append(roundToPrecision(result))
return results
class TestPlusMinus(unittest.TestCase):
def test_plus_minus(self):
arr = [-4, 3, -9, 0, 4, 1]
self.assertEqual(plusMinus(arr), [0.500000, 0.333333, 0.166667])
if __name__ == '__main__':
unittest.main()
|
31632158c7882e20122a91643aebfbba7ae602e7
|
tests/test_modules/git_test.py
|
tests/test_modules/git_test.py
|
from subprocess import call, check_output
class TestGit:
""" Test that the user has a modern version of git installed """
|
from subprocess import call, check_output
class TestGit:
""" Test that the user has a modern version of git installed """
def git_version(self):
""" returns a tuple with execution of git --version """
exit_code = call(["git", "--version"])
output = check_output(["git", "--version"]).decode("utf-8").lstrip().rstrip()
return (output, exit_code)
def test_git_version(self):
""" tests the output from git_version() """
assert self.git_version()[1] == 0
assert self.git_version()[0].index('git version') >= 0
|
Add tests for git installation
|
Add tests for git installation
|
Python
|
bsd-3-clause
|
DevBlend/zenias,DevBlend/zenias,DevBlend/DevBlend,DevBlend/zenias,DevBlend/DevBlend,DevBlend/DevBlend,DevBlend/DevBlend,byteknacker/fcc-python-vagrant,DevBlend/zenias,DevBlend/DevBlend,byteknacker/fcc-python-vagrant,DevBlend/zenias,DevBlend/DevBlend,DevBlend/zenias
|
from subprocess import call, check_output
class TestGit:
""" Test that the user has a modern version of git installed """Add tests for git installation
|
from subprocess import call, check_output
class TestGit:
""" Test that the user has a modern version of git installed """
def git_version(self):
""" returns a tuple with execution of git --version """
exit_code = call(["git", "--version"])
output = check_output(["git", "--version"]).decode("utf-8").lstrip().rstrip()
return (output, exit_code)
def test_git_version(self):
""" tests the output from git_version() """
assert self.git_version()[1] == 0
assert self.git_version()[0].index('git version') >= 0
|
<commit_before>from subprocess import call, check_output
class TestGit:
""" Test that the user has a modern version of git installed """<commit_msg>Add tests for git installation<commit_after>
|
from subprocess import call, check_output
class TestGit:
""" Test that the user has a modern version of git installed """
def git_version(self):
""" returns a tuple with execution of git --version """
exit_code = call(["git", "--version"])
output = check_output(["git", "--version"]).decode("utf-8").lstrip().rstrip()
return (output, exit_code)
def test_git_version(self):
""" tests the output from git_version() """
assert self.git_version()[1] == 0
assert self.git_version()[0].index('git version') >= 0
|
from subprocess import call, check_output
class TestGit:
""" Test that the user has a modern version of git installed """Add tests for git installationfrom subprocess import call, check_output
class TestGit:
""" Test that the user has a modern version of git installed """
def git_version(self):
""" returns a tuple with execution of git --version """
exit_code = call(["git", "--version"])
output = check_output(["git", "--version"]).decode("utf-8").lstrip().rstrip()
return (output, exit_code)
def test_git_version(self):
""" tests the output from git_version() """
assert self.git_version()[1] == 0
assert self.git_version()[0].index('git version') >= 0
|
<commit_before>from subprocess import call, check_output
class TestGit:
""" Test that the user has a modern version of git installed """<commit_msg>Add tests for git installation<commit_after>from subprocess import call, check_output
class TestGit:
""" Test that the user has a modern version of git installed """
def git_version(self):
""" returns a tuple with execution of git --version """
exit_code = call(["git", "--version"])
output = check_output(["git", "--version"]).decode("utf-8").lstrip().rstrip()
return (output, exit_code)
def test_git_version(self):
""" tests the output from git_version() """
assert self.git_version()[1] == 0
assert self.git_version()[0].index('git version') >= 0
|
75b7160fa4d110257ac4d785ffbe7416d1e22000
|
della/user_manager/email_service.py
|
della/user_manager/email_service.py
|
from django.conf import settings
from django.core.mail import send_mail
def send_email(subject, message, recipient_list=()):
send_mail(
subject=subject,
message=message,
html_message=message.replace('\n', '<br />'),
from_email=settings.SENDER_EMAIL,
recipient_list=recipient_list,
)
|
from django.conf import settings
from django.core.mail import send_mail
def send_email(subject, message, recipient_list):
send_mail(
subject=subject,
message=message,
html_message=message.replace('\n', '<br />'),
from_email=settings.SENDER_EMAIL,
recipient_list=recipient_list,
)
|
Make `recipient_list` required parameter in `send_email`
|
Make `recipient_list` required parameter in `send_email`
|
Python
|
mit
|
avinassh/della,avinassh/della,avinassh/della
|
from django.conf import settings
from django.core.mail import send_mail
def send_email(subject, message, recipient_list=()):
send_mail(
subject=subject,
message=message,
html_message=message.replace('\n', '<br />'),
from_email=settings.SENDER_EMAIL,
recipient_list=recipient_list,
)
Make `recipient_list` required parameter in `send_email`
|
from django.conf import settings
from django.core.mail import send_mail
def send_email(subject, message, recipient_list):
send_mail(
subject=subject,
message=message,
html_message=message.replace('\n', '<br />'),
from_email=settings.SENDER_EMAIL,
recipient_list=recipient_list,
)
|
<commit_before>from django.conf import settings
from django.core.mail import send_mail
def send_email(subject, message, recipient_list=()):
send_mail(
subject=subject,
message=message,
html_message=message.replace('\n', '<br />'),
from_email=settings.SENDER_EMAIL,
recipient_list=recipient_list,
)
<commit_msg>Make `recipient_list` required parameter in `send_email`<commit_after>
|
from django.conf import settings
from django.core.mail import send_mail
def send_email(subject, message, recipient_list):
send_mail(
subject=subject,
message=message,
html_message=message.replace('\n', '<br />'),
from_email=settings.SENDER_EMAIL,
recipient_list=recipient_list,
)
|
from django.conf import settings
from django.core.mail import send_mail
def send_email(subject, message, recipient_list=()):
send_mail(
subject=subject,
message=message,
html_message=message.replace('\n', '<br />'),
from_email=settings.SENDER_EMAIL,
recipient_list=recipient_list,
)
Make `recipient_list` required parameter in `send_email`from django.conf import settings
from django.core.mail import send_mail
def send_email(subject, message, recipient_list):
send_mail(
subject=subject,
message=message,
html_message=message.replace('\n', '<br />'),
from_email=settings.SENDER_EMAIL,
recipient_list=recipient_list,
)
|
<commit_before>from django.conf import settings
from django.core.mail import send_mail
def send_email(subject, message, recipient_list=()):
send_mail(
subject=subject,
message=message,
html_message=message.replace('\n', '<br />'),
from_email=settings.SENDER_EMAIL,
recipient_list=recipient_list,
)
<commit_msg>Make `recipient_list` required parameter in `send_email`<commit_after>from django.conf import settings
from django.core.mail import send_mail
def send_email(subject, message, recipient_list):
send_mail(
subject=subject,
message=message,
html_message=message.replace('\n', '<br />'),
from_email=settings.SENDER_EMAIL,
recipient_list=recipient_list,
)
|
25b35032828593af3220b4310e6a5bd65f90d197
|
db/editdbfile.py
|
db/editdbfile.py
|
#!/usr/bin/python
# Edit an AES encrypted json/pickle file.
import os
import sys
import json
import getpass
import tempfile
import subprocess
import aespckfile
import aesjsonfile
def editfile(fn, password):
filetype = aespckfile
if ".json" in fn:
filetype = aesjsonfile
db = filetype.load(fn, password)
f = tempfile.NamedTemporaryFile()
json.dump(db, f, indent=2)
f.flush()
while True:
subprocess.call([os.getenv("EDITOR") or "editor", f.name])
try:
f.seek(0)
db = json.load(f)
filetype.dump(fn, db, password)
break
except Exception, e:
print "Error in json"
print e
print "Try again (y/n)? ",
input = raw_input()
if not input.lower().startswith("y"):
break
f.seek(0,2)
len = f.tell()
f.seek(0)
f.write(" " * len)
f.flush()
f.close()
if __name__ == "__main__":
if len(sys.argv) < 2:
sys.exit(1)
fn = sys.argv[1]
password = getpass.getpass()
editfile(fn, password)
|
#!/usr/bin/python
# Edit an AES encrypted json/pickle file.
import os
import sys
import json
import getpass
import tempfile
import subprocess
import aespckfile
import aesjsonfile
def editfile(fn, password):
filetype = aespckfile
if ".json" in fn:
filetype = aesjsonfile
db = filetype.load(fn, password)
f = tempfile.NamedTemporaryFile()
json.dump(db, f, indent=2)
f.flush()
mtime = os.path.getmtime(f.name)
while True:
subprocess.call([os.getenv("EDITOR") or "editor", f.name])
if os.path.getmtime(f.name) == mtime:
print "Not updated"
break
try:
f.seek(0)
db = json.load(f)
filetype.dump(fn, db, password)
break
except Exception, e:
print "Error in json"
print e
print "Try again (y/n)? ",
input = raw_input()
if not input.lower().startswith("y"):
break
# Over-write our temp file
f.seek(0,2)
len = f.tell()
f.seek(0)
f.write(" " * len)
f.flush()
f.close()
if __name__ == "__main__":
if len(sys.argv) < 2:
sys.exit(1)
fn = sys.argv[1]
password = getpass.getpass()
editfile(fn, password)
|
Check mtime to see if we need to write out new db file.
|
Check mtime to see if we need to write out new db file.
|
Python
|
agpl-3.0
|
vincebusam/pyWebCash,vincebusam/pyWebCash,vincebusam/pyWebCash
|
#!/usr/bin/python
# Edit an AES encrypted json/pickle file.
import os
import sys
import json
import getpass
import tempfile
import subprocess
import aespckfile
import aesjsonfile
def editfile(fn, password):
filetype = aespckfile
if ".json" in fn:
filetype = aesjsonfile
db = filetype.load(fn, password)
f = tempfile.NamedTemporaryFile()
json.dump(db, f, indent=2)
f.flush()
while True:
subprocess.call([os.getenv("EDITOR") or "editor", f.name])
try:
f.seek(0)
db = json.load(f)
filetype.dump(fn, db, password)
break
except Exception, e:
print "Error in json"
print e
print "Try again (y/n)? ",
input = raw_input()
if not input.lower().startswith("y"):
break
f.seek(0,2)
len = f.tell()
f.seek(0)
f.write(" " * len)
f.flush()
f.close()
if __name__ == "__main__":
if len(sys.argv) < 2:
sys.exit(1)
fn = sys.argv[1]
password = getpass.getpass()
editfile(fn, password)
Check mtime to see if we need to write out new db file.
|
#!/usr/bin/python
# Edit an AES encrypted json/pickle file.
import os
import sys
import json
import getpass
import tempfile
import subprocess
import aespckfile
import aesjsonfile
def editfile(fn, password):
filetype = aespckfile
if ".json" in fn:
filetype = aesjsonfile
db = filetype.load(fn, password)
f = tempfile.NamedTemporaryFile()
json.dump(db, f, indent=2)
f.flush()
mtime = os.path.getmtime(f.name)
while True:
subprocess.call([os.getenv("EDITOR") or "editor", f.name])
if os.path.getmtime(f.name) == mtime:
print "Not updated"
break
try:
f.seek(0)
db = json.load(f)
filetype.dump(fn, db, password)
break
except Exception, e:
print "Error in json"
print e
print "Try again (y/n)? ",
input = raw_input()
if not input.lower().startswith("y"):
break
# Over-write our temp file
f.seek(0,2)
len = f.tell()
f.seek(0)
f.write(" " * len)
f.flush()
f.close()
if __name__ == "__main__":
if len(sys.argv) < 2:
sys.exit(1)
fn = sys.argv[1]
password = getpass.getpass()
editfile(fn, password)
|
<commit_before>#!/usr/bin/python
# Edit an AES encrypted json/pickle file.
import os
import sys
import json
import getpass
import tempfile
import subprocess
import aespckfile
import aesjsonfile
def editfile(fn, password):
filetype = aespckfile
if ".json" in fn:
filetype = aesjsonfile
db = filetype.load(fn, password)
f = tempfile.NamedTemporaryFile()
json.dump(db, f, indent=2)
f.flush()
while True:
subprocess.call([os.getenv("EDITOR") or "editor", f.name])
try:
f.seek(0)
db = json.load(f)
filetype.dump(fn, db, password)
break
except Exception, e:
print "Error in json"
print e
print "Try again (y/n)? ",
input = raw_input()
if not input.lower().startswith("y"):
break
f.seek(0,2)
len = f.tell()
f.seek(0)
f.write(" " * len)
f.flush()
f.close()
if __name__ == "__main__":
if len(sys.argv) < 2:
sys.exit(1)
fn = sys.argv[1]
password = getpass.getpass()
editfile(fn, password)
<commit_msg>Check mtime to see if we need to write out new db file.<commit_after>
|
#!/usr/bin/python
# Edit an AES encrypted json/pickle file.
import os
import sys
import json
import getpass
import tempfile
import subprocess
import aespckfile
import aesjsonfile
def editfile(fn, password):
filetype = aespckfile
if ".json" in fn:
filetype = aesjsonfile
db = filetype.load(fn, password)
f = tempfile.NamedTemporaryFile()
json.dump(db, f, indent=2)
f.flush()
mtime = os.path.getmtime(f.name)
while True:
subprocess.call([os.getenv("EDITOR") or "editor", f.name])
if os.path.getmtime(f.name) == mtime:
print "Not updated"
break
try:
f.seek(0)
db = json.load(f)
filetype.dump(fn, db, password)
break
except Exception, e:
print "Error in json"
print e
print "Try again (y/n)? ",
input = raw_input()
if not input.lower().startswith("y"):
break
# Over-write our temp file
f.seek(0,2)
len = f.tell()
f.seek(0)
f.write(" " * len)
f.flush()
f.close()
if __name__ == "__main__":
if len(sys.argv) < 2:
sys.exit(1)
fn = sys.argv[1]
password = getpass.getpass()
editfile(fn, password)
|
#!/usr/bin/python
# Edit an AES encrypted json/pickle file.
import os
import sys
import json
import getpass
import tempfile
import subprocess
import aespckfile
import aesjsonfile
def editfile(fn, password):
filetype = aespckfile
if ".json" in fn:
filetype = aesjsonfile
db = filetype.load(fn, password)
f = tempfile.NamedTemporaryFile()
json.dump(db, f, indent=2)
f.flush()
while True:
subprocess.call([os.getenv("EDITOR") or "editor", f.name])
try:
f.seek(0)
db = json.load(f)
filetype.dump(fn, db, password)
break
except Exception, e:
print "Error in json"
print e
print "Try again (y/n)? ",
input = raw_input()
if not input.lower().startswith("y"):
break
f.seek(0,2)
len = f.tell()
f.seek(0)
f.write(" " * len)
f.flush()
f.close()
if __name__ == "__main__":
if len(sys.argv) < 2:
sys.exit(1)
fn = sys.argv[1]
password = getpass.getpass()
editfile(fn, password)
Check mtime to see if we need to write out new db file.#!/usr/bin/python
# Edit an AES encrypted json/pickle file.
import os
import sys
import json
import getpass
import tempfile
import subprocess
import aespckfile
import aesjsonfile
def editfile(fn, password):
filetype = aespckfile
if ".json" in fn:
filetype = aesjsonfile
db = filetype.load(fn, password)
f = tempfile.NamedTemporaryFile()
json.dump(db, f, indent=2)
f.flush()
mtime = os.path.getmtime(f.name)
while True:
subprocess.call([os.getenv("EDITOR") or "editor", f.name])
if os.path.getmtime(f.name) == mtime:
print "Not updated"
break
try:
f.seek(0)
db = json.load(f)
filetype.dump(fn, db, password)
break
except Exception, e:
print "Error in json"
print e
print "Try again (y/n)? ",
input = raw_input()
if not input.lower().startswith("y"):
break
# Over-write our temp file
f.seek(0,2)
len = f.tell()
f.seek(0)
f.write(" " * len)
f.flush()
f.close()
if __name__ == "__main__":
if len(sys.argv) < 2:
sys.exit(1)
fn = sys.argv[1]
password = getpass.getpass()
editfile(fn, password)
|
<commit_before>#!/usr/bin/python
# Edit an AES encrypted json/pickle file.
import os
import sys
import json
import getpass
import tempfile
import subprocess
import aespckfile
import aesjsonfile
def editfile(fn, password):
filetype = aespckfile
if ".json" in fn:
filetype = aesjsonfile
db = filetype.load(fn, password)
f = tempfile.NamedTemporaryFile()
json.dump(db, f, indent=2)
f.flush()
while True:
subprocess.call([os.getenv("EDITOR") or "editor", f.name])
try:
f.seek(0)
db = json.load(f)
filetype.dump(fn, db, password)
break
except Exception, e:
print "Error in json"
print e
print "Try again (y/n)? ",
input = raw_input()
if not input.lower().startswith("y"):
break
f.seek(0,2)
len = f.tell()
f.seek(0)
f.write(" " * len)
f.flush()
f.close()
if __name__ == "__main__":
if len(sys.argv) < 2:
sys.exit(1)
fn = sys.argv[1]
password = getpass.getpass()
editfile(fn, password)
<commit_msg>Check mtime to see if we need to write out new db file.<commit_after>#!/usr/bin/python
# Edit an AES encrypted json/pickle file.
import os
import sys
import json
import getpass
import tempfile
import subprocess
import aespckfile
import aesjsonfile
def editfile(fn, password):
filetype = aespckfile
if ".json" in fn:
filetype = aesjsonfile
db = filetype.load(fn, password)
f = tempfile.NamedTemporaryFile()
json.dump(db, f, indent=2)
f.flush()
mtime = os.path.getmtime(f.name)
while True:
subprocess.call([os.getenv("EDITOR") or "editor", f.name])
if os.path.getmtime(f.name) == mtime:
print "Not updated"
break
try:
f.seek(0)
db = json.load(f)
filetype.dump(fn, db, password)
break
except Exception, e:
print "Error in json"
print e
print "Try again (y/n)? ",
input = raw_input()
if not input.lower().startswith("y"):
break
# Over-write our temp file
f.seek(0,2)
len = f.tell()
f.seek(0)
f.write(" " * len)
f.flush()
f.close()
if __name__ == "__main__":
if len(sys.argv) < 2:
sys.exit(1)
fn = sys.argv[1]
password = getpass.getpass()
editfile(fn, password)
|
3dde4b27453456afe8197d9fa925d1743c5dafd0
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import os.path
from setuptools import setup, find_packages
import paypal
def read(*rnames):
return open(os.path.join(os.path.dirname(__file__), *rnames)).read()
DESCRIPTION = 'A pluggable Django application for integrating PayPal Payments Standard or Payments Pro'
URL = 'https://github.com/spookylukey/django-paypal'
DOCS_URL = 'https://django-paypal.readthedocs.org'
setup(
name='django-paypal',
version="0.2.5",
author='John Boxall',
author_email='john@handimobility.ca',
maintainer="Luke Plant",
maintainer_email="L.Plant.98@cantab.net",
url=URL,
install_requires=[
'Django>=1.4',
'six>=1.4.1',
'South>=1.0',
'requests>=2.5.3',
],
description=DESCRIPTION,
long_description="%s\n\nHome page: %s\n\nDocs: %s\n\n%s" % (DESCRIPTION, URL, DOCS_URL, read("CHANGES.rst")),
packages=find_packages(),
include_package_data=True,
classifiers=[
"Framework :: Django",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"Operating System :: OS Independent",
"Topic :: Software Development",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
],
)
|
#!/usr/bin/env python
import os.path
from setuptools import setup, find_packages
import paypal
def read(*rnames):
return open(os.path.join(os.path.dirname(__file__), *rnames)).read()
DESCRIPTION = 'A pluggable Django application for integrating PayPal Payments Standard or Payments Pro'
URL = 'https://github.com/spookylukey/django-paypal'
DOCS_URL = 'https://django-paypal.readthedocs.org'
setup(
name='django-paypal',
version="0.2.5",
author='John Boxall',
author_email='john@handimobility.ca',
maintainer="Luke Plant",
maintainer_email="L.Plant.98@cantab.net",
url=URL,
install_requires=[
'Django>=1.4',
'six>=1.4.1',
'requests>=2.5.3',
],
description=DESCRIPTION,
long_description="%s\n\nHome page: %s\n\nDocs: %s\n\n%s" % (DESCRIPTION, URL, DOCS_URL, read("CHANGES.rst")),
packages=find_packages(),
include_package_data=True,
classifiers=[
"Framework :: Django",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"Operating System :: OS Independent",
"Topic :: Software Development",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
],
)
|
Remove stupid South thing that is messing up Heroku
|
Remove stupid South thing that is messing up Heroku
|
Python
|
mit
|
millanp/django-paypal,millanp/django-paypal
|
#!/usr/bin/env python
import os.path
from setuptools import setup, find_packages
import paypal
def read(*rnames):
return open(os.path.join(os.path.dirname(__file__), *rnames)).read()
DESCRIPTION = 'A pluggable Django application for integrating PayPal Payments Standard or Payments Pro'
URL = 'https://github.com/spookylukey/django-paypal'
DOCS_URL = 'https://django-paypal.readthedocs.org'
setup(
name='django-paypal',
version="0.2.5",
author='John Boxall',
author_email='john@handimobility.ca',
maintainer="Luke Plant",
maintainer_email="L.Plant.98@cantab.net",
url=URL,
install_requires=[
'Django>=1.4',
'six>=1.4.1',
'South>=1.0',
'requests>=2.5.3',
],
description=DESCRIPTION,
long_description="%s\n\nHome page: %s\n\nDocs: %s\n\n%s" % (DESCRIPTION, URL, DOCS_URL, read("CHANGES.rst")),
packages=find_packages(),
include_package_data=True,
classifiers=[
"Framework :: Django",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"Operating System :: OS Independent",
"Topic :: Software Development",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
],
)
Remove stupid South thing that is messing up Heroku
|
#!/usr/bin/env python
import os.path
from setuptools import setup, find_packages
import paypal
def read(*rnames):
return open(os.path.join(os.path.dirname(__file__), *rnames)).read()
DESCRIPTION = 'A pluggable Django application for integrating PayPal Payments Standard or Payments Pro'
URL = 'https://github.com/spookylukey/django-paypal'
DOCS_URL = 'https://django-paypal.readthedocs.org'
setup(
name='django-paypal',
version="0.2.5",
author='John Boxall',
author_email='john@handimobility.ca',
maintainer="Luke Plant",
maintainer_email="L.Plant.98@cantab.net",
url=URL,
install_requires=[
'Django>=1.4',
'six>=1.4.1',
'requests>=2.5.3',
],
description=DESCRIPTION,
long_description="%s\n\nHome page: %s\n\nDocs: %s\n\n%s" % (DESCRIPTION, URL, DOCS_URL, read("CHANGES.rst")),
packages=find_packages(),
include_package_data=True,
classifiers=[
"Framework :: Django",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"Operating System :: OS Independent",
"Topic :: Software Development",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
],
)
|
<commit_before>#!/usr/bin/env python
import os.path
from setuptools import setup, find_packages
import paypal
def read(*rnames):
return open(os.path.join(os.path.dirname(__file__), *rnames)).read()
DESCRIPTION = 'A pluggable Django application for integrating PayPal Payments Standard or Payments Pro'
URL = 'https://github.com/spookylukey/django-paypal'
DOCS_URL = 'https://django-paypal.readthedocs.org'
setup(
name='django-paypal',
version="0.2.5",
author='John Boxall',
author_email='john@handimobility.ca',
maintainer="Luke Plant",
maintainer_email="L.Plant.98@cantab.net",
url=URL,
install_requires=[
'Django>=1.4',
'six>=1.4.1',
'South>=1.0',
'requests>=2.5.3',
],
description=DESCRIPTION,
long_description="%s\n\nHome page: %s\n\nDocs: %s\n\n%s" % (DESCRIPTION, URL, DOCS_URL, read("CHANGES.rst")),
packages=find_packages(),
include_package_data=True,
classifiers=[
"Framework :: Django",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"Operating System :: OS Independent",
"Topic :: Software Development",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
],
)
<commit_msg>Remove stupid South thing that is messing up Heroku<commit_after>
|
#!/usr/bin/env python
import os.path
from setuptools import setup, find_packages
import paypal
def read(*rnames):
return open(os.path.join(os.path.dirname(__file__), *rnames)).read()
DESCRIPTION = 'A pluggable Django application for integrating PayPal Payments Standard or Payments Pro'
URL = 'https://github.com/spookylukey/django-paypal'
DOCS_URL = 'https://django-paypal.readthedocs.org'
setup(
name='django-paypal',
version="0.2.5",
author='John Boxall',
author_email='john@handimobility.ca',
maintainer="Luke Plant",
maintainer_email="L.Plant.98@cantab.net",
url=URL,
install_requires=[
'Django>=1.4',
'six>=1.4.1',
'requests>=2.5.3',
],
description=DESCRIPTION,
long_description="%s\n\nHome page: %s\n\nDocs: %s\n\n%s" % (DESCRIPTION, URL, DOCS_URL, read("CHANGES.rst")),
packages=find_packages(),
include_package_data=True,
classifiers=[
"Framework :: Django",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"Operating System :: OS Independent",
"Topic :: Software Development",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
],
)
|
#!/usr/bin/env python
import os.path
from setuptools import setup, find_packages
import paypal
def read(*rnames):
return open(os.path.join(os.path.dirname(__file__), *rnames)).read()
DESCRIPTION = 'A pluggable Django application for integrating PayPal Payments Standard or Payments Pro'
URL = 'https://github.com/spookylukey/django-paypal'
DOCS_URL = 'https://django-paypal.readthedocs.org'
setup(
name='django-paypal',
version="0.2.5",
author='John Boxall',
author_email='john@handimobility.ca',
maintainer="Luke Plant",
maintainer_email="L.Plant.98@cantab.net",
url=URL,
install_requires=[
'Django>=1.4',
'six>=1.4.1',
'South>=1.0',
'requests>=2.5.3',
],
description=DESCRIPTION,
long_description="%s\n\nHome page: %s\n\nDocs: %s\n\n%s" % (DESCRIPTION, URL, DOCS_URL, read("CHANGES.rst")),
packages=find_packages(),
include_package_data=True,
classifiers=[
"Framework :: Django",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"Operating System :: OS Independent",
"Topic :: Software Development",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
],
)
Remove stupid South thing that is messing up Heroku#!/usr/bin/env python
import os.path
from setuptools import setup, find_packages
import paypal
def read(*rnames):
return open(os.path.join(os.path.dirname(__file__), *rnames)).read()
DESCRIPTION = 'A pluggable Django application for integrating PayPal Payments Standard or Payments Pro'
URL = 'https://github.com/spookylukey/django-paypal'
DOCS_URL = 'https://django-paypal.readthedocs.org'
setup(
name='django-paypal',
version="0.2.5",
author='John Boxall',
author_email='john@handimobility.ca',
maintainer="Luke Plant",
maintainer_email="L.Plant.98@cantab.net",
url=URL,
install_requires=[
'Django>=1.4',
'six>=1.4.1',
'requests>=2.5.3',
],
description=DESCRIPTION,
long_description="%s\n\nHome page: %s\n\nDocs: %s\n\n%s" % (DESCRIPTION, URL, DOCS_URL, read("CHANGES.rst")),
packages=find_packages(),
include_package_data=True,
classifiers=[
"Framework :: Django",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"Operating System :: OS Independent",
"Topic :: Software Development",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
],
)
|
<commit_before>#!/usr/bin/env python
import os.path
from setuptools import setup, find_packages
import paypal
def read(*rnames):
return open(os.path.join(os.path.dirname(__file__), *rnames)).read()
DESCRIPTION = 'A pluggable Django application for integrating PayPal Payments Standard or Payments Pro'
URL = 'https://github.com/spookylukey/django-paypal'
DOCS_URL = 'https://django-paypal.readthedocs.org'
setup(
name='django-paypal',
version="0.2.5",
author='John Boxall',
author_email='john@handimobility.ca',
maintainer="Luke Plant",
maintainer_email="L.Plant.98@cantab.net",
url=URL,
install_requires=[
'Django>=1.4',
'six>=1.4.1',
'South>=1.0',
'requests>=2.5.3',
],
description=DESCRIPTION,
long_description="%s\n\nHome page: %s\n\nDocs: %s\n\n%s" % (DESCRIPTION, URL, DOCS_URL, read("CHANGES.rst")),
packages=find_packages(),
include_package_data=True,
classifiers=[
"Framework :: Django",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"Operating System :: OS Independent",
"Topic :: Software Development",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
],
)
<commit_msg>Remove stupid South thing that is messing up Heroku<commit_after>#!/usr/bin/env python
import os.path
from setuptools import setup, find_packages
import paypal
def read(*rnames):
return open(os.path.join(os.path.dirname(__file__), *rnames)).read()
DESCRIPTION = 'A pluggable Django application for integrating PayPal Payments Standard or Payments Pro'
URL = 'https://github.com/spookylukey/django-paypal'
DOCS_URL = 'https://django-paypal.readthedocs.org'
setup(
name='django-paypal',
version="0.2.5",
author='John Boxall',
author_email='john@handimobility.ca',
maintainer="Luke Plant",
maintainer_email="L.Plant.98@cantab.net",
url=URL,
install_requires=[
'Django>=1.4',
'six>=1.4.1',
'requests>=2.5.3',
],
description=DESCRIPTION,
long_description="%s\n\nHome page: %s\n\nDocs: %s\n\n%s" % (DESCRIPTION, URL, DOCS_URL, read("CHANGES.rst")),
packages=find_packages(),
include_package_data=True,
classifiers=[
"Framework :: Django",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"Operating System :: OS Independent",
"Topic :: Software Development",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
],
)
|
6150121c20c26dfc1dd7cd2b2bffd2f828ce65d6
|
setup.py
|
setup.py
|
import os
from os.path import relpath, join
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
def find_package_data(data_root, package_root):
files = []
for root, dirnames, filenames in os.walk(data_root):
for fn in filenames:
files.append(relpath(join(root, fn), package_root))
return files
setup(
name = "smarty",
version = "0.1.4",
author = "John Chodera, David Mobley, and others",
author_email = "john.chodera@choderalab.org",
description = ("Automated Bayesian atomtype sampling"),
license = "GNU Lesser General Public License (LGPL), Version 3",
keywords = "Bayesian atomtype sampling forcefield parameterization",
url = "http://github.com/open-forcefield-group/smarty",
packages=['smarty', 'smarty/tests', 'smarty/data'],
long_description=read('README.md'),
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: GNU Lesser General Public License (LGPL), Version 3",
],
entry_points={'console_scripts': ['smarty = smarty.cli_smarty:main', 'smirky = smarty.cli_smirky:main']},
package_data={'smarty': find_package_data('smarty/data', 'smarty')},
)
|
import os
from os.path import relpath, join
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
def find_package_data(data_root, package_root):
files = []
for root, dirnames, filenames in os.walk(data_root):
for fn in filenames:
files.append(relpath(join(root, fn), package_root))
return files
setup(
name = "smarty",
version = "0.1.5",
author = "John Chodera, David Mobley, and others",
author_email = "john.chodera@choderalab.org",
description = ("Automated Bayesian atomtype sampling"),
license = "GNU Lesser General Public License (LGPL), Version 3",
keywords = "Bayesian atomtype sampling forcefield parameterization",
url = "http://github.com/open-forcefield-group/smarty",
packages=['smarty', 'smarty/tests', 'smarty/data'],
long_description=read('README.md'),
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: GNU Lesser General Public License (LGPL), Version 3",
],
entry_points={'console_scripts': ['smarty = smarty.cli_smarty:main', 'smirky = smarty.cli_smirky:main']},
package_data={'smarty': find_package_data('smarty/data', 'smarty')},
)
|
Increment version number for release
|
Increment version number for release
|
Python
|
mit
|
open-forcefield-group/openforcefield,open-forcefield-group/openforcefield,openforcefield/openff-toolkit,open-forcefield-group/openforcefield,openforcefield/openff-toolkit
|
import os
from os.path import relpath, join
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
def find_package_data(data_root, package_root):
files = []
for root, dirnames, filenames in os.walk(data_root):
for fn in filenames:
files.append(relpath(join(root, fn), package_root))
return files
setup(
name = "smarty",
version = "0.1.4",
author = "John Chodera, David Mobley, and others",
author_email = "john.chodera@choderalab.org",
description = ("Automated Bayesian atomtype sampling"),
license = "GNU Lesser General Public License (LGPL), Version 3",
keywords = "Bayesian atomtype sampling forcefield parameterization",
url = "http://github.com/open-forcefield-group/smarty",
packages=['smarty', 'smarty/tests', 'smarty/data'],
long_description=read('README.md'),
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: GNU Lesser General Public License (LGPL), Version 3",
],
entry_points={'console_scripts': ['smarty = smarty.cli_smarty:main', 'smirky = smarty.cli_smirky:main']},
package_data={'smarty': find_package_data('smarty/data', 'smarty')},
)
Increment version number for release
|
import os
from os.path import relpath, join
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
def find_package_data(data_root, package_root):
files = []
for root, dirnames, filenames in os.walk(data_root):
for fn in filenames:
files.append(relpath(join(root, fn), package_root))
return files
setup(
name = "smarty",
version = "0.1.5",
author = "John Chodera, David Mobley, and others",
author_email = "john.chodera@choderalab.org",
description = ("Automated Bayesian atomtype sampling"),
license = "GNU Lesser General Public License (LGPL), Version 3",
keywords = "Bayesian atomtype sampling forcefield parameterization",
url = "http://github.com/open-forcefield-group/smarty",
packages=['smarty', 'smarty/tests', 'smarty/data'],
long_description=read('README.md'),
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: GNU Lesser General Public License (LGPL), Version 3",
],
entry_points={'console_scripts': ['smarty = smarty.cli_smarty:main', 'smirky = smarty.cli_smirky:main']},
package_data={'smarty': find_package_data('smarty/data', 'smarty')},
)
|
<commit_before>import os
from os.path import relpath, join
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
def find_package_data(data_root, package_root):
files = []
for root, dirnames, filenames in os.walk(data_root):
for fn in filenames:
files.append(relpath(join(root, fn), package_root))
return files
setup(
name = "smarty",
version = "0.1.4",
author = "John Chodera, David Mobley, and others",
author_email = "john.chodera@choderalab.org",
description = ("Automated Bayesian atomtype sampling"),
license = "GNU Lesser General Public License (LGPL), Version 3",
keywords = "Bayesian atomtype sampling forcefield parameterization",
url = "http://github.com/open-forcefield-group/smarty",
packages=['smarty', 'smarty/tests', 'smarty/data'],
long_description=read('README.md'),
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: GNU Lesser General Public License (LGPL), Version 3",
],
entry_points={'console_scripts': ['smarty = smarty.cli_smarty:main', 'smirky = smarty.cli_smirky:main']},
package_data={'smarty': find_package_data('smarty/data', 'smarty')},
)
<commit_msg>Increment version number for release<commit_after>
|
import os
from os.path import relpath, join
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
def find_package_data(data_root, package_root):
files = []
for root, dirnames, filenames in os.walk(data_root):
for fn in filenames:
files.append(relpath(join(root, fn), package_root))
return files
setup(
name = "smarty",
version = "0.1.5",
author = "John Chodera, David Mobley, and others",
author_email = "john.chodera@choderalab.org",
description = ("Automated Bayesian atomtype sampling"),
license = "GNU Lesser General Public License (LGPL), Version 3",
keywords = "Bayesian atomtype sampling forcefield parameterization",
url = "http://github.com/open-forcefield-group/smarty",
packages=['smarty', 'smarty/tests', 'smarty/data'],
long_description=read('README.md'),
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: GNU Lesser General Public License (LGPL), Version 3",
],
entry_points={'console_scripts': ['smarty = smarty.cli_smarty:main', 'smirky = smarty.cli_smirky:main']},
package_data={'smarty': find_package_data('smarty/data', 'smarty')},
)
|
import os
from os.path import relpath, join
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
def find_package_data(data_root, package_root):
files = []
for root, dirnames, filenames in os.walk(data_root):
for fn in filenames:
files.append(relpath(join(root, fn), package_root))
return files
setup(
name = "smarty",
version = "0.1.4",
author = "John Chodera, David Mobley, and others",
author_email = "john.chodera@choderalab.org",
description = ("Automated Bayesian atomtype sampling"),
license = "GNU Lesser General Public License (LGPL), Version 3",
keywords = "Bayesian atomtype sampling forcefield parameterization",
url = "http://github.com/open-forcefield-group/smarty",
packages=['smarty', 'smarty/tests', 'smarty/data'],
long_description=read('README.md'),
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: GNU Lesser General Public License (LGPL), Version 3",
],
entry_points={'console_scripts': ['smarty = smarty.cli_smarty:main', 'smirky = smarty.cli_smirky:main']},
package_data={'smarty': find_package_data('smarty/data', 'smarty')},
)
Increment version number for releaseimport os
from os.path import relpath, join
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
def find_package_data(data_root, package_root):
files = []
for root, dirnames, filenames in os.walk(data_root):
for fn in filenames:
files.append(relpath(join(root, fn), package_root))
return files
setup(
name = "smarty",
version = "0.1.5",
author = "John Chodera, David Mobley, and others",
author_email = "john.chodera@choderalab.org",
description = ("Automated Bayesian atomtype sampling"),
license = "GNU Lesser General Public License (LGPL), Version 3",
keywords = "Bayesian atomtype sampling forcefield parameterization",
url = "http://github.com/open-forcefield-group/smarty",
packages=['smarty', 'smarty/tests', 'smarty/data'],
long_description=read('README.md'),
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: GNU Lesser General Public License (LGPL), Version 3",
],
entry_points={'console_scripts': ['smarty = smarty.cli_smarty:main', 'smirky = smarty.cli_smirky:main']},
package_data={'smarty': find_package_data('smarty/data', 'smarty')},
)
|
<commit_before>import os
from os.path import relpath, join
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
def find_package_data(data_root, package_root):
files = []
for root, dirnames, filenames in os.walk(data_root):
for fn in filenames:
files.append(relpath(join(root, fn), package_root))
return files
setup(
name = "smarty",
version = "0.1.4",
author = "John Chodera, David Mobley, and others",
author_email = "john.chodera@choderalab.org",
description = ("Automated Bayesian atomtype sampling"),
license = "GNU Lesser General Public License (LGPL), Version 3",
keywords = "Bayesian atomtype sampling forcefield parameterization",
url = "http://github.com/open-forcefield-group/smarty",
packages=['smarty', 'smarty/tests', 'smarty/data'],
long_description=read('README.md'),
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: GNU Lesser General Public License (LGPL), Version 3",
],
entry_points={'console_scripts': ['smarty = smarty.cli_smarty:main', 'smirky = smarty.cli_smirky:main']},
package_data={'smarty': find_package_data('smarty/data', 'smarty')},
)
<commit_msg>Increment version number for release<commit_after>import os
from os.path import relpath, join
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
def find_package_data(data_root, package_root):
files = []
for root, dirnames, filenames in os.walk(data_root):
for fn in filenames:
files.append(relpath(join(root, fn), package_root))
return files
setup(
name = "smarty",
version = "0.1.5",
author = "John Chodera, David Mobley, and others",
author_email = "john.chodera@choderalab.org",
description = ("Automated Bayesian atomtype sampling"),
license = "GNU Lesser General Public License (LGPL), Version 3",
keywords = "Bayesian atomtype sampling forcefield parameterization",
url = "http://github.com/open-forcefield-group/smarty",
packages=['smarty', 'smarty/tests', 'smarty/data'],
long_description=read('README.md'),
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: GNU Lesser General Public License (LGPL), Version 3",
],
entry_points={'console_scripts': ['smarty = smarty.cli_smarty:main', 'smirky = smarty.cli_smirky:main']},
package_data={'smarty': find_package_data('smarty/data', 'smarty')},
)
|
29a3e0ebf69531357d832d6014d15a3fe2f76682
|
setup.py
|
setup.py
|
from os.path import abspath, dirname, join
from setuptools import find_packages, setup
REQUIREMENTS = [
'requests',
'six',
'websocket-client',
]
HERE = dirname(abspath(__file__))
DESCRIPTION = '\n\n'.join(open(join(HERE, _), encoding="utf8").read() for _ in [
'README.rst',
'CHANGES.rst',
])
setup(
name='socketIO_client',
version='0.6.3',
description='A socket.io client library',
long_description=DESCRIPTION,
license='MIT',
classifiers=[
'Intended Audience :: Developers',
'Programming Language :: Python',
'License :: OSI Approved :: MIT License',
],
keywords='socket.io node.js',
author='Roy Hyunjin Han',
author_email='rhh@crosscompute.com',
url='https://github.com/invisibleroads/socketIO-client',
install_requires=REQUIREMENTS,
tests_require=[
'nose',
'coverage',
],
packages=find_packages(),
include_package_data=True,
zip_safe=False)
|
import io
from os.path import abspath, dirname, join
from setuptools import find_packages, setup
REQUIREMENTS = [
'requests',
'six',
'websocket-client',
]
HERE = dirname(abspath(__file__))
LOAD_TEXT = lambda name: io.open(join(HERE, name), encoding='UTF-8').read()
DESCRIPTION = '\n\n'.join(LOAD_TEXT(_) for _ in [
'README.rst',
'CHANGES.rst',
])
setup(
name='socketIO_client',
version='0.6.3',
description='A socket.io client library',
long_description=DESCRIPTION,
license='MIT',
classifiers=[
'Intended Audience :: Developers',
'Programming Language :: Python',
'License :: OSI Approved :: MIT License',
],
keywords='socket.io node.js',
author='Roy Hyunjin Han',
author_email='rhh@crosscompute.com',
url='https://github.com/invisibleroads/socketIO-client',
install_requires=REQUIREMENTS,
tests_require=[
'nose',
'coverage',
],
packages=find_packages(),
include_package_data=True,
zip_safe=False)
|
Support Python 2.6 & 2.7
|
Support Python 2.6 & 2.7
|
Python
|
mit
|
connor4312/socketIO-client,invisibleroads/socketIO-client,invisibleroads/socketIO-client,danielquinn/socketIO-client,feus4177/socketIO-client-2,connor4312/socketIO-client,feus4177/socketIO-client-2,invisibleroads/socketIO-client,danielquinn/socketIO-client,feus4177/socketIO-client-2,danielquinn/socketIO-client,connor4312/socketIO-client
|
from os.path import abspath, dirname, join
from setuptools import find_packages, setup
REQUIREMENTS = [
'requests',
'six',
'websocket-client',
]
HERE = dirname(abspath(__file__))
DESCRIPTION = '\n\n'.join(open(join(HERE, _), encoding="utf8").read() for _ in [
'README.rst',
'CHANGES.rst',
])
setup(
name='socketIO_client',
version='0.6.3',
description='A socket.io client library',
long_description=DESCRIPTION,
license='MIT',
classifiers=[
'Intended Audience :: Developers',
'Programming Language :: Python',
'License :: OSI Approved :: MIT License',
],
keywords='socket.io node.js',
author='Roy Hyunjin Han',
author_email='rhh@crosscompute.com',
url='https://github.com/invisibleroads/socketIO-client',
install_requires=REQUIREMENTS,
tests_require=[
'nose',
'coverage',
],
packages=find_packages(),
include_package_data=True,
zip_safe=False)
Support Python 2.6 & 2.7
|
import io
from os.path import abspath, dirname, join
from setuptools import find_packages, setup
REQUIREMENTS = [
'requests',
'six',
'websocket-client',
]
HERE = dirname(abspath(__file__))
LOAD_TEXT = lambda name: io.open(join(HERE, name), encoding='UTF-8').read()
DESCRIPTION = '\n\n'.join(LOAD_TEXT(_) for _ in [
'README.rst',
'CHANGES.rst',
])
setup(
name='socketIO_client',
version='0.6.3',
description='A socket.io client library',
long_description=DESCRIPTION,
license='MIT',
classifiers=[
'Intended Audience :: Developers',
'Programming Language :: Python',
'License :: OSI Approved :: MIT License',
],
keywords='socket.io node.js',
author='Roy Hyunjin Han',
author_email='rhh@crosscompute.com',
url='https://github.com/invisibleroads/socketIO-client',
install_requires=REQUIREMENTS,
tests_require=[
'nose',
'coverage',
],
packages=find_packages(),
include_package_data=True,
zip_safe=False)
|
<commit_before>from os.path import abspath, dirname, join
from setuptools import find_packages, setup
REQUIREMENTS = [
'requests',
'six',
'websocket-client',
]
HERE = dirname(abspath(__file__))
DESCRIPTION = '\n\n'.join(open(join(HERE, _), encoding="utf8").read() for _ in [
'README.rst',
'CHANGES.rst',
])
setup(
name='socketIO_client',
version='0.6.3',
description='A socket.io client library',
long_description=DESCRIPTION,
license='MIT',
classifiers=[
'Intended Audience :: Developers',
'Programming Language :: Python',
'License :: OSI Approved :: MIT License',
],
keywords='socket.io node.js',
author='Roy Hyunjin Han',
author_email='rhh@crosscompute.com',
url='https://github.com/invisibleroads/socketIO-client',
install_requires=REQUIREMENTS,
tests_require=[
'nose',
'coverage',
],
packages=find_packages(),
include_package_data=True,
zip_safe=False)
<commit_msg>Support Python 2.6 & 2.7<commit_after>
|
import io
from os.path import abspath, dirname, join
from setuptools import find_packages, setup
REQUIREMENTS = [
'requests',
'six',
'websocket-client',
]
HERE = dirname(abspath(__file__))
LOAD_TEXT = lambda name: io.open(join(HERE, name), encoding='UTF-8').read()
DESCRIPTION = '\n\n'.join(LOAD_TEXT(_) for _ in [
'README.rst',
'CHANGES.rst',
])
setup(
name='socketIO_client',
version='0.6.3',
description='A socket.io client library',
long_description=DESCRIPTION,
license='MIT',
classifiers=[
'Intended Audience :: Developers',
'Programming Language :: Python',
'License :: OSI Approved :: MIT License',
],
keywords='socket.io node.js',
author='Roy Hyunjin Han',
author_email='rhh@crosscompute.com',
url='https://github.com/invisibleroads/socketIO-client',
install_requires=REQUIREMENTS,
tests_require=[
'nose',
'coverage',
],
packages=find_packages(),
include_package_data=True,
zip_safe=False)
|
from os.path import abspath, dirname, join
from setuptools import find_packages, setup
REQUIREMENTS = [
'requests',
'six',
'websocket-client',
]
HERE = dirname(abspath(__file__))
DESCRIPTION = '\n\n'.join(open(join(HERE, _), encoding="utf8").read() for _ in [
'README.rst',
'CHANGES.rst',
])
setup(
name='socketIO_client',
version='0.6.3',
description='A socket.io client library',
long_description=DESCRIPTION,
license='MIT',
classifiers=[
'Intended Audience :: Developers',
'Programming Language :: Python',
'License :: OSI Approved :: MIT License',
],
keywords='socket.io node.js',
author='Roy Hyunjin Han',
author_email='rhh@crosscompute.com',
url='https://github.com/invisibleroads/socketIO-client',
install_requires=REQUIREMENTS,
tests_require=[
'nose',
'coverage',
],
packages=find_packages(),
include_package_data=True,
zip_safe=False)
Support Python 2.6 & 2.7import io
from os.path import abspath, dirname, join
from setuptools import find_packages, setup
REQUIREMENTS = [
'requests',
'six',
'websocket-client',
]
HERE = dirname(abspath(__file__))
LOAD_TEXT = lambda name: io.open(join(HERE, name), encoding='UTF-8').read()
DESCRIPTION = '\n\n'.join(LOAD_TEXT(_) for _ in [
'README.rst',
'CHANGES.rst',
])
setup(
name='socketIO_client',
version='0.6.3',
description='A socket.io client library',
long_description=DESCRIPTION,
license='MIT',
classifiers=[
'Intended Audience :: Developers',
'Programming Language :: Python',
'License :: OSI Approved :: MIT License',
],
keywords='socket.io node.js',
author='Roy Hyunjin Han',
author_email='rhh@crosscompute.com',
url='https://github.com/invisibleroads/socketIO-client',
install_requires=REQUIREMENTS,
tests_require=[
'nose',
'coverage',
],
packages=find_packages(),
include_package_data=True,
zip_safe=False)
|
<commit_before>from os.path import abspath, dirname, join
from setuptools import find_packages, setup
REQUIREMENTS = [
'requests',
'six',
'websocket-client',
]
HERE = dirname(abspath(__file__))
DESCRIPTION = '\n\n'.join(open(join(HERE, _), encoding="utf8").read() for _ in [
'README.rst',
'CHANGES.rst',
])
setup(
name='socketIO_client',
version='0.6.3',
description='A socket.io client library',
long_description=DESCRIPTION,
license='MIT',
classifiers=[
'Intended Audience :: Developers',
'Programming Language :: Python',
'License :: OSI Approved :: MIT License',
],
keywords='socket.io node.js',
author='Roy Hyunjin Han',
author_email='rhh@crosscompute.com',
url='https://github.com/invisibleroads/socketIO-client',
install_requires=REQUIREMENTS,
tests_require=[
'nose',
'coverage',
],
packages=find_packages(),
include_package_data=True,
zip_safe=False)
<commit_msg>Support Python 2.6 & 2.7<commit_after>import io
from os.path import abspath, dirname, join
from setuptools import find_packages, setup
REQUIREMENTS = [
'requests',
'six',
'websocket-client',
]
HERE = dirname(abspath(__file__))
LOAD_TEXT = lambda name: io.open(join(HERE, name), encoding='UTF-8').read()
DESCRIPTION = '\n\n'.join(LOAD_TEXT(_) for _ in [
'README.rst',
'CHANGES.rst',
])
setup(
name='socketIO_client',
version='0.6.3',
description='A socket.io client library',
long_description=DESCRIPTION,
license='MIT',
classifiers=[
'Intended Audience :: Developers',
'Programming Language :: Python',
'License :: OSI Approved :: MIT License',
],
keywords='socket.io node.js',
author='Roy Hyunjin Han',
author_email='rhh@crosscompute.com',
url='https://github.com/invisibleroads/socketIO-client',
install_requires=REQUIREMENTS,
tests_require=[
'nose',
'coverage',
],
packages=find_packages(),
include_package_data=True,
zip_safe=False)
|
56155820573d27fcbd73402758f5483010a00ec9
|
setup.py
|
setup.py
|
from setuptools import setup
from tools.generate_pyi import generate_pyi
def main():
# Generate .pyi files
import pyxtf.xtf_ctypes
generate_pyi(pyxtf.xtf_ctypes)
import pyxtf.vendors.kongsberg
generate_pyi(pyxtf.vendors.kongsberg)
# Run setup script
setup(name='pyxtf',
version='0.1',
description='eXtended Triton Format (XTF) file interface',
author='Oystein Sture',
author_email='oysstu@gmail.com',
url='https://github.com/oysstu/pyxtf',
license='MIT',
setup_requires=['numpy'],
install_requires=['numpy'],
packages=['pyxtf', 'pyxtf.vendors'],
package_data={'':['*.pyi']},
use_2to3=False,
classifiers=[
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Intended Audience :: Other Audience',
'Intended Audience :: Science/Research',
'Natural Language :: English',
'Topic :: Scientific/Engineering',
'Programming Language:: Python:: 3:: Only'
])
if __name__ == '__main__':
main()
|
from setuptools import setup
from tools.generate_pyi import generate_pyi
def main():
# Generate .pyi files
import pyxtf.xtf_ctypes
generate_pyi(pyxtf.xtf_ctypes)
import pyxtf.vendors.kongsberg
generate_pyi(pyxtf.vendors.kongsberg)
# Run setup script
setup(name='pyxtf',
version='0.1',
description='eXtended Triton Format (XTF) file interface',
author='Oystein Sture',
author_email='oysstu@gmail.com',
url='https://github.com/oysstu/pyxtf',
license='MIT',
setup_requires=['numpy>=1.11'],
install_requires=['numpy>=1.11'],
packages=['pyxtf', 'pyxtf.vendors'],
package_data={'':['*.pyi']},
use_2to3=False,
classifiers=[
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Intended Audience :: Other Audience',
'Intended Audience :: Science/Research',
'Natural Language :: English',
'Topic :: Scientific/Engineering',
'Programming Language:: Python:: 3:: Only'
])
if __name__ == '__main__':
main()
|
Set minimum numpy version to 1.11, due to backwards compatibility issues for numpy.datetime64 (localtime used by default)
|
Set minimum numpy version to 1.11, due to backwards compatibility issues for numpy.datetime64 (localtime used by default)
|
Python
|
mit
|
oysstu/pyxtf
|
from setuptools import setup
from tools.generate_pyi import generate_pyi
def main():
# Generate .pyi files
import pyxtf.xtf_ctypes
generate_pyi(pyxtf.xtf_ctypes)
import pyxtf.vendors.kongsberg
generate_pyi(pyxtf.vendors.kongsberg)
# Run setup script
setup(name='pyxtf',
version='0.1',
description='eXtended Triton Format (XTF) file interface',
author='Oystein Sture',
author_email='oysstu@gmail.com',
url='https://github.com/oysstu/pyxtf',
license='MIT',
setup_requires=['numpy'],
install_requires=['numpy'],
packages=['pyxtf', 'pyxtf.vendors'],
package_data={'':['*.pyi']},
use_2to3=False,
classifiers=[
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Intended Audience :: Other Audience',
'Intended Audience :: Science/Research',
'Natural Language :: English',
'Topic :: Scientific/Engineering',
'Programming Language:: Python:: 3:: Only'
])
if __name__ == '__main__':
main()Set minimum numpy version to 1.11, due to backwards compatibility issues for numpy.datetime64 (localtime used by default)
|
from setuptools import setup
from tools.generate_pyi import generate_pyi
def main():
# Generate .pyi files
import pyxtf.xtf_ctypes
generate_pyi(pyxtf.xtf_ctypes)
import pyxtf.vendors.kongsberg
generate_pyi(pyxtf.vendors.kongsberg)
# Run setup script
setup(name='pyxtf',
version='0.1',
description='eXtended Triton Format (XTF) file interface',
author='Oystein Sture',
author_email='oysstu@gmail.com',
url='https://github.com/oysstu/pyxtf',
license='MIT',
setup_requires=['numpy>=1.11'],
install_requires=['numpy>=1.11'],
packages=['pyxtf', 'pyxtf.vendors'],
package_data={'':['*.pyi']},
use_2to3=False,
classifiers=[
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Intended Audience :: Other Audience',
'Intended Audience :: Science/Research',
'Natural Language :: English',
'Topic :: Scientific/Engineering',
'Programming Language:: Python:: 3:: Only'
])
if __name__ == '__main__':
main()
|
<commit_before>from setuptools import setup
from tools.generate_pyi import generate_pyi
def main():
# Generate .pyi files
import pyxtf.xtf_ctypes
generate_pyi(pyxtf.xtf_ctypes)
import pyxtf.vendors.kongsberg
generate_pyi(pyxtf.vendors.kongsberg)
# Run setup script
setup(name='pyxtf',
version='0.1',
description='eXtended Triton Format (XTF) file interface',
author='Oystein Sture',
author_email='oysstu@gmail.com',
url='https://github.com/oysstu/pyxtf',
license='MIT',
setup_requires=['numpy'],
install_requires=['numpy'],
packages=['pyxtf', 'pyxtf.vendors'],
package_data={'':['*.pyi']},
use_2to3=False,
classifiers=[
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Intended Audience :: Other Audience',
'Intended Audience :: Science/Research',
'Natural Language :: English',
'Topic :: Scientific/Engineering',
'Programming Language:: Python:: 3:: Only'
])
if __name__ == '__main__':
main()<commit_msg>Set minimum numpy version to 1.11, due to backwards compatibility issues for numpy.datetime64 (localtime used by default)<commit_after>
|
from setuptools import setup
from tools.generate_pyi import generate_pyi
def main():
# Generate .pyi files
import pyxtf.xtf_ctypes
generate_pyi(pyxtf.xtf_ctypes)
import pyxtf.vendors.kongsberg
generate_pyi(pyxtf.vendors.kongsberg)
# Run setup script
setup(name='pyxtf',
version='0.1',
description='eXtended Triton Format (XTF) file interface',
author='Oystein Sture',
author_email='oysstu@gmail.com',
url='https://github.com/oysstu/pyxtf',
license='MIT',
setup_requires=['numpy>=1.11'],
install_requires=['numpy>=1.11'],
packages=['pyxtf', 'pyxtf.vendors'],
package_data={'':['*.pyi']},
use_2to3=False,
classifiers=[
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Intended Audience :: Other Audience',
'Intended Audience :: Science/Research',
'Natural Language :: English',
'Topic :: Scientific/Engineering',
'Programming Language:: Python:: 3:: Only'
])
if __name__ == '__main__':
main()
|
from setuptools import setup
from tools.generate_pyi import generate_pyi
def main():
# Generate .pyi files
import pyxtf.xtf_ctypes
generate_pyi(pyxtf.xtf_ctypes)
import pyxtf.vendors.kongsberg
generate_pyi(pyxtf.vendors.kongsberg)
# Run setup script
setup(name='pyxtf',
version='0.1',
description='eXtended Triton Format (XTF) file interface',
author='Oystein Sture',
author_email='oysstu@gmail.com',
url='https://github.com/oysstu/pyxtf',
license='MIT',
setup_requires=['numpy'],
install_requires=['numpy'],
packages=['pyxtf', 'pyxtf.vendors'],
package_data={'':['*.pyi']},
use_2to3=False,
classifiers=[
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Intended Audience :: Other Audience',
'Intended Audience :: Science/Research',
'Natural Language :: English',
'Topic :: Scientific/Engineering',
'Programming Language:: Python:: 3:: Only'
])
if __name__ == '__main__':
main()Set minimum numpy version to 1.11, due to backwards compatibility issues for numpy.datetime64 (localtime used by default)from setuptools import setup
from tools.generate_pyi import generate_pyi
def main():
# Generate .pyi files
import pyxtf.xtf_ctypes
generate_pyi(pyxtf.xtf_ctypes)
import pyxtf.vendors.kongsberg
generate_pyi(pyxtf.vendors.kongsberg)
# Run setup script
setup(name='pyxtf',
version='0.1',
description='eXtended Triton Format (XTF) file interface',
author='Oystein Sture',
author_email='oysstu@gmail.com',
url='https://github.com/oysstu/pyxtf',
license='MIT',
setup_requires=['numpy>=1.11'],
install_requires=['numpy>=1.11'],
packages=['pyxtf', 'pyxtf.vendors'],
package_data={'':['*.pyi']},
use_2to3=False,
classifiers=[
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Intended Audience :: Other Audience',
'Intended Audience :: Science/Research',
'Natural Language :: English',
'Topic :: Scientific/Engineering',
'Programming Language:: Python:: 3:: Only'
])
if __name__ == '__main__':
main()
|
<commit_before>from setuptools import setup
from tools.generate_pyi import generate_pyi
def main():
# Generate .pyi files
import pyxtf.xtf_ctypes
generate_pyi(pyxtf.xtf_ctypes)
import pyxtf.vendors.kongsberg
generate_pyi(pyxtf.vendors.kongsberg)
# Run setup script
setup(name='pyxtf',
version='0.1',
description='eXtended Triton Format (XTF) file interface',
author='Oystein Sture',
author_email='oysstu@gmail.com',
url='https://github.com/oysstu/pyxtf',
license='MIT',
setup_requires=['numpy'],
install_requires=['numpy'],
packages=['pyxtf', 'pyxtf.vendors'],
package_data={'':['*.pyi']},
use_2to3=False,
classifiers=[
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Intended Audience :: Other Audience',
'Intended Audience :: Science/Research',
'Natural Language :: English',
'Topic :: Scientific/Engineering',
'Programming Language:: Python:: 3:: Only'
])
if __name__ == '__main__':
main()<commit_msg>Set minimum numpy version to 1.11, due to backwards compatibility issues for numpy.datetime64 (localtime used by default)<commit_after>from setuptools import setup
from tools.generate_pyi import generate_pyi
def main():
# Generate .pyi files
import pyxtf.xtf_ctypes
generate_pyi(pyxtf.xtf_ctypes)
import pyxtf.vendors.kongsberg
generate_pyi(pyxtf.vendors.kongsberg)
# Run setup script
setup(name='pyxtf',
version='0.1',
description='eXtended Triton Format (XTF) file interface',
author='Oystein Sture',
author_email='oysstu@gmail.com',
url='https://github.com/oysstu/pyxtf',
license='MIT',
setup_requires=['numpy>=1.11'],
install_requires=['numpy>=1.11'],
packages=['pyxtf', 'pyxtf.vendors'],
package_data={'':['*.pyi']},
use_2to3=False,
classifiers=[
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Intended Audience :: Other Audience',
'Intended Audience :: Science/Research',
'Natural Language :: English',
'Topic :: Scientific/Engineering',
'Programming Language:: Python:: 3:: Only'
])
if __name__ == '__main__':
main()
|
1e6dc553f757014bb4a194c55eabc44364674fb9
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
# Dynamically calculate the version
version = __import__('device_inventory').get_version()
# Collect installation requirements
with open('requirements.txt') as reqf:
import re
dep_re = re.compile(r'^([^\s#]+)')
inst_reqs = [m.group(0) for m in
[dep_re.match(l) for l in reqf]
if m]
setup(
name="device-inventory",
version=version,
packages=find_packages(),
license='AGPLv3 License',
description=('The Device Inventory is a tool to help the inventory '
'of computers. It retrieves details of the hardware '
'information and, optionally, runs some health and '
'benchmark tests.'),
scripts=['scripts/device-inventory', 'scripts/di-stress-test'],
package_data={'device_inventory': [
'config.ini',
'config_logging.json',
'data/*'
]},
url='https://github.com/eReuse/device-inventory',
author='eReuse team',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Information Technology',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: System :: Logging',
'Topic :: Utilities',
],
install_requires=inst_reqs,
)
|
import re
from setuptools import setup, find_packages
# Dynamically calculate the version
version = __import__('device_inventory').get_version()
# Collect installation requirements
def read_requirements(path):
with open(path) as reqf:
dep_re = re.compile(r'^([^\s#]+)')
return [m.group(0) for m in
(dep_re.match(l) for l in reqf)
if m]
inst_reqs = read_requirements('requirements.txt')
setup(
name="device-inventory",
version=version,
packages=find_packages(),
license='AGPLv3 License',
description=('The Device Inventory is a tool to help the inventory '
'of computers. It retrieves details of the hardware '
'information and, optionally, runs some health and '
'benchmark tests.'),
scripts=['scripts/device-inventory', 'scripts/di-stress-test'],
package_data={'device_inventory': [
'config.ini',
'config_logging.json',
'data/*'
]},
url='https://github.com/eReuse/device-inventory',
author='eReuse team',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Information Technology',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: System :: Logging',
'Topic :: Utilities',
],
install_requires=inst_reqs,
)
|
Move requirements read code info function
|
Move requirements read code info function
|
Python
|
agpl-3.0
|
eReuse/workbench,eReuse/device-inventory,eReuse/workbench,eReuse/device-inventory
|
from setuptools import setup, find_packages
# Dynamically calculate the version
version = __import__('device_inventory').get_version()
# Collect installation requirements
with open('requirements.txt') as reqf:
import re
dep_re = re.compile(r'^([^\s#]+)')
inst_reqs = [m.group(0) for m in
[dep_re.match(l) for l in reqf]
if m]
setup(
name="device-inventory",
version=version,
packages=find_packages(),
license='AGPLv3 License',
description=('The Device Inventory is a tool to help the inventory '
'of computers. It retrieves details of the hardware '
'information and, optionally, runs some health and '
'benchmark tests.'),
scripts=['scripts/device-inventory', 'scripts/di-stress-test'],
package_data={'device_inventory': [
'config.ini',
'config_logging.json',
'data/*'
]},
url='https://github.com/eReuse/device-inventory',
author='eReuse team',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Information Technology',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: System :: Logging',
'Topic :: Utilities',
],
install_requires=inst_reqs,
)
Move requirements read code info function
|
import re
from setuptools import setup, find_packages
# Dynamically calculate the version
version = __import__('device_inventory').get_version()
# Collect installation requirements
def read_requirements(path):
with open(path) as reqf:
dep_re = re.compile(r'^([^\s#]+)')
return [m.group(0) for m in
(dep_re.match(l) for l in reqf)
if m]
inst_reqs = read_requirements('requirements.txt')
setup(
name="device-inventory",
version=version,
packages=find_packages(),
license='AGPLv3 License',
description=('The Device Inventory is a tool to help the inventory '
'of computers. It retrieves details of the hardware '
'information and, optionally, runs some health and '
'benchmark tests.'),
scripts=['scripts/device-inventory', 'scripts/di-stress-test'],
package_data={'device_inventory': [
'config.ini',
'config_logging.json',
'data/*'
]},
url='https://github.com/eReuse/device-inventory',
author='eReuse team',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Information Technology',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: System :: Logging',
'Topic :: Utilities',
],
install_requires=inst_reqs,
)
|
<commit_before>from setuptools import setup, find_packages
# Dynamically calculate the version
version = __import__('device_inventory').get_version()
# Collect installation requirements
with open('requirements.txt') as reqf:
import re
dep_re = re.compile(r'^([^\s#]+)')
inst_reqs = [m.group(0) for m in
[dep_re.match(l) for l in reqf]
if m]
setup(
name="device-inventory",
version=version,
packages=find_packages(),
license='AGPLv3 License',
description=('The Device Inventory is a tool to help the inventory '
'of computers. It retrieves details of the hardware '
'information and, optionally, runs some health and '
'benchmark tests.'),
scripts=['scripts/device-inventory', 'scripts/di-stress-test'],
package_data={'device_inventory': [
'config.ini',
'config_logging.json',
'data/*'
]},
url='https://github.com/eReuse/device-inventory',
author='eReuse team',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Information Technology',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: System :: Logging',
'Topic :: Utilities',
],
install_requires=inst_reqs,
)
<commit_msg>Move requirements read code info function<commit_after>
|
import re
from setuptools import setup, find_packages
# Dynamically calculate the version
version = __import__('device_inventory').get_version()
# Collect installation requirements
def read_requirements(path):
with open(path) as reqf:
dep_re = re.compile(r'^([^\s#]+)')
return [m.group(0) for m in
(dep_re.match(l) for l in reqf)
if m]
inst_reqs = read_requirements('requirements.txt')
setup(
name="device-inventory",
version=version,
packages=find_packages(),
license='AGPLv3 License',
description=('The Device Inventory is a tool to help the inventory '
'of computers. It retrieves details of the hardware '
'information and, optionally, runs some health and '
'benchmark tests.'),
scripts=['scripts/device-inventory', 'scripts/di-stress-test'],
package_data={'device_inventory': [
'config.ini',
'config_logging.json',
'data/*'
]},
url='https://github.com/eReuse/device-inventory',
author='eReuse team',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Information Technology',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: System :: Logging',
'Topic :: Utilities',
],
install_requires=inst_reqs,
)
|
from setuptools import setup, find_packages
# Dynamically calculate the version
version = __import__('device_inventory').get_version()
# Collect installation requirements
with open('requirements.txt') as reqf:
import re
dep_re = re.compile(r'^([^\s#]+)')
inst_reqs = [m.group(0) for m in
[dep_re.match(l) for l in reqf]
if m]
setup(
name="device-inventory",
version=version,
packages=find_packages(),
license='AGPLv3 License',
description=('The Device Inventory is a tool to help the inventory '
'of computers. It retrieves details of the hardware '
'information and, optionally, runs some health and '
'benchmark tests.'),
scripts=['scripts/device-inventory', 'scripts/di-stress-test'],
package_data={'device_inventory': [
'config.ini',
'config_logging.json',
'data/*'
]},
url='https://github.com/eReuse/device-inventory',
author='eReuse team',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Information Technology',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: System :: Logging',
'Topic :: Utilities',
],
install_requires=inst_reqs,
)
Move requirements read code info functionimport re
from setuptools import setup, find_packages
# Dynamically calculate the version
version = __import__('device_inventory').get_version()
# Collect installation requirements
def read_requirements(path):
with open(path) as reqf:
dep_re = re.compile(r'^([^\s#]+)')
return [m.group(0) for m in
(dep_re.match(l) for l in reqf)
if m]
inst_reqs = read_requirements('requirements.txt')
setup(
name="device-inventory",
version=version,
packages=find_packages(),
license='AGPLv3 License',
description=('The Device Inventory is a tool to help the inventory '
'of computers. It retrieves details of the hardware '
'information and, optionally, runs some health and '
'benchmark tests.'),
scripts=['scripts/device-inventory', 'scripts/di-stress-test'],
package_data={'device_inventory': [
'config.ini',
'config_logging.json',
'data/*'
]},
url='https://github.com/eReuse/device-inventory',
author='eReuse team',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Information Technology',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: System :: Logging',
'Topic :: Utilities',
],
install_requires=inst_reqs,
)
|
<commit_before>from setuptools import setup, find_packages
# Dynamically calculate the version
version = __import__('device_inventory').get_version()
# Collect installation requirements
with open('requirements.txt') as reqf:
import re
dep_re = re.compile(r'^([^\s#]+)')
inst_reqs = [m.group(0) for m in
[dep_re.match(l) for l in reqf]
if m]
setup(
name="device-inventory",
version=version,
packages=find_packages(),
license='AGPLv3 License',
description=('The Device Inventory is a tool to help the inventory '
'of computers. It retrieves details of the hardware '
'information and, optionally, runs some health and '
'benchmark tests.'),
scripts=['scripts/device-inventory', 'scripts/di-stress-test'],
package_data={'device_inventory': [
'config.ini',
'config_logging.json',
'data/*'
]},
url='https://github.com/eReuse/device-inventory',
author='eReuse team',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Information Technology',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: System :: Logging',
'Topic :: Utilities',
],
install_requires=inst_reqs,
)
<commit_msg>Move requirements read code info function<commit_after>import re
from setuptools import setup, find_packages
# Dynamically calculate the version
version = __import__('device_inventory').get_version()
# Collect installation requirements
def read_requirements(path):
with open(path) as reqf:
dep_re = re.compile(r'^([^\s#]+)')
return [m.group(0) for m in
(dep_re.match(l) for l in reqf)
if m]
inst_reqs = read_requirements('requirements.txt')
setup(
name="device-inventory",
version=version,
packages=find_packages(),
license='AGPLv3 License',
description=('The Device Inventory is a tool to help the inventory '
'of computers. It retrieves details of the hardware '
'information and, optionally, runs some health and '
'benchmark tests.'),
scripts=['scripts/device-inventory', 'scripts/di-stress-test'],
package_data={'device_inventory': [
'config.ini',
'config_logging.json',
'data/*'
]},
url='https://github.com/eReuse/device-inventory',
author='eReuse team',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Information Technology',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: System :: Logging',
'Topic :: Utilities',
],
install_requires=inst_reqs,
)
|
de2b4fca41de35df72a30cc7269f2bc8c0d083ea
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='kibana',
version='0.1',
description='Kibana configuration index (.kibana in v4) command line interface and python API (visualization import/export and mappings refresh)',
author='Ryan Farley',
author_email='rfarley@mitre.org',
packages=['kibana'],
install_requires=(
'elasticsearch',
'requests>=2.5.0',
)
)
|
from setuptools import setup
setup(
name='kibana',
version='0.1',
description='Kibana configuration index (.kibana in v4) command line interface and python API (visualization import/export and mappings refresh)',
author='Ryan Farley',
author_email='rfarley@mitre.org',
packages=['kibana'],
install_requires=(
'elasticsearch',
'requests>=2.5.0',
'argparse',
'requests',
)
)
|
Add argparse, requests install reqs
|
Add argparse, requests install reqs
|
Python
|
mit
|
rfarley3/Kibana
|
from setuptools import setup
setup(
name='kibana',
version='0.1',
description='Kibana configuration index (.kibana in v4) command line interface and python API (visualization import/export and mappings refresh)',
author='Ryan Farley',
author_email='rfarley@mitre.org',
packages=['kibana'],
install_requires=(
'elasticsearch',
'requests>=2.5.0',
)
)
Add argparse, requests install reqs
|
from setuptools import setup
setup(
name='kibana',
version='0.1',
description='Kibana configuration index (.kibana in v4) command line interface and python API (visualization import/export and mappings refresh)',
author='Ryan Farley',
author_email='rfarley@mitre.org',
packages=['kibana'],
install_requires=(
'elasticsearch',
'requests>=2.5.0',
'argparse',
'requests',
)
)
|
<commit_before>from setuptools import setup
setup(
name='kibana',
version='0.1',
description='Kibana configuration index (.kibana in v4) command line interface and python API (visualization import/export and mappings refresh)',
author='Ryan Farley',
author_email='rfarley@mitre.org',
packages=['kibana'],
install_requires=(
'elasticsearch',
'requests>=2.5.0',
)
)
<commit_msg>Add argparse, requests install reqs<commit_after>
|
from setuptools import setup
setup(
name='kibana',
version='0.1',
description='Kibana configuration index (.kibana in v4) command line interface and python API (visualization import/export and mappings refresh)',
author='Ryan Farley',
author_email='rfarley@mitre.org',
packages=['kibana'],
install_requires=(
'elasticsearch',
'requests>=2.5.0',
'argparse',
'requests',
)
)
|
from setuptools import setup
setup(
name='kibana',
version='0.1',
description='Kibana configuration index (.kibana in v4) command line interface and python API (visualization import/export and mappings refresh)',
author='Ryan Farley',
author_email='rfarley@mitre.org',
packages=['kibana'],
install_requires=(
'elasticsearch',
'requests>=2.5.0',
)
)
Add argparse, requests install reqsfrom setuptools import setup
setup(
name='kibana',
version='0.1',
description='Kibana configuration index (.kibana in v4) command line interface and python API (visualization import/export and mappings refresh)',
author='Ryan Farley',
author_email='rfarley@mitre.org',
packages=['kibana'],
install_requires=(
'elasticsearch',
'requests>=2.5.0',
'argparse',
'requests',
)
)
|
<commit_before>from setuptools import setup
setup(
name='kibana',
version='0.1',
description='Kibana configuration index (.kibana in v4) command line interface and python API (visualization import/export and mappings refresh)',
author='Ryan Farley',
author_email='rfarley@mitre.org',
packages=['kibana'],
install_requires=(
'elasticsearch',
'requests>=2.5.0',
)
)
<commit_msg>Add argparse, requests install reqs<commit_after>from setuptools import setup
setup(
name='kibana',
version='0.1',
description='Kibana configuration index (.kibana in v4) command line interface and python API (visualization import/export and mappings refresh)',
author='Ryan Farley',
author_email='rfarley@mitre.org',
packages=['kibana'],
install_requires=(
'elasticsearch',
'requests>=2.5.0',
'argparse',
'requests',
)
)
|
71f0de6cb4fb01dd5926d9280eadce14e6fe8e08
|
setup.py
|
setup.py
|
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
from setuptools import setup
from dlstats import version
import os
setup(name='dlstats',
version=version.version,
description='A python module that provides an interface between statistics providers and pandas.',
author='Widukind team',
author_email='dev@michaelmalter.fr',
url='https://github.com/Widukind',
package_dir={'dlstats': 'dlstats', 'dlstats.fetchers': 'dlstats/fetchers'},
packages=['dlstats', 'dlstats.fetchers'],
data_files=[('/usr/local/bin',['dlstats/dlstats_server.py']),
('/etc/systemd/system',['os_specific/dlstats.service'])],
install_requires=[
'requests>=2.4.3',
'pymongo>=2.7.2',
'pandas>=0.12',
'docopt>=0.6.0',
'voluptuous>=0.8',
'xlrd>=0.8',
'configobj>=5.0',
'elasticsearch>=1.0.0,<2.0.0'
]
)
with open('/etc/systemd/system/dlstats.service'):
os.chmod('/etc/systemd/system/dlstats.service', 0o755)
with open('/usr/local/bin/dlstats_server.py'):
os.chmod('/usr/local/bin/dlstats_server.py', 0o755)
|
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
from setuptools import setup
from dlstats import version
import os
setup(name='dlstats',
version=version.version,
description='A python module that provides an interface between statistics providers and pandas.',
author='Widukind team',
author_email='dev@michaelmalter.fr',
url='https://github.com/Widukind',
package_dir={'dlstats': 'dlstats', 'dlstats.fetchers': 'dlstats/fetchers'},
packages=['dlstats', 'dlstats.fetchers'],
data_files=[('/usr/local/bin',['dlstats/dlstats_server.py']),
('/etc/systemd/system',['os_specific/dlstats.service']),
('/etc/',['config/dlstats'])],
install_requires=[
'requests>=2.4.3',
'pymongo>=2.7.2',
'pandas>=0.12',
'docopt>=0.6.0',
'voluptuous>=0.8',
'xlrd>=0.8',
'configobj>=5.0',
'elasticsearch>=1.0.0,<2.0.0'
]
)
with open('/etc/systemd/system/dlstats.service'):
os.chmod('/etc/systemd/system/dlstats.service', 0o755)
with open('/usr/local/bin/dlstats_server.py'):
os.chmod('/usr/local/bin/dlstats_server.py', 0o755)
with open('/etc/dlstats'):
os.chmod('/etc/dlstats', 0o755)
|
Add conf file to installation script
|
Add conf file to installation script
|
Python
|
agpl-3.0
|
mmalter/dlstats,MichelJuillard/dlstats,mmalter/dlstats,Widukind/dlstats,Widukind/dlstats,MichelJuillard/dlstats,MichelJuillard/dlstats,mmalter/dlstats
|
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
from setuptools import setup
from dlstats import version
import os
setup(name='dlstats',
version=version.version,
description='A python module that provides an interface between statistics providers and pandas.',
author='Widukind team',
author_email='dev@michaelmalter.fr',
url='https://github.com/Widukind',
package_dir={'dlstats': 'dlstats', 'dlstats.fetchers': 'dlstats/fetchers'},
packages=['dlstats', 'dlstats.fetchers'],
data_files=[('/usr/local/bin',['dlstats/dlstats_server.py']),
('/etc/systemd/system',['os_specific/dlstats.service'])],
install_requires=[
'requests>=2.4.3',
'pymongo>=2.7.2',
'pandas>=0.12',
'docopt>=0.6.0',
'voluptuous>=0.8',
'xlrd>=0.8',
'configobj>=5.0',
'elasticsearch>=1.0.0,<2.0.0'
]
)
with open('/etc/systemd/system/dlstats.service'):
os.chmod('/etc/systemd/system/dlstats.service', 0o755)
with open('/usr/local/bin/dlstats_server.py'):
os.chmod('/usr/local/bin/dlstats_server.py', 0o755)
Add conf file to installation script
|
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
from setuptools import setup
from dlstats import version
import os
setup(name='dlstats',
version=version.version,
description='A python module that provides an interface between statistics providers and pandas.',
author='Widukind team',
author_email='dev@michaelmalter.fr',
url='https://github.com/Widukind',
package_dir={'dlstats': 'dlstats', 'dlstats.fetchers': 'dlstats/fetchers'},
packages=['dlstats', 'dlstats.fetchers'],
data_files=[('/usr/local/bin',['dlstats/dlstats_server.py']),
('/etc/systemd/system',['os_specific/dlstats.service']),
('/etc/',['config/dlstats'])],
install_requires=[
'requests>=2.4.3',
'pymongo>=2.7.2',
'pandas>=0.12',
'docopt>=0.6.0',
'voluptuous>=0.8',
'xlrd>=0.8',
'configobj>=5.0',
'elasticsearch>=1.0.0,<2.0.0'
]
)
with open('/etc/systemd/system/dlstats.service'):
os.chmod('/etc/systemd/system/dlstats.service', 0o755)
with open('/usr/local/bin/dlstats_server.py'):
os.chmod('/usr/local/bin/dlstats_server.py', 0o755)
with open('/etc/dlstats'):
os.chmod('/etc/dlstats', 0o755)
|
<commit_before>#! /usr/bin/env python3
# -*- coding: utf-8 -*-
from setuptools import setup
from dlstats import version
import os
setup(name='dlstats',
version=version.version,
description='A python module that provides an interface between statistics providers and pandas.',
author='Widukind team',
author_email='dev@michaelmalter.fr',
url='https://github.com/Widukind',
package_dir={'dlstats': 'dlstats', 'dlstats.fetchers': 'dlstats/fetchers'},
packages=['dlstats', 'dlstats.fetchers'],
data_files=[('/usr/local/bin',['dlstats/dlstats_server.py']),
('/etc/systemd/system',['os_specific/dlstats.service'])],
install_requires=[
'requests>=2.4.3',
'pymongo>=2.7.2',
'pandas>=0.12',
'docopt>=0.6.0',
'voluptuous>=0.8',
'xlrd>=0.8',
'configobj>=5.0',
'elasticsearch>=1.0.0,<2.0.0'
]
)
with open('/etc/systemd/system/dlstats.service'):
os.chmod('/etc/systemd/system/dlstats.service', 0o755)
with open('/usr/local/bin/dlstats_server.py'):
os.chmod('/usr/local/bin/dlstats_server.py', 0o755)
<commit_msg>Add conf file to installation script<commit_after>
|
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
from setuptools import setup
from dlstats import version
import os
setup(name='dlstats',
version=version.version,
description='A python module that provides an interface between statistics providers and pandas.',
author='Widukind team',
author_email='dev@michaelmalter.fr',
url='https://github.com/Widukind',
package_dir={'dlstats': 'dlstats', 'dlstats.fetchers': 'dlstats/fetchers'},
packages=['dlstats', 'dlstats.fetchers'],
data_files=[('/usr/local/bin',['dlstats/dlstats_server.py']),
('/etc/systemd/system',['os_specific/dlstats.service']),
('/etc/',['config/dlstats'])],
install_requires=[
'requests>=2.4.3',
'pymongo>=2.7.2',
'pandas>=0.12',
'docopt>=0.6.0',
'voluptuous>=0.8',
'xlrd>=0.8',
'configobj>=5.0',
'elasticsearch>=1.0.0,<2.0.0'
]
)
with open('/etc/systemd/system/dlstats.service'):
os.chmod('/etc/systemd/system/dlstats.service', 0o755)
with open('/usr/local/bin/dlstats_server.py'):
os.chmod('/usr/local/bin/dlstats_server.py', 0o755)
with open('/etc/dlstats'):
os.chmod('/etc/dlstats', 0o755)
|
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
from setuptools import setup
from dlstats import version
import os
setup(name='dlstats',
version=version.version,
description='A python module that provides an interface between statistics providers and pandas.',
author='Widukind team',
author_email='dev@michaelmalter.fr',
url='https://github.com/Widukind',
package_dir={'dlstats': 'dlstats', 'dlstats.fetchers': 'dlstats/fetchers'},
packages=['dlstats', 'dlstats.fetchers'],
data_files=[('/usr/local/bin',['dlstats/dlstats_server.py']),
('/etc/systemd/system',['os_specific/dlstats.service'])],
install_requires=[
'requests>=2.4.3',
'pymongo>=2.7.2',
'pandas>=0.12',
'docopt>=0.6.0',
'voluptuous>=0.8',
'xlrd>=0.8',
'configobj>=5.0',
'elasticsearch>=1.0.0,<2.0.0'
]
)
with open('/etc/systemd/system/dlstats.service'):
os.chmod('/etc/systemd/system/dlstats.service', 0o755)
with open('/usr/local/bin/dlstats_server.py'):
os.chmod('/usr/local/bin/dlstats_server.py', 0o755)
Add conf file to installation script#! /usr/bin/env python3
# -*- coding: utf-8 -*-
from setuptools import setup
from dlstats import version
import os
setup(name='dlstats',
version=version.version,
description='A python module that provides an interface between statistics providers and pandas.',
author='Widukind team',
author_email='dev@michaelmalter.fr',
url='https://github.com/Widukind',
package_dir={'dlstats': 'dlstats', 'dlstats.fetchers': 'dlstats/fetchers'},
packages=['dlstats', 'dlstats.fetchers'],
data_files=[('/usr/local/bin',['dlstats/dlstats_server.py']),
('/etc/systemd/system',['os_specific/dlstats.service']),
('/etc/',['config/dlstats'])],
install_requires=[
'requests>=2.4.3',
'pymongo>=2.7.2',
'pandas>=0.12',
'docopt>=0.6.0',
'voluptuous>=0.8',
'xlrd>=0.8',
'configobj>=5.0',
'elasticsearch>=1.0.0,<2.0.0'
]
)
with open('/etc/systemd/system/dlstats.service'):
os.chmod('/etc/systemd/system/dlstats.service', 0o755)
with open('/usr/local/bin/dlstats_server.py'):
os.chmod('/usr/local/bin/dlstats_server.py', 0o755)
with open('/etc/dlstats'):
os.chmod('/etc/dlstats', 0o755)
|
<commit_before>#! /usr/bin/env python3
# -*- coding: utf-8 -*-
from setuptools import setup
from dlstats import version
import os
setup(name='dlstats',
version=version.version,
description='A python module that provides an interface between statistics providers and pandas.',
author='Widukind team',
author_email='dev@michaelmalter.fr',
url='https://github.com/Widukind',
package_dir={'dlstats': 'dlstats', 'dlstats.fetchers': 'dlstats/fetchers'},
packages=['dlstats', 'dlstats.fetchers'],
data_files=[('/usr/local/bin',['dlstats/dlstats_server.py']),
('/etc/systemd/system',['os_specific/dlstats.service'])],
install_requires=[
'requests>=2.4.3',
'pymongo>=2.7.2',
'pandas>=0.12',
'docopt>=0.6.0',
'voluptuous>=0.8',
'xlrd>=0.8',
'configobj>=5.0',
'elasticsearch>=1.0.0,<2.0.0'
]
)
with open('/etc/systemd/system/dlstats.service'):
os.chmod('/etc/systemd/system/dlstats.service', 0o755)
with open('/usr/local/bin/dlstats_server.py'):
os.chmod('/usr/local/bin/dlstats_server.py', 0o755)
<commit_msg>Add conf file to installation script<commit_after>#! /usr/bin/env python3
# -*- coding: utf-8 -*-
from setuptools import setup
from dlstats import version
import os
setup(name='dlstats',
version=version.version,
description='A python module that provides an interface between statistics providers and pandas.',
author='Widukind team',
author_email='dev@michaelmalter.fr',
url='https://github.com/Widukind',
package_dir={'dlstats': 'dlstats', 'dlstats.fetchers': 'dlstats/fetchers'},
packages=['dlstats', 'dlstats.fetchers'],
data_files=[('/usr/local/bin',['dlstats/dlstats_server.py']),
('/etc/systemd/system',['os_specific/dlstats.service']),
('/etc/',['config/dlstats'])],
install_requires=[
'requests>=2.4.3',
'pymongo>=2.7.2',
'pandas>=0.12',
'docopt>=0.6.0',
'voluptuous>=0.8',
'xlrd>=0.8',
'configobj>=5.0',
'elasticsearch>=1.0.0,<2.0.0'
]
)
with open('/etc/systemd/system/dlstats.service'):
os.chmod('/etc/systemd/system/dlstats.service', 0o755)
with open('/usr/local/bin/dlstats_server.py'):
os.chmod('/usr/local/bin/dlstats_server.py', 0o755)
with open('/etc/dlstats'):
os.chmod('/etc/dlstats', 0o755)
|
aca3c7500e017a0a02f0343405e6f065ebe083d7
|
setup.py
|
setup.py
|
# coding=utf-8
from __future__ import absolute_import, division, print_function
import sys
from setuptools import setup
VERSION='0.1.0'
setup(
name='vfp2py',
version=VERSION,
description='Convert foxpro code to python',
author='Michael Wisslead',
author_email='michael.wisslead@gmail.com',
url='https://github.com/mwisslead',
packages=['vfp2py'],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Foxpro",
"Topic :: Software Development :: Libraries :: Python Modules",
],
install_requires=[
'antlr4-python2-runtime==4.8; python_version < \'3\'',
'antlr4-python3-runtime==4.8; python_version >= \'3\'',
'dbf',
'autopep8',
'isort<5',
'python-dateutil',
'pyodbc'
],
test_suite='nose.collector',
tests_require=['nose', 'Faker'],
entry_points = {
'console_scripts': ['vfp2py=vfp2py.__main__:main'],
}
)
|
# coding=utf-8
from __future__ import absolute_import, division, print_function
import sys
from setuptools import setup
VERSION='0.1.0'
setup(
name='vfp2py',
version=VERSION,
description='Convert foxpro code to python',
author='Michael Wisslead',
author_email='michael.wisslead@gmail.com',
url='https://github.com/mwisslead',
packages=['vfp2py'],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
],
install_requires=[
'antlr4-python2-runtime==4.8; python_version < \'3\'',
'antlr4-python3-runtime==4.8; python_version >= \'3\'',
'dbf',
'autopep8',
'isort<5',
'python-dateutil',
'pyodbc'
],
test_suite='nose.collector',
tests_require=['nose', 'Faker'],
entry_points = {
'console_scripts': ['vfp2py=vfp2py.__main__:main'],
}
)
|
Remove "Programming Language :: Foxpro" classifier.
|
Remove "Programming Language :: Foxpro" classifier.
|
Python
|
mit
|
mwisslead/vfp2py,mwisslead/vfp2py
|
# coding=utf-8
from __future__ import absolute_import, division, print_function
import sys
from setuptools import setup
VERSION='0.1.0'
setup(
name='vfp2py',
version=VERSION,
description='Convert foxpro code to python',
author='Michael Wisslead',
author_email='michael.wisslead@gmail.com',
url='https://github.com/mwisslead',
packages=['vfp2py'],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Foxpro",
"Topic :: Software Development :: Libraries :: Python Modules",
],
install_requires=[
'antlr4-python2-runtime==4.8; python_version < \'3\'',
'antlr4-python3-runtime==4.8; python_version >= \'3\'',
'dbf',
'autopep8',
'isort<5',
'python-dateutil',
'pyodbc'
],
test_suite='nose.collector',
tests_require=['nose', 'Faker'],
entry_points = {
'console_scripts': ['vfp2py=vfp2py.__main__:main'],
}
)
Remove "Programming Language :: Foxpro" classifier.
|
# coding=utf-8
from __future__ import absolute_import, division, print_function
import sys
from setuptools import setup
VERSION='0.1.0'
setup(
name='vfp2py',
version=VERSION,
description='Convert foxpro code to python',
author='Michael Wisslead',
author_email='michael.wisslead@gmail.com',
url='https://github.com/mwisslead',
packages=['vfp2py'],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
],
install_requires=[
'antlr4-python2-runtime==4.8; python_version < \'3\'',
'antlr4-python3-runtime==4.8; python_version >= \'3\'',
'dbf',
'autopep8',
'isort<5',
'python-dateutil',
'pyodbc'
],
test_suite='nose.collector',
tests_require=['nose', 'Faker'],
entry_points = {
'console_scripts': ['vfp2py=vfp2py.__main__:main'],
}
)
|
<commit_before># coding=utf-8
from __future__ import absolute_import, division, print_function
import sys
from setuptools import setup
VERSION='0.1.0'
setup(
name='vfp2py',
version=VERSION,
description='Convert foxpro code to python',
author='Michael Wisslead',
author_email='michael.wisslead@gmail.com',
url='https://github.com/mwisslead',
packages=['vfp2py'],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Foxpro",
"Topic :: Software Development :: Libraries :: Python Modules",
],
install_requires=[
'antlr4-python2-runtime==4.8; python_version < \'3\'',
'antlr4-python3-runtime==4.8; python_version >= \'3\'',
'dbf',
'autopep8',
'isort<5',
'python-dateutil',
'pyodbc'
],
test_suite='nose.collector',
tests_require=['nose', 'Faker'],
entry_points = {
'console_scripts': ['vfp2py=vfp2py.__main__:main'],
}
)
<commit_msg>Remove "Programming Language :: Foxpro" classifier.<commit_after>
|
# coding=utf-8
from __future__ import absolute_import, division, print_function
import sys
from setuptools import setup
VERSION='0.1.0'
setup(
name='vfp2py',
version=VERSION,
description='Convert foxpro code to python',
author='Michael Wisslead',
author_email='michael.wisslead@gmail.com',
url='https://github.com/mwisslead',
packages=['vfp2py'],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
],
install_requires=[
'antlr4-python2-runtime==4.8; python_version < \'3\'',
'antlr4-python3-runtime==4.8; python_version >= \'3\'',
'dbf',
'autopep8',
'isort<5',
'python-dateutil',
'pyodbc'
],
test_suite='nose.collector',
tests_require=['nose', 'Faker'],
entry_points = {
'console_scripts': ['vfp2py=vfp2py.__main__:main'],
}
)
|
# coding=utf-8
from __future__ import absolute_import, division, print_function
import sys
from setuptools import setup
VERSION='0.1.0'
setup(
name='vfp2py',
version=VERSION,
description='Convert foxpro code to python',
author='Michael Wisslead',
author_email='michael.wisslead@gmail.com',
url='https://github.com/mwisslead',
packages=['vfp2py'],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Foxpro",
"Topic :: Software Development :: Libraries :: Python Modules",
],
install_requires=[
'antlr4-python2-runtime==4.8; python_version < \'3\'',
'antlr4-python3-runtime==4.8; python_version >= \'3\'',
'dbf',
'autopep8',
'isort<5',
'python-dateutil',
'pyodbc'
],
test_suite='nose.collector',
tests_require=['nose', 'Faker'],
entry_points = {
'console_scripts': ['vfp2py=vfp2py.__main__:main'],
}
)
Remove "Programming Language :: Foxpro" classifier.# coding=utf-8
from __future__ import absolute_import, division, print_function
import sys
from setuptools import setup
VERSION='0.1.0'
setup(
name='vfp2py',
version=VERSION,
description='Convert foxpro code to python',
author='Michael Wisslead',
author_email='michael.wisslead@gmail.com',
url='https://github.com/mwisslead',
packages=['vfp2py'],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
],
install_requires=[
'antlr4-python2-runtime==4.8; python_version < \'3\'',
'antlr4-python3-runtime==4.8; python_version >= \'3\'',
'dbf',
'autopep8',
'isort<5',
'python-dateutil',
'pyodbc'
],
test_suite='nose.collector',
tests_require=['nose', 'Faker'],
entry_points = {
'console_scripts': ['vfp2py=vfp2py.__main__:main'],
}
)
|
<commit_before># coding=utf-8
from __future__ import absolute_import, division, print_function
import sys
from setuptools import setup
VERSION='0.1.0'
setup(
name='vfp2py',
version=VERSION,
description='Convert foxpro code to python',
author='Michael Wisslead',
author_email='michael.wisslead@gmail.com',
url='https://github.com/mwisslead',
packages=['vfp2py'],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Foxpro",
"Topic :: Software Development :: Libraries :: Python Modules",
],
install_requires=[
'antlr4-python2-runtime==4.8; python_version < \'3\'',
'antlr4-python3-runtime==4.8; python_version >= \'3\'',
'dbf',
'autopep8',
'isort<5',
'python-dateutil',
'pyodbc'
],
test_suite='nose.collector',
tests_require=['nose', 'Faker'],
entry_points = {
'console_scripts': ['vfp2py=vfp2py.__main__:main'],
}
)
<commit_msg>Remove "Programming Language :: Foxpro" classifier.<commit_after># coding=utf-8
from __future__ import absolute_import, division, print_function
import sys
from setuptools import setup
VERSION='0.1.0'
setup(
name='vfp2py',
version=VERSION,
description='Convert foxpro code to python',
author='Michael Wisslead',
author_email='michael.wisslead@gmail.com',
url='https://github.com/mwisslead',
packages=['vfp2py'],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
],
install_requires=[
'antlr4-python2-runtime==4.8; python_version < \'3\'',
'antlr4-python3-runtime==4.8; python_version >= \'3\'',
'dbf',
'autopep8',
'isort<5',
'python-dateutil',
'pyodbc'
],
test_suite='nose.collector',
tests_require=['nose', 'Faker'],
entry_points = {
'console_scripts': ['vfp2py=vfp2py.__main__:main'],
}
)
|
aa03b1737e6760cc81daa3ff36f5eb5319334c54
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
from aldryn_categories import __version__
# git tag '[version]'
# git push --tags origin master
# python setup.py sdist upload
setup(
name='aldryn-categories',
version=__version__,
url='https://github.com/aldryn/aldryn-categories',
license='BSD License',
description='Heirarchical categories/taxonomies for your Django project',
author='Divio AG',
author_email='info@divio.ch',
package_data={},
packages=find_packages(),
platforms=['OS Independent'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries',
],
install_requires=[
'django-parler>=1.2.1',
'django-treebeard>=2.0',
],
include_package_data=True,
zip_safe=False
)
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
from aldryn_categories import __version__
# git tag '[version]'
# git push --tags origin master
# python setup.py sdist upload
# python setup.py bdist_wheel upload
setup(
name='aldryn-categories',
version=__version__,
url='https://github.com/aldryn/aldryn-categories',
license='BSD License',
description='Heirarchical categories/taxonomies for your Django project',
author='Divio AG',
author_email='info@divio.ch',
package_data={},
packages=find_packages(),
platforms=['OS Independent'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries',
],
install_requires=[
'django-parler>=1.2.1',
'django-treebeard>=2.0',
],
include_package_data=True,
zip_safe=False
)
|
Add reminder comment on wheel creation
|
Add reminder comment on wheel creation
|
Python
|
bsd-3-clause
|
aldryn/aldryn-categories,aldryn/aldryn-categories
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
from aldryn_categories import __version__
# git tag '[version]'
# git push --tags origin master
# python setup.py sdist upload
setup(
name='aldryn-categories',
version=__version__,
url='https://github.com/aldryn/aldryn-categories',
license='BSD License',
description='Heirarchical categories/taxonomies for your Django project',
author='Divio AG',
author_email='info@divio.ch',
package_data={},
packages=find_packages(),
platforms=['OS Independent'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries',
],
install_requires=[
'django-parler>=1.2.1',
'django-treebeard>=2.0',
],
include_package_data=True,
zip_safe=False
)
Add reminder comment on wheel creation
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
from aldryn_categories import __version__
# git tag '[version]'
# git push --tags origin master
# python setup.py sdist upload
# python setup.py bdist_wheel upload
setup(
name='aldryn-categories',
version=__version__,
url='https://github.com/aldryn/aldryn-categories',
license='BSD License',
description='Heirarchical categories/taxonomies for your Django project',
author='Divio AG',
author_email='info@divio.ch',
package_data={},
packages=find_packages(),
platforms=['OS Independent'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries',
],
install_requires=[
'django-parler>=1.2.1',
'django-treebeard>=2.0',
],
include_package_data=True,
zip_safe=False
)
|
<commit_before># -*- coding: utf-8 -*-
from setuptools import setup, find_packages
from aldryn_categories import __version__
# git tag '[version]'
# git push --tags origin master
# python setup.py sdist upload
setup(
name='aldryn-categories',
version=__version__,
url='https://github.com/aldryn/aldryn-categories',
license='BSD License',
description='Heirarchical categories/taxonomies for your Django project',
author='Divio AG',
author_email='info@divio.ch',
package_data={},
packages=find_packages(),
platforms=['OS Independent'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries',
],
install_requires=[
'django-parler>=1.2.1',
'django-treebeard>=2.0',
],
include_package_data=True,
zip_safe=False
)
<commit_msg>Add reminder comment on wheel creation<commit_after>
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
from aldryn_categories import __version__
# git tag '[version]'
# git push --tags origin master
# python setup.py sdist upload
# python setup.py bdist_wheel upload
setup(
name='aldryn-categories',
version=__version__,
url='https://github.com/aldryn/aldryn-categories',
license='BSD License',
description='Heirarchical categories/taxonomies for your Django project',
author='Divio AG',
author_email='info@divio.ch',
package_data={},
packages=find_packages(),
platforms=['OS Independent'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries',
],
install_requires=[
'django-parler>=1.2.1',
'django-treebeard>=2.0',
],
include_package_data=True,
zip_safe=False
)
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
from aldryn_categories import __version__
# git tag '[version]'
# git push --tags origin master
# python setup.py sdist upload
setup(
name='aldryn-categories',
version=__version__,
url='https://github.com/aldryn/aldryn-categories',
license='BSD License',
description='Heirarchical categories/taxonomies for your Django project',
author='Divio AG',
author_email='info@divio.ch',
package_data={},
packages=find_packages(),
platforms=['OS Independent'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries',
],
install_requires=[
'django-parler>=1.2.1',
'django-treebeard>=2.0',
],
include_package_data=True,
zip_safe=False
)
Add reminder comment on wheel creation# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
from aldryn_categories import __version__
# git tag '[version]'
# git push --tags origin master
# python setup.py sdist upload
# python setup.py bdist_wheel upload
setup(
name='aldryn-categories',
version=__version__,
url='https://github.com/aldryn/aldryn-categories',
license='BSD License',
description='Heirarchical categories/taxonomies for your Django project',
author='Divio AG',
author_email='info@divio.ch',
package_data={},
packages=find_packages(),
platforms=['OS Independent'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries',
],
install_requires=[
'django-parler>=1.2.1',
'django-treebeard>=2.0',
],
include_package_data=True,
zip_safe=False
)
|
<commit_before># -*- coding: utf-8 -*-
from setuptools import setup, find_packages
from aldryn_categories import __version__
# git tag '[version]'
# git push --tags origin master
# python setup.py sdist upload
setup(
name='aldryn-categories',
version=__version__,
url='https://github.com/aldryn/aldryn-categories',
license='BSD License',
description='Heirarchical categories/taxonomies for your Django project',
author='Divio AG',
author_email='info@divio.ch',
package_data={},
packages=find_packages(),
platforms=['OS Independent'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries',
],
install_requires=[
'django-parler>=1.2.1',
'django-treebeard>=2.0',
],
include_package_data=True,
zip_safe=False
)
<commit_msg>Add reminder comment on wheel creation<commit_after># -*- coding: utf-8 -*-
from setuptools import setup, find_packages
from aldryn_categories import __version__
# git tag '[version]'
# git push --tags origin master
# python setup.py sdist upload
# python setup.py bdist_wheel upload
setup(
name='aldryn-categories',
version=__version__,
url='https://github.com/aldryn/aldryn-categories',
license='BSD License',
description='Heirarchical categories/taxonomies for your Django project',
author='Divio AG',
author_email='info@divio.ch',
package_data={},
packages=find_packages(),
platforms=['OS Independent'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries',
],
install_requires=[
'django-parler>=1.2.1',
'django-treebeard>=2.0',
],
include_package_data=True,
zip_safe=False
)
|
90fb3c5d50f4816967c3a36fc5ddfa93caff610f
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name="CoinboxMod-auth",
version="0.1",
packages=find_packages(),
zip_safe=True,
namespace_packages=['cbpos', 'cbpos.mod'],
include_package_data=True,
install_requires=['sqlalchemy>=0.7','PyDispatcher>=2.0.3','ProxyTypes>=0.9','PySide>=1.1.2','py-bcrypt==0.2'],
author='Coinbox POS Team',
author_email='coinboxpos@googlegroups.com',
description='Coinbox POS core package',
license='GPLv3',
url='http://coinboxpos.org/'
)
|
from setuptools import setup, find_packages
setup(
name="CoinboxMod-auth",
version="0.1",
packages=find_packages(),
zip_safe=True,
namespace_packages=['cbpos', 'cbpos.mod'],
include_package_data=True,
install_requires=['sqlalchemy>=0.7','PyDispatcher>=2.0.3','ProxyTypes>=0.9','PySide>=1.1.2','py-bcrypt==0.4'],
author='Coinbox POS Team',
author_email='coinboxpos@googlegroups.com',
description='Coinbox POS core package',
license='GPLv3',
url='http://coinboxpos.org/'
)
|
Use py-bcrypt version 0.4 to fix a security issue with py-bcrypt
|
Use py-bcrypt version 0.4 to fix a security issue with py-bcrypt
|
Python
|
mit
|
coinbox/coinbox-mod-auth
|
from setuptools import setup, find_packages
setup(
name="CoinboxMod-auth",
version="0.1",
packages=find_packages(),
zip_safe=True,
namespace_packages=['cbpos', 'cbpos.mod'],
include_package_data=True,
install_requires=['sqlalchemy>=0.7','PyDispatcher>=2.0.3','ProxyTypes>=0.9','PySide>=1.1.2','py-bcrypt==0.2'],
author='Coinbox POS Team',
author_email='coinboxpos@googlegroups.com',
description='Coinbox POS core package',
license='GPLv3',
url='http://coinboxpos.org/'
)
Use py-bcrypt version 0.4 to fix a security issue with py-bcrypt
|
from setuptools import setup, find_packages
setup(
name="CoinboxMod-auth",
version="0.1",
packages=find_packages(),
zip_safe=True,
namespace_packages=['cbpos', 'cbpos.mod'],
include_package_data=True,
install_requires=['sqlalchemy>=0.7','PyDispatcher>=2.0.3','ProxyTypes>=0.9','PySide>=1.1.2','py-bcrypt==0.4'],
author='Coinbox POS Team',
author_email='coinboxpos@googlegroups.com',
description='Coinbox POS core package',
license='GPLv3',
url='http://coinboxpos.org/'
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name="CoinboxMod-auth",
version="0.1",
packages=find_packages(),
zip_safe=True,
namespace_packages=['cbpos', 'cbpos.mod'],
include_package_data=True,
install_requires=['sqlalchemy>=0.7','PyDispatcher>=2.0.3','ProxyTypes>=0.9','PySide>=1.1.2','py-bcrypt==0.2'],
author='Coinbox POS Team',
author_email='coinboxpos@googlegroups.com',
description='Coinbox POS core package',
license='GPLv3',
url='http://coinboxpos.org/'
)
<commit_msg>Use py-bcrypt version 0.4 to fix a security issue with py-bcrypt<commit_after>
|
from setuptools import setup, find_packages
setup(
name="CoinboxMod-auth",
version="0.1",
packages=find_packages(),
zip_safe=True,
namespace_packages=['cbpos', 'cbpos.mod'],
include_package_data=True,
install_requires=['sqlalchemy>=0.7','PyDispatcher>=2.0.3','ProxyTypes>=0.9','PySide>=1.1.2','py-bcrypt==0.4'],
author='Coinbox POS Team',
author_email='coinboxpos@googlegroups.com',
description='Coinbox POS core package',
license='GPLv3',
url='http://coinboxpos.org/'
)
|
from setuptools import setup, find_packages
setup(
name="CoinboxMod-auth",
version="0.1",
packages=find_packages(),
zip_safe=True,
namespace_packages=['cbpos', 'cbpos.mod'],
include_package_data=True,
install_requires=['sqlalchemy>=0.7','PyDispatcher>=2.0.3','ProxyTypes>=0.9','PySide>=1.1.2','py-bcrypt==0.2'],
author='Coinbox POS Team',
author_email='coinboxpos@googlegroups.com',
description='Coinbox POS core package',
license='GPLv3',
url='http://coinboxpos.org/'
)
Use py-bcrypt version 0.4 to fix a security issue with py-bcryptfrom setuptools import setup, find_packages
setup(
name="CoinboxMod-auth",
version="0.1",
packages=find_packages(),
zip_safe=True,
namespace_packages=['cbpos', 'cbpos.mod'],
include_package_data=True,
install_requires=['sqlalchemy>=0.7','PyDispatcher>=2.0.3','ProxyTypes>=0.9','PySide>=1.1.2','py-bcrypt==0.4'],
author='Coinbox POS Team',
author_email='coinboxpos@googlegroups.com',
description='Coinbox POS core package',
license='GPLv3',
url='http://coinboxpos.org/'
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name="CoinboxMod-auth",
version="0.1",
packages=find_packages(),
zip_safe=True,
namespace_packages=['cbpos', 'cbpos.mod'],
include_package_data=True,
install_requires=['sqlalchemy>=0.7','PyDispatcher>=2.0.3','ProxyTypes>=0.9','PySide>=1.1.2','py-bcrypt==0.2'],
author='Coinbox POS Team',
author_email='coinboxpos@googlegroups.com',
description='Coinbox POS core package',
license='GPLv3',
url='http://coinboxpos.org/'
)
<commit_msg>Use py-bcrypt version 0.4 to fix a security issue with py-bcrypt<commit_after>from setuptools import setup, find_packages
setup(
name="CoinboxMod-auth",
version="0.1",
packages=find_packages(),
zip_safe=True,
namespace_packages=['cbpos', 'cbpos.mod'],
include_package_data=True,
install_requires=['sqlalchemy>=0.7','PyDispatcher>=2.0.3','ProxyTypes>=0.9','PySide>=1.1.2','py-bcrypt==0.4'],
author='Coinbox POS Team',
author_email='coinboxpos@googlegroups.com',
description='Coinbox POS core package',
license='GPLv3',
url='http://coinboxpos.org/'
)
|
10588fc5caa881a82d78449e29216a68e8b12074
|
setup.py
|
setup.py
|
from distutils.core import setup
import os
setup(name='django-flashpolicies',
version='1.3',
description='Flash cross-domain policies for Django sites',
long_description=open(os.path.join(os.path.dirname(__file__), 'README')).read(),
author='James Bennett',
author_email='james@b-list.org',
url='http://bitbucket.org/ubernostrum/django-flashpolicies/overview/',
download_url='http://bitbucket.org/ubernostrum/django-flashpolicies/downloads/django-flashpolicies-1.3.tar.gz',
packages=['flashpolicies'],
classifiers=['Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
)
|
from distutils.core import setup
import os
setup(name='django-flashpolicies',
version='1.3.1',
description='Flash cross-domain policies for Django sites',
long_description=open(os.path.join(os.path.dirname(__file__), 'README')).read(),
author='James Bennett',
author_email='james@b-list.org',
url='http://bitbucket.org/ubernostrum/django-flashpolicies/overview/',
download_url='http://bitbucket.org/ubernostrum/django-flashpolicies/downloads/django-flashpolicies-1.3.tar.gz',
packages=['flashpolicies', 'flashpolicies.tests'],
classifiers=['Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
)
|
Make the packaging process include the tests.
|
Make the packaging process include the tests.
|
Python
|
bsd-3-clause
|
ubernostrum/django-flashpolicies
|
from distutils.core import setup
import os
setup(name='django-flashpolicies',
version='1.3',
description='Flash cross-domain policies for Django sites',
long_description=open(os.path.join(os.path.dirname(__file__), 'README')).read(),
author='James Bennett',
author_email='james@b-list.org',
url='http://bitbucket.org/ubernostrum/django-flashpolicies/overview/',
download_url='http://bitbucket.org/ubernostrum/django-flashpolicies/downloads/django-flashpolicies-1.3.tar.gz',
packages=['flashpolicies'],
classifiers=['Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
)
Make the packaging process include the tests.
|
from distutils.core import setup
import os
setup(name='django-flashpolicies',
version='1.3.1',
description='Flash cross-domain policies for Django sites',
long_description=open(os.path.join(os.path.dirname(__file__), 'README')).read(),
author='James Bennett',
author_email='james@b-list.org',
url='http://bitbucket.org/ubernostrum/django-flashpolicies/overview/',
download_url='http://bitbucket.org/ubernostrum/django-flashpolicies/downloads/django-flashpolicies-1.3.tar.gz',
packages=['flashpolicies', 'flashpolicies.tests'],
classifiers=['Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
)
|
<commit_before>from distutils.core import setup
import os
setup(name='django-flashpolicies',
version='1.3',
description='Flash cross-domain policies for Django sites',
long_description=open(os.path.join(os.path.dirname(__file__), 'README')).read(),
author='James Bennett',
author_email='james@b-list.org',
url='http://bitbucket.org/ubernostrum/django-flashpolicies/overview/',
download_url='http://bitbucket.org/ubernostrum/django-flashpolicies/downloads/django-flashpolicies-1.3.tar.gz',
packages=['flashpolicies'],
classifiers=['Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
)
<commit_msg>Make the packaging process include the tests.<commit_after>
|
from distutils.core import setup
import os
setup(name='django-flashpolicies',
version='1.3.1',
description='Flash cross-domain policies for Django sites',
long_description=open(os.path.join(os.path.dirname(__file__), 'README')).read(),
author='James Bennett',
author_email='james@b-list.org',
url='http://bitbucket.org/ubernostrum/django-flashpolicies/overview/',
download_url='http://bitbucket.org/ubernostrum/django-flashpolicies/downloads/django-flashpolicies-1.3.tar.gz',
packages=['flashpolicies', 'flashpolicies.tests'],
classifiers=['Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
)
|
from distutils.core import setup
import os
setup(name='django-flashpolicies',
version='1.3',
description='Flash cross-domain policies for Django sites',
long_description=open(os.path.join(os.path.dirname(__file__), 'README')).read(),
author='James Bennett',
author_email='james@b-list.org',
url='http://bitbucket.org/ubernostrum/django-flashpolicies/overview/',
download_url='http://bitbucket.org/ubernostrum/django-flashpolicies/downloads/django-flashpolicies-1.3.tar.gz',
packages=['flashpolicies'],
classifiers=['Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
)
Make the packaging process include the tests.from distutils.core import setup
import os
setup(name='django-flashpolicies',
version='1.3.1',
description='Flash cross-domain policies for Django sites',
long_description=open(os.path.join(os.path.dirname(__file__), 'README')).read(),
author='James Bennett',
author_email='james@b-list.org',
url='http://bitbucket.org/ubernostrum/django-flashpolicies/overview/',
download_url='http://bitbucket.org/ubernostrum/django-flashpolicies/downloads/django-flashpolicies-1.3.tar.gz',
packages=['flashpolicies', 'flashpolicies.tests'],
classifiers=['Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
)
|
<commit_before>from distutils.core import setup
import os
setup(name='django-flashpolicies',
version='1.3',
description='Flash cross-domain policies for Django sites',
long_description=open(os.path.join(os.path.dirname(__file__), 'README')).read(),
author='James Bennett',
author_email='james@b-list.org',
url='http://bitbucket.org/ubernostrum/django-flashpolicies/overview/',
download_url='http://bitbucket.org/ubernostrum/django-flashpolicies/downloads/django-flashpolicies-1.3.tar.gz',
packages=['flashpolicies'],
classifiers=['Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
)
<commit_msg>Make the packaging process include the tests.<commit_after>from distutils.core import setup
import os
setup(name='django-flashpolicies',
version='1.3.1',
description='Flash cross-domain policies for Django sites',
long_description=open(os.path.join(os.path.dirname(__file__), 'README')).read(),
author='James Bennett',
author_email='james@b-list.org',
url='http://bitbucket.org/ubernostrum/django-flashpolicies/overview/',
download_url='http://bitbucket.org/ubernostrum/django-flashpolicies/downloads/django-flashpolicies-1.3.tar.gz',
packages=['flashpolicies', 'flashpolicies.tests'],
classifiers=['Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
)
|
27b4a870756882faf56894b2d9969fa1eab4a906
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='django-migration-testcase',
version='0.0.3',
author='Andrew Plummer',
author_email='plummer574@gmail.com',
url='https://github.com/plumdog/django_migration_testcase',
install_requires=['Django>=1.4'])
|
from setuptools import find_packages, setup
setup(
name='django-migration-testcase',
version='0.0.3',
author='Andrew Plummer',
author_email='plummer574@gmail.com',
url='https://github.com/plumdog/django_migration_testcase',
packages=find_packages(),
install_requires=['Django>=1.4'])
|
Fix installing the package from pip.
|
Fix installing the package from pip.
|
Python
|
mit
|
percipient/django_migration_testcase,plumdog/django_migration_test,percipient/django_migration_testcase,plumdog/django_migration_testcase,plumdog/django_migration_test,plumdog/django_migration_testcase
|
from setuptools import setup
setup(
name='django-migration-testcase',
version='0.0.3',
author='Andrew Plummer',
author_email='plummer574@gmail.com',
url='https://github.com/plumdog/django_migration_testcase',
install_requires=['Django>=1.4'])
Fix installing the package from pip.
|
from setuptools import find_packages, setup
setup(
name='django-migration-testcase',
version='0.0.3',
author='Andrew Plummer',
author_email='plummer574@gmail.com',
url='https://github.com/plumdog/django_migration_testcase',
packages=find_packages(),
install_requires=['Django>=1.4'])
|
<commit_before>from setuptools import setup
setup(
name='django-migration-testcase',
version='0.0.3',
author='Andrew Plummer',
author_email='plummer574@gmail.com',
url='https://github.com/plumdog/django_migration_testcase',
install_requires=['Django>=1.4'])
<commit_msg>Fix installing the package from pip.<commit_after>
|
from setuptools import find_packages, setup
setup(
name='django-migration-testcase',
version='0.0.3',
author='Andrew Plummer',
author_email='plummer574@gmail.com',
url='https://github.com/plumdog/django_migration_testcase',
packages=find_packages(),
install_requires=['Django>=1.4'])
|
from setuptools import setup
setup(
name='django-migration-testcase',
version='0.0.3',
author='Andrew Plummer',
author_email='plummer574@gmail.com',
url='https://github.com/plumdog/django_migration_testcase',
install_requires=['Django>=1.4'])
Fix installing the package from pip.from setuptools import find_packages, setup
setup(
name='django-migration-testcase',
version='0.0.3',
author='Andrew Plummer',
author_email='plummer574@gmail.com',
url='https://github.com/plumdog/django_migration_testcase',
packages=find_packages(),
install_requires=['Django>=1.4'])
|
<commit_before>from setuptools import setup
setup(
name='django-migration-testcase',
version='0.0.3',
author='Andrew Plummer',
author_email='plummer574@gmail.com',
url='https://github.com/plumdog/django_migration_testcase',
install_requires=['Django>=1.4'])
<commit_msg>Fix installing the package from pip.<commit_after>from setuptools import find_packages, setup
setup(
name='django-migration-testcase',
version='0.0.3',
author='Andrew Plummer',
author_email='plummer574@gmail.com',
url='https://github.com/plumdog/django_migration_testcase',
packages=find_packages(),
install_requires=['Django>=1.4'])
|
c8e0b0e110c91120b63a62c5ae941a88c7010780
|
setup.py
|
setup.py
|
from setuptools import setup
import os
setup(
name = "merky",
version = "0.0.1a",
author = "Ethan Rowe",
author_email = "ethan@the-rowes.com",
description = ("JSON-oriented merkle tree utilities"),
license = "MIT",
url = "https://github.com/ethanrowe/python-merky",
packages = ["merky",
"merky.test",
],
long_description = """
Merky - compute merkle trees for JSON-friendly data.
""",
test_suite = "nose.collector",
install_requires = [
'six >= 1.5',
],
setup_requires = [
'nose',
'mock >= 1.0.1',
],
tests_require = [
'nose',
'mock >= 1.0.1',
],
)
|
from setuptools import setup
import os
setup(
name = "merky",
version = "0.0.2a",
author = "Ethan Rowe",
author_email = "ethan@the-rowes.com",
description = ("JSON-oriented merkle tree utilities"),
license = "MIT",
url = "https://github.com/ethanrowe/python-merky",
packages = ["merky",
"merky.cases",
"merky.test",
"merky.test.usecases",
],
long_description = """
Merky - compute merkle trees for JSON-friendly data.
""",
test_suite = "nose.collector",
install_requires = [
'six >= 1.5',
],
setup_requires = [
'nose',
'mock >= 1.0.1',
],
tests_require = [
'nose',
'mock >= 1.0.1',
],
)
|
Bump to 0.0.2a for attr. graph feature
|
Bump to 0.0.2a for attr. graph feature
Add new subpackages to the setup package list.
|
Python
|
mit
|
ethanrowe/python-merky
|
from setuptools import setup
import os
setup(
name = "merky",
version = "0.0.1a",
author = "Ethan Rowe",
author_email = "ethan@the-rowes.com",
description = ("JSON-oriented merkle tree utilities"),
license = "MIT",
url = "https://github.com/ethanrowe/python-merky",
packages = ["merky",
"merky.test",
],
long_description = """
Merky - compute merkle trees for JSON-friendly data.
""",
test_suite = "nose.collector",
install_requires = [
'six >= 1.5',
],
setup_requires = [
'nose',
'mock >= 1.0.1',
],
tests_require = [
'nose',
'mock >= 1.0.1',
],
)
Bump to 0.0.2a for attr. graph feature
Add new subpackages to the setup package list.
|
from setuptools import setup
import os
setup(
name = "merky",
version = "0.0.2a",
author = "Ethan Rowe",
author_email = "ethan@the-rowes.com",
description = ("JSON-oriented merkle tree utilities"),
license = "MIT",
url = "https://github.com/ethanrowe/python-merky",
packages = ["merky",
"merky.cases",
"merky.test",
"merky.test.usecases",
],
long_description = """
Merky - compute merkle trees for JSON-friendly data.
""",
test_suite = "nose.collector",
install_requires = [
'six >= 1.5',
],
setup_requires = [
'nose',
'mock >= 1.0.1',
],
tests_require = [
'nose',
'mock >= 1.0.1',
],
)
|
<commit_before>from setuptools import setup
import os
setup(
name = "merky",
version = "0.0.1a",
author = "Ethan Rowe",
author_email = "ethan@the-rowes.com",
description = ("JSON-oriented merkle tree utilities"),
license = "MIT",
url = "https://github.com/ethanrowe/python-merky",
packages = ["merky",
"merky.test",
],
long_description = """
Merky - compute merkle trees for JSON-friendly data.
""",
test_suite = "nose.collector",
install_requires = [
'six >= 1.5',
],
setup_requires = [
'nose',
'mock >= 1.0.1',
],
tests_require = [
'nose',
'mock >= 1.0.1',
],
)
<commit_msg>Bump to 0.0.2a for attr. graph feature
Add new subpackages to the setup package list.<commit_after>
|
from setuptools import setup
import os
setup(
name = "merky",
version = "0.0.2a",
author = "Ethan Rowe",
author_email = "ethan@the-rowes.com",
description = ("JSON-oriented merkle tree utilities"),
license = "MIT",
url = "https://github.com/ethanrowe/python-merky",
packages = ["merky",
"merky.cases",
"merky.test",
"merky.test.usecases",
],
long_description = """
Merky - compute merkle trees for JSON-friendly data.
""",
test_suite = "nose.collector",
install_requires = [
'six >= 1.5',
],
setup_requires = [
'nose',
'mock >= 1.0.1',
],
tests_require = [
'nose',
'mock >= 1.0.1',
],
)
|
from setuptools import setup
import os
setup(
name = "merky",
version = "0.0.1a",
author = "Ethan Rowe",
author_email = "ethan@the-rowes.com",
description = ("JSON-oriented merkle tree utilities"),
license = "MIT",
url = "https://github.com/ethanrowe/python-merky",
packages = ["merky",
"merky.test",
],
long_description = """
Merky - compute merkle trees for JSON-friendly data.
""",
test_suite = "nose.collector",
install_requires = [
'six >= 1.5',
],
setup_requires = [
'nose',
'mock >= 1.0.1',
],
tests_require = [
'nose',
'mock >= 1.0.1',
],
)
Bump to 0.0.2a for attr. graph feature
Add new subpackages to the setup package list.from setuptools import setup
import os
setup(
name = "merky",
version = "0.0.2a",
author = "Ethan Rowe",
author_email = "ethan@the-rowes.com",
description = ("JSON-oriented merkle tree utilities"),
license = "MIT",
url = "https://github.com/ethanrowe/python-merky",
packages = ["merky",
"merky.cases",
"merky.test",
"merky.test.usecases",
],
long_description = """
Merky - compute merkle trees for JSON-friendly data.
""",
test_suite = "nose.collector",
install_requires = [
'six >= 1.5',
],
setup_requires = [
'nose',
'mock >= 1.0.1',
],
tests_require = [
'nose',
'mock >= 1.0.1',
],
)
|
<commit_before>from setuptools import setup
import os
setup(
name = "merky",
version = "0.0.1a",
author = "Ethan Rowe",
author_email = "ethan@the-rowes.com",
description = ("JSON-oriented merkle tree utilities"),
license = "MIT",
url = "https://github.com/ethanrowe/python-merky",
packages = ["merky",
"merky.test",
],
long_description = """
Merky - compute merkle trees for JSON-friendly data.
""",
test_suite = "nose.collector",
install_requires = [
'six >= 1.5',
],
setup_requires = [
'nose',
'mock >= 1.0.1',
],
tests_require = [
'nose',
'mock >= 1.0.1',
],
)
<commit_msg>Bump to 0.0.2a for attr. graph feature
Add new subpackages to the setup package list.<commit_after>from setuptools import setup
import os
setup(
name = "merky",
version = "0.0.2a",
author = "Ethan Rowe",
author_email = "ethan@the-rowes.com",
description = ("JSON-oriented merkle tree utilities"),
license = "MIT",
url = "https://github.com/ethanrowe/python-merky",
packages = ["merky",
"merky.cases",
"merky.test",
"merky.test.usecases",
],
long_description = """
Merky - compute merkle trees for JSON-friendly data.
""",
test_suite = "nose.collector",
install_requires = [
'six >= 1.5',
],
setup_requires = [
'nose',
'mock >= 1.0.1',
],
tests_require = [
'nose',
'mock >= 1.0.1',
],
)
|
31b439a1d53b27436dd183abb51fabd488cbeab5
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='django-admin-extend',
version='0.0.2',
description=('Provides functionality for extending'
'ModelAdmin classes that have already'
'been registered by other apps'),
author='Ioan Alexandru Cucu',
author_email='alexandruioan.cucu@gmail.com',
url='https://github.com/kux/django-admin-extend',
download_url='https://github.com/kux/django-admin-extend/archive/0.0.2.tar.gz,
install_requires=('Django>=1.3',),
packages=find_packages(),
include_package_data=True,
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='django-admin-extend',
version='0.0.3',
description=('Provides functionality for extending'
'ModelAdmin classes that have already'
'been registered by other apps'),
author='Ioan Alexandru Cucu',
author_email='alexandruioan.cucu@gmail.com',
url='https://github.com/kux/django-admin-extend',
download_url='https://github.com/kux/django-admin-extend/archive/0.0.3.tar.gz',
install_requires=('Django>=1.3',),
packages=find_packages(),
include_package_data=True,
)
|
Fix download link and bump version
|
Fix download link and bump version
|
Python
|
mit
|
kux/django-admin-extend
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='django-admin-extend',
version='0.0.2',
description=('Provides functionality for extending'
'ModelAdmin classes that have already'
'been registered by other apps'),
author='Ioan Alexandru Cucu',
author_email='alexandruioan.cucu@gmail.com',
url='https://github.com/kux/django-admin-extend',
download_url='https://github.com/kux/django-admin-extend/archive/0.0.2.tar.gz,
install_requires=('Django>=1.3',),
packages=find_packages(),
include_package_data=True,
)
Fix download link and bump version
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='django-admin-extend',
version='0.0.3',
description=('Provides functionality for extending'
'ModelAdmin classes that have already'
'been registered by other apps'),
author='Ioan Alexandru Cucu',
author_email='alexandruioan.cucu@gmail.com',
url='https://github.com/kux/django-admin-extend',
download_url='https://github.com/kux/django-admin-extend/archive/0.0.3.tar.gz',
install_requires=('Django>=1.3',),
packages=find_packages(),
include_package_data=True,
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='django-admin-extend',
version='0.0.2',
description=('Provides functionality for extending'
'ModelAdmin classes that have already'
'been registered by other apps'),
author='Ioan Alexandru Cucu',
author_email='alexandruioan.cucu@gmail.com',
url='https://github.com/kux/django-admin-extend',
download_url='https://github.com/kux/django-admin-extend/archive/0.0.2.tar.gz,
install_requires=('Django>=1.3',),
packages=find_packages(),
include_package_data=True,
)
<commit_msg>Fix download link and bump version<commit_after>
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='django-admin-extend',
version='0.0.3',
description=('Provides functionality for extending'
'ModelAdmin classes that have already'
'been registered by other apps'),
author='Ioan Alexandru Cucu',
author_email='alexandruioan.cucu@gmail.com',
url='https://github.com/kux/django-admin-extend',
download_url='https://github.com/kux/django-admin-extend/archive/0.0.3.tar.gz',
install_requires=('Django>=1.3',),
packages=find_packages(),
include_package_data=True,
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='django-admin-extend',
version='0.0.2',
description=('Provides functionality for extending'
'ModelAdmin classes that have already'
'been registered by other apps'),
author='Ioan Alexandru Cucu',
author_email='alexandruioan.cucu@gmail.com',
url='https://github.com/kux/django-admin-extend',
download_url='https://github.com/kux/django-admin-extend/archive/0.0.2.tar.gz,
install_requires=('Django>=1.3',),
packages=find_packages(),
include_package_data=True,
)
Fix download link and bump version#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='django-admin-extend',
version='0.0.3',
description=('Provides functionality for extending'
'ModelAdmin classes that have already'
'been registered by other apps'),
author='Ioan Alexandru Cucu',
author_email='alexandruioan.cucu@gmail.com',
url='https://github.com/kux/django-admin-extend',
download_url='https://github.com/kux/django-admin-extend/archive/0.0.3.tar.gz',
install_requires=('Django>=1.3',),
packages=find_packages(),
include_package_data=True,
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='django-admin-extend',
version='0.0.2',
description=('Provides functionality for extending'
'ModelAdmin classes that have already'
'been registered by other apps'),
author='Ioan Alexandru Cucu',
author_email='alexandruioan.cucu@gmail.com',
url='https://github.com/kux/django-admin-extend',
download_url='https://github.com/kux/django-admin-extend/archive/0.0.2.tar.gz,
install_requires=('Django>=1.3',),
packages=find_packages(),
include_package_data=True,
)
<commit_msg>Fix download link and bump version<commit_after>#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='django-admin-extend',
version='0.0.3',
description=('Provides functionality for extending'
'ModelAdmin classes that have already'
'been registered by other apps'),
author='Ioan Alexandru Cucu',
author_email='alexandruioan.cucu@gmail.com',
url='https://github.com/kux/django-admin-extend',
download_url='https://github.com/kux/django-admin-extend/archive/0.0.3.tar.gz',
install_requires=('Django>=1.3',),
packages=find_packages(),
include_package_data=True,
)
|
412d84fd08f55e20a23314cb09a8e49751df38c2
|
setup.py
|
setup.py
|
from distutils.core import Extension, setup
try:
from Cython.Distutils import build_ext
except ImportError:
use_cython = False
else:
use_cython = True
if use_cython:
extensions = [
Extension('mathix.vector', ['mathix/vector.pyx']),
]
cmdclass = {
'build_ext': build_ext
}
else:
extensions = [
Extension('mathix.vector', ['mathix/vector.c']),
]
cmdclass = {}
setup(
name='mathix',
author='Peith Vergil',
version='0.1',
license='MIT',
cmdclass=cmdclass,
packages=[
'mathix',
],
keywords='useless simple math library',
description='A useless simple math library.',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Programming Language :: Cython',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
ext_modules=extensions
)
|
from distutils.core import Extension, setup
try:
from Cython.Build import cythonize
from Cython.Distutils import build_ext
except ImportError:
use_cython = False
else:
use_cython = True
if use_cython:
extensions = cythonize([
Extension('mathix.vector', ['mathix/vector.pyx']),
])
cmdclass = {
'build_ext': build_ext
}
else:
extensions = [
Extension('mathix.vector', ['mathix/vector.c']),
]
cmdclass = {}
setup(
name='mathix',
author='Peith Vergil',
version='0.1',
license='MIT',
cmdclass=cmdclass,
packages=[
'mathix',
],
keywords='useless simple math library',
description='A useless simple math library.',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Programming Language :: Cython',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
ext_modules=extensions
)
|
Use "cythonize" if Cython is installed.
|
Use "cythonize" if Cython is installed.
|
Python
|
mit
|
PeithVergil/cython-example
|
from distutils.core import Extension, setup
try:
from Cython.Distutils import build_ext
except ImportError:
use_cython = False
else:
use_cython = True
if use_cython:
extensions = [
Extension('mathix.vector', ['mathix/vector.pyx']),
]
cmdclass = {
'build_ext': build_ext
}
else:
extensions = [
Extension('mathix.vector', ['mathix/vector.c']),
]
cmdclass = {}
setup(
name='mathix',
author='Peith Vergil',
version='0.1',
license='MIT',
cmdclass=cmdclass,
packages=[
'mathix',
],
keywords='useless simple math library',
description='A useless simple math library.',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Programming Language :: Cython',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
ext_modules=extensions
)
Use "cythonize" if Cython is installed.
|
from distutils.core import Extension, setup
try:
from Cython.Build import cythonize
from Cython.Distutils import build_ext
except ImportError:
use_cython = False
else:
use_cython = True
if use_cython:
extensions = cythonize([
Extension('mathix.vector', ['mathix/vector.pyx']),
])
cmdclass = {
'build_ext': build_ext
}
else:
extensions = [
Extension('mathix.vector', ['mathix/vector.c']),
]
cmdclass = {}
setup(
name='mathix',
author='Peith Vergil',
version='0.1',
license='MIT',
cmdclass=cmdclass,
packages=[
'mathix',
],
keywords='useless simple math library',
description='A useless simple math library.',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Programming Language :: Cython',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
ext_modules=extensions
)
|
<commit_before>from distutils.core import Extension, setup
try:
from Cython.Distutils import build_ext
except ImportError:
use_cython = False
else:
use_cython = True
if use_cython:
extensions = [
Extension('mathix.vector', ['mathix/vector.pyx']),
]
cmdclass = {
'build_ext': build_ext
}
else:
extensions = [
Extension('mathix.vector', ['mathix/vector.c']),
]
cmdclass = {}
setup(
name='mathix',
author='Peith Vergil',
version='0.1',
license='MIT',
cmdclass=cmdclass,
packages=[
'mathix',
],
keywords='useless simple math library',
description='A useless simple math library.',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Programming Language :: Cython',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
ext_modules=extensions
)
<commit_msg>Use "cythonize" if Cython is installed.<commit_after>
|
from distutils.core import Extension, setup
try:
from Cython.Build import cythonize
from Cython.Distutils import build_ext
except ImportError:
use_cython = False
else:
use_cython = True
if use_cython:
extensions = cythonize([
Extension('mathix.vector', ['mathix/vector.pyx']),
])
cmdclass = {
'build_ext': build_ext
}
else:
extensions = [
Extension('mathix.vector', ['mathix/vector.c']),
]
cmdclass = {}
setup(
name='mathix',
author='Peith Vergil',
version='0.1',
license='MIT',
cmdclass=cmdclass,
packages=[
'mathix',
],
keywords='useless simple math library',
description='A useless simple math library.',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Programming Language :: Cython',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
ext_modules=extensions
)
|
from distutils.core import Extension, setup
try:
from Cython.Distutils import build_ext
except ImportError:
use_cython = False
else:
use_cython = True
if use_cython:
extensions = [
Extension('mathix.vector', ['mathix/vector.pyx']),
]
cmdclass = {
'build_ext': build_ext
}
else:
extensions = [
Extension('mathix.vector', ['mathix/vector.c']),
]
cmdclass = {}
setup(
name='mathix',
author='Peith Vergil',
version='0.1',
license='MIT',
cmdclass=cmdclass,
packages=[
'mathix',
],
keywords='useless simple math library',
description='A useless simple math library.',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Programming Language :: Cython',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
ext_modules=extensions
)
Use "cythonize" if Cython is installed.from distutils.core import Extension, setup
try:
from Cython.Build import cythonize
from Cython.Distutils import build_ext
except ImportError:
use_cython = False
else:
use_cython = True
if use_cython:
extensions = cythonize([
Extension('mathix.vector', ['mathix/vector.pyx']),
])
cmdclass = {
'build_ext': build_ext
}
else:
extensions = [
Extension('mathix.vector', ['mathix/vector.c']),
]
cmdclass = {}
setup(
name='mathix',
author='Peith Vergil',
version='0.1',
license='MIT',
cmdclass=cmdclass,
packages=[
'mathix',
],
keywords='useless simple math library',
description='A useless simple math library.',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Programming Language :: Cython',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
ext_modules=extensions
)
|
<commit_before>from distutils.core import Extension, setup
try:
from Cython.Distutils import build_ext
except ImportError:
use_cython = False
else:
use_cython = True
if use_cython:
extensions = [
Extension('mathix.vector', ['mathix/vector.pyx']),
]
cmdclass = {
'build_ext': build_ext
}
else:
extensions = [
Extension('mathix.vector', ['mathix/vector.c']),
]
cmdclass = {}
setup(
name='mathix',
author='Peith Vergil',
version='0.1',
license='MIT',
cmdclass=cmdclass,
packages=[
'mathix',
],
keywords='useless simple math library',
description='A useless simple math library.',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Programming Language :: Cython',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
ext_modules=extensions
)
<commit_msg>Use "cythonize" if Cython is installed.<commit_after>from distutils.core import Extension, setup
try:
from Cython.Build import cythonize
from Cython.Distutils import build_ext
except ImportError:
use_cython = False
else:
use_cython = True
if use_cython:
extensions = cythonize([
Extension('mathix.vector', ['mathix/vector.pyx']),
])
cmdclass = {
'build_ext': build_ext
}
else:
extensions = [
Extension('mathix.vector', ['mathix/vector.c']),
]
cmdclass = {}
setup(
name='mathix',
author='Peith Vergil',
version='0.1',
license='MIT',
cmdclass=cmdclass,
packages=[
'mathix',
],
keywords='useless simple math library',
description='A useless simple math library.',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Programming Language :: Cython',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
ext_modules=extensions
)
|
ed141f53bb4a1469ebfcdcf07d3d98169673198d
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import io
from setuptools import setup
setup(
name='django-brotli',
version='0.1.0',
description="""Middleware that compresses response using brotli algorithm.""",
long_description=io.open("README.rst", 'r', encoding="utf-8").read(),
url='https://github.com/illagrenan/django-brotli',
license='MIT',
author='Vasek Dohnal',
author_email='vaclav.dohnal@gmail.com',
packages=['django_brotli'],
install_requires=['django', 'brotlipy'],
include_package_data=True,
zip_safe=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
'Framework :: Django :: 1.10',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
]
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import io
from setuptools import setup
setup(
name='django-brotli',
version='0.1.0',
description="""Middleware that compresses response using brotli algorithm.""",
long_description=io.open("README.rst", 'r', encoding="utf-8").read(),
url='https://github.com/illagrenan/django-brotli',
license='MIT',
author='Vasek Dohnal',
author_email='vaclav.dohnal@gmail.com',
packages=['django_brotli'],
install_requires=[
":python_version<'3.5'": ['typing'],
'django',
'brotlipy'
],
include_package_data=True,
zip_safe=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
'Framework :: Django :: 1.10',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
]
)
|
Install typing module for older python versions.
|
Install typing module for older python versions.
|
Python
|
mit
|
illagrenan/django-brotli
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import io
from setuptools import setup
setup(
name='django-brotli',
version='0.1.0',
description="""Middleware that compresses response using brotli algorithm.""",
long_description=io.open("README.rst", 'r', encoding="utf-8").read(),
url='https://github.com/illagrenan/django-brotli',
license='MIT',
author='Vasek Dohnal',
author_email='vaclav.dohnal@gmail.com',
packages=['django_brotli'],
install_requires=['django', 'brotlipy'],
include_package_data=True,
zip_safe=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
'Framework :: Django :: 1.10',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
]
)
Install typing module for older python versions.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import io
from setuptools import setup
setup(
name='django-brotli',
version='0.1.0',
description="""Middleware that compresses response using brotli algorithm.""",
long_description=io.open("README.rst", 'r', encoding="utf-8").read(),
url='https://github.com/illagrenan/django-brotli',
license='MIT',
author='Vasek Dohnal',
author_email='vaclav.dohnal@gmail.com',
packages=['django_brotli'],
install_requires=[
":python_version<'3.5'": ['typing'],
'django',
'brotlipy'
],
include_package_data=True,
zip_safe=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
'Framework :: Django :: 1.10',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
]
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import io
from setuptools import setup
setup(
name='django-brotli',
version='0.1.0',
description="""Middleware that compresses response using brotli algorithm.""",
long_description=io.open("README.rst", 'r', encoding="utf-8").read(),
url='https://github.com/illagrenan/django-brotli',
license='MIT',
author='Vasek Dohnal',
author_email='vaclav.dohnal@gmail.com',
packages=['django_brotli'],
install_requires=['django', 'brotlipy'],
include_package_data=True,
zip_safe=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
'Framework :: Django :: 1.10',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
]
)
<commit_msg>Install typing module for older python versions.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import io
from setuptools import setup
setup(
name='django-brotli',
version='0.1.0',
description="""Middleware that compresses response using brotli algorithm.""",
long_description=io.open("README.rst", 'r', encoding="utf-8").read(),
url='https://github.com/illagrenan/django-brotli',
license='MIT',
author='Vasek Dohnal',
author_email='vaclav.dohnal@gmail.com',
packages=['django_brotli'],
install_requires=[
":python_version<'3.5'": ['typing'],
'django',
'brotlipy'
],
include_package_data=True,
zip_safe=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
'Framework :: Django :: 1.10',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
]
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import io
from setuptools import setup
setup(
name='django-brotli',
version='0.1.0',
description="""Middleware that compresses response using brotli algorithm.""",
long_description=io.open("README.rst", 'r', encoding="utf-8").read(),
url='https://github.com/illagrenan/django-brotli',
license='MIT',
author='Vasek Dohnal',
author_email='vaclav.dohnal@gmail.com',
packages=['django_brotli'],
install_requires=['django', 'brotlipy'],
include_package_data=True,
zip_safe=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
'Framework :: Django :: 1.10',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
]
)
Install typing module for older python versions.#!/usr/bin/env python
# -*- coding: utf-8 -*-
import io
from setuptools import setup
setup(
name='django-brotli',
version='0.1.0',
description="""Middleware that compresses response using brotli algorithm.""",
long_description=io.open("README.rst", 'r', encoding="utf-8").read(),
url='https://github.com/illagrenan/django-brotli',
license='MIT',
author='Vasek Dohnal',
author_email='vaclav.dohnal@gmail.com',
packages=['django_brotli'],
install_requires=[
":python_version<'3.5'": ['typing'],
'django',
'brotlipy'
],
include_package_data=True,
zip_safe=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
'Framework :: Django :: 1.10',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
]
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import io
from setuptools import setup
setup(
name='django-brotli',
version='0.1.0',
description="""Middleware that compresses response using brotli algorithm.""",
long_description=io.open("README.rst", 'r', encoding="utf-8").read(),
url='https://github.com/illagrenan/django-brotli',
license='MIT',
author='Vasek Dohnal',
author_email='vaclav.dohnal@gmail.com',
packages=['django_brotli'],
install_requires=['django', 'brotlipy'],
include_package_data=True,
zip_safe=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
'Framework :: Django :: 1.10',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
]
)
<commit_msg>Install typing module for older python versions.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import io
from setuptools import setup
setup(
name='django-brotli',
version='0.1.0',
description="""Middleware that compresses response using brotli algorithm.""",
long_description=io.open("README.rst", 'r', encoding="utf-8").read(),
url='https://github.com/illagrenan/django-brotli',
license='MIT',
author='Vasek Dohnal',
author_email='vaclav.dohnal@gmail.com',
packages=['django_brotli'],
install_requires=[
":python_version<'3.5'": ['typing'],
'django',
'brotlipy'
],
include_package_data=True,
zip_safe=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
'Framework :: Django :: 1.10',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
]
)
|
0c5b68ac40aeda415d1ccd551780f00eeafb54e8
|
setup.py
|
setup.py
|
from setuptools import setup
import sys
install_requires = [
'oauth2client>=1.3.2',
'pyOpenSSL==0.14',
'simplejson>=2.3.2',
]
tests_require = list(install_requires)
# Python 2 requires Mock to run tests
if sys.version_info < (3, 0):
tests_require += ['pbr==1.6', 'Mock']
packages = ['identitytoolkit',]
setup(
name = 'identity-toolkit-python-client',
packages = packages,
license="Apache 2.0",
version = '0.1.7',
description = 'Google Identity Toolkit python client library',
author = 'Jin Liu',
url = 'https://github.com/google/identity-toolkit-python-client',
download_url = 'https://github.com/google/identity-toolkit-python-client/archive/master.zip',
keywords = ['identity', 'google', 'login', 'toolkit'], # arbitrary keywords
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
],
install_requires = install_requires,
tests_require = tests_require,
test_suite = 'tests',
)
|
from setuptools import setup
import sys
install_requires = [
'oauth2client>=1.3.2',
'pyOpenSSL>=0.14',
'simplejson>=2.3.2',
]
tests_require = list(install_requires)
# Python 2 requires Mock to run tests
if sys.version_info < (3, 0):
tests_require += ['pbr==1.6', 'Mock']
packages = ['identitytoolkit',]
setup(
name = 'identity-toolkit-python-client',
packages = packages,
license="Apache 2.0",
version = '0.1.7',
description = 'Google Identity Toolkit python client library',
author = 'Jin Liu',
url = 'https://github.com/google/identity-toolkit-python-client',
download_url = 'https://github.com/google/identity-toolkit-python-client/archive/master.zip',
keywords = ['identity', 'google', 'login', 'toolkit'], # arbitrary keywords
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
],
install_requires = install_requires,
tests_require = tests_require,
test_suite = 'tests',
)
|
Fix the pyOpenSSL version requirement.
|
Fix the pyOpenSSL version requirement.
|
Python
|
apache-2.0
|
google/identity-toolkit-python-client
|
from setuptools import setup
import sys
install_requires = [
'oauth2client>=1.3.2',
'pyOpenSSL==0.14',
'simplejson>=2.3.2',
]
tests_require = list(install_requires)
# Python 2 requires Mock to run tests
if sys.version_info < (3, 0):
tests_require += ['pbr==1.6', 'Mock']
packages = ['identitytoolkit',]
setup(
name = 'identity-toolkit-python-client',
packages = packages,
license="Apache 2.0",
version = '0.1.7',
description = 'Google Identity Toolkit python client library',
author = 'Jin Liu',
url = 'https://github.com/google/identity-toolkit-python-client',
download_url = 'https://github.com/google/identity-toolkit-python-client/archive/master.zip',
keywords = ['identity', 'google', 'login', 'toolkit'], # arbitrary keywords
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
],
install_requires = install_requires,
tests_require = tests_require,
test_suite = 'tests',
)
Fix the pyOpenSSL version requirement.
|
from setuptools import setup
import sys
install_requires = [
'oauth2client>=1.3.2',
'pyOpenSSL>=0.14',
'simplejson>=2.3.2',
]
tests_require = list(install_requires)
# Python 2 requires Mock to run tests
if sys.version_info < (3, 0):
tests_require += ['pbr==1.6', 'Mock']
packages = ['identitytoolkit',]
setup(
name = 'identity-toolkit-python-client',
packages = packages,
license="Apache 2.0",
version = '0.1.7',
description = 'Google Identity Toolkit python client library',
author = 'Jin Liu',
url = 'https://github.com/google/identity-toolkit-python-client',
download_url = 'https://github.com/google/identity-toolkit-python-client/archive/master.zip',
keywords = ['identity', 'google', 'login', 'toolkit'], # arbitrary keywords
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
],
install_requires = install_requires,
tests_require = tests_require,
test_suite = 'tests',
)
|
<commit_before>from setuptools import setup
import sys
install_requires = [
'oauth2client>=1.3.2',
'pyOpenSSL==0.14',
'simplejson>=2.3.2',
]
tests_require = list(install_requires)
# Python 2 requires Mock to run tests
if sys.version_info < (3, 0):
tests_require += ['pbr==1.6', 'Mock']
packages = ['identitytoolkit',]
setup(
name = 'identity-toolkit-python-client',
packages = packages,
license="Apache 2.0",
version = '0.1.7',
description = 'Google Identity Toolkit python client library',
author = 'Jin Liu',
url = 'https://github.com/google/identity-toolkit-python-client',
download_url = 'https://github.com/google/identity-toolkit-python-client/archive/master.zip',
keywords = ['identity', 'google', 'login', 'toolkit'], # arbitrary keywords
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
],
install_requires = install_requires,
tests_require = tests_require,
test_suite = 'tests',
)
<commit_msg>Fix the pyOpenSSL version requirement.<commit_after>
|
from setuptools import setup
import sys
install_requires = [
'oauth2client>=1.3.2',
'pyOpenSSL>=0.14',
'simplejson>=2.3.2',
]
tests_require = list(install_requires)
# Python 2 requires Mock to run tests
if sys.version_info < (3, 0):
tests_require += ['pbr==1.6', 'Mock']
packages = ['identitytoolkit',]
setup(
name = 'identity-toolkit-python-client',
packages = packages,
license="Apache 2.0",
version = '0.1.7',
description = 'Google Identity Toolkit python client library',
author = 'Jin Liu',
url = 'https://github.com/google/identity-toolkit-python-client',
download_url = 'https://github.com/google/identity-toolkit-python-client/archive/master.zip',
keywords = ['identity', 'google', 'login', 'toolkit'], # arbitrary keywords
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
],
install_requires = install_requires,
tests_require = tests_require,
test_suite = 'tests',
)
|
from setuptools import setup
import sys
install_requires = [
'oauth2client>=1.3.2',
'pyOpenSSL==0.14',
'simplejson>=2.3.2',
]
tests_require = list(install_requires)
# Python 2 requires Mock to run tests
if sys.version_info < (3, 0):
tests_require += ['pbr==1.6', 'Mock']
packages = ['identitytoolkit',]
setup(
name = 'identity-toolkit-python-client',
packages = packages,
license="Apache 2.0",
version = '0.1.7',
description = 'Google Identity Toolkit python client library',
author = 'Jin Liu',
url = 'https://github.com/google/identity-toolkit-python-client',
download_url = 'https://github.com/google/identity-toolkit-python-client/archive/master.zip',
keywords = ['identity', 'google', 'login', 'toolkit'], # arbitrary keywords
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
],
install_requires = install_requires,
tests_require = tests_require,
test_suite = 'tests',
)
Fix the pyOpenSSL version requirement.from setuptools import setup
import sys
install_requires = [
'oauth2client>=1.3.2',
'pyOpenSSL>=0.14',
'simplejson>=2.3.2',
]
tests_require = list(install_requires)
# Python 2 requires Mock to run tests
if sys.version_info < (3, 0):
tests_require += ['pbr==1.6', 'Mock']
packages = ['identitytoolkit',]
setup(
name = 'identity-toolkit-python-client',
packages = packages,
license="Apache 2.0",
version = '0.1.7',
description = 'Google Identity Toolkit python client library',
author = 'Jin Liu',
url = 'https://github.com/google/identity-toolkit-python-client',
download_url = 'https://github.com/google/identity-toolkit-python-client/archive/master.zip',
keywords = ['identity', 'google', 'login', 'toolkit'], # arbitrary keywords
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
],
install_requires = install_requires,
tests_require = tests_require,
test_suite = 'tests',
)
|
<commit_before>from setuptools import setup
import sys
install_requires = [
'oauth2client>=1.3.2',
'pyOpenSSL==0.14',
'simplejson>=2.3.2',
]
tests_require = list(install_requires)
# Python 2 requires Mock to run tests
if sys.version_info < (3, 0):
tests_require += ['pbr==1.6', 'Mock']
packages = ['identitytoolkit',]
setup(
name = 'identity-toolkit-python-client',
packages = packages,
license="Apache 2.0",
version = '0.1.7',
description = 'Google Identity Toolkit python client library',
author = 'Jin Liu',
url = 'https://github.com/google/identity-toolkit-python-client',
download_url = 'https://github.com/google/identity-toolkit-python-client/archive/master.zip',
keywords = ['identity', 'google', 'login', 'toolkit'], # arbitrary keywords
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
],
install_requires = install_requires,
tests_require = tests_require,
test_suite = 'tests',
)
<commit_msg>Fix the pyOpenSSL version requirement.<commit_after>from setuptools import setup
import sys
install_requires = [
'oauth2client>=1.3.2',
'pyOpenSSL>=0.14',
'simplejson>=2.3.2',
]
tests_require = list(install_requires)
# Python 2 requires Mock to run tests
if sys.version_info < (3, 0):
tests_require += ['pbr==1.6', 'Mock']
packages = ['identitytoolkit',]
setup(
name = 'identity-toolkit-python-client',
packages = packages,
license="Apache 2.0",
version = '0.1.7',
description = 'Google Identity Toolkit python client library',
author = 'Jin Liu',
url = 'https://github.com/google/identity-toolkit-python-client',
download_url = 'https://github.com/google/identity-toolkit-python-client/archive/master.zip',
keywords = ['identity', 'google', 'login', 'toolkit'], # arbitrary keywords
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
],
install_requires = install_requires,
tests_require = tests_require,
test_suite = 'tests',
)
|
3aa6936bbcda5f1af356bff9ca2d09ed1b83111c
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
long_description = 'Baler is a tool that makes it easy to bundle and use resources (images, strings files, etc.) in a compiled static library.'
if os.path.exists('README.rst'):
long_description = open('README.rst').read()
if os.path.exists('LICENSE'):
license = open('LICENSE').read()
setup(name='baler',
version='1.0.2',
description='Bundle assets into iOS static libraries',
long_description=long_description,
keywords=['ios', 'objective-c', 'generation', 'static', 'resource', 'NSBundle', 'mobile'],
author='PayPal SDK Team',
author_email='brfitzgerald@paypal.com, jbleechersnyder@paypal.com',
url='https://github.com/paypal/baler',
scripts=['bale'],
packages=find_packages(),
package_dir={'baler': 'baler'},
package_data={'baler': ['templates/*.j2']},
license=license,
install_requires=[
'Jinja2 >= 2.6',
'argparse >= 1.2.1',
'biplist >= 0.5',
'six >= 1.2.0',
'wsgiref >= 0.1.2',
])
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
long_description = 'Baler is a tool that makes it easy to bundle and use resources (images, strings files, etc.) in a compiled static library.'
if os.path.exists('README.rst'):
long_description = open('README.rst').read()
if os.path.exists('LICENSE'):
license = open('LICENSE').read()
setup(name='baler',
version='1.0.3',
description='Bundle assets into iOS static libraries',
long_description=long_description,
keywords=['ios', 'objective-c', 'generation', 'static', 'resource', 'NSBundle', 'mobile'],
author='PayPal SDK Team',
author_email='brfitzgerald@paypal.com, jbleechersnyder@paypal.com',
url='https://github.com/paypal/baler',
scripts=['bale'],
packages=find_packages(),
package_dir={'baler': 'baler'},
package_data={'baler': ['templates/*.j2']},
license=license,
install_requires=[
'Jinja2 >= 2.6',
'argparse >= 1.1',
'biplist >= 0.5',
'six >= 1.2.0',
'wsgiref >= 0.1.2',
])
|
Change argparse requirement to >=1.1
|
Change argparse requirement to >=1.1
|
Python
|
mit
|
paypal/baler
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
long_description = 'Baler is a tool that makes it easy to bundle and use resources (images, strings files, etc.) in a compiled static library.'
if os.path.exists('README.rst'):
long_description = open('README.rst').read()
if os.path.exists('LICENSE'):
license = open('LICENSE').read()
setup(name='baler',
version='1.0.2',
description='Bundle assets into iOS static libraries',
long_description=long_description,
keywords=['ios', 'objective-c', 'generation', 'static', 'resource', 'NSBundle', 'mobile'],
author='PayPal SDK Team',
author_email='brfitzgerald@paypal.com, jbleechersnyder@paypal.com',
url='https://github.com/paypal/baler',
scripts=['bale'],
packages=find_packages(),
package_dir={'baler': 'baler'},
package_data={'baler': ['templates/*.j2']},
license=license,
install_requires=[
'Jinja2 >= 2.6',
'argparse >= 1.2.1',
'biplist >= 0.5',
'six >= 1.2.0',
'wsgiref >= 0.1.2',
])
Change argparse requirement to >=1.1
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
long_description = 'Baler is a tool that makes it easy to bundle and use resources (images, strings files, etc.) in a compiled static library.'
if os.path.exists('README.rst'):
long_description = open('README.rst').read()
if os.path.exists('LICENSE'):
license = open('LICENSE').read()
setup(name='baler',
version='1.0.3',
description='Bundle assets into iOS static libraries',
long_description=long_description,
keywords=['ios', 'objective-c', 'generation', 'static', 'resource', 'NSBundle', 'mobile'],
author='PayPal SDK Team',
author_email='brfitzgerald@paypal.com, jbleechersnyder@paypal.com',
url='https://github.com/paypal/baler',
scripts=['bale'],
packages=find_packages(),
package_dir={'baler': 'baler'},
package_data={'baler': ['templates/*.j2']},
license=license,
install_requires=[
'Jinja2 >= 2.6',
'argparse >= 1.1',
'biplist >= 0.5',
'six >= 1.2.0',
'wsgiref >= 0.1.2',
])
|
<commit_before>#!/usr/bin/env python
import os
from setuptools import setup, find_packages
long_description = 'Baler is a tool that makes it easy to bundle and use resources (images, strings files, etc.) in a compiled static library.'
if os.path.exists('README.rst'):
long_description = open('README.rst').read()
if os.path.exists('LICENSE'):
license = open('LICENSE').read()
setup(name='baler',
version='1.0.2',
description='Bundle assets into iOS static libraries',
long_description=long_description,
keywords=['ios', 'objective-c', 'generation', 'static', 'resource', 'NSBundle', 'mobile'],
author='PayPal SDK Team',
author_email='brfitzgerald@paypal.com, jbleechersnyder@paypal.com',
url='https://github.com/paypal/baler',
scripts=['bale'],
packages=find_packages(),
package_dir={'baler': 'baler'},
package_data={'baler': ['templates/*.j2']},
license=license,
install_requires=[
'Jinja2 >= 2.6',
'argparse >= 1.2.1',
'biplist >= 0.5',
'six >= 1.2.0',
'wsgiref >= 0.1.2',
])
<commit_msg>Change argparse requirement to >=1.1<commit_after>
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
long_description = 'Baler is a tool that makes it easy to bundle and use resources (images, strings files, etc.) in a compiled static library.'
if os.path.exists('README.rst'):
long_description = open('README.rst').read()
if os.path.exists('LICENSE'):
license = open('LICENSE').read()
setup(name='baler',
version='1.0.3',
description='Bundle assets into iOS static libraries',
long_description=long_description,
keywords=['ios', 'objective-c', 'generation', 'static', 'resource', 'NSBundle', 'mobile'],
author='PayPal SDK Team',
author_email='brfitzgerald@paypal.com, jbleechersnyder@paypal.com',
url='https://github.com/paypal/baler',
scripts=['bale'],
packages=find_packages(),
package_dir={'baler': 'baler'},
package_data={'baler': ['templates/*.j2']},
license=license,
install_requires=[
'Jinja2 >= 2.6',
'argparse >= 1.1',
'biplist >= 0.5',
'six >= 1.2.0',
'wsgiref >= 0.1.2',
])
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
long_description = 'Baler is a tool that makes it easy to bundle and use resources (images, strings files, etc.) in a compiled static library.'
if os.path.exists('README.rst'):
long_description = open('README.rst').read()
if os.path.exists('LICENSE'):
license = open('LICENSE').read()
setup(name='baler',
version='1.0.2',
description='Bundle assets into iOS static libraries',
long_description=long_description,
keywords=['ios', 'objective-c', 'generation', 'static', 'resource', 'NSBundle', 'mobile'],
author='PayPal SDK Team',
author_email='brfitzgerald@paypal.com, jbleechersnyder@paypal.com',
url='https://github.com/paypal/baler',
scripts=['bale'],
packages=find_packages(),
package_dir={'baler': 'baler'},
package_data={'baler': ['templates/*.j2']},
license=license,
install_requires=[
'Jinja2 >= 2.6',
'argparse >= 1.2.1',
'biplist >= 0.5',
'six >= 1.2.0',
'wsgiref >= 0.1.2',
])
Change argparse requirement to >=1.1#!/usr/bin/env python
import os
from setuptools import setup, find_packages
long_description = 'Baler is a tool that makes it easy to bundle and use resources (images, strings files, etc.) in a compiled static library.'
if os.path.exists('README.rst'):
long_description = open('README.rst').read()
if os.path.exists('LICENSE'):
license = open('LICENSE').read()
setup(name='baler',
version='1.0.3',
description='Bundle assets into iOS static libraries',
long_description=long_description,
keywords=['ios', 'objective-c', 'generation', 'static', 'resource', 'NSBundle', 'mobile'],
author='PayPal SDK Team',
author_email='brfitzgerald@paypal.com, jbleechersnyder@paypal.com',
url='https://github.com/paypal/baler',
scripts=['bale'],
packages=find_packages(),
package_dir={'baler': 'baler'},
package_data={'baler': ['templates/*.j2']},
license=license,
install_requires=[
'Jinja2 >= 2.6',
'argparse >= 1.1',
'biplist >= 0.5',
'six >= 1.2.0',
'wsgiref >= 0.1.2',
])
|
<commit_before>#!/usr/bin/env python
import os
from setuptools import setup, find_packages
long_description = 'Baler is a tool that makes it easy to bundle and use resources (images, strings files, etc.) in a compiled static library.'
if os.path.exists('README.rst'):
long_description = open('README.rst').read()
if os.path.exists('LICENSE'):
license = open('LICENSE').read()
setup(name='baler',
version='1.0.2',
description='Bundle assets into iOS static libraries',
long_description=long_description,
keywords=['ios', 'objective-c', 'generation', 'static', 'resource', 'NSBundle', 'mobile'],
author='PayPal SDK Team',
author_email='brfitzgerald@paypal.com, jbleechersnyder@paypal.com',
url='https://github.com/paypal/baler',
scripts=['bale'],
packages=find_packages(),
package_dir={'baler': 'baler'},
package_data={'baler': ['templates/*.j2']},
license=license,
install_requires=[
'Jinja2 >= 2.6',
'argparse >= 1.2.1',
'biplist >= 0.5',
'six >= 1.2.0',
'wsgiref >= 0.1.2',
])
<commit_msg>Change argparse requirement to >=1.1<commit_after>#!/usr/bin/env python
import os
from setuptools import setup, find_packages
long_description = 'Baler is a tool that makes it easy to bundle and use resources (images, strings files, etc.) in a compiled static library.'
if os.path.exists('README.rst'):
long_description = open('README.rst').read()
if os.path.exists('LICENSE'):
license = open('LICENSE').read()
setup(name='baler',
version='1.0.3',
description='Bundle assets into iOS static libraries',
long_description=long_description,
keywords=['ios', 'objective-c', 'generation', 'static', 'resource', 'NSBundle', 'mobile'],
author='PayPal SDK Team',
author_email='brfitzgerald@paypal.com, jbleechersnyder@paypal.com',
url='https://github.com/paypal/baler',
scripts=['bale'],
packages=find_packages(),
package_dir={'baler': 'baler'},
package_data={'baler': ['templates/*.j2']},
license=license,
install_requires=[
'Jinja2 >= 2.6',
'argparse >= 1.1',
'biplist >= 0.5',
'six >= 1.2.0',
'wsgiref >= 0.1.2',
])
|
ecc73414fae494e3519b58eeee3aff6fee26a884
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='plutokore',
packages=['plutokore'],
version='0.1',
description='Python tool for analysing PLUTO simulation data',
author='Patrick Yates',
author_email='patrick.yates@utas.edu.au',
url='https://github.com/opcon/plutokore',
keywords=['pluto', 'astrophsyics'],
license='MIT',
requires=['numpy', 'matplotlib', 'tabulate'],
classifiers=[
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
]
)
|
from setuptools import setup, find_packages
setup(
name='plutokore',
packages=find_packages(),
version='0.1',
description='Python tool for analysing PLUTO simulation data',
author='Patrick Yates',
author_email='patrick.yates@utas.edu.au',
url='https://github.com/opcon/plutokore',
keywords=['pluto', 'astrophsyics'],
license='MIT',
requires=['numpy', 'matplotlib', 'tabulate'],
classifiers=[
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
]
)
|
Use find_packages to get all packages
|
Use find_packages to get all packages
|
Python
|
mit
|
opcon/plutokore,opcon/plutokore
|
from setuptools import setup
setup(
name='plutokore',
packages=['plutokore'],
version='0.1',
description='Python tool for analysing PLUTO simulation data',
author='Patrick Yates',
author_email='patrick.yates@utas.edu.au',
url='https://github.com/opcon/plutokore',
keywords=['pluto', 'astrophsyics'],
license='MIT',
requires=['numpy', 'matplotlib', 'tabulate'],
classifiers=[
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
]
)
Use find_packages to get all packages
|
from setuptools import setup, find_packages
setup(
name='plutokore',
packages=find_packages(),
version='0.1',
description='Python tool for analysing PLUTO simulation data',
author='Patrick Yates',
author_email='patrick.yates@utas.edu.au',
url='https://github.com/opcon/plutokore',
keywords=['pluto', 'astrophsyics'],
license='MIT',
requires=['numpy', 'matplotlib', 'tabulate'],
classifiers=[
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
]
)
|
<commit_before>from setuptools import setup
setup(
name='plutokore',
packages=['plutokore'],
version='0.1',
description='Python tool for analysing PLUTO simulation data',
author='Patrick Yates',
author_email='patrick.yates@utas.edu.au',
url='https://github.com/opcon/plutokore',
keywords=['pluto', 'astrophsyics'],
license='MIT',
requires=['numpy', 'matplotlib', 'tabulate'],
classifiers=[
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
]
)
<commit_msg>Use find_packages to get all packages<commit_after>
|
from setuptools import setup, find_packages
setup(
name='plutokore',
packages=find_packages(),
version='0.1',
description='Python tool for analysing PLUTO simulation data',
author='Patrick Yates',
author_email='patrick.yates@utas.edu.au',
url='https://github.com/opcon/plutokore',
keywords=['pluto', 'astrophsyics'],
license='MIT',
requires=['numpy', 'matplotlib', 'tabulate'],
classifiers=[
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
]
)
|
from setuptools import setup
setup(
name='plutokore',
packages=['plutokore'],
version='0.1',
description='Python tool for analysing PLUTO simulation data',
author='Patrick Yates',
author_email='patrick.yates@utas.edu.au',
url='https://github.com/opcon/plutokore',
keywords=['pluto', 'astrophsyics'],
license='MIT',
requires=['numpy', 'matplotlib', 'tabulate'],
classifiers=[
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
]
)
Use find_packages to get all packagesfrom setuptools import setup, find_packages
setup(
name='plutokore',
packages=find_packages(),
version='0.1',
description='Python tool for analysing PLUTO simulation data',
author='Patrick Yates',
author_email='patrick.yates@utas.edu.au',
url='https://github.com/opcon/plutokore',
keywords=['pluto', 'astrophsyics'],
license='MIT',
requires=['numpy', 'matplotlib', 'tabulate'],
classifiers=[
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
]
)
|
<commit_before>from setuptools import setup
setup(
name='plutokore',
packages=['plutokore'],
version='0.1',
description='Python tool for analysing PLUTO simulation data',
author='Patrick Yates',
author_email='patrick.yates@utas.edu.au',
url='https://github.com/opcon/plutokore',
keywords=['pluto', 'astrophsyics'],
license='MIT',
requires=['numpy', 'matplotlib', 'tabulate'],
classifiers=[
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
]
)
<commit_msg>Use find_packages to get all packages<commit_after>from setuptools import setup, find_packages
setup(
name='plutokore',
packages=find_packages(),
version='0.1',
description='Python tool for analysing PLUTO simulation data',
author='Patrick Yates',
author_email='patrick.yates@utas.edu.au',
url='https://github.com/opcon/plutokore',
keywords=['pluto', 'astrophsyics'],
license='MIT',
requires=['numpy', 'matplotlib', 'tabulate'],
classifiers=[
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
]
)
|
b02bc19d8326d6cc010a1feffe9d047eabcf7351
|
setup.py
|
setup.py
|
import codecs
import os
from setuptools import setup, find_packages
HERE = os.path.abspath(os.path.dirname(__file__))
def read(*parts):
# intentionally *not* adding an encoding option to open, See:
# https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690
return codecs.open(os.path.join(HERE, *parts), 'r').read()
setup(
name='kafka-dev-tools',
version='0.0.1',
author='Neha Narkhede',
author_email='neha.narkhede@gmail.com',
maintainer='Evgeny Vereshchagin',
maintainer_email='evvers@ya.ru',
url='https://github.com/evvers/kafka-dev-tools',
description='Tools for Kafka developers',
long_description=read('README.rst'),
packages=find_packages(),
install_requires=[
'jira-python',
'RBTools',
],
entry_points = {
'console_scripts': [
'kafka-patch-review=kafka_patch_review:main',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
keywords='kafka',
)
|
import codecs
import os
from setuptools import setup, find_packages
HERE = os.path.abspath(os.path.dirname(__file__))
def read(*parts):
# intentionally *not* adding an encoding option to open, See:
# https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690
return codecs.open(os.path.join(HERE, *parts), 'r').read()
setup(
name='kafka-dev-tools',
version='0.0.1',
author='Neha Narkhede',
author_email='neha.narkhede@gmail.com',
maintainer='Evgeny Vereshchagin',
maintainer_email='evvers@ya.ru',
url='https://github.com/evvers/kafka-dev-tools',
description='Tools for Kafka developers',
long_description=read('README.rst'),
packages=find_packages(),
install_requires=[
'jira-python',
'RBTools',
],
entry_points = {
'console_scripts': [
'kafka-patch-review=kafka_dev_tools.utils.kafka_patch_review:main',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
keywords='kafka',
)
|
Change path to kafka-patch-review source
|
Change path to kafka-patch-review source
|
Python
|
apache-2.0
|
evvers/kafka-dev-tools
|
import codecs
import os
from setuptools import setup, find_packages
HERE = os.path.abspath(os.path.dirname(__file__))
def read(*parts):
# intentionally *not* adding an encoding option to open, See:
# https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690
return codecs.open(os.path.join(HERE, *parts), 'r').read()
setup(
name='kafka-dev-tools',
version='0.0.1',
author='Neha Narkhede',
author_email='neha.narkhede@gmail.com',
maintainer='Evgeny Vereshchagin',
maintainer_email='evvers@ya.ru',
url='https://github.com/evvers/kafka-dev-tools',
description='Tools for Kafka developers',
long_description=read('README.rst'),
packages=find_packages(),
install_requires=[
'jira-python',
'RBTools',
],
entry_points = {
'console_scripts': [
'kafka-patch-review=kafka_patch_review:main',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
keywords='kafka',
)
Change path to kafka-patch-review source
|
import codecs
import os
from setuptools import setup, find_packages
HERE = os.path.abspath(os.path.dirname(__file__))
def read(*parts):
# intentionally *not* adding an encoding option to open, See:
# https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690
return codecs.open(os.path.join(HERE, *parts), 'r').read()
setup(
name='kafka-dev-tools',
version='0.0.1',
author='Neha Narkhede',
author_email='neha.narkhede@gmail.com',
maintainer='Evgeny Vereshchagin',
maintainer_email='evvers@ya.ru',
url='https://github.com/evvers/kafka-dev-tools',
description='Tools for Kafka developers',
long_description=read('README.rst'),
packages=find_packages(),
install_requires=[
'jira-python',
'RBTools',
],
entry_points = {
'console_scripts': [
'kafka-patch-review=kafka_dev_tools.utils.kafka_patch_review:main',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
keywords='kafka',
)
|
<commit_before>import codecs
import os
from setuptools import setup, find_packages
HERE = os.path.abspath(os.path.dirname(__file__))
def read(*parts):
# intentionally *not* adding an encoding option to open, See:
# https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690
return codecs.open(os.path.join(HERE, *parts), 'r').read()
setup(
name='kafka-dev-tools',
version='0.0.1',
author='Neha Narkhede',
author_email='neha.narkhede@gmail.com',
maintainer='Evgeny Vereshchagin',
maintainer_email='evvers@ya.ru',
url='https://github.com/evvers/kafka-dev-tools',
description='Tools for Kafka developers',
long_description=read('README.rst'),
packages=find_packages(),
install_requires=[
'jira-python',
'RBTools',
],
entry_points = {
'console_scripts': [
'kafka-patch-review=kafka_patch_review:main',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
keywords='kafka',
)
<commit_msg>Change path to kafka-patch-review source<commit_after>
|
import codecs
import os
from setuptools import setup, find_packages
HERE = os.path.abspath(os.path.dirname(__file__))
def read(*parts):
# intentionally *not* adding an encoding option to open, See:
# https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690
return codecs.open(os.path.join(HERE, *parts), 'r').read()
setup(
name='kafka-dev-tools',
version='0.0.1',
author='Neha Narkhede',
author_email='neha.narkhede@gmail.com',
maintainer='Evgeny Vereshchagin',
maintainer_email='evvers@ya.ru',
url='https://github.com/evvers/kafka-dev-tools',
description='Tools for Kafka developers',
long_description=read('README.rst'),
packages=find_packages(),
install_requires=[
'jira-python',
'RBTools',
],
entry_points = {
'console_scripts': [
'kafka-patch-review=kafka_dev_tools.utils.kafka_patch_review:main',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
keywords='kafka',
)
|
import codecs
import os
from setuptools import setup, find_packages
HERE = os.path.abspath(os.path.dirname(__file__))
def read(*parts):
# intentionally *not* adding an encoding option to open, See:
# https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690
return codecs.open(os.path.join(HERE, *parts), 'r').read()
setup(
name='kafka-dev-tools',
version='0.0.1',
author='Neha Narkhede',
author_email='neha.narkhede@gmail.com',
maintainer='Evgeny Vereshchagin',
maintainer_email='evvers@ya.ru',
url='https://github.com/evvers/kafka-dev-tools',
description='Tools for Kafka developers',
long_description=read('README.rst'),
packages=find_packages(),
install_requires=[
'jira-python',
'RBTools',
],
entry_points = {
'console_scripts': [
'kafka-patch-review=kafka_patch_review:main',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
keywords='kafka',
)
Change path to kafka-patch-review sourceimport codecs
import os
from setuptools import setup, find_packages
HERE = os.path.abspath(os.path.dirname(__file__))
def read(*parts):
# intentionally *not* adding an encoding option to open, See:
# https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690
return codecs.open(os.path.join(HERE, *parts), 'r').read()
setup(
name='kafka-dev-tools',
version='0.0.1',
author='Neha Narkhede',
author_email='neha.narkhede@gmail.com',
maintainer='Evgeny Vereshchagin',
maintainer_email='evvers@ya.ru',
url='https://github.com/evvers/kafka-dev-tools',
description='Tools for Kafka developers',
long_description=read('README.rst'),
packages=find_packages(),
install_requires=[
'jira-python',
'RBTools',
],
entry_points = {
'console_scripts': [
'kafka-patch-review=kafka_dev_tools.utils.kafka_patch_review:main',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
keywords='kafka',
)
|
<commit_before>import codecs
import os
from setuptools import setup, find_packages
HERE = os.path.abspath(os.path.dirname(__file__))
def read(*parts):
# intentionally *not* adding an encoding option to open, See:
# https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690
return codecs.open(os.path.join(HERE, *parts), 'r').read()
setup(
name='kafka-dev-tools',
version='0.0.1',
author='Neha Narkhede',
author_email='neha.narkhede@gmail.com',
maintainer='Evgeny Vereshchagin',
maintainer_email='evvers@ya.ru',
url='https://github.com/evvers/kafka-dev-tools',
description='Tools for Kafka developers',
long_description=read('README.rst'),
packages=find_packages(),
install_requires=[
'jira-python',
'RBTools',
],
entry_points = {
'console_scripts': [
'kafka-patch-review=kafka_patch_review:main',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
keywords='kafka',
)
<commit_msg>Change path to kafka-patch-review source<commit_after>import codecs
import os
from setuptools import setup, find_packages
HERE = os.path.abspath(os.path.dirname(__file__))
def read(*parts):
# intentionally *not* adding an encoding option to open, See:
# https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690
return codecs.open(os.path.join(HERE, *parts), 'r').read()
setup(
name='kafka-dev-tools',
version='0.0.1',
author='Neha Narkhede',
author_email='neha.narkhede@gmail.com',
maintainer='Evgeny Vereshchagin',
maintainer_email='evvers@ya.ru',
url='https://github.com/evvers/kafka-dev-tools',
description='Tools for Kafka developers',
long_description=read('README.rst'),
packages=find_packages(),
install_requires=[
'jira-python',
'RBTools',
],
entry_points = {
'console_scripts': [
'kafka-patch-review=kafka_dev_tools.utils.kafka_patch_review:main',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
keywords='kafka',
)
|
6b65eb6767390ac01bb74306cabdb76e97f96054
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name='wagtail-commons',
version='0.0.1',
author=u'Brett Grace',
author_email='brett@codigious.com',
packages=['wagtail_commons'],
url='http://github.com/bgrace/wagtail-commons',
license='BSD licence, see LICENCE file',
description='Utility commands and mixins for Wagtail CMS',
long_description=open('README').read(),
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
],
install_requires=[
'pyyaml >= 3.11',
'markdown >= 2.4.1',
]
)
|
from distutils.core import setup
setup(
name='wagtail-commons',
version='0.0.1',
author=u'Brett Grace',
author_email='brett@codigious.com',
packages=['wagtail_commons', 'wagtail_commons.core'],
url='http://github.com/bgrace/wagtail-commons',
license='BSD licence, see LICENCE file',
description='Utility commands and mixins for Wagtail CMS',
long_description=open('README').read(),
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
],
install_requires=[
'pyyaml >= 3.11',
'markdown >= 2.4.1',
]
)
|
Add subpaackage to allow use with -e
|
Add subpaackage to allow use with -e
Rookie mistake
|
Python
|
bsd-3-clause
|
bgrace/wagtail-commons
|
from distutils.core import setup
setup(
name='wagtail-commons',
version='0.0.1',
author=u'Brett Grace',
author_email='brett@codigious.com',
packages=['wagtail_commons'],
url='http://github.com/bgrace/wagtail-commons',
license='BSD licence, see LICENCE file',
description='Utility commands and mixins for Wagtail CMS',
long_description=open('README').read(),
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
],
install_requires=[
'pyyaml >= 3.11',
'markdown >= 2.4.1',
]
)Add subpaackage to allow use with -e
Rookie mistake
|
from distutils.core import setup
setup(
name='wagtail-commons',
version='0.0.1',
author=u'Brett Grace',
author_email='brett@codigious.com',
packages=['wagtail_commons', 'wagtail_commons.core'],
url='http://github.com/bgrace/wagtail-commons',
license='BSD licence, see LICENCE file',
description='Utility commands and mixins for Wagtail CMS',
long_description=open('README').read(),
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
],
install_requires=[
'pyyaml >= 3.11',
'markdown >= 2.4.1',
]
)
|
<commit_before>from distutils.core import setup
setup(
name='wagtail-commons',
version='0.0.1',
author=u'Brett Grace',
author_email='brett@codigious.com',
packages=['wagtail_commons'],
url='http://github.com/bgrace/wagtail-commons',
license='BSD licence, see LICENCE file',
description='Utility commands and mixins for Wagtail CMS',
long_description=open('README').read(),
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
],
install_requires=[
'pyyaml >= 3.11',
'markdown >= 2.4.1',
]
)<commit_msg>Add subpaackage to allow use with -e
Rookie mistake<commit_after>
|
from distutils.core import setup
setup(
name='wagtail-commons',
version='0.0.1',
author=u'Brett Grace',
author_email='brett@codigious.com',
packages=['wagtail_commons', 'wagtail_commons.core'],
url='http://github.com/bgrace/wagtail-commons',
license='BSD licence, see LICENCE file',
description='Utility commands and mixins for Wagtail CMS',
long_description=open('README').read(),
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
],
install_requires=[
'pyyaml >= 3.11',
'markdown >= 2.4.1',
]
)
|
from distutils.core import setup
setup(
name='wagtail-commons',
version='0.0.1',
author=u'Brett Grace',
author_email='brett@codigious.com',
packages=['wagtail_commons'],
url='http://github.com/bgrace/wagtail-commons',
license='BSD licence, see LICENCE file',
description='Utility commands and mixins for Wagtail CMS',
long_description=open('README').read(),
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
],
install_requires=[
'pyyaml >= 3.11',
'markdown >= 2.4.1',
]
)Add subpaackage to allow use with -e
Rookie mistakefrom distutils.core import setup
setup(
name='wagtail-commons',
version='0.0.1',
author=u'Brett Grace',
author_email='brett@codigious.com',
packages=['wagtail_commons', 'wagtail_commons.core'],
url='http://github.com/bgrace/wagtail-commons',
license='BSD licence, see LICENCE file',
description='Utility commands and mixins for Wagtail CMS',
long_description=open('README').read(),
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
],
install_requires=[
'pyyaml >= 3.11',
'markdown >= 2.4.1',
]
)
|
<commit_before>from distutils.core import setup
setup(
name='wagtail-commons',
version='0.0.1',
author=u'Brett Grace',
author_email='brett@codigious.com',
packages=['wagtail_commons'],
url='http://github.com/bgrace/wagtail-commons',
license='BSD licence, see LICENCE file',
description='Utility commands and mixins for Wagtail CMS',
long_description=open('README').read(),
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
],
install_requires=[
'pyyaml >= 3.11',
'markdown >= 2.4.1',
]
)<commit_msg>Add subpaackage to allow use with -e
Rookie mistake<commit_after>from distutils.core import setup
setup(
name='wagtail-commons',
version='0.0.1',
author=u'Brett Grace',
author_email='brett@codigious.com',
packages=['wagtail_commons', 'wagtail_commons.core'],
url='http://github.com/bgrace/wagtail-commons',
license='BSD licence, see LICENCE file',
description='Utility commands and mixins for Wagtail CMS',
long_description=open('README').read(),
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
],
install_requires=[
'pyyaml >= 3.11',
'markdown >= 2.4.1',
]
)
|
24b2e522f88338ed660744058274d0c233acb745
|
setup.py
|
setup.py
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from setuptools import setup
setup(
name="atsy",
version="0.0.1",
description="AreTheySlimYet",
long_description="A set of tools for measuring cross-browser, cross-platform memory usage.",
url="https://github.com/EricRahm/atsy",
author="Eric Rahm",
author_email="erahm@mozilla.com",
license="MPL 2.0",
classifiers=[
"License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)"
],
packages=["atsy"],
install_requires=[
"selenium",
"marionette-client",
"psutil==3.5.0",
],
dependency_links=[
# We need to use a fork of psutil until USS calculations get integrated.
"git+ssh://git@github.com/ericrahm/psutil@release-3.4.2-uss#egg=psutil-3.5.0"
],
)
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from setuptools import setup
setup(
name="atsy",
version="0.0.1",
description="AreTheySlimYet",
long_description="A set of tools for measuring cross-browser, cross-platform memory usage.",
url="https://github.com/EricRahm/atsy",
author="Eric Rahm",
author_email="erahm@mozilla.com",
license="MPL 2.0",
classifiers=[
"License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)"
],
packages=["atsy"],
install_requires=[
"selenium",
"marionette-client",
"psutil==3.5.0",
],
dependency_links=[
# We need to use a fork of psutil until USS calculations get integrated.
"git+ssh://git@github.com/ericrahm/psutil@release-3.5.0#egg=psutil-3.5.0"
],
)
|
Update tag to use for forked psutil
|
Update tag to use for forked psutil
|
Python
|
mpl-2.0
|
EricRahm/atsy,EricRahm/atsy,EricRahm/atsy
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from setuptools import setup
setup(
name="atsy",
version="0.0.1",
description="AreTheySlimYet",
long_description="A set of tools for measuring cross-browser, cross-platform memory usage.",
url="https://github.com/EricRahm/atsy",
author="Eric Rahm",
author_email="erahm@mozilla.com",
license="MPL 2.0",
classifiers=[
"License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)"
],
packages=["atsy"],
install_requires=[
"selenium",
"marionette-client",
"psutil==3.5.0",
],
dependency_links=[
# We need to use a fork of psutil until USS calculations get integrated.
"git+ssh://git@github.com/ericrahm/psutil@release-3.4.2-uss#egg=psutil-3.5.0"
],
)
Update tag to use for forked psutil
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from setuptools import setup
setup(
name="atsy",
version="0.0.1",
description="AreTheySlimYet",
long_description="A set of tools for measuring cross-browser, cross-platform memory usage.",
url="https://github.com/EricRahm/atsy",
author="Eric Rahm",
author_email="erahm@mozilla.com",
license="MPL 2.0",
classifiers=[
"License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)"
],
packages=["atsy"],
install_requires=[
"selenium",
"marionette-client",
"psutil==3.5.0",
],
dependency_links=[
# We need to use a fork of psutil until USS calculations get integrated.
"git+ssh://git@github.com/ericrahm/psutil@release-3.5.0#egg=psutil-3.5.0"
],
)
|
<commit_before># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from setuptools import setup
setup(
name="atsy",
version="0.0.1",
description="AreTheySlimYet",
long_description="A set of tools for measuring cross-browser, cross-platform memory usage.",
url="https://github.com/EricRahm/atsy",
author="Eric Rahm",
author_email="erahm@mozilla.com",
license="MPL 2.0",
classifiers=[
"License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)"
],
packages=["atsy"],
install_requires=[
"selenium",
"marionette-client",
"psutil==3.5.0",
],
dependency_links=[
# We need to use a fork of psutil until USS calculations get integrated.
"git+ssh://git@github.com/ericrahm/psutil@release-3.4.2-uss#egg=psutil-3.5.0"
],
)
<commit_msg>Update tag to use for forked psutil<commit_after>
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from setuptools import setup
setup(
name="atsy",
version="0.0.1",
description="AreTheySlimYet",
long_description="A set of tools for measuring cross-browser, cross-platform memory usage.",
url="https://github.com/EricRahm/atsy",
author="Eric Rahm",
author_email="erahm@mozilla.com",
license="MPL 2.0",
classifiers=[
"License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)"
],
packages=["atsy"],
install_requires=[
"selenium",
"marionette-client",
"psutil==3.5.0",
],
dependency_links=[
# We need to use a fork of psutil until USS calculations get integrated.
"git+ssh://git@github.com/ericrahm/psutil@release-3.5.0#egg=psutil-3.5.0"
],
)
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from setuptools import setup
setup(
name="atsy",
version="0.0.1",
description="AreTheySlimYet",
long_description="A set of tools for measuring cross-browser, cross-platform memory usage.",
url="https://github.com/EricRahm/atsy",
author="Eric Rahm",
author_email="erahm@mozilla.com",
license="MPL 2.0",
classifiers=[
"License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)"
],
packages=["atsy"],
install_requires=[
"selenium",
"marionette-client",
"psutil==3.5.0",
],
dependency_links=[
# We need to use a fork of psutil until USS calculations get integrated.
"git+ssh://git@github.com/ericrahm/psutil@release-3.4.2-uss#egg=psutil-3.5.0"
],
)
Update tag to use for forked psutil# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from setuptools import setup
setup(
name="atsy",
version="0.0.1",
description="AreTheySlimYet",
long_description="A set of tools for measuring cross-browser, cross-platform memory usage.",
url="https://github.com/EricRahm/atsy",
author="Eric Rahm",
author_email="erahm@mozilla.com",
license="MPL 2.0",
classifiers=[
"License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)"
],
packages=["atsy"],
install_requires=[
"selenium",
"marionette-client",
"psutil==3.5.0",
],
dependency_links=[
# We need to use a fork of psutil until USS calculations get integrated.
"git+ssh://git@github.com/ericrahm/psutil@release-3.5.0#egg=psutil-3.5.0"
],
)
|
<commit_before># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from setuptools import setup
setup(
name="atsy",
version="0.0.1",
description="AreTheySlimYet",
long_description="A set of tools for measuring cross-browser, cross-platform memory usage.",
url="https://github.com/EricRahm/atsy",
author="Eric Rahm",
author_email="erahm@mozilla.com",
license="MPL 2.0",
classifiers=[
"License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)"
],
packages=["atsy"],
install_requires=[
"selenium",
"marionette-client",
"psutil==3.5.0",
],
dependency_links=[
# We need to use a fork of psutil until USS calculations get integrated.
"git+ssh://git@github.com/ericrahm/psutil@release-3.4.2-uss#egg=psutil-3.5.0"
],
)
<commit_msg>Update tag to use for forked psutil<commit_after># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from setuptools import setup
setup(
name="atsy",
version="0.0.1",
description="AreTheySlimYet",
long_description="A set of tools for measuring cross-browser, cross-platform memory usage.",
url="https://github.com/EricRahm/atsy",
author="Eric Rahm",
author_email="erahm@mozilla.com",
license="MPL 2.0",
classifiers=[
"License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)"
],
packages=["atsy"],
install_requires=[
"selenium",
"marionette-client",
"psutil==3.5.0",
],
dependency_links=[
# We need to use a fork of psutil until USS calculations get integrated.
"git+ssh://git@github.com/ericrahm/psutil@release-3.5.0#egg=psutil-3.5.0"
],
)
|
52fd4086b0ef1ac290b393b8cd534a042826b145
|
scripts/addStitleToBlastTab.py
|
scripts/addStitleToBlastTab.py
|
import sys, argparse
parser = argparse.ArgumentParser()
parser.add_argument('--db2Name', help='tab-separated database lookup: full name file for reference (eg nr or swissprot)')
parser.add_argument('-b','--blast', help='blast input file')
args = parser.parse_args()
blastOrder = []
blastD = {}
with open(args.blast, 'r') as f:
for line in f:
line = line.rstrip().split('\t')
#import pdb; pdb.set_trace()
blastOrder.append(line[1])
blastD[line[1]] = line
f.close()
#potentially huge file --> don't want this in memory
with open(args.db2Name, 'r') as f:
for line in f:
line = line.rstrip().split('\t')
hitInfo = blastD.get(line[0], None)
if hitInfo is not None:
hitInfo.extend(line[1:])
f.close()
outExtendedTab = open(args.blast, 'w')
for hit in blastOrder:
outExtendedTab.write('\t'.join(map(str,blastD[hit])) + '\n')
|
import sys, argparse
parser = argparse.ArgumentParser()
parser.add_argument('--db2Name', help='tab-separated database lookup: full name file for reference (eg nr or swissprot)')
parser.add_argument('-b','--blast', help='blast input file')
args = parser.parse_args()
blastOrder = []
blastD = {}
with open(args.blast, 'r') as f:
for line in f:
line = line.rstrip().split('\t')
#import pdb; pdb.set_trace()
blastOrder.append(line[1])
blastD[line[1]] = line
f.close()
#potentially huge file --> don't want this in memory
with open(args.db2Name, 'r') as f:
for line in f:
line = line.rstrip().split('\t')
hitInfo = blastD.get(line[0], None)
if hitInfo is not None:
hitInfo.extend(line[1:])
f.close()
outExtendedTab = open(args.blast, 'w')
for hit in blastOrder:
outExtendedTab.write('\t'.join(map(str,blastD[hit])) + '\n')
|
Fix mixed indents. replaced tabs with spaces
|
Fix mixed indents. replaced tabs with spaces
|
Python
|
bsd-3-clause
|
bluegenes/MakeMyTranscriptome,bluegenes/MakeMyTranscriptome,bluegenes/MakeMyTranscriptome
|
import sys, argparse
parser = argparse.ArgumentParser()
parser.add_argument('--db2Name', help='tab-separated database lookup: full name file for reference (eg nr or swissprot)')
parser.add_argument('-b','--blast', help='blast input file')
args = parser.parse_args()
blastOrder = []
blastD = {}
with open(args.blast, 'r') as f:
for line in f:
line = line.rstrip().split('\t')
#import pdb; pdb.set_trace()
blastOrder.append(line[1])
blastD[line[1]] = line
f.close()
#potentially huge file --> don't want this in memory
with open(args.db2Name, 'r') as f:
for line in f:
line = line.rstrip().split('\t')
hitInfo = blastD.get(line[0], None)
if hitInfo is not None:
hitInfo.extend(line[1:])
f.close()
outExtendedTab = open(args.blast, 'w')
for hit in blastOrder:
outExtendedTab.write('\t'.join(map(str,blastD[hit])) + '\n')
Fix mixed indents. replaced tabs with spaces
|
import sys, argparse
parser = argparse.ArgumentParser()
parser.add_argument('--db2Name', help='tab-separated database lookup: full name file for reference (eg nr or swissprot)')
parser.add_argument('-b','--blast', help='blast input file')
args = parser.parse_args()
blastOrder = []
blastD = {}
with open(args.blast, 'r') as f:
for line in f:
line = line.rstrip().split('\t')
#import pdb; pdb.set_trace()
blastOrder.append(line[1])
blastD[line[1]] = line
f.close()
#potentially huge file --> don't want this in memory
with open(args.db2Name, 'r') as f:
for line in f:
line = line.rstrip().split('\t')
hitInfo = blastD.get(line[0], None)
if hitInfo is not None:
hitInfo.extend(line[1:])
f.close()
outExtendedTab = open(args.blast, 'w')
for hit in blastOrder:
outExtendedTab.write('\t'.join(map(str,blastD[hit])) + '\n')
|
<commit_before>import sys, argparse
parser = argparse.ArgumentParser()
parser.add_argument('--db2Name', help='tab-separated database lookup: full name file for reference (eg nr or swissprot)')
parser.add_argument('-b','--blast', help='blast input file')
args = parser.parse_args()
blastOrder = []
blastD = {}
with open(args.blast, 'r') as f:
for line in f:
line = line.rstrip().split('\t')
#import pdb; pdb.set_trace()
blastOrder.append(line[1])
blastD[line[1]] = line
f.close()
#potentially huge file --> don't want this in memory
with open(args.db2Name, 'r') as f:
for line in f:
line = line.rstrip().split('\t')
hitInfo = blastD.get(line[0], None)
if hitInfo is not None:
hitInfo.extend(line[1:])
f.close()
outExtendedTab = open(args.blast, 'w')
for hit in blastOrder:
outExtendedTab.write('\t'.join(map(str,blastD[hit])) + '\n')
<commit_msg>Fix mixed indents. replaced tabs with spaces<commit_after>
|
import sys, argparse

# Append database "full name" (stitle) columns to each hit row of a blast
# tabular file, rewriting the blast file in place while preserving row order.
parser = argparse.ArgumentParser()
parser.add_argument('--db2Name', help='tab-separated database lookup: full name file for reference (eg nr or swissprot)')
parser.add_argument('-b', '--blast', help='blast input file')
args = parser.parse_args()

# Remember the order of subject IDs so the rewritten file keeps the
# original ordering; blastD maps subject ID -> its (mutable) row fields.
blastOrder = []
blastD = {}
with open(args.blast, 'r') as f:
    for line in f:
        fields = line.rstrip().split('\t')
        blastOrder.append(fields[1])
        blastD[fields[1]] = fields

# The lookup file is potentially huge, so stream it line by line instead of
# loading it into memory; extend matching rows with the extra name columns.
with open(args.db2Name, 'r') as f:
    for line in f:
        fields = line.rstrip().split('\t')
        hitInfo = blastD.get(fields[0])
        if hitInfo is not None:
            hitInfo.extend(fields[1:])

# Rewrite the blast file with the extended rows. Using `with` guarantees the
# handle is flushed and closed (the original left it open, risking truncated
# output); the redundant f.close() calls inside the earlier `with` blocks
# were also dropped.
with open(args.blast, 'w') as outExtendedTab:
    for hit in blastOrder:
        outExtendedTab.write('\t'.join(map(str, blastD[hit])) + '\n')
|
import sys, argparse
parser = argparse.ArgumentParser()
parser.add_argument('--db2Name', help='tab-separated database lookup: full name file for reference (eg nr or swissprot)')
parser.add_argument('-b','--blast', help='blast input file')
args = parser.parse_args()
blastOrder = []
blastD = {}
with open(args.blast, 'r') as f:
for line in f:
line = line.rstrip().split('\t')
#import pdb; pdb.set_trace()
blastOrder.append(line[1])
blastD[line[1]] = line
f.close()
#potentially huge file --> don't want this in memory
with open(args.db2Name, 'r') as f:
for line in f:
line = line.rstrip().split('\t')
hitInfo = blastD.get(line[0], None)
if hitInfo is not None:
hitInfo.extend(line[1:])
f.close()
outExtendedTab = open(args.blast, 'w')
for hit in blastOrder:
outExtendedTab.write('\t'.join(map(str,blastD[hit])) + '\n')
Fix mixed indents. replaced tabs with spacesimport sys, argparse
parser = argparse.ArgumentParser()
parser.add_argument('--db2Name', help='tab-separated database lookup: full name file for reference (eg nr or swissprot)')
parser.add_argument('-b','--blast', help='blast input file')
args = parser.parse_args()
blastOrder = []
blastD = {}
with open(args.blast, 'r') as f:
for line in f:
line = line.rstrip().split('\t')
#import pdb; pdb.set_trace()
blastOrder.append(line[1])
blastD[line[1]] = line
f.close()
#potentially huge file --> don't want this in memory
with open(args.db2Name, 'r') as f:
for line in f:
line = line.rstrip().split('\t')
hitInfo = blastD.get(line[0], None)
if hitInfo is not None:
hitInfo.extend(line[1:])
f.close()
outExtendedTab = open(args.blast, 'w')
for hit in blastOrder:
outExtendedTab.write('\t'.join(map(str,blastD[hit])) + '\n')
|
<commit_before>import sys, argparse
parser = argparse.ArgumentParser()
parser.add_argument('--db2Name', help='tab-separated database lookup: full name file for reference (eg nr or swissprot)')
parser.add_argument('-b','--blast', help='blast input file')
args = parser.parse_args()
blastOrder = []
blastD = {}
with open(args.blast, 'r') as f:
for line in f:
line = line.rstrip().split('\t')
#import pdb; pdb.set_trace()
blastOrder.append(line[1])
blastD[line[1]] = line
f.close()
#potentially huge file --> don't want this in memory
with open(args.db2Name, 'r') as f:
for line in f:
line = line.rstrip().split('\t')
hitInfo = blastD.get(line[0], None)
if hitInfo is not None:
hitInfo.extend(line[1:])
f.close()
outExtendedTab = open(args.blast, 'w')
for hit in blastOrder:
outExtendedTab.write('\t'.join(map(str,blastD[hit])) + '\n')
<commit_msg>Fix mixed indents. replaced tabs with spaces<commit_after>import sys, argparse
parser = argparse.ArgumentParser()
parser.add_argument('--db2Name', help='tab-separated database lookup: full name file for reference (eg nr or swissprot)')
parser.add_argument('-b','--blast', help='blast input file')
args = parser.parse_args()
blastOrder = []
blastD = {}
with open(args.blast, 'r') as f:
for line in f:
line = line.rstrip().split('\t')
#import pdb; pdb.set_trace()
blastOrder.append(line[1])
blastD[line[1]] = line
f.close()
#potentially huge file --> don't want this in memory
with open(args.db2Name, 'r') as f:
for line in f:
line = line.rstrip().split('\t')
hitInfo = blastD.get(line[0], None)
if hitInfo is not None:
hitInfo.extend(line[1:])
f.close()
outExtendedTab = open(args.blast, 'w')
for hit in blastOrder:
outExtendedTab.write('\t'.join(map(str,blastD[hit])) + '\n')
|
cacc51595ec59c90b7f6ea60469bc804593916f3
|
queue.py
|
queue.py
|
#!/usr/bin/env python
'''Implementation of a simple queue data structure.
The queue has `enqueue`, `dequeue`, and `peek` methods.
Items in the queue have `value` and `behind` attributes.
The queue has a `front` attribute.
'''
class Item(object):
def __init__(self, value, behind=None):
self.value = value
self.behind = behind
def __str__(self):
return self.value
class Queue(object):
def __init__(self, front=None):
pass
def enqueue(self, value):
pass
def dequeue(self):
pass
def peek(self):
pass
|
#!/usr/bin/env python
'''Implementation of a simple queue data structure.
The queue has `enqueue`, `dequeue`, and `peek` methods.
Items in the queue have `value` and `behind` attributes.
The queue has a `front` attribute.
'''
class Item(object):
def __init__(self, value, behind=None):
self.value = value
self.behind = behind
def __str__(self):
return self.value
class Queue(object):
def __init__(self, front=None):
self.front = front
def enqueue(self, value):
pass
def dequeue(self):
pass
def peek(self):
pass
|
Add init function for Queue class
|
Add init function for Queue class
|
Python
|
mit
|
jwarren116/data-structures-deux
|
#!/usr/bin/env python
'''Implementation of a simple queue data structure.
The queue has `enqueue`, `dequeue`, and `peek` methods.
Items in the queue have `value` and `behind` attributes.
The queue has a `front` attribute.
'''
class Item(object):
def __init__(self, value, behind=None):
self.value = value
self.behind = behind
def __str__(self):
return self.value
class Queue(object):
def __init__(self, front=None):
pass
def enqueue(self, value):
pass
def dequeue(self):
pass
def peek(self):
pass
Add init function for Queue class
|
#!/usr/bin/env python
'''Implementation of a simple queue data structure.
The queue has `enqueue`, `dequeue`, and `peek` methods.
Items in the queue have `value` and `behind` attributes.
The queue has a `front` attribute.
'''
class Item(object):
def __init__(self, value, behind=None):
self.value = value
self.behind = behind
def __str__(self):
return self.value
class Queue(object):
def __init__(self, front=None):
self.front = front
def enqueue(self, value):
pass
def dequeue(self):
pass
def peek(self):
pass
|
<commit_before>#!/usr/bin/env python
'''Implementation of a simple queue data structure.
The queue has `enqueue`, `dequeue`, and `peek` methods.
Items in the queue have `value` and `behind` attributes.
The queue has a `front` attribute.
'''
class Item(object):
def __init__(self, value, behind=None):
self.value = value
self.behind = behind
def __str__(self):
return self.value
class Queue(object):
def __init__(self, front=None):
pass
def enqueue(self, value):
pass
def dequeue(self):
pass
def peek(self):
pass
<commit_msg>Add init function for Queue class<commit_after>
|
#!/usr/bin/env python
'''Implementation of a simple queue data structure.

The queue has `enqueue`, `dequeue`, and `peek` methods.
Items in the queue have `value` and `behind` attributes.
The queue has a `front` attribute.
'''


class Item(object):
    """A single queue node: a value plus a link to the node behind it."""

    def __init__(self, value, behind=None):
        self.value = value
        # Reference to the next Item toward the back of the queue.
        self.behind = behind

    def __str__(self):
        # __str__ must return a str; the original returned `self.value`
        # unchanged, which raises TypeError for any non-string value.
        return str(self.value)


class Queue(object):
    """FIFO queue; `front` is the next Item to be dequeued."""

    def __init__(self, front=None):
        self.front = front

    def enqueue(self, value):
        # Not yet implemented.
        pass

    def dequeue(self):
        # Not yet implemented.
        pass

    def peek(self):
        # Not yet implemented.
        pass
|
#!/usr/bin/env python
'''Implementation of a simple queue data structure.
The queue has `enqueue`, `dequeue`, and `peek` methods.
Items in the queue have `value` and `behind` attributes.
The queue has a `front` attribute.
'''
class Item(object):
def __init__(self, value, behind=None):
self.value = value
self.behind = behind
def __str__(self):
return self.value
class Queue(object):
def __init__(self, front=None):
pass
def enqueue(self, value):
pass
def dequeue(self):
pass
def peek(self):
pass
Add init function for Queue class#!/usr/bin/env python
'''Implementation of a simple queue data structure.
The queue has `enqueue`, `dequeue`, and `peek` methods.
Items in the queue have `value` and `behind` attributes.
The queue has a `front` attribute.
'''
class Item(object):
def __init__(self, value, behind=None):
self.value = value
self.behind = behind
def __str__(self):
return self.value
class Queue(object):
def __init__(self, front=None):
self.front = front
def enqueue(self, value):
pass
def dequeue(self):
pass
def peek(self):
pass
|
<commit_before>#!/usr/bin/env python
'''Implementation of a simple queue data structure.
The queue has `enqueue`, `dequeue`, and `peek` methods.
Items in the queue have `value` and `behind` attributes.
The queue has a `front` attribute.
'''
class Item(object):
def __init__(self, value, behind=None):
self.value = value
self.behind = behind
def __str__(self):
return self.value
class Queue(object):
def __init__(self, front=None):
pass
def enqueue(self, value):
pass
def dequeue(self):
pass
def peek(self):
pass
<commit_msg>Add init function for Queue class<commit_after>#!/usr/bin/env python
'''Implementation of a simple queue data structure.
The queue has `enqueue`, `dequeue`, and `peek` methods.
Items in the queue have `value` and `behind` attributes.
The queue has a `front` attribute.
'''
class Item(object):
def __init__(self, value, behind=None):
self.value = value
self.behind = behind
def __str__(self):
return self.value
class Queue(object):
def __init__(self, front=None):
self.front = front
def enqueue(self, value):
pass
def dequeue(self):
pass
def peek(self):
pass
|
0595cc06357a572ef604d6c3e0b560974720524c
|
spacy/tests/regression/test_issue595.py
|
spacy/tests/regression/test_issue595.py
|
import pytest
import spacy
@pytest.mark.models
def test_not_lemmatize_base_forms():
nlp = spacy.load('en', parser=False)
doc = nlp(u"Don't feed the dog")
feed = doc[2]
feed.tag_ = u'VB'
assert feed.text == u'feed'
assert feed.lemma_ == u'feed'
|
from __future__ import unicode_literals
import pytest
from ...symbols import POS, VERB, VerbForm_inf
from ...tokens import Doc
from ...vocab import Vocab
from ...lemmatizer import Lemmatizer
@pytest.fixture
def index():
return {'verb': {}}
@pytest.fixture
def exceptions():
return {'verb': {}}
@pytest.fixture
def rules():
return {"verb": [["ed", "e"]]}
@pytest.fixture
def lemmatizer(index, exceptions, rules):
return Lemmatizer(index, exceptions, rules)
@pytest.fixture
def tag_map():
return {'VB': {POS: VERB, 'morph': VerbForm_inf}}
@pytest.fixture
def vocab(lemmatizer, tag_map):
return Vocab(lemmatizer=lemmatizer, tag_map=tag_map)
def test_not_lemmatize_base_forms(vocab, lemmatizer):
doc = Doc(vocab, words=["Do", "n't", "feed", "the", "dog"])
feed = doc[2]
feed.tag_ = u'VB'
assert feed.text == u'feed'
assert feed.lemma_ == u'feed'
|
Change test595 to mock data, instead of requiring model.
|
Change test595 to mock data, instead of requiring model.
|
Python
|
mit
|
honnibal/spaCy,aikramer2/spaCy,Gregory-Howard/spaCy,aikramer2/spaCy,raphael0202/spaCy,recognai/spaCy,Gregory-Howard/spaCy,banglakit/spaCy,honnibal/spaCy,Gregory-Howard/spaCy,spacy-io/spaCy,recognai/spaCy,raphael0202/spaCy,spacy-io/spaCy,explosion/spaCy,explosion/spaCy,recognai/spaCy,recognai/spaCy,oroszgy/spaCy.hu,aikramer2/spaCy,banglakit/spaCy,raphael0202/spaCy,aikramer2/spaCy,banglakit/spaCy,oroszgy/spaCy.hu,explosion/spaCy,aikramer2/spaCy,oroszgy/spaCy.hu,explosion/spaCy,recognai/spaCy,spacy-io/spaCy,oroszgy/spaCy.hu,banglakit/spaCy,raphael0202/spaCy,Gregory-Howard/spaCy,explosion/spaCy,spacy-io/spaCy,Gregory-Howard/spaCy,spacy-io/spaCy,oroszgy/spaCy.hu,honnibal/spaCy,raphael0202/spaCy,recognai/spaCy,banglakit/spaCy,banglakit/spaCy,oroszgy/spaCy.hu,spacy-io/spaCy,raphael0202/spaCy,honnibal/spaCy,explosion/spaCy,Gregory-Howard/spaCy,aikramer2/spaCy
|
import pytest
import spacy
@pytest.mark.models
def test_not_lemmatize_base_forms():
nlp = spacy.load('en', parser=False)
doc = nlp(u"Don't feed the dog")
feed = doc[2]
feed.tag_ = u'VB'
assert feed.text == u'feed'
assert feed.lemma_ == u'feed'
Change test595 to mock data, instead of requiring model.
|
from __future__ import unicode_literals
import pytest
from ...symbols import POS, VERB, VerbForm_inf
from ...tokens import Doc
from ...vocab import Vocab
from ...lemmatizer import Lemmatizer
@pytest.fixture
def index():
return {'verb': {}}
@pytest.fixture
def exceptions():
return {'verb': {}}
@pytest.fixture
def rules():
return {"verb": [["ed", "e"]]}
@pytest.fixture
def lemmatizer(index, exceptions, rules):
return Lemmatizer(index, exceptions, rules)
@pytest.fixture
def tag_map():
return {'VB': {POS: VERB, 'morph': VerbForm_inf}}
@pytest.fixture
def vocab(lemmatizer, tag_map):
return Vocab(lemmatizer=lemmatizer, tag_map=tag_map)
def test_not_lemmatize_base_forms(vocab, lemmatizer):
doc = Doc(vocab, words=["Do", "n't", "feed", "the", "dog"])
feed = doc[2]
feed.tag_ = u'VB'
assert feed.text == u'feed'
assert feed.lemma_ == u'feed'
|
<commit_before>import pytest
import spacy
@pytest.mark.models
def test_not_lemmatize_base_forms():
nlp = spacy.load('en', parser=False)
doc = nlp(u"Don't feed the dog")
feed = doc[2]
feed.tag_ = u'VB'
assert feed.text == u'feed'
assert feed.lemma_ == u'feed'
<commit_msg>Change test595 to mock data, instead of requiring model.<commit_after>
|
# Regression test for spaCy issue #595: assigning the base-form tag 'VB' to a
# token must leave its lemma equal to its surface form ("feed" stays "feed").
# Uses mocked lemmatizer data instead of requiring a downloaded model.
from __future__ import unicode_literals
import pytest

from ...symbols import POS, VERB, VerbForm_inf
from ...tokens import Doc
from ...vocab import Vocab
from ...lemmatizer import Lemmatizer


@pytest.fixture
def index():
    # Empty lemma index: no known base forms.
    return {'verb': {}}


@pytest.fixture
def exceptions():
    # No lemmatization exceptions.
    return {'verb': {}}


@pytest.fixture
def rules():
    # A single verb suffix rule ("ed" -> "e"); presumably this would mangle
    # "feed" if base forms were (wrongly) lemmatized — TODO confirm against
    # Lemmatizer's rule semantics.
    return {"verb": [["ed", "e"]]}


@pytest.fixture
def lemmatizer(index, exceptions, rules):
    return Lemmatizer(index, exceptions, rules)


@pytest.fixture
def tag_map():
    # 'VB' is mapped to an infinitive verb, i.e. already a base form.
    return {'VB': {POS: VERB, 'morph': VerbForm_inf}}


@pytest.fixture
def vocab(lemmatizer, tag_map):
    return Vocab(lemmatizer=lemmatizer, tag_map=tag_map)


def test_not_lemmatize_base_forms(vocab, lemmatizer):
    doc = Doc(vocab, words=["Do", "n't", "feed", "the", "dog"])
    feed = doc[2]
    # Assigning the fine-grained tag triggers lemma computation.
    feed.tag_ = u'VB'
    assert feed.text == u'feed'
    assert feed.lemma_ == u'feed'
|
import pytest
import spacy
@pytest.mark.models
def test_not_lemmatize_base_forms():
nlp = spacy.load('en', parser=False)
doc = nlp(u"Don't feed the dog")
feed = doc[2]
feed.tag_ = u'VB'
assert feed.text == u'feed'
assert feed.lemma_ == u'feed'
Change test595 to mock data, instead of requiring model.from __future__ import unicode_literals
import pytest
from ...symbols import POS, VERB, VerbForm_inf
from ...tokens import Doc
from ...vocab import Vocab
from ...lemmatizer import Lemmatizer
@pytest.fixture
def index():
return {'verb': {}}
@pytest.fixture
def exceptions():
return {'verb': {}}
@pytest.fixture
def rules():
return {"verb": [["ed", "e"]]}
@pytest.fixture
def lemmatizer(index, exceptions, rules):
return Lemmatizer(index, exceptions, rules)
@pytest.fixture
def tag_map():
return {'VB': {POS: VERB, 'morph': VerbForm_inf}}
@pytest.fixture
def vocab(lemmatizer, tag_map):
return Vocab(lemmatizer=lemmatizer, tag_map=tag_map)
def test_not_lemmatize_base_forms(vocab, lemmatizer):
doc = Doc(vocab, words=["Do", "n't", "feed", "the", "dog"])
feed = doc[2]
feed.tag_ = u'VB'
assert feed.text == u'feed'
assert feed.lemma_ == u'feed'
|
<commit_before>import pytest
import spacy
@pytest.mark.models
def test_not_lemmatize_base_forms():
nlp = spacy.load('en', parser=False)
doc = nlp(u"Don't feed the dog")
feed = doc[2]
feed.tag_ = u'VB'
assert feed.text == u'feed'
assert feed.lemma_ == u'feed'
<commit_msg>Change test595 to mock data, instead of requiring model.<commit_after>from __future__ import unicode_literals
import pytest
from ...symbols import POS, VERB, VerbForm_inf
from ...tokens import Doc
from ...vocab import Vocab
from ...lemmatizer import Lemmatizer
@pytest.fixture
def index():
return {'verb': {}}
@pytest.fixture
def exceptions():
return {'verb': {}}
@pytest.fixture
def rules():
return {"verb": [["ed", "e"]]}
@pytest.fixture
def lemmatizer(index, exceptions, rules):
return Lemmatizer(index, exceptions, rules)
@pytest.fixture
def tag_map():
return {'VB': {POS: VERB, 'morph': VerbForm_inf}}
@pytest.fixture
def vocab(lemmatizer, tag_map):
return Vocab(lemmatizer=lemmatizer, tag_map=tag_map)
def test_not_lemmatize_base_forms(vocab, lemmatizer):
doc = Doc(vocab, words=["Do", "n't", "feed", "the", "dog"])
feed = doc[2]
feed.tag_ = u'VB'
assert feed.text == u'feed'
assert feed.lemma_ == u'feed'
|
2607d142a32ad31fd4c432c0830c3173daee79fb
|
src/util/results.py
|
src/util/results.py
|
import datetime
from context_manager.db_context_manager import DBContextManager
from util.constants import QUERIES
def export_results(data, controller_name, trajectory_name, database_path):
creation_datetime = datetime.datetime.now()
with DBContextManager(database_path) as cursor:
table_name = ('_'.join([controller_name, trajectory_name, creation_datetime.strftime('%Y_%m_%d_%H_%M_%S')]))
cursor.execute(QUERIES['create_sims'])
cursor.execute(QUERIES['insert_sim'], (table_name, creation_datetime))
cursor.execute(QUERIES['create_sim'].format(table_name))
for i in range(len(data['t'])):
cursor.execute(
QUERIES['insert_data'].format(table_name),
(data['t'][i], data['x'][i], data['x_ref'][i],
data['y'][i], data['y_ref'][i], data['theta'][i],
data['theta_ref'][i], data['v_c'][i], data['w_c'][i])
)
|
import datetime
from context_manager.db_context_manager import DBContextManager
from util.constants import QUERIES
def export_results(data, controller_name, trajectory_name, database_path):
def get_table_name(controller, trajectory, date_time):
return '_'.join([controller,
trajectory,
date_time.strftime('%Y_%m_%d_%H_%M_%S')])
date_time = datetime.datetime.now()
with DBContextManager(database_path) as cursor:
table_name = get_table_name(
controller_name,
trajectory_name,
date_time
)
cursor.execute(QUERIES['create_sims'])
cursor.execute(QUERIES['insert_sim'], (table_name, date_time))
cursor.execute(QUERIES['create_sim'].format(table_name))
for i in range(len(data['t'])):
cursor.execute(
QUERIES['insert_data'].format(table_name),
(data['t'][i], data['x'][i], data['x_ref'][i],
data['y'][i], data['y_ref'][i], data['theta'][i],
data['theta_ref'][i], data['v_c'][i], data['w_c'][i])
)
|
Create inner function and rename variables
|
refactor: Create inner function and rename variables
|
Python
|
mit
|
bit0001/trajectory_tracking,bit0001/trajectory_tracking
|
import datetime
from context_manager.db_context_manager import DBContextManager
from util.constants import QUERIES
def export_results(data, controller_name, trajectory_name, database_path):
creation_datetime = datetime.datetime.now()
with DBContextManager(database_path) as cursor:
table_name = ('_'.join([controller_name, trajectory_name, creation_datetime.strftime('%Y_%m_%d_%H_%M_%S')]))
cursor.execute(QUERIES['create_sims'])
cursor.execute(QUERIES['insert_sim'], (table_name, creation_datetime))
cursor.execute(QUERIES['create_sim'].format(table_name))
for i in range(len(data['t'])):
cursor.execute(
QUERIES['insert_data'].format(table_name),
(data['t'][i], data['x'][i], data['x_ref'][i],
data['y'][i], data['y_ref'][i], data['theta'][i],
data['theta_ref'][i], data['v_c'][i], data['w_c'][i])
)
refactor: Create inner function and rename variables
|
import datetime
from context_manager.db_context_manager import DBContextManager
from util.constants import QUERIES
def export_results(data, controller_name, trajectory_name, database_path):
def get_table_name(controller, trajectory, date_time):
return '_'.join([controller,
trajectory,
date_time.strftime('%Y_%m_%d_%H_%M_%S')])
date_time = datetime.datetime.now()
with DBContextManager(database_path) as cursor:
table_name = get_table_name(
controller_name,
trajectory_name,
date_time
)
cursor.execute(QUERIES['create_sims'])
cursor.execute(QUERIES['insert_sim'], (table_name, date_time))
cursor.execute(QUERIES['create_sim'].format(table_name))
for i in range(len(data['t'])):
cursor.execute(
QUERIES['insert_data'].format(table_name),
(data['t'][i], data['x'][i], data['x_ref'][i],
data['y'][i], data['y_ref'][i], data['theta'][i],
data['theta_ref'][i], data['v_c'][i], data['w_c'][i])
)
|
<commit_before>import datetime
from context_manager.db_context_manager import DBContextManager
from util.constants import QUERIES
def export_results(data, controller_name, trajectory_name, database_path):
creation_datetime = datetime.datetime.now()
with DBContextManager(database_path) as cursor:
table_name = ('_'.join([controller_name, trajectory_name, creation_datetime.strftime('%Y_%m_%d_%H_%M_%S')]))
cursor.execute(QUERIES['create_sims'])
cursor.execute(QUERIES['insert_sim'], (table_name, creation_datetime))
cursor.execute(QUERIES['create_sim'].format(table_name))
for i in range(len(data['t'])):
cursor.execute(
QUERIES['insert_data'].format(table_name),
(data['t'][i], data['x'][i], data['x_ref'][i],
data['y'][i], data['y_ref'][i], data['theta'][i],
data['theta_ref'][i], data['v_c'][i], data['w_c'][i])
)
<commit_msg>refactor: Create inner function and rename variables<commit_after>
|
import datetime

from context_manager.db_context_manager import DBContextManager
from util.constants import QUERIES


def export_results(data, controller_name, trajectory_name, database_path):
    """Persist one simulation run into the results database.

    Creates (if needed) the master simulations table, registers this run in
    it, creates a per-run table named
    ``<controller>_<trajectory>_<timestamp>``, and inserts one row per
    sampled time step from ``data``.
    """
    def get_table_name(controller, trajectory, date_time):
        # The timestamp keeps repeated runs of the same setup distinct.
        stamp = date_time.strftime('%Y_%m_%d_%H_%M_%S')
        return '_'.join([controller, trajectory, stamp])

    date_time = datetime.datetime.now()
    # Column order must match the placeholders in QUERIES['insert_data'].
    columns = ('t', 'x', 'x_ref', 'y', 'y_ref', 'theta', 'theta_ref',
               'v_c', 'w_c')

    with DBContextManager(database_path) as cursor:
        table_name = get_table_name(controller_name, trajectory_name,
                                    date_time)
        cursor.execute(QUERIES['create_sims'])
        cursor.execute(QUERIES['insert_sim'], (table_name, date_time))
        cursor.execute(QUERIES['create_sim'].format(table_name))
        insert_query = QUERIES['insert_data'].format(table_name)
        for i in range(len(data['t'])):
            cursor.execute(insert_query,
                           tuple(data[key][i] for key in columns))
|
import datetime
from context_manager.db_context_manager import DBContextManager
from util.constants import QUERIES
def export_results(data, controller_name, trajectory_name, database_path):
creation_datetime = datetime.datetime.now()
with DBContextManager(database_path) as cursor:
table_name = ('_'.join([controller_name, trajectory_name, creation_datetime.strftime('%Y_%m_%d_%H_%M_%S')]))
cursor.execute(QUERIES['create_sims'])
cursor.execute(QUERIES['insert_sim'], (table_name, creation_datetime))
cursor.execute(QUERIES['create_sim'].format(table_name))
for i in range(len(data['t'])):
cursor.execute(
QUERIES['insert_data'].format(table_name),
(data['t'][i], data['x'][i], data['x_ref'][i],
data['y'][i], data['y_ref'][i], data['theta'][i],
data['theta_ref'][i], data['v_c'][i], data['w_c'][i])
)
refactor: Create inner function and rename variablesimport datetime
from context_manager.db_context_manager import DBContextManager
from util.constants import QUERIES
def export_results(data, controller_name, trajectory_name, database_path):
def get_table_name(controller, trajectory, date_time):
return '_'.join([controller,
trajectory,
date_time.strftime('%Y_%m_%d_%H_%M_%S')])
date_time = datetime.datetime.now()
with DBContextManager(database_path) as cursor:
table_name = get_table_name(
controller_name,
trajectory_name,
date_time
)
cursor.execute(QUERIES['create_sims'])
cursor.execute(QUERIES['insert_sim'], (table_name, date_time))
cursor.execute(QUERIES['create_sim'].format(table_name))
for i in range(len(data['t'])):
cursor.execute(
QUERIES['insert_data'].format(table_name),
(data['t'][i], data['x'][i], data['x_ref'][i],
data['y'][i], data['y_ref'][i], data['theta'][i],
data['theta_ref'][i], data['v_c'][i], data['w_c'][i])
)
|
<commit_before>import datetime
from context_manager.db_context_manager import DBContextManager
from util.constants import QUERIES
def export_results(data, controller_name, trajectory_name, database_path):
creation_datetime = datetime.datetime.now()
with DBContextManager(database_path) as cursor:
table_name = ('_'.join([controller_name, trajectory_name, creation_datetime.strftime('%Y_%m_%d_%H_%M_%S')]))
cursor.execute(QUERIES['create_sims'])
cursor.execute(QUERIES['insert_sim'], (table_name, creation_datetime))
cursor.execute(QUERIES['create_sim'].format(table_name))
for i in range(len(data['t'])):
cursor.execute(
QUERIES['insert_data'].format(table_name),
(data['t'][i], data['x'][i], data['x_ref'][i],
data['y'][i], data['y_ref'][i], data['theta'][i],
data['theta_ref'][i], data['v_c'][i], data['w_c'][i])
)
<commit_msg>refactor: Create inner function and rename variables<commit_after>import datetime
from context_manager.db_context_manager import DBContextManager
from util.constants import QUERIES
def export_results(data, controller_name, trajectory_name, database_path):
def get_table_name(controller, trajectory, date_time):
return '_'.join([controller,
trajectory,
date_time.strftime('%Y_%m_%d_%H_%M_%S')])
date_time = datetime.datetime.now()
with DBContextManager(database_path) as cursor:
table_name = get_table_name(
controller_name,
trajectory_name,
date_time
)
cursor.execute(QUERIES['create_sims'])
cursor.execute(QUERIES['insert_sim'], (table_name, date_time))
cursor.execute(QUERIES['create_sim'].format(table_name))
for i in range(len(data['t'])):
cursor.execute(
QUERIES['insert_data'].format(table_name),
(data['t'][i], data['x'][i], data['x_ref'][i],
data['y'][i], data['y_ref'][i], data['theta'][i],
data['theta_ref'][i], data['v_c'][i], data['w_c'][i])
)
|
5d8a09ebff9cc8a8e8bdf4bff3963cee7a1aae6a
|
tools/skp/page_sets/skia_ebay_desktop.py
|
tools/skp/page_sets/skia_ebay_desktop.py
|
# Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=W0401,W0614
from telemetry import story
from telemetry.page import page as page_module
from telemetry.page import shared_page_state
class SkiaDesktopPage(page_module.Page):
def __init__(self, url, page_set):
super(SkiaDesktopPage, self).__init__(
url=url,
name=url,
page_set=page_set,
shared_page_state_class=shared_page_state.SharedDesktopPageState)
self.archive_data_file = 'data/skia_ebay_desktop.json'
def RunNavigateSteps(self, action_runner):
action_runner.Navigate(self.url)
action_runner.Wait(15)
class SkiaEbayDesktopPageSet(story.StorySet):
""" Pages designed to represent the median, not highly optimized web """
def __init__(self):
super(SkiaEbayDesktopPageSet, self).__init__(
archive_data_file='data/skia_ebay_desktop.json')
urls_list = [
# go/skia-skps-3-2019
'http://www.ebay.com',
]
for url in urls_list:
self.AddStory(SkiaDesktopPage(url, self))
|
# Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# pylint: disable=W0401,W0614

from telemetry import story
from telemetry.page import page as page_module
from telemetry.page import shared_page_state


class SkiaDesktopPage(page_module.Page):
    """A single desktop page recorded/replayed via the ebay WPR archive."""

    def __init__(self, url, page_set):
        super(SkiaDesktopPage, self).__init__(
            url=url,
            name=url,
            page_set=page_set,
            shared_page_state_class=shared_page_state.SharedDesktopPageState)
        self.archive_data_file = 'data/skia_ebay_desktop.json'

    def RunNavigateSteps(self, action_runner):
        # Navigation timeout raised to 120s (skia:11898): the ebay pageset
        # needed more wait time than the default to finish loading.
        action_runner.Navigate(self.url, timeout_in_seconds=120)


class SkiaEbayDesktopPageSet(story.StorySet):
    """ Pages designed to represent the median, not highly optimized web """

    def __init__(self):
        super(SkiaEbayDesktopPageSet, self).__init__(
            archive_data_file='data/skia_ebay_desktop.json')

        urls_list = [
            # go/skia-skps-3-2019
            'http://www.ebay.com',
        ]

        # One story per URL; all share the same archive file above.
        for url in urls_list:
            self.AddStory(SkiaDesktopPage(url, self))
|
Add wait time to ebay pageset
|
Add wait time to ebay pageset
Bug: skia:11898
Change-Id: I0bb58f1d8e9c6ad48148d50b840f152fc158f071
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/400538
Reviewed-by: Ravi Mistry <9fa2e7438b8cb730f96b74865492597170561628@google.com>
Commit-Queue: Ravi Mistry <9fa2e7438b8cb730f96b74865492597170561628@google.com>
|
Python
|
bsd-3-clause
|
aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,google/skia,aosp-mirror/platform_external_skia,google/skia,google/skia,aosp-mirror/platform_external_skia,google/skia,aosp-mirror/platform_external_skia,google/skia,google/skia,aosp-mirror/platform_external_skia,google/skia,google/skia,google/skia,google/skia,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia
|
# Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=W0401,W0614
from telemetry import story
from telemetry.page import page as page_module
from telemetry.page import shared_page_state
class SkiaDesktopPage(page_module.Page):
def __init__(self, url, page_set):
super(SkiaDesktopPage, self).__init__(
url=url,
name=url,
page_set=page_set,
shared_page_state_class=shared_page_state.SharedDesktopPageState)
self.archive_data_file = 'data/skia_ebay_desktop.json'
def RunNavigateSteps(self, action_runner):
action_runner.Navigate(self.url)
action_runner.Wait(15)
class SkiaEbayDesktopPageSet(story.StorySet):
""" Pages designed to represent the median, not highly optimized web """
def __init__(self):
super(SkiaEbayDesktopPageSet, self).__init__(
archive_data_file='data/skia_ebay_desktop.json')
urls_list = [
# go/skia-skps-3-2019
'http://www.ebay.com',
]
for url in urls_list:
self.AddStory(SkiaDesktopPage(url, self))

Add wait time to ebay pageset
Bug: skia:11898
Change-Id: I0bb58f1d8e9c6ad48148d50b840f152fc158f071
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/400538
Reviewed-by: Ravi Mistry <9fa2e7438b8cb730f96b74865492597170561628@google.com>
Commit-Queue: Ravi Mistry <9fa2e7438b8cb730f96b74865492597170561628@google.com>
|
# Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=W0401,W0614
from telemetry import story
from telemetry.page import page as page_module
from telemetry.page import shared_page_state
class SkiaDesktopPage(page_module.Page):
def __init__(self, url, page_set):
super(SkiaDesktopPage, self).__init__(
url=url,
name=url,
page_set=page_set,
shared_page_state_class=shared_page_state.SharedDesktopPageState)
self.archive_data_file = 'data/skia_ebay_desktop.json'
def RunNavigateSteps(self, action_runner):
action_runner.Navigate(self.url, timeout_in_seconds=120)
class SkiaEbayDesktopPageSet(story.StorySet):
""" Pages designed to represent the median, not highly optimized web """
def __init__(self):
super(SkiaEbayDesktopPageSet, self).__init__(
archive_data_file='data/skia_ebay_desktop.json')
urls_list = [
# go/skia-skps-3-2019
'http://www.ebay.com',
]
for url in urls_list:
self.AddStory(SkiaDesktopPage(url, self))
|
<commit_before># Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=W0401,W0614
from telemetry import story
from telemetry.page import page as page_module
from telemetry.page import shared_page_state
class SkiaDesktopPage(page_module.Page):
def __init__(self, url, page_set):
super(SkiaDesktopPage, self).__init__(
url=url,
name=url,
page_set=page_set,
shared_page_state_class=shared_page_state.SharedDesktopPageState)
self.archive_data_file = 'data/skia_ebay_desktop.json'
def RunNavigateSteps(self, action_runner):
action_runner.Navigate(self.url)
action_runner.Wait(15)
class SkiaEbayDesktopPageSet(story.StorySet):
""" Pages designed to represent the median, not highly optimized web """
def __init__(self):
super(SkiaEbayDesktopPageSet, self).__init__(
archive_data_file='data/skia_ebay_desktop.json')
urls_list = [
# go/skia-skps-3-2019
'http://www.ebay.com',
]
for url in urls_list:
self.AddStory(SkiaDesktopPage(url, self))<commit_msg>Add wait time to ebay pageset
Bug: skia:11898
Change-Id: I0bb58f1d8e9c6ad48148d50b840f152fc158f071
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/400538
Reviewed-by: Ravi Mistry <9fa2e7438b8cb730f96b74865492597170561628@google.com>
Commit-Queue: Ravi Mistry <9fa2e7438b8cb730f96b74865492597170561628@google.com><commit_after>
|
# Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=W0401,W0614
from telemetry import story
from telemetry.page import page as page_module
from telemetry.page import shared_page_state
class SkiaDesktopPage(page_module.Page):
  """A single desktop story pointing at one fixed URL for SkPicture capture."""

  def __init__(self, url, page_set):
    # The URL doubles as the story name; force the shared desktop state so
    # every story in the set reuses one browser instance.
    super(SkiaDesktopPage, self).__init__(
        url=url, name=url, page_set=page_set,
        shared_page_state_class=shared_page_state.SharedDesktopPageState)
    self.archive_data_file = 'data/skia_ebay_desktop.json'

  def RunNavigateSteps(self, action_runner):
    # Generous 120-second timeout: the target site can be slow to settle
    # while the page is being recorded.
    action_runner.Navigate(self.url, timeout_in_seconds=120)
class SkiaEbayDesktopPageSet(story.StorySet):
  """ Pages designed to represent the median, not highly optimized web """

  def __init__(self):
    super(SkiaEbayDesktopPageSet, self).__init__(
        archive_data_file='data/skia_ebay_desktop.json')
    # go/skia-skps-3-2019
    for url in ('http://www.ebay.com',):
      self.AddStory(SkiaDesktopPage(url, self))
|
# Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=W0401,W0614
from telemetry import story
from telemetry.page import page as page_module
from telemetry.page import shared_page_state
class SkiaDesktopPage(page_module.Page):
def __init__(self, url, page_set):
super(SkiaDesktopPage, self).__init__(
url=url,
name=url,
page_set=page_set,
shared_page_state_class=shared_page_state.SharedDesktopPageState)
self.archive_data_file = 'data/skia_ebay_desktop.json'
def RunNavigateSteps(self, action_runner):
action_runner.Navigate(self.url)
action_runner.Wait(15)
class SkiaEbayDesktopPageSet(story.StorySet):
""" Pages designed to represent the median, not highly optimized web """
def __init__(self):
super(SkiaEbayDesktopPageSet, self).__init__(
archive_data_file='data/skia_ebay_desktop.json')
urls_list = [
# go/skia-skps-3-2019
'http://www.ebay.com',
]
for url in urls_list:
self.AddStory(SkiaDesktopPage(url, self))Add wait time to ebay pageset
Bug: skia:11898
Change-Id: I0bb58f1d8e9c6ad48148d50b840f152fc158f071
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/400538
Reviewed-by: Ravi Mistry <9fa2e7438b8cb730f96b74865492597170561628@google.com>
Commit-Queue: Ravi Mistry <9fa2e7438b8cb730f96b74865492597170561628@google.com># Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=W0401,W0614
from telemetry import story
from telemetry.page import page as page_module
from telemetry.page import shared_page_state
class SkiaDesktopPage(page_module.Page):
def __init__(self, url, page_set):
super(SkiaDesktopPage, self).__init__(
url=url,
name=url,
page_set=page_set,
shared_page_state_class=shared_page_state.SharedDesktopPageState)
self.archive_data_file = 'data/skia_ebay_desktop.json'
def RunNavigateSteps(self, action_runner):
action_runner.Navigate(self.url, timeout_in_seconds=120)
class SkiaEbayDesktopPageSet(story.StorySet):
""" Pages designed to represent the median, not highly optimized web """
def __init__(self):
super(SkiaEbayDesktopPageSet, self).__init__(
archive_data_file='data/skia_ebay_desktop.json')
urls_list = [
# go/skia-skps-3-2019
'http://www.ebay.com',
]
for url in urls_list:
self.AddStory(SkiaDesktopPage(url, self))
|
<commit_before># Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=W0401,W0614
from telemetry import story
from telemetry.page import page as page_module
from telemetry.page import shared_page_state
class SkiaDesktopPage(page_module.Page):
def __init__(self, url, page_set):
super(SkiaDesktopPage, self).__init__(
url=url,
name=url,
page_set=page_set,
shared_page_state_class=shared_page_state.SharedDesktopPageState)
self.archive_data_file = 'data/skia_ebay_desktop.json'
def RunNavigateSteps(self, action_runner):
action_runner.Navigate(self.url)
action_runner.Wait(15)
class SkiaEbayDesktopPageSet(story.StorySet):
""" Pages designed to represent the median, not highly optimized web """
def __init__(self):
super(SkiaEbayDesktopPageSet, self).__init__(
archive_data_file='data/skia_ebay_desktop.json')
urls_list = [
# go/skia-skps-3-2019
'http://www.ebay.com',
]
for url in urls_list:
self.AddStory(SkiaDesktopPage(url, self))<commit_msg>Add wait time to ebay pageset
Bug: skia:11898
Change-Id: I0bb58f1d8e9c6ad48148d50b840f152fc158f071
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/400538
Reviewed-by: Ravi Mistry <9fa2e7438b8cb730f96b74865492597170561628@google.com>
Commit-Queue: Ravi Mistry <9fa2e7438b8cb730f96b74865492597170561628@google.com><commit_after># Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=W0401,W0614
from telemetry import story
from telemetry.page import page as page_module
from telemetry.page import shared_page_state
class SkiaDesktopPage(page_module.Page):
def __init__(self, url, page_set):
super(SkiaDesktopPage, self).__init__(
url=url,
name=url,
page_set=page_set,
shared_page_state_class=shared_page_state.SharedDesktopPageState)
self.archive_data_file = 'data/skia_ebay_desktop.json'
def RunNavigateSteps(self, action_runner):
action_runner.Navigate(self.url, timeout_in_seconds=120)
class SkiaEbayDesktopPageSet(story.StorySet):
""" Pages designed to represent the median, not highly optimized web """
def __init__(self):
super(SkiaEbayDesktopPageSet, self).__init__(
archive_data_file='data/skia_ebay_desktop.json')
urls_list = [
# go/skia-skps-3-2019
'http://www.ebay.com',
]
for url in urls_list:
self.AddStory(SkiaDesktopPage(url, self))
|
acf2729f368ad4eabc0219d1a191089e8d5f740f
|
dmz/geolocate.py
|
dmz/geolocate.py
|
#Using MaxMind, so import pygeoip
import pygeoip
def geolocate(ip_addresses):
#Read in files, storing in memory for speed
ip4_geo = pygeoip.GeoIP(filename = "/usr/share/GeoIP/GeoIP.dat", flags = 1)
ip6_geo = pygeoip.GeoIP(filename = "/usr/share/GeoIP/GeoIPv6.dat", flags = 1)
#Check type
if not(isinstance(ip_addresses,list)):
ip_addresses = [ip_addresses]
#Construct output list
output = []
#For each entry in the input list, retrieve the country code and add it to the output object
for entry in ip_addresses:
if(bool(re.search(":",entry))):
try:
output.append(ip6_geo.country_code_by_addr(entry))
except:
output.append("Invalid")
else:
try:
output.append(ip4_geo.country_code_by_addr(entry))
except:
output.append("Invalid")
#Return
return output
|
"""
Provides simple functions that geo-locate an IP address (IPv4 or IPv4) using the
MaxMind Geo Database.
"""
import pygeoip
class GeoLocator(object):
"""Geo locate IP addresses using the MaxMind database"""
def __init__(self, ipv4_geo_path='/usr/share/GeoIP/GeoIP.dat',
ipv6_geo_path='/usr/share/GeoIP/GeoIPv6.dat'):
self.ipv4_geo_path = ipv4_geo_path
self.ipv6_geo_path = ipv6_geo_path
@property
def ipv4_geo(self):
"""Return an instance of pygeoip.GeoIP loaded with IPv4 geolocation data.
The data is stored in memory, and loaded up only when first requested"""
if not hasattr(self, '_ipv4_geo'):
self._ipv4_geo = pygeoip.GeoIP(filename=self.ipv4_geo_path, flags=1)
return self._ipv4_geo
@property
def ipv6_geo(self):
"""Return an instance of pygeoip.GeoIP loaded with IPv6 geolocation data.
The data is stored in memory, and loaded up only when first requested"""
if not hasattr(self, '_ipv6_geo'):
self._ipv6_geo = pygeoip.GeoIP(filename=self.ipv6_geo_path, flags=1)
return self._ipv6_geo
def _check_if_ipv6(self, ip_address):
"""Return true if given ip_address is IPv6, false otherwise"""
try:
# socket.inet_pton throws an exception if it isn't a valid address
# of the stated address class
socket.inet_pton(socket.AF_INET6, ip_address)
return True
except:
return False
def find_country(self, ip_address):
"""Return best guess of country in which this IP address resides"""
if self._check_if_ipv6(ip_address):
return self.ipv6_geo.country_code_by_addr(ip_address)
else:
return self.ipv4_geo.country_code_by_addr(ip_address)
|
Move the Geo Location stuff into a class
|
Move the Geo Location stuff into a class
|
Python
|
mit
|
yuvipanda/edit-stats
|
#Using MaxMind, so import pygeoip
import pygeoip
def geolocate(ip_addresses):
    """Map one IP address (or a list of them) to ISO country codes.

    Args:
        ip_addresses: a single IP-address string, or a list of such
            strings.  IPv6 addresses are recognised by containing ':'.

    Returns:
        A list of country-code strings, one per input address, with the
        sentinel string "Invalid" for addresses that could not be resolved.
    """
    # Read both databases into memory (flags=1) for fast repeated lookups.
    ip4_geo = pygeoip.GeoIP(filename="/usr/share/GeoIP/GeoIP.dat", flags=1)
    ip6_geo = pygeoip.GeoIP(filename="/usr/share/GeoIP/GeoIPv6.dat", flags=1)
    # Accept a bare string as a one-element batch.
    if not isinstance(ip_addresses, list):
        ip_addresses = [ip_addresses]
    output = []
    for entry in ip_addresses:
        # Bug fix: the original called re.search(":", entry) but never
        # imported `re`, so every call raised NameError.  A plain substring
        # test is equivalent and needs no import.
        geo = ip6_geo if ":" in entry else ip4_geo
        try:
            output.append(geo.country_code_by_addr(entry))
        except Exception:  # pygeoip raises on malformed/unknown addresses
            output.append("Invalid")
    return output
Move the Geo Location stuff into a class
|
"""
Provides simple functions that geo-locate an IP address (IPv4 or IPv4) using the
MaxMind Geo Database.
"""
import pygeoip
class GeoLocator(object):
    """Geo locate IP addresses using the MaxMind database."""

    def __init__(self, ipv4_geo_path='/usr/share/GeoIP/GeoIP.dat',
                 ipv6_geo_path='/usr/share/GeoIP/GeoIPv6.dat'):
        # Only the paths are stored here; the databases themselves are
        # loaded lazily by the properties below.
        self.ipv4_geo_path = ipv4_geo_path
        self.ipv6_geo_path = ipv6_geo_path

    @property
    def ipv4_geo(self):
        """Return an instance of pygeoip.GeoIP loaded with IPv4 geolocation data.

        The data is stored in memory, and loaded up only when first requested.
        """
        if not hasattr(self, '_ipv4_geo'):
            self._ipv4_geo = pygeoip.GeoIP(filename=self.ipv4_geo_path, flags=1)
        return self._ipv4_geo

    @property
    def ipv6_geo(self):
        """Return an instance of pygeoip.GeoIP loaded with IPv6 geolocation data.

        The data is stored in memory, and loaded up only when first requested.
        """
        if not hasattr(self, '_ipv6_geo'):
            self._ipv6_geo = pygeoip.GeoIP(filename=self.ipv6_geo_path, flags=1)
        return self._ipv6_geo

    def _check_if_ipv6(self, ip_address):
        """Return True if the given ip_address is valid IPv6, False otherwise."""
        # Bug fix: `socket` was never imported at module level, so this
        # method raised NameError instead of classifying the address.
        import socket
        try:
            # inet_pton raises if the string is not a valid address of the
            # stated family; OSError covers socket.error on Python 3.
            socket.inet_pton(socket.AF_INET6, ip_address)
            return True
        except (OSError, ValueError):
            return False

    def find_country(self, ip_address):
        """Return best guess of country in which this IP address resides."""
        if self._check_if_ipv6(ip_address):
            return self.ipv6_geo.country_code_by_addr(ip_address)
        else:
            return self.ipv4_geo.country_code_by_addr(ip_address)
|
<commit_before>#Using MaxMind, so import pygeoip
import pygeoip
def geolocate(ip_addresses):
#Read in files, storing in memory for speed
ip4_geo = pygeoip.GeoIP(filename = "/usr/share/GeoIP/GeoIP.dat", flags = 1)
ip6_geo = pygeoip.GeoIP(filename = "/usr/share/GeoIP/GeoIPv6.dat", flags = 1)
#Check type
if not(isinstance(ip_addresses,list)):
ip_addresses = [ip_addresses]
#Construct output list
output = []
#For each entry in the input list, retrieve the country code and add it to the output object
for entry in ip_addresses:
if(bool(re.search(":",entry))):
try:
output.append(ip6_geo.country_code_by_addr(entry))
except:
output.append("Invalid")
else:
try:
output.append(ip4_geo.country_code_by_addr(entry))
except:
output.append("Invalid")
#Return
return output
<commit_msg>Move the Geo Location stuff into a class<commit_after>
|
"""
Provides simple functions that geo-locate an IP address (IPv4 or IPv4) using the
MaxMind Geo Database.
"""
import pygeoip
class GeoLocator(object):
"""Geo locate IP addresses using the MaxMind database"""
def __init__(self, ipv4_geo_path='/usr/share/GeoIP/GeoIP.dat',
ipv6_geo_path='/usr/share/GeoIP/GeoIPv6.dat'):
self.ipv4_geo_path = ipv4_geo_path
self.ipv6_geo_path = ipv6_geo_path
@property
def ipv4_geo(self):
"""Return an instance of pygeoip.GeoIP loaded with IPv4 geolocation data.
The data is stored in memory, and loaded up only when first requested"""
if not hasattr(self, '_ipv4_geo'):
self._ipv4_geo = pygeoip.GeoIP(filename=self.ipv4_geo_path, flags=1)
return self._ipv4_geo
@property
def ipv6_geo(self):
"""Return an instance of pygeoip.GeoIP loaded with IPv6 geolocation data.
The data is stored in memory, and loaded up only when first requested"""
if not hasattr(self, '_ipv6_geo'):
self._ipv6_geo = pygeoip.GeoIP(filename=self.ipv6_geo_path, flags=1)
return self._ipv6_geo
def _check_if_ipv6(self, ip_address):
"""Return true if given ip_address is IPv6, false otherwise"""
try:
# socket.inet_pton throws an exception if it isn't a valid address
# of the stated address class
socket.inet_pton(socket.AF_INET6, ip_address)
return True
except:
return False
def find_country(self, ip_address):
"""Return best guess of country in which this IP address resides"""
if self._check_if_ipv6(ip_address):
return self.ipv6_geo.country_code_by_addr(ip_address)
else:
return self.ipv4_geo.country_code_by_addr(ip_address)
|
#Using MaxMind, so import pygeoip
import pygeoip
def geolocate(ip_addresses):
#Read in files, storing in memory for speed
ip4_geo = pygeoip.GeoIP(filename = "/usr/share/GeoIP/GeoIP.dat", flags = 1)
ip6_geo = pygeoip.GeoIP(filename = "/usr/share/GeoIP/GeoIPv6.dat", flags = 1)
#Check type
if not(isinstance(ip_addresses,list)):
ip_addresses = [ip_addresses]
#Construct output list
output = []
#For each entry in the input list, retrieve the country code and add it to the output object
for entry in ip_addresses:
if(bool(re.search(":",entry))):
try:
output.append(ip6_geo.country_code_by_addr(entry))
except:
output.append("Invalid")
else:
try:
output.append(ip4_geo.country_code_by_addr(entry))
except:
output.append("Invalid")
#Return
return output
Move the Geo Location stuff into a class"""
Provides simple functions that geo-locate an IP address (IPv4 or IPv4) using the
MaxMind Geo Database.
"""
import pygeoip
class GeoLocator(object):
"""Geo locate IP addresses using the MaxMind database"""
def __init__(self, ipv4_geo_path='/usr/share/GeoIP/GeoIP.dat',
ipv6_geo_path='/usr/share/GeoIP/GeoIPv6.dat'):
self.ipv4_geo_path = ipv4_geo_path
self.ipv6_geo_path = ipv6_geo_path
@property
def ipv4_geo(self):
"""Return an instance of pygeoip.GeoIP loaded with IPv4 geolocation data.
The data is stored in memory, and loaded up only when first requested"""
if not hasattr(self, '_ipv4_geo'):
self._ipv4_geo = pygeoip.GeoIP(filename=self.ipv4_geo_path, flags=1)
return self._ipv4_geo
@property
def ipv6_geo(self):
"""Return an instance of pygeoip.GeoIP loaded with IPv6 geolocation data.
The data is stored in memory, and loaded up only when first requested"""
if not hasattr(self, '_ipv6_geo'):
self._ipv6_geo = pygeoip.GeoIP(filename=self.ipv6_geo_path, flags=1)
return self._ipv6_geo
def _check_if_ipv6(self, ip_address):
"""Return true if given ip_address is IPv6, false otherwise"""
try:
# socket.inet_pton throws an exception if it isn't a valid address
# of the stated address class
socket.inet_pton(socket.AF_INET6, ip_address)
return True
except:
return False
def find_country(self, ip_address):
"""Return best guess of country in which this IP address resides"""
if self._check_if_ipv6(ip_address):
return self.ipv6_geo.country_code_by_addr(ip_address)
else:
return self.ipv4_geo.country_code_by_addr(ip_address)
|
<commit_before>#Using MaxMind, so import pygeoip
import pygeoip
def geolocate(ip_addresses):
#Read in files, storing in memory for speed
ip4_geo = pygeoip.GeoIP(filename = "/usr/share/GeoIP/GeoIP.dat", flags = 1)
ip6_geo = pygeoip.GeoIP(filename = "/usr/share/GeoIP/GeoIPv6.dat", flags = 1)
#Check type
if not(isinstance(ip_addresses,list)):
ip_addresses = [ip_addresses]
#Construct output list
output = []
#For each entry in the input list, retrieve the country code and add it to the output object
for entry in ip_addresses:
if(bool(re.search(":",entry))):
try:
output.append(ip6_geo.country_code_by_addr(entry))
except:
output.append("Invalid")
else:
try:
output.append(ip4_geo.country_code_by_addr(entry))
except:
output.append("Invalid")
#Return
return output
<commit_msg>Move the Geo Location stuff into a class<commit_after>"""
Provides simple functions that geo-locate an IP address (IPv4 or IPv4) using the
MaxMind Geo Database.
"""
import pygeoip
class GeoLocator(object):
"""Geo locate IP addresses using the MaxMind database"""
def __init__(self, ipv4_geo_path='/usr/share/GeoIP/GeoIP.dat',
ipv6_geo_path='/usr/share/GeoIP/GeoIPv6.dat'):
self.ipv4_geo_path = ipv4_geo_path
self.ipv6_geo_path = ipv6_geo_path
@property
def ipv4_geo(self):
"""Return an instance of pygeoip.GeoIP loaded with IPv4 geolocation data.
The data is stored in memory, and loaded up only when first requested"""
if not hasattr(self, '_ipv4_geo'):
self._ipv4_geo = pygeoip.GeoIP(filename=self.ipv4_geo_path, flags=1)
return self._ipv4_geo
@property
def ipv6_geo(self):
"""Return an instance of pygeoip.GeoIP loaded with IPv6 geolocation data.
The data is stored in memory, and loaded up only when first requested"""
if not hasattr(self, '_ipv6_geo'):
self._ipv6_geo = pygeoip.GeoIP(filename=self.ipv6_geo_path, flags=1)
return self._ipv6_geo
def _check_if_ipv6(self, ip_address):
"""Return true if given ip_address is IPv6, false otherwise"""
try:
# socket.inet_pton throws an exception if it isn't a valid address
# of the stated address class
socket.inet_pton(socket.AF_INET6, ip_address)
return True
except:
return False
def find_country(self, ip_address):
"""Return best guess of country in which this IP address resides"""
if self._check_if_ipv6(ip_address):
return self.ipv6_geo.country_code_by_addr(ip_address)
else:
return self.ipv4_geo.country_code_by_addr(ip_address)
|
35b45fd793ac695f6ec6a792534fdde77a3023aa
|
napalm_yang/supported_models.py
|
napalm_yang/supported_models.py
|
SUPPORTED_MODELS = (
# module_name, models
("openconfig-interfaces", ["interfaces"]),
("openconfig-network-instance", ["network_instances"]),
("openconfig-platform", ["components"]),
("openconfig-vlan", ["vlans"]),
)
|
SUPPORTED_MODELS = (
# module_name, models
("openconfig-interfaces", ["interfaces"]),
("openconfig-network-instance", ["network_instances"]),
("openconfig-platform", ["components"]),
("openconfig-vlan", ["vlans"]),
('openconfig-system', ['system'])
)
|
Add system as supported models
|
Add system as supported models
|
Python
|
apache-2.0
|
napalm-automation/napalm-yang,napalm-automation/napalm-yang
|
SUPPORTED_MODELS = (
# module_name, models
("openconfig-interfaces", ["interfaces"]),
("openconfig-network-instance", ["network_instances"]),
("openconfig-platform", ["components"]),
("openconfig-vlan", ["vlans"]),
)
Add system as supported models
|
# (module_name, [model, ...]) pairs describing the YANG models supported.
SUPPORTED_MODELS = (
    # module_name, models
    ("openconfig-interfaces", ["interfaces"]),
    ("openconfig-network-instance", ["network_instances"]),
    ("openconfig-platform", ["components"]),
    ("openconfig-vlan", ["vlans"]),
    # Consistency fix: double quotes and trailing comma, matching the
    # other entries in this tuple.
    ("openconfig-system", ["system"]),
)
|
<commit_before>SUPPORTED_MODELS = (
# module_name, models
("openconfig-interfaces", ["interfaces"]),
("openconfig-network-instance", ["network_instances"]),
("openconfig-platform", ["components"]),
("openconfig-vlan", ["vlans"]),
)
<commit_msg>Add system as supported models<commit_after>
|
SUPPORTED_MODELS = (
# module_name, models
("openconfig-interfaces", ["interfaces"]),
("openconfig-network-instance", ["network_instances"]),
("openconfig-platform", ["components"]),
("openconfig-vlan", ["vlans"]),
('openconfig-system', ['system'])
)
|
SUPPORTED_MODELS = (
# module_name, models
("openconfig-interfaces", ["interfaces"]),
("openconfig-network-instance", ["network_instances"]),
("openconfig-platform", ["components"]),
("openconfig-vlan", ["vlans"]),
)
Add system as supported models

SUPPORTED_MODELS = (
# module_name, models
("openconfig-interfaces", ["interfaces"]),
("openconfig-network-instance", ["network_instances"]),
("openconfig-platform", ["components"]),
("openconfig-vlan", ["vlans"]),
('openconfig-system', ['system'])
)
|
<commit_before>SUPPORTED_MODELS = (
# module_name, models
("openconfig-interfaces", ["interfaces"]),
("openconfig-network-instance", ["network_instances"]),
("openconfig-platform", ["components"]),
("openconfig-vlan", ["vlans"]),
)
<commit_msg>Add system as supported models<commit_after>SUPPORTED_MODELS = (
# module_name, models
("openconfig-interfaces", ["interfaces"]),
("openconfig-network-instance", ["network_instances"]),
("openconfig-platform", ["components"]),
("openconfig-vlan", ["vlans"]),
('openconfig-system', ['system'])
)
|
8420bf735b64ceaba965b946c1f11c77b0732038
|
setup.py
|
setup.py
|
from setuptools import setup
setup(name = 'graphysio',
version = '0.6',
description = 'Graphical visualization of physiologic time series',
url = 'https://github.com/jaj42/graphysio',
author = 'Jona JOACHIM',
author_email = 'jona@joachim.cc',
license = 'ISC',
install_requires = ['pyqtgraph', 'pandas'],
scripts = ['scripts/graphysioui.py'],
packages = ['graphysio', 'graphysio.ui'],
)
|
from setuptools import setup
setup(name = 'graphysio',
version = '0.61',
description = 'Graphical visualization of physiologic time series',
url = 'https://github.com/jaj42/graphysio',
author = 'Jona JOACHIM',
author_email = 'jona@joachim.cc',
license = 'ISC',
python_requires = '>=3.4',
install_requires = ['pyqtgraph', 'pandas'],
scripts = ['scripts/graphysioui.py'],
packages = ['graphysio', 'graphysio.ui'],
)
|
Add python 3 dependency, bump minor
|
Add python 3 dependency, bump minor
|
Python
|
isc
|
jaj42/GraPhysio,jaj42/GraPhysio,jaj42/dyngraph
|
from setuptools import setup
setup(name = 'graphysio',
version = '0.6',
description = 'Graphical visualization of physiologic time series',
url = 'https://github.com/jaj42/graphysio',
author = 'Jona JOACHIM',
author_email = 'jona@joachim.cc',
license = 'ISC',
install_requires = ['pyqtgraph', 'pandas'],
scripts = ['scripts/graphysioui.py'],
packages = ['graphysio', 'graphysio.ui'],
)
Add python 3 dependency, bump minor
|
from setuptools import setup
setup(name = 'graphysio',
version = '0.61',
description = 'Graphical visualization of physiologic time series',
url = 'https://github.com/jaj42/graphysio',
author = 'Jona JOACHIM',
author_email = 'jona@joachim.cc',
license = 'ISC',
python_requires = '>=3.4',
install_requires = ['pyqtgraph', 'pandas'],
scripts = ['scripts/graphysioui.py'],
packages = ['graphysio', 'graphysio.ui'],
)
|
<commit_before>from setuptools import setup
setup(name = 'graphysio',
version = '0.6',
description = 'Graphical visualization of physiologic time series',
url = 'https://github.com/jaj42/graphysio',
author = 'Jona JOACHIM',
author_email = 'jona@joachim.cc',
license = 'ISC',
install_requires = ['pyqtgraph', 'pandas'],
scripts = ['scripts/graphysioui.py'],
packages = ['graphysio', 'graphysio.ui'],
)
<commit_msg>Add python 3 dependency, bump minor<commit_after>
|
from setuptools import setup

# Packaging metadata for graphysio, a graphical viewer for physiologic
# time series.  Version bumped to 0.61 together with the new
# python_requires constraint.
setup(name = 'graphysio',
      version = '0.61',
      description = 'Graphical visualization of physiologic time series',
      url = 'https://github.com/jaj42/graphysio',
      author = 'Jona JOACHIM',
      author_email = 'jona@joachim.cc',
      license = 'ISC',
      # The code base is Python-3 only; refuse to install on Python 2.
      python_requires = '>=3.4',
      install_requires = ['pyqtgraph', 'pandas'],
      scripts = ['scripts/graphysioui.py'],
      packages = ['graphysio', 'graphysio.ui'],
      )
|
from setuptools import setup
setup(name = 'graphysio',
version = '0.6',
description = 'Graphical visualization of physiologic time series',
url = 'https://github.com/jaj42/graphysio',
author = 'Jona JOACHIM',
author_email = 'jona@joachim.cc',
license = 'ISC',
install_requires = ['pyqtgraph', 'pandas'],
scripts = ['scripts/graphysioui.py'],
packages = ['graphysio', 'graphysio.ui'],
)
Add python 3 dependency, bump minor

from setuptools import setup
setup(name = 'graphysio',
version = '0.61',
description = 'Graphical visualization of physiologic time series',
url = 'https://github.com/jaj42/graphysio',
author = 'Jona JOACHIM',
author_email = 'jona@joachim.cc',
license = 'ISC',
python_requires = '>=3.4',
install_requires = ['pyqtgraph', 'pandas'],
scripts = ['scripts/graphysioui.py'],
packages = ['graphysio', 'graphysio.ui'],
)
|
<commit_before>from setuptools import setup
setup(name = 'graphysio',
version = '0.6',
description = 'Graphical visualization of physiologic time series',
url = 'https://github.com/jaj42/graphysio',
author = 'Jona JOACHIM',
author_email = 'jona@joachim.cc',
license = 'ISC',
install_requires = ['pyqtgraph', 'pandas'],
scripts = ['scripts/graphysioui.py'],
packages = ['graphysio', 'graphysio.ui'],
)
<commit_msg>Add python 3 dependency, bump minor<commit_after>from setuptools import setup
setup(name = 'graphysio',
version = '0.61',
description = 'Graphical visualization of physiologic time series',
url = 'https://github.com/jaj42/graphysio',
author = 'Jona JOACHIM',
author_email = 'jona@joachim.cc',
license = 'ISC',
python_requires = '>=3.4',
install_requires = ['pyqtgraph', 'pandas'],
scripts = ['scripts/graphysioui.py'],
packages = ['graphysio', 'graphysio.ui'],
)
|
5f1fa23dd8e0850a9f0e6a054ec6738e5a174ff7
|
database/tables.py
|
database/tables.py
|
#!/usr/bin/env python3
# vim:fileencoding=utf-8:ts=8:et:sw=4:sts=4:tw=79
"""
tables.py
Table definitions for pearbot database submodule.
Tables are not assigned to variables, they can be accessed using the global
metadata object though.
Copyright (c) 2015 Twisted Pear <pear at twistedpear dot at>
See the file LICENSE for copying permission.
"""
from sqlalchemy import MetaData, Table, Column, Integer, String
METADATA = MetaData()
Table("quote", METADATA,
Column("qid", Integer, primary_key=True),
Column("text", String, nullable=False))
|
#!/usr/bin/env python3
# vim:fileencoding=utf-8:ts=8:et:sw=4:sts=4:tw=79
"""
tables.py
Table definitions for pearbot database submodule.
Tables are not assigned to variables, they can be accessed using the global
metadata object though.
Copyright (c) 2015 Twisted Pear <pear at twistedpear dot at>
See the file LICENSE for copying permission.
"""
from sqlalchemy import MetaData, Table, Column, Integer, String
METADATA = MetaData()
Table("quote", METADATA,
Column("qid", Integer, primary_key=True),
Column("text", String, nullable=False))
Table("moderator", METADATA,
Column("stream", String, primary_key=True),
Column("name", String, primary_key=True))
|
Add a table for caching moderators
|
Add a table for caching moderators
|
Python
|
mit
|
pyrige/pump19
|
#!/usr/bin/env python3
# vim:fileencoding=utf-8:ts=8:et:sw=4:sts=4:tw=79
"""
tables.py
Table definitions for pearbot database submodule.
Tables are not assigned to variables, they can be accessed using the global
metadata object though.
Copyright (c) 2015 Twisted Pear <pear at twistedpear dot at>
See the file LICENSE for copying permission.
"""
from sqlalchemy import MetaData, Table, Column, Integer, String
METADATA = MetaData()
Table("quote", METADATA,
Column("qid", Integer, primary_key=True),
Column("text", String, nullable=False))
Add a table for caching moderators
|
#!/usr/bin/env python3
# vim:fileencoding=utf-8:ts=8:et:sw=4:sts=4:tw=79
"""
tables.py
Table definitions for pearbot database submodule.
Tables are not assigned to variables, they can be accessed using the global
metadata object though.
Copyright (c) 2015 Twisted Pear <pear at twistedpear dot at>
See the file LICENSE for copying permission.
"""
from sqlalchemy import MetaData, Table, Column, Integer, String
METADATA = MetaData()
Table("quote", METADATA,
Column("qid", Integer, primary_key=True),
Column("text", String, nullable=False))
Table("moderator", METADATA,
Column("stream", String, primary_key=True),
Column("name", String, primary_key=True))
|
<commit_before>#!/usr/bin/env python3
# vim:fileencoding=utf-8:ts=8:et:sw=4:sts=4:tw=79
"""
tables.py
Table definitions for pearbot database submodule.
Tables are not assigned to variables, they can be accessed using the global
metadata object though.
Copyright (c) 2015 Twisted Pear <pear at twistedpear dot at>
See the file LICENSE for copying permission.
"""
from sqlalchemy import MetaData, Table, Column, Integer, String
METADATA = MetaData()
Table("quote", METADATA,
Column("qid", Integer, primary_key=True),
Column("text", String, nullable=False))
<commit_msg>Add a table for caching moderators<commit_after>
|
#!/usr/bin/env python3
# vim:fileencoding=utf-8:ts=8:et:sw=4:sts=4:tw=79
"""
tables.py
Table definitions for pearbot database submodule.
Tables are not assigned to variables, they can be accessed using the global
metadata object though.
Copyright (c) 2015 Twisted Pear <pear at twistedpear dot at>
See the file LICENSE for copying permission.
"""
from sqlalchemy import MetaData, Table, Column, Integer, String
METADATA = MetaData()
Table("quote", METADATA,
Column("qid", Integer, primary_key=True),
Column("text", String, nullable=False))
Table("moderator", METADATA,
Column("stream", String, primary_key=True),
Column("name", String, primary_key=True))
|
#!/usr/bin/env python3
# vim:fileencoding=utf-8:ts=8:et:sw=4:sts=4:tw=79
"""
tables.py
Table definitions for pearbot database submodule.
Tables are not assigned to variables, they can be accessed using the global
metadata object though.
Copyright (c) 2015 Twisted Pear <pear at twistedpear dot at>
See the file LICENSE for copying permission.
"""
from sqlalchemy import MetaData, Table, Column, Integer, String
METADATA = MetaData()
Table("quote", METADATA,
Column("qid", Integer, primary_key=True),
Column("text", String, nullable=False))
Add a table for caching moderators#!/usr/bin/env python3
# vim:fileencoding=utf-8:ts=8:et:sw=4:sts=4:tw=79
"""
tables.py
Table definitions for pearbot database submodule.
Tables are not assigned to variables, they can be accessed using the global
metadata object though.
Copyright (c) 2015 Twisted Pear <pear at twistedpear dot at>
See the file LICENSE for copying permission.
"""
from sqlalchemy import MetaData, Table, Column, Integer, String
METADATA = MetaData()
Table("quote", METADATA,
Column("qid", Integer, primary_key=True),
Column("text", String, nullable=False))
Table("moderator", METADATA,
Column("stream", String, primary_key=True),
Column("name", String, primary_key=True))
|
<commit_before>#!/usr/bin/env python3
# vim:fileencoding=utf-8:ts=8:et:sw=4:sts=4:tw=79
"""
tables.py
Table definitions for pearbot database submodule.
Tables are not assigned to variables, they can be accessed using the global
metadata object though.
Copyright (c) 2015 Twisted Pear <pear at twistedpear dot at>
See the file LICENSE for copying permission.
"""
from sqlalchemy import MetaData, Table, Column, Integer, String
METADATA = MetaData()
Table("quote", METADATA,
Column("qid", Integer, primary_key=True),
Column("text", String, nullable=False))
<commit_msg>Add a table for caching moderators<commit_after>#!/usr/bin/env python3
# vim:fileencoding=utf-8:ts=8:et:sw=4:sts=4:tw=79
"""
tables.py
Table definitions for pearbot database submodule.
Tables are not assigned to variables, they can be accessed using the global
metadata object though.
Copyright (c) 2015 Twisted Pear <pear at twistedpear dot at>
See the file LICENSE for copying permission.
"""
from sqlalchemy import MetaData, Table, Column, Integer, String
METADATA = MetaData()
Table("quote", METADATA,
Column("qid", Integer, primary_key=True),
Column("text", String, nullable=False))
Table("moderator", METADATA,
Column("stream", String, primary_key=True),
Column("name", String, primary_key=True))
|
13dc38f5ab1e77ed9fcd2654e37ccb9da1c497b2
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# coding=utf-8
import sys
import os
import re
from setuptools import setup
from setuptools.command.test import test as TestCommand
HERE = os.path.abspath(os.path.dirname(__file__))
PACKAGE_NAME = 'mailjet_rest'
VERSION = 'v1.2.2'
setup(
name=PACKAGE_NAME,
version=VERSION,
author='starenka',
author_email='starenka0@gmail.com',
maintainer='Guillaume Badi',
maintainer_email='gbadi@mailjet.com',
download_url='https://github.com/mailjet/mailjet-apiv3-python/releases/tag/v1.2.2',
url='https://github.com/mailjet/mailjet-apiv3-python',
description=('Mailjet V3 API wrapper'),
classifiers=['Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities'],
license='GPLv3',
keywords='mailjet api wrapper email client',
include_package_data=True,
install_requires=['requests>=2.4.3'],
tests_require=['unittest'],
entry_points={},
packages=['mailjet_rest'],
)
|
#!/usr/bin/env python
# coding=utf-8
import sys
import os
import re
from setuptools import setup
from setuptools.command.test import test as TestCommand
HERE = os.path.abspath(os.path.dirname(__file__))
PACKAGE_NAME = 'mailjet_rest'
VERSION = 'v1.2.2'
setup(
name=PACKAGE_NAME,
version=VERSION,
author='starenka',
author_email='starenka0@gmail.com',
maintainer='Guillaume Badi',
maintainer_email='gbadi@mailjet.com',
download_url='https://github.com/mailjet/mailjet-apiv3-python/releases/tag/v1.2.2',
url='https://github.com/mailjet/mailjet-apiv3-python',
description=('Mailjet V3 API wrapper'),
classifiers=['Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Utilities'],
license='GPLv3',
keywords='mailjet api wrapper email client',
include_package_data=True,
install_requires=['requests>=2.4.3'],
tests_require=['unittest'],
entry_points={},
packages=['mailjet_rest'],
)
|
Add mention of Python 3.6 support
|
Add mention of Python 3.6 support
|
Python
|
mit
|
mailjet/mailjet-apiv3-python
|
#!/usr/bin/env python
# coding=utf-8
import sys
import os
import re
from setuptools import setup
from setuptools.command.test import test as TestCommand
HERE = os.path.abspath(os.path.dirname(__file__))
PACKAGE_NAME = 'mailjet_rest'
VERSION = 'v1.2.2'
setup(
name=PACKAGE_NAME,
version=VERSION,
author='starenka',
author_email='starenka0@gmail.com',
maintainer='Guillaume Badi',
maintainer_email='gbadi@mailjet.com',
download_url='https://github.com/mailjet/mailjet-apiv3-python/releases/tag/v1.2.2',
url='https://github.com/mailjet/mailjet-apiv3-python',
description=('Mailjet V3 API wrapper'),
classifiers=['Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities'],
license='GPLv3',
keywords='mailjet api wrapper email client',
include_package_data=True,
install_requires=['requests>=2.4.3'],
tests_require=['unittest'],
entry_points={},
packages=['mailjet_rest'],
)
Add mention of Python 3.6 support
|
#!/usr/bin/env python
# coding=utf-8
import sys
import os
import re
from setuptools import setup
from setuptools.command.test import test as TestCommand
HERE = os.path.abspath(os.path.dirname(__file__))
PACKAGE_NAME = 'mailjet_rest'
VERSION = 'v1.2.2'
setup(
name=PACKAGE_NAME,
version=VERSION,
author='starenka',
author_email='starenka0@gmail.com',
maintainer='Guillaume Badi',
maintainer_email='gbadi@mailjet.com',
download_url='https://github.com/mailjet/mailjet-apiv3-python/releases/tag/v1.2.2',
url='https://github.com/mailjet/mailjet-apiv3-python',
description=('Mailjet V3 API wrapper'),
classifiers=['Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Utilities'],
license='GPLv3',
keywords='mailjet api wrapper email client',
include_package_data=True,
install_requires=['requests>=2.4.3'],
tests_require=['unittest'],
entry_points={},
packages=['mailjet_rest'],
)
|
<commit_before>#!/usr/bin/env python
# coding=utf-8
import sys
import os
import re
from setuptools import setup
from setuptools.command.test import test as TestCommand
HERE = os.path.abspath(os.path.dirname(__file__))
PACKAGE_NAME = 'mailjet_rest'
VERSION = 'v1.2.2'
setup(
name=PACKAGE_NAME,
version=VERSION,
author='starenka',
author_email='starenka0@gmail.com',
maintainer='Guillaume Badi',
maintainer_email='gbadi@mailjet.com',
download_url='https://github.com/mailjet/mailjet-apiv3-python/releases/tag/v1.2.2',
url='https://github.com/mailjet/mailjet-apiv3-python',
description=('Mailjet V3 API wrapper'),
classifiers=['Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities'],
license='GPLv3',
keywords='mailjet api wrapper email client',
include_package_data=True,
install_requires=['requests>=2.4.3'],
tests_require=['unittest'],
entry_points={},
packages=['mailjet_rest'],
)
<commit_msg>Add mention of Python 3.6 support<commit_after>
|
#!/usr/bin/env python
# coding=utf-8
import sys
import os
import re
from setuptools import setup
from setuptools.command.test import test as TestCommand
HERE = os.path.abspath(os.path.dirname(__file__))
PACKAGE_NAME = 'mailjet_rest'
VERSION = 'v1.2.2'
setup(
name=PACKAGE_NAME,
version=VERSION,
author='starenka',
author_email='starenka0@gmail.com',
maintainer='Guillaume Badi',
maintainer_email='gbadi@mailjet.com',
download_url='https://github.com/mailjet/mailjet-apiv3-python/releases/tag/v1.2.2',
url='https://github.com/mailjet/mailjet-apiv3-python',
description=('Mailjet V3 API wrapper'),
classifiers=['Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Utilities'],
license='GPLv3',
keywords='mailjet api wrapper email client',
include_package_data=True,
install_requires=['requests>=2.4.3'],
tests_require=['unittest'],
entry_points={},
packages=['mailjet_rest'],
)
|
#!/usr/bin/env python
# coding=utf-8
import sys
import os
import re
from setuptools import setup
from setuptools.command.test import test as TestCommand
HERE = os.path.abspath(os.path.dirname(__file__))
PACKAGE_NAME = 'mailjet_rest'
VERSION = 'v1.2.2'
setup(
name=PACKAGE_NAME,
version=VERSION,
author='starenka',
author_email='starenka0@gmail.com',
maintainer='Guillaume Badi',
maintainer_email='gbadi@mailjet.com',
download_url='https://github.com/mailjet/mailjet-apiv3-python/releases/tag/v1.2.2',
url='https://github.com/mailjet/mailjet-apiv3-python',
description=('Mailjet V3 API wrapper'),
classifiers=['Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities'],
license='GPLv3',
keywords='mailjet api wrapper email client',
include_package_data=True,
install_requires=['requests>=2.4.3'],
tests_require=['unittest'],
entry_points={},
packages=['mailjet_rest'],
)
Add mention of Python 3.6 support#!/usr/bin/env python
# coding=utf-8
import sys
import os
import re
from setuptools import setup
from setuptools.command.test import test as TestCommand
HERE = os.path.abspath(os.path.dirname(__file__))
PACKAGE_NAME = 'mailjet_rest'
VERSION = 'v1.2.2'
setup(
name=PACKAGE_NAME,
version=VERSION,
author='starenka',
author_email='starenka0@gmail.com',
maintainer='Guillaume Badi',
maintainer_email='gbadi@mailjet.com',
download_url='https://github.com/mailjet/mailjet-apiv3-python/releases/tag/v1.2.2',
url='https://github.com/mailjet/mailjet-apiv3-python',
description=('Mailjet V3 API wrapper'),
classifiers=['Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Utilities'],
license='GPLv3',
keywords='mailjet api wrapper email client',
include_package_data=True,
install_requires=['requests>=2.4.3'],
tests_require=['unittest'],
entry_points={},
packages=['mailjet_rest'],
)
|
<commit_before>#!/usr/bin/env python
# coding=utf-8
import sys
import os
import re
from setuptools import setup
from setuptools.command.test import test as TestCommand
HERE = os.path.abspath(os.path.dirname(__file__))
PACKAGE_NAME = 'mailjet_rest'
VERSION = 'v1.2.2'
setup(
name=PACKAGE_NAME,
version=VERSION,
author='starenka',
author_email='starenka0@gmail.com',
maintainer='Guillaume Badi',
maintainer_email='gbadi@mailjet.com',
download_url='https://github.com/mailjet/mailjet-apiv3-python/releases/tag/v1.2.2',
url='https://github.com/mailjet/mailjet-apiv3-python',
description=('Mailjet V3 API wrapper'),
classifiers=['Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities'],
license='GPLv3',
keywords='mailjet api wrapper email client',
include_package_data=True,
install_requires=['requests>=2.4.3'],
tests_require=['unittest'],
entry_points={},
packages=['mailjet_rest'],
)
<commit_msg>Add mention of Python 3.6 support<commit_after>#!/usr/bin/env python
# coding=utf-8
import sys
import os
import re
from setuptools import setup
from setuptools.command.test import test as TestCommand
HERE = os.path.abspath(os.path.dirname(__file__))
PACKAGE_NAME = 'mailjet_rest'
VERSION = 'v1.2.2'
setup(
name=PACKAGE_NAME,
version=VERSION,
author='starenka',
author_email='starenka0@gmail.com',
maintainer='Guillaume Badi',
maintainer_email='gbadi@mailjet.com',
download_url='https://github.com/mailjet/mailjet-apiv3-python/releases/tag/v1.2.2',
url='https://github.com/mailjet/mailjet-apiv3-python',
description=('Mailjet V3 API wrapper'),
classifiers=['Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Utilities'],
license='GPLv3',
keywords='mailjet api wrapper email client',
include_package_data=True,
install_requires=['requests>=2.4.3'],
tests_require=['unittest'],
entry_points={},
packages=['mailjet_rest'],
)
|
acefafd45604b4444157b6678cbbc327d4ae1614
|
setup.py
|
setup.py
|
#!/usr/bin/env python
"""
Erply-API
---------
Python wrapper for Erply API
"""
from distutils.core import setup
setup(
name='ErplyAPI',
version='0-2014.12.10',
description='Python wrapper for Erply API',
license='BSD',
author='Priit Laes',
author_email='plaes@plaes.org',
long_description=__doc__,
install_requires=['requests'],
py_modules=['erply_api'],
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Internet :: WWW/HTTP :: Site Management',
'Topic :: Office/Business :: Financial :: Point-Of-Sale',
'Topic :: Software Development :: Libraries',
]
)
|
#!/usr/bin/env python
"""
Erply-API
---------
Python wrapper for Erply API
"""
from distutils.core import setup
setup(
name='ErplyAPI',
version='0-2014.12.10-dev',
description='Python wrapper for Erply API',
license='BSD',
author='Priit Laes',
author_email='plaes@plaes.org',
long_description=__doc__,
install_requires=['requests'],
py_modules=['erply_api'],
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Internet :: WWW/HTTP :: Site Management',
'Topic :: Office/Business :: Financial :: Point-Of-Sale',
'Topic :: Software Development :: Libraries',
]
)
|
Move on to new development cycle
|
Move on to new development cycle
|
Python
|
bsd-3-clause
|
tteearu/python-erply-api
|
#!/usr/bin/env python
"""
Erply-API
---------
Python wrapper for Erply API
"""
from distutils.core import setup
setup(
name='ErplyAPI',
version='0-2014.12.10',
description='Python wrapper for Erply API',
license='BSD',
author='Priit Laes',
author_email='plaes@plaes.org',
long_description=__doc__,
install_requires=['requests'],
py_modules=['erply_api'],
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Internet :: WWW/HTTP :: Site Management',
'Topic :: Office/Business :: Financial :: Point-Of-Sale',
'Topic :: Software Development :: Libraries',
]
)
Move on to new development cycle
|
#!/usr/bin/env python
"""
Erply-API
---------
Python wrapper for Erply API
"""
from distutils.core import setup
setup(
name='ErplyAPI',
version='0-2014.12.10-dev',
description='Python wrapper for Erply API',
license='BSD',
author='Priit Laes',
author_email='plaes@plaes.org',
long_description=__doc__,
install_requires=['requests'],
py_modules=['erply_api'],
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Internet :: WWW/HTTP :: Site Management',
'Topic :: Office/Business :: Financial :: Point-Of-Sale',
'Topic :: Software Development :: Libraries',
]
)
|
<commit_before>#!/usr/bin/env python
"""
Erply-API
---------
Python wrapper for Erply API
"""
from distutils.core import setup
setup(
name='ErplyAPI',
version='0-2014.12.10',
description='Python wrapper for Erply API',
license='BSD',
author='Priit Laes',
author_email='plaes@plaes.org',
long_description=__doc__,
install_requires=['requests'],
py_modules=['erply_api'],
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Internet :: WWW/HTTP :: Site Management',
'Topic :: Office/Business :: Financial :: Point-Of-Sale',
'Topic :: Software Development :: Libraries',
]
)
<commit_msg>Move on to new development cycle<commit_after>
|
#!/usr/bin/env python
"""
Erply-API
---------
Python wrapper for Erply API
"""
from distutils.core import setup
setup(
name='ErplyAPI',
version='0-2014.12.10-dev',
description='Python wrapper for Erply API',
license='BSD',
author='Priit Laes',
author_email='plaes@plaes.org',
long_description=__doc__,
install_requires=['requests'],
py_modules=['erply_api'],
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Internet :: WWW/HTTP :: Site Management',
'Topic :: Office/Business :: Financial :: Point-Of-Sale',
'Topic :: Software Development :: Libraries',
]
)
|
#!/usr/bin/env python
"""
Erply-API
---------
Python wrapper for Erply API
"""
from distutils.core import setup
setup(
name='ErplyAPI',
version='0-2014.12.10',
description='Python wrapper for Erply API',
license='BSD',
author='Priit Laes',
author_email='plaes@plaes.org',
long_description=__doc__,
install_requires=['requests'],
py_modules=['erply_api'],
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Internet :: WWW/HTTP :: Site Management',
'Topic :: Office/Business :: Financial :: Point-Of-Sale',
'Topic :: Software Development :: Libraries',
]
)
Move on to new development cycle#!/usr/bin/env python
"""
Erply-API
---------
Python wrapper for Erply API
"""
from distutils.core import setup
setup(
name='ErplyAPI',
version='0-2014.12.10-dev',
description='Python wrapper for Erply API',
license='BSD',
author='Priit Laes',
author_email='plaes@plaes.org',
long_description=__doc__,
install_requires=['requests'],
py_modules=['erply_api'],
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Internet :: WWW/HTTP :: Site Management',
'Topic :: Office/Business :: Financial :: Point-Of-Sale',
'Topic :: Software Development :: Libraries',
]
)
|
<commit_before>#!/usr/bin/env python
"""
Erply-API
---------
Python wrapper for Erply API
"""
from distutils.core import setup
setup(
name='ErplyAPI',
version='0-2014.12.10',
description='Python wrapper for Erply API',
license='BSD',
author='Priit Laes',
author_email='plaes@plaes.org',
long_description=__doc__,
install_requires=['requests'],
py_modules=['erply_api'],
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Internet :: WWW/HTTP :: Site Management',
'Topic :: Office/Business :: Financial :: Point-Of-Sale',
'Topic :: Software Development :: Libraries',
]
)
<commit_msg>Move on to new development cycle<commit_after>#!/usr/bin/env python
"""
Erply-API
---------
Python wrapper for Erply API
"""
from distutils.core import setup
setup(
name='ErplyAPI',
version='0-2014.12.10-dev',
description='Python wrapper for Erply API',
license='BSD',
author='Priit Laes',
author_email='plaes@plaes.org',
long_description=__doc__,
install_requires=['requests'],
py_modules=['erply_api'],
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Internet :: WWW/HTTP :: Site Management',
'Topic :: Office/Business :: Financial :: Point-Of-Sale',
'Topic :: Software Development :: Libraries',
]
)
|
b33408e590dc9fa5ae47d728893a5a007406207f
|
setup.py
|
setup.py
|
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
from setuptools import setup
if sys.argv[-1] == 'setup.py':
print('To install, run \'python setup.py install\'')
print()
sys.path.insert(0, 'potterscript')
import release
if __name__ == "__main__":
setup(
name = release.name,
version = release.__version__,
author = release.__author__,
author_email = release.__email__,
description = release.__description__,
url='https://github.com/OrkoHunter/PotterScript',
keywords='Harry Potter Programming Language Potter Script',
packages = ['potterscript'],
license = 'MIT License',
entry_points = {
'console_scripts': [
'potterscript = potterscript.pottershell:main',
]
},
install_requires = [],
test_suite = 'nose.collector',
tests_require = ['nose>=0.10.1']
)
|
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
from setuptools import setup
if sys.argv[-1] == 'setup.py':
print('To install, run \'python setup.py install\'')
print()
if sys.version[0] < '3':
print("Please install with Python 3. Aborting installation.")
sys.exit(0)
sys.path.insert(0, 'potterscript')
import release
if __name__ == "__main__":
setup(
name = release.name,
version = release.__version__,
author = release.__author__,
author_email = release.__email__,
description = release.__description__,
url='https://github.com/OrkoHunter/PotterScript',
keywords='Harry Potter Programming Language Potter Script',
packages = ['potterscript'],
license = 'MIT License',
entry_points = {
'console_scripts': [
'potterscript = potterscript.pottershell:main',
]
},
install_requires = [],
test_suite = 'nose.collector',
tests_require = ['nose>=0.10.1']
)
|
Install only with python 3
|
Install only with python 3
|
Python
|
mit
|
OrkoHunter/PotterScript
|
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
from setuptools import setup
if sys.argv[-1] == 'setup.py':
print('To install, run \'python setup.py install\'')
print()
sys.path.insert(0, 'potterscript')
import release
if __name__ == "__main__":
setup(
name = release.name,
version = release.__version__,
author = release.__author__,
author_email = release.__email__,
description = release.__description__,
url='https://github.com/OrkoHunter/PotterScript',
keywords='Harry Potter Programming Language Potter Script',
packages = ['potterscript'],
license = 'MIT License',
entry_points = {
'console_scripts': [
'potterscript = potterscript.pottershell:main',
]
},
install_requires = [],
test_suite = 'nose.collector',
tests_require = ['nose>=0.10.1']
)
Install only with python 3
|
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
from setuptools import setup
if sys.argv[-1] == 'setup.py':
print('To install, run \'python setup.py install\'')
print()
if sys.version[0] < '3':
print("Please install with Python 3. Aborting installation.")
sys.exit(0)
sys.path.insert(0, 'potterscript')
import release
if __name__ == "__main__":
setup(
name = release.name,
version = release.__version__,
author = release.__author__,
author_email = release.__email__,
description = release.__description__,
url='https://github.com/OrkoHunter/PotterScript',
keywords='Harry Potter Programming Language Potter Script',
packages = ['potterscript'],
license = 'MIT License',
entry_points = {
'console_scripts': [
'potterscript = potterscript.pottershell:main',
]
},
install_requires = [],
test_suite = 'nose.collector',
tests_require = ['nose>=0.10.1']
)
|
<commit_before>#! /usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
from setuptools import setup
if sys.argv[-1] == 'setup.py':
print('To install, run \'python setup.py install\'')
print()
sys.path.insert(0, 'potterscript')
import release
if __name__ == "__main__":
setup(
name = release.name,
version = release.__version__,
author = release.__author__,
author_email = release.__email__,
description = release.__description__,
url='https://github.com/OrkoHunter/PotterScript',
keywords='Harry Potter Programming Language Potter Script',
packages = ['potterscript'],
license = 'MIT License',
entry_points = {
'console_scripts': [
'potterscript = potterscript.pottershell:main',
]
},
install_requires = [],
test_suite = 'nose.collector',
tests_require = ['nose>=0.10.1']
)
<commit_msg>Install only with python 3<commit_after>
|
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
from setuptools import setup
if sys.argv[-1] == 'setup.py':
print('To install, run \'python setup.py install\'')
print()
if sys.version[0] < '3':
print("Please install with Python 3. Aborting installation.")
sys.exit(0)
sys.path.insert(0, 'potterscript')
import release
if __name__ == "__main__":
setup(
name = release.name,
version = release.__version__,
author = release.__author__,
author_email = release.__email__,
description = release.__description__,
url='https://github.com/OrkoHunter/PotterScript',
keywords='Harry Potter Programming Language Potter Script',
packages = ['potterscript'],
license = 'MIT License',
entry_points = {
'console_scripts': [
'potterscript = potterscript.pottershell:main',
]
},
install_requires = [],
test_suite = 'nose.collector',
tests_require = ['nose>=0.10.1']
)
|
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
from setuptools import setup
if sys.argv[-1] == 'setup.py':
print('To install, run \'python setup.py install\'')
print()
sys.path.insert(0, 'potterscript')
import release
if __name__ == "__main__":
setup(
name = release.name,
version = release.__version__,
author = release.__author__,
author_email = release.__email__,
description = release.__description__,
url='https://github.com/OrkoHunter/PotterScript',
keywords='Harry Potter Programming Language Potter Script',
packages = ['potterscript'],
license = 'MIT License',
entry_points = {
'console_scripts': [
'potterscript = potterscript.pottershell:main',
]
},
install_requires = [],
test_suite = 'nose.collector',
tests_require = ['nose>=0.10.1']
)
Install only with python 3#! /usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
from setuptools import setup
if sys.argv[-1] == 'setup.py':
print('To install, run \'python setup.py install\'')
print()
if sys.version[0] < '3':
print("Please install with Python 3. Aborting installation.")
sys.exit(0)
sys.path.insert(0, 'potterscript')
import release
if __name__ == "__main__":
setup(
name = release.name,
version = release.__version__,
author = release.__author__,
author_email = release.__email__,
description = release.__description__,
url='https://github.com/OrkoHunter/PotterScript',
keywords='Harry Potter Programming Language Potter Script',
packages = ['potterscript'],
license = 'MIT License',
entry_points = {
'console_scripts': [
'potterscript = potterscript.pottershell:main',
]
},
install_requires = [],
test_suite = 'nose.collector',
tests_require = ['nose>=0.10.1']
)
|
<commit_before>#! /usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
from setuptools import setup
if sys.argv[-1] == 'setup.py':
print('To install, run \'python setup.py install\'')
print()
sys.path.insert(0, 'potterscript')
import release
if __name__ == "__main__":
setup(
name = release.name,
version = release.__version__,
author = release.__author__,
author_email = release.__email__,
description = release.__description__,
url='https://github.com/OrkoHunter/PotterScript',
keywords='Harry Potter Programming Language Potter Script',
packages = ['potterscript'],
license = 'MIT License',
entry_points = {
'console_scripts': [
'potterscript = potterscript.pottershell:main',
]
},
install_requires = [],
test_suite = 'nose.collector',
tests_require = ['nose>=0.10.1']
)
<commit_msg>Install only with python 3<commit_after>#! /usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
from setuptools import setup
if sys.argv[-1] == 'setup.py':
print('To install, run \'python setup.py install\'')
print()
if sys.version[0] < '3':
print("Please install with Python 3. Aborting installation.")
sys.exit(0)
sys.path.insert(0, 'potterscript')
import release
if __name__ == "__main__":
setup(
name = release.name,
version = release.__version__,
author = release.__author__,
author_email = release.__email__,
description = release.__description__,
url='https://github.com/OrkoHunter/PotterScript',
keywords='Harry Potter Programming Language Potter Script',
packages = ['potterscript'],
license = 'MIT License',
entry_points = {
'console_scripts': [
'potterscript = potterscript.pottershell:main',
]
},
install_requires = [],
test_suite = 'nose.collector',
tests_require = ['nose>=0.10.1']
)
|
64744628725d20bda7f5c931db81037e3de8efcb
|
setup.py
|
setup.py
|
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
CHANGES = open(os.path.join(here, 'CHANGES.txt')).read()
requires = [
'pyramid',
'pyramid_debugtoolbar',
'waitress',
]
setup(name='acmeio',
version='0.0',
description='acmeio',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='',
author_email='',
url='',
keywords='web pyramid pylons',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="acmeio",
entry_points="""\
[paste.app_factory]
main = acmeio:main
""",
)
|
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
CHANGES = open(os.path.join(here, 'CHANGES.txt')).read()
requires = [
'pyramid',
'pyramid_debugtoolbar',
'waitress',
# PyBit and dependencies
'pybit',
'psycopg2',
'amqplib',
'jsonpickle',
]
setup(name='acmeio',
version='0.0',
description='acmeio',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='',
author_email='',
url='',
keywords='web pyramid pylons',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="acmeio",
entry_points="""\
[paste.app_factory]
main = acmeio:main
""",
)
|
Add pybit and its dependencies to this package's dependency list.
|
Add pybit and its dependencies to this package's dependency list.
|
Python
|
agpl-3.0
|
Connexions/acmeio
|
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
CHANGES = open(os.path.join(here, 'CHANGES.txt')).read()
requires = [
'pyramid',
'pyramid_debugtoolbar',
'waitress',
]
setup(name='acmeio',
version='0.0',
description='acmeio',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='',
author_email='',
url='',
keywords='web pyramid pylons',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="acmeio",
entry_points="""\
[paste.app_factory]
main = acmeio:main
""",
)
Add pybit and its dependencies to this package's dependency list.
|
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
CHANGES = open(os.path.join(here, 'CHANGES.txt')).read()
requires = [
'pyramid',
'pyramid_debugtoolbar',
'waitress',
# PyBit and dependencies
'pybit',
'psycopg2',
'amqplib',
'jsonpickle',
]
setup(name='acmeio',
version='0.0',
description='acmeio',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='',
author_email='',
url='',
keywords='web pyramid pylons',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="acmeio",
entry_points="""\
[paste.app_factory]
main = acmeio:main
""",
)
|
<commit_before>import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
CHANGES = open(os.path.join(here, 'CHANGES.txt')).read()
requires = [
'pyramid',
'pyramid_debugtoolbar',
'waitress',
]
setup(name='acmeio',
version='0.0',
description='acmeio',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='',
author_email='',
url='',
keywords='web pyramid pylons',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="acmeio",
entry_points="""\
[paste.app_factory]
main = acmeio:main
""",
)
<commit_msg>Add pybit and its dependencies to this package's dependency list.<commit_after>
|
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
CHANGES = open(os.path.join(here, 'CHANGES.txt')).read()
requires = [
'pyramid',
'pyramid_debugtoolbar',
'waitress',
# PyBit and dependencies
'pybit',
'psycopg2',
'amqplib',
'jsonpickle',
]
setup(name='acmeio',
version='0.0',
description='acmeio',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='',
author_email='',
url='',
keywords='web pyramid pylons',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="acmeio",
entry_points="""\
[paste.app_factory]
main = acmeio:main
""",
)
|
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
CHANGES = open(os.path.join(here, 'CHANGES.txt')).read()
requires = [
'pyramid',
'pyramid_debugtoolbar',
'waitress',
]
setup(name='acmeio',
version='0.0',
description='acmeio',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='',
author_email='',
url='',
keywords='web pyramid pylons',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="acmeio",
entry_points="""\
[paste.app_factory]
main = acmeio:main
""",
)
Add pybit and its dependencies to this package's dependency list.import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
CHANGES = open(os.path.join(here, 'CHANGES.txt')).read()
requires = [
'pyramid',
'pyramid_debugtoolbar',
'waitress',
# PyBit and dependencies
'pybit',
'psycopg2',
'amqplib',
'jsonpickle',
]
setup(name='acmeio',
version='0.0',
description='acmeio',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='',
author_email='',
url='',
keywords='web pyramid pylons',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="acmeio",
entry_points="""\
[paste.app_factory]
main = acmeio:main
""",
)
|
<commit_before>import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
CHANGES = open(os.path.join(here, 'CHANGES.txt')).read()
requires = [
'pyramid',
'pyramid_debugtoolbar',
'waitress',
]
setup(name='acmeio',
version='0.0',
description='acmeio',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='',
author_email='',
url='',
keywords='web pyramid pylons',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="acmeio",
entry_points="""\
[paste.app_factory]
main = acmeio:main
""",
)
<commit_msg>Add pybit and its dependencies to this package's dependency list.<commit_after>import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
CHANGES = open(os.path.join(here, 'CHANGES.txt')).read()
requires = [
'pyramid',
'pyramid_debugtoolbar',
'waitress',
# PyBit and dependencies
'pybit',
'psycopg2',
'amqplib',
'jsonpickle',
]
setup(name='acmeio',
version='0.0',
description='acmeio',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='',
author_email='',
url='',
keywords='web pyramid pylons',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="acmeio",
entry_points="""\
[paste.app_factory]
main = acmeio:main
""",
)
|
5d0d12b90d530cdeab154153a79cea00c9d6df98
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import os
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(name='django-csv-importer',
version='0.1.3.4',
description='Convert csv files into python object or django model',
author='Anthony Tresontani',
author_email='dev.tresontani@gmail.com',
long_description =read('README.txt'),
license = "BSD",
keywords = "CSV Django loader",
packages=['csvImporter'],
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: BSD License",
],
)
|
#!/usr/bin/env python
import os
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(name='django-csv-importer',
version='0.1.3.3',
description='Convert csv files into python object or django model',
author='Anthony Tresontani',
author_email='dev.tresontani@gmail.com',
long_description =read('README.txt'),
license = "BSD",
keywords = "CSV Django loader",
packages=['csvImporter'],
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: BSD License",
],
)
|
Set the version to the right one
|
Set the version to the right one
|
Python
|
bsd-3-clause
|
anthony-tresontani/csv_importer,anthony-tresontani/django-adaptors
|
#!/usr/bin/env python
import os
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(name='django-csv-importer',
version='0.1.3.4',
description='Convert csv files into python object or django model',
author='Anthony Tresontani',
author_email='dev.tresontani@gmail.com',
long_description =read('README.txt'),
license = "BSD",
keywords = "CSV Django loader",
packages=['csvImporter'],
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: BSD License",
],
)
Set the version to the right one
|
#!/usr/bin/env python
import os
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(name='django-csv-importer',
version='0.1.3.3',
description='Convert csv files into python object or django model',
author='Anthony Tresontani',
author_email='dev.tresontani@gmail.com',
long_description =read('README.txt'),
license = "BSD",
keywords = "CSV Django loader",
packages=['csvImporter'],
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: BSD License",
],
)
|
<commit_before>#!/usr/bin/env python
import os
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(name='django-csv-importer',
version='0.1.3.4',
description='Convert csv files into python object or django model',
author='Anthony Tresontani',
author_email='dev.tresontani@gmail.com',
long_description =read('README.txt'),
license = "BSD",
keywords = "CSV Django loader",
packages=['csvImporter'],
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: BSD License",
],
)
<commit_msg>Set the version to the right one<commit_after>
|
#!/usr/bin/env python
import os
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(name='django-csv-importer',
version='0.1.3.3',
description='Convert csv files into python object or django model',
author='Anthony Tresontani',
author_email='dev.tresontani@gmail.com',
long_description =read('README.txt'),
license = "BSD",
keywords = "CSV Django loader",
packages=['csvImporter'],
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: BSD License",
],
)
|
#!/usr/bin/env python
import os
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(name='django-csv-importer',
version='0.1.3.4',
description='Convert csv files into python object or django model',
author='Anthony Tresontani',
author_email='dev.tresontani@gmail.com',
long_description =read('README.txt'),
license = "BSD",
keywords = "CSV Django loader",
packages=['csvImporter'],
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: BSD License",
],
)
Set the version to the right one#!/usr/bin/env python
import os
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(name='django-csv-importer',
version='0.1.3.3',
description='Convert csv files into python object or django model',
author='Anthony Tresontani',
author_email='dev.tresontani@gmail.com',
long_description =read('README.txt'),
license = "BSD",
keywords = "CSV Django loader",
packages=['csvImporter'],
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: BSD License",
],
)
|
<commit_before>#!/usr/bin/env python
import os
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(name='django-csv-importer',
version='0.1.3.4',
description='Convert csv files into python object or django model',
author='Anthony Tresontani',
author_email='dev.tresontani@gmail.com',
long_description =read('README.txt'),
license = "BSD",
keywords = "CSV Django loader",
packages=['csvImporter'],
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: BSD License",
],
)
<commit_msg>Set the version to the right one<commit_after>#!/usr/bin/env python
import os
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(name='django-csv-importer',
version='0.1.3.3',
description='Convert csv files into python object or django model',
author='Anthony Tresontani',
author_email='dev.tresontani@gmail.com',
long_description =read('README.txt'),
license = "BSD",
keywords = "CSV Django loader",
packages=['csvImporter'],
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: BSD License",
],
)
|
cefce07b066939f72b3cb51d52c6d496ab99466a
|
setup.py
|
setup.py
|
from os import path
from setuptools import setup
here = path.abspath(path.dirname(__file__))
# Get the long description from the relevant file
with open(path.join(here, "README.rst")) as f:
long_description = f.read()
classifiers = ["License :: OSI Approved :: Apache Software License",
"Topic :: Software Development :: Testing",
"Operating System :: Microsoft :: Windows",
"Operating System :: MacOS :: MacOS X"] + [
("Programming Language :: Python :: %s" % x) for x in
"2.7 3.4".split()]
def main():
setup(
name="easyium",
description="easy use of selenium and appium",
long_description=long_description,
install_requires = ['selenium>=2.52.0', 'appium>=0.21'],
version="1.1.5",
keywords="selenium appium test testing framework automation",
author="Karl Gong",
author_email="karl.gong@outlook.com",
url="https://github.com/KarlGong/easyium-python",
license="Apache",
classifiers=classifiers,
packages=["easyium"],
zip_safe=False,
)
if __name__ == "__main__":
main()
|
from os import path
from setuptools import setup
here = path.abspath(path.dirname(__file__))
# Get the long description from the relevant file
with open(path.join(here, "README.rst")) as f:
long_description = f.read()
classifiers = ["License :: OSI Approved :: Apache Software License",
"Topic :: Software Development :: Testing",
"Operating System :: Microsoft :: Windows",
"Operating System :: MacOS :: MacOS X"] + [
("Programming Language :: Python :: %s" % x) for x in
"2.7 3.4".split()]
def main():
setup(
name="easyium",
description="easy use of selenium and appium",
long_description=long_description,
install_requires = ['selenium>=2.52.0', 'appium-python-client>=0.21'],
version="1.1.5",
keywords="selenium appium test testing framework automation",
author="Karl Gong",
author_email="karl.gong@outlook.com",
url="https://github.com/KarlGong/easyium-python",
license="Apache",
classifiers=classifiers,
packages=["easyium"],
zip_safe=False,
)
if __name__ == "__main__":
main()
|
Change the appium package name.
|
Change the appium package name.
|
Python
|
apache-2.0
|
KarlGong/easyium-python,KarlGong/easyium
|
from os import path
from setuptools import setup
here = path.abspath(path.dirname(__file__))
# Get the long description from the relevant file
with open(path.join(here, "README.rst")) as f:
long_description = f.read()
classifiers = ["License :: OSI Approved :: Apache Software License",
"Topic :: Software Development :: Testing",
"Operating System :: Microsoft :: Windows",
"Operating System :: MacOS :: MacOS X"] + [
("Programming Language :: Python :: %s" % x) for x in
"2.7 3.4".split()]
def main():
setup(
name="easyium",
description="easy use of selenium and appium",
long_description=long_description,
install_requires = ['selenium>=2.52.0', 'appium>=0.21'],
version="1.1.5",
keywords="selenium appium test testing framework automation",
author="Karl Gong",
author_email="karl.gong@outlook.com",
url="https://github.com/KarlGong/easyium-python",
license="Apache",
classifiers=classifiers,
packages=["easyium"],
zip_safe=False,
)
if __name__ == "__main__":
main()Change the appium package name.
|
from os import path
from setuptools import setup
here = path.abspath(path.dirname(__file__))
# Get the long description from the relevant file
with open(path.join(here, "README.rst")) as f:
long_description = f.read()
classifiers = ["License :: OSI Approved :: Apache Software License",
"Topic :: Software Development :: Testing",
"Operating System :: Microsoft :: Windows",
"Operating System :: MacOS :: MacOS X"] + [
("Programming Language :: Python :: %s" % x) for x in
"2.7 3.4".split()]
def main():
setup(
name="easyium",
description="easy use of selenium and appium",
long_description=long_description,
install_requires = ['selenium>=2.52.0', 'appium-python-client>=0.21'],
version="1.1.5",
keywords="selenium appium test testing framework automation",
author="Karl Gong",
author_email="karl.gong@outlook.com",
url="https://github.com/KarlGong/easyium-python",
license="Apache",
classifiers=classifiers,
packages=["easyium"],
zip_safe=False,
)
if __name__ == "__main__":
main()
|
<commit_before>from os import path
from setuptools import setup
here = path.abspath(path.dirname(__file__))
# Get the long description from the relevant file
with open(path.join(here, "README.rst")) as f:
long_description = f.read()
classifiers = ["License :: OSI Approved :: Apache Software License",
"Topic :: Software Development :: Testing",
"Operating System :: Microsoft :: Windows",
"Operating System :: MacOS :: MacOS X"] + [
("Programming Language :: Python :: %s" % x) for x in
"2.7 3.4".split()]
def main():
setup(
name="easyium",
description="easy use of selenium and appium",
long_description=long_description,
install_requires = ['selenium>=2.52.0', 'appium>=0.21'],
version="1.1.5",
keywords="selenium appium test testing framework automation",
author="Karl Gong",
author_email="karl.gong@outlook.com",
url="https://github.com/KarlGong/easyium-python",
license="Apache",
classifiers=classifiers,
packages=["easyium"],
zip_safe=False,
)
if __name__ == "__main__":
main()<commit_msg>Change the appium package name.<commit_after>
|
from os import path
from setuptools import setup
here = path.abspath(path.dirname(__file__))
# Get the long description from the relevant file
with open(path.join(here, "README.rst")) as f:
long_description = f.read()
classifiers = ["License :: OSI Approved :: Apache Software License",
"Topic :: Software Development :: Testing",
"Operating System :: Microsoft :: Windows",
"Operating System :: MacOS :: MacOS X"] + [
("Programming Language :: Python :: %s" % x) for x in
"2.7 3.4".split()]
def main():
setup(
name="easyium",
description="easy use of selenium and appium",
long_description=long_description,
install_requires = ['selenium>=2.52.0', 'appium-python-client>=0.21'],
version="1.1.5",
keywords="selenium appium test testing framework automation",
author="Karl Gong",
author_email="karl.gong@outlook.com",
url="https://github.com/KarlGong/easyium-python",
license="Apache",
classifiers=classifiers,
packages=["easyium"],
zip_safe=False,
)
if __name__ == "__main__":
main()
|
from os import path
from setuptools import setup
here = path.abspath(path.dirname(__file__))
# Get the long description from the relevant file
with open(path.join(here, "README.rst")) as f:
long_description = f.read()
classifiers = ["License :: OSI Approved :: Apache Software License",
"Topic :: Software Development :: Testing",
"Operating System :: Microsoft :: Windows",
"Operating System :: MacOS :: MacOS X"] + [
("Programming Language :: Python :: %s" % x) for x in
"2.7 3.4".split()]
def main():
setup(
name="easyium",
description="easy use of selenium and appium",
long_description=long_description,
install_requires = ['selenium>=2.52.0', 'appium>=0.21'],
version="1.1.5",
keywords="selenium appium test testing framework automation",
author="Karl Gong",
author_email="karl.gong@outlook.com",
url="https://github.com/KarlGong/easyium-python",
license="Apache",
classifiers=classifiers,
packages=["easyium"],
zip_safe=False,
)
if __name__ == "__main__":
main()Change the appium package name.from os import path
from setuptools import setup
here = path.abspath(path.dirname(__file__))
# Get the long description from the relevant file
with open(path.join(here, "README.rst")) as f:
long_description = f.read()
classifiers = ["License :: OSI Approved :: Apache Software License",
"Topic :: Software Development :: Testing",
"Operating System :: Microsoft :: Windows",
"Operating System :: MacOS :: MacOS X"] + [
("Programming Language :: Python :: %s" % x) for x in
"2.7 3.4".split()]
def main():
setup(
name="easyium",
description="easy use of selenium and appium",
long_description=long_description,
install_requires = ['selenium>=2.52.0', 'appium-python-client>=0.21'],
version="1.1.5",
keywords="selenium appium test testing framework automation",
author="Karl Gong",
author_email="karl.gong@outlook.com",
url="https://github.com/KarlGong/easyium-python",
license="Apache",
classifiers=classifiers,
packages=["easyium"],
zip_safe=False,
)
if __name__ == "__main__":
main()
|
<commit_before>from os import path
from setuptools import setup
here = path.abspath(path.dirname(__file__))
# Get the long description from the relevant file
with open(path.join(here, "README.rst")) as f:
long_description = f.read()
classifiers = ["License :: OSI Approved :: Apache Software License",
"Topic :: Software Development :: Testing",
"Operating System :: Microsoft :: Windows",
"Operating System :: MacOS :: MacOS X"] + [
("Programming Language :: Python :: %s" % x) for x in
"2.7 3.4".split()]
def main():
setup(
name="easyium",
description="easy use of selenium and appium",
long_description=long_description,
install_requires = ['selenium>=2.52.0', 'appium>=0.21'],
version="1.1.5",
keywords="selenium appium test testing framework automation",
author="Karl Gong",
author_email="karl.gong@outlook.com",
url="https://github.com/KarlGong/easyium-python",
license="Apache",
classifiers=classifiers,
packages=["easyium"],
zip_safe=False,
)
if __name__ == "__main__":
main()<commit_msg>Change the appium package name.<commit_after>from os import path
from setuptools import setup
here = path.abspath(path.dirname(__file__))
# Get the long description from the relevant file
with open(path.join(here, "README.rst")) as f:
long_description = f.read()
classifiers = ["License :: OSI Approved :: Apache Software License",
"Topic :: Software Development :: Testing",
"Operating System :: Microsoft :: Windows",
"Operating System :: MacOS :: MacOS X"] + [
("Programming Language :: Python :: %s" % x) for x in
"2.7 3.4".split()]
def main():
setup(
name="easyium",
description="easy use of selenium and appium",
long_description=long_description,
install_requires = ['selenium>=2.52.0', 'appium-python-client>=0.21'],
version="1.1.5",
keywords="selenium appium test testing framework automation",
author="Karl Gong",
author_email="karl.gong@outlook.com",
url="https://github.com/KarlGong/easyium-python",
license="Apache",
classifiers=classifiers,
packages=["easyium"],
zip_safe=False,
)
if __name__ == "__main__":
main()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.