commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4b43a2f50740bbeab95f64137eb8993ed8ac4617
|
other/password_generator.py
|
other/password_generator.py
|
import string
from random import *
letters = string.ascii_letters
digits = string.digits
symbols = string.punctuation
chars = letters + digits + symbols
min_length = 8
max_length = 16
password = ''.join(choice(chars) for x in range(randint(min_length, max_length)))
print('Password: %s' % password)
print('[ If you are thinking of using this passsword, You better save it. ]')
|
import string
import random
letters = [letter for letter in string.ascii_letters]
digits = [digit for digit in string.digits]
symbols = [symbol for symbol in string.punctuation]
chars = letters + digits + symbols
random.shuffle(chars)
min_length = 8
max_length = 16
password = ''.join(random.choice(chars) for x in range(random.randint(min_length, max_length)))
print('Password: ' + password)
print('[ If you are thinking of using this passsword, You better save it. ]')
|
Add another randomness into the password generator
|
Add another randomness into the password generator
Uses import random for namespace cleanliness
Uses list instead of string for 'chars' variable in order to shuffle, increases randomness
Instead of string formatting, uses string concatenation because (currently) it is simpler
|
Python
|
mit
|
TheAlgorithms/Python
|
import string
from random import *
letters = string.ascii_letters
digits = string.digits
symbols = string.punctuation
chars = letters + digits + symbols
min_length = 8
max_length = 16
password = ''.join(choice(chars) for x in range(randint(min_length, max_length)))
print('Password: %s' % password)
print('[ If you are thinking of using this passsword, You better save it. ]')
Add another randomness into the password generator
Uses import random for namespace cleanliness
Uses list instead of string for 'chars' variable in order to shuffle, increases randomness
Instead of string formatting, uses string concatenation because (currently) it is simpler
|
import string
import random
letters = [letter for letter in string.ascii_letters]
digits = [digit for digit in string.digits]
symbols = [symbol for symbol in string.punctuation]
chars = letters + digits + symbols
random.shuffle(chars)
min_length = 8
max_length = 16
password = ''.join(random.choice(chars) for x in range(random.randint(min_length, max_length)))
print('Password: ' + password)
print('[ If you are thinking of using this passsword, You better save it. ]')
|
<commit_before>import string
from random import *
letters = string.ascii_letters
digits = string.digits
symbols = string.punctuation
chars = letters + digits + symbols
min_length = 8
max_length = 16
password = ''.join(choice(chars) for x in range(randint(min_length, max_length)))
print('Password: %s' % password)
print('[ If you are thinking of using this passsword, You better save it. ]')
<commit_msg>Add another randomness into the password generator
Uses import random for namespace cleanliness
Uses list instead of string for 'chars' variable in order to shuffle, increases randomness
Instead of string formatting, uses string concatenation because (currently) it is simpler<commit_after>
|
import string
import random
letters = [letter for letter in string.ascii_letters]
digits = [digit for digit in string.digits]
symbols = [symbol for symbol in string.punctuation]
chars = letters + digits + symbols
random.shuffle(chars)
min_length = 8
max_length = 16
password = ''.join(random.choice(chars) for x in range(random.randint(min_length, max_length)))
print('Password: ' + password)
print('[ If you are thinking of using this passsword, You better save it. ]')
|
import string
from random import *
letters = string.ascii_letters
digits = string.digits
symbols = string.punctuation
chars = letters + digits + symbols
min_length = 8
max_length = 16
password = ''.join(choice(chars) for x in range(randint(min_length, max_length)))
print('Password: %s' % password)
print('[ If you are thinking of using this passsword, You better save it. ]')
Add another randomness into the password generator
Uses import random for namespace cleanliness
Uses list instead of string for 'chars' variable in order to shuffle, increases randomness
Instead of string formatting, uses string concatenation because (currently) it is simplerimport string
import random
letters = [letter for letter in string.ascii_letters]
digits = [digit for digit in string.digits]
symbols = [symbol for symbol in string.punctuation]
chars = letters + digits + symbols
random.shuffle(chars)
min_length = 8
max_length = 16
password = ''.join(random.choice(chars) for x in range(random.randint(min_length, max_length)))
print('Password: ' + password)
print('[ If you are thinking of using this passsword, You better save it. ]')
|
<commit_before>import string
from random import *
letters = string.ascii_letters
digits = string.digits
symbols = string.punctuation
chars = letters + digits + symbols
min_length = 8
max_length = 16
password = ''.join(choice(chars) for x in range(randint(min_length, max_length)))
print('Password: %s' % password)
print('[ If you are thinking of using this passsword, You better save it. ]')
<commit_msg>Add another randomness into the password generator
Uses import random for namespace cleanliness
Uses list instead of string for 'chars' variable in order to shuffle, increases randomness
Instead of string formatting, uses string concatenation because (currently) it is simpler<commit_after>import string
import random
letters = [letter for letter in string.ascii_letters]
digits = [digit for digit in string.digits]
symbols = [symbol for symbol in string.punctuation]
chars = letters + digits + symbols
random.shuffle(chars)
min_length = 8
max_length = 16
password = ''.join(random.choice(chars) for x in range(random.randint(min_length, max_length)))
print('Password: ' + password)
print('[ If you are thinking of using this passsword, You better save it. ]')
|
9e02f92fc19b7f833b25d0273143e98261a3b484
|
democracy/admin/__init__.py
|
democracy/admin/__init__.py
|
from django.contrib import admin
from nested_admin.nested import NestedAdmin, NestedStackedInline
from democracy import models
# Inlines
class HearingImageInline(NestedStackedInline):
model = models.HearingImage
extra = 0
class SectionImageInline(NestedStackedInline):
model = models.SectionImage
extra = 0
class SectionInline(NestedStackedInline):
model = models.Section
extra = 1
inlines = [SectionImageInline]
# Admins
class HearingAdmin(NestedAdmin):
inlines = [HearingImageInline, SectionInline]
list_display = ("id", "published", "title", "open_at", "close_at", "force_closed")
list_filter = ("published",)
search_fields = ("id", "title")
# Wire it up!
admin.site.register(models.Label)
admin.site.register(models.Hearing, HearingAdmin)
|
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
from nested_admin.nested import NestedAdmin, NestedStackedInline
from democracy import models
# Inlines
class HearingImageInline(NestedStackedInline):
model = models.HearingImage
extra = 0
exclude = ("public", "title")
class SectionImageInline(NestedStackedInline):
model = models.SectionImage
extra = 0
exclude = ("public", "title")
class SectionInline(NestedStackedInline):
model = models.Section
extra = 1
inlines = [SectionImageInline]
exclude = ("public", "commenting",)
# Admins
class HearingAdmin(NestedAdmin):
inlines = [HearingImageInline, SectionInline]
list_display = ("id", "published", "title", "open_at", "close_at", "force_closed")
list_filter = ("published",)
search_fields = ("id", "title")
fieldsets = (
(None, {
"fields": ("title", "abstract", "labels", "id")
}),
(_("Availability"), {
"fields": ("published", "open_at", "close_at", "force_closed", "commenting")
}),
)
def save_related(self, request, form, formsets, change):
super().save_related(request, form, formsets, change)
hearing = form.instance
assert isinstance(hearing, models.Hearing)
hearing.sections.update(commenting=hearing.commenting)
class LabelAdmin(admin.ModelAdmin):
exclude = ("public",)
# Wire it up!
admin.site.register(models.Label, LabelAdmin)
admin.site.register(models.Hearing, HearingAdmin)
|
Hide unnecessary fields in the admins
|
Hide unnecessary fields in the admins
* Hide some unnecessary fields from Hearings
* Hide Public and Commenting flags from Sections
(Section commenting option follows that of hearings.)
* Hide Public and Title fields from images
* Hide Public field from labels
Refs #118
|
Python
|
mit
|
vikoivun/kerrokantasi,stephawe/kerrokantasi,stephawe/kerrokantasi,City-of-Helsinki/kerrokantasi,City-of-Helsinki/kerrokantasi,stephawe/kerrokantasi,vikoivun/kerrokantasi,City-of-Helsinki/kerrokantasi,City-of-Helsinki/kerrokantasi,vikoivun/kerrokantasi
|
from django.contrib import admin
from nested_admin.nested import NestedAdmin, NestedStackedInline
from democracy import models
# Inlines
class HearingImageInline(NestedStackedInline):
model = models.HearingImage
extra = 0
class SectionImageInline(NestedStackedInline):
model = models.SectionImage
extra = 0
class SectionInline(NestedStackedInline):
model = models.Section
extra = 1
inlines = [SectionImageInline]
# Admins
class HearingAdmin(NestedAdmin):
inlines = [HearingImageInline, SectionInline]
list_display = ("id", "published", "title", "open_at", "close_at", "force_closed")
list_filter = ("published",)
search_fields = ("id", "title")
# Wire it up!
admin.site.register(models.Label)
admin.site.register(models.Hearing, HearingAdmin)
Hide unnecessary fields in the admins
* Hide some unnecessary fields from Hearings
* Hide Public and Commenting flags from Sections
(Section commenting option follows that of hearings.)
* Hide Public and Title fields from images
* Hide Public field from labels
Refs #118
|
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
from nested_admin.nested import NestedAdmin, NestedStackedInline
from democracy import models
# Inlines
class HearingImageInline(NestedStackedInline):
model = models.HearingImage
extra = 0
exclude = ("public", "title")
class SectionImageInline(NestedStackedInline):
model = models.SectionImage
extra = 0
exclude = ("public", "title")
class SectionInline(NestedStackedInline):
model = models.Section
extra = 1
inlines = [SectionImageInline]
exclude = ("public", "commenting",)
# Admins
class HearingAdmin(NestedAdmin):
inlines = [HearingImageInline, SectionInline]
list_display = ("id", "published", "title", "open_at", "close_at", "force_closed")
list_filter = ("published",)
search_fields = ("id", "title")
fieldsets = (
(None, {
"fields": ("title", "abstract", "labels", "id")
}),
(_("Availability"), {
"fields": ("published", "open_at", "close_at", "force_closed", "commenting")
}),
)
def save_related(self, request, form, formsets, change):
super().save_related(request, form, formsets, change)
hearing = form.instance
assert isinstance(hearing, models.Hearing)
hearing.sections.update(commenting=hearing.commenting)
class LabelAdmin(admin.ModelAdmin):
exclude = ("public",)
# Wire it up!
admin.site.register(models.Label, LabelAdmin)
admin.site.register(models.Hearing, HearingAdmin)
|
<commit_before>from django.contrib import admin
from nested_admin.nested import NestedAdmin, NestedStackedInline
from democracy import models
# Inlines
class HearingImageInline(NestedStackedInline):
model = models.HearingImage
extra = 0
class SectionImageInline(NestedStackedInline):
model = models.SectionImage
extra = 0
class SectionInline(NestedStackedInline):
model = models.Section
extra = 1
inlines = [SectionImageInline]
# Admins
class HearingAdmin(NestedAdmin):
inlines = [HearingImageInline, SectionInline]
list_display = ("id", "published", "title", "open_at", "close_at", "force_closed")
list_filter = ("published",)
search_fields = ("id", "title")
# Wire it up!
admin.site.register(models.Label)
admin.site.register(models.Hearing, HearingAdmin)
<commit_msg>Hide unnecessary fields in the admins
* Hide some unnecessary fields from Hearings
* Hide Public and Commenting flags from Sections
(Section commenting option follows that of hearings.)
* Hide Public and Title fields from images
* Hide Public field from labels
Refs #118<commit_after>
|
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
from nested_admin.nested import NestedAdmin, NestedStackedInline
from democracy import models
# Inlines
class HearingImageInline(NestedStackedInline):
model = models.HearingImage
extra = 0
exclude = ("public", "title")
class SectionImageInline(NestedStackedInline):
model = models.SectionImage
extra = 0
exclude = ("public", "title")
class SectionInline(NestedStackedInline):
model = models.Section
extra = 1
inlines = [SectionImageInline]
exclude = ("public", "commenting",)
# Admins
class HearingAdmin(NestedAdmin):
inlines = [HearingImageInline, SectionInline]
list_display = ("id", "published", "title", "open_at", "close_at", "force_closed")
list_filter = ("published",)
search_fields = ("id", "title")
fieldsets = (
(None, {
"fields": ("title", "abstract", "labels", "id")
}),
(_("Availability"), {
"fields": ("published", "open_at", "close_at", "force_closed", "commenting")
}),
)
def save_related(self, request, form, formsets, change):
super().save_related(request, form, formsets, change)
hearing = form.instance
assert isinstance(hearing, models.Hearing)
hearing.sections.update(commenting=hearing.commenting)
class LabelAdmin(admin.ModelAdmin):
exclude = ("public",)
# Wire it up!
admin.site.register(models.Label, LabelAdmin)
admin.site.register(models.Hearing, HearingAdmin)
|
from django.contrib import admin
from nested_admin.nested import NestedAdmin, NestedStackedInline
from democracy import models
# Inlines
class HearingImageInline(NestedStackedInline):
model = models.HearingImage
extra = 0
class SectionImageInline(NestedStackedInline):
model = models.SectionImage
extra = 0
class SectionInline(NestedStackedInline):
model = models.Section
extra = 1
inlines = [SectionImageInline]
# Admins
class HearingAdmin(NestedAdmin):
inlines = [HearingImageInline, SectionInline]
list_display = ("id", "published", "title", "open_at", "close_at", "force_closed")
list_filter = ("published",)
search_fields = ("id", "title")
# Wire it up!
admin.site.register(models.Label)
admin.site.register(models.Hearing, HearingAdmin)
Hide unnecessary fields in the admins
* Hide some unnecessary fields from Hearings
* Hide Public and Commenting flags from Sections
(Section commenting option follows that of hearings.)
* Hide Public and Title fields from images
* Hide Public field from labels
Refs #118from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
from nested_admin.nested import NestedAdmin, NestedStackedInline
from democracy import models
# Inlines
class HearingImageInline(NestedStackedInline):
model = models.HearingImage
extra = 0
exclude = ("public", "title")
class SectionImageInline(NestedStackedInline):
model = models.SectionImage
extra = 0
exclude = ("public", "title")
class SectionInline(NestedStackedInline):
model = models.Section
extra = 1
inlines = [SectionImageInline]
exclude = ("public", "commenting",)
# Admins
class HearingAdmin(NestedAdmin):
inlines = [HearingImageInline, SectionInline]
list_display = ("id", "published", "title", "open_at", "close_at", "force_closed")
list_filter = ("published",)
search_fields = ("id", "title")
fieldsets = (
(None, {
"fields": ("title", "abstract", "labels", "id")
}),
(_("Availability"), {
"fields": ("published", "open_at", "close_at", "force_closed", "commenting")
}),
)
def save_related(self, request, form, formsets, change):
super().save_related(request, form, formsets, change)
hearing = form.instance
assert isinstance(hearing, models.Hearing)
hearing.sections.update(commenting=hearing.commenting)
class LabelAdmin(admin.ModelAdmin):
exclude = ("public",)
# Wire it up!
admin.site.register(models.Label, LabelAdmin)
admin.site.register(models.Hearing, HearingAdmin)
|
<commit_before>from django.contrib import admin
from nested_admin.nested import NestedAdmin, NestedStackedInline
from democracy import models
# Inlines
class HearingImageInline(NestedStackedInline):
model = models.HearingImage
extra = 0
class SectionImageInline(NestedStackedInline):
model = models.SectionImage
extra = 0
class SectionInline(NestedStackedInline):
model = models.Section
extra = 1
inlines = [SectionImageInline]
# Admins
class HearingAdmin(NestedAdmin):
inlines = [HearingImageInline, SectionInline]
list_display = ("id", "published", "title", "open_at", "close_at", "force_closed")
list_filter = ("published",)
search_fields = ("id", "title")
# Wire it up!
admin.site.register(models.Label)
admin.site.register(models.Hearing, HearingAdmin)
<commit_msg>Hide unnecessary fields in the admins
* Hide some unnecessary fields from Hearings
* Hide Public and Commenting flags from Sections
(Section commenting option follows that of hearings.)
* Hide Public and Title fields from images
* Hide Public field from labels
Refs #118<commit_after>from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
from nested_admin.nested import NestedAdmin, NestedStackedInline
from democracy import models
# Inlines
class HearingImageInline(NestedStackedInline):
model = models.HearingImage
extra = 0
exclude = ("public", "title")
class SectionImageInline(NestedStackedInline):
model = models.SectionImage
extra = 0
exclude = ("public", "title")
class SectionInline(NestedStackedInline):
model = models.Section
extra = 1
inlines = [SectionImageInline]
exclude = ("public", "commenting",)
# Admins
class HearingAdmin(NestedAdmin):
inlines = [HearingImageInline, SectionInline]
list_display = ("id", "published", "title", "open_at", "close_at", "force_closed")
list_filter = ("published",)
search_fields = ("id", "title")
fieldsets = (
(None, {
"fields": ("title", "abstract", "labels", "id")
}),
(_("Availability"), {
"fields": ("published", "open_at", "close_at", "force_closed", "commenting")
}),
)
def save_related(self, request, form, formsets, change):
super().save_related(request, form, formsets, change)
hearing = form.instance
assert isinstance(hearing, models.Hearing)
hearing.sections.update(commenting=hearing.commenting)
class LabelAdmin(admin.ModelAdmin):
exclude = ("public",)
# Wire it up!
admin.site.register(models.Label, LabelAdmin)
admin.site.register(models.Hearing, HearingAdmin)
|
6b4e34a5091ec00dffb1add55fa8dc279cbc2c89
|
scattertext/frequencyreaders/DefaultBackgroundFrequencies.py
|
scattertext/frequencyreaders/DefaultBackgroundFrequencies.py
|
import pkgutil
from io import StringIO
import pandas as pd
from scipy.stats import rankdata
class BackgroundFrequencies(object):
@staticmethod
def get_background_frequency_df(frequency_path=None):
raise Exception
@classmethod
def get_background_rank_df(cls, frequency_path=None):
df = cls.get_background_frequency_df(frequency_path)
df['rank'] = rankdata(df.background, method='dense')
df['background'] = df['rank'] / df['rank'].max()
return df[['background']]
class DefaultBackgroundFrequencies(BackgroundFrequencies):
@staticmethod
def get_background_frequency_df(frequency_path=None):
if frequency_path:
unigram_freq_table_buf = open(frequency_path)
else:
unigram_freq_table_buf = StringIO(pkgutil.get_data('scattertext', 'data/count_1w.txt')
.decode('utf-8'))
to_ret = (pd.read_table(unigram_freq_table_buf,
names=['word', 'background'])
.sort_values(ascending=False, by='background')
.drop_duplicates(['word'])
.set_index('word'))
return to_ret
|
import pkgutil
from io import StringIO
import pandas as pd
from scipy.stats import rankdata
class BackgroundFrequencies(object):
@staticmethod
def get_background_frequency_df(frequency_path=None):
raise Exception
@classmethod
def get_background_rank_df(cls, frequency_path=None):
df = cls.get_background_frequency_df(frequency_path)
df['rank'] = rankdata(df.background, method='dense')
df['background'] = df['rank'] / df['rank'].max()
return df[['background']]
class DefaultBackgroundFrequencies(BackgroundFrequencies):
@staticmethod
def get_background_frequency_df(frequency_path=None):
if frequency_path:
unigram_freq_table_buf = open(frequency_path)
else:
unigram_freq_table_buf = StringIO(pkgutil.get_data('scattertext', 'data/count_1w.txt')
.decode('utf-8'))
to_ret = (pd.read_csv(unigram_freq_table_buf,
sep='\t',
names=['word', 'background'])
.sort_values(ascending=False, by='background')
.drop_duplicates(['word'])
.set_index('word'))
return to_ret
|
Fix FutureWarning: read_table is deprecated, use read_csv instead, passing sep='\t'
|
Fix FutureWarning: read_table is deprecated, use read_csv instead, passing sep='\t'
|
Python
|
apache-2.0
|
JasonKessler/scattertext,JasonKessler/scattertext,JasonKessler/scattertext,JasonKessler/scattertext
|
import pkgutil
from io import StringIO
import pandas as pd
from scipy.stats import rankdata
class BackgroundFrequencies(object):
@staticmethod
def get_background_frequency_df(frequency_path=None):
raise Exception
@classmethod
def get_background_rank_df(cls, frequency_path=None):
df = cls.get_background_frequency_df(frequency_path)
df['rank'] = rankdata(df.background, method='dense')
df['background'] = df['rank'] / df['rank'].max()
return df[['background']]
class DefaultBackgroundFrequencies(BackgroundFrequencies):
@staticmethod
def get_background_frequency_df(frequency_path=None):
if frequency_path:
unigram_freq_table_buf = open(frequency_path)
else:
unigram_freq_table_buf = StringIO(pkgutil.get_data('scattertext', 'data/count_1w.txt')
.decode('utf-8'))
to_ret = (pd.read_table(unigram_freq_table_buf,
names=['word', 'background'])
.sort_values(ascending=False, by='background')
.drop_duplicates(['word'])
.set_index('word'))
return to_ret
Fix FutureWarning: read_table is deprecated, use read_csv instead, passing sep='\t'
|
import pkgutil
from io import StringIO
import pandas as pd
from scipy.stats import rankdata
class BackgroundFrequencies(object):
@staticmethod
def get_background_frequency_df(frequency_path=None):
raise Exception
@classmethod
def get_background_rank_df(cls, frequency_path=None):
df = cls.get_background_frequency_df(frequency_path)
df['rank'] = rankdata(df.background, method='dense')
df['background'] = df['rank'] / df['rank'].max()
return df[['background']]
class DefaultBackgroundFrequencies(BackgroundFrequencies):
@staticmethod
def get_background_frequency_df(frequency_path=None):
if frequency_path:
unigram_freq_table_buf = open(frequency_path)
else:
unigram_freq_table_buf = StringIO(pkgutil.get_data('scattertext', 'data/count_1w.txt')
.decode('utf-8'))
to_ret = (pd.read_csv(unigram_freq_table_buf,
sep='\t',
names=['word', 'background'])
.sort_values(ascending=False, by='background')
.drop_duplicates(['word'])
.set_index('word'))
return to_ret
|
<commit_before>import pkgutil
from io import StringIO
import pandas as pd
from scipy.stats import rankdata
class BackgroundFrequencies(object):
@staticmethod
def get_background_frequency_df(frequency_path=None):
raise Exception
@classmethod
def get_background_rank_df(cls, frequency_path=None):
df = cls.get_background_frequency_df(frequency_path)
df['rank'] = rankdata(df.background, method='dense')
df['background'] = df['rank'] / df['rank'].max()
return df[['background']]
class DefaultBackgroundFrequencies(BackgroundFrequencies):
@staticmethod
def get_background_frequency_df(frequency_path=None):
if frequency_path:
unigram_freq_table_buf = open(frequency_path)
else:
unigram_freq_table_buf = StringIO(pkgutil.get_data('scattertext', 'data/count_1w.txt')
.decode('utf-8'))
to_ret = (pd.read_table(unigram_freq_table_buf,
names=['word', 'background'])
.sort_values(ascending=False, by='background')
.drop_duplicates(['word'])
.set_index('word'))
return to_ret
<commit_msg>Fix FutureWarning: read_table is deprecated, use read_csv instead, passing sep='\t'<commit_after>
|
import pkgutil
from io import StringIO
import pandas as pd
from scipy.stats import rankdata
class BackgroundFrequencies(object):
@staticmethod
def get_background_frequency_df(frequency_path=None):
raise Exception
@classmethod
def get_background_rank_df(cls, frequency_path=None):
df = cls.get_background_frequency_df(frequency_path)
df['rank'] = rankdata(df.background, method='dense')
df['background'] = df['rank'] / df['rank'].max()
return df[['background']]
class DefaultBackgroundFrequencies(BackgroundFrequencies):
@staticmethod
def get_background_frequency_df(frequency_path=None):
if frequency_path:
unigram_freq_table_buf = open(frequency_path)
else:
unigram_freq_table_buf = StringIO(pkgutil.get_data('scattertext', 'data/count_1w.txt')
.decode('utf-8'))
to_ret = (pd.read_csv(unigram_freq_table_buf,
sep='\t',
names=['word', 'background'])
.sort_values(ascending=False, by='background')
.drop_duplicates(['word'])
.set_index('word'))
return to_ret
|
import pkgutil
from io import StringIO
import pandas as pd
from scipy.stats import rankdata
class BackgroundFrequencies(object):
@staticmethod
def get_background_frequency_df(frequency_path=None):
raise Exception
@classmethod
def get_background_rank_df(cls, frequency_path=None):
df = cls.get_background_frequency_df(frequency_path)
df['rank'] = rankdata(df.background, method='dense')
df['background'] = df['rank'] / df['rank'].max()
return df[['background']]
class DefaultBackgroundFrequencies(BackgroundFrequencies):
@staticmethod
def get_background_frequency_df(frequency_path=None):
if frequency_path:
unigram_freq_table_buf = open(frequency_path)
else:
unigram_freq_table_buf = StringIO(pkgutil.get_data('scattertext', 'data/count_1w.txt')
.decode('utf-8'))
to_ret = (pd.read_table(unigram_freq_table_buf,
names=['word', 'background'])
.sort_values(ascending=False, by='background')
.drop_duplicates(['word'])
.set_index('word'))
return to_ret
Fix FutureWarning: read_table is deprecated, use read_csv instead, passing sep='\t'import pkgutil
from io import StringIO
import pandas as pd
from scipy.stats import rankdata
class BackgroundFrequencies(object):
@staticmethod
def get_background_frequency_df(frequency_path=None):
raise Exception
@classmethod
def get_background_rank_df(cls, frequency_path=None):
df = cls.get_background_frequency_df(frequency_path)
df['rank'] = rankdata(df.background, method='dense')
df['background'] = df['rank'] / df['rank'].max()
return df[['background']]
class DefaultBackgroundFrequencies(BackgroundFrequencies):
@staticmethod
def get_background_frequency_df(frequency_path=None):
if frequency_path:
unigram_freq_table_buf = open(frequency_path)
else:
unigram_freq_table_buf = StringIO(pkgutil.get_data('scattertext', 'data/count_1w.txt')
.decode('utf-8'))
to_ret = (pd.read_csv(unigram_freq_table_buf,
sep='\t',
names=['word', 'background'])
.sort_values(ascending=False, by='background')
.drop_duplicates(['word'])
.set_index('word'))
return to_ret
|
<commit_before>import pkgutil
from io import StringIO
import pandas as pd
from scipy.stats import rankdata
class BackgroundFrequencies(object):
@staticmethod
def get_background_frequency_df(frequency_path=None):
raise Exception
@classmethod
def get_background_rank_df(cls, frequency_path=None):
df = cls.get_background_frequency_df(frequency_path)
df['rank'] = rankdata(df.background, method='dense')
df['background'] = df['rank'] / df['rank'].max()
return df[['background']]
class DefaultBackgroundFrequencies(BackgroundFrequencies):
@staticmethod
def get_background_frequency_df(frequency_path=None):
if frequency_path:
unigram_freq_table_buf = open(frequency_path)
else:
unigram_freq_table_buf = StringIO(pkgutil.get_data('scattertext', 'data/count_1w.txt')
.decode('utf-8'))
to_ret = (pd.read_table(unigram_freq_table_buf,
names=['word', 'background'])
.sort_values(ascending=False, by='background')
.drop_duplicates(['word'])
.set_index('word'))
return to_ret
<commit_msg>Fix FutureWarning: read_table is deprecated, use read_csv instead, passing sep='\t'<commit_after>import pkgutil
from io import StringIO
import pandas as pd
from scipy.stats import rankdata
class BackgroundFrequencies(object):
@staticmethod
def get_background_frequency_df(frequency_path=None):
raise Exception
@classmethod
def get_background_rank_df(cls, frequency_path=None):
df = cls.get_background_frequency_df(frequency_path)
df['rank'] = rankdata(df.background, method='dense')
df['background'] = df['rank'] / df['rank'].max()
return df[['background']]
class DefaultBackgroundFrequencies(BackgroundFrequencies):
@staticmethod
def get_background_frequency_df(frequency_path=None):
if frequency_path:
unigram_freq_table_buf = open(frequency_path)
else:
unigram_freq_table_buf = StringIO(pkgutil.get_data('scattertext', 'data/count_1w.txt')
.decode('utf-8'))
to_ret = (pd.read_csv(unigram_freq_table_buf,
sep='\t',
names=['word', 'background'])
.sort_values(ascending=False, by='background')
.drop_duplicates(['word'])
.set_index('word'))
return to_ret
|
40fc5c555e471f28959cbe3ad7d929636384595a
|
casexml/apps/stock/utils.py
|
casexml/apps/stock/utils.py
|
UNDERSTOCK_THRESHOLD = 0.5 # months
OVERSTOCK_THRESHOLD = 2. # months
def months_of_stock_remaining(stock, daily_consumption):
try:
return stock / (daily_consumption * 30)
except (TypeError, ZeroDivisionError):
return None
def stock_category(stock, daily_consumption):
if stock is None:
return 'nodata'
elif stock == 0:
return 'stockout'
elif daily_consumption is None:
return 'nodata'
elif daily_consumption == 0:
return 'overstock'
months_left = months_of_stock_remaining(stock, daily_consumption)
if months_left is None:
return 'nodata'
elif months_left < UNDERSTOCK_THRESHOLD:
return 'understock'
elif months_left > OVERSTOCK_THRESHOLD:
return 'overstock'
else:
return 'adequate'
|
from decimal import Decimal
UNDERSTOCK_THRESHOLD = 0.5 # months
OVERSTOCK_THRESHOLD = 2. # months
def months_of_stock_remaining(stock, daily_consumption):
if daily_consumption:
return stock / Decimal((daily_consumption * 30))
else:
return None
def stock_category(stock, daily_consumption):
if stock is None:
return 'nodata'
elif stock == 0:
return 'stockout'
elif daily_consumption is None:
return 'nodata'
elif daily_consumption == 0:
return 'overstock'
months_left = months_of_stock_remaining(stock, daily_consumption)
if months_left is None:
return 'nodata'
elif months_left < UNDERSTOCK_THRESHOLD:
return 'understock'
elif months_left > OVERSTOCK_THRESHOLD:
return 'overstock'
else:
return 'adequate'
|
Fix error handling on aggregate status report
|
Fix error handling on aggregate status report
Previously the catch block was a little too aggressive. It was swallowing a
real error (since aggregate reports pass a float, not a decimal). Now we
prevent the original possible errors by converting no matter what the type is
and checking for zero/null values first.
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,qedsoftware/commcare-hq,SEL-Columbia/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,SEL-Columbia/commcare-hq,SEL-Columbia/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq
|
UNDERSTOCK_THRESHOLD = 0.5 # months
OVERSTOCK_THRESHOLD = 2. # months
def months_of_stock_remaining(stock, daily_consumption):
try:
return stock / (daily_consumption * 30)
except (TypeError, ZeroDivisionError):
return None
def stock_category(stock, daily_consumption):
if stock is None:
return 'nodata'
elif stock == 0:
return 'stockout'
elif daily_consumption is None:
return 'nodata'
elif daily_consumption == 0:
return 'overstock'
months_left = months_of_stock_remaining(stock, daily_consumption)
if months_left is None:
return 'nodata'
elif months_left < UNDERSTOCK_THRESHOLD:
return 'understock'
elif months_left > OVERSTOCK_THRESHOLD:
return 'overstock'
else:
return 'adequate'
Fix error handling on aggregate status report
Previously the catch block was a little too aggressive. It was swallowing a
real error (since aggregate reports pass a float, not a decimal). Now we
prevent the original possible errors by converting no matter what the type is
and checking for zero/null values first.
|
from decimal import Decimal
UNDERSTOCK_THRESHOLD = 0.5 # months
OVERSTOCK_THRESHOLD = 2. # months
def months_of_stock_remaining(stock, daily_consumption):
if daily_consumption:
return stock / Decimal((daily_consumption * 30))
else:
return None
def stock_category(stock, daily_consumption):
if stock is None:
return 'nodata'
elif stock == 0:
return 'stockout'
elif daily_consumption is None:
return 'nodata'
elif daily_consumption == 0:
return 'overstock'
months_left = months_of_stock_remaining(stock, daily_consumption)
if months_left is None:
return 'nodata'
elif months_left < UNDERSTOCK_THRESHOLD:
return 'understock'
elif months_left > OVERSTOCK_THRESHOLD:
return 'overstock'
else:
return 'adequate'
|
<commit_before>UNDERSTOCK_THRESHOLD = 0.5 # months
OVERSTOCK_THRESHOLD = 2. # months
def months_of_stock_remaining(stock, daily_consumption):
try:
return stock / (daily_consumption * 30)
except (TypeError, ZeroDivisionError):
return None
def stock_category(stock, daily_consumption):
if stock is None:
return 'nodata'
elif stock == 0:
return 'stockout'
elif daily_consumption is None:
return 'nodata'
elif daily_consumption == 0:
return 'overstock'
months_left = months_of_stock_remaining(stock, daily_consumption)
if months_left is None:
return 'nodata'
elif months_left < UNDERSTOCK_THRESHOLD:
return 'understock'
elif months_left > OVERSTOCK_THRESHOLD:
return 'overstock'
else:
return 'adequate'
<commit_msg>Fix error handling on aggregate status report
Previously the catch block was a little too aggressive. It was swallowing a
real error (since aggregate reports pass a float, not a decimal). Now we
prevent the original possible errors by converting no matter what the type is
and checking for zero/null values first.<commit_after>
|
from decimal import Decimal
UNDERSTOCK_THRESHOLD = 0.5 # months
OVERSTOCK_THRESHOLD = 2. # months
def months_of_stock_remaining(stock, daily_consumption):
if daily_consumption:
return stock / Decimal((daily_consumption * 30))
else:
return None
def stock_category(stock, daily_consumption):
if stock is None:
return 'nodata'
elif stock == 0:
return 'stockout'
elif daily_consumption is None:
return 'nodata'
elif daily_consumption == 0:
return 'overstock'
months_left = months_of_stock_remaining(stock, daily_consumption)
if months_left is None:
return 'nodata'
elif months_left < UNDERSTOCK_THRESHOLD:
return 'understock'
elif months_left > OVERSTOCK_THRESHOLD:
return 'overstock'
else:
return 'adequate'
|
UNDERSTOCK_THRESHOLD = 0.5 # months
OVERSTOCK_THRESHOLD = 2. # months
def months_of_stock_remaining(stock, daily_consumption):
try:
return stock / (daily_consumption * 30)
except (TypeError, ZeroDivisionError):
return None
def stock_category(stock, daily_consumption):
if stock is None:
return 'nodata'
elif stock == 0:
return 'stockout'
elif daily_consumption is None:
return 'nodata'
elif daily_consumption == 0:
return 'overstock'
months_left = months_of_stock_remaining(stock, daily_consumption)
if months_left is None:
return 'nodata'
elif months_left < UNDERSTOCK_THRESHOLD:
return 'understock'
elif months_left > OVERSTOCK_THRESHOLD:
return 'overstock'
else:
return 'adequate'
Fix error handling on aggregate status report
Previously the catch block was a little too aggressive. It was swallowing a
real error (since aggregate reports pass a float, not a decimal). Now we
prevent the original possible errors by converting no matter what the type is
and checking for zero/null values first.from decimal import Decimal
UNDERSTOCK_THRESHOLD = 0.5 # months
OVERSTOCK_THRESHOLD = 2. # months
def months_of_stock_remaining(stock, daily_consumption):
if daily_consumption:
return stock / Decimal((daily_consumption * 30))
else:
return None
def stock_category(stock, daily_consumption):
if stock is None:
return 'nodata'
elif stock == 0:
return 'stockout'
elif daily_consumption is None:
return 'nodata'
elif daily_consumption == 0:
return 'overstock'
months_left = months_of_stock_remaining(stock, daily_consumption)
if months_left is None:
return 'nodata'
elif months_left < UNDERSTOCK_THRESHOLD:
return 'understock'
elif months_left > OVERSTOCK_THRESHOLD:
return 'overstock'
else:
return 'adequate'
|
<commit_before>UNDERSTOCK_THRESHOLD = 0.5 # months
OVERSTOCK_THRESHOLD = 2. # months
def months_of_stock_remaining(stock, daily_consumption):
try:
return stock / (daily_consumption * 30)
except (TypeError, ZeroDivisionError):
return None
def stock_category(stock, daily_consumption):
if stock is None:
return 'nodata'
elif stock == 0:
return 'stockout'
elif daily_consumption is None:
return 'nodata'
elif daily_consumption == 0:
return 'overstock'
months_left = months_of_stock_remaining(stock, daily_consumption)
if months_left is None:
return 'nodata'
elif months_left < UNDERSTOCK_THRESHOLD:
return 'understock'
elif months_left > OVERSTOCK_THRESHOLD:
return 'overstock'
else:
return 'adequate'
<commit_msg>Fix error handling on aggregate status report
Previously the catch block was a little too aggressive. It was swallowing a
real error (since aggregate reports pass a float, not a decimal). Now we
prevent the original possible errors by converting no matter what the type is
and checking for zero/null values first.<commit_after>from decimal import Decimal
UNDERSTOCK_THRESHOLD = 0.5 # months
OVERSTOCK_THRESHOLD = 2. # months
def months_of_stock_remaining(stock, daily_consumption):
if daily_consumption:
return stock / Decimal((daily_consumption * 30))
else:
return None
def stock_category(stock, daily_consumption):
if stock is None:
return 'nodata'
elif stock == 0:
return 'stockout'
elif daily_consumption is None:
return 'nodata'
elif daily_consumption == 0:
return 'overstock'
months_left = months_of_stock_remaining(stock, daily_consumption)
if months_left is None:
return 'nodata'
elif months_left < UNDERSTOCK_THRESHOLD:
return 'understock'
elif months_left > OVERSTOCK_THRESHOLD:
return 'overstock'
else:
return 'adequate'
|
ceb32eb2cefadc04fdf7cf5c474a96d307a1618f
|
core/observables/file.py
|
core/observables/file.py
|
from __future__ import unicode_literals
from mongoengine import *
from core.observables import Observable
from core.observables import Hash
class File(Observable):
value = StringField(verbose_name="SHA256 hash")
mime_type = StringField(verbose_name="MIME type")
hashes = DictField(verbose_name="Hashes")
body = ReferenceField("AttachedFile")
filenames = ListField(StringField(), verbose_name="Filenames")
DISPLAY_FIELDS = Observable.DISPLAY_FIELDS + [("mime_type", "MIME Type")]
@staticmethod
def check_type(txt):
return True
def info(self):
i = Observable.info(self)
i['mime_type'] = self.mime_type
i['hashes'] = self.hashes
return i
|
from __future__ import unicode_literals
from flask import url_for
from flask_mongoengine.wtf import model_form
from mongoengine import *
from core.observables import Observable
from core.database import StringListField
class File(Observable):
value = StringField(verbose_name="Value")
mime_type = StringField(verbose_name="MIME type")
hashes = DictField(verbose_name="Hashes")
body = ReferenceField("AttachedFile")
filenames = ListField(StringField(), verbose_name="Filenames")
DISPLAY_FIELDS = Observable.DISPLAY_FIELDS + [("mime_type", "MIME Type")]
exclude_fields = Observable.exclude_fields + ['hashes', 'body']
@classmethod
def get_form(klass):
form = model_form(klass, exclude=klass.exclude_fields)
form.filenames = StringListField("Filenames")
return form
@staticmethod
def check_type(txt):
return True
def info(self):
i = Observable.info(self)
i['mime_type'] = self.mime_type
i['hashes'] = self.hashes
return i
|
Clean up File edit view
|
Clean up File edit view
|
Python
|
apache-2.0
|
yeti-platform/yeti,yeti-platform/yeti,yeti-platform/yeti,yeti-platform/yeti
|
from __future__ import unicode_literals
from mongoengine import *
from core.observables import Observable
from core.observables import Hash
class File(Observable):
value = StringField(verbose_name="SHA256 hash")
mime_type = StringField(verbose_name="MIME type")
hashes = DictField(verbose_name="Hashes")
body = ReferenceField("AttachedFile")
filenames = ListField(StringField(), verbose_name="Filenames")
DISPLAY_FIELDS = Observable.DISPLAY_FIELDS + [("mime_type", "MIME Type")]
@staticmethod
def check_type(txt):
return True
def info(self):
i = Observable.info(self)
i['mime_type'] = self.mime_type
i['hashes'] = self.hashes
return i
Clean up File edit view
|
from __future__ import unicode_literals
from flask import url_for
from flask_mongoengine.wtf import model_form
from mongoengine import *
from core.observables import Observable
from core.database import StringListField
class File(Observable):
value = StringField(verbose_name="Value")
mime_type = StringField(verbose_name="MIME type")
hashes = DictField(verbose_name="Hashes")
body = ReferenceField("AttachedFile")
filenames = ListField(StringField(), verbose_name="Filenames")
DISPLAY_FIELDS = Observable.DISPLAY_FIELDS + [("mime_type", "MIME Type")]
exclude_fields = Observable.exclude_fields + ['hashes', 'body']
@classmethod
def get_form(klass):
form = model_form(klass, exclude=klass.exclude_fields)
form.filenames = StringListField("Filenames")
return form
@staticmethod
def check_type(txt):
return True
def info(self):
i = Observable.info(self)
i['mime_type'] = self.mime_type
i['hashes'] = self.hashes
return i
|
<commit_before>from __future__ import unicode_literals
from mongoengine import *
from core.observables import Observable
from core.observables import Hash
class File(Observable):
value = StringField(verbose_name="SHA256 hash")
mime_type = StringField(verbose_name="MIME type")
hashes = DictField(verbose_name="Hashes")
body = ReferenceField("AttachedFile")
filenames = ListField(StringField(), verbose_name="Filenames")
DISPLAY_FIELDS = Observable.DISPLAY_FIELDS + [("mime_type", "MIME Type")]
@staticmethod
def check_type(txt):
return True
def info(self):
i = Observable.info(self)
i['mime_type'] = self.mime_type
i['hashes'] = self.hashes
return i
<commit_msg>Clean up File edit view<commit_after>
|
from __future__ import unicode_literals
from flask import url_for
from flask_mongoengine.wtf import model_form
from mongoengine import *
from core.observables import Observable
from core.database import StringListField
class File(Observable):
value = StringField(verbose_name="Value")
mime_type = StringField(verbose_name="MIME type")
hashes = DictField(verbose_name="Hashes")
body = ReferenceField("AttachedFile")
filenames = ListField(StringField(), verbose_name="Filenames")
DISPLAY_FIELDS = Observable.DISPLAY_FIELDS + [("mime_type", "MIME Type")]
exclude_fields = Observable.exclude_fields + ['hashes', 'body']
@classmethod
def get_form(klass):
form = model_form(klass, exclude=klass.exclude_fields)
form.filenames = StringListField("Filenames")
return form
@staticmethod
def check_type(txt):
return True
def info(self):
i = Observable.info(self)
i['mime_type'] = self.mime_type
i['hashes'] = self.hashes
return i
|
from __future__ import unicode_literals
from mongoengine import *
from core.observables import Observable
from core.observables import Hash
class File(Observable):
value = StringField(verbose_name="SHA256 hash")
mime_type = StringField(verbose_name="MIME type")
hashes = DictField(verbose_name="Hashes")
body = ReferenceField("AttachedFile")
filenames = ListField(StringField(), verbose_name="Filenames")
DISPLAY_FIELDS = Observable.DISPLAY_FIELDS + [("mime_type", "MIME Type")]
@staticmethod
def check_type(txt):
return True
def info(self):
i = Observable.info(self)
i['mime_type'] = self.mime_type
i['hashes'] = self.hashes
return i
Clean up File edit viewfrom __future__ import unicode_literals
from flask import url_for
from flask_mongoengine.wtf import model_form
from mongoengine import *
from core.observables import Observable
from core.database import StringListField
class File(Observable):
value = StringField(verbose_name="Value")
mime_type = StringField(verbose_name="MIME type")
hashes = DictField(verbose_name="Hashes")
body = ReferenceField("AttachedFile")
filenames = ListField(StringField(), verbose_name="Filenames")
DISPLAY_FIELDS = Observable.DISPLAY_FIELDS + [("mime_type", "MIME Type")]
exclude_fields = Observable.exclude_fields + ['hashes', 'body']
@classmethod
def get_form(klass):
form = model_form(klass, exclude=klass.exclude_fields)
form.filenames = StringListField("Filenames")
return form
@staticmethod
def check_type(txt):
return True
def info(self):
i = Observable.info(self)
i['mime_type'] = self.mime_type
i['hashes'] = self.hashes
return i
|
<commit_before>from __future__ import unicode_literals
from mongoengine import *
from core.observables import Observable
from core.observables import Hash
class File(Observable):
value = StringField(verbose_name="SHA256 hash")
mime_type = StringField(verbose_name="MIME type")
hashes = DictField(verbose_name="Hashes")
body = ReferenceField("AttachedFile")
filenames = ListField(StringField(), verbose_name="Filenames")
DISPLAY_FIELDS = Observable.DISPLAY_FIELDS + [("mime_type", "MIME Type")]
@staticmethod
def check_type(txt):
return True
def info(self):
i = Observable.info(self)
i['mime_type'] = self.mime_type
i['hashes'] = self.hashes
return i
<commit_msg>Clean up File edit view<commit_after>from __future__ import unicode_literals
from flask import url_for
from flask_mongoengine.wtf import model_form
from mongoengine import *
from core.observables import Observable
from core.database import StringListField
class File(Observable):
value = StringField(verbose_name="Value")
mime_type = StringField(verbose_name="MIME type")
hashes = DictField(verbose_name="Hashes")
body = ReferenceField("AttachedFile")
filenames = ListField(StringField(), verbose_name="Filenames")
DISPLAY_FIELDS = Observable.DISPLAY_FIELDS + [("mime_type", "MIME Type")]
exclude_fields = Observable.exclude_fields + ['hashes', 'body']
@classmethod
def get_form(klass):
form = model_form(klass, exclude=klass.exclude_fields)
form.filenames = StringListField("Filenames")
return form
@staticmethod
def check_type(txt):
return True
def info(self):
i = Observable.info(self)
i['mime_type'] = self.mime_type
i['hashes'] = self.hashes
return i
|
6172eafbaf65b859462985056bb33490b98b0749
|
peloid/app/shell/service.py
|
peloid/app/shell/service.py
|
from twisted.cred import portal
from twisted.conch import manhole_ssh
from twisted.conch.checkers import SSHPublicKeyDatabase
from carapace.util import ssh as util
from peloid.app.shell import gameshell, setupshell
def getGameShellFactory(**namespace):
game = None
sshRealm = gameshell.TerminalRealm(namespace, game)
sshPortal = portal.Portal(sshRealm)
factory = manhole_ssh.ConchFactory(sshPortal)
factory.privateKeys = {'ssh-rsa': util.getPrivKey()}
factory.publicKeys = {'ssh-rsa': util.getPubKey()}
factory.portal.registerChecker(SSHPublicKeyDatabase())
return factory
def getSetupShellFactory(**namespace):
#telnetRealm = setupshell.X
#telnetPortal = portal.Portal(telnetRealm)
pass
|
from twisted.cred import portal
from twisted.conch import manhole_ssh
from twisted.conch.checkers import SSHPublicKeyDatabase
from carapace.util import ssh as util
from peloid.app import mud
from peloid.app.shell import gameshell, setupshell
def getGameShellFactory(**namespace):
game = mud.Game()
sshRealm = gameshell.TerminalRealm(namespace, game)
sshPortal = portal.Portal(sshRealm)
factory = manhole_ssh.ConchFactory(sshPortal)
factory.privateKeys = {'ssh-rsa': util.getPrivKey()}
factory.publicKeys = {'ssh-rsa': util.getPubKey()}
factory.portal.registerChecker(SSHPublicKeyDatabase())
return factory
def getSetupShellFactory(**namespace):
#telnetRealm = setupshell.X
#telnetPortal = portal.Portal(telnetRealm)
pass
|
Use the new Game class.
|
Use the new Game class.
|
Python
|
mit
|
oubiwann/peloid
|
from twisted.cred import portal
from twisted.conch import manhole_ssh
from twisted.conch.checkers import SSHPublicKeyDatabase
from carapace.util import ssh as util
from peloid.app.shell import gameshell, setupshell
def getGameShellFactory(**namespace):
game = None
sshRealm = gameshell.TerminalRealm(namespace, game)
sshPortal = portal.Portal(sshRealm)
factory = manhole_ssh.ConchFactory(sshPortal)
factory.privateKeys = {'ssh-rsa': util.getPrivKey()}
factory.publicKeys = {'ssh-rsa': util.getPubKey()}
factory.portal.registerChecker(SSHPublicKeyDatabase())
return factory
def getSetupShellFactory(**namespace):
#telnetRealm = setupshell.X
#telnetPortal = portal.Portal(telnetRealm)
pass
Use the new Game class.
|
from twisted.cred import portal
from twisted.conch import manhole_ssh
from twisted.conch.checkers import SSHPublicKeyDatabase
from carapace.util import ssh as util
from peloid.app import mud
from peloid.app.shell import gameshell, setupshell
def getGameShellFactory(**namespace):
game = mud.Game()
sshRealm = gameshell.TerminalRealm(namespace, game)
sshPortal = portal.Portal(sshRealm)
factory = manhole_ssh.ConchFactory(sshPortal)
factory.privateKeys = {'ssh-rsa': util.getPrivKey()}
factory.publicKeys = {'ssh-rsa': util.getPubKey()}
factory.portal.registerChecker(SSHPublicKeyDatabase())
return factory
def getSetupShellFactory(**namespace):
#telnetRealm = setupshell.X
#telnetPortal = portal.Portal(telnetRealm)
pass
|
<commit_before>from twisted.cred import portal
from twisted.conch import manhole_ssh
from twisted.conch.checkers import SSHPublicKeyDatabase
from carapace.util import ssh as util
from peloid.app.shell import gameshell, setupshell
def getGameShellFactory(**namespace):
game = None
sshRealm = gameshell.TerminalRealm(namespace, game)
sshPortal = portal.Portal(sshRealm)
factory = manhole_ssh.ConchFactory(sshPortal)
factory.privateKeys = {'ssh-rsa': util.getPrivKey()}
factory.publicKeys = {'ssh-rsa': util.getPubKey()}
factory.portal.registerChecker(SSHPublicKeyDatabase())
return factory
def getSetupShellFactory(**namespace):
#telnetRealm = setupshell.X
#telnetPortal = portal.Portal(telnetRealm)
pass
<commit_msg>Use the new Game class.<commit_after>
|
from twisted.cred import portal
from twisted.conch import manhole_ssh
from twisted.conch.checkers import SSHPublicKeyDatabase
from carapace.util import ssh as util
from peloid.app import mud
from peloid.app.shell import gameshell, setupshell
def getGameShellFactory(**namespace):
game = mud.Game()
sshRealm = gameshell.TerminalRealm(namespace, game)
sshPortal = portal.Portal(sshRealm)
factory = manhole_ssh.ConchFactory(sshPortal)
factory.privateKeys = {'ssh-rsa': util.getPrivKey()}
factory.publicKeys = {'ssh-rsa': util.getPubKey()}
factory.portal.registerChecker(SSHPublicKeyDatabase())
return factory
def getSetupShellFactory(**namespace):
#telnetRealm = setupshell.X
#telnetPortal = portal.Portal(telnetRealm)
pass
|
from twisted.cred import portal
from twisted.conch import manhole_ssh
from twisted.conch.checkers import SSHPublicKeyDatabase
from carapace.util import ssh as util
from peloid.app.shell import gameshell, setupshell
def getGameShellFactory(**namespace):
game = None
sshRealm = gameshell.TerminalRealm(namespace, game)
sshPortal = portal.Portal(sshRealm)
factory = manhole_ssh.ConchFactory(sshPortal)
factory.privateKeys = {'ssh-rsa': util.getPrivKey()}
factory.publicKeys = {'ssh-rsa': util.getPubKey()}
factory.portal.registerChecker(SSHPublicKeyDatabase())
return factory
def getSetupShellFactory(**namespace):
#telnetRealm = setupshell.X
#telnetPortal = portal.Portal(telnetRealm)
pass
Use the new Game class.from twisted.cred import portal
from twisted.conch import manhole_ssh
from twisted.conch.checkers import SSHPublicKeyDatabase
from carapace.util import ssh as util
from peloid.app import mud
from peloid.app.shell import gameshell, setupshell
def getGameShellFactory(**namespace):
game = mud.Game()
sshRealm = gameshell.TerminalRealm(namespace, game)
sshPortal = portal.Portal(sshRealm)
factory = manhole_ssh.ConchFactory(sshPortal)
factory.privateKeys = {'ssh-rsa': util.getPrivKey()}
factory.publicKeys = {'ssh-rsa': util.getPubKey()}
factory.portal.registerChecker(SSHPublicKeyDatabase())
return factory
def getSetupShellFactory(**namespace):
#telnetRealm = setupshell.X
#telnetPortal = portal.Portal(telnetRealm)
pass
|
<commit_before>from twisted.cred import portal
from twisted.conch import manhole_ssh
from twisted.conch.checkers import SSHPublicKeyDatabase
from carapace.util import ssh as util
from peloid.app.shell import gameshell, setupshell
def getGameShellFactory(**namespace):
game = None
sshRealm = gameshell.TerminalRealm(namespace, game)
sshPortal = portal.Portal(sshRealm)
factory = manhole_ssh.ConchFactory(sshPortal)
factory.privateKeys = {'ssh-rsa': util.getPrivKey()}
factory.publicKeys = {'ssh-rsa': util.getPubKey()}
factory.portal.registerChecker(SSHPublicKeyDatabase())
return factory
def getSetupShellFactory(**namespace):
#telnetRealm = setupshell.X
#telnetPortal = portal.Portal(telnetRealm)
pass
<commit_msg>Use the new Game class.<commit_after>from twisted.cred import portal
from twisted.conch import manhole_ssh
from twisted.conch.checkers import SSHPublicKeyDatabase
from carapace.util import ssh as util
from peloid.app import mud
from peloid.app.shell import gameshell, setupshell
def getGameShellFactory(**namespace):
game = mud.Game()
sshRealm = gameshell.TerminalRealm(namespace, game)
sshPortal = portal.Portal(sshRealm)
factory = manhole_ssh.ConchFactory(sshPortal)
factory.privateKeys = {'ssh-rsa': util.getPrivKey()}
factory.publicKeys = {'ssh-rsa': util.getPubKey()}
factory.portal.registerChecker(SSHPublicKeyDatabase())
return factory
def getSetupShellFactory(**namespace):
#telnetRealm = setupshell.X
#telnetPortal = portal.Portal(telnetRealm)
pass
|
f4b1b92033995eb4552401fb9e09669411787964
|
setup.py
|
setup.py
|
from setuptools import find_packages
from setuptools import setup
REQUIRED_PACKAGES = ['distance', 'tensorflow', 'numpy', 'six', 'pillow']
VERSION = '0.0.2'
try:
import pypandoc
README = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
README = open('README.md').read()
setup(
name='aocr',
url='https://github.com/emedvedev/attention-ocr',
download_url='https://github.com/emedvedev/attention-ocr/archive/{}.tar.gz'.format(VERSION),
author='Ed Medvedev',
author_email='edward.medvedev@gmail.com',
version=VERSION,
install_requires=REQUIRED_PACKAGES,
packages=find_packages(),
include_package_data=True,
license='MIT',
description=('''Optical character recognition model '''
'''for Tensorflow based on Visual Attention.'''),
long_description=README,
entry_points={
'console_scripts': ['aocr=aocr.__main__:main'],
}
)
|
from setuptools import find_packages
from setuptools import setup
REQUIRED_PACKAGES = ['distance', 'numpy', 'six', 'pillow']
VERSION = '0.0.3'
try:
import pypandoc
README = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
README = open('README.md').read()
setup(
name='aocr',
url='https://github.com/emedvedev/attention-ocr',
download_url='https://github.com/emedvedev/attention-ocr/archive/{}.tar.gz'.format(VERSION),
author='Ed Medvedev',
author_email='edward.medvedev@gmail.com',
version=VERSION,
install_requires=REQUIRED_PACKAGES,
packages=find_packages(),
include_package_data=True,
license='MIT',
description=('''Optical character recognition model '''
'''for Tensorflow based on Visual Attention.'''),
long_description=README,
entry_points={
'console_scripts': ['aocr=aocr.__main__:main'],
}
)
|
Remove tensorflow from hard dependencies and update the version
|
Remove tensorflow from hard dependencies and update the version
|
Python
|
mit
|
emedvedev/attention-ocr
|
from setuptools import find_packages
from setuptools import setup
REQUIRED_PACKAGES = ['distance', 'tensorflow', 'numpy', 'six', 'pillow']
VERSION = '0.0.2'
try:
import pypandoc
README = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
README = open('README.md').read()
setup(
name='aocr',
url='https://github.com/emedvedev/attention-ocr',
download_url='https://github.com/emedvedev/attention-ocr/archive/{}.tar.gz'.format(VERSION),
author='Ed Medvedev',
author_email='edward.medvedev@gmail.com',
version=VERSION,
install_requires=REQUIRED_PACKAGES,
packages=find_packages(),
include_package_data=True,
license='MIT',
description=('''Optical character recognition model '''
'''for Tensorflow based on Visual Attention.'''),
long_description=README,
entry_points={
'console_scripts': ['aocr=aocr.__main__:main'],
}
)
Remove tensorflow from hard dependencies and update the version
|
from setuptools import find_packages
from setuptools import setup
REQUIRED_PACKAGES = ['distance', 'numpy', 'six', 'pillow']
VERSION = '0.0.3'
try:
import pypandoc
README = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
README = open('README.md').read()
setup(
name='aocr',
url='https://github.com/emedvedev/attention-ocr',
download_url='https://github.com/emedvedev/attention-ocr/archive/{}.tar.gz'.format(VERSION),
author='Ed Medvedev',
author_email='edward.medvedev@gmail.com',
version=VERSION,
install_requires=REQUIRED_PACKAGES,
packages=find_packages(),
include_package_data=True,
license='MIT',
description=('''Optical character recognition model '''
'''for Tensorflow based on Visual Attention.'''),
long_description=README,
entry_points={
'console_scripts': ['aocr=aocr.__main__:main'],
}
)
|
<commit_before>from setuptools import find_packages
from setuptools import setup
REQUIRED_PACKAGES = ['distance', 'tensorflow', 'numpy', 'six', 'pillow']
VERSION = '0.0.2'
try:
import pypandoc
README = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
README = open('README.md').read()
setup(
name='aocr',
url='https://github.com/emedvedev/attention-ocr',
download_url='https://github.com/emedvedev/attention-ocr/archive/{}.tar.gz'.format(VERSION),
author='Ed Medvedev',
author_email='edward.medvedev@gmail.com',
version=VERSION,
install_requires=REQUIRED_PACKAGES,
packages=find_packages(),
include_package_data=True,
license='MIT',
description=('''Optical character recognition model '''
'''for Tensorflow based on Visual Attention.'''),
long_description=README,
entry_points={
'console_scripts': ['aocr=aocr.__main__:main'],
}
)
<commit_msg>Remove tensorflow from hard dependencies and update the version<commit_after>
|
from setuptools import find_packages
from setuptools import setup
REQUIRED_PACKAGES = ['distance', 'numpy', 'six', 'pillow']
VERSION = '0.0.3'
try:
import pypandoc
README = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
README = open('README.md').read()
setup(
name='aocr',
url='https://github.com/emedvedev/attention-ocr',
download_url='https://github.com/emedvedev/attention-ocr/archive/{}.tar.gz'.format(VERSION),
author='Ed Medvedev',
author_email='edward.medvedev@gmail.com',
version=VERSION,
install_requires=REQUIRED_PACKAGES,
packages=find_packages(),
include_package_data=True,
license='MIT',
description=('''Optical character recognition model '''
'''for Tensorflow based on Visual Attention.'''),
long_description=README,
entry_points={
'console_scripts': ['aocr=aocr.__main__:main'],
}
)
|
from setuptools import find_packages
from setuptools import setup
REQUIRED_PACKAGES = ['distance', 'tensorflow', 'numpy', 'six', 'pillow']
VERSION = '0.0.2'
try:
import pypandoc
README = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
README = open('README.md').read()
setup(
name='aocr',
url='https://github.com/emedvedev/attention-ocr',
download_url='https://github.com/emedvedev/attention-ocr/archive/{}.tar.gz'.format(VERSION),
author='Ed Medvedev',
author_email='edward.medvedev@gmail.com',
version=VERSION,
install_requires=REQUIRED_PACKAGES,
packages=find_packages(),
include_package_data=True,
license='MIT',
description=('''Optical character recognition model '''
'''for Tensorflow based on Visual Attention.'''),
long_description=README,
entry_points={
'console_scripts': ['aocr=aocr.__main__:main'],
}
)
Remove tensorflow from hard dependencies and update the versionfrom setuptools import find_packages
from setuptools import setup
REQUIRED_PACKAGES = ['distance', 'numpy', 'six', 'pillow']
VERSION = '0.0.3'
try:
import pypandoc
README = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
README = open('README.md').read()
setup(
name='aocr',
url='https://github.com/emedvedev/attention-ocr',
download_url='https://github.com/emedvedev/attention-ocr/archive/{}.tar.gz'.format(VERSION),
author='Ed Medvedev',
author_email='edward.medvedev@gmail.com',
version=VERSION,
install_requires=REQUIRED_PACKAGES,
packages=find_packages(),
include_package_data=True,
license='MIT',
description=('''Optical character recognition model '''
'''for Tensorflow based on Visual Attention.'''),
long_description=README,
entry_points={
'console_scripts': ['aocr=aocr.__main__:main'],
}
)
|
<commit_before>from setuptools import find_packages
from setuptools import setup
REQUIRED_PACKAGES = ['distance', 'tensorflow', 'numpy', 'six', 'pillow']
VERSION = '0.0.2'
try:
import pypandoc
README = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
README = open('README.md').read()
setup(
name='aocr',
url='https://github.com/emedvedev/attention-ocr',
download_url='https://github.com/emedvedev/attention-ocr/archive/{}.tar.gz'.format(VERSION),
author='Ed Medvedev',
author_email='edward.medvedev@gmail.com',
version=VERSION,
install_requires=REQUIRED_PACKAGES,
packages=find_packages(),
include_package_data=True,
license='MIT',
description=('''Optical character recognition model '''
'''for Tensorflow based on Visual Attention.'''),
long_description=README,
entry_points={
'console_scripts': ['aocr=aocr.__main__:main'],
}
)
<commit_msg>Remove tensorflow from hard dependencies and update the version<commit_after>from setuptools import find_packages
from setuptools import setup
REQUIRED_PACKAGES = ['distance', 'numpy', 'six', 'pillow']
VERSION = '0.0.3'
try:
import pypandoc
README = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
README = open('README.md').read()
setup(
name='aocr',
url='https://github.com/emedvedev/attention-ocr',
download_url='https://github.com/emedvedev/attention-ocr/archive/{}.tar.gz'.format(VERSION),
author='Ed Medvedev',
author_email='edward.medvedev@gmail.com',
version=VERSION,
install_requires=REQUIRED_PACKAGES,
packages=find_packages(),
include_package_data=True,
license='MIT',
description=('''Optical character recognition model '''
'''for Tensorflow based on Visual Attention.'''),
long_description=README,
entry_points={
'console_scripts': ['aocr=aocr.__main__:main'],
}
)
|
258049876c4e9edd2c52d2f25f3f27caf976dd80
|
setup.py
|
setup.py
|
# -*- encoding: utf8 -*-
from setuptools import setup, find_packages
import os
setup(
name="aspectlib",
version="0.7.0",
url='https://github.com/ionelmc/python-aspectlib',
download_url='',
license='BSD',
description="Aspect-Oriented Programming toolkit.",
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
author='Ionel Cristian Mărieș',
author_email='contact@ionelmc.ro',
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python',
'Topic :: Utilities',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
],
keywords=[
'python', 'aop', 'aspects', 'aspect oriented programming', 'decorators', 'patch', 'monkeypatch', 'weave',
'debug', 'log', 'tests', 'mock'
],
install_requires=[
],
extras_require={
}
)
|
# -*- encoding: utf8 -*-
from setuptools import setup, find_packages
import os
read = lambda *names: open(os.path.join(os.path.dirname(__file__), *names)).read()
setup(
name="aspectlib",
version="0.8.0",
url='https://github.com/ionelmc/python-aspectlib',
download_url='',
license='BSD',
description="Aspect-Oriented Programming toolkit.",
long_description="%s\n%s" % (read('README.rst'), read('docs', 'changelog.rst').replace(':obj:', '')),
author='Ionel Cristian Mărieș',
author_email='contact@ionelmc.ro',
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python',
'Topic :: Utilities',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
],
keywords=[
'python', 'aop', 'aspects', 'aspect oriented programming', 'decorators', 'patch', 'monkeypatch', 'weave',
'debug', 'log', 'tests', 'mock'
],
install_requires=[
],
extras_require={
}
)
|
Include changelog in package registration. Up version.
|
Include changelog in package registration. Up version.
|
Python
|
bsd-2-clause
|
svetlyak40wt/python-aspectlib,ionelmc/python-aspectlib
|
# -*- encoding: utf8 -*-
from setuptools import setup, find_packages
import os
setup(
name="aspectlib",
version="0.7.0",
url='https://github.com/ionelmc/python-aspectlib',
download_url='',
license='BSD',
description="Aspect-Oriented Programming toolkit.",
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
author='Ionel Cristian Mărieș',
author_email='contact@ionelmc.ro',
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python',
'Topic :: Utilities',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
],
keywords=[
'python', 'aop', 'aspects', 'aspect oriented programming', 'decorators', 'patch', 'monkeypatch', 'weave',
'debug', 'log', 'tests', 'mock'
],
install_requires=[
],
extras_require={
}
)
Include changelog in package registration. Up version.
|
# -*- encoding: utf8 -*-
from setuptools import setup, find_packages
import os
read = lambda *names: open(os.path.join(os.path.dirname(__file__), *names)).read()
setup(
name="aspectlib",
version="0.8.0",
url='https://github.com/ionelmc/python-aspectlib',
download_url='',
license='BSD',
description="Aspect-Oriented Programming toolkit.",
long_description="%s\n%s" % (read('README.rst'), read('docs', 'changelog.rst').replace(':obj:', '')),
author='Ionel Cristian Mărieș',
author_email='contact@ionelmc.ro',
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python',
'Topic :: Utilities',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
],
keywords=[
'python', 'aop', 'aspects', 'aspect oriented programming', 'decorators', 'patch', 'monkeypatch', 'weave',
'debug', 'log', 'tests', 'mock'
],
install_requires=[
],
extras_require={
}
)
|
<commit_before># -*- encoding: utf8 -*-
from setuptools import setup, find_packages
import os
setup(
name="aspectlib",
version="0.7.0",
url='https://github.com/ionelmc/python-aspectlib',
download_url='',
license='BSD',
description="Aspect-Oriented Programming toolkit.",
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
author='Ionel Cristian Mărieș',
author_email='contact@ionelmc.ro',
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python',
'Topic :: Utilities',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
],
keywords=[
'python', 'aop', 'aspects', 'aspect oriented programming', 'decorators', 'patch', 'monkeypatch', 'weave',
'debug', 'log', 'tests', 'mock'
],
install_requires=[
],
extras_require={
}
)
<commit_msg>Include changelog in package registration. Up version.<commit_after>
|
# -*- encoding: utf8 -*-
from setuptools import setup, find_packages
import os
read = lambda *names: open(os.path.join(os.path.dirname(__file__), *names)).read()
setup(
name="aspectlib",
version="0.8.0",
url='https://github.com/ionelmc/python-aspectlib',
download_url='',
license='BSD',
description="Aspect-Oriented Programming toolkit.",
long_description="%s\n%s" % (read('README.rst'), read('docs', 'changelog.rst').replace(':obj:', '')),
author='Ionel Cristian Mărieș',
author_email='contact@ionelmc.ro',
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python',
'Topic :: Utilities',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
],
keywords=[
'python', 'aop', 'aspects', 'aspect oriented programming', 'decorators', 'patch', 'monkeypatch', 'weave',
'debug', 'log', 'tests', 'mock'
],
install_requires=[
],
extras_require={
}
)
|
# -*- encoding: utf8 -*-
from setuptools import setup, find_packages
import os
setup(
name="aspectlib",
version="0.7.0",
url='https://github.com/ionelmc/python-aspectlib',
download_url='',
license='BSD',
description="Aspect-Oriented Programming toolkit.",
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
author='Ionel Cristian Mărieș',
author_email='contact@ionelmc.ro',
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python',
'Topic :: Utilities',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
],
keywords=[
'python', 'aop', 'aspects', 'aspect oriented programming', 'decorators', 'patch', 'monkeypatch', 'weave',
'debug', 'log', 'tests', 'mock'
],
install_requires=[
],
extras_require={
}
)
Include changelog in package registration. Up version.# -*- encoding: utf8 -*-
from setuptools import setup, find_packages
import os
read = lambda *names: open(os.path.join(os.path.dirname(__file__), *names)).read()
setup(
name="aspectlib",
version="0.8.0",
url='https://github.com/ionelmc/python-aspectlib',
download_url='',
license='BSD',
description="Aspect-Oriented Programming toolkit.",
long_description="%s\n%s" % (read('README.rst'), read('docs', 'changelog.rst').replace(':obj:', '')),
author='Ionel Cristian Mărieș',
author_email='contact@ionelmc.ro',
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python',
'Topic :: Utilities',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
],
keywords=[
'python', 'aop', 'aspects', 'aspect oriented programming', 'decorators', 'patch', 'monkeypatch', 'weave',
'debug', 'log', 'tests', 'mock'
],
install_requires=[
],
extras_require={
}
)
|
<commit_before># -*- encoding: utf8 -*-
from setuptools import setup, find_packages
import os
setup(
name="aspectlib",
version="0.7.0",
url='https://github.com/ionelmc/python-aspectlib',
download_url='',
license='BSD',
description="Aspect-Oriented Programming toolkit.",
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
author='Ionel Cristian Mărieș',
author_email='contact@ionelmc.ro',
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python',
'Topic :: Utilities',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
],
keywords=[
'python', 'aop', 'aspects', 'aspect oriented programming', 'decorators', 'patch', 'monkeypatch', 'weave',
'debug', 'log', 'tests', 'mock'
],
install_requires=[
],
extras_require={
}
)
<commit_msg>Include changelog in package registration. Up version.<commit_after># -*- encoding: utf8 -*-
from setuptools import setup, find_packages
import os
read = lambda *names: open(os.path.join(os.path.dirname(__file__), *names)).read()
setup(
name="aspectlib",
version="0.8.0",
url='https://github.com/ionelmc/python-aspectlib',
download_url='',
license='BSD',
description="Aspect-Oriented Programming toolkit.",
long_description="%s\n%s" % (read('README.rst'), read('docs', 'changelog.rst').replace(':obj:', '')),
author='Ionel Cristian Mărieș',
author_email='contact@ionelmc.ro',
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python',
'Topic :: Utilities',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
],
keywords=[
'python', 'aop', 'aspects', 'aspect oriented programming', 'decorators', 'patch', 'monkeypatch', 'weave',
'debug', 'log', 'tests', 'mock'
],
install_requires=[
],
extras_require={
}
)
|
261e76669480df4becbc5fbc81fbd5f8d5fa3e5c
|
setup.py
|
setup.py
|
#!/usr/bin/env python3
from setuptools import setup
with open("README.rst") as fd:
long_description = fd.read()
setup(
name="tvnamer",
version="1.0.0-dev",
description="Utility to rename lots of TV video files using the TheTVDB.",
long_description=long_description,
author="Tom Leese",
author_email="tom@tomleese.me.uk",
url="https://github.com/tomleese/tvnamer",
packages=["tvnamer"],
test_suite="tests",
install_requires=[
"pytvdbapi",
"pyside"
],
entry_points={
"console_scripts": [
"tvnamer = tvnamer:main",
"tvnamer-cli = tvnamer.cli:main",
"tvnamer-gui = tvnamer.gui:main",
]
},
classifiers=[
"Topic :: Internet",
"Topic :: Multimedia :: Video",
"Development Status :: 2 - Pre-Alpha",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4"
]
)
|
#!/usr/bin/env python3
from setuptools import setup
with open("README.rst") as fd:
long_description = fd.read()
setup(
name="tvnamer",
version="1.0.0-dev",
description="Utility to rename lots of TV video files using the TheTVDB.",
long_description=long_description,
author="Tom Leese",
author_email="tom@tomleese.me.uk",
url="https://github.com/tomleese/tvnamer",
packages=["tvnamer"],
test_suite="tests",
install_requires=[
"pytvdbapi",
"pyside"
],
entry_points={
"console_scripts": [
"tvnamer = tvnamer:main",
"tvnamer-cli = tvnamer.cli:main"
],
'gui_scripts': [
"tvnamer-gui = tvnamer.gui:main",
]
},
classifiers=[
"Topic :: Internet",
"Topic :: Multimedia :: Video",
"Development Status :: 2 - Pre-Alpha",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4"
]
)
|
Mark 'tvnamer-gui' as a GUI script
|
Mark 'tvnamer-gui' as a GUI script
|
Python
|
mit
|
tomleese/tvnamer,thomasleese/tvnamer
|
#!/usr/bin/env python3
from setuptools import setup
with open("README.rst") as fd:
long_description = fd.read()
setup(
name="tvnamer",
version="1.0.0-dev",
description="Utility to rename lots of TV video files using the TheTVDB.",
long_description=long_description,
author="Tom Leese",
author_email="tom@tomleese.me.uk",
url="https://github.com/tomleese/tvnamer",
packages=["tvnamer"],
test_suite="tests",
install_requires=[
"pytvdbapi",
"pyside"
],
entry_points={
"console_scripts": [
"tvnamer = tvnamer:main",
"tvnamer-cli = tvnamer.cli:main",
"tvnamer-gui = tvnamer.gui:main",
]
},
classifiers=[
"Topic :: Internet",
"Topic :: Multimedia :: Video",
"Development Status :: 2 - Pre-Alpha",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4"
]
)
Mark 'tvnamer-gui' as a GUI script
|
#!/usr/bin/env python3
from setuptools import setup
with open("README.rst") as fd:
long_description = fd.read()
setup(
name="tvnamer",
version="1.0.0-dev",
description="Utility to rename lots of TV video files using the TheTVDB.",
long_description=long_description,
author="Tom Leese",
author_email="tom@tomleese.me.uk",
url="https://github.com/tomleese/tvnamer",
packages=["tvnamer"],
test_suite="tests",
install_requires=[
"pytvdbapi",
"pyside"
],
entry_points={
"console_scripts": [
"tvnamer = tvnamer:main",
"tvnamer-cli = tvnamer.cli:main"
],
'gui_scripts': [
"tvnamer-gui = tvnamer.gui:main",
]
},
classifiers=[
"Topic :: Internet",
"Topic :: Multimedia :: Video",
"Development Status :: 2 - Pre-Alpha",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4"
]
)
|
<commit_before>#!/usr/bin/env python3
from setuptools import setup
with open("README.rst") as fd:
long_description = fd.read()
setup(
name="tvnamer",
version="1.0.0-dev",
description="Utility to rename lots of TV video files using the TheTVDB.",
long_description=long_description,
author="Tom Leese",
author_email="tom@tomleese.me.uk",
url="https://github.com/tomleese/tvnamer",
packages=["tvnamer"],
test_suite="tests",
install_requires=[
"pytvdbapi",
"pyside"
],
entry_points={
"console_scripts": [
"tvnamer = tvnamer:main",
"tvnamer-cli = tvnamer.cli:main",
"tvnamer-gui = tvnamer.gui:main",
]
},
classifiers=[
"Topic :: Internet",
"Topic :: Multimedia :: Video",
"Development Status :: 2 - Pre-Alpha",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4"
]
)
<commit_msg>Mark 'tvnamer-gui' as a GUI script<commit_after>
|
#!/usr/bin/env python3
from setuptools import setup
with open("README.rst") as fd:
long_description = fd.read()
setup(
name="tvnamer",
version="1.0.0-dev",
description="Utility to rename lots of TV video files using the TheTVDB.",
long_description=long_description,
author="Tom Leese",
author_email="tom@tomleese.me.uk",
url="https://github.com/tomleese/tvnamer",
packages=["tvnamer"],
test_suite="tests",
install_requires=[
"pytvdbapi",
"pyside"
],
entry_points={
"console_scripts": [
"tvnamer = tvnamer:main",
"tvnamer-cli = tvnamer.cli:main"
],
'gui_scripts': [
"tvnamer-gui = tvnamer.gui:main",
]
},
classifiers=[
"Topic :: Internet",
"Topic :: Multimedia :: Video",
"Development Status :: 2 - Pre-Alpha",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4"
]
)
|
#!/usr/bin/env python3
from setuptools import setup
with open("README.rst") as fd:
long_description = fd.read()
setup(
name="tvnamer",
version="1.0.0-dev",
description="Utility to rename lots of TV video files using the TheTVDB.",
long_description=long_description,
author="Tom Leese",
author_email="tom@tomleese.me.uk",
url="https://github.com/tomleese/tvnamer",
packages=["tvnamer"],
test_suite="tests",
install_requires=[
"pytvdbapi",
"pyside"
],
entry_points={
"console_scripts": [
"tvnamer = tvnamer:main",
"tvnamer-cli = tvnamer.cli:main",
"tvnamer-gui = tvnamer.gui:main",
]
},
classifiers=[
"Topic :: Internet",
"Topic :: Multimedia :: Video",
"Development Status :: 2 - Pre-Alpha",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4"
]
)
Mark 'tvnamer-gui' as a GUI script#!/usr/bin/env python3
from setuptools import setup
with open("README.rst") as fd:
long_description = fd.read()
setup(
name="tvnamer",
version="1.0.0-dev",
description="Utility to rename lots of TV video files using the TheTVDB.",
long_description=long_description,
author="Tom Leese",
author_email="tom@tomleese.me.uk",
url="https://github.com/tomleese/tvnamer",
packages=["tvnamer"],
test_suite="tests",
install_requires=[
"pytvdbapi",
"pyside"
],
entry_points={
"console_scripts": [
"tvnamer = tvnamer:main",
"tvnamer-cli = tvnamer.cli:main"
],
'gui_scripts': [
"tvnamer-gui = tvnamer.gui:main",
]
},
classifiers=[
"Topic :: Internet",
"Topic :: Multimedia :: Video",
"Development Status :: 2 - Pre-Alpha",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4"
]
)
|
<commit_before>#!/usr/bin/env python3
from setuptools import setup
with open("README.rst") as fd:
long_description = fd.read()
setup(
name="tvnamer",
version="1.0.0-dev",
description="Utility to rename lots of TV video files using the TheTVDB.",
long_description=long_description,
author="Tom Leese",
author_email="tom@tomleese.me.uk",
url="https://github.com/tomleese/tvnamer",
packages=["tvnamer"],
test_suite="tests",
install_requires=[
"pytvdbapi",
"pyside"
],
entry_points={
"console_scripts": [
"tvnamer = tvnamer:main",
"tvnamer-cli = tvnamer.cli:main",
"tvnamer-gui = tvnamer.gui:main",
]
},
classifiers=[
"Topic :: Internet",
"Topic :: Multimedia :: Video",
"Development Status :: 2 - Pre-Alpha",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4"
]
)
<commit_msg>Mark 'tvnamer-gui' as a GUI script<commit_after>#!/usr/bin/env python3
from setuptools import setup
with open("README.rst") as fd:
long_description = fd.read()
setup(
name="tvnamer",
version="1.0.0-dev",
description="Utility to rename lots of TV video files using the TheTVDB.",
long_description=long_description,
author="Tom Leese",
author_email="tom@tomleese.me.uk",
url="https://github.com/tomleese/tvnamer",
packages=["tvnamer"],
test_suite="tests",
install_requires=[
"pytvdbapi",
"pyside"
],
entry_points={
"console_scripts": [
"tvnamer = tvnamer:main",
"tvnamer-cli = tvnamer.cli:main"
],
'gui_scripts': [
"tvnamer-gui = tvnamer.gui:main",
]
},
classifiers=[
"Topic :: Internet",
"Topic :: Multimedia :: Video",
"Development Status :: 2 - Pre-Alpha",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4"
]
)
|
d33700b259cf281162352315c91743e3c26d94f7
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name="yasha",
author="Kim Blomqvist",
author_email="kblomqvist@iki.fi",
version="1.1",
description="A command-line tool to render Jinja templates",
keywords=["jinja", "code generator"],
packages=find_packages(),
include_package_data=True,
install_requires=[
"Click",
"Jinja2",
"pytoml",
"pyyaml",
],
entry_points='''
[console_scripts]
yasha=yasha.scripts.yasha:cli
''',
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Topic :: Software Development :: Code Generators",
],
url="https://github.com/kblomqvist/yasha",
download_url="https://github.com/kblomqvist/yasha/tarball/1.1",
)
|
from setuptools import setup, find_packages
setup(
name="yasha",
author="Kim Blomqvist",
author_email="kblomqvist@iki.fi",
version="1.1",
description="A command-line tool to render Jinja templates",
keywords=["jinja", "code generator"],
packages=find_packages(),
include_package_data=True,
install_requires=[
"Click",
"Jinja2",
"pytoml",
"pyyaml",
],
entry_points='''
[console_scripts]
yasha=yasha.scripts.yasha:cli
''',
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Topic :: Software Development :: Code Generators",
"Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: MIT License",
],
url="https://github.com/kblomqvist/yasha",
download_url="https://github.com/kblomqvist/yasha/tarball/1.1",
)
|
Add new PyPi classifier "Development Status"
|
Add new PyPi classifier "Development Status"
|
Python
|
mit
|
kblomqvist/yasha
|
from setuptools import setup, find_packages
setup(
name="yasha",
author="Kim Blomqvist",
author_email="kblomqvist@iki.fi",
version="1.1",
description="A command-line tool to render Jinja templates",
keywords=["jinja", "code generator"],
packages=find_packages(),
include_package_data=True,
install_requires=[
"Click",
"Jinja2",
"pytoml",
"pyyaml",
],
entry_points='''
[console_scripts]
yasha=yasha.scripts.yasha:cli
''',
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Topic :: Software Development :: Code Generators",
],
url="https://github.com/kblomqvist/yasha",
download_url="https://github.com/kblomqvist/yasha/tarball/1.1",
)
Add new PyPi classifier "Development Status"
|
from setuptools import setup, find_packages
setup(
name="yasha",
author="Kim Blomqvist",
author_email="kblomqvist@iki.fi",
version="1.1",
description="A command-line tool to render Jinja templates",
keywords=["jinja", "code generator"],
packages=find_packages(),
include_package_data=True,
install_requires=[
"Click",
"Jinja2",
"pytoml",
"pyyaml",
],
entry_points='''
[console_scripts]
yasha=yasha.scripts.yasha:cli
''',
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Topic :: Software Development :: Code Generators",
"Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: MIT License",
],
url="https://github.com/kblomqvist/yasha",
download_url="https://github.com/kblomqvist/yasha/tarball/1.1",
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name="yasha",
author="Kim Blomqvist",
author_email="kblomqvist@iki.fi",
version="1.1",
description="A command-line tool to render Jinja templates",
keywords=["jinja", "code generator"],
packages=find_packages(),
include_package_data=True,
install_requires=[
"Click",
"Jinja2",
"pytoml",
"pyyaml",
],
entry_points='''
[console_scripts]
yasha=yasha.scripts.yasha:cli
''',
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Topic :: Software Development :: Code Generators",
],
url="https://github.com/kblomqvist/yasha",
download_url="https://github.com/kblomqvist/yasha/tarball/1.1",
)
<commit_msg>Add new PyPi classifier "Development Status"<commit_after>
|
from setuptools import setup, find_packages
setup(
name="yasha",
author="Kim Blomqvist",
author_email="kblomqvist@iki.fi",
version="1.1",
description="A command-line tool to render Jinja templates",
keywords=["jinja", "code generator"],
packages=find_packages(),
include_package_data=True,
install_requires=[
"Click",
"Jinja2",
"pytoml",
"pyyaml",
],
entry_points='''
[console_scripts]
yasha=yasha.scripts.yasha:cli
''',
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Topic :: Software Development :: Code Generators",
"Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: MIT License",
],
url="https://github.com/kblomqvist/yasha",
download_url="https://github.com/kblomqvist/yasha/tarball/1.1",
)
|
from setuptools import setup, find_packages
setup(
name="yasha",
author="Kim Blomqvist",
author_email="kblomqvist@iki.fi",
version="1.1",
description="A command-line tool to render Jinja templates",
keywords=["jinja", "code generator"],
packages=find_packages(),
include_package_data=True,
install_requires=[
"Click",
"Jinja2",
"pytoml",
"pyyaml",
],
entry_points='''
[console_scripts]
yasha=yasha.scripts.yasha:cli
''',
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Topic :: Software Development :: Code Generators",
],
url="https://github.com/kblomqvist/yasha",
download_url="https://github.com/kblomqvist/yasha/tarball/1.1",
)
Add new PyPi classifier "Development Status"from setuptools import setup, find_packages
setup(
name="yasha",
author="Kim Blomqvist",
author_email="kblomqvist@iki.fi",
version="1.1",
description="A command-line tool to render Jinja templates",
keywords=["jinja", "code generator"],
packages=find_packages(),
include_package_data=True,
install_requires=[
"Click",
"Jinja2",
"pytoml",
"pyyaml",
],
entry_points='''
[console_scripts]
yasha=yasha.scripts.yasha:cli
''',
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Topic :: Software Development :: Code Generators",
"Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: MIT License",
],
url="https://github.com/kblomqvist/yasha",
download_url="https://github.com/kblomqvist/yasha/tarball/1.1",
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name="yasha",
author="Kim Blomqvist",
author_email="kblomqvist@iki.fi",
version="1.1",
description="A command-line tool to render Jinja templates",
keywords=["jinja", "code generator"],
packages=find_packages(),
include_package_data=True,
install_requires=[
"Click",
"Jinja2",
"pytoml",
"pyyaml",
],
entry_points='''
[console_scripts]
yasha=yasha.scripts.yasha:cli
''',
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Topic :: Software Development :: Code Generators",
],
url="https://github.com/kblomqvist/yasha",
download_url="https://github.com/kblomqvist/yasha/tarball/1.1",
)
<commit_msg>Add new PyPi classifier "Development Status"<commit_after>from setuptools import setup, find_packages
setup(
name="yasha",
author="Kim Blomqvist",
author_email="kblomqvist@iki.fi",
version="1.1",
description="A command-line tool to render Jinja templates",
keywords=["jinja", "code generator"],
packages=find_packages(),
include_package_data=True,
install_requires=[
"Click",
"Jinja2",
"pytoml",
"pyyaml",
],
entry_points='''
[console_scripts]
yasha=yasha.scripts.yasha:cli
''',
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Topic :: Software Development :: Code Generators",
"Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: MIT License",
],
url="https://github.com/kblomqvist/yasha",
download_url="https://github.com/kblomqvist/yasha/tarball/1.1",
)
|
6b86d37808c6d13ef9c7c79c879b86ff9c04104d
|
setup.py
|
setup.py
|
import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='mafiademonstration',
version='0.4.5',
author='Isaac Smith, Hei Jing Tsang',
author_email='sentherus@gmail.com',
description='A user friendly interface for playing a simplified game of Mafia.',
long_description=read('README.rst'),
license='MIT',
keywords=(
"Python, kivy, pytest, projects, project, "
"documentation, setup.py, package "
),
url='https://github.com/zenohm/mafiademonstration',
install_requires=[
'kivy>=1.9.1',
'click',
],
zip_safe=False,
packages=find_packages(),
include_package_data=True,
tests_require=['unittest'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: X11 Applications',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 3',
'Topic :: Multimedia :: Graphics :: Presentation',
'Topic :: Software Development :: User Interfaces',
],
)
|
import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='mafiademonstration',
version='0.4.5',
author='Isaac Smith, Hei Jing Tsang',
author_email='sentherus@gmail.com',
description='A user friendly interface for playing a simplified game of Mafia.',
long_description=read('README.rst'),
license='MIT',
keywords=(
"Python, kivy, pytest, projects, project, "
"documentation, setup.py, package "
),
url='https://github.com/zenohm/mafiademonstration',
install_requires=[
'kivy>=1.9.1',
'click',
],
zip_safe=False,
packages=find_packages(),
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: X11 Applications',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 3',
'Topic :: Multimedia :: Graphics :: Presentation',
'Topic :: Software Development :: User Interfaces',
],
)
|
Fix the test command in the makefile
|
Fix the test command in the makefile
|
Python
|
mit
|
Zenohm/mafiademonstration
|
import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='mafiademonstration',
version='0.4.5',
author='Isaac Smith, Hei Jing Tsang',
author_email='sentherus@gmail.com',
description='A user friendly interface for playing a simplified game of Mafia.',
long_description=read('README.rst'),
license='MIT',
keywords=(
"Python, kivy, pytest, projects, project, "
"documentation, setup.py, package "
),
url='https://github.com/zenohm/mafiademonstration',
install_requires=[
'kivy>=1.9.1',
'click',
],
zip_safe=False,
packages=find_packages(),
include_package_data=True,
tests_require=['unittest'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: X11 Applications',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 3',
'Topic :: Multimedia :: Graphics :: Presentation',
'Topic :: Software Development :: User Interfaces',
],
)
Fix the test command in the makefile
|
import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='mafiademonstration',
version='0.4.5',
author='Isaac Smith, Hei Jing Tsang',
author_email='sentherus@gmail.com',
description='A user friendly interface for playing a simplified game of Mafia.',
long_description=read('README.rst'),
license='MIT',
keywords=(
"Python, kivy, pytest, projects, project, "
"documentation, setup.py, package "
),
url='https://github.com/zenohm/mafiademonstration',
install_requires=[
'kivy>=1.9.1',
'click',
],
zip_safe=False,
packages=find_packages(),
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: X11 Applications',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 3',
'Topic :: Multimedia :: Graphics :: Presentation',
'Topic :: Software Development :: User Interfaces',
],
)
|
<commit_before>import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='mafiademonstration',
version='0.4.5',
author='Isaac Smith, Hei Jing Tsang',
author_email='sentherus@gmail.com',
description='A user friendly interface for playing a simplified game of Mafia.',
long_description=read('README.rst'),
license='MIT',
keywords=(
"Python, kivy, pytest, projects, project, "
"documentation, setup.py, package "
),
url='https://github.com/zenohm/mafiademonstration',
install_requires=[
'kivy>=1.9.1',
'click',
],
zip_safe=False,
packages=find_packages(),
include_package_data=True,
tests_require=['unittest'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: X11 Applications',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 3',
'Topic :: Multimedia :: Graphics :: Presentation',
'Topic :: Software Development :: User Interfaces',
],
)
<commit_msg>Fix the test command in the makefile<commit_after>
|
import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='mafiademonstration',
version='0.4.5',
author='Isaac Smith, Hei Jing Tsang',
author_email='sentherus@gmail.com',
description='A user friendly interface for playing a simplified game of Mafia.',
long_description=read('README.rst'),
license='MIT',
keywords=(
"Python, kivy, pytest, projects, project, "
"documentation, setup.py, package "
),
url='https://github.com/zenohm/mafiademonstration',
install_requires=[
'kivy>=1.9.1',
'click',
],
zip_safe=False,
packages=find_packages(),
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: X11 Applications',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 3',
'Topic :: Multimedia :: Graphics :: Presentation',
'Topic :: Software Development :: User Interfaces',
],
)
|
import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='mafiademonstration',
version='0.4.5',
author='Isaac Smith, Hei Jing Tsang',
author_email='sentherus@gmail.com',
description='A user friendly interface for playing a simplified game of Mafia.',
long_description=read('README.rst'),
license='MIT',
keywords=(
"Python, kivy, pytest, projects, project, "
"documentation, setup.py, package "
),
url='https://github.com/zenohm/mafiademonstration',
install_requires=[
'kivy>=1.9.1',
'click',
],
zip_safe=False,
packages=find_packages(),
include_package_data=True,
tests_require=['unittest'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: X11 Applications',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 3',
'Topic :: Multimedia :: Graphics :: Presentation',
'Topic :: Software Development :: User Interfaces',
],
)
Fix the test command in the makefileimport os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='mafiademonstration',
version='0.4.5',
author='Isaac Smith, Hei Jing Tsang',
author_email='sentherus@gmail.com',
description='A user friendly interface for playing a simplified game of Mafia.',
long_description=read('README.rst'),
license='MIT',
keywords=(
"Python, kivy, pytest, projects, project, "
"documentation, setup.py, package "
),
url='https://github.com/zenohm/mafiademonstration',
install_requires=[
'kivy>=1.9.1',
'click',
],
zip_safe=False,
packages=find_packages(),
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: X11 Applications',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 3',
'Topic :: Multimedia :: Graphics :: Presentation',
'Topic :: Software Development :: User Interfaces',
],
)
|
<commit_before>import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='mafiademonstration',
version='0.4.5',
author='Isaac Smith, Hei Jing Tsang',
author_email='sentherus@gmail.com',
description='A user friendly interface for playing a simplified game of Mafia.',
long_description=read('README.rst'),
license='MIT',
keywords=(
"Python, kivy, pytest, projects, project, "
"documentation, setup.py, package "
),
url='https://github.com/zenohm/mafiademonstration',
install_requires=[
'kivy>=1.9.1',
'click',
],
zip_safe=False,
packages=find_packages(),
include_package_data=True,
tests_require=['unittest'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: X11 Applications',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 3',
'Topic :: Multimedia :: Graphics :: Presentation',
'Topic :: Software Development :: User Interfaces',
],
)
<commit_msg>Fix the test command in the makefile<commit_after>import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='mafiademonstration',
version='0.4.5',
author='Isaac Smith, Hei Jing Tsang',
author_email='sentherus@gmail.com',
description='A user friendly interface for playing a simplified game of Mafia.',
long_description=read('README.rst'),
license='MIT',
keywords=(
"Python, kivy, pytest, projects, project, "
"documentation, setup.py, package "
),
url='https://github.com/zenohm/mafiademonstration',
install_requires=[
'kivy>=1.9.1',
'click',
],
zip_safe=False,
packages=find_packages(),
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: X11 Applications',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 3',
'Topic :: Multimedia :: Graphics :: Presentation',
'Topic :: Software Development :: User Interfaces',
],
)
|
fa09d3b526bdf04dcabda603ef1e0adac8ae68bd
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='python-binary-memcached',
version='0.24.6',
author='Jayson Reis',
author_email='santosdosreis@gmail.com',
description='A pure python module to access memcached via it\'s binary protocol with SASL auth support',
url='https://github.com/jaysonsantos/python-binary-memcached',
packages=['bmemcached'],
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
install_requires=[
'six'
]
)
|
from setuptools import setup
setup(
name='python-binary-memcached',
version='0.24.6',
author='Jayson Reis',
author_email='santosdosreis@gmail.com',
description='A pure python module to access memcached via its binary protocol with SASL auth support',
url='https://github.com/jaysonsantos/python-binary-memcached',
packages=['bmemcached'],
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
install_requires=[
'six'
]
)
|
Fix a typo in description: it's => its
|
Fix a typo in description: it's => its
|
Python
|
mit
|
jaysonsantos/python-binary-memcached,jaysonsantos/python-binary-memcached
|
from setuptools import setup
setup(
name='python-binary-memcached',
version='0.24.6',
author='Jayson Reis',
author_email='santosdosreis@gmail.com',
description='A pure python module to access memcached via it\'s binary protocol with SASL auth support',
url='https://github.com/jaysonsantos/python-binary-memcached',
packages=['bmemcached'],
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
install_requires=[
'six'
]
)
Fix a typo in description: it's => its
|
from setuptools import setup
setup(
name='python-binary-memcached',
version='0.24.6',
author='Jayson Reis',
author_email='santosdosreis@gmail.com',
description='A pure python module to access memcached via its binary protocol with SASL auth support',
url='https://github.com/jaysonsantos/python-binary-memcached',
packages=['bmemcached'],
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
install_requires=[
'six'
]
)
|
<commit_before>from setuptools import setup
setup(
name='python-binary-memcached',
version='0.24.6',
author='Jayson Reis',
author_email='santosdosreis@gmail.com',
description='A pure python module to access memcached via it\'s binary protocol with SASL auth support',
url='https://github.com/jaysonsantos/python-binary-memcached',
packages=['bmemcached'],
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
install_requires=[
'six'
]
)
<commit_msg>Fix a typo in description: it's => its<commit_after>
|
from setuptools import setup
setup(
name='python-binary-memcached',
version='0.24.6',
author='Jayson Reis',
author_email='santosdosreis@gmail.com',
description='A pure python module to access memcached via its binary protocol with SASL auth support',
url='https://github.com/jaysonsantos/python-binary-memcached',
packages=['bmemcached'],
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
install_requires=[
'six'
]
)
|
from setuptools import setup
setup(
name='python-binary-memcached',
version='0.24.6',
author='Jayson Reis',
author_email='santosdosreis@gmail.com',
description='A pure python module to access memcached via it\'s binary protocol with SASL auth support',
url='https://github.com/jaysonsantos/python-binary-memcached',
packages=['bmemcached'],
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
install_requires=[
'six'
]
)
Fix a typo in description: it's => itsfrom setuptools import setup
setup(
name='python-binary-memcached',
version='0.24.6',
author='Jayson Reis',
author_email='santosdosreis@gmail.com',
description='A pure python module to access memcached via its binary protocol with SASL auth support',
url='https://github.com/jaysonsantos/python-binary-memcached',
packages=['bmemcached'],
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
install_requires=[
'six'
]
)
|
<commit_before>from setuptools import setup
setup(
name='python-binary-memcached',
version='0.24.6',
author='Jayson Reis',
author_email='santosdosreis@gmail.com',
description='A pure python module to access memcached via it\'s binary protocol with SASL auth support',
url='https://github.com/jaysonsantos/python-binary-memcached',
packages=['bmemcached'],
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
install_requires=[
'six'
]
)
<commit_msg>Fix a typo in description: it's => its<commit_after>from setuptools import setup
setup(
name='python-binary-memcached',
version='0.24.6',
author='Jayson Reis',
author_email='santosdosreis@gmail.com',
description='A pure python module to access memcached via its binary protocol with SASL auth support',
url='https://github.com/jaysonsantos/python-binary-memcached',
packages=['bmemcached'],
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
install_requires=[
'six'
]
)
|
2b47f180ed79bbfe553e3c477ad12c5fa69e2823
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages
tests_require = []
setup(
name='ashlar',
version='0.0.2',
description='Define and validate schemas for metadata for geotemporal event records',
author='Azavea, Inc.',
author_email='info@azavea.com',
keywords='gis jsonschema',
packages=find_packages(exclude=['tests']),
dependency_links=[
'https://github.com/azavea/djsonb/tarball/develop#egg=djsonb-0.1.3'
],
install_requires=[
'Django >=1.8',
'djangorestframework >=3.1.1',
'djangorestframework-gis >=0.8.1',
'django-filter >=0.9.2',
'djsonb >=0.1.3',
'jsonschema >=2.4.0',
'psycopg2 >=2.6',
'django-extensions >=1.5.2',
'python-dateutil >=2.4.2',
'PyYAML >=3.11'
],
extras_require={
'dev': [],
'test': tests_require
},
test_suite='tests',
tests_require=tests_require,
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
tests_require = []
setup(
name='ashlar',
version='0.0.2',
description='Define and validate schemas for metadata for geotemporal event records',
author='Azavea, Inc.',
author_email='info@azavea.com',
keywords='gis jsonschema',
packages=find_packages(exclude=['tests']),
dependency_links=[
'https://github.com/azavea/djsonb/tarball/develop#egg=djsonb-0.1.4'
],
install_requires=[
'Django >=1.8',
'djangorestframework >=3.1.1',
'djangorestframework-gis >=0.8.1',
'django-filter >=0.9.2',
'djsonb >=0.1.4',
'jsonschema >=2.4.0',
'psycopg2 >=2.6',
'django-extensions >=1.5.2',
'python-dateutil >=2.4.2',
'PyYAML >=3.11'
],
extras_require={
'dev': [],
'test': tests_require
},
test_suite='tests',
tests_require=tests_require,
)
|
Upgrade djsonb to fix syntax error
|
Upgrade djsonb to fix syntax error
|
Python
|
mit
|
flibbertigibbet/ashlar,flibbertigibbet/ashlar,azavea/ashlar,azavea/ashlar
|
#!/usr/bin/env python
from setuptools import setup, find_packages
tests_require = []
setup(
name='ashlar',
version='0.0.2',
description='Define and validate schemas for metadata for geotemporal event records',
author='Azavea, Inc.',
author_email='info@azavea.com',
keywords='gis jsonschema',
packages=find_packages(exclude=['tests']),
dependency_links=[
'https://github.com/azavea/djsonb/tarball/develop#egg=djsonb-0.1.3'
],
install_requires=[
'Django >=1.8',
'djangorestframework >=3.1.1',
'djangorestframework-gis >=0.8.1',
'django-filter >=0.9.2',
'djsonb >=0.1.3',
'jsonschema >=2.4.0',
'psycopg2 >=2.6',
'django-extensions >=1.5.2',
'python-dateutil >=2.4.2',
'PyYAML >=3.11'
],
extras_require={
'dev': [],
'test': tests_require
},
test_suite='tests',
tests_require=tests_require,
)
Upgrade djsonb to fix syntax error
|
#!/usr/bin/env python
from setuptools import setup, find_packages
tests_require = []
setup(
name='ashlar',
version='0.0.2',
description='Define and validate schemas for metadata for geotemporal event records',
author='Azavea, Inc.',
author_email='info@azavea.com',
keywords='gis jsonschema',
packages=find_packages(exclude=['tests']),
dependency_links=[
'https://github.com/azavea/djsonb/tarball/develop#egg=djsonb-0.1.4'
],
install_requires=[
'Django >=1.8',
'djangorestframework >=3.1.1',
'djangorestframework-gis >=0.8.1',
'django-filter >=0.9.2',
'djsonb >=0.1.4',
'jsonschema >=2.4.0',
'psycopg2 >=2.6',
'django-extensions >=1.5.2',
'python-dateutil >=2.4.2',
'PyYAML >=3.11'
],
extras_require={
'dev': [],
'test': tests_require
},
test_suite='tests',
tests_require=tests_require,
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
tests_require = []
setup(
name='ashlar',
version='0.0.2',
description='Define and validate schemas for metadata for geotemporal event records',
author='Azavea, Inc.',
author_email='info@azavea.com',
keywords='gis jsonschema',
packages=find_packages(exclude=['tests']),
dependency_links=[
'https://github.com/azavea/djsonb/tarball/develop#egg=djsonb-0.1.3'
],
install_requires=[
'Django >=1.8',
'djangorestframework >=3.1.1',
'djangorestframework-gis >=0.8.1',
'django-filter >=0.9.2',
'djsonb >=0.1.3',
'jsonschema >=2.4.0',
'psycopg2 >=2.6',
'django-extensions >=1.5.2',
'python-dateutil >=2.4.2',
'PyYAML >=3.11'
],
extras_require={
'dev': [],
'test': tests_require
},
test_suite='tests',
tests_require=tests_require,
)
<commit_msg>Upgrade djsonb to fix syntax error<commit_after>
|
#!/usr/bin/env python
from setuptools import setup, find_packages
tests_require = []
setup(
name='ashlar',
version='0.0.2',
description='Define and validate schemas for metadata for geotemporal event records',
author='Azavea, Inc.',
author_email='info@azavea.com',
keywords='gis jsonschema',
packages=find_packages(exclude=['tests']),
dependency_links=[
'https://github.com/azavea/djsonb/tarball/develop#egg=djsonb-0.1.4'
],
install_requires=[
'Django >=1.8',
'djangorestframework >=3.1.1',
'djangorestframework-gis >=0.8.1',
'django-filter >=0.9.2',
'djsonb >=0.1.4',
'jsonschema >=2.4.0',
'psycopg2 >=2.6',
'django-extensions >=1.5.2',
'python-dateutil >=2.4.2',
'PyYAML >=3.11'
],
extras_require={
'dev': [],
'test': tests_require
},
test_suite='tests',
tests_require=tests_require,
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
tests_require = []
setup(
name='ashlar',
version='0.0.2',
description='Define and validate schemas for metadata for geotemporal event records',
author='Azavea, Inc.',
author_email='info@azavea.com',
keywords='gis jsonschema',
packages=find_packages(exclude=['tests']),
dependency_links=[
'https://github.com/azavea/djsonb/tarball/develop#egg=djsonb-0.1.3'
],
install_requires=[
'Django >=1.8',
'djangorestframework >=3.1.1',
'djangorestframework-gis >=0.8.1',
'django-filter >=0.9.2',
'djsonb >=0.1.3',
'jsonschema >=2.4.0',
'psycopg2 >=2.6',
'django-extensions >=1.5.2',
'python-dateutil >=2.4.2',
'PyYAML >=3.11'
],
extras_require={
'dev': [],
'test': tests_require
},
test_suite='tests',
tests_require=tests_require,
)
Upgrade djsonb to fix syntax error#!/usr/bin/env python
from setuptools import setup, find_packages
tests_require = []
setup(
name='ashlar',
version='0.0.2',
description='Define and validate schemas for metadata for geotemporal event records',
author='Azavea, Inc.',
author_email='info@azavea.com',
keywords='gis jsonschema',
packages=find_packages(exclude=['tests']),
dependency_links=[
'https://github.com/azavea/djsonb/tarball/develop#egg=djsonb-0.1.4'
],
install_requires=[
'Django >=1.8',
'djangorestframework >=3.1.1',
'djangorestframework-gis >=0.8.1',
'django-filter >=0.9.2',
'djsonb >=0.1.4',
'jsonschema >=2.4.0',
'psycopg2 >=2.6',
'django-extensions >=1.5.2',
'python-dateutil >=2.4.2',
'PyYAML >=3.11'
],
extras_require={
'dev': [],
'test': tests_require
},
test_suite='tests',
tests_require=tests_require,
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
tests_require = []
setup(
name='ashlar',
version='0.0.2',
description='Define and validate schemas for metadata for geotemporal event records',
author='Azavea, Inc.',
author_email='info@azavea.com',
keywords='gis jsonschema',
packages=find_packages(exclude=['tests']),
dependency_links=[
'https://github.com/azavea/djsonb/tarball/develop#egg=djsonb-0.1.3'
],
install_requires=[
'Django >=1.8',
'djangorestframework >=3.1.1',
'djangorestframework-gis >=0.8.1',
'django-filter >=0.9.2',
'djsonb >=0.1.3',
'jsonschema >=2.4.0',
'psycopg2 >=2.6',
'django-extensions >=1.5.2',
'python-dateutil >=2.4.2',
'PyYAML >=3.11'
],
extras_require={
'dev': [],
'test': tests_require
},
test_suite='tests',
tests_require=tests_require,
)
<commit_msg>Upgrade djsonb to fix syntax error<commit_after>#!/usr/bin/env python
from setuptools import setup, find_packages
tests_require = []
setup(
name='ashlar',
version='0.0.2',
description='Define and validate schemas for metadata for geotemporal event records',
author='Azavea, Inc.',
author_email='info@azavea.com',
keywords='gis jsonschema',
packages=find_packages(exclude=['tests']),
dependency_links=[
'https://github.com/azavea/djsonb/tarball/develop#egg=djsonb-0.1.4'
],
install_requires=[
'Django >=1.8',
'djangorestframework >=3.1.1',
'djangorestframework-gis >=0.8.1',
'django-filter >=0.9.2',
'djsonb >=0.1.4',
'jsonschema >=2.4.0',
'psycopg2 >=2.6',
'django-extensions >=1.5.2',
'python-dateutil >=2.4.2',
'PyYAML >=3.11'
],
extras_require={
'dev': [],
'test': tests_require
},
test_suite='tests',
tests_require=tests_require,
)
|
bac7bc1bb9663adebe0c1768d67c4ed1d1f452fc
|
setup.py
|
setup.py
|
#! /usr/bin/env python
from setuptools import find_packages, setup
setup(
name='natsort',
version='6.0.0',
packages=find_packages(),
entry_points={'console_scripts': ['natsort = natsort.__main__:main']},
python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*",
extras_require={
'fast': ["fastnumbers >= 2.0.0"],
'icu': ["PyICU >= 1.0.0"]
}
)
|
#! /usr/bin/env python
from setuptools import find_packages, setup
setup(
name='natsort',
version='6.0.0',
packages=find_packages(),
entry_points={'console_scripts': ['natsort = natsort.__main__:main']},
python_requires=">=3.4",
extras_require={
'fast': ["fastnumbers >= 2.0.0"],
'icu': ["PyICU >= 1.0.0"]
}
)
|
Remove support for Python 2
|
Remove support for Python 2
This commit will prevent pip from installing natsort on any Python
version older than 3.4.
|
Python
|
mit
|
SethMMorton/natsort
|
#! /usr/bin/env python
from setuptools import find_packages, setup
setup(
name='natsort',
version='6.0.0',
packages=find_packages(),
entry_points={'console_scripts': ['natsort = natsort.__main__:main']},
python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*",
extras_require={
'fast': ["fastnumbers >= 2.0.0"],
'icu': ["PyICU >= 1.0.0"]
}
)
Remove support for Python 2
This commit will prevent pip from installing natsort on any Python
version older than 3.4.
|
#! /usr/bin/env python
from setuptools import find_packages, setup
setup(
name='natsort',
version='6.0.0',
packages=find_packages(),
entry_points={'console_scripts': ['natsort = natsort.__main__:main']},
python_requires=">=3.4",
extras_require={
'fast': ["fastnumbers >= 2.0.0"],
'icu': ["PyICU >= 1.0.0"]
}
)
|
<commit_before>#! /usr/bin/env python
from setuptools import find_packages, setup
setup(
name='natsort',
version='6.0.0',
packages=find_packages(),
entry_points={'console_scripts': ['natsort = natsort.__main__:main']},
python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*",
extras_require={
'fast': ["fastnumbers >= 2.0.0"],
'icu': ["PyICU >= 1.0.0"]
}
)
<commit_msg>Remove support for Python 2
This commit will prevent pip from installing natsort on any Python
version older than 3.4.<commit_after>
|
#! /usr/bin/env python
from setuptools import find_packages, setup
setup(
name='natsort',
version='6.0.0',
packages=find_packages(),
entry_points={'console_scripts': ['natsort = natsort.__main__:main']},
python_requires=">=3.4",
extras_require={
'fast': ["fastnumbers >= 2.0.0"],
'icu': ["PyICU >= 1.0.0"]
}
)
|
#! /usr/bin/env python
from setuptools import find_packages, setup
setup(
name='natsort',
version='6.0.0',
packages=find_packages(),
entry_points={'console_scripts': ['natsort = natsort.__main__:main']},
python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*",
extras_require={
'fast': ["fastnumbers >= 2.0.0"],
'icu': ["PyICU >= 1.0.0"]
}
)
Remove support for Python 2
This commit will prevent pip from installing natsort on any Python
version older than 3.4.#! /usr/bin/env python
from setuptools import find_packages, setup
setup(
name='natsort',
version='6.0.0',
packages=find_packages(),
entry_points={'console_scripts': ['natsort = natsort.__main__:main']},
python_requires=">=3.4",
extras_require={
'fast': ["fastnumbers >= 2.0.0"],
'icu': ["PyICU >= 1.0.0"]
}
)
|
<commit_before>#! /usr/bin/env python
from setuptools import find_packages, setup
setup(
name='natsort',
version='6.0.0',
packages=find_packages(),
entry_points={'console_scripts': ['natsort = natsort.__main__:main']},
python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*",
extras_require={
'fast': ["fastnumbers >= 2.0.0"],
'icu': ["PyICU >= 1.0.0"]
}
)
<commit_msg>Remove support for Python 2
This commit will prevent pip from installing natsort on any Python
version older than 3.4.<commit_after>#! /usr/bin/env python
from setuptools import find_packages, setup
setup(
name='natsort',
version='6.0.0',
packages=find_packages(),
entry_points={'console_scripts': ['natsort = natsort.__main__:main']},
python_requires=">=3.4",
extras_require={
'fast': ["fastnumbers >= 2.0.0"],
'icu': ["PyICU >= 1.0.0"]
}
)
|
9564692c1044779467e926f830b8f28e1661cb73
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup
import argparse_addons
setup(name='argparse_addons',
version=argparse_addons.__version__,
description=('Additional argparse types and actions.'),
long_description=open('README.rst', 'r').read(),
author='Erik Moqvist',
author_email='erik.moqvist@gmail.com',
license='MIT',
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
],
keywords=['argparse'],
url='https://github.com/eerimoq/argparse_addons',
py_modules=['argparse_addons'],
python_requires='>=3.6',
install_requires=[
],
test_suite="tests")
|
#!/usr/bin/env python
from setuptools import setup
import argparse_addons
setup(name='argparse_addons',
version=argparse_addons.__version__,
description=('Additional argparse types and actions.'),
long_description=open('README.rst', 'r').read(),
author='Erik Moqvist',
author_email='erik.moqvist@gmail.com',
license='MIT',
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
],
keywords=['argparse'],
url='https://github.com/eerimoq/argparse_addons',
py_modules=['argparse_addons'],
python_requires='>=3.6',
install_requires=[
],
test_suite="tests")
|
Remove per patch version classifiers
|
Remove per patch version classifiers
|
Python
|
mit
|
eerimoq/argparse_addons
|
#!/usr/bin/env python
from setuptools import setup
import argparse_addons
setup(name='argparse_addons',
version=argparse_addons.__version__,
description=('Additional argparse types and actions.'),
long_description=open('README.rst', 'r').read(),
author='Erik Moqvist',
author_email='erik.moqvist@gmail.com',
license='MIT',
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
],
keywords=['argparse'],
url='https://github.com/eerimoq/argparse_addons',
py_modules=['argparse_addons'],
python_requires='>=3.6',
install_requires=[
],
test_suite="tests")
Remove per patch version classifiers
|
#!/usr/bin/env python
from setuptools import setup
import argparse_addons
setup(name='argparse_addons',
version=argparse_addons.__version__,
description=('Additional argparse types and actions.'),
long_description=open('README.rst', 'r').read(),
author='Erik Moqvist',
author_email='erik.moqvist@gmail.com',
license='MIT',
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
],
keywords=['argparse'],
url='https://github.com/eerimoq/argparse_addons',
py_modules=['argparse_addons'],
python_requires='>=3.6',
install_requires=[
],
test_suite="tests")
|
<commit_before>#!/usr/bin/env python
from setuptools import setup
import argparse_addons
setup(name='argparse_addons',
version=argparse_addons.__version__,
description=('Additional argparse types and actions.'),
long_description=open('README.rst', 'r').read(),
author='Erik Moqvist',
author_email='erik.moqvist@gmail.com',
license='MIT',
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
],
keywords=['argparse'],
url='https://github.com/eerimoq/argparse_addons',
py_modules=['argparse_addons'],
python_requires='>=3.6',
install_requires=[
],
test_suite="tests")
<commit_msg>Remove per patch version classifiers<commit_after>
|
#!/usr/bin/env python
from setuptools import setup
import argparse_addons
setup(name='argparse_addons',
version=argparse_addons.__version__,
description=('Additional argparse types and actions.'),
long_description=open('README.rst', 'r').read(),
author='Erik Moqvist',
author_email='erik.moqvist@gmail.com',
license='MIT',
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
],
keywords=['argparse'],
url='https://github.com/eerimoq/argparse_addons',
py_modules=['argparse_addons'],
python_requires='>=3.6',
install_requires=[
],
test_suite="tests")
|
#!/usr/bin/env python
from setuptools import setup
import argparse_addons
setup(name='argparse_addons',
version=argparse_addons.__version__,
description=('Additional argparse types and actions.'),
long_description=open('README.rst', 'r').read(),
author='Erik Moqvist',
author_email='erik.moqvist@gmail.com',
license='MIT',
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
],
keywords=['argparse'],
url='https://github.com/eerimoq/argparse_addons',
py_modules=['argparse_addons'],
python_requires='>=3.6',
install_requires=[
],
test_suite="tests")
Remove per patch version classifiers#!/usr/bin/env python
from setuptools import setup
import argparse_addons
setup(name='argparse_addons',
version=argparse_addons.__version__,
description=('Additional argparse types and actions.'),
long_description=open('README.rst', 'r').read(),
author='Erik Moqvist',
author_email='erik.moqvist@gmail.com',
license='MIT',
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
],
keywords=['argparse'],
url='https://github.com/eerimoq/argparse_addons',
py_modules=['argparse_addons'],
python_requires='>=3.6',
install_requires=[
],
test_suite="tests")
|
<commit_before>#!/usr/bin/env python
from setuptools import setup
import argparse_addons
setup(name='argparse_addons',
version=argparse_addons.__version__,
description=('Additional argparse types and actions.'),
long_description=open('README.rst', 'r').read(),
author='Erik Moqvist',
author_email='erik.moqvist@gmail.com',
license='MIT',
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
],
keywords=['argparse'],
url='https://github.com/eerimoq/argparse_addons',
py_modules=['argparse_addons'],
python_requires='>=3.6',
install_requires=[
],
test_suite="tests")
<commit_msg>Remove per patch version classifiers<commit_after>#!/usr/bin/env python
from setuptools import setup
import argparse_addons
setup(name='argparse_addons',
version=argparse_addons.__version__,
description=('Additional argparse types and actions.'),
long_description=open('README.rst', 'r').read(),
author='Erik Moqvist',
author_email='erik.moqvist@gmail.com',
license='MIT',
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
],
keywords=['argparse'],
url='https://github.com/eerimoq/argparse_addons',
py_modules=['argparse_addons'],
python_requires='>=3.6',
install_requires=[
],
test_suite="tests")
|
0881a34fe78d8967bfbe85f7c16839ce2a802aa2
|
setup.py
|
setup.py
|
#!/usr/bin/env python3
from setuptools import find_packages, setup
setup(
name='magical',
version='0.0.1',
packages=find_packages(),
install_requires=[
'pymunk~=5.6.0',
'pyglet==1.5.*',
'gym==0.17.*',
'Click>=7.0',
'numpy>=1.17.4',
'cloudpickle>=1.2.2',
'statsmodels>=0.10.2',
])
|
#!/usr/bin/env python3
from setuptools import find_packages, setup
setup(
name='magical-il',
version='0.0.1alpha0',
packages=find_packages(),
install_requires=[
'pymunk~=5.6.0',
'pyglet==1.5.*',
'gym==0.17.*',
'Click>=7.0',
'numpy>=1.17.4',
'cloudpickle>=1.2.2',
'statsmodels>=0.10.2',
])
|
Change PyPI wheel name to 'magical-il'
|
Change PyPI wheel name to 'magical-il'
|
Python
|
isc
|
qxcv/magical,qxcv/magical
|
#!/usr/bin/env python3
from setuptools import find_packages, setup
setup(
name='magical',
version='0.0.1',
packages=find_packages(),
install_requires=[
'pymunk~=5.6.0',
'pyglet==1.5.*',
'gym==0.17.*',
'Click>=7.0',
'numpy>=1.17.4',
'cloudpickle>=1.2.2',
'statsmodels>=0.10.2',
])
Change PyPI wheel name to 'magical-il'
|
#!/usr/bin/env python3
from setuptools import find_packages, setup
setup(
name='magical-il',
version='0.0.1alpha0',
packages=find_packages(),
install_requires=[
'pymunk~=5.6.0',
'pyglet==1.5.*',
'gym==0.17.*',
'Click>=7.0',
'numpy>=1.17.4',
'cloudpickle>=1.2.2',
'statsmodels>=0.10.2',
])
|
<commit_before>#!/usr/bin/env python3
from setuptools import find_packages, setup
setup(
name='magical',
version='0.0.1',
packages=find_packages(),
install_requires=[
'pymunk~=5.6.0',
'pyglet==1.5.*',
'gym==0.17.*',
'Click>=7.0',
'numpy>=1.17.4',
'cloudpickle>=1.2.2',
'statsmodels>=0.10.2',
])
<commit_msg>Change PyPI wheel name to 'magical-il'<commit_after>
|
#!/usr/bin/env python3
from setuptools import find_packages, setup
setup(
name='magical-il',
version='0.0.1alpha0',
packages=find_packages(),
install_requires=[
'pymunk~=5.6.0',
'pyglet==1.5.*',
'gym==0.17.*',
'Click>=7.0',
'numpy>=1.17.4',
'cloudpickle>=1.2.2',
'statsmodels>=0.10.2',
])
|
#!/usr/bin/env python3
from setuptools import find_packages, setup
setup(
name='magical',
version='0.0.1',
packages=find_packages(),
install_requires=[
'pymunk~=5.6.0',
'pyglet==1.5.*',
'gym==0.17.*',
'Click>=7.0',
'numpy>=1.17.4',
'cloudpickle>=1.2.2',
'statsmodels>=0.10.2',
])
Change PyPI wheel name to 'magical-il'#!/usr/bin/env python3
from setuptools import find_packages, setup
setup(
name='magical-il',
version='0.0.1alpha0',
packages=find_packages(),
install_requires=[
'pymunk~=5.6.0',
'pyglet==1.5.*',
'gym==0.17.*',
'Click>=7.0',
'numpy>=1.17.4',
'cloudpickle>=1.2.2',
'statsmodels>=0.10.2',
])
|
<commit_before>#!/usr/bin/env python3
from setuptools import find_packages, setup
setup(
name='magical',
version='0.0.1',
packages=find_packages(),
install_requires=[
'pymunk~=5.6.0',
'pyglet==1.5.*',
'gym==0.17.*',
'Click>=7.0',
'numpy>=1.17.4',
'cloudpickle>=1.2.2',
'statsmodels>=0.10.2',
])
<commit_msg>Change PyPI wheel name to 'magical-il'<commit_after>#!/usr/bin/env python3
from setuptools import find_packages, setup
setup(
name='magical-il',
version='0.0.1alpha0',
packages=find_packages(),
install_requires=[
'pymunk~=5.6.0',
'pyglet==1.5.*',
'gym==0.17.*',
'Click>=7.0',
'numpy>=1.17.4',
'cloudpickle>=1.2.2',
'statsmodels>=0.10.2',
])
|
e89faebd357cc9c929950ef38cafc97524dee205
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
import os
version = '0.1'
long_description = (
open('README.txt').read()
+ '\n' +
'Contributors\n'
'============\n'
+ '\n' +
open('CONTRIBUTORS.txt').read()
+ '\n' +
open('CHANGES.txt').read()
+ '\n')
requires = ['pyramid', 'PasteScript', 'requests', 'pymongo', 'numpy', 'scipy==0.10.0']
setup(name='mist.monitor',
version=version,
description="Monitoring node for the https://mist.io service",
long_description=long_description,
# Get more strings from
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Programming Language :: Python",
],
keywords='',
author='',
author_email='',
url='https://mist.io/',
license='copyright',
packages=find_packages('src'),
package_dir = {'': 'src'},
namespace_packages=['mist'],
include_package_data=True,
zip_safe=False,
install_requires= requires,
entry_points="""
# -*- Entry points: -*-
[paste.app_factory]
main = mist.monitor:main
""",
)
|
from setuptools import setup, find_packages
import os
version = '0.1'
long_description = (
open('README.txt').read()
+ '\n' +
'Contributors\n'
'============\n'
+ '\n' +
open('CONTRIBUTORS.txt').read()
+ '\n' +
open('CHANGES.txt').read()
+ '\n')
requires = ['pyramid', 'PasteScript', 'requests', 'pymongo',]# 'numpy', 'scipy==0.10.0']
setup(name='mist.monitor',
version=version,
description="Monitoring node for the https://mist.io service",
long_description=long_description,
# Get more strings from
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Programming Language :: Python",
],
keywords='',
author='',
author_email='',
url='https://mist.io/',
license='copyright',
packages=find_packages('src'),
package_dir = {'': 'src'},
namespace_packages=['mist'],
include_package_data=True,
zip_safe=False,
install_requires= requires,
entry_points="""
# -*- Entry points: -*-
[paste.app_factory]
main = mist.monitor:main
""",
)
|
Comment out numpy, scipy which cause problems in buildout
|
Comment out numpy, scipy which cause problems in buildout
|
Python
|
apache-2.0
|
mistio/mist.monitor,mistio/mist.monitor
|
from setuptools import setup, find_packages
import os
version = '0.1'
long_description = (
open('README.txt').read()
+ '\n' +
'Contributors\n'
'============\n'
+ '\n' +
open('CONTRIBUTORS.txt').read()
+ '\n' +
open('CHANGES.txt').read()
+ '\n')
requires = ['pyramid', 'PasteScript', 'requests', 'pymongo', 'numpy', 'scipy==0.10.0']
setup(name='mist.monitor',
version=version,
description="Monitoring node for the https://mist.io service",
long_description=long_description,
# Get more strings from
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Programming Language :: Python",
],
keywords='',
author='',
author_email='',
url='https://mist.io/',
license='copyright',
packages=find_packages('src'),
package_dir = {'': 'src'},
namespace_packages=['mist'],
include_package_data=True,
zip_safe=False,
install_requires= requires,
entry_points="""
# -*- Entry points: -*-
[paste.app_factory]
main = mist.monitor:main
""",
)
Comment out numpy, scipy which cause problems in buildout
|
from setuptools import setup, find_packages
import os
version = '0.1'
long_description = (
open('README.txt').read()
+ '\n' +
'Contributors\n'
'============\n'
+ '\n' +
open('CONTRIBUTORS.txt').read()
+ '\n' +
open('CHANGES.txt').read()
+ '\n')
requires = ['pyramid', 'PasteScript', 'requests', 'pymongo',]# 'numpy', 'scipy==0.10.0']
setup(name='mist.monitor',
version=version,
description="Monitoring node for the https://mist.io service",
long_description=long_description,
# Get more strings from
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Programming Language :: Python",
],
keywords='',
author='',
author_email='',
url='https://mist.io/',
license='copyright',
packages=find_packages('src'),
package_dir = {'': 'src'},
namespace_packages=['mist'],
include_package_data=True,
zip_safe=False,
install_requires= requires,
entry_points="""
# -*- Entry points: -*-
[paste.app_factory]
main = mist.monitor:main
""",
)
|
<commit_before>from setuptools import setup, find_packages
import os
version = '0.1'
long_description = (
open('README.txt').read()
+ '\n' +
'Contributors\n'
'============\n'
+ '\n' +
open('CONTRIBUTORS.txt').read()
+ '\n' +
open('CHANGES.txt').read()
+ '\n')
requires = ['pyramid', 'PasteScript', 'requests', 'pymongo', 'numpy', 'scipy==0.10.0']
setup(name='mist.monitor',
version=version,
description="Monitoring node for the https://mist.io service",
long_description=long_description,
# Get more strings from
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Programming Language :: Python",
],
keywords='',
author='',
author_email='',
url='https://mist.io/',
license='copyright',
packages=find_packages('src'),
package_dir = {'': 'src'},
namespace_packages=['mist'],
include_package_data=True,
zip_safe=False,
install_requires= requires,
entry_points="""
# -*- Entry points: -*-
[paste.app_factory]
main = mist.monitor:main
""",
)
<commit_msg>Comment out numpy, scipy which cause problems in buildout<commit_after>
|
from setuptools import setup, find_packages
import os
version = '0.1'
long_description = (
open('README.txt').read()
+ '\n' +
'Contributors\n'
'============\n'
+ '\n' +
open('CONTRIBUTORS.txt').read()
+ '\n' +
open('CHANGES.txt').read()
+ '\n')
requires = ['pyramid', 'PasteScript', 'requests', 'pymongo',]# 'numpy', 'scipy==0.10.0']
setup(name='mist.monitor',
version=version,
description="Monitoring node for the https://mist.io service",
long_description=long_description,
# Get more strings from
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Programming Language :: Python",
],
keywords='',
author='',
author_email='',
url='https://mist.io/',
license='copyright',
packages=find_packages('src'),
package_dir = {'': 'src'},
namespace_packages=['mist'],
include_package_data=True,
zip_safe=False,
install_requires= requires,
entry_points="""
# -*- Entry points: -*-
[paste.app_factory]
main = mist.monitor:main
""",
)
|
from setuptools import setup, find_packages
import os
version = '0.1'
long_description = (
open('README.txt').read()
+ '\n' +
'Contributors\n'
'============\n'
+ '\n' +
open('CONTRIBUTORS.txt').read()
+ '\n' +
open('CHANGES.txt').read()
+ '\n')
requires = ['pyramid', 'PasteScript', 'requests', 'pymongo', 'numpy', 'scipy==0.10.0']
setup(name='mist.monitor',
version=version,
description="Monitoring node for the https://mist.io service",
long_description=long_description,
# Get more strings from
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Programming Language :: Python",
],
keywords='',
author='',
author_email='',
url='https://mist.io/',
license='copyright',
packages=find_packages('src'),
package_dir = {'': 'src'},
namespace_packages=['mist'],
include_package_data=True,
zip_safe=False,
install_requires= requires,
entry_points="""
# -*- Entry points: -*-
[paste.app_factory]
main = mist.monitor:main
""",
)
Comment out numpy, scipy which cause problems in buildoutfrom setuptools import setup, find_packages
import os
version = '0.1'
long_description = (
open('README.txt').read()
+ '\n' +
'Contributors\n'
'============\n'
+ '\n' +
open('CONTRIBUTORS.txt').read()
+ '\n' +
open('CHANGES.txt').read()
+ '\n')
requires = ['pyramid', 'PasteScript', 'requests', 'pymongo',]# 'numpy', 'scipy==0.10.0']
setup(name='mist.monitor',
version=version,
description="Monitoring node for the https://mist.io service",
long_description=long_description,
# Get more strings from
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Programming Language :: Python",
],
keywords='',
author='',
author_email='',
url='https://mist.io/',
license='copyright',
packages=find_packages('src'),
package_dir = {'': 'src'},
namespace_packages=['mist'],
include_package_data=True,
zip_safe=False,
install_requires= requires,
entry_points="""
# -*- Entry points: -*-
[paste.app_factory]
main = mist.monitor:main
""",
)
|
<commit_before>from setuptools import setup, find_packages
import os
version = '0.1'
long_description = (
open('README.txt').read()
+ '\n' +
'Contributors\n'
'============\n'
+ '\n' +
open('CONTRIBUTORS.txt').read()
+ '\n' +
open('CHANGES.txt').read()
+ '\n')
requires = ['pyramid', 'PasteScript', 'requests', 'pymongo', 'numpy', 'scipy==0.10.0']
setup(name='mist.monitor',
version=version,
description="Monitoring node for the https://mist.io service",
long_description=long_description,
# Get more strings from
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Programming Language :: Python",
],
keywords='',
author='',
author_email='',
url='https://mist.io/',
license='copyright',
packages=find_packages('src'),
package_dir = {'': 'src'},
namespace_packages=['mist'],
include_package_data=True,
zip_safe=False,
install_requires= requires,
entry_points="""
# -*- Entry points: -*-
[paste.app_factory]
main = mist.monitor:main
""",
)
<commit_msg>Comment out numpy, scipy which cause problems in buildout<commit_after>from setuptools import setup, find_packages
import os
version = '0.1'
long_description = (
open('README.txt').read()
+ '\n' +
'Contributors\n'
'============\n'
+ '\n' +
open('CONTRIBUTORS.txt').read()
+ '\n' +
open('CHANGES.txt').read()
+ '\n')
requires = ['pyramid', 'PasteScript', 'requests', 'pymongo',]# 'numpy', 'scipy==0.10.0']
setup(name='mist.monitor',
version=version,
description="Monitoring node for the https://mist.io service",
long_description=long_description,
# Get more strings from
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Programming Language :: Python",
],
keywords='',
author='',
author_email='',
url='https://mist.io/',
license='copyright',
packages=find_packages('src'),
package_dir = {'': 'src'},
namespace_packages=['mist'],
include_package_data=True,
zip_safe=False,
install_requires= requires,
entry_points="""
# -*- Entry points: -*-
[paste.app_factory]
main = mist.monitor:main
""",
)
|
3d1e073ed73644b5ff0db94b4129cbd6cdd26d89
|
setup.py
|
setup.py
|
import sys
from setuptools import setup, find_packages
import populous
requirements = [
"click",
"cached-property",
"fake-factory",
]
if sys.version_info < (3, 2):
requirements.append('functools32')
setup(
name="populous",
version=populous.__version__,
url=populous.__url__,
description=populous.__doc__,
author=populous.__author__,
license=populous.__license__,
long_description="TODO",
packages=find_packages(),
install_requires=requirements,
entry_points={
'console_scripts': [
'populous = populous.__main__:cli'
]
},
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Utilities",
],
keywords='populous populate database',
)
|
import sys
from setuptools import setup, find_packages
import populous
requirements = [
"click",
"cached-property",
"fake-factory",
"dateutils"
]
if sys.version_info < (3, 2):
requirements.append('functools32')
setup(
name="populous",
version=populous.__version__,
url=populous.__url__,
description=populous.__doc__,
author=populous.__author__,
license=populous.__license__,
long_description="TODO",
packages=find_packages(),
install_requires=requirements,
entry_points={
'console_scripts': [
'populous = populous.__main__:cli'
]
},
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Utilities",
],
keywords='populous populate database',
)
|
Add dateutils to the requirements
|
Add dateutils to the requirements
|
Python
|
mit
|
novafloss/populous
|
import sys
from setuptools import setup, find_packages
import populous
requirements = [
"click",
"cached-property",
"fake-factory",
]
if sys.version_info < (3, 2):
requirements.append('functools32')
setup(
name="populous",
version=populous.__version__,
url=populous.__url__,
description=populous.__doc__,
author=populous.__author__,
license=populous.__license__,
long_description="TODO",
packages=find_packages(),
install_requires=requirements,
entry_points={
'console_scripts': [
'populous = populous.__main__:cli'
]
},
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Utilities",
],
keywords='populous populate database',
)
Add dateutils to the requirements
|
import sys
from setuptools import setup, find_packages
import populous
requirements = [
"click",
"cached-property",
"fake-factory",
"dateutils"
]
if sys.version_info < (3, 2):
requirements.append('functools32')
setup(
name="populous",
version=populous.__version__,
url=populous.__url__,
description=populous.__doc__,
author=populous.__author__,
license=populous.__license__,
long_description="TODO",
packages=find_packages(),
install_requires=requirements,
entry_points={
'console_scripts': [
'populous = populous.__main__:cli'
]
},
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Utilities",
],
keywords='populous populate database',
)
|
<commit_before>import sys
from setuptools import setup, find_packages
import populous
requirements = [
"click",
"cached-property",
"fake-factory",
]
if sys.version_info < (3, 2):
requirements.append('functools32')
setup(
name="populous",
version=populous.__version__,
url=populous.__url__,
description=populous.__doc__,
author=populous.__author__,
license=populous.__license__,
long_description="TODO",
packages=find_packages(),
install_requires=requirements,
entry_points={
'console_scripts': [
'populous = populous.__main__:cli'
]
},
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Utilities",
],
keywords='populous populate database',
)
<commit_msg>Add dateutils to the requirements<commit_after>
|
import sys
from setuptools import setup, find_packages
import populous
requirements = [
"click",
"cached-property",
"fake-factory",
"dateutils"
]
if sys.version_info < (3, 2):
requirements.append('functools32')
setup(
name="populous",
version=populous.__version__,
url=populous.__url__,
description=populous.__doc__,
author=populous.__author__,
license=populous.__license__,
long_description="TODO",
packages=find_packages(),
install_requires=requirements,
entry_points={
'console_scripts': [
'populous = populous.__main__:cli'
]
},
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Utilities",
],
keywords='populous populate database',
)
|
import sys
from setuptools import setup, find_packages
import populous
requirements = [
"click",
"cached-property",
"fake-factory",
]
if sys.version_info < (3, 2):
requirements.append('functools32')
setup(
name="populous",
version=populous.__version__,
url=populous.__url__,
description=populous.__doc__,
author=populous.__author__,
license=populous.__license__,
long_description="TODO",
packages=find_packages(),
install_requires=requirements,
entry_points={
'console_scripts': [
'populous = populous.__main__:cli'
]
},
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Utilities",
],
keywords='populous populate database',
)
Add dateutils to the requirementsimport sys
from setuptools import setup, find_packages
import populous
requirements = [
"click",
"cached-property",
"fake-factory",
"dateutils"
]
if sys.version_info < (3, 2):
requirements.append('functools32')
setup(
name="populous",
version=populous.__version__,
url=populous.__url__,
description=populous.__doc__,
author=populous.__author__,
license=populous.__license__,
long_description="TODO",
packages=find_packages(),
install_requires=requirements,
entry_points={
'console_scripts': [
'populous = populous.__main__:cli'
]
},
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Utilities",
],
keywords='populous populate database',
)
|
<commit_before>import sys
from setuptools import setup, find_packages
import populous
requirements = [
"click",
"cached-property",
"fake-factory",
]
if sys.version_info < (3, 2):
requirements.append('functools32')
setup(
name="populous",
version=populous.__version__,
url=populous.__url__,
description=populous.__doc__,
author=populous.__author__,
license=populous.__license__,
long_description="TODO",
packages=find_packages(),
install_requires=requirements,
entry_points={
'console_scripts': [
'populous = populous.__main__:cli'
]
},
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Utilities",
],
keywords='populous populate database',
)
<commit_msg>Add dateutils to the requirements<commit_after>import sys
from setuptools import setup, find_packages
import populous
requirements = [
"click",
"cached-property",
"fake-factory",
"dateutils"
]
if sys.version_info < (3, 2):
requirements.append('functools32')
setup(
name="populous",
version=populous.__version__,
url=populous.__url__,
description=populous.__doc__,
author=populous.__author__,
license=populous.__license__,
long_description="TODO",
packages=find_packages(),
install_requires=requirements,
entry_points={
'console_scripts': [
'populous = populous.__main__:cli'
]
},
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Utilities",
],
keywords='populous populate database',
)
|
dc5748eb6dad4bf2cdf5c88ab15c489a88c6bf21
|
setup.py
|
setup.py
|
import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'parserutils.tests.tests'])
raise SystemExit(errno)
with open('README.md') as readme:
long_description = readme.read()
setup(
name='parserutils',
description='A collection of performant parsing utilities',
long_description=long_description,
long_description_content_type='text/markdown',
keywords='parser,parsing,utils,utilities,collections,dates,elements,numbers,strings,url,xml',
version='1.2',
packages=[
'parserutils', 'parserutils.tests'
],
install_requires=[
'defusedxml>=0.4.1', 'python-dateutil>=2.4.2', 'six>=1.9.0'
],
tests_require=['mock'],
url='https://github.com/consbio/parserutils',
license='BSD',
cmdclass={'test': RunTests}
)
|
import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'parserutils.tests.tests'])
raise SystemExit(errno)
with open('README.md') as readme:
long_description = readme.read()
setup(
name='parserutils',
description='A collection of performant parsing utilities',
long_description=long_description,
long_description_content_type='text/markdown',
keywords='parser,parsing,utils,utilities,collections,dates,elements,numbers,strings,url,xml',
version='1.2.1',
packages=[
'parserutils', 'parserutils.tests'
],
install_requires=[
'defusedxml>=0.4.1', 'python-dateutil>=2.4.2', 'six>=1.9.0'
],
tests_require=['mock'],
url='https://github.com/consbio/parserutils',
license='BSD',
cmdclass={'test': RunTests}
)
|
Increment minor version once more
|
Increment minor version once more
|
Python
|
bsd-3-clause
|
consbio/parserutils
|
import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'parserutils.tests.tests'])
raise SystemExit(errno)
with open('README.md') as readme:
long_description = readme.read()
setup(
name='parserutils',
description='A collection of performant parsing utilities',
long_description=long_description,
long_description_content_type='text/markdown',
keywords='parser,parsing,utils,utilities,collections,dates,elements,numbers,strings,url,xml',
version='1.2',
packages=[
'parserutils', 'parserutils.tests'
],
install_requires=[
'defusedxml>=0.4.1', 'python-dateutil>=2.4.2', 'six>=1.9.0'
],
tests_require=['mock'],
url='https://github.com/consbio/parserutils',
license='BSD',
cmdclass={'test': RunTests}
)
Increment minor version once more
|
import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'parserutils.tests.tests'])
raise SystemExit(errno)
with open('README.md') as readme:
long_description = readme.read()
setup(
name='parserutils',
description='A collection of performant parsing utilities',
long_description=long_description,
long_description_content_type='text/markdown',
keywords='parser,parsing,utils,utilities,collections,dates,elements,numbers,strings,url,xml',
version='1.2.1',
packages=[
'parserutils', 'parserutils.tests'
],
install_requires=[
'defusedxml>=0.4.1', 'python-dateutil>=2.4.2', 'six>=1.9.0'
],
tests_require=['mock'],
url='https://github.com/consbio/parserutils',
license='BSD',
cmdclass={'test': RunTests}
)
|
<commit_before>import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'parserutils.tests.tests'])
raise SystemExit(errno)
with open('README.md') as readme:
long_description = readme.read()
setup(
name='parserutils',
description='A collection of performant parsing utilities',
long_description=long_description,
long_description_content_type='text/markdown',
keywords='parser,parsing,utils,utilities,collections,dates,elements,numbers,strings,url,xml',
version='1.2',
packages=[
'parserutils', 'parserutils.tests'
],
install_requires=[
'defusedxml>=0.4.1', 'python-dateutil>=2.4.2', 'six>=1.9.0'
],
tests_require=['mock'],
url='https://github.com/consbio/parserutils',
license='BSD',
cmdclass={'test': RunTests}
)
<commit_msg>Increment minor version once more<commit_after>
|
import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'parserutils.tests.tests'])
raise SystemExit(errno)
with open('README.md') as readme:
long_description = readme.read()
setup(
name='parserutils',
description='A collection of performant parsing utilities',
long_description=long_description,
long_description_content_type='text/markdown',
keywords='parser,parsing,utils,utilities,collections,dates,elements,numbers,strings,url,xml',
version='1.2.1',
packages=[
'parserutils', 'parserutils.tests'
],
install_requires=[
'defusedxml>=0.4.1', 'python-dateutil>=2.4.2', 'six>=1.9.0'
],
tests_require=['mock'],
url='https://github.com/consbio/parserutils',
license='BSD',
cmdclass={'test': RunTests}
)
|
import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'parserutils.tests.tests'])
raise SystemExit(errno)
with open('README.md') as readme:
long_description = readme.read()
setup(
name='parserutils',
description='A collection of performant parsing utilities',
long_description=long_description,
long_description_content_type='text/markdown',
keywords='parser,parsing,utils,utilities,collections,dates,elements,numbers,strings,url,xml',
version='1.2',
packages=[
'parserutils', 'parserutils.tests'
],
install_requires=[
'defusedxml>=0.4.1', 'python-dateutil>=2.4.2', 'six>=1.9.0'
],
tests_require=['mock'],
url='https://github.com/consbio/parserutils',
license='BSD',
cmdclass={'test': RunTests}
)
Increment minor version once moreimport subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'parserutils.tests.tests'])
raise SystemExit(errno)
with open('README.md') as readme:
long_description = readme.read()
setup(
name='parserutils',
description='A collection of performant parsing utilities',
long_description=long_description,
long_description_content_type='text/markdown',
keywords='parser,parsing,utils,utilities,collections,dates,elements,numbers,strings,url,xml',
version='1.2.1',
packages=[
'parserutils', 'parserutils.tests'
],
install_requires=[
'defusedxml>=0.4.1', 'python-dateutil>=2.4.2', 'six>=1.9.0'
],
tests_require=['mock'],
url='https://github.com/consbio/parserutils',
license='BSD',
cmdclass={'test': RunTests}
)
|
<commit_before>import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'parserutils.tests.tests'])
raise SystemExit(errno)
with open('README.md') as readme:
long_description = readme.read()
setup(
name='parserutils',
description='A collection of performant parsing utilities',
long_description=long_description,
long_description_content_type='text/markdown',
keywords='parser,parsing,utils,utilities,collections,dates,elements,numbers,strings,url,xml',
version='1.2',
packages=[
'parserutils', 'parserutils.tests'
],
install_requires=[
'defusedxml>=0.4.1', 'python-dateutil>=2.4.2', 'six>=1.9.0'
],
tests_require=['mock'],
url='https://github.com/consbio/parserutils',
license='BSD',
cmdclass={'test': RunTests}
)
<commit_msg>Increment minor version once more<commit_after>import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'parserutils.tests.tests'])
raise SystemExit(errno)
with open('README.md') as readme:
long_description = readme.read()
setup(
name='parserutils',
description='A collection of performant parsing utilities',
long_description=long_description,
long_description_content_type='text/markdown',
keywords='parser,parsing,utils,utilities,collections,dates,elements,numbers,strings,url,xml',
version='1.2.1',
packages=[
'parserutils', 'parserutils.tests'
],
install_requires=[
'defusedxml>=0.4.1', 'python-dateutil>=2.4.2', 'six>=1.9.0'
],
tests_require=['mock'],
url='https://github.com/consbio/parserutils',
license='BSD',
cmdclass={'test': RunTests}
)
|
f70d73b5a67ca13dc243f72ed701e1f8d5924405
|
setup.py
|
setup.py
|
from setuptools import setup
DESCRIPTION = "A Django oriented templated / transaction email abstraction"
LONG_DESCRIPTION = None
try:
LONG_DESCRIPTION = open('README.rst').read()
except:
pass
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Framework :: Django',
]
setup(
name='denaje-django-templated-email',
version='0.4.9',
packages=['templated_email', 'templated_email.backends'],
author='Bradley Whittington',
author_email='radbrad182@gmail.com',
url='http://github.com/bradwhittington/django-templated-email/',
license='MIT',
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
platforms=['any'],
classifiers=CLASSIFIERS,
)
|
from setuptools import setup
DESCRIPTION = "A Django oriented templated / transaction email abstraction"
LONG_DESCRIPTION = None
try:
LONG_DESCRIPTION = open('README.rst').read()
except:
pass
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Framework :: Django',
]
setup(
name='django-templated-email',
version='0.4.9',
packages=['templated_email', 'templated_email.backends'],
author='Bradley Whittington',
author_email='radbrad182@gmail.com',
url='http://github.com/bradwhittington/django-templated-email/',
license='MIT',
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
platforms=['any'],
classifiers=CLASSIFIERS,
)
|
Revert "Perhaps need to modify the name"
|
Revert "Perhaps need to modify the name"
This reverts commit d4ee1a1d91cd13bf0cb844be032eaa527806fad1.
|
Python
|
mit
|
dpetzold/django-templated-email,vintasoftware/django-templated-email,ScanTrust/django-templated-email,vintasoftware/django-templated-email,mypebble/django-templated-email,dpetzold/django-templated-email,BradWhittington/django-templated-email,hator/django-templated-email,ScanTrust/django-templated-email,BradWhittington/django-templated-email,mypebble/django-templated-email,hator/django-templated-email
|
from setuptools import setup
DESCRIPTION = "A Django oriented templated / transaction email abstraction"
LONG_DESCRIPTION = None
try:
LONG_DESCRIPTION = open('README.rst').read()
except:
pass
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Framework :: Django',
]
setup(
name='denaje-django-templated-email',
version='0.4.9',
packages=['templated_email', 'templated_email.backends'],
author='Bradley Whittington',
author_email='radbrad182@gmail.com',
url='http://github.com/bradwhittington/django-templated-email/',
license='MIT',
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
platforms=['any'],
classifiers=CLASSIFIERS,
)
Revert "Perhaps need to modify the name"
This reverts commit d4ee1a1d91cd13bf0cb844be032eaa527806fad1.
|
from setuptools import setup
DESCRIPTION = "A Django oriented templated / transaction email abstraction"
LONG_DESCRIPTION = None
try:
LONG_DESCRIPTION = open('README.rst').read()
except:
pass
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Framework :: Django',
]
setup(
name='django-templated-email',
version='0.4.9',
packages=['templated_email', 'templated_email.backends'],
author='Bradley Whittington',
author_email='radbrad182@gmail.com',
url='http://github.com/bradwhittington/django-templated-email/',
license='MIT',
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
platforms=['any'],
classifiers=CLASSIFIERS,
)
|
<commit_before>from setuptools import setup
DESCRIPTION = "A Django oriented templated / transaction email abstraction"
LONG_DESCRIPTION = None
try:
LONG_DESCRIPTION = open('README.rst').read()
except:
pass
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Framework :: Django',
]
setup(
name='denaje-django-templated-email',
version='0.4.9',
packages=['templated_email', 'templated_email.backends'],
author='Bradley Whittington',
author_email='radbrad182@gmail.com',
url='http://github.com/bradwhittington/django-templated-email/',
license='MIT',
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
platforms=['any'],
classifiers=CLASSIFIERS,
)
<commit_msg>Revert "Perhaps need to modify the name"
This reverts commit d4ee1a1d91cd13bf0cb844be032eaa527806fad1.<commit_after>
|
from setuptools import setup
DESCRIPTION = "A Django oriented templated / transaction email abstraction"
LONG_DESCRIPTION = None
try:
LONG_DESCRIPTION = open('README.rst').read()
except:
pass
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Framework :: Django',
]
setup(
name='django-templated-email',
version='0.4.9',
packages=['templated_email', 'templated_email.backends'],
author='Bradley Whittington',
author_email='radbrad182@gmail.com',
url='http://github.com/bradwhittington/django-templated-email/',
license='MIT',
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
platforms=['any'],
classifiers=CLASSIFIERS,
)
|
from setuptools import setup
DESCRIPTION = "A Django oriented templated / transaction email abstraction"
LONG_DESCRIPTION = None
try:
LONG_DESCRIPTION = open('README.rst').read()
except:
pass
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Framework :: Django',
]
setup(
name='denaje-django-templated-email',
version='0.4.9',
packages=['templated_email', 'templated_email.backends'],
author='Bradley Whittington',
author_email='radbrad182@gmail.com',
url='http://github.com/bradwhittington/django-templated-email/',
license='MIT',
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
platforms=['any'],
classifiers=CLASSIFIERS,
)
Revert "Perhaps need to modify the name"
This reverts commit d4ee1a1d91cd13bf0cb844be032eaa527806fad1.from setuptools import setup
DESCRIPTION = "A Django oriented templated / transaction email abstraction"
LONG_DESCRIPTION = None
try:
LONG_DESCRIPTION = open('README.rst').read()
except:
pass
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Framework :: Django',
]
setup(
name='django-templated-email',
version='0.4.9',
packages=['templated_email', 'templated_email.backends'],
author='Bradley Whittington',
author_email='radbrad182@gmail.com',
url='http://github.com/bradwhittington/django-templated-email/',
license='MIT',
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
platforms=['any'],
classifiers=CLASSIFIERS,
)
|
<commit_before>from setuptools import setup
DESCRIPTION = "A Django oriented templated / transaction email abstraction"
LONG_DESCRIPTION = None
try:
LONG_DESCRIPTION = open('README.rst').read()
except:
pass
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Framework :: Django',
]
setup(
name='denaje-django-templated-email',
version='0.4.9',
packages=['templated_email', 'templated_email.backends'],
author='Bradley Whittington',
author_email='radbrad182@gmail.com',
url='http://github.com/bradwhittington/django-templated-email/',
license='MIT',
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
platforms=['any'],
classifiers=CLASSIFIERS,
)
<commit_msg>Revert "Perhaps need to modify the name"
This reverts commit d4ee1a1d91cd13bf0cb844be032eaa527806fad1.<commit_after>from setuptools import setup
DESCRIPTION = "A Django oriented templated / transaction email abstraction"
LONG_DESCRIPTION = None
try:
LONG_DESCRIPTION = open('README.rst').read()
except:
pass
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Framework :: Django',
]
setup(
name='django-templated-email',
version='0.4.9',
packages=['templated_email', 'templated_email.backends'],
author='Bradley Whittington',
author_email='radbrad182@gmail.com',
url='http://github.com/bradwhittington/django-templated-email/',
license='MIT',
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
platforms=['any'],
classifiers=CLASSIFIERS,
)
|
7b77297f9099019f4424c7115deb933dd51eaf80
|
setup.py
|
setup.py
|
#!/usr/local/bin/python3
from distutils.core import setup, Extension
setup(
name = 'Encoder',
version = '1.0',
description = 'Encode stuff',
ext_modules = [
Extension(
name = '_encoder',
sources = [
'src/encoder.c',
'src/module.c',
],
include_dirs = [
'include',
],
),
],
)
|
#!/usr/local/bin/python3
from distutils.core import setup, Extension
setup(
name = 'Encoder',
version = '1.0',
description = 'Encode stuff',
ext_modules = [
Extension(
name = '_encoder',
sources = [
'src/encoder.c',
'src/module.c',
],
include_dirs = [
'include',
],
depends = [
'include/buffer.h', # As this is essentially a source file
],
),
],
)
|
Include buffer.h as a dependency for rebuilds
|
Include buffer.h as a dependency for rebuilds
|
Python
|
apache-2.0
|
blake-sheridan/py-serializer,blake-sheridan/py-serializer
|
#!/usr/local/bin/python3
from distutils.core import setup, Extension
setup(
name = 'Encoder',
version = '1.0',
description = 'Encode stuff',
ext_modules = [
Extension(
name = '_encoder',
sources = [
'src/encoder.c',
'src/module.c',
],
include_dirs = [
'include',
],
),
],
)
Include buffer.h as a dependency for rebuilds
|
#!/usr/local/bin/python3
from distutils.core import setup, Extension
setup(
name = 'Encoder',
version = '1.0',
description = 'Encode stuff',
ext_modules = [
Extension(
name = '_encoder',
sources = [
'src/encoder.c',
'src/module.c',
],
include_dirs = [
'include',
],
depends = [
'include/buffer.h', # As this is essentially a source file
],
),
],
)
|
<commit_before>#!/usr/local/bin/python3
from distutils.core import setup, Extension
setup(
name = 'Encoder',
version = '1.0',
description = 'Encode stuff',
ext_modules = [
Extension(
name = '_encoder',
sources = [
'src/encoder.c',
'src/module.c',
],
include_dirs = [
'include',
],
),
],
)
<commit_msg>Include buffer.h as a dependency for rebuilds<commit_after>
|
#!/usr/local/bin/python3
from distutils.core import setup, Extension
setup(
name = 'Encoder',
version = '1.0',
description = 'Encode stuff',
ext_modules = [
Extension(
name = '_encoder',
sources = [
'src/encoder.c',
'src/module.c',
],
include_dirs = [
'include',
],
depends = [
'include/buffer.h', # As this is essentially a source file
],
),
],
)
|
#!/usr/local/bin/python3
from distutils.core import setup, Extension
setup(
name = 'Encoder',
version = '1.0',
description = 'Encode stuff',
ext_modules = [
Extension(
name = '_encoder',
sources = [
'src/encoder.c',
'src/module.c',
],
include_dirs = [
'include',
],
),
],
)
Include buffer.h as a dependency for rebuilds#!/usr/local/bin/python3
from distutils.core import setup, Extension
setup(
name = 'Encoder',
version = '1.0',
description = 'Encode stuff',
ext_modules = [
Extension(
name = '_encoder',
sources = [
'src/encoder.c',
'src/module.c',
],
include_dirs = [
'include',
],
depends = [
'include/buffer.h', # As this is essentially a source file
],
),
],
)
|
<commit_before>#!/usr/local/bin/python3
from distutils.core import setup, Extension
setup(
name = 'Encoder',
version = '1.0',
description = 'Encode stuff',
ext_modules = [
Extension(
name = '_encoder',
sources = [
'src/encoder.c',
'src/module.c',
],
include_dirs = [
'include',
],
),
],
)
<commit_msg>Include buffer.h as a dependency for rebuilds<commit_after>#!/usr/local/bin/python3
from distutils.core import setup, Extension
setup(
name = 'Encoder',
version = '1.0',
description = 'Encode stuff',
ext_modules = [
Extension(
name = '_encoder',
sources = [
'src/encoder.c',
'src/module.c',
],
include_dirs = [
'include',
],
depends = [
'include/buffer.h', # As this is essentially a source file
],
),
],
)
|
32a1f4915216dc77e4c2c0a834a26c5401068e25
|
setup.py
|
setup.py
|
#!/usr/bin/python
# Copyright 2011 Tomo Krajina
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
setup(
name='gpxpy',
version='1.3.3',
description='GPX file parser and GPS track manipulation library',
license='Apache License, Version 2.0',
author='Tomo Krajina',
author_email='tkrajina@gmail.com',
url='http://www.trackprofiler.com/gpxpy/index.html',
packages=['gpxpy', ],
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
scripts=['gpxinfo']
)
|
#!/usr/bin/python
# Copyright 2011 Tomo Krajina
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
setup(
name='gpxpy',
version='1.3.3',
description='GPX file parser and GPS track manipulation library',
license='Apache License, Version 2.0',
author='Tomo Krajina',
author_email='tkrajina@gmail.com',
url='http://www.trackprofiler.com/gpxpy/index.html',
packages=['gpxpy', ],
python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*",
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
scripts=['gpxinfo']
)
|
Add python_requires to help pip
|
Add python_requires to help pip
|
Python
|
apache-2.0
|
tkrajina/gpxpy
|
#!/usr/bin/python
# Copyright 2011 Tomo Krajina
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
setup(
name='gpxpy',
version='1.3.3',
description='GPX file parser and GPS track manipulation library',
license='Apache License, Version 2.0',
author='Tomo Krajina',
author_email='tkrajina@gmail.com',
url='http://www.trackprofiler.com/gpxpy/index.html',
packages=['gpxpy', ],
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
scripts=['gpxinfo']
)
Add python_requires to help pip
|
#!/usr/bin/python
# Copyright 2011 Tomo Krajina
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
setup(
name='gpxpy',
version='1.3.3',
description='GPX file parser and GPS track manipulation library',
license='Apache License, Version 2.0',
author='Tomo Krajina',
author_email='tkrajina@gmail.com',
url='http://www.trackprofiler.com/gpxpy/index.html',
packages=['gpxpy', ],
python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*",
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
scripts=['gpxinfo']
)
|
<commit_before>#!/usr/bin/python
# Copyright 2011 Tomo Krajina
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
setup(
name='gpxpy',
version='1.3.3',
description='GPX file parser and GPS track manipulation library',
license='Apache License, Version 2.0',
author='Tomo Krajina',
author_email='tkrajina@gmail.com',
url='http://www.trackprofiler.com/gpxpy/index.html',
packages=['gpxpy', ],
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
scripts=['gpxinfo']
)
<commit_msg>Add python_requires to help pip<commit_after>
|
#!/usr/bin/python
# Copyright 2011 Tomo Krajina
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
setup(
name='gpxpy',
version='1.3.3',
description='GPX file parser and GPS track manipulation library',
license='Apache License, Version 2.0',
author='Tomo Krajina',
author_email='tkrajina@gmail.com',
url='http://www.trackprofiler.com/gpxpy/index.html',
packages=['gpxpy', ],
python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*",
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
scripts=['gpxinfo']
)
|
#!/usr/bin/python
# Copyright 2011 Tomo Krajina
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
setup(
name='gpxpy',
version='1.3.3',
description='GPX file parser and GPS track manipulation library',
license='Apache License, Version 2.0',
author='Tomo Krajina',
author_email='tkrajina@gmail.com',
url='http://www.trackprofiler.com/gpxpy/index.html',
packages=['gpxpy', ],
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
scripts=['gpxinfo']
)
Add python_requires to help pip#!/usr/bin/python
# Copyright 2011 Tomo Krajina
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
setup(
name='gpxpy',
version='1.3.3',
description='GPX file parser and GPS track manipulation library',
license='Apache License, Version 2.0',
author='Tomo Krajina',
author_email='tkrajina@gmail.com',
url='http://www.trackprofiler.com/gpxpy/index.html',
packages=['gpxpy', ],
python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*",
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
scripts=['gpxinfo']
)
|
<commit_before>#!/usr/bin/python
# Copyright 2011 Tomo Krajina
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
setup(
name='gpxpy',
version='1.3.3',
description='GPX file parser and GPS track manipulation library',
license='Apache License, Version 2.0',
author='Tomo Krajina',
author_email='tkrajina@gmail.com',
url='http://www.trackprofiler.com/gpxpy/index.html',
packages=['gpxpy', ],
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
scripts=['gpxinfo']
)
<commit_msg>Add python_requires to help pip<commit_after>#!/usr/bin/python
# Copyright 2011 Tomo Krajina
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
setup(
name='gpxpy',
version='1.3.3',
description='GPX file parser and GPS track manipulation library',
license='Apache License, Version 2.0',
author='Tomo Krajina',
author_email='tkrajina@gmail.com',
url='http://www.trackprofiler.com/gpxpy/index.html',
packages=['gpxpy', ],
python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*",
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
scripts=['gpxinfo']
)
|
d4c37810c430f3b1a5a0bdc85c481cc313fc2a72
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
long_desc = """
XYPath is aiming to be XPath for spreadsheets: it offers a framework for
navigating around and extracting values from tabular data.
"""
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers for classifiers
setup(
name='xypath',
version='1.0.12',
description="Extract fields from tabular data with complex expressions.",
long_description=long_desc,
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python",
],
keywords='',
author='The Sensible Code Company Ltd',
author_email='feedback@sensiblecode.io',
url='http://sensiblecode.io',
license='BSD',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=[
'messytables>=0.14.5',
],
tests_require=[],
entry_points=\
"""
""",
)
|
from setuptools import setup, find_packages
long_desc = """
XYPath is aiming to be XPath for spreadsheets: it offers a framework for
navigating around and extracting values from tabular data.
"""
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers for classifiers
setup(
name='xypath',
version='1.0.12',
description="Extract fields from tabular data with complex expressions.",
long_description=long_desc,
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python",
],
keywords='',
author='The Sensible Code Company Ltd',
author_email='feedback@sensiblecode.io',
url='http://sensiblecode.io',
license='BSD',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=[
'messytables==0.15',
],
tests_require=[],
entry_points=\
"""
""",
)
|
Use same version of messytables as in requirements
|
Use same version of messytables as in requirements
Also pin the version too as it is in requirements.txt.
|
Python
|
bsd-2-clause
|
scraperwiki/xypath
|
from setuptools import setup, find_packages
long_desc = """
XYPath is aiming to be XPath for spreadsheets: it offers a framework for
navigating around and extracting values from tabular data.
"""
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers for classifiers
setup(
name='xypath',
version='1.0.12',
description="Extract fields from tabular data with complex expressions.",
long_description=long_desc,
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python",
],
keywords='',
author='The Sensible Code Company Ltd',
author_email='feedback@sensiblecode.io',
url='http://sensiblecode.io',
license='BSD',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=[
'messytables>=0.14.5',
],
tests_require=[],
entry_points=\
"""
""",
)
Use same version of messytables as in requirements
Also pin the version too as it is in requirements.txt.
|
from setuptools import setup, find_packages
long_desc = """
XYPath is aiming to be XPath for spreadsheets: it offers a framework for
navigating around and extracting values from tabular data.
"""
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers for classifiers
setup(
name='xypath',
version='1.0.12',
description="Extract fields from tabular data with complex expressions.",
long_description=long_desc,
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python",
],
keywords='',
author='The Sensible Code Company Ltd',
author_email='feedback@sensiblecode.io',
url='http://sensiblecode.io',
license='BSD',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=[
'messytables==0.15',
],
tests_require=[],
entry_points=\
"""
""",
)
|
<commit_before>from setuptools import setup, find_packages
long_desc = """
XYPath is aiming to be XPath for spreadsheets: it offers a framework for
navigating around and extracting values from tabular data.
"""
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers for classifiers
setup(
name='xypath',
version='1.0.12',
description="Extract fields from tabular data with complex expressions.",
long_description=long_desc,
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python",
],
keywords='',
author='The Sensible Code Company Ltd',
author_email='feedback@sensiblecode.io',
url='http://sensiblecode.io',
license='BSD',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=[
'messytables>=0.14.5',
],
tests_require=[],
entry_points=\
"""
""",
)
<commit_msg>Use same version of messytables as in requirements
Also pin the version too as it is in requirements.txt.<commit_after>
|
from setuptools import setup, find_packages
long_desc = """
XYPath is aiming to be XPath for spreadsheets: it offers a framework for
navigating around and extracting values from tabular data.
"""
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers for classifiers
setup(
name='xypath',
version='1.0.12',
description="Extract fields from tabular data with complex expressions.",
long_description=long_desc,
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python",
],
keywords='',
author='The Sensible Code Company Ltd',
author_email='feedback@sensiblecode.io',
url='http://sensiblecode.io',
license='BSD',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=[
'messytables==0.15',
],
tests_require=[],
entry_points=\
"""
""",
)
|
from setuptools import setup, find_packages
long_desc = """
XYPath is aiming to be XPath for spreadsheets: it offers a framework for
navigating around and extracting values from tabular data.
"""
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers for classifiers
setup(
name='xypath',
version='1.0.12',
description="Extract fields from tabular data with complex expressions.",
long_description=long_desc,
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python",
],
keywords='',
author='The Sensible Code Company Ltd',
author_email='feedback@sensiblecode.io',
url='http://sensiblecode.io',
license='BSD',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=[
'messytables>=0.14.5',
],
tests_require=[],
entry_points=\
"""
""",
)
Use same version of messytables as in requirements
Also pin the version too as it is in requirements.txt.from setuptools import setup, find_packages
long_desc = """
XYPath is aiming to be XPath for spreadsheets: it offers a framework for
navigating around and extracting values from tabular data.
"""
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers for classifiers
setup(
name='xypath',
version='1.0.12',
description="Extract fields from tabular data with complex expressions.",
long_description=long_desc,
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python",
],
keywords='',
author='The Sensible Code Company Ltd',
author_email='feedback@sensiblecode.io',
url='http://sensiblecode.io',
license='BSD',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=[
'messytables==0.15',
],
tests_require=[],
entry_points=\
"""
""",
)
|
<commit_before>from setuptools import setup, find_packages
long_desc = """
XYPath is aiming to be XPath for spreadsheets: it offers a framework for
navigating around and extracting values from tabular data.
"""
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers for classifiers
setup(
name='xypath',
version='1.0.12',
description="Extract fields from tabular data with complex expressions.",
long_description=long_desc,
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python",
],
keywords='',
author='The Sensible Code Company Ltd',
author_email='feedback@sensiblecode.io',
url='http://sensiblecode.io',
license='BSD',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=[
'messytables>=0.14.5',
],
tests_require=[],
entry_points=\
"""
""",
)
<commit_msg>Use same version of messytables as in requirements
Also pin the version too as it is in requirements.txt.<commit_after>from setuptools import setup, find_packages
long_desc = """
XYPath is aiming to be XPath for spreadsheets: it offers a framework for
navigating around and extracting values from tabular data.
"""
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers for classifiers
setup(
name='xypath',
version='1.0.12',
description="Extract fields from tabular data with complex expressions.",
long_description=long_desc,
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python",
],
keywords='',
author='The Sensible Code Company Ltd',
author_email='feedback@sensiblecode.io',
url='http://sensiblecode.io',
license='BSD',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=[
'messytables==0.15',
],
tests_require=[],
entry_points=\
"""
""",
)
|
3c7d840c56f70e7f6ec5df1cd7457e4b086aebe6
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
import io
from setuptools import setup
with open('requirements.txt', 'r') as fh:
dependencies = [l.strip() for l in fh]
setup(name='coil',
version='1.3.1',
description='A user-friendly CMS frontend for Nikola.',
keywords='coil,nikola,cms',
author='Chris Warrick, Roberto Alsina, Henry Hirsch et al.',
author_email='chris@getnikola.com',
url='https://github.com/getnikola/coil',
license='MIT',
long_description=io.open('./README.rst', 'r', encoding='utf-8').read(),
platforms='any',
zip_safe=False,
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=['Development Status :: 4 - Beta',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'],
packages=['coil'],
install_requires=dependencies,
include_package_data=True,
entry_points={
'console_scripts': [
'coil = coil.__main__:main',
]
},
)
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
import io
from setuptools import setup
with open('requirements.txt', 'r') as fh:
dependencies = [l.strip() for l in fh]
setup(name='coil',
version='1.3.1',
description='A user-friendly CMS frontend for Nikola.',
keywords='coil,nikola,cms',
author='Chris Warrick, Roberto Alsina, Henry Hirsch et al.',
author_email='chris@getnikola.com',
url='https://github.com/getnikola/coil',
license='MIT',
long_description=io.open('./README.rst', 'r', encoding='utf-8').read(),
platforms='any',
zip_safe=False,
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=['Development Status :: 5 - Production/Stable',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'],
packages=['coil'],
install_requires=dependencies,
include_package_data=True,
entry_points={
'console_scripts': [
'coil = coil.__main__:main',
]
},
)
|
Set status to 5 - Production/Stable
|
Set status to 5 - Production/Stable
Signed-off-by: Chris Warrick <de6f931166e131a07f31c96c765aee08f061d1a5@gmail.com>
|
Python
|
mit
|
dpaleino/coil,getnikola/coil,dpaleino/coil,dpaleino/coil,getnikola/coil,getnikola/coil
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
import io
from setuptools import setup
with open('requirements.txt', 'r') as fh:
dependencies = [l.strip() for l in fh]
setup(name='coil',
version='1.3.1',
description='A user-friendly CMS frontend for Nikola.',
keywords='coil,nikola,cms',
author='Chris Warrick, Roberto Alsina, Henry Hirsch et al.',
author_email='chris@getnikola.com',
url='https://github.com/getnikola/coil',
license='MIT',
long_description=io.open('./README.rst', 'r', encoding='utf-8').read(),
platforms='any',
zip_safe=False,
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=['Development Status :: 4 - Beta',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'],
packages=['coil'],
install_requires=dependencies,
include_package_data=True,
entry_points={
'console_scripts': [
'coil = coil.__main__:main',
]
},
)
Set status to 5 - Production/Stable
Signed-off-by: Chris Warrick <de6f931166e131a07f31c96c765aee08f061d1a5@gmail.com>
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
import io
from setuptools import setup
with open('requirements.txt', 'r') as fh:
dependencies = [l.strip() for l in fh]
setup(name='coil',
version='1.3.1',
description='A user-friendly CMS frontend for Nikola.',
keywords='coil,nikola,cms',
author='Chris Warrick, Roberto Alsina, Henry Hirsch et al.',
author_email='chris@getnikola.com',
url='https://github.com/getnikola/coil',
license='MIT',
long_description=io.open('./README.rst', 'r', encoding='utf-8').read(),
platforms='any',
zip_safe=False,
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=['Development Status :: 5 - Production/Stable',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'],
packages=['coil'],
install_requires=dependencies,
include_package_data=True,
entry_points={
'console_scripts': [
'coil = coil.__main__:main',
]
},
)
|
<commit_before>#!/usr/bin/env python
# -*- encoding: utf-8 -*-
import io
from setuptools import setup
with open('requirements.txt', 'r') as fh:
dependencies = [l.strip() for l in fh]
setup(name='coil',
version='1.3.1',
description='A user-friendly CMS frontend for Nikola.',
keywords='coil,nikola,cms',
author='Chris Warrick, Roberto Alsina, Henry Hirsch et al.',
author_email='chris@getnikola.com',
url='https://github.com/getnikola/coil',
license='MIT',
long_description=io.open('./README.rst', 'r', encoding='utf-8').read(),
platforms='any',
zip_safe=False,
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=['Development Status :: 4 - Beta',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'],
packages=['coil'],
install_requires=dependencies,
include_package_data=True,
entry_points={
'console_scripts': [
'coil = coil.__main__:main',
]
},
)
<commit_msg>Set status to 5 - Production/Stable
Signed-off-by: Chris Warrick <de6f931166e131a07f31c96c765aee08f061d1a5@gmail.com><commit_after>
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
import io
from setuptools import setup
with open('requirements.txt', 'r') as fh:
dependencies = [l.strip() for l in fh]
setup(name='coil',
version='1.3.1',
description='A user-friendly CMS frontend for Nikola.',
keywords='coil,nikola,cms',
author='Chris Warrick, Roberto Alsina, Henry Hirsch et al.',
author_email='chris@getnikola.com',
url='https://github.com/getnikola/coil',
license='MIT',
long_description=io.open('./README.rst', 'r', encoding='utf-8').read(),
platforms='any',
zip_safe=False,
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=['Development Status :: 5 - Production/Stable',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'],
packages=['coil'],
install_requires=dependencies,
include_package_data=True,
entry_points={
'console_scripts': [
'coil = coil.__main__:main',
]
},
)
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
import io
from setuptools import setup
with open('requirements.txt', 'r') as fh:
dependencies = [l.strip() for l in fh]
setup(name='coil',
version='1.3.1',
description='A user-friendly CMS frontend for Nikola.',
keywords='coil,nikola,cms',
author='Chris Warrick, Roberto Alsina, Henry Hirsch et al.',
author_email='chris@getnikola.com',
url='https://github.com/getnikola/coil',
license='MIT',
long_description=io.open('./README.rst', 'r', encoding='utf-8').read(),
platforms='any',
zip_safe=False,
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=['Development Status :: 4 - Beta',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'],
packages=['coil'],
install_requires=dependencies,
include_package_data=True,
entry_points={
'console_scripts': [
'coil = coil.__main__:main',
]
},
)
Set status to 5 - Production/Stable
Signed-off-by: Chris Warrick <de6f931166e131a07f31c96c765aee08f061d1a5@gmail.com>#!/usr/bin/env python
# -*- encoding: utf-8 -*-
import io
from setuptools import setup
with open('requirements.txt', 'r') as fh:
dependencies = [l.strip() for l in fh]
setup(name='coil',
version='1.3.1',
description='A user-friendly CMS frontend for Nikola.',
keywords='coil,nikola,cms',
author='Chris Warrick, Roberto Alsina, Henry Hirsch et al.',
author_email='chris@getnikola.com',
url='https://github.com/getnikola/coil',
license='MIT',
long_description=io.open('./README.rst', 'r', encoding='utf-8').read(),
platforms='any',
zip_safe=False,
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=['Development Status :: 5 - Production/Stable',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'],
packages=['coil'],
install_requires=dependencies,
include_package_data=True,
entry_points={
'console_scripts': [
'coil = coil.__main__:main',
]
},
)
|
<commit_before>#!/usr/bin/env python
# -*- encoding: utf-8 -*-
import io
from setuptools import setup
with open('requirements.txt', 'r') as fh:
dependencies = [l.strip() for l in fh]
setup(name='coil',
version='1.3.1',
description='A user-friendly CMS frontend for Nikola.',
keywords='coil,nikola,cms',
author='Chris Warrick, Roberto Alsina, Henry Hirsch et al.',
author_email='chris@getnikola.com',
url='https://github.com/getnikola/coil',
license='MIT',
long_description=io.open('./README.rst', 'r', encoding='utf-8').read(),
platforms='any',
zip_safe=False,
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=['Development Status :: 4 - Beta',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'],
packages=['coil'],
install_requires=dependencies,
include_package_data=True,
entry_points={
'console_scripts': [
'coil = coil.__main__:main',
]
},
)
<commit_msg>Set status to 5 - Production/Stable
Signed-off-by: Chris Warrick <de6f931166e131a07f31c96c765aee08f061d1a5@gmail.com><commit_after>#!/usr/bin/env python
# -*- encoding: utf-8 -*-
import io
from setuptools import setup
with open('requirements.txt', 'r') as fh:
dependencies = [l.strip() for l in fh]
setup(name='coil',
version='1.3.1',
description='A user-friendly CMS frontend for Nikola.',
keywords='coil,nikola,cms',
author='Chris Warrick, Roberto Alsina, Henry Hirsch et al.',
author_email='chris@getnikola.com',
url='https://github.com/getnikola/coil',
license='MIT',
long_description=io.open('./README.rst', 'r', encoding='utf-8').read(),
platforms='any',
zip_safe=False,
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=['Development Status :: 5 - Production/Stable',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'],
packages=['coil'],
install_requires=dependencies,
include_package_data=True,
entry_points={
'console_scripts': [
'coil = coil.__main__:main',
]
},
)
|
381e26a02ee46437ea840babe6462f48c496f5fe
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import os
from setuptools import setup
README_PATH = os.path.join(os.path.dirname(__file__), "README.md")
with open(README_PATH, "r") as README_FILE:
README = README_FILE.read()
setup(
name="parse_this",
version="1.0.3",
description=(
"Makes it easy to create a command line interface for any "
"function, method or classmethod.."
),
long_description=README,
packages=["parse_this", "test"],
author="Bertrand Vidal",
author_email="vidal.bertrand@gmail.com",
download_url="https://pypi.python.org/pypi/parse_this",
url="https://github.com/bertrandvidal/parse_this",
license="License :: MIT",
classifiers=[
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3.9",
],
)
|
#!/usr/bin/env python
import os
from setuptools import setup
README_PATH = os.path.join(os.path.dirname(__file__), "README.md")
with open(README_PATH, "r") as README_FILE:
README = README_FILE.read()
setup(
name="parse_this",
version="2.0.0",
description=(
"Makes it easy to create a command line interface for any "
"function, method or classmethod.."
),
long_description=README,
packages=["parse_this", "test"],
author="Bertrand Vidal",
author_email="vidal.bertrand@gmail.com",
download_url="https://pypi.python.org/pypi/parse_this",
url="https://github.com/bertrandvidal/parse_this",
license="License :: MIT",
classifiers=[
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3 :: Only"
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
],
)
|
Update to v2.0.0 with py3 only support
|
Update to v2.0.0 with py3 only support
|
Python
|
mit
|
bertrandvidal/parse_this
|
#!/usr/bin/env python
import os
from setuptools import setup
README_PATH = os.path.join(os.path.dirname(__file__), "README.md")
with open(README_PATH, "r") as README_FILE:
README = README_FILE.read()
setup(
name="parse_this",
version="1.0.3",
description=(
"Makes it easy to create a command line interface for any "
"function, method or classmethod.."
),
long_description=README,
packages=["parse_this", "test"],
author="Bertrand Vidal",
author_email="vidal.bertrand@gmail.com",
download_url="https://pypi.python.org/pypi/parse_this",
url="https://github.com/bertrandvidal/parse_this",
license="License :: MIT",
classifiers=[
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3.9",
],
)
Update to v2.0.0 with py3 only support
|
#!/usr/bin/env python
import os
from setuptools import setup
README_PATH = os.path.join(os.path.dirname(__file__), "README.md")
with open(README_PATH, "r") as README_FILE:
README = README_FILE.read()
setup(
name="parse_this",
version="2.0.0",
description=(
"Makes it easy to create a command line interface for any "
"function, method or classmethod.."
),
long_description=README,
packages=["parse_this", "test"],
author="Bertrand Vidal",
author_email="vidal.bertrand@gmail.com",
download_url="https://pypi.python.org/pypi/parse_this",
url="https://github.com/bertrandvidal/parse_this",
license="License :: MIT",
classifiers=[
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3 :: Only"
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
],
)
|
<commit_before>#!/usr/bin/env python
import os
from setuptools import setup
README_PATH = os.path.join(os.path.dirname(__file__), "README.md")
with open(README_PATH, "r") as README_FILE:
README = README_FILE.read()
setup(
name="parse_this",
version="1.0.3",
description=(
"Makes it easy to create a command line interface for any "
"function, method or classmethod.."
),
long_description=README,
packages=["parse_this", "test"],
author="Bertrand Vidal",
author_email="vidal.bertrand@gmail.com",
download_url="https://pypi.python.org/pypi/parse_this",
url="https://github.com/bertrandvidal/parse_this",
license="License :: MIT",
classifiers=[
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3.9",
],
)
<commit_msg>Update to v2.0.0 with py3 only support<commit_after>
|
#!/usr/bin/env python
import os
from setuptools import setup
README_PATH = os.path.join(os.path.dirname(__file__), "README.md")
with open(README_PATH, "r") as README_FILE:
README = README_FILE.read()
setup(
name="parse_this",
version="2.0.0",
description=(
"Makes it easy to create a command line interface for any "
"function, method or classmethod.."
),
long_description=README,
packages=["parse_this", "test"],
author="Bertrand Vidal",
author_email="vidal.bertrand@gmail.com",
download_url="https://pypi.python.org/pypi/parse_this",
url="https://github.com/bertrandvidal/parse_this",
license="License :: MIT",
classifiers=[
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3 :: Only"
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
],
)
|
#!/usr/bin/env python
import os
from setuptools import setup
README_PATH = os.path.join(os.path.dirname(__file__), "README.md")
with open(README_PATH, "r") as README_FILE:
README = README_FILE.read()
setup(
name="parse_this",
version="1.0.3",
description=(
"Makes it easy to create a command line interface for any "
"function, method or classmethod.."
),
long_description=README,
packages=["parse_this", "test"],
author="Bertrand Vidal",
author_email="vidal.bertrand@gmail.com",
download_url="https://pypi.python.org/pypi/parse_this",
url="https://github.com/bertrandvidal/parse_this",
license="License :: MIT",
classifiers=[
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3.9",
],
)
Update to v2.0.0 with py3 only support#!/usr/bin/env python
import os
from setuptools import setup
README_PATH = os.path.join(os.path.dirname(__file__), "README.md")
with open(README_PATH, "r") as README_FILE:
README = README_FILE.read()
setup(
name="parse_this",
version="2.0.0",
description=(
"Makes it easy to create a command line interface for any "
"function, method or classmethod.."
),
long_description=README,
packages=["parse_this", "test"],
author="Bertrand Vidal",
author_email="vidal.bertrand@gmail.com",
download_url="https://pypi.python.org/pypi/parse_this",
url="https://github.com/bertrandvidal/parse_this",
license="License :: MIT",
classifiers=[
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3 :: Only"
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
],
)
|
<commit_before>#!/usr/bin/env python
import os
from setuptools import setup
README_PATH = os.path.join(os.path.dirname(__file__), "README.md")
with open(README_PATH, "r") as README_FILE:
README = README_FILE.read()
setup(
name="parse_this",
version="1.0.3",
description=(
"Makes it easy to create a command line interface for any "
"function, method or classmethod.."
),
long_description=README,
packages=["parse_this", "test"],
author="Bertrand Vidal",
author_email="vidal.bertrand@gmail.com",
download_url="https://pypi.python.org/pypi/parse_this",
url="https://github.com/bertrandvidal/parse_this",
license="License :: MIT",
classifiers=[
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3.9",
],
)
<commit_msg>Update to v2.0.0 with py3 only support<commit_after>#!/usr/bin/env python
import os
from setuptools import setup
README_PATH = os.path.join(os.path.dirname(__file__), "README.md")
with open(README_PATH, "r") as README_FILE:
README = README_FILE.read()
setup(
name="parse_this",
version="2.0.0",
description=(
"Makes it easy to create a command line interface for any "
"function, method or classmethod.."
),
long_description=README,
packages=["parse_this", "test"],
author="Bertrand Vidal",
author_email="vidal.bertrand@gmail.com",
download_url="https://pypi.python.org/pypi/parse_this",
url="https://github.com/bertrandvidal/parse_this",
license="License :: MIT",
classifiers=[
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3 :: Only"
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
],
)
|
0d2ff0efacea836be7a1fbfa49c6fec0dd5fe689
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import os
from glob import glob
if os.environ.get('USE_SETUPTOOLS'):
from setuptools import setup
setup_kwargs = dict(zip_safe=0)
else:
from distutils.core import setup
setup_kwargs = dict()
storage_dirs = [ ('storage/whisper',[]), ('storage/lists',[]),
('storage/log',[]), ('storage/rrd',[]) ]
conf_files = [ ('conf', glob('conf/*.example')) ]
setup(
name='carbon',
version='0.9.10-pre2',
url='https://launchpad.net/graphite',
author='Chris Davis',
author_email='chrismd@gmail.com',
license='Apache Software License 2.0',
description='Backend data caching and persistence daemon for Graphite',
packages=['carbon', 'carbon.aggregator', 'twisted.plugins'],
package_dir={'' : 'lib'},
scripts=glob('bin/*'),
package_data={ 'carbon' : ['*.xml'] },
data_files=storage_dirs + conf_files,
install_requires=['twisted', 'txamqp'],
**setup_kwargs
)
|
#!/usr/bin/env python
import os
from glob import glob
if os.environ.get('USE_SETUPTOOLS'):
from setuptools import setup
setup_kwargs = dict(zip_safe=0)
else:
from distutils.core import setup
setup_kwargs = dict()
storage_dirs = [ ('storage/whisper',[]), ('storage/lists',[]),
('storage/log',[]), ('storage/rrd',[]) ]
conf_files = [ ('conf', glob('conf/*.example')) ]
init_scripts = [ ('/etc/init.d', ['init/carbon', 'init/carbon-relay']) ]
setup(
name='carbon',
version='0.9.10_pre2',
url='https://launchpad.net/graphite',
author='Chris Davis',
author_email='chrismd@gmail.com',
license='Apache Software License 2.0',
description='Backend data caching and persistence daemon for Graphite',
packages=['carbon', 'carbon.aggregator', 'twisted.plugins'],
package_dir={'' : 'lib'},
scripts=glob('bin/*'),
package_data={ 'carbon' : ['*.xml'] },
data_files=storage_dirs + conf_files + init_scripts,
install_requires=['twisted', 'txamqp'],
**setup_kwargs
)
|
Modify version string, data files to include init script
|
Modify version string, data files to include init script
|
Python
|
apache-2.0
|
kharandziuk/carbon,deniszh/carbon,graphite-server/carbon,benburry/carbon,graphite-server/carbon,pratX/carbon,piotr1212/carbon,lyft/carbon,johnseekins/carbon,graphite-project/carbon,protochron/carbon,piotr1212/carbon,benburry/carbon,iain-buclaw-sociomantic/carbon,pu239ppy/carbon,mleinart/carbon,cbowman0/carbon,cbowman0/carbon,obfuscurity/carbon,criteo-forks/carbon,xadjmerripen/carbon,protochron/carbon,graphite-project/carbon,pu239ppy/carbon,johnseekins/carbon,iain-buclaw-sociomantic/carbon,kharandziuk/carbon,pratX/carbon,JeanFred/carbon,obfuscurity/carbon,lyft/carbon,criteo-forks/carbon,krux/carbon,mleinart/carbon,krux/carbon,xadjmerripen/carbon,deniszh/carbon,JeanFred/carbon
|
#!/usr/bin/env python
import os
from glob import glob
if os.environ.get('USE_SETUPTOOLS'):
from setuptools import setup
setup_kwargs = dict(zip_safe=0)
else:
from distutils.core import setup
setup_kwargs = dict()
storage_dirs = [ ('storage/whisper',[]), ('storage/lists',[]),
('storage/log',[]), ('storage/rrd',[]) ]
conf_files = [ ('conf', glob('conf/*.example')) ]
setup(
name='carbon',
version='0.9.10-pre2',
url='https://launchpad.net/graphite',
author='Chris Davis',
author_email='chrismd@gmail.com',
license='Apache Software License 2.0',
description='Backend data caching and persistence daemon for Graphite',
packages=['carbon', 'carbon.aggregator', 'twisted.plugins'],
package_dir={'' : 'lib'},
scripts=glob('bin/*'),
package_data={ 'carbon' : ['*.xml'] },
data_files=storage_dirs + conf_files,
install_requires=['twisted', 'txamqp'],
**setup_kwargs
)
Modify version string, data files to include init script
|
#!/usr/bin/env python
import os
from glob import glob
if os.environ.get('USE_SETUPTOOLS'):
from setuptools import setup
setup_kwargs = dict(zip_safe=0)
else:
from distutils.core import setup
setup_kwargs = dict()
storage_dirs = [ ('storage/whisper',[]), ('storage/lists',[]),
('storage/log',[]), ('storage/rrd',[]) ]
conf_files = [ ('conf', glob('conf/*.example')) ]
init_scripts = [ ('/etc/init.d', ['init/carbon', 'init/carbon-relay']) ]
setup(
name='carbon',
version='0.9.10_pre2',
url='https://launchpad.net/graphite',
author='Chris Davis',
author_email='chrismd@gmail.com',
license='Apache Software License 2.0',
description='Backend data caching and persistence daemon for Graphite',
packages=['carbon', 'carbon.aggregator', 'twisted.plugins'],
package_dir={'' : 'lib'},
scripts=glob('bin/*'),
package_data={ 'carbon' : ['*.xml'] },
data_files=storage_dirs + conf_files + init_scripts,
install_requires=['twisted', 'txamqp'],
**setup_kwargs
)
|
<commit_before>#!/usr/bin/env python
import os
from glob import glob
if os.environ.get('USE_SETUPTOOLS'):
from setuptools import setup
setup_kwargs = dict(zip_safe=0)
else:
from distutils.core import setup
setup_kwargs = dict()
storage_dirs = [ ('storage/whisper',[]), ('storage/lists',[]),
('storage/log',[]), ('storage/rrd',[]) ]
conf_files = [ ('conf', glob('conf/*.example')) ]
setup(
name='carbon',
version='0.9.10-pre2',
url='https://launchpad.net/graphite',
author='Chris Davis',
author_email='chrismd@gmail.com',
license='Apache Software License 2.0',
description='Backend data caching and persistence daemon for Graphite',
packages=['carbon', 'carbon.aggregator', 'twisted.plugins'],
package_dir={'' : 'lib'},
scripts=glob('bin/*'),
package_data={ 'carbon' : ['*.xml'] },
data_files=storage_dirs + conf_files,
install_requires=['twisted', 'txamqp'],
**setup_kwargs
)
<commit_msg>Modify version string, data files to include init script<commit_after>
|
#!/usr/bin/env python
import os
from glob import glob
if os.environ.get('USE_SETUPTOOLS'):
from setuptools import setup
setup_kwargs = dict(zip_safe=0)
else:
from distutils.core import setup
setup_kwargs = dict()
storage_dirs = [ ('storage/whisper',[]), ('storage/lists',[]),
('storage/log',[]), ('storage/rrd',[]) ]
conf_files = [ ('conf', glob('conf/*.example')) ]
init_scripts = [ ('/etc/init.d', ['init/carbon', 'init/carbon-relay']) ]
setup(
name='carbon',
version='0.9.10_pre2',
url='https://launchpad.net/graphite',
author='Chris Davis',
author_email='chrismd@gmail.com',
license='Apache Software License 2.0',
description='Backend data caching and persistence daemon for Graphite',
packages=['carbon', 'carbon.aggregator', 'twisted.plugins'],
package_dir={'' : 'lib'},
scripts=glob('bin/*'),
package_data={ 'carbon' : ['*.xml'] },
data_files=storage_dirs + conf_files + init_scripts,
install_requires=['twisted', 'txamqp'],
**setup_kwargs
)
|
#!/usr/bin/env python
import os
from glob import glob
if os.environ.get('USE_SETUPTOOLS'):
from setuptools import setup
setup_kwargs = dict(zip_safe=0)
else:
from distutils.core import setup
setup_kwargs = dict()
storage_dirs = [ ('storage/whisper',[]), ('storage/lists',[]),
('storage/log',[]), ('storage/rrd',[]) ]
conf_files = [ ('conf', glob('conf/*.example')) ]
setup(
name='carbon',
version='0.9.10-pre2',
url='https://launchpad.net/graphite',
author='Chris Davis',
author_email='chrismd@gmail.com',
license='Apache Software License 2.0',
description='Backend data caching and persistence daemon for Graphite',
packages=['carbon', 'carbon.aggregator', 'twisted.plugins'],
package_dir={'' : 'lib'},
scripts=glob('bin/*'),
package_data={ 'carbon' : ['*.xml'] },
data_files=storage_dirs + conf_files,
install_requires=['twisted', 'txamqp'],
**setup_kwargs
)
Modify version string, data files to include init script#!/usr/bin/env python
import os
from glob import glob
if os.environ.get('USE_SETUPTOOLS'):
from setuptools import setup
setup_kwargs = dict(zip_safe=0)
else:
from distutils.core import setup
setup_kwargs = dict()
storage_dirs = [ ('storage/whisper',[]), ('storage/lists',[]),
('storage/log',[]), ('storage/rrd',[]) ]
conf_files = [ ('conf', glob('conf/*.example')) ]
init_scripts = [ ('/etc/init.d', ['init/carbon', 'init/carbon-relay']) ]
setup(
name='carbon',
version='0.9.10_pre2',
url='https://launchpad.net/graphite',
author='Chris Davis',
author_email='chrismd@gmail.com',
license='Apache Software License 2.0',
description='Backend data caching and persistence daemon for Graphite',
packages=['carbon', 'carbon.aggregator', 'twisted.plugins'],
package_dir={'' : 'lib'},
scripts=glob('bin/*'),
package_data={ 'carbon' : ['*.xml'] },
data_files=storage_dirs + conf_files + init_scripts,
install_requires=['twisted', 'txamqp'],
**setup_kwargs
)
|
<commit_before>#!/usr/bin/env python
import os
from glob import glob
if os.environ.get('USE_SETUPTOOLS'):
from setuptools import setup
setup_kwargs = dict(zip_safe=0)
else:
from distutils.core import setup
setup_kwargs = dict()
storage_dirs = [ ('storage/whisper',[]), ('storage/lists',[]),
('storage/log',[]), ('storage/rrd',[]) ]
conf_files = [ ('conf', glob('conf/*.example')) ]
setup(
name='carbon',
version='0.9.10-pre2',
url='https://launchpad.net/graphite',
author='Chris Davis',
author_email='chrismd@gmail.com',
license='Apache Software License 2.0',
description='Backend data caching and persistence daemon for Graphite',
packages=['carbon', 'carbon.aggregator', 'twisted.plugins'],
package_dir={'' : 'lib'},
scripts=glob('bin/*'),
package_data={ 'carbon' : ['*.xml'] },
data_files=storage_dirs + conf_files,
install_requires=['twisted', 'txamqp'],
**setup_kwargs
)
<commit_msg>Modify version string, data files to include init script<commit_after>#!/usr/bin/env python
import os
from glob import glob
if os.environ.get('USE_SETUPTOOLS'):
from setuptools import setup
setup_kwargs = dict(zip_safe=0)
else:
from distutils.core import setup
setup_kwargs = dict()
storage_dirs = [ ('storage/whisper',[]), ('storage/lists',[]),
('storage/log',[]), ('storage/rrd',[]) ]
conf_files = [ ('conf', glob('conf/*.example')) ]
init_scripts = [ ('/etc/init.d', ['init/carbon', 'init/carbon-relay']) ]
setup(
name='carbon',
version='0.9.10_pre2',
url='https://launchpad.net/graphite',
author='Chris Davis',
author_email='chrismd@gmail.com',
license='Apache Software License 2.0',
description='Backend data caching and persistence daemon for Graphite',
packages=['carbon', 'carbon.aggregator', 'twisted.plugins'],
package_dir={'' : 'lib'},
scripts=glob('bin/*'),
package_data={ 'carbon' : ['*.xml'] },
data_files=storage_dirs + conf_files + init_scripts,
install_requires=['twisted', 'txamqp'],
**setup_kwargs
)
|
010b209090ce31de1f20b60e641fd6b4296f834c
|
base/view_utils.py
|
base/view_utils.py
|
# django
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
# standard library
def paginate(request, objects, page_size=25):
paginator = Paginator(objects, page_size)
page = request.GET.get('p')
try:
paginated_objects = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
paginated_objects = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
paginated_objects = paginator.page(paginator.num_pages)
return paginated_objects
def clean_query_string(request):
clean_query_set = request.GET.copy()
clean_query_set = dict(
(k, v) for k, v in request.GET.items() if not k.startswith('o_')
)
try:
del clean_query_set['p']
except:
pass
mstring = []
for key in clean_query_set.iterkeys():
valuelist = request.GET.getlist(key)
mstring.extend(['%s=%s' % (key, val) for val in valuelist])
return '&'.join(mstring)
|
# django
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
# standard library
def paginate(request, objects, page_size=25):
paginator = Paginator(objects, page_size)
page = request.GET.get('p')
try:
paginated_objects = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
paginated_objects = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
paginated_objects = paginator.page(paginator.num_pages)
return paginated_objects
def clean_query_string(request):
clean_query_set = request.GET.copy()
clean_query_set = dict(
(k, v) for k, v in request.GET.items() if not k.startswith('o_')
)
try:
del clean_query_set['p']
except:
pass
mstring = []
for key in clean_query_set.keys():
valuelist = request.GET.getlist(key)
mstring.extend(['%s=%s' % (key, val) for val in valuelist])
return '&'.join(mstring)
|
Use keys instead of iterkeys to go through all keys on clean_query_set
|
Use keys instead of iterkeys to go through all keys on clean_query_set
|
Python
|
mit
|
magnet-cl/django-project-template-py3,magnet-cl/django-project-template-py3,Angoreher/xcero,Angoreher/xcero,Angoreher/xcero,magnet-cl/django-project-template-py3,Angoreher/xcero,magnet-cl/django-project-template-py3
|
# django
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
# standard library
def paginate(request, objects, page_size=25):
paginator = Paginator(objects, page_size)
page = request.GET.get('p')
try:
paginated_objects = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
paginated_objects = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
paginated_objects = paginator.page(paginator.num_pages)
return paginated_objects
def clean_query_string(request):
clean_query_set = request.GET.copy()
clean_query_set = dict(
(k, v) for k, v in request.GET.items() if not k.startswith('o_')
)
try:
del clean_query_set['p']
except:
pass
mstring = []
for key in clean_query_set.iterkeys():
valuelist = request.GET.getlist(key)
mstring.extend(['%s=%s' % (key, val) for val in valuelist])
return '&'.join(mstring)
Use keys instead of iterkeys to go through all keys on clean_query_set
|
# django
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
# standard library
def paginate(request, objects, page_size=25):
paginator = Paginator(objects, page_size)
page = request.GET.get('p')
try:
paginated_objects = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
paginated_objects = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
paginated_objects = paginator.page(paginator.num_pages)
return paginated_objects
def clean_query_string(request):
clean_query_set = request.GET.copy()
clean_query_set = dict(
(k, v) for k, v in request.GET.items() if not k.startswith('o_')
)
try:
del clean_query_set['p']
except:
pass
mstring = []
for key in clean_query_set.keys():
valuelist = request.GET.getlist(key)
mstring.extend(['%s=%s' % (key, val) for val in valuelist])
return '&'.join(mstring)
|
<commit_before># django
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
# standard library
def paginate(request, objects, page_size=25):
paginator = Paginator(objects, page_size)
page = request.GET.get('p')
try:
paginated_objects = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
paginated_objects = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
paginated_objects = paginator.page(paginator.num_pages)
return paginated_objects
def clean_query_string(request):
clean_query_set = request.GET.copy()
clean_query_set = dict(
(k, v) for k, v in request.GET.items() if not k.startswith('o_')
)
try:
del clean_query_set['p']
except:
pass
mstring = []
for key in clean_query_set.iterkeys():
valuelist = request.GET.getlist(key)
mstring.extend(['%s=%s' % (key, val) for val in valuelist])
return '&'.join(mstring)
<commit_msg>Use keys instead of iterkeys to go through all keys on clean_query_set<commit_after>
|
# django
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
# standard library
def paginate(request, objects, page_size=25):
paginator = Paginator(objects, page_size)
page = request.GET.get('p')
try:
paginated_objects = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
paginated_objects = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
paginated_objects = paginator.page(paginator.num_pages)
return paginated_objects
def clean_query_string(request):
clean_query_set = request.GET.copy()
clean_query_set = dict(
(k, v) for k, v in request.GET.items() if not k.startswith('o_')
)
try:
del clean_query_set['p']
except:
pass
mstring = []
for key in clean_query_set.keys():
valuelist = request.GET.getlist(key)
mstring.extend(['%s=%s' % (key, val) for val in valuelist])
return '&'.join(mstring)
|
# django
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
# standard library
def paginate(request, objects, page_size=25):
paginator = Paginator(objects, page_size)
page = request.GET.get('p')
try:
paginated_objects = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
paginated_objects = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
paginated_objects = paginator.page(paginator.num_pages)
return paginated_objects
def clean_query_string(request):
clean_query_set = request.GET.copy()
clean_query_set = dict(
(k, v) for k, v in request.GET.items() if not k.startswith('o_')
)
try:
del clean_query_set['p']
except:
pass
mstring = []
for key in clean_query_set.iterkeys():
valuelist = request.GET.getlist(key)
mstring.extend(['%s=%s' % (key, val) for val in valuelist])
return '&'.join(mstring)
Use keys instead of iterkeys to go through all keys on clean_query_set# django
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
# standard library
def paginate(request, objects, page_size=25):
paginator = Paginator(objects, page_size)
page = request.GET.get('p')
try:
paginated_objects = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
paginated_objects = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
paginated_objects = paginator.page(paginator.num_pages)
return paginated_objects
def clean_query_string(request):
clean_query_set = request.GET.copy()
clean_query_set = dict(
(k, v) for k, v in request.GET.items() if not k.startswith('o_')
)
try:
del clean_query_set['p']
except:
pass
mstring = []
for key in clean_query_set.keys():
valuelist = request.GET.getlist(key)
mstring.extend(['%s=%s' % (key, val) for val in valuelist])
return '&'.join(mstring)
|
<commit_before># django
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
# standard library
def paginate(request, objects, page_size=25):
paginator = Paginator(objects, page_size)
page = request.GET.get('p')
try:
paginated_objects = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
paginated_objects = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
paginated_objects = paginator.page(paginator.num_pages)
return paginated_objects
def clean_query_string(request):
clean_query_set = request.GET.copy()
clean_query_set = dict(
(k, v) for k, v in request.GET.items() if not k.startswith('o_')
)
try:
del clean_query_set['p']
except:
pass
mstring = []
for key in clean_query_set.iterkeys():
valuelist = request.GET.getlist(key)
mstring.extend(['%s=%s' % (key, val) for val in valuelist])
return '&'.join(mstring)
<commit_msg>Use keys instead of iterkeys to go through all keys on clean_query_set<commit_after># django
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
# standard library
def paginate(request, objects, page_size=25):
paginator = Paginator(objects, page_size)
page = request.GET.get('p')
try:
paginated_objects = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
paginated_objects = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
paginated_objects = paginator.page(paginator.num_pages)
return paginated_objects
def clean_query_string(request):
clean_query_set = request.GET.copy()
clean_query_set = dict(
(k, v) for k, v in request.GET.items() if not k.startswith('o_')
)
try:
del clean_query_set['p']
except:
pass
mstring = []
for key in clean_query_set.keys():
valuelist = request.GET.getlist(key)
mstring.extend(['%s=%s' % (key, val) for val in valuelist])
return '&'.join(mstring)
|
346ddf5e26351fe1fadbed1bf06482565080a728
|
stack.py
|
stack.py
|
#!/usr/bin/env python
'''Implementation of a simple stack data structure.
The stack has push, pop, and peek methods. Items in the stack have a value,
and next_item attribute. The stack has a top attribute.
'''
class Item(object):
def __init__(self, value, next_item=None):
self.value = value
self.next_item = next_item
def __str__(self):
return self.value
class Stack(object):
def __init__(self, top=None):
self.top = top
def push(self, value):
item = Item(value)
item.next_item = self.top
self.top = item
def pop(self):
pass
def peek(self):
return self.top.value
|
#!/usr/bin/env python
'''Implementation of a simple stack data structure.
The stack has push, pop, and peek methods. Items in the stack have a value,
and next_item attribute. The stack has a top attribute.
'''
class Item(object):
def __init__(self, value, next_item=None):
self.value = value
self.next_item = next_item
def __str__(self):
return self.value
class Stack(object):
def __init__(self, top=None):
self.top = top
def push(self, value):
item = Item(value)
item.next_item = self.top
self.top = item
def pop(self):
try:
pop_item = self.top
self.top = pop_item.next_item
return pop_item.value
except AttributeError:
raise ValueError('No items in stack')
def peek(self):
return self.top.value
|
Add pop method on Stack class
|
Add pop method on Stack class
|
Python
|
mit
|
jwarren116/data-structures-deux
|
#!/usr/bin/env python
'''Implementation of a simple stack data structure.
The stack has push, pop, and peek methods. Items in the stack have a value,
and next_item attribute. The stack has a top attribute.
'''
class Item(object):
def __init__(self, value, next_item=None):
self.value = value
self.next_item = next_item
def __str__(self):
return self.value
class Stack(object):
def __init__(self, top=None):
self.top = top
def push(self, value):
item = Item(value)
item.next_item = self.top
self.top = item
def pop(self):
pass
def peek(self):
return self.top.value
Add pop method on Stack class
|
#!/usr/bin/env python
'''Implementation of a simple stack data structure.
The stack has push, pop, and peek methods. Items in the stack have a value,
and next_item attribute. The stack has a top attribute.
'''
class Item(object):
def __init__(self, value, next_item=None):
self.value = value
self.next_item = next_item
def __str__(self):
return self.value
class Stack(object):
def __init__(self, top=None):
self.top = top
def push(self, value):
item = Item(value)
item.next_item = self.top
self.top = item
def pop(self):
try:
pop_item = self.top
self.top = pop_item.next_item
return pop_item.value
except AttributeError:
raise ValueError('No items in stack')
def peek(self):
return self.top.value
|
<commit_before>#!/usr/bin/env python
'''Implementation of a simple stack data structure.
The stack has push, pop, and peek methods. Items in the stack have a value,
and next_item attribute. The stack has a top attribute.
'''
class Item(object):
def __init__(self, value, next_item=None):
self.value = value
self.next_item = next_item
def __str__(self):
return self.value
class Stack(object):
def __init__(self, top=None):
self.top = top
def push(self, value):
item = Item(value)
item.next_item = self.top
self.top = item
def pop(self):
pass
def peek(self):
return self.top.value
<commit_msg>Add pop method on Stack class<commit_after>
|
#!/usr/bin/env python
'''Implementation of a simple stack data structure.
The stack has push, pop, and peek methods. Items in the stack have a value,
and next_item attribute. The stack has a top attribute.
'''
class Item(object):
def __init__(self, value, next_item=None):
self.value = value
self.next_item = next_item
def __str__(self):
return self.value
class Stack(object):
def __init__(self, top=None):
self.top = top
def push(self, value):
item = Item(value)
item.next_item = self.top
self.top = item
def pop(self):
try:
pop_item = self.top
self.top = pop_item.next_item
return pop_item.value
except AttributeError:
raise ValueError('No items in stack')
def peek(self):
return self.top.value
|
#!/usr/bin/env python
'''Implementation of a simple stack data structure.
The stack has push, pop, and peek methods. Items in the stack have a value,
and next_item attribute. The stack has a top attribute.
'''
class Item(object):
def __init__(self, value, next_item=None):
self.value = value
self.next_item = next_item
def __str__(self):
return self.value
class Stack(object):
def __init__(self, top=None):
self.top = top
def push(self, value):
item = Item(value)
item.next_item = self.top
self.top = item
def pop(self):
pass
def peek(self):
return self.top.value
Add pop method on Stack class#!/usr/bin/env python
'''Implementation of a simple stack data structure.
The stack has push, pop, and peek methods. Items in the stack have a value,
and next_item attribute. The stack has a top attribute.
'''
class Item(object):
def __init__(self, value, next_item=None):
self.value = value
self.next_item = next_item
def __str__(self):
return self.value
class Stack(object):
def __init__(self, top=None):
self.top = top
def push(self, value):
item = Item(value)
item.next_item = self.top
self.top = item
def pop(self):
try:
pop_item = self.top
self.top = pop_item.next_item
return pop_item.value
except AttributeError:
raise ValueError('No items in stack')
def peek(self):
return self.top.value
|
<commit_before>#!/usr/bin/env python
'''Implementation of a simple stack data structure.
The stack has push, pop, and peek methods. Items in the stack have a value,
and next_item attribute. The stack has a top attribute.
'''
class Item(object):
def __init__(self, value, next_item=None):
self.value = value
self.next_item = next_item
def __str__(self):
return self.value
class Stack(object):
def __init__(self, top=None):
self.top = top
def push(self, value):
item = Item(value)
item.next_item = self.top
self.top = item
def pop(self):
pass
def peek(self):
return self.top.value
<commit_msg>Add pop method on Stack class<commit_after>#!/usr/bin/env python
'''Implementation of a simple stack data structure.
The stack has push, pop, and peek methods. Items in the stack have a value,
and next_item attribute. The stack has a top attribute.
'''
class Item(object):
def __init__(self, value, next_item=None):
self.value = value
self.next_item = next_item
def __str__(self):
return self.value
class Stack(object):
def __init__(self, top=None):
self.top = top
def push(self, value):
item = Item(value)
item.next_item = self.top
self.top = item
def pop(self):
try:
pop_item = self.top
self.top = pop_item.next_item
return pop_item.value
except AttributeError:
raise ValueError('No items in stack')
def peek(self):
return self.top.value
|
0d475a69ca53eee62aeb39f35b3d3a8f875d5e71
|
tests/menu_test_5.py
|
tests/menu_test_5.py
|
"""Tests the menu features."""
##==============================================================#
## SECTION: Imports #
##==============================================================#
from testlib import *
from qprompt import enum_menu
##==============================================================#
## SECTION: Class Definitions #
##==============================================================#
class TestCase(unittest.TestCase):
def test_menu_1(test):
"""Check for main() call from console functionality."""
test.assertFalse(op.exists("generated_file.txt"))
subprocess.call("menu_helper_1.py g q", shell=True)
test.assertTrue(op.exists("generated_file.txt"))
subprocess.call("menu_helper_1.py d q", shell=True)
test.assertFalse(op.exists("generated_file.txt"))
##==============================================================#
## SECTION: Main Body #
##==============================================================#
if __name__ == '__main__':
unittest.main()
|
"""Tests the menu features."""
##==============================================================#
## SECTION: Imports #
##==============================================================#
from testlib import *
from qprompt import enum_menu
##==============================================================#
## SECTION: Class Definitions #
##==============================================================#
class TestCase(unittest.TestCase):
def test_menu_1(test):
"""Check for main() call from console functionality."""
test.assertFalse(op.exists("generated_file.txt"))
subprocess.call("python ./menu_helper_1.py g q", shell=True)
test.assertTrue(op.exists("generated_file.txt"))
subprocess.call("python ./menu_helper_1.py d q", shell=True)
test.assertFalse(op.exists("generated_file.txt"))
##==============================================================#
## SECTION: Main Body #
##==============================================================#
if __name__ == '__main__':
unittest.main()
|
Change to get test to pass.
|
Change to get test to pass.
|
Python
|
mit
|
jeffrimko/Qprompt
|
"""Tests the menu features."""
##==============================================================#
## SECTION: Imports #
##==============================================================#
from testlib import *
from qprompt import enum_menu
##==============================================================#
## SECTION: Class Definitions #
##==============================================================#
class TestCase(unittest.TestCase):
def test_menu_1(test):
"""Check for main() call from console functionality."""
test.assertFalse(op.exists("generated_file.txt"))
subprocess.call("menu_helper_1.py g q", shell=True)
test.assertTrue(op.exists("generated_file.txt"))
subprocess.call("menu_helper_1.py d q", shell=True)
test.assertFalse(op.exists("generated_file.txt"))
##==============================================================#
## SECTION: Main Body #
##==============================================================#
if __name__ == '__main__':
unittest.main()
Change to get test to pass.
|
"""Tests the menu features."""
##==============================================================#
## SECTION: Imports #
##==============================================================#
from testlib import *
from qprompt import enum_menu
##==============================================================#
## SECTION: Class Definitions #
##==============================================================#
class TestCase(unittest.TestCase):
def test_menu_1(test):
"""Check for main() call from console functionality."""
test.assertFalse(op.exists("generated_file.txt"))
subprocess.call("python ./menu_helper_1.py g q", shell=True)
test.assertTrue(op.exists("generated_file.txt"))
subprocess.call("python ./menu_helper_1.py d q", shell=True)
test.assertFalse(op.exists("generated_file.txt"))
##==============================================================#
## SECTION: Main Body #
##==============================================================#
if __name__ == '__main__':
unittest.main()
|
<commit_before>"""Tests the menu features."""
##==============================================================#
## SECTION: Imports #
##==============================================================#
from testlib import *
from qprompt import enum_menu
##==============================================================#
## SECTION: Class Definitions #
##==============================================================#
class TestCase(unittest.TestCase):
def test_menu_1(test):
"""Check for main() call from console functionality."""
test.assertFalse(op.exists("generated_file.txt"))
subprocess.call("menu_helper_1.py g q", shell=True)
test.assertTrue(op.exists("generated_file.txt"))
subprocess.call("menu_helper_1.py d q", shell=True)
test.assertFalse(op.exists("generated_file.txt"))
##==============================================================#
## SECTION: Main Body #
##==============================================================#
if __name__ == '__main__':
unittest.main()
<commit_msg>Change to get test to pass.<commit_after>
|
"""Tests the menu features."""
##==============================================================#
## SECTION: Imports #
##==============================================================#
from testlib import *
from qprompt import enum_menu
##==============================================================#
## SECTION: Class Definitions #
##==============================================================#
class TestCase(unittest.TestCase):
def test_menu_1(test):
"""Check for main() call from console functionality."""
test.assertFalse(op.exists("generated_file.txt"))
subprocess.call("python ./menu_helper_1.py g q", shell=True)
test.assertTrue(op.exists("generated_file.txt"))
subprocess.call("python ./menu_helper_1.py d q", shell=True)
test.assertFalse(op.exists("generated_file.txt"))
##==============================================================#
## SECTION: Main Body #
##==============================================================#
if __name__ == '__main__':
unittest.main()
|
"""Tests the menu features."""
##==============================================================#
## SECTION: Imports #
##==============================================================#
from testlib import *
from qprompt import enum_menu
##==============================================================#
## SECTION: Class Definitions #
##==============================================================#
class TestCase(unittest.TestCase):
def test_menu_1(test):
"""Check for main() call from console functionality."""
test.assertFalse(op.exists("generated_file.txt"))
subprocess.call("menu_helper_1.py g q", shell=True)
test.assertTrue(op.exists("generated_file.txt"))
subprocess.call("menu_helper_1.py d q", shell=True)
test.assertFalse(op.exists("generated_file.txt"))
##==============================================================#
## SECTION: Main Body #
##==============================================================#
if __name__ == '__main__':
unittest.main()
Change to get test to pass."""Tests the menu features."""
##==============================================================#
## SECTION: Imports #
##==============================================================#
from testlib import *
from qprompt import enum_menu
##==============================================================#
## SECTION: Class Definitions #
##==============================================================#
class TestCase(unittest.TestCase):
def test_menu_1(test):
"""Check for main() call from console functionality."""
test.assertFalse(op.exists("generated_file.txt"))
subprocess.call("python ./menu_helper_1.py g q", shell=True)
test.assertTrue(op.exists("generated_file.txt"))
subprocess.call("python ./menu_helper_1.py d q", shell=True)
test.assertFalse(op.exists("generated_file.txt"))
##==============================================================#
## SECTION: Main Body #
##==============================================================#
if __name__ == '__main__':
unittest.main()
|
<commit_before>"""Tests the menu features."""
##==============================================================#
## SECTION: Imports #
##==============================================================#
from testlib import *
from qprompt import enum_menu
##==============================================================#
## SECTION: Class Definitions #
##==============================================================#
class TestCase(unittest.TestCase):
def test_menu_1(test):
"""Check for main() call from console functionality."""
test.assertFalse(op.exists("generated_file.txt"))
subprocess.call("menu_helper_1.py g q", shell=True)
test.assertTrue(op.exists("generated_file.txt"))
subprocess.call("menu_helper_1.py d q", shell=True)
test.assertFalse(op.exists("generated_file.txt"))
##==============================================================#
## SECTION: Main Body #
##==============================================================#
if __name__ == '__main__':
unittest.main()
<commit_msg>Change to get test to pass.<commit_after>"""Tests the menu features."""
##==============================================================#
## SECTION: Imports #
##==============================================================#
from testlib import *
from qprompt import enum_menu
##==============================================================#
## SECTION: Class Definitions #
##==============================================================#
class TestCase(unittest.TestCase):
def test_menu_1(test):
"""Check for main() call from console functionality."""
test.assertFalse(op.exists("generated_file.txt"))
subprocess.call("python ./menu_helper_1.py g q", shell=True)
test.assertTrue(op.exists("generated_file.txt"))
subprocess.call("python ./menu_helper_1.py d q", shell=True)
test.assertFalse(op.exists("generated_file.txt"))
##==============================================================#
## SECTION: Main Body #
##==============================================================#
if __name__ == '__main__':
unittest.main()
|
ab41fe934ce241a4dbe5f73f648858f6f9351d5c
|
tests/settings.py
|
tests/settings.py
|
import dj_database_url
DATABASES = {
'default': dj_database_url.config(
default='postgres://localhost/test_utils',
),
}
INSTALLED_APPS = (
'incuna_test_utils',
'tests',
'feincms.module.page',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
)
MIDDLEWARE_CLASSES = ()
PASSWORD_HASHERS = ['django.contrib.auth.hashers.MD5PasswordHasher']
ROOT_URLCONF = 'tests.urls'
SECRET_KEY = 'test'
FEINCMS_USE_PAGE_ADMIN = False
|
import dj_database_url
DATABASES = {
'default': dj_database_url.config(
default='postgres://localhost/test_utils',
),
}
INSTALLED_APPS = (
'incuna_test_utils',
'tests',
'feincms.module.page',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
)
MIDDLEWARE_CLASSES = ()
PASSWORD_HASHERS = ['django.contrib.auth.hashers.MD5PasswordHasher']
ROOT_URLCONF = 'tests.urls'
SECRET_KEY = 'test'
FEINCMS_USE_PAGE_ADMIN = False
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {},
},
]
|
Fix TEMPLATES warning on Django 1.9
|
Fix TEMPLATES warning on Django 1.9
|
Python
|
bsd-2-clause
|
incuna/incuna-test-utils,incuna/incuna-test-utils
|
import dj_database_url
DATABASES = {
'default': dj_database_url.config(
default='postgres://localhost/test_utils',
),
}
INSTALLED_APPS = (
'incuna_test_utils',
'tests',
'feincms.module.page',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
)
MIDDLEWARE_CLASSES = ()
PASSWORD_HASHERS = ['django.contrib.auth.hashers.MD5PasswordHasher']
ROOT_URLCONF = 'tests.urls'
SECRET_KEY = 'test'
FEINCMS_USE_PAGE_ADMIN = False
Fix TEMPLATES warning on Django 1.9
|
import dj_database_url
DATABASES = {
'default': dj_database_url.config(
default='postgres://localhost/test_utils',
),
}
INSTALLED_APPS = (
'incuna_test_utils',
'tests',
'feincms.module.page',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
)
MIDDLEWARE_CLASSES = ()
PASSWORD_HASHERS = ['django.contrib.auth.hashers.MD5PasswordHasher']
ROOT_URLCONF = 'tests.urls'
SECRET_KEY = 'test'
FEINCMS_USE_PAGE_ADMIN = False
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {},
},
]
|
<commit_before>import dj_database_url
DATABASES = {
'default': dj_database_url.config(
default='postgres://localhost/test_utils',
),
}
INSTALLED_APPS = (
'incuna_test_utils',
'tests',
'feincms.module.page',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
)
MIDDLEWARE_CLASSES = ()
PASSWORD_HASHERS = ['django.contrib.auth.hashers.MD5PasswordHasher']
ROOT_URLCONF = 'tests.urls'
SECRET_KEY = 'test'
FEINCMS_USE_PAGE_ADMIN = False
<commit_msg>Fix TEMPLATES warning on Django 1.9<commit_after>
|
import dj_database_url
DATABASES = {
'default': dj_database_url.config(
default='postgres://localhost/test_utils',
),
}
INSTALLED_APPS = (
'incuna_test_utils',
'tests',
'feincms.module.page',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
)
MIDDLEWARE_CLASSES = ()
PASSWORD_HASHERS = ['django.contrib.auth.hashers.MD5PasswordHasher']
ROOT_URLCONF = 'tests.urls'
SECRET_KEY = 'test'
FEINCMS_USE_PAGE_ADMIN = False
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {},
},
]
|
import dj_database_url
DATABASES = {
'default': dj_database_url.config(
default='postgres://localhost/test_utils',
),
}
INSTALLED_APPS = (
'incuna_test_utils',
'tests',
'feincms.module.page',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
)
MIDDLEWARE_CLASSES = ()
PASSWORD_HASHERS = ['django.contrib.auth.hashers.MD5PasswordHasher']
ROOT_URLCONF = 'tests.urls'
SECRET_KEY = 'test'
FEINCMS_USE_PAGE_ADMIN = False
Fix TEMPLATES warning on Django 1.9import dj_database_url
DATABASES = {
'default': dj_database_url.config(
default='postgres://localhost/test_utils',
),
}
INSTALLED_APPS = (
'incuna_test_utils',
'tests',
'feincms.module.page',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
)
MIDDLEWARE_CLASSES = ()
PASSWORD_HASHERS = ['django.contrib.auth.hashers.MD5PasswordHasher']
ROOT_URLCONF = 'tests.urls'
SECRET_KEY = 'test'
FEINCMS_USE_PAGE_ADMIN = False
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {},
},
]
|
<commit_before>import dj_database_url
DATABASES = {
'default': dj_database_url.config(
default='postgres://localhost/test_utils',
),
}
INSTALLED_APPS = (
'incuna_test_utils',
'tests',
'feincms.module.page',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
)
MIDDLEWARE_CLASSES = ()
PASSWORD_HASHERS = ['django.contrib.auth.hashers.MD5PasswordHasher']
ROOT_URLCONF = 'tests.urls'
SECRET_KEY = 'test'
FEINCMS_USE_PAGE_ADMIN = False
<commit_msg>Fix TEMPLATES warning on Django 1.9<commit_after>import dj_database_url
DATABASES = {
'default': dj_database_url.config(
default='postgres://localhost/test_utils',
),
}
INSTALLED_APPS = (
'incuna_test_utils',
'tests',
'feincms.module.page',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
)
MIDDLEWARE_CLASSES = ()
PASSWORD_HASHERS = ['django.contrib.auth.hashers.MD5PasswordHasher']
ROOT_URLCONF = 'tests.urls'
SECRET_KEY = 'test'
FEINCMS_USE_PAGE_ADMIN = False
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {},
},
]
|
f3cd06721efaf3045d09f2d3c2c067e01b27953a
|
tests/som_test.py
|
tests/som_test.py
|
import unittest
from parameterized import parameterized
from som.vm.universe import Universe
class SomTest(unittest.TestCase):
@parameterized.expand([
("ClassStructure",),
("Array" ,),
("Block" ,),
("ClassLoading" ,),
("Closure" ,),
("Coercion" ,),
("CompilerReturn",),
("Double" ,),
("DoesNotUnderstand",),
("Empty" ,),
("Global" ,),
("Hash" ,),
("Integer" ,),
("Preliminary" ,),
("Reflection" ,),
("SelfBlock" ,),
("Set",),
("SpecialSelectors",),
("Super" ,),
("String" ,),
("Symbol" ,),
("System" ,),
("Vector" ,)])
def test_som_test(self, test_name):
args = ["-cp", "Smalltalk", "TestSuite/TestHarness.som", test_name]
u = Universe(True)
u.interpret(args)
self.assertEquals(0, u.last_exit_code())
import sys
if 'pytest' in sys.modules:
# hack to make pytest not to collect the unexpanded test method
delattr(SomTest, "test_som_test")
|
import unittest
from parameterized import parameterized
from som.vm.universe import Universe
class SomTest(unittest.TestCase):
@parameterized.expand([
("Array" ,),
("Block" ,),
("ClassLoading" ,),
("ClassStructure",),
("Closure" ,),
("Coercion" ,),
("CompilerReturn",),
("DoesNotUnderstand",),
("Double" ,),
("Empty" ,),
("Global" ,),
("Hash" ,),
("Integer" ,),
("Preliminary" ,),
("Reflection" ,),
("SelfBlock" ,),
("SpecialSelectors",),
("Super" ,),
("Set",),
("String" ,),
("Symbol" ,),
("System" ,),
("Vector" ,)])
def test_som_test(self, test_name):
args = ["-cp", "Smalltalk", "TestSuite/TestHarness.som", test_name]
u = Universe(True)
u.interpret(args)
self.assertEquals(0, u.last_exit_code())
import sys
if 'pytest' in sys.modules:
# hack to make pytest not to collect the unexpanded test method
delattr(SomTest, "test_som_test")
|
Sort tests, to verify they are complete
|
Sort tests, to verify they are complete
Signed-off-by: Stefan Marr <46f1a0bd5592a2f9244ca321b129902a06b53e03@stefan-marr.de>
|
Python
|
mit
|
SOM-st/PySOM,SOM-st/RPySOM,SOM-st/RTruffleSOM,SOM-st/RPySOM,smarr/PySOM,smarr/PySOM,smarr/RTruffleSOM,SOM-st/RTruffleSOM,smarr/RTruffleSOM,SOM-st/PySOM
|
import unittest
from parameterized import parameterized
from som.vm.universe import Universe
class SomTest(unittest.TestCase):
@parameterized.expand([
("ClassStructure",),
("Array" ,),
("Block" ,),
("ClassLoading" ,),
("Closure" ,),
("Coercion" ,),
("CompilerReturn",),
("Double" ,),
("DoesNotUnderstand",),
("Empty" ,),
("Global" ,),
("Hash" ,),
("Integer" ,),
("Preliminary" ,),
("Reflection" ,),
("SelfBlock" ,),
("Set",),
("SpecialSelectors",),
("Super" ,),
("String" ,),
("Symbol" ,),
("System" ,),
("Vector" ,)])
def test_som_test(self, test_name):
args = ["-cp", "Smalltalk", "TestSuite/TestHarness.som", test_name]
u = Universe(True)
u.interpret(args)
self.assertEquals(0, u.last_exit_code())
import sys
if 'pytest' in sys.modules:
# hack to make pytest not to collect the unexpanded test method
delattr(SomTest, "test_som_test")
Sort tests, to verify they are complete
Signed-off-by: Stefan Marr <46f1a0bd5592a2f9244ca321b129902a06b53e03@stefan-marr.de>
|
import unittest
from parameterized import parameterized
from som.vm.universe import Universe
class SomTest(unittest.TestCase):
@parameterized.expand([
("Array" ,),
("Block" ,),
("ClassLoading" ,),
("ClassStructure",),
("Closure" ,),
("Coercion" ,),
("CompilerReturn",),
("DoesNotUnderstand",),
("Double" ,),
("Empty" ,),
("Global" ,),
("Hash" ,),
("Integer" ,),
("Preliminary" ,),
("Reflection" ,),
("SelfBlock" ,),
("SpecialSelectors",),
("Super" ,),
("Set",),
("String" ,),
("Symbol" ,),
("System" ,),
("Vector" ,)])
def test_som_test(self, test_name):
args = ["-cp", "Smalltalk", "TestSuite/TestHarness.som", test_name]
u = Universe(True)
u.interpret(args)
self.assertEquals(0, u.last_exit_code())
import sys
if 'pytest' in sys.modules:
# hack to make pytest not to collect the unexpanded test method
delattr(SomTest, "test_som_test")
|
<commit_before>import unittest
from parameterized import parameterized
from som.vm.universe import Universe
class SomTest(unittest.TestCase):
@parameterized.expand([
("ClassStructure",),
("Array" ,),
("Block" ,),
("ClassLoading" ,),
("Closure" ,),
("Coercion" ,),
("CompilerReturn",),
("Double" ,),
("DoesNotUnderstand",),
("Empty" ,),
("Global" ,),
("Hash" ,),
("Integer" ,),
("Preliminary" ,),
("Reflection" ,),
("SelfBlock" ,),
("Set",),
("SpecialSelectors",),
("Super" ,),
("String" ,),
("Symbol" ,),
("System" ,),
("Vector" ,)])
def test_som_test(self, test_name):
args = ["-cp", "Smalltalk", "TestSuite/TestHarness.som", test_name]
u = Universe(True)
u.interpret(args)
self.assertEquals(0, u.last_exit_code())
import sys
if 'pytest' in sys.modules:
# hack to make pytest not to collect the unexpanded test method
delattr(SomTest, "test_som_test")
<commit_msg>Sort tests, to verify they are complete
Signed-off-by: Stefan Marr <46f1a0bd5592a2f9244ca321b129902a06b53e03@stefan-marr.de><commit_after>
|
import unittest
from parameterized import parameterized
from som.vm.universe import Universe
class SomTest(unittest.TestCase):
@parameterized.expand([
("Array" ,),
("Block" ,),
("ClassLoading" ,),
("ClassStructure",),
("Closure" ,),
("Coercion" ,),
("CompilerReturn",),
("DoesNotUnderstand",),
("Double" ,),
("Empty" ,),
("Global" ,),
("Hash" ,),
("Integer" ,),
("Preliminary" ,),
("Reflection" ,),
("SelfBlock" ,),
("SpecialSelectors",),
("Super" ,),
("Set",),
("String" ,),
("Symbol" ,),
("System" ,),
("Vector" ,)])
def test_som_test(self, test_name):
args = ["-cp", "Smalltalk", "TestSuite/TestHarness.som", test_name]
u = Universe(True)
u.interpret(args)
self.assertEquals(0, u.last_exit_code())
import sys
if 'pytest' in sys.modules:
# hack to make pytest not to collect the unexpanded test method
delattr(SomTest, "test_som_test")
|
import unittest
from parameterized import parameterized
from som.vm.universe import Universe
class SomTest(unittest.TestCase):
@parameterized.expand([
("ClassStructure",),
("Array" ,),
("Block" ,),
("ClassLoading" ,),
("Closure" ,),
("Coercion" ,),
("CompilerReturn",),
("Double" ,),
("DoesNotUnderstand",),
("Empty" ,),
("Global" ,),
("Hash" ,),
("Integer" ,),
("Preliminary" ,),
("Reflection" ,),
("SelfBlock" ,),
("Set",),
("SpecialSelectors",),
("Super" ,),
("String" ,),
("Symbol" ,),
("System" ,),
("Vector" ,)])
def test_som_test(self, test_name):
args = ["-cp", "Smalltalk", "TestSuite/TestHarness.som", test_name]
u = Universe(True)
u.interpret(args)
self.assertEquals(0, u.last_exit_code())
import sys
if 'pytest' in sys.modules:
# hack to make pytest not to collect the unexpanded test method
delattr(SomTest, "test_som_test")
Sort tests, to verify they are complete
Signed-off-by: Stefan Marr <46f1a0bd5592a2f9244ca321b129902a06b53e03@stefan-marr.de>import unittest
from parameterized import parameterized
from som.vm.universe import Universe
class SomTest(unittest.TestCase):
@parameterized.expand([
("Array" ,),
("Block" ,),
("ClassLoading" ,),
("ClassStructure",),
("Closure" ,),
("Coercion" ,),
("CompilerReturn",),
("DoesNotUnderstand",),
("Double" ,),
("Empty" ,),
("Global" ,),
("Hash" ,),
("Integer" ,),
("Preliminary" ,),
("Reflection" ,),
("SelfBlock" ,),
("SpecialSelectors",),
("Super" ,),
("Set",),
("String" ,),
("Symbol" ,),
("System" ,),
("Vector" ,)])
def test_som_test(self, test_name):
args = ["-cp", "Smalltalk", "TestSuite/TestHarness.som", test_name]
u = Universe(True)
u.interpret(args)
self.assertEquals(0, u.last_exit_code())
import sys
if 'pytest' in sys.modules:
# hack to make pytest not to collect the unexpanded test method
delattr(SomTest, "test_som_test")
|
<commit_before>import unittest
from parameterized import parameterized
from som.vm.universe import Universe
class SomTest(unittest.TestCase):
@parameterized.expand([
("ClassStructure",),
("Array" ,),
("Block" ,),
("ClassLoading" ,),
("Closure" ,),
("Coercion" ,),
("CompilerReturn",),
("Double" ,),
("DoesNotUnderstand",),
("Empty" ,),
("Global" ,),
("Hash" ,),
("Integer" ,),
("Preliminary" ,),
("Reflection" ,),
("SelfBlock" ,),
("Set",),
("SpecialSelectors",),
("Super" ,),
("String" ,),
("Symbol" ,),
("System" ,),
("Vector" ,)])
def test_som_test(self, test_name):
args = ["-cp", "Smalltalk", "TestSuite/TestHarness.som", test_name]
u = Universe(True)
u.interpret(args)
self.assertEquals(0, u.last_exit_code())
import sys
if 'pytest' in sys.modules:
# hack to make pytest not to collect the unexpanded test method
delattr(SomTest, "test_som_test")
<commit_msg>Sort tests, to verify they are complete
Signed-off-by: Stefan Marr <46f1a0bd5592a2f9244ca321b129902a06b53e03@stefan-marr.de><commit_after>import unittest
from parameterized import parameterized
from som.vm.universe import Universe
class SomTest(unittest.TestCase):
@parameterized.expand([
("Array" ,),
("Block" ,),
("ClassLoading" ,),
("ClassStructure",),
("Closure" ,),
("Coercion" ,),
("CompilerReturn",),
("DoesNotUnderstand",),
("Double" ,),
("Empty" ,),
("Global" ,),
("Hash" ,),
("Integer" ,),
("Preliminary" ,),
("Reflection" ,),
("SelfBlock" ,),
("SpecialSelectors",),
("Super" ,),
("Set",),
("String" ,),
("Symbol" ,),
("System" ,),
("Vector" ,)])
def test_som_test(self, test_name):
args = ["-cp", "Smalltalk", "TestSuite/TestHarness.som", test_name]
u = Universe(True)
u.interpret(args)
self.assertEquals(0, u.last_exit_code())
import sys
if 'pytest' in sys.modules:
# hack to make pytest not to collect the unexpanded test method
delattr(SomTest, "test_som_test")
|
4f3f738d7fc4b1728c74d6ffc7bf3064ce969520
|
tests/test_cli.py
|
tests/test_cli.py
|
import subprocess
def test_cli_usage():
cmd = ["salib"]
out = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
out = out.decode()
assert len(out) > 0 and "usage" in out.lower(), \
"Incorrect message returned from utility"
def test_cli_setup():
cmd = ["salib", "sample", "morris"]
out = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
out = out.decode()
assert len(out) > 0 and "error" not in out.lower(), \
"Could not use salib as command line utility!"
cmd = ["salib", "sample", "unknown_method"]
out = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
out = out.decode()
assert len(str(out)) > 0 and "invalid choice" in out.lower(), \
"Unimplemented method selected but no error outputted!"
if __name__ == '__main__':
test_cli_setup()
|
import subprocess
import importlib
from SALib.util import avail_approaches
def test_cli_usage():
cmd = ["salib"]
out = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
out = out.decode()
assert len(out) > 0 and "usage" in out.lower(), \
"Incorrect message returned from utility"
def test_cli_setup():
cmd = ["salib", "sample", "morris"]
out = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
out = out.decode()
assert len(out) > 0 and "error" not in out.lower(), \
"Could not use salib as command line utility!"
cmd = ["salib", "sample", "unknown_method"]
out = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
out = out.decode()
assert len(str(out)) > 0 and "invalid choice" in out.lower(), \
"Unimplemented method selected but no error outputted!"
def test_cli_avail_methods():
method_types = ['sample', 'analyze']
for method in method_types:
module = importlib.import_module('.'.join(['SALib', method]))
actions = avail_approaches(module)
for act in actions:
approach = importlib.import_module('.'.join(
['SALib', method, act]))
# Just try to access the functions - raises error on failure
approach.cli_parse
approach.cli_action
if __name__ == '__main__':
test_cli_usage()
test_cli_setup()
test_cli_avail_methods()
|
Test to ensure all samplers and analyzers have required functions
|
Test to ensure all samplers and analyzers have required functions
All methods should have `cli_parse` and `cli_action` functions available
|
Python
|
mit
|
willu47/SALib,willu47/SALib,jdherman/SALib,jdherman/SALib,SALib/SALib
|
import subprocess
def test_cli_usage():
cmd = ["salib"]
out = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
out = out.decode()
assert len(out) > 0 and "usage" in out.lower(), \
"Incorrect message returned from utility"
def test_cli_setup():
cmd = ["salib", "sample", "morris"]
out = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
out = out.decode()
assert len(out) > 0 and "error" not in out.lower(), \
"Could not use salib as command line utility!"
cmd = ["salib", "sample", "unknown_method"]
out = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
out = out.decode()
assert len(str(out)) > 0 and "invalid choice" in out.lower(), \
"Unimplemented method selected but no error outputted!"
if __name__ == '__main__':
test_cli_setup()
Test to ensure all samplers and analyzers have required functions
All methods should have `cli_parse` and `cli_action` functions available
|
import subprocess
import importlib
from SALib.util import avail_approaches
def test_cli_usage():
cmd = ["salib"]
out = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
out = out.decode()
assert len(out) > 0 and "usage" in out.lower(), \
"Incorrect message returned from utility"
def test_cli_setup():
cmd = ["salib", "sample", "morris"]
out = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
out = out.decode()
assert len(out) > 0 and "error" not in out.lower(), \
"Could not use salib as command line utility!"
cmd = ["salib", "sample", "unknown_method"]
out = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
out = out.decode()
assert len(str(out)) > 0 and "invalid choice" in out.lower(), \
"Unimplemented method selected but no error outputted!"
def test_cli_avail_methods():
method_types = ['sample', 'analyze']
for method in method_types:
module = importlib.import_module('.'.join(['SALib', method]))
actions = avail_approaches(module)
for act in actions:
approach = importlib.import_module('.'.join(
['SALib', method, act]))
# Just try to access the functions - raises error on failure
approach.cli_parse
approach.cli_action
if __name__ == '__main__':
test_cli_usage()
test_cli_setup()
test_cli_avail_methods()
|
<commit_before>import subprocess
def test_cli_usage():
cmd = ["salib"]
out = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
out = out.decode()
assert len(out) > 0 and "usage" in out.lower(), \
"Incorrect message returned from utility"
def test_cli_setup():
cmd = ["salib", "sample", "morris"]
out = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
out = out.decode()
assert len(out) > 0 and "error" not in out.lower(), \
"Could not use salib as command line utility!"
cmd = ["salib", "sample", "unknown_method"]
out = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
out = out.decode()
assert len(str(out)) > 0 and "invalid choice" in out.lower(), \
"Unimplemented method selected but no error outputted!"
if __name__ == '__main__':
test_cli_setup()
<commit_msg>Test to ensure all samplers and analyzers have required functions
All methods should have `cli_parse` and `cli_action` functions available<commit_after>
|
import subprocess
import importlib
from SALib.util import avail_approaches
def test_cli_usage():
cmd = ["salib"]
out = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
out = out.decode()
assert len(out) > 0 and "usage" in out.lower(), \
"Incorrect message returned from utility"
def test_cli_setup():
cmd = ["salib", "sample", "morris"]
out = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
out = out.decode()
assert len(out) > 0 and "error" not in out.lower(), \
"Could not use salib as command line utility!"
cmd = ["salib", "sample", "unknown_method"]
out = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
out = out.decode()
assert len(str(out)) > 0 and "invalid choice" in out.lower(), \
"Unimplemented method selected but no error outputted!"
def test_cli_avail_methods():
method_types = ['sample', 'analyze']
for method in method_types:
module = importlib.import_module('.'.join(['SALib', method]))
actions = avail_approaches(module)
for act in actions:
approach = importlib.import_module('.'.join(
['SALib', method, act]))
# Just try to access the functions - raises error on failure
approach.cli_parse
approach.cli_action
if __name__ == '__main__':
test_cli_usage()
test_cli_setup()
test_cli_avail_methods()
|
import subprocess
def test_cli_usage():
cmd = ["salib"]
out = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
out = out.decode()
assert len(out) > 0 and "usage" in out.lower(), \
"Incorrect message returned from utility"
def test_cli_setup():
cmd = ["salib", "sample", "morris"]
out = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
out = out.decode()
assert len(out) > 0 and "error" not in out.lower(), \
"Could not use salib as command line utility!"
cmd = ["salib", "sample", "unknown_method"]
out = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
out = out.decode()
assert len(str(out)) > 0 and "invalid choice" in out.lower(), \
"Unimplemented method selected but no error outputted!"
if __name__ == '__main__':
test_cli_setup()
Test to ensure all samplers and analyzers have required functions
All methods should have `cli_parse` and `cli_action` functions availableimport subprocess
import importlib
from SALib.util import avail_approaches
def test_cli_usage():
cmd = ["salib"]
out = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
out = out.decode()
assert len(out) > 0 and "usage" in out.lower(), \
"Incorrect message returned from utility"
def test_cli_setup():
cmd = ["salib", "sample", "morris"]
out = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
out = out.decode()
assert len(out) > 0 and "error" not in out.lower(), \
"Could not use salib as command line utility!"
cmd = ["salib", "sample", "unknown_method"]
out = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
out = out.decode()
assert len(str(out)) > 0 and "invalid choice" in out.lower(), \
"Unimplemented method selected but no error outputted!"
def test_cli_avail_methods():
method_types = ['sample', 'analyze']
for method in method_types:
module = importlib.import_module('.'.join(['SALib', method]))
actions = avail_approaches(module)
for act in actions:
approach = importlib.import_module('.'.join(
['SALib', method, act]))
# Just try to access the functions - raises error on failure
approach.cli_parse
approach.cli_action
if __name__ == '__main__':
test_cli_usage()
test_cli_setup()
test_cli_avail_methods()
|
<commit_before>import subprocess
def test_cli_usage():
cmd = ["salib"]
out = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
out = out.decode()
assert len(out) > 0 and "usage" in out.lower(), \
"Incorrect message returned from utility"
def test_cli_setup():
cmd = ["salib", "sample", "morris"]
out = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
out = out.decode()
assert len(out) > 0 and "error" not in out.lower(), \
"Could not use salib as command line utility!"
cmd = ["salib", "sample", "unknown_method"]
out = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
out = out.decode()
assert len(str(out)) > 0 and "invalid choice" in out.lower(), \
"Unimplemented method selected but no error outputted!"
if __name__ == '__main__':
test_cli_setup()
<commit_msg>Test to ensure all samplers and analyzers have required functions
All methods should have `cli_parse` and `cli_action` functions available<commit_after>import subprocess
import importlib
from SALib.util import avail_approaches
def test_cli_usage():
cmd = ["salib"]
out = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
out = out.decode()
assert len(out) > 0 and "usage" in out.lower(), \
"Incorrect message returned from utility"
def test_cli_setup():
cmd = ["salib", "sample", "morris"]
out = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
out = out.decode()
assert len(out) > 0 and "error" not in out.lower(), \
"Could not use salib as command line utility!"
cmd = ["salib", "sample", "unknown_method"]
out = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
out = out.decode()
assert len(str(out)) > 0 and "invalid choice" in out.lower(), \
"Unimplemented method selected but no error outputted!"
def test_cli_avail_methods():
method_types = ['sample', 'analyze']
for method in method_types:
module = importlib.import_module('.'.join(['SALib', method]))
actions = avail_approaches(module)
for act in actions:
approach = importlib.import_module('.'.join(
['SALib', method, act]))
# Just try to access the functions - raises error on failure
approach.cli_parse
approach.cli_action
if __name__ == '__main__':
test_cli_usage()
test_cli_setup()
test_cli_avail_methods()
|
658fa530c888eb31b28d5937592fb94d503902fb
|
allmychanges/validators.py
|
allmychanges/validators.py
|
import re
from django.core import validators
class URLValidator(validators.URLValidator):
"""Custom url validator to include git urls and urls with http+ like prefixes
"""
regex = re.compile(
r'^(?:(?:(?:(?:http|git|hg)\+)?' # optional http+ or git+ or hg+
r'(?:http|ftp|)s?|git)://|git@)' # http:// or https:// or git:// or git@
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # domain...
r'localhost|' # localhost...
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}|' # ...or ipv4
r'\[?[A-F0-9]*:[A-F0-9:]+\]?)' # ...or ipv6
r'(?::\d+)?' # optional port
r'(?:/?|[/?:]\S+)' # slash or question mark or just : followed by uri
r'$', re.IGNORECASE)
def __call__(self, value):
super(URLValidator, self).__call__(value)
|
import re
from django.core import validators
class URLValidator(validators.URLValidator):
"""Custom url validator to include git urls and urls with http+ like prefixes
"""
regex = re.compile(
r'^(?:(?:(?:(?:http|git|hg|rechttp)\+)?' # optional http+ or git+ or hg+
r'(?:http|ftp|)s?|git)://|git@)' # http:// or https:// or git:// or git@
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # domain...
r'localhost|' # localhost...
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}|' # ...or ipv4
r'\[?[A-F0-9]*:[A-F0-9:]+\]?)' # ...or ipv6
r'(?::\d+)?' # optional port
r'(?:/?|[/?:]\S+)' # slash or question mark or just : followed by uri
r'$', re.IGNORECASE)
def __call__(self, value):
super(URLValidator, self).__call__(value)
|
Allow rechttp prefix for source field.
|
Allow rechttp prefix for source field.
|
Python
|
bsd-2-clause
|
AllMyChanges/allmychanges.com,AllMyChanges/allmychanges.com,AllMyChanges/allmychanges.com,AllMyChanges/allmychanges.com
|
import re
from django.core import validators
class URLValidator(validators.URLValidator):
"""Custom url validator to include git urls and urls with http+ like prefixes
"""
regex = re.compile(
r'^(?:(?:(?:(?:http|git|hg)\+)?' # optional http+ or git+ or hg+
r'(?:http|ftp|)s?|git)://|git@)' # http:// or https:// or git:// or git@
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # domain...
r'localhost|' # localhost...
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}|' # ...or ipv4
r'\[?[A-F0-9]*:[A-F0-9:]+\]?)' # ...or ipv6
r'(?::\d+)?' # optional port
r'(?:/?|[/?:]\S+)' # slash or question mark or just : followed by uri
r'$', re.IGNORECASE)
def __call__(self, value):
super(URLValidator, self).__call__(value)
Allow rechttp prefix for source field.
|
import re
from django.core import validators
class URLValidator(validators.URLValidator):
"""Custom url validator to include git urls and urls with http+ like prefixes
"""
regex = re.compile(
r'^(?:(?:(?:(?:http|git|hg|rechttp)\+)?' # optional http+ or git+ or hg+
r'(?:http|ftp|)s?|git)://|git@)' # http:// or https:// or git:// or git@
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # domain...
r'localhost|' # localhost...
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}|' # ...or ipv4
r'\[?[A-F0-9]*:[A-F0-9:]+\]?)' # ...or ipv6
r'(?::\d+)?' # optional port
r'(?:/?|[/?:]\S+)' # slash or question mark or just : followed by uri
r'$', re.IGNORECASE)
def __call__(self, value):
super(URLValidator, self).__call__(value)
|
<commit_before>import re
from django.core import validators
class URLValidator(validators.URLValidator):
"""Custom url validator to include git urls and urls with http+ like prefixes
"""
regex = re.compile(
r'^(?:(?:(?:(?:http|git|hg)\+)?' # optional http+ or git+ or hg+
r'(?:http|ftp|)s?|git)://|git@)' # http:// or https:// or git:// or git@
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # domain...
r'localhost|' # localhost...
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}|' # ...or ipv4
r'\[?[A-F0-9]*:[A-F0-9:]+\]?)' # ...or ipv6
r'(?::\d+)?' # optional port
r'(?:/?|[/?:]\S+)' # slash or question mark or just : followed by uri
r'$', re.IGNORECASE)
def __call__(self, value):
super(URLValidator, self).__call__(value)
<commit_msg>Allow rechttp prefix for source field.<commit_after>
|
import re
from django.core import validators
class URLValidator(validators.URLValidator):
"""Custom url validator to include git urls and urls with http+ like prefixes
"""
regex = re.compile(
r'^(?:(?:(?:(?:http|git|hg|rechttp)\+)?' # optional http+ or git+ or hg+
r'(?:http|ftp|)s?|git)://|git@)' # http:// or https:// or git:// or git@
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # domain...
r'localhost|' # localhost...
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}|' # ...or ipv4
r'\[?[A-F0-9]*:[A-F0-9:]+\]?)' # ...or ipv6
r'(?::\d+)?' # optional port
r'(?:/?|[/?:]\S+)' # slash or question mark or just : followed by uri
r'$', re.IGNORECASE)
def __call__(self, value):
super(URLValidator, self).__call__(value)
|
import re
from django.core import validators
class URLValidator(validators.URLValidator):
"""Custom url validator to include git urls and urls with http+ like prefixes
"""
regex = re.compile(
r'^(?:(?:(?:(?:http|git|hg)\+)?' # optional http+ or git+ or hg+
r'(?:http|ftp|)s?|git)://|git@)' # http:// or https:// or git:// or git@
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # domain...
r'localhost|' # localhost...
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}|' # ...or ipv4
r'\[?[A-F0-9]*:[A-F0-9:]+\]?)' # ...or ipv6
r'(?::\d+)?' # optional port
r'(?:/?|[/?:]\S+)' # slash or question mark or just : followed by uri
r'$', re.IGNORECASE)
def __call__(self, value):
super(URLValidator, self).__call__(value)
Allow rechttp prefix for source field.import re
from django.core import validators
class URLValidator(validators.URLValidator):
"""Custom url validator to include git urls and urls with http+ like prefixes
"""
regex = re.compile(
r'^(?:(?:(?:(?:http|git|hg|rechttp)\+)?' # optional http+ or git+ or hg+
r'(?:http|ftp|)s?|git)://|git@)' # http:// or https:// or git:// or git@
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # domain...
r'localhost|' # localhost...
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}|' # ...or ipv4
r'\[?[A-F0-9]*:[A-F0-9:]+\]?)' # ...or ipv6
r'(?::\d+)?' # optional port
r'(?:/?|[/?:]\S+)' # slash or question mark or just : followed by uri
r'$', re.IGNORECASE)
def __call__(self, value):
super(URLValidator, self).__call__(value)
|
<commit_before>import re
from django.core import validators
class URLValidator(validators.URLValidator):
"""Custom url validator to include git urls and urls with http+ like prefixes
"""
regex = re.compile(
r'^(?:(?:(?:(?:http|git|hg)\+)?' # optional http+ or git+ or hg+
r'(?:http|ftp|)s?|git)://|git@)' # http:// or https:// or git:// or git@
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # domain...
r'localhost|' # localhost...
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}|' # ...or ipv4
r'\[?[A-F0-9]*:[A-F0-9:]+\]?)' # ...or ipv6
r'(?::\d+)?' # optional port
r'(?:/?|[/?:]\S+)' # slash or question mark or just : followed by uri
r'$', re.IGNORECASE)
def __call__(self, value):
super(URLValidator, self).__call__(value)
<commit_msg>Allow rechttp prefix for source field.<commit_after>import re
from django.core import validators
class URLValidator(validators.URLValidator):
"""Custom url validator to include git urls and urls with http+ like prefixes
"""
regex = re.compile(
r'^(?:(?:(?:(?:http|git|hg|rechttp)\+)?' # optional http+ or git+ or hg+
r'(?:http|ftp|)s?|git)://|git@)' # http:// or https:// or git:// or git@
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # domain...
r'localhost|' # localhost...
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}|' # ...or ipv4
r'\[?[A-F0-9]*:[A-F0-9:]+\]?)' # ...or ipv6
r'(?::\d+)?' # optional port
r'(?:/?|[/?:]\S+)' # slash or question mark or just : followed by uri
r'$', re.IGNORECASE)
def __call__(self, value):
super(URLValidator, self).__call__(value)
|
ace38e69c66a5957a155091fbd3c746952f982fc
|
tests.py
|
tests.py
|
from scraper.tivix import get_list_of_tivix_members
from scraper.tivix import get_random_tivix_member_bio
def test_get_all_tivix_members():
members = get_list_of_tivix_members()
assert members
assert '/team-members/jack-muratore/' in members
assert '/team-members/kyle-connors/' in members
assert '/team-members/tan-nguyen/' in members
assert '/team-members/will-liu/' in members
assert '/team-members/george-bush/' not in members
def test_output_random_bio():
bret_bio = get_random_tivix_member_bio('bret-waters')
assert 'Bret Waters' in bret_bio
assert 'ridiculously smart team' in bret_bio
flavio_bio = get_random_tivix_member_bio('flavio-zhingri')
assert 'hardest person' in flavio_bio
|
from scraper.tivix import get_list_of_tivix_members
from scraper.tivix import get_random_tivix_member_bio
def test_get_all_tivix_members():
members = get_list_of_tivix_members()
assert members
assert '/team-members/jack-muratore/' in members
assert '/team-members/kyle-connors/' in members
assert '/team-members/tan-nguyen/' in members
assert '/team-members/will-liu/' in members
assert '/team-members/george-bush/' not in members
def test_bio_to_alexa_string():
bret_bio = get_random_tivix_member_bio('bret-waters')
assert 'Bret Waters' in bret_bio
assert 'ridiculously smart team' in bret_bio
flavio_bio = get_random_tivix_member_bio('flavio-zhingri')
assert 'hardest person' in flavio_bio
|
Rename test to be more fitting
|
Rename test to be more fitting
|
Python
|
mit
|
banjocat/alexa-tivix-members
|
from scraper.tivix import get_list_of_tivix_members
from scraper.tivix import get_random_tivix_member_bio
def test_get_all_tivix_members():
members = get_list_of_tivix_members()
assert members
assert '/team-members/jack-muratore/' in members
assert '/team-members/kyle-connors/' in members
assert '/team-members/tan-nguyen/' in members
assert '/team-members/will-liu/' in members
assert '/team-members/george-bush/' not in members
def test_output_random_bio():
bret_bio = get_random_tivix_member_bio('bret-waters')
assert 'Bret Waters' in bret_bio
assert 'ridiculously smart team' in bret_bio
flavio_bio = get_random_tivix_member_bio('flavio-zhingri')
assert 'hardest person' in flavio_bio
Rename test to be more fitting
|
from scraper.tivix import get_list_of_tivix_members
from scraper.tivix import get_random_tivix_member_bio
def test_get_all_tivix_members():
members = get_list_of_tivix_members()
assert members
assert '/team-members/jack-muratore/' in members
assert '/team-members/kyle-connors/' in members
assert '/team-members/tan-nguyen/' in members
assert '/team-members/will-liu/' in members
assert '/team-members/george-bush/' not in members
def test_bio_to_alexa_string():
bret_bio = get_random_tivix_member_bio('bret-waters')
assert 'Bret Waters' in bret_bio
assert 'ridiculously smart team' in bret_bio
flavio_bio = get_random_tivix_member_bio('flavio-zhingri')
assert 'hardest person' in flavio_bio
|
<commit_before>from scraper.tivix import get_list_of_tivix_members
from scraper.tivix import get_random_tivix_member_bio
def test_get_all_tivix_members():
members = get_list_of_tivix_members()
assert members
assert '/team-members/jack-muratore/' in members
assert '/team-members/kyle-connors/' in members
assert '/team-members/tan-nguyen/' in members
assert '/team-members/will-liu/' in members
assert '/team-members/george-bush/' not in members
def test_output_random_bio():
bret_bio = get_random_tivix_member_bio('bret-waters')
assert 'Bret Waters' in bret_bio
assert 'ridiculously smart team' in bret_bio
flavio_bio = get_random_tivix_member_bio('flavio-zhingri')
assert 'hardest person' in flavio_bio
<commit_msg>Rename test to be more fitting<commit_after>
|
from scraper.tivix import get_list_of_tivix_members
from scraper.tivix import get_random_tivix_member_bio
def test_get_all_tivix_members():
members = get_list_of_tivix_members()
assert members
assert '/team-members/jack-muratore/' in members
assert '/team-members/kyle-connors/' in members
assert '/team-members/tan-nguyen/' in members
assert '/team-members/will-liu/' in members
assert '/team-members/george-bush/' not in members
def test_bio_to_alexa_string():
bret_bio = get_random_tivix_member_bio('bret-waters')
assert 'Bret Waters' in bret_bio
assert 'ridiculously smart team' in bret_bio
flavio_bio = get_random_tivix_member_bio('flavio-zhingri')
assert 'hardest person' in flavio_bio
|
from scraper.tivix import get_list_of_tivix_members
from scraper.tivix import get_random_tivix_member_bio
def test_get_all_tivix_members():
members = get_list_of_tivix_members()
assert members
assert '/team-members/jack-muratore/' in members
assert '/team-members/kyle-connors/' in members
assert '/team-members/tan-nguyen/' in members
assert '/team-members/will-liu/' in members
assert '/team-members/george-bush/' not in members
def test_output_random_bio():
bret_bio = get_random_tivix_member_bio('bret-waters')
assert 'Bret Waters' in bret_bio
assert 'ridiculously smart team' in bret_bio
flavio_bio = get_random_tivix_member_bio('flavio-zhingri')
assert 'hardest person' in flavio_bio
Rename test to be more fittingfrom scraper.tivix import get_list_of_tivix_members
from scraper.tivix import get_random_tivix_member_bio
def test_get_all_tivix_members():
members = get_list_of_tivix_members()
assert members
assert '/team-members/jack-muratore/' in members
assert '/team-members/kyle-connors/' in members
assert '/team-members/tan-nguyen/' in members
assert '/team-members/will-liu/' in members
assert '/team-members/george-bush/' not in members
def test_bio_to_alexa_string():
bret_bio = get_random_tivix_member_bio('bret-waters')
assert 'Bret Waters' in bret_bio
assert 'ridiculously smart team' in bret_bio
flavio_bio = get_random_tivix_member_bio('flavio-zhingri')
assert 'hardest person' in flavio_bio
|
<commit_before>from scraper.tivix import get_list_of_tivix_members
from scraper.tivix import get_random_tivix_member_bio
def test_get_all_tivix_members():
members = get_list_of_tivix_members()
assert members
assert '/team-members/jack-muratore/' in members
assert '/team-members/kyle-connors/' in members
assert '/team-members/tan-nguyen/' in members
assert '/team-members/will-liu/' in members
assert '/team-members/george-bush/' not in members
def test_output_random_bio():
bret_bio = get_random_tivix_member_bio('bret-waters')
assert 'Bret Waters' in bret_bio
assert 'ridiculously smart team' in bret_bio
flavio_bio = get_random_tivix_member_bio('flavio-zhingri')
assert 'hardest person' in flavio_bio
<commit_msg>Rename test to be more fitting<commit_after>from scraper.tivix import get_list_of_tivix_members
from scraper.tivix import get_random_tivix_member_bio
def test_get_all_tivix_members():
members = get_list_of_tivix_members()
assert members
assert '/team-members/jack-muratore/' in members
assert '/team-members/kyle-connors/' in members
assert '/team-members/tan-nguyen/' in members
assert '/team-members/will-liu/' in members
assert '/team-members/george-bush/' not in members
def test_bio_to_alexa_string():
bret_bio = get_random_tivix_member_bio('bret-waters')
assert 'Bret Waters' in bret_bio
assert 'ridiculously smart team' in bret_bio
flavio_bio = get_random_tivix_member_bio('flavio-zhingri')
assert 'hardest person' in flavio_bio
|
797e9f3e4fad744e9211c07067992c245a344fb5
|
tests/test_whatcd.py
|
tests/test_whatcd.py
|
from __future__ import unicode_literals, division, absolute_import
from tests import FlexGetBase, use_vcr
class TestInputWhatCD(FlexGetBase):
__yaml__ = """
tasks:
no_fields:
whatcd:
no_user:
whatcd:
password: test
no_pass:
whatcd:
username: test
"""
def test_missing_fields(self):
self.execute_task('no_fields', abort_ok=True)
assert self.task.aborted, 'Task not aborted with no fields present'
self.execute_task('no_user', abort_ok=True)
assert self.task.aborted, 'Task not aborted with no username'
self.execute_task('no_pass', abort_ok=True)
assert self.task.aborted, 'Task not aborted with no password'
class TestWhatCDOnline(FlexGetBase):
__yaml__ = """
tasks:
badlogin:
whatcd:
username: invalid
password: invalid
"""
@use_vcr
def test_invalid_login(self):
self.execute_task("badlogin", abort_ok=True)
assert self.task.aborted, 'Task not aborted with invalid login credentials'
|
from __future__ import unicode_literals, division, absolute_import
from tests import FlexGetBase, use_vcr
class TestWhatCDOnline(FlexGetBase):
__yaml__ = """
tasks:
badlogin:
whatcd:
username: invalid
password: invalid
"""
@use_vcr
def test_invalid_login(self):
self.execute_task("badlogin", abort_ok=True)
assert self.task.aborted, 'Task not aborted with invalid login credentials'
|
Remove schema validation unit tests frow whatcd
|
Remove schema validation unit tests frow whatcd
|
Python
|
mit
|
JorisDeRieck/Flexget,Danfocus/Flexget,qk4l/Flexget,Flexget/Flexget,JorisDeRieck/Flexget,qk4l/Flexget,ianstalk/Flexget,dsemi/Flexget,oxc/Flexget,crawln45/Flexget,qvazzler/Flexget,Flexget/Flexget,sean797/Flexget,oxc/Flexget,Flexget/Flexget,dsemi/Flexget,drwyrm/Flexget,OmgOhnoes/Flexget,drwyrm/Flexget,jacobmetrick/Flexget,jawilson/Flexget,malkavi/Flexget,crawln45/Flexget,qvazzler/Flexget,malkavi/Flexget,tarzasai/Flexget,cvium/Flexget,drwyrm/Flexget,jawilson/Flexget,LynxyssCZ/Flexget,lildadou/Flexget,cvium/Flexget,lildadou/Flexget,Pretagonist/Flexget,LynxyssCZ/Flexget,tobinjt/Flexget,poulpito/Flexget,JorisDeRieck/Flexget,LynxyssCZ/Flexget,jacobmetrick/Flexget,poulpito/Flexget,jacobmetrick/Flexget,Pretagonist/Flexget,antivirtel/Flexget,tsnoam/Flexget,Danfocus/Flexget,tobinjt/Flexget,Pretagonist/Flexget,tsnoam/Flexget,gazpachoking/Flexget,qk4l/Flexget,antivirtel/Flexget,tarzasai/Flexget,lildadou/Flexget,ianstalk/Flexget,poulpito/Flexget,tobinjt/Flexget,jawilson/Flexget,Danfocus/Flexget,Flexget/Flexget,JorisDeRieck/Flexget,oxc/Flexget,OmgOhnoes/Flexget,antivirtel/Flexget,OmgOhnoes/Flexget,jawilson/Flexget,gazpachoking/Flexget,crawln45/Flexget,cvium/Flexget,qvazzler/Flexget,tarzasai/Flexget,LynxyssCZ/Flexget,malkavi/Flexget,crawln45/Flexget,malkavi/Flexget,sean797/Flexget,tsnoam/Flexget,dsemi/Flexget,Danfocus/Flexget,sean797/Flexget,tobinjt/Flexget,ianstalk/Flexget
|
from __future__ import unicode_literals, division, absolute_import
from tests import FlexGetBase, use_vcr
class TestInputWhatCD(FlexGetBase):
__yaml__ = """
tasks:
no_fields:
whatcd:
no_user:
whatcd:
password: test
no_pass:
whatcd:
username: test
"""
def test_missing_fields(self):
self.execute_task('no_fields', abort_ok=True)
assert self.task.aborted, 'Task not aborted with no fields present'
self.execute_task('no_user', abort_ok=True)
assert self.task.aborted, 'Task not aborted with no username'
self.execute_task('no_pass', abort_ok=True)
assert self.task.aborted, 'Task not aborted with no password'
class TestWhatCDOnline(FlexGetBase):
__yaml__ = """
tasks:
badlogin:
whatcd:
username: invalid
password: invalid
"""
@use_vcr
def test_invalid_login(self):
self.execute_task("badlogin", abort_ok=True)
assert self.task.aborted, 'Task not aborted with invalid login credentials'
Remove schema validation unit tests frow whatcd
|
from __future__ import unicode_literals, division, absolute_import
from tests import FlexGetBase, use_vcr
class TestWhatCDOnline(FlexGetBase):
__yaml__ = """
tasks:
badlogin:
whatcd:
username: invalid
password: invalid
"""
@use_vcr
def test_invalid_login(self):
self.execute_task("badlogin", abort_ok=True)
assert self.task.aborted, 'Task not aborted with invalid login credentials'
|
<commit_before>from __future__ import unicode_literals, division, absolute_import
from tests import FlexGetBase, use_vcr
class TestInputWhatCD(FlexGetBase):
__yaml__ = """
tasks:
no_fields:
whatcd:
no_user:
whatcd:
password: test
no_pass:
whatcd:
username: test
"""
def test_missing_fields(self):
self.execute_task('no_fields', abort_ok=True)
assert self.task.aborted, 'Task not aborted with no fields present'
self.execute_task('no_user', abort_ok=True)
assert self.task.aborted, 'Task not aborted with no username'
self.execute_task('no_pass', abort_ok=True)
assert self.task.aborted, 'Task not aborted with no password'
class TestWhatCDOnline(FlexGetBase):
__yaml__ = """
tasks:
badlogin:
whatcd:
username: invalid
password: invalid
"""
@use_vcr
def test_invalid_login(self):
self.execute_task("badlogin", abort_ok=True)
assert self.task.aborted, 'Task not aborted with invalid login credentials'
<commit_msg>Remove schema validation unit tests frow whatcd<commit_after>
|
from __future__ import unicode_literals, division, absolute_import
from tests import FlexGetBase, use_vcr
class TestWhatCDOnline(FlexGetBase):
__yaml__ = """
tasks:
badlogin:
whatcd:
username: invalid
password: invalid
"""
@use_vcr
def test_invalid_login(self):
self.execute_task("badlogin", abort_ok=True)
assert self.task.aborted, 'Task not aborted with invalid login credentials'
|
from __future__ import unicode_literals, division, absolute_import
from tests import FlexGetBase, use_vcr
class TestInputWhatCD(FlexGetBase):
__yaml__ = """
tasks:
no_fields:
whatcd:
no_user:
whatcd:
password: test
no_pass:
whatcd:
username: test
"""
def test_missing_fields(self):
self.execute_task('no_fields', abort_ok=True)
assert self.task.aborted, 'Task not aborted with no fields present'
self.execute_task('no_user', abort_ok=True)
assert self.task.aborted, 'Task not aborted with no username'
self.execute_task('no_pass', abort_ok=True)
assert self.task.aborted, 'Task not aborted with no password'
class TestWhatCDOnline(FlexGetBase):
__yaml__ = """
tasks:
badlogin:
whatcd:
username: invalid
password: invalid
"""
@use_vcr
def test_invalid_login(self):
self.execute_task("badlogin", abort_ok=True)
assert self.task.aborted, 'Task not aborted with invalid login credentials'
Remove schema validation unit tests frow whatcdfrom __future__ import unicode_literals, division, absolute_import
from tests import FlexGetBase, use_vcr
class TestWhatCDOnline(FlexGetBase):
__yaml__ = """
tasks:
badlogin:
whatcd:
username: invalid
password: invalid
"""
@use_vcr
def test_invalid_login(self):
self.execute_task("badlogin", abort_ok=True)
assert self.task.aborted, 'Task not aborted with invalid login credentials'
|
<commit_before>from __future__ import unicode_literals, division, absolute_import
from tests import FlexGetBase, use_vcr
class TestInputWhatCD(FlexGetBase):
__yaml__ = """
tasks:
no_fields:
whatcd:
no_user:
whatcd:
password: test
no_pass:
whatcd:
username: test
"""
def test_missing_fields(self):
self.execute_task('no_fields', abort_ok=True)
assert self.task.aborted, 'Task not aborted with no fields present'
self.execute_task('no_user', abort_ok=True)
assert self.task.aborted, 'Task not aborted with no username'
self.execute_task('no_pass', abort_ok=True)
assert self.task.aborted, 'Task not aborted with no password'
class TestWhatCDOnline(FlexGetBase):
__yaml__ = """
tasks:
badlogin:
whatcd:
username: invalid
password: invalid
"""
@use_vcr
def test_invalid_login(self):
self.execute_task("badlogin", abort_ok=True)
assert self.task.aborted, 'Task not aborted with invalid login credentials'
<commit_msg>Remove schema validation unit tests frow whatcd<commit_after>from __future__ import unicode_literals, division, absolute_import
from tests import FlexGetBase, use_vcr
class TestWhatCDOnline(FlexGetBase):
__yaml__ = """
tasks:
badlogin:
whatcd:
username: invalid
password: invalid
"""
@use_vcr
def test_invalid_login(self):
self.execute_task("badlogin", abort_ok=True)
assert self.task.aborted, 'Task not aborted with invalid login credentials'
|
e07e1468128d423bbb9f0dd0cb79d09620b69e48
|
misc/decode-mirax-tile-position.py
|
misc/decode-mirax-tile-position.py
|
#!/usr/bin/python
import struct, sys, os
f = open(sys.argv[1])
HEADER_OFFSET = 296
f.seek(HEADER_OFFSET)
try:
while True:
x = int(struct.unpack("<i", f.read(4))[0]) / 256.0
y = int(struct.unpack("<i", f.read(4))[0]) / 256.0
zz = f.read(1)
print '%10g %10g' % (x, y)
except:
pass
|
#!/usr/bin/python
import struct, sys, os
f = open(sys.argv[1])
HEADER_OFFSET = 296
f.seek(HEADER_OFFSET)
try:
while True:
x = int(struct.unpack("<i", f.read(4))[0]) / 256.0
y = int(struct.unpack("<i", f.read(4))[0]) / 256.0
zz = f.read(1)
print '%10.100g %10.100g' % (x, y)
except:
pass
|
Fix numerical printout in python script
|
Fix numerical printout in python script
|
Python
|
lgpl-2.1
|
openslide/openslide,openslide/openslide,openslide/openslide,openslide/openslide
|
#!/usr/bin/python
import struct, sys, os
f = open(sys.argv[1])
HEADER_OFFSET = 296
f.seek(HEADER_OFFSET)
try:
while True:
x = int(struct.unpack("<i", f.read(4))[0]) / 256.0
y = int(struct.unpack("<i", f.read(4))[0]) / 256.0
zz = f.read(1)
print '%10g %10g' % (x, y)
except:
pass
Fix numerical printout in python script
|
#!/usr/bin/python
import struct, sys, os
f = open(sys.argv[1])
HEADER_OFFSET = 296
f.seek(HEADER_OFFSET)
try:
while True:
x = int(struct.unpack("<i", f.read(4))[0]) / 256.0
y = int(struct.unpack("<i", f.read(4))[0]) / 256.0
zz = f.read(1)
print '%10.100g %10.100g' % (x, y)
except:
pass
|
<commit_before>#!/usr/bin/python
import struct, sys, os
f = open(sys.argv[1])
HEADER_OFFSET = 296
f.seek(HEADER_OFFSET)
try:
while True:
x = int(struct.unpack("<i", f.read(4))[0]) / 256.0
y = int(struct.unpack("<i", f.read(4))[0]) / 256.0
zz = f.read(1)
print '%10g %10g' % (x, y)
except:
pass
<commit_msg>Fix numerical printout in python script<commit_after>
|
#!/usr/bin/python
import struct, sys, os
f = open(sys.argv[1])
HEADER_OFFSET = 296
f.seek(HEADER_OFFSET)
try:
while True:
x = int(struct.unpack("<i", f.read(4))[0]) / 256.0
y = int(struct.unpack("<i", f.read(4))[0]) / 256.0
zz = f.read(1)
print '%10.100g %10.100g' % (x, y)
except:
pass
|
#!/usr/bin/python
import struct, sys, os
f = open(sys.argv[1])
HEADER_OFFSET = 296
f.seek(HEADER_OFFSET)
try:
while True:
x = int(struct.unpack("<i", f.read(4))[0]) / 256.0
y = int(struct.unpack("<i", f.read(4))[0]) / 256.0
zz = f.read(1)
print '%10g %10g' % (x, y)
except:
pass
Fix numerical printout in python script#!/usr/bin/python
import struct, sys, os
f = open(sys.argv[1])
HEADER_OFFSET = 296
f.seek(HEADER_OFFSET)
try:
while True:
x = int(struct.unpack("<i", f.read(4))[0]) / 256.0
y = int(struct.unpack("<i", f.read(4))[0]) / 256.0
zz = f.read(1)
print '%10.100g %10.100g' % (x, y)
except:
pass
|
<commit_before>#!/usr/bin/python
import struct, sys, os
f = open(sys.argv[1])
HEADER_OFFSET = 296
f.seek(HEADER_OFFSET)
try:
while True:
x = int(struct.unpack("<i", f.read(4))[0]) / 256.0
y = int(struct.unpack("<i", f.read(4))[0]) / 256.0
zz = f.read(1)
print '%10g %10g' % (x, y)
except:
pass
<commit_msg>Fix numerical printout in python script<commit_after>#!/usr/bin/python
import struct, sys, os
f = open(sys.argv[1])
HEADER_OFFSET = 296
f.seek(HEADER_OFFSET)
try:
while True:
x = int(struct.unpack("<i", f.read(4))[0]) / 256.0
y = int(struct.unpack("<i", f.read(4))[0]) / 256.0
zz = f.read(1)
print '%10.100g %10.100g' % (x, y)
except:
pass
|
e3a3f55b0db2a5ed323e23dc0d949378a9871a15
|
nex/parsing/general_text_parser.py
|
nex/parsing/general_text_parser.py
|
from ..tokens import BuiltToken
from .common_parsing import pg as common_pg
gen_txt_pg = common_pg.copy_to_extend()
@gen_txt_pg.production('general_text : filler LEFT_BRACE BALANCED_TEXT_AND_RIGHT_BRACE')
def general_text(p):
return BuiltToken(type_='general_text', value=p[2].value,
position_like=p)
@gen_txt_pg.production('filler : optional_spaces')
@gen_txt_pg.production('filler : filler RELAX optional_spaces')
def filler(p):
return None
general_text_parser = gen_txt_pg.build()
|
from ..rply import ParserGenerator
from ..tokens import BuiltToken
term_types = ['SPACE', 'RELAX', 'LEFT_BRACE', 'BALANCED_TEXT_AND_RIGHT_BRACE']
gen_txt_pg = ParserGenerator(term_types, cache_id="general_text")
@gen_txt_pg.production('general_text : filler LEFT_BRACE BALANCED_TEXT_AND_RIGHT_BRACE')
def general_text(p):
return BuiltToken(type_='general_text', value=p[2].value,
position_like=p)
@gen_txt_pg.production('filler : optional_spaces')
@gen_txt_pg.production('filler : filler RELAX optional_spaces')
def filler(p):
return None
@gen_txt_pg.production('optional_spaces : SPACE optional_spaces')
@gen_txt_pg.production('optional_spaces : empty')
def optional_spaces(p):
return None
@gen_txt_pg.production('empty :')
def empty(p):
return None
general_text_parser = gen_txt_pg.build()
|
Duplicate small parts to make general text parser independent and simple
|
Duplicate small parts to make general text parser independent and simple
|
Python
|
mit
|
eddiejessup/nex
|
from ..tokens import BuiltToken
from .common_parsing import pg as common_pg
gen_txt_pg = common_pg.copy_to_extend()
@gen_txt_pg.production('general_text : filler LEFT_BRACE BALANCED_TEXT_AND_RIGHT_BRACE')
def general_text(p):
return BuiltToken(type_='general_text', value=p[2].value,
position_like=p)
@gen_txt_pg.production('filler : optional_spaces')
@gen_txt_pg.production('filler : filler RELAX optional_spaces')
def filler(p):
return None
general_text_parser = gen_txt_pg.build()
Duplicate small parts to make general text parser independent and simple
|
from ..rply import ParserGenerator
from ..tokens import BuiltToken
term_types = ['SPACE', 'RELAX', 'LEFT_BRACE', 'BALANCED_TEXT_AND_RIGHT_BRACE']
gen_txt_pg = ParserGenerator(term_types, cache_id="general_text")
@gen_txt_pg.production('general_text : filler LEFT_BRACE BALANCED_TEXT_AND_RIGHT_BRACE')
def general_text(p):
return BuiltToken(type_='general_text', value=p[2].value,
position_like=p)
@gen_txt_pg.production('filler : optional_spaces')
@gen_txt_pg.production('filler : filler RELAX optional_spaces')
def filler(p):
return None
@gen_txt_pg.production('optional_spaces : SPACE optional_spaces')
@gen_txt_pg.production('optional_spaces : empty')
def optional_spaces(p):
return None
@gen_txt_pg.production('empty :')
def empty(p):
return None
general_text_parser = gen_txt_pg.build()
|
<commit_before>from ..tokens import BuiltToken
from .common_parsing import pg as common_pg
gen_txt_pg = common_pg.copy_to_extend()
@gen_txt_pg.production('general_text : filler LEFT_BRACE BALANCED_TEXT_AND_RIGHT_BRACE')
def general_text(p):
return BuiltToken(type_='general_text', value=p[2].value,
position_like=p)
@gen_txt_pg.production('filler : optional_spaces')
@gen_txt_pg.production('filler : filler RELAX optional_spaces')
def filler(p):
return None
general_text_parser = gen_txt_pg.build()
<commit_msg>Duplicate small parts to make general text parser independent and simple<commit_after>
|
from ..rply import ParserGenerator
from ..tokens import BuiltToken
term_types = ['SPACE', 'RELAX', 'LEFT_BRACE', 'BALANCED_TEXT_AND_RIGHT_BRACE']
gen_txt_pg = ParserGenerator(term_types, cache_id="general_text")
@gen_txt_pg.production('general_text : filler LEFT_BRACE BALANCED_TEXT_AND_RIGHT_BRACE')
def general_text(p):
return BuiltToken(type_='general_text', value=p[2].value,
position_like=p)
@gen_txt_pg.production('filler : optional_spaces')
@gen_txt_pg.production('filler : filler RELAX optional_spaces')
def filler(p):
return None
@gen_txt_pg.production('optional_spaces : SPACE optional_spaces')
@gen_txt_pg.production('optional_spaces : empty')
def optional_spaces(p):
return None
@gen_txt_pg.production('empty :')
def empty(p):
return None
general_text_parser = gen_txt_pg.build()
|
from ..tokens import BuiltToken
from .common_parsing import pg as common_pg
gen_txt_pg = common_pg.copy_to_extend()
@gen_txt_pg.production('general_text : filler LEFT_BRACE BALANCED_TEXT_AND_RIGHT_BRACE')
def general_text(p):
return BuiltToken(type_='general_text', value=p[2].value,
position_like=p)
@gen_txt_pg.production('filler : optional_spaces')
@gen_txt_pg.production('filler : filler RELAX optional_spaces')
def filler(p):
return None
general_text_parser = gen_txt_pg.build()
Duplicate small parts to make general text parser independent and simplefrom ..rply import ParserGenerator
from ..tokens import BuiltToken
term_types = ['SPACE', 'RELAX', 'LEFT_BRACE', 'BALANCED_TEXT_AND_RIGHT_BRACE']
gen_txt_pg = ParserGenerator(term_types, cache_id="general_text")
@gen_txt_pg.production('general_text : filler LEFT_BRACE BALANCED_TEXT_AND_RIGHT_BRACE')
def general_text(p):
return BuiltToken(type_='general_text', value=p[2].value,
position_like=p)
@gen_txt_pg.production('filler : optional_spaces')
@gen_txt_pg.production('filler : filler RELAX optional_spaces')
def filler(p):
return None
@gen_txt_pg.production('optional_spaces : SPACE optional_spaces')
@gen_txt_pg.production('optional_spaces : empty')
def optional_spaces(p):
return None
@gen_txt_pg.production('empty :')
def empty(p):
return None
general_text_parser = gen_txt_pg.build()
|
<commit_before>from ..tokens import BuiltToken
from .common_parsing import pg as common_pg
gen_txt_pg = common_pg.copy_to_extend()
@gen_txt_pg.production('general_text : filler LEFT_BRACE BALANCED_TEXT_AND_RIGHT_BRACE')
def general_text(p):
return BuiltToken(type_='general_text', value=p[2].value,
position_like=p)
@gen_txt_pg.production('filler : optional_spaces')
@gen_txt_pg.production('filler : filler RELAX optional_spaces')
def filler(p):
return None
general_text_parser = gen_txt_pg.build()
<commit_msg>Duplicate small parts to make general text parser independent and simple<commit_after>from ..rply import ParserGenerator
from ..tokens import BuiltToken
term_types = ['SPACE', 'RELAX', 'LEFT_BRACE', 'BALANCED_TEXT_AND_RIGHT_BRACE']
gen_txt_pg = ParserGenerator(term_types, cache_id="general_text")
@gen_txt_pg.production('general_text : filler LEFT_BRACE BALANCED_TEXT_AND_RIGHT_BRACE')
def general_text(p):
return BuiltToken(type_='general_text', value=p[2].value,
position_like=p)
@gen_txt_pg.production('filler : optional_spaces')
@gen_txt_pg.production('filler : filler RELAX optional_spaces')
def filler(p):
return None
@gen_txt_pg.production('optional_spaces : SPACE optional_spaces')
@gen_txt_pg.production('optional_spaces : empty')
def optional_spaces(p):
return None
@gen_txt_pg.production('empty :')
def empty(p):
return None
general_text_parser = gen_txt_pg.build()
|
c6b8ff0f5c8b67dd6d48ccfe8c82b98d33b979a6
|
openfisca_web_api/scripts/serve.py
|
openfisca_web_api/scripts/serve.py
|
# -*- coding: utf-8 -*-
import os
import sys
from logging.config import fileConfig
from wsgiref.simple_server import make_server
from paste.deploy import loadapp
hostname = 'localhost'
port = 2000
def main():
conf_file_path = os.path.join(sys.prefix, 'share', 'openfisca', 'openfisca-web-api', 'development-france.ini')
fileConfig(conf_file_path)
application = loadapp('config:{}'.format(conf_file_path))
httpd = make_server(hostname, port, application)
print u'Serving on http://{}:{}/'.format(hostname, port)
try:
httpd.serve_forever()
except KeyboardInterrupt:
return
if __name__ == '__main__':
sys.exit(main())
|
# -*- coding: utf-8 -*-
import os
import sys
from logging.config import fileConfig
from wsgiref.simple_server import make_server
from paste.deploy import loadapp
hostname = 'localhost'
port = 2000
def main():
conf_file_path = os.path.join(sys.prefix, 'share', 'openfisca', 'openfisca-web-api', 'development-france.ini')
# If openfisca_web_api has been installed with --editable
if not os.path.isfile(conf_file_path):
import pkg_resources
api_sources_path = pkg_resources.get_distribution("openfisca_web_api").location
conf_file_path = os.path.join(api_sources_path, 'development-france.ini')
fileConfig(conf_file_path)
application = loadapp('config:{}'.format(conf_file_path))
httpd = make_server(hostname, port, application)
print u'Serving on http://{}:{}/'.format(hostname, port)
try:
httpd.serve_forever()
except KeyboardInterrupt:
return
if __name__ == '__main__':
sys.exit(main())
|
Manage case where api installed with --editable
|
Manage case where api installed with --editable
|
Python
|
agpl-3.0
|
openfisca/openfisca-web-api,openfisca/openfisca-web-api
|
# -*- coding: utf-8 -*-
import os
import sys
from logging.config import fileConfig
from wsgiref.simple_server import make_server
from paste.deploy import loadapp
hostname = 'localhost'
port = 2000
def main():
conf_file_path = os.path.join(sys.prefix, 'share', 'openfisca', 'openfisca-web-api', 'development-france.ini')
fileConfig(conf_file_path)
application = loadapp('config:{}'.format(conf_file_path))
httpd = make_server(hostname, port, application)
print u'Serving on http://{}:{}/'.format(hostname, port)
try:
httpd.serve_forever()
except KeyboardInterrupt:
return
if __name__ == '__main__':
sys.exit(main())
Manage case where api installed with --editable
|
# -*- coding: utf-8 -*-
import os
import sys
from logging.config import fileConfig
from wsgiref.simple_server import make_server
from paste.deploy import loadapp
hostname = 'localhost'
port = 2000
def main():
conf_file_path = os.path.join(sys.prefix, 'share', 'openfisca', 'openfisca-web-api', 'development-france.ini')
# If openfisca_web_api has been installed with --editable
if not os.path.isfile(conf_file_path):
import pkg_resources
api_sources_path = pkg_resources.get_distribution("openfisca_web_api").location
conf_file_path = os.path.join(api_sources_path, 'development-france.ini')
fileConfig(conf_file_path)
application = loadapp('config:{}'.format(conf_file_path))
httpd = make_server(hostname, port, application)
print u'Serving on http://{}:{}/'.format(hostname, port)
try:
httpd.serve_forever()
except KeyboardInterrupt:
return
if __name__ == '__main__':
sys.exit(main())
|
<commit_before># -*- coding: utf-8 -*-
import os
import sys
from logging.config import fileConfig
from wsgiref.simple_server import make_server
from paste.deploy import loadapp
hostname = 'localhost'
port = 2000
def main():
conf_file_path = os.path.join(sys.prefix, 'share', 'openfisca', 'openfisca-web-api', 'development-france.ini')
fileConfig(conf_file_path)
application = loadapp('config:{}'.format(conf_file_path))
httpd = make_server(hostname, port, application)
print u'Serving on http://{}:{}/'.format(hostname, port)
try:
httpd.serve_forever()
except KeyboardInterrupt:
return
if __name__ == '__main__':
sys.exit(main())
<commit_msg>Manage case where api installed with --editable<commit_after>
|
# -*- coding: utf-8 -*-
import os
import sys
from logging.config import fileConfig
from wsgiref.simple_server import make_server
from paste.deploy import loadapp
hostname = 'localhost'
port = 2000
def main():
conf_file_path = os.path.join(sys.prefix, 'share', 'openfisca', 'openfisca-web-api', 'development-france.ini')
# If openfisca_web_api has been installed with --editable
if not os.path.isfile(conf_file_path):
import pkg_resources
api_sources_path = pkg_resources.get_distribution("openfisca_web_api").location
conf_file_path = os.path.join(api_sources_path, 'development-france.ini')
fileConfig(conf_file_path)
application = loadapp('config:{}'.format(conf_file_path))
httpd = make_server(hostname, port, application)
print u'Serving on http://{}:{}/'.format(hostname, port)
try:
httpd.serve_forever()
except KeyboardInterrupt:
return
if __name__ == '__main__':
sys.exit(main())
|
# -*- coding: utf-8 -*-
import os
import sys
from logging.config import fileConfig
from wsgiref.simple_server import make_server
from paste.deploy import loadapp
hostname = 'localhost'
port = 2000
def main():
conf_file_path = os.path.join(sys.prefix, 'share', 'openfisca', 'openfisca-web-api', 'development-france.ini')
fileConfig(conf_file_path)
application = loadapp('config:{}'.format(conf_file_path))
httpd = make_server(hostname, port, application)
print u'Serving on http://{}:{}/'.format(hostname, port)
try:
httpd.serve_forever()
except KeyboardInterrupt:
return
if __name__ == '__main__':
sys.exit(main())
Manage case where api installed with --editable# -*- coding: utf-8 -*-
import os
import sys
from logging.config import fileConfig
from wsgiref.simple_server import make_server
from paste.deploy import loadapp
hostname = 'localhost'
port = 2000
def main():
conf_file_path = os.path.join(sys.prefix, 'share', 'openfisca', 'openfisca-web-api', 'development-france.ini')
# If openfisca_web_api has been installed with --editable
if not os.path.isfile(conf_file_path):
import pkg_resources
api_sources_path = pkg_resources.get_distribution("openfisca_web_api").location
conf_file_path = os.path.join(api_sources_path, 'development-france.ini')
fileConfig(conf_file_path)
application = loadapp('config:{}'.format(conf_file_path))
httpd = make_server(hostname, port, application)
print u'Serving on http://{}:{}/'.format(hostname, port)
try:
httpd.serve_forever()
except KeyboardInterrupt:
return
if __name__ == '__main__':
sys.exit(main())
|
<commit_before># -*- coding: utf-8 -*-
import os
import sys
from logging.config import fileConfig
from wsgiref.simple_server import make_server
from paste.deploy import loadapp
hostname = 'localhost'
port = 2000
def main():
conf_file_path = os.path.join(sys.prefix, 'share', 'openfisca', 'openfisca-web-api', 'development-france.ini')
fileConfig(conf_file_path)
application = loadapp('config:{}'.format(conf_file_path))
httpd = make_server(hostname, port, application)
print u'Serving on http://{}:{}/'.format(hostname, port)
try:
httpd.serve_forever()
except KeyboardInterrupt:
return
if __name__ == '__main__':
sys.exit(main())
<commit_msg>Manage case where api installed with --editable<commit_after># -*- coding: utf-8 -*-
import os
import sys
from logging.config import fileConfig
from wsgiref.simple_server import make_server
from paste.deploy import loadapp
hostname = 'localhost'
port = 2000
def main():
conf_file_path = os.path.join(sys.prefix, 'share', 'openfisca', 'openfisca-web-api', 'development-france.ini')
# If openfisca_web_api has been installed with --editable
if not os.path.isfile(conf_file_path):
import pkg_resources
api_sources_path = pkg_resources.get_distribution("openfisca_web_api").location
conf_file_path = os.path.join(api_sources_path, 'development-france.ini')
fileConfig(conf_file_path)
application = loadapp('config:{}'.format(conf_file_path))
httpd = make_server(hostname, port, application)
print u'Serving on http://{}:{}/'.format(hostname, port)
try:
httpd.serve_forever()
except KeyboardInterrupt:
return
if __name__ == '__main__':
sys.exit(main())
|
537cf5da8b0328d7e6d745a4ab5456b77702e124
|
delivery/services/external_program_service.py
|
delivery/services/external_program_service.py
|
import subprocess
import logging
from delivery.models.execution import ExecutionResult, Execution
log = logging.getLogger(__name__)
class ExternalProgramService(object):
"""
A service for running external programs
"""
@staticmethod
def run(cmd):
"""
Run a process and do not wait for it to finish
:param cmd: the command to run as a list, i.e. ['ls','-l', '/']
:return: A instance of Execution
"""
log.debug("Running command: {}".format(" ".join(cmd)))
p = subprocess.Popen(cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
stdin=subprocess.PIPE)
return Execution(pid=p.pid, process_obj=p)
@staticmethod
def wait_for_execution(execution):
"""
Wait for an execution to finish
:param execution: instance of Execution
:return: an ExecutionResult for the execution
"""
out, err = execution.process_obj.communicate()
status_code = execution.process_obj.wait()
return ExecutionResult(out, err, status_code)
@staticmethod
def run_and_wait(cmd):
"""
Run an external command and wait for it to finish
:param cmd: the command to run as a list, i.e. ['ls','-l', '/']
:return: an ExecutionResult for the execution
"""
execution = ExternalProgramService.run(cmd)
return ExternalProgramService.wait_for_execution(execution)
|
from tornado.process import Subprocess
from tornado import gen
from subprocess import PIPE
from delivery.models.execution import ExecutionResult, Execution
class ExternalProgramService(object):
"""
A service for running external programs
"""
@staticmethod
def run(cmd):
"""
Run a process and do not wait for it to finish
:param cmd: the command to run as a list, i.e. ['ls','-l', '/']
:return: A instance of Execution
"""
p = Subprocess(cmd,
stdout=PIPE,
stderr=PIPE,
stdin=PIPE)
return Execution(pid=p.pid, process_obj=p)
@staticmethod
@gen.coroutine
def wait_for_execution(execution):
"""
Wait for an execution to finish
:param execution: instance of Execution
:return: an ExecutionResult for the execution
"""
status_code = yield execution.process_obj.wait_for_exit(raise_error=False)
out = str(execution.process_obj.stdout.read())
err = str(execution.process_obj.stderr.read())
return ExecutionResult(out, err, status_code)
@staticmethod
def run_and_wait(cmd):
"""
Run an external command and wait for it to finish
:param cmd: the command to run as a list, i.e. ['ls','-l', '/']
:return: an ExecutionResult for the execution
"""
execution = ExternalProgramService.run(cmd)
return ExternalProgramService.wait_for_execution(execution)
|
Make ExternalProgramService use async process from tornado
|
Make ExternalProgramService use async process from tornado
|
Python
|
mit
|
arteria-project/arteria-delivery
|
import subprocess
import logging
from delivery.models.execution import ExecutionResult, Execution
log = logging.getLogger(__name__)
class ExternalProgramService(object):
"""
A service for running external programs
"""
@staticmethod
def run(cmd):
"""
Run a process and do not wait for it to finish
:param cmd: the command to run as a list, i.e. ['ls','-l', '/']
:return: A instance of Execution
"""
log.debug("Running command: {}".format(" ".join(cmd)))
p = subprocess.Popen(cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
stdin=subprocess.PIPE)
return Execution(pid=p.pid, process_obj=p)
@staticmethod
def wait_for_execution(execution):
"""
Wait for an execution to finish
:param execution: instance of Execution
:return: an ExecutionResult for the execution
"""
out, err = execution.process_obj.communicate()
status_code = execution.process_obj.wait()
return ExecutionResult(out, err, status_code)
@staticmethod
def run_and_wait(cmd):
"""
Run an external command and wait for it to finish
:param cmd: the command to run as a list, i.e. ['ls','-l', '/']
:return: an ExecutionResult for the execution
"""
execution = ExternalProgramService.run(cmd)
return ExternalProgramService.wait_for_execution(execution)
Make ExternalProgramService use async process from tornado
|
from tornado.process import Subprocess
from tornado import gen
from subprocess import PIPE
from delivery.models.execution import ExecutionResult, Execution
class ExternalProgramService(object):
"""
A service for running external programs
"""
@staticmethod
def run(cmd):
"""
Run a process and do not wait for it to finish
:param cmd: the command to run as a list, i.e. ['ls','-l', '/']
:return: A instance of Execution
"""
p = Subprocess(cmd,
stdout=PIPE,
stderr=PIPE,
stdin=PIPE)
return Execution(pid=p.pid, process_obj=p)
@staticmethod
@gen.coroutine
def wait_for_execution(execution):
"""
Wait for an execution to finish
:param execution: instance of Execution
:return: an ExecutionResult for the execution
"""
status_code = yield execution.process_obj.wait_for_exit(raise_error=False)
out = str(execution.process_obj.stdout.read())
err = str(execution.process_obj.stderr.read())
return ExecutionResult(out, err, status_code)
@staticmethod
def run_and_wait(cmd):
"""
Run an external command and wait for it to finish
:param cmd: the command to run as a list, i.e. ['ls','-l', '/']
:return: an ExecutionResult for the execution
"""
execution = ExternalProgramService.run(cmd)
return ExternalProgramService.wait_for_execution(execution)
|
<commit_before>
import subprocess
import logging
from delivery.models.execution import ExecutionResult, Execution
log = logging.getLogger(__name__)
class ExternalProgramService(object):
"""
A service for running external programs
"""
@staticmethod
def run(cmd):
"""
Run a process and do not wait for it to finish
:param cmd: the command to run as a list, i.e. ['ls','-l', '/']
:return: A instance of Execution
"""
log.debug("Running command: {}".format(" ".join(cmd)))
p = subprocess.Popen(cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
stdin=subprocess.PIPE)
return Execution(pid=p.pid, process_obj=p)
@staticmethod
def wait_for_execution(execution):
"""
Wait for an execution to finish
:param execution: instance of Execution
:return: an ExecutionResult for the execution
"""
out, err = execution.process_obj.communicate()
status_code = execution.process_obj.wait()
return ExecutionResult(out, err, status_code)
@staticmethod
def run_and_wait(cmd):
"""
Run an external command and wait for it to finish
:param cmd: the command to run as a list, i.e. ['ls','-l', '/']
:return: an ExecutionResult for the execution
"""
execution = ExternalProgramService.run(cmd)
return ExternalProgramService.wait_for_execution(execution)
<commit_msg>Make ExternalProgramService use async process from tornado<commit_after>
|
from tornado.process import Subprocess
from tornado import gen
from subprocess import PIPE
from delivery.models.execution import ExecutionResult, Execution
class ExternalProgramService(object):
"""
A service for running external programs
"""
@staticmethod
def run(cmd):
"""
Run a process and do not wait for it to finish
:param cmd: the command to run as a list, i.e. ['ls','-l', '/']
:return: A instance of Execution
"""
p = Subprocess(cmd,
stdout=PIPE,
stderr=PIPE,
stdin=PIPE)
return Execution(pid=p.pid, process_obj=p)
@staticmethod
@gen.coroutine
def wait_for_execution(execution):
"""
Wait for an execution to finish
:param execution: instance of Execution
:return: an ExecutionResult for the execution
"""
status_code = yield execution.process_obj.wait_for_exit(raise_error=False)
out = str(execution.process_obj.stdout.read())
err = str(execution.process_obj.stderr.read())
return ExecutionResult(out, err, status_code)
@staticmethod
def run_and_wait(cmd):
"""
Run an external command and wait for it to finish
:param cmd: the command to run as a list, i.e. ['ls','-l', '/']
:return: an ExecutionResult for the execution
"""
execution = ExternalProgramService.run(cmd)
return ExternalProgramService.wait_for_execution(execution)
|
import subprocess
import logging
from delivery.models.execution import ExecutionResult, Execution
log = logging.getLogger(__name__)
class ExternalProgramService(object):
"""
A service for running external programs
"""
@staticmethod
def run(cmd):
"""
Run a process and do not wait for it to finish
:param cmd: the command to run as a list, i.e. ['ls','-l', '/']
:return: A instance of Execution
"""
log.debug("Running command: {}".format(" ".join(cmd)))
p = subprocess.Popen(cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
stdin=subprocess.PIPE)
return Execution(pid=p.pid, process_obj=p)
@staticmethod
def wait_for_execution(execution):
"""
Wait for an execution to finish
:param execution: instance of Execution
:return: an ExecutionResult for the execution
"""
out, err = execution.process_obj.communicate()
status_code = execution.process_obj.wait()
return ExecutionResult(out, err, status_code)
@staticmethod
def run_and_wait(cmd):
"""
Run an external command and wait for it to finish
:param cmd: the command to run as a list, i.e. ['ls','-l', '/']
:return: an ExecutionResult for the execution
"""
execution = ExternalProgramService.run(cmd)
return ExternalProgramService.wait_for_execution(execution)
Make ExternalProgramService use async process from tornado
from tornado.process import Subprocess
from tornado import gen
from subprocess import PIPE
from delivery.models.execution import ExecutionResult, Execution
class ExternalProgramService(object):
"""
A service for running external programs
"""
@staticmethod
def run(cmd):
"""
Run a process and do not wait for it to finish
:param cmd: the command to run as a list, i.e. ['ls','-l', '/']
:return: A instance of Execution
"""
p = Subprocess(cmd,
stdout=PIPE,
stderr=PIPE,
stdin=PIPE)
return Execution(pid=p.pid, process_obj=p)
@staticmethod
@gen.coroutine
def wait_for_execution(execution):
"""
Wait for an execution to finish
:param execution: instance of Execution
:return: an ExecutionResult for the execution
"""
status_code = yield execution.process_obj.wait_for_exit(raise_error=False)
out = str(execution.process_obj.stdout.read())
err = str(execution.process_obj.stderr.read())
return ExecutionResult(out, err, status_code)
@staticmethod
def run_and_wait(cmd):
"""
Run an external command and wait for it to finish
:param cmd: the command to run as a list, i.e. ['ls','-l', '/']
:return: an ExecutionResult for the execution
"""
execution = ExternalProgramService.run(cmd)
return ExternalProgramService.wait_for_execution(execution)
|
<commit_before>
import subprocess
import logging
from delivery.models.execution import ExecutionResult, Execution
log = logging.getLogger(__name__)
class ExternalProgramService(object):
"""
A service for running external programs
"""
@staticmethod
def run(cmd):
"""
Run a process and do not wait for it to finish
:param cmd: the command to run as a list, i.e. ['ls','-l', '/']
:return: A instance of Execution
"""
log.debug("Running command: {}".format(" ".join(cmd)))
p = subprocess.Popen(cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
stdin=subprocess.PIPE)
return Execution(pid=p.pid, process_obj=p)
@staticmethod
def wait_for_execution(execution):
"""
Wait for an execution to finish
:param execution: instance of Execution
:return: an ExecutionResult for the execution
"""
out, err = execution.process_obj.communicate()
status_code = execution.process_obj.wait()
return ExecutionResult(out, err, status_code)
@staticmethod
def run_and_wait(cmd):
"""
Run an external command and wait for it to finish
:param cmd: the command to run as a list, i.e. ['ls','-l', '/']
:return: an ExecutionResult for the execution
"""
execution = ExternalProgramService.run(cmd)
return ExternalProgramService.wait_for_execution(execution)
<commit_msg>Make ExternalProgramService use async process from tornado<commit_after>
from tornado.process import Subprocess
from tornado import gen
from subprocess import PIPE
from delivery.models.execution import ExecutionResult, Execution
class ExternalProgramService(object):
"""
A service for running external programs
"""
@staticmethod
def run(cmd):
"""
Run a process and do not wait for it to finish
:param cmd: the command to run as a list, i.e. ['ls','-l', '/']
:return: A instance of Execution
"""
p = Subprocess(cmd,
stdout=PIPE,
stderr=PIPE,
stdin=PIPE)
return Execution(pid=p.pid, process_obj=p)
@staticmethod
@gen.coroutine
def wait_for_execution(execution):
"""
Wait for an execution to finish
:param execution: instance of Execution
:return: an ExecutionResult for the execution
"""
status_code = yield execution.process_obj.wait_for_exit(raise_error=False)
out = str(execution.process_obj.stdout.read())
err = str(execution.process_obj.stderr.read())
return ExecutionResult(out, err, status_code)
@staticmethod
def run_and_wait(cmd):
"""
Run an external command and wait for it to finish
:param cmd: the command to run as a list, i.e. ['ls','-l', '/']
:return: an ExecutionResult for the execution
"""
execution = ExternalProgramService.run(cmd)
return ExternalProgramService.wait_for_execution(execution)
|
5827c09e3a003f53baa5abe2d2d0fc5d695d4334
|
arxiv_vanity/papers/management/commands/delete_all_expired_renders.py
|
arxiv_vanity/papers/management/commands/delete_all_expired_renders.py
|
from django.core.management.base import BaseCommand, CommandError
from ...models import Render
class Command(BaseCommand):
help = 'Deletes output of all expired renders'
def handle(self, *args, **options):
for render in Render.objects.expired().iterator():
try:
render.delete_output()
except FileNotFoundError:
print(f"❌ Render {render.id} already deleted")
else:
print(f"✅ Render {render.id} deleted")
|
from django.core.management.base import BaseCommand, CommandError
from ...models import Render
class Command(BaseCommand):
help = 'Deletes output of all expired renders'
def handle(self, *args, **options):
for render in Render.objects.expired().iterator():
try:
render.delete_output()
except FileNotFoundError:
print(f"❌ Render {render.id} already deleted", flush=True)
else:
print(f"✅ Render {render.id} deleted", flush=True)
|
Add flush to delete all renders print
|
Add flush to delete all renders print
|
Python
|
apache-2.0
|
arxiv-vanity/arxiv-vanity,arxiv-vanity/arxiv-vanity,arxiv-vanity/arxiv-vanity,arxiv-vanity/arxiv-vanity
|
from django.core.management.base import BaseCommand, CommandError
from ...models import Render
class Command(BaseCommand):
help = 'Deletes output of all expired renders'
def handle(self, *args, **options):
for render in Render.objects.expired().iterator():
try:
render.delete_output()
except FileNotFoundError:
print(f"❌ Render {render.id} already deleted")
else:
print(f"✅ Render {render.id} deleted")
Add flush to delete all renders print
|
from django.core.management.base import BaseCommand, CommandError
from ...models import Render
class Command(BaseCommand):
help = 'Deletes output of all expired renders'
def handle(self, *args, **options):
for render in Render.objects.expired().iterator():
try:
render.delete_output()
except FileNotFoundError:
print(f"❌ Render {render.id} already deleted", flush=True)
else:
print(f"✅ Render {render.id} deleted", flush=True)
|
<commit_before>from django.core.management.base import BaseCommand, CommandError
from ...models import Render
class Command(BaseCommand):
help = 'Deletes output of all expired renders'
def handle(self, *args, **options):
for render in Render.objects.expired().iterator():
try:
render.delete_output()
except FileNotFoundError:
print(f"❌ Render {render.id} already deleted")
else:
print(f"✅ Render {render.id} deleted")
<commit_msg>Add flush to delete all renders print<commit_after>
|
from django.core.management.base import BaseCommand, CommandError
from ...models import Render
class Command(BaseCommand):
help = 'Deletes output of all expired renders'
def handle(self, *args, **options):
for render in Render.objects.expired().iterator():
try:
render.delete_output()
except FileNotFoundError:
print(f"❌ Render {render.id} already deleted", flush=True)
else:
print(f"✅ Render {render.id} deleted", flush=True)
|
from django.core.management.base import BaseCommand, CommandError
from ...models import Render
class Command(BaseCommand):
help = 'Deletes output of all expired renders'
def handle(self, *args, **options):
for render in Render.objects.expired().iterator():
try:
render.delete_output()
except FileNotFoundError:
print(f"❌ Render {render.id} already deleted")
else:
print(f"✅ Render {render.id} deleted")
Add flush to delete all renders printfrom django.core.management.base import BaseCommand, CommandError
from ...models import Render
class Command(BaseCommand):
help = 'Deletes output of all expired renders'
def handle(self, *args, **options):
for render in Render.objects.expired().iterator():
try:
render.delete_output()
except FileNotFoundError:
print(f"❌ Render {render.id} already deleted", flush=True)
else:
print(f"✅ Render {render.id} deleted", flush=True)
|
<commit_before>from django.core.management.base import BaseCommand, CommandError
from ...models import Render
class Command(BaseCommand):
help = 'Deletes output of all expired renders'
def handle(self, *args, **options):
for render in Render.objects.expired().iterator():
try:
render.delete_output()
except FileNotFoundError:
print(f"❌ Render {render.id} already deleted")
else:
print(f"✅ Render {render.id} deleted")
<commit_msg>Add flush to delete all renders print<commit_after>from django.core.management.base import BaseCommand, CommandError
from ...models import Render
class Command(BaseCommand):
help = 'Deletes output of all expired renders'
def handle(self, *args, **options):
for render in Render.objects.expired().iterator():
try:
render.delete_output()
except FileNotFoundError:
print(f"❌ Render {render.id} already deleted", flush=True)
else:
print(f"✅ Render {render.id} deleted", flush=True)
|
38a6486cb4909b552181482bbf3360fd51168cd1
|
pywikibot/echo.py
|
pywikibot/echo.py
|
# -*- coding: utf-8 -*-
"""Classes and functions for working with the Echo extension."""
from __future__ import absolute_import, unicode_literals
import pywikibot
class Notification(object):
"""A notification issued by the Echo extension."""
def __init__(self, site):
"""Construct an empty Notification object."""
self.site = site
@classmethod
def fromJSON(cls, site, data):
"""
Construct a Notification object from JSON data returned by the API.
@rtype: Notification
"""
notif = cls(site)
notif.id = data['id'] # TODO: use numeric id ?
notif.type = data['type']
notif.category = data['category']
notif.timestamp = pywikibot.Timestamp.fromtimestampformat(data['timestamp']['mw'])
if 'title' in data and 'full' in data['title']:
notif.page = pywikibot.Page(site, data['title']['full'])
else:
notif.page = None
if 'agent' in data and 'name' in data['agent']:
notif.agent = pywikibot.User(site, data['agent']['name'])
else:
notif.agent = None
if 'read' in data:
notif.read = pywikibot.Timestamp.fromtimestampformat(data['read'])
else:
notif.read = False
notif.content = data.get('*', None)
return notif
def mark_as_read(self):
"""Mark the notification as read."""
return self.site.notifications_mark_read(list=self.id)
|
# -*- coding: utf-8 -*-
"""Classes and functions for working with the Echo extension."""
from __future__ import absolute_import, unicode_literals
import pywikibot
class Notification(object):
"""A notification issued by the Echo extension."""
def __init__(self, site):
"""Construct an empty Notification object."""
self.site = site
@classmethod
def fromJSON(cls, site, data):
"""
Construct a Notification object from JSON data returned by the API.
@rtype: Notification
"""
notif = cls(site)
notif.id = data['id'] # TODO: use numeric id ?
notif.type = data['type']
notif.category = data['category']
notif.timestamp = pywikibot.Timestamp.fromtimestampformat(data['timestamp']['mw'])
if 'title' in data and 'full' in data['title']:
notif.page = pywikibot.Page(site, data['title']['full'])
else:
notif.page = None
if 'agent' in data and 'name' in data['agent']:
notif.agent = pywikibot.User(site, data['agent']['name'])
else:
notif.agent = None
if 'read' in data:
notif.read = pywikibot.Timestamp.fromtimestampformat(data['read'])
else:
notif.read = False
notif.content = data.get('*', None)
notif.revid = data.get('revid', None)
return notif
def mark_as_read(self):
"""Mark the notification as read."""
return self.site.notifications_mark_read(list=self.id)
|
Add revid to Notification object
|
Add revid to Notification object
Since most edit-related notifications include revid attributes,
we should include the revids in the Notification objects as we're
building them (if they exist).
Change-Id: Ifdb98e7c79729a1c2f7a5c4c4366e28071a48239
|
Python
|
mit
|
wikimedia/pywikibot-core,jayvdb/pywikibot-core,npdoty/pywikibot,PersianWikipedia/pywikibot-core,hasteur/g13bot_tools_new,wikimedia/pywikibot-core,happy5214/pywikibot-core,happy5214/pywikibot-core,npdoty/pywikibot,Darkdadaah/pywikibot-core,magul/pywikibot-core,jayvdb/pywikibot-core,hasteur/g13bot_tools_new,magul/pywikibot-core,Darkdadaah/pywikibot-core,hasteur/g13bot_tools_new
|
# -*- coding: utf-8 -*-
"""Classes and functions for working with the Echo extension."""
from __future__ import absolute_import, unicode_literals
import pywikibot
class Notification(object):
"""A notification issued by the Echo extension."""
def __init__(self, site):
"""Construct an empty Notification object."""
self.site = site
@classmethod
def fromJSON(cls, site, data):
"""
Construct a Notification object from JSON data returned by the API.
@rtype: Notification
"""
notif = cls(site)
notif.id = data['id'] # TODO: use numeric id ?
notif.type = data['type']
notif.category = data['category']
notif.timestamp = pywikibot.Timestamp.fromtimestampformat(data['timestamp']['mw'])
if 'title' in data and 'full' in data['title']:
notif.page = pywikibot.Page(site, data['title']['full'])
else:
notif.page = None
if 'agent' in data and 'name' in data['agent']:
notif.agent = pywikibot.User(site, data['agent']['name'])
else:
notif.agent = None
if 'read' in data:
notif.read = pywikibot.Timestamp.fromtimestampformat(data['read'])
else:
notif.read = False
notif.content = data.get('*', None)
return notif
def mark_as_read(self):
"""Mark the notification as read."""
return self.site.notifications_mark_read(list=self.id)
Add revid to Notification object
Since most edit-related notifications include revid attributes,
we should include the revids in the Notification objects as we're
building them (if they exist).
Change-Id: Ifdb98e7c79729a1c2f7a5c4c4366e28071a48239
|
# -*- coding: utf-8 -*-
"""Classes and functions for working with the Echo extension."""
from __future__ import absolute_import, unicode_literals
import pywikibot
class Notification(object):
"""A notification issued by the Echo extension."""
def __init__(self, site):
"""Construct an empty Notification object."""
self.site = site
@classmethod
def fromJSON(cls, site, data):
"""
Construct a Notification object from JSON data returned by the API.
@rtype: Notification
"""
notif = cls(site)
notif.id = data['id'] # TODO: use numeric id ?
notif.type = data['type']
notif.category = data['category']
notif.timestamp = pywikibot.Timestamp.fromtimestampformat(data['timestamp']['mw'])
if 'title' in data and 'full' in data['title']:
notif.page = pywikibot.Page(site, data['title']['full'])
else:
notif.page = None
if 'agent' in data and 'name' in data['agent']:
notif.agent = pywikibot.User(site, data['agent']['name'])
else:
notif.agent = None
if 'read' in data:
notif.read = pywikibot.Timestamp.fromtimestampformat(data['read'])
else:
notif.read = False
notif.content = data.get('*', None)
notif.revid = data.get('revid', None)
return notif
def mark_as_read(self):
"""Mark the notification as read."""
return self.site.notifications_mark_read(list=self.id)
|
<commit_before># -*- coding: utf-8 -*-
"""Classes and functions for working with the Echo extension."""
from __future__ import absolute_import, unicode_literals
import pywikibot
class Notification(object):
"""A notification issued by the Echo extension."""
def __init__(self, site):
"""Construct an empty Notification object."""
self.site = site
@classmethod
def fromJSON(cls, site, data):
"""
Construct a Notification object from JSON data returned by the API.
@rtype: Notification
"""
notif = cls(site)
notif.id = data['id'] # TODO: use numeric id ?
notif.type = data['type']
notif.category = data['category']
notif.timestamp = pywikibot.Timestamp.fromtimestampformat(data['timestamp']['mw'])
if 'title' in data and 'full' in data['title']:
notif.page = pywikibot.Page(site, data['title']['full'])
else:
notif.page = None
if 'agent' in data and 'name' in data['agent']:
notif.agent = pywikibot.User(site, data['agent']['name'])
else:
notif.agent = None
if 'read' in data:
notif.read = pywikibot.Timestamp.fromtimestampformat(data['read'])
else:
notif.read = False
notif.content = data.get('*', None)
return notif
def mark_as_read(self):
"""Mark the notification as read."""
return self.site.notifications_mark_read(list=self.id)
<commit_msg>Add revid to Notification object
Since most edit-related notifications include revid attributes,
we should include the revids in the Notification objects as we're
building them (if they exist).
Change-Id: Ifdb98e7c79729a1c2f7a5c4c4366e28071a48239<commit_after>
|
# -*- coding: utf-8 -*-
"""Classes and functions for working with the Echo extension."""
from __future__ import absolute_import, unicode_literals
import pywikibot
class Notification(object):
"""A notification issued by the Echo extension."""
def __init__(self, site):
"""Construct an empty Notification object."""
self.site = site
@classmethod
def fromJSON(cls, site, data):
"""
Construct a Notification object from JSON data returned by the API.
@rtype: Notification
"""
notif = cls(site)
notif.id = data['id'] # TODO: use numeric id ?
notif.type = data['type']
notif.category = data['category']
notif.timestamp = pywikibot.Timestamp.fromtimestampformat(data['timestamp']['mw'])
if 'title' in data and 'full' in data['title']:
notif.page = pywikibot.Page(site, data['title']['full'])
else:
notif.page = None
if 'agent' in data and 'name' in data['agent']:
notif.agent = pywikibot.User(site, data['agent']['name'])
else:
notif.agent = None
if 'read' in data:
notif.read = pywikibot.Timestamp.fromtimestampformat(data['read'])
else:
notif.read = False
notif.content = data.get('*', None)
notif.revid = data.get('revid', None)
return notif
def mark_as_read(self):
"""Mark the notification as read."""
return self.site.notifications_mark_read(list=self.id)
|
# -*- coding: utf-8 -*-
"""Classes and functions for working with the Echo extension."""
from __future__ import absolute_import, unicode_literals
import pywikibot
class Notification(object):
"""A notification issued by the Echo extension."""
def __init__(self, site):
"""Construct an empty Notification object."""
self.site = site
@classmethod
def fromJSON(cls, site, data):
"""
Construct a Notification object from JSON data returned by the API.
@rtype: Notification
"""
notif = cls(site)
notif.id = data['id'] # TODO: use numeric id ?
notif.type = data['type']
notif.category = data['category']
notif.timestamp = pywikibot.Timestamp.fromtimestampformat(data['timestamp']['mw'])
if 'title' in data and 'full' in data['title']:
notif.page = pywikibot.Page(site, data['title']['full'])
else:
notif.page = None
if 'agent' in data and 'name' in data['agent']:
notif.agent = pywikibot.User(site, data['agent']['name'])
else:
notif.agent = None
if 'read' in data:
notif.read = pywikibot.Timestamp.fromtimestampformat(data['read'])
else:
notif.read = False
notif.content = data.get('*', None)
return notif
def mark_as_read(self):
"""Mark the notification as read."""
return self.site.notifications_mark_read(list=self.id)
Add revid to Notification object
Since most edit-related notifications include revid attributes,
we should include the revids in the Notification objects as we're
building them (if they exist).
Change-Id: Ifdb98e7c79729a1c2f7a5c4c4366e28071a48239# -*- coding: utf-8 -*-
"""Classes and functions for working with the Echo extension."""
from __future__ import absolute_import, unicode_literals
import pywikibot
class Notification(object):
"""A notification issued by the Echo extension."""
def __init__(self, site):
"""Construct an empty Notification object."""
self.site = site
@classmethod
def fromJSON(cls, site, data):
"""
Construct a Notification object from JSON data returned by the API.
@rtype: Notification
"""
notif = cls(site)
notif.id = data['id'] # TODO: use numeric id ?
notif.type = data['type']
notif.category = data['category']
notif.timestamp = pywikibot.Timestamp.fromtimestampformat(data['timestamp']['mw'])
if 'title' in data and 'full' in data['title']:
notif.page = pywikibot.Page(site, data['title']['full'])
else:
notif.page = None
if 'agent' in data and 'name' in data['agent']:
notif.agent = pywikibot.User(site, data['agent']['name'])
else:
notif.agent = None
if 'read' in data:
notif.read = pywikibot.Timestamp.fromtimestampformat(data['read'])
else:
notif.read = False
notif.content = data.get('*', None)
notif.revid = data.get('revid', None)
return notif
def mark_as_read(self):
"""Mark the notification as read."""
return self.site.notifications_mark_read(list=self.id)
|
<commit_before># -*- coding: utf-8 -*-
"""Classes and functions for working with the Echo extension."""
from __future__ import absolute_import, unicode_literals
import pywikibot
class Notification(object):
"""A notification issued by the Echo extension."""
def __init__(self, site):
"""Construct an empty Notification object."""
self.site = site
@classmethod
def fromJSON(cls, site, data):
"""
Construct a Notification object from JSON data returned by the API.
@rtype: Notification
"""
notif = cls(site)
notif.id = data['id'] # TODO: use numeric id ?
notif.type = data['type']
notif.category = data['category']
notif.timestamp = pywikibot.Timestamp.fromtimestampformat(data['timestamp']['mw'])
if 'title' in data and 'full' in data['title']:
notif.page = pywikibot.Page(site, data['title']['full'])
else:
notif.page = None
if 'agent' in data and 'name' in data['agent']:
notif.agent = pywikibot.User(site, data['agent']['name'])
else:
notif.agent = None
if 'read' in data:
notif.read = pywikibot.Timestamp.fromtimestampformat(data['read'])
else:
notif.read = False
notif.content = data.get('*', None)
return notif
def mark_as_read(self):
"""Mark the notification as read."""
return self.site.notifications_mark_read(list=self.id)
<commit_msg>Add revid to Notification object
Since most edit-related notifications include revid attributes,
we should include the revids in the Notification objects as we're
building them (if they exist).
Change-Id: Ifdb98e7c79729a1c2f7a5c4c4366e28071a48239<commit_after># -*- coding: utf-8 -*-
"""Classes and functions for working with the Echo extension."""
from __future__ import absolute_import, unicode_literals
import pywikibot
class Notification(object):
"""A notification issued by the Echo extension."""
def __init__(self, site):
"""Construct an empty Notification object."""
self.site = site
@classmethod
def fromJSON(cls, site, data):
"""
Construct a Notification object from JSON data returned by the API.
@rtype: Notification
"""
notif = cls(site)
notif.id = data['id'] # TODO: use numeric id ?
notif.type = data['type']
notif.category = data['category']
notif.timestamp = pywikibot.Timestamp.fromtimestampformat(data['timestamp']['mw'])
if 'title' in data and 'full' in data['title']:
notif.page = pywikibot.Page(site, data['title']['full'])
else:
notif.page = None
if 'agent' in data and 'name' in data['agent']:
notif.agent = pywikibot.User(site, data['agent']['name'])
else:
notif.agent = None
if 'read' in data:
notif.read = pywikibot.Timestamp.fromtimestampformat(data['read'])
else:
notif.read = False
notif.content = data.get('*', None)
notif.revid = data.get('revid', None)
return notif
def mark_as_read(self):
"""Mark the notification as read."""
return self.site.notifications_mark_read(list=self.id)
|
16ad5a3f17fdb96f2660019fabbd7bb787ae4ffb
|
pywsd/baseline.py
|
pywsd/baseline.py
|
#!/usr/bin/env python -*- coding: utf-8 -*-
#
# Python Word Sense Disambiguation (pyWSD): Baseline WSD
#
# Copyright (C) 2014-2020 alvations
# URL:
# For license information, see LICENSE.md
import random
custom_random = random.Random(0)
def random_sense(ambiguous_word, pos=None):
""" Returns a random sense. """
if pos is None:
return custom_random.choice(wn.synsets(ambiguous_word))
else:
return custom_random.choice(wn.synsets(ambiguous_word, pos))
def first_sense(ambiguous_word, pos=None):
""" Returns the first sense. """
if pos is None:
return wn.synsets(ambiguous_word)[0]
else:
return wn.synsets(ambiguous_word, pos)[0]
def max_lemma_count(ambiguous_word):
"""
Returns the sense with the highest lemma_name count.
The max_lemma_count() can be treated as a rough gauge for the
Most Frequent Sense (MFS), if no other sense annotated corpus is available.
NOTE: The lemma counts are from the Brown Corpus
"""
sense2lemmacounts = {}
for i in wn.synsets(ambiguous_word):
sense2lemmacounts[i] = sum(j.count() for j in i.lemmas())
return max(sense2lemmacounts, key=sense2lemmacounts.get)
|
#!/usr/bin/env python -*- coding: utf-8 -*-
#
# Python Word Sense Disambiguation (pyWSD): Baseline WSD
#
# Copyright (C) 2014-2020 alvations
# URL:
# For license information, see LICENSE.md
import random
custom_random = random.Random(0)
def random_sense(ambiguous_word, pos=None):
""" Returns a random sense. """
if pos is None:
return custom_random.choice(wn.synsets(ambiguous_word))
else:
return custom_random.choice(wn.synsets(ambiguous_word, pos))
def first_sense(ambiguous_word, pos=None):
""" Returns the first sense. """
if pos is None:
return wn.synsets(ambiguous_word)[0]
else:
return wn.synsets(ambiguous_word, pos)[0]
def max_lemma_count(ambiguous_word, pos=None):
"""
Returns the sense with the highest lemma_name count.
The max_lemma_count() can be treated as a rough gauge for the
Most Frequent Sense (MFS), if no other sense annotated corpus is available.
NOTE: The lemma counts are from the Brown Corpus
"""
sense2lemmacounts = {}
for i in wn.synsets(ambiguous_word, pos=None):
sense2lemmacounts[i] = sum(j.count() for j in i.lemmas())
return max(sense2lemmacounts, key=sense2lemmacounts.get)
|
Add pos for max_lemma_count also
|
Add pos for max_lemma_count also
|
Python
|
mit
|
alvations/pywsd,alvations/pywsd
|
#!/usr/bin/env python -*- coding: utf-8 -*-
#
# Python Word Sense Disambiguation (pyWSD): Baseline WSD
#
# Copyright (C) 2014-2020 alvations
# URL:
# For license information, see LICENSE.md
import random
custom_random = random.Random(0)
def random_sense(ambiguous_word, pos=None):
""" Returns a random sense. """
if pos is None:
return custom_random.choice(wn.synsets(ambiguous_word))
else:
return custom_random.choice(wn.synsets(ambiguous_word, pos))
def first_sense(ambiguous_word, pos=None):
""" Returns the first sense. """
if pos is None:
return wn.synsets(ambiguous_word)[0]
else:
return wn.synsets(ambiguous_word, pos)[0]
def max_lemma_count(ambiguous_word):
"""
Returns the sense with the highest lemma_name count.
The max_lemma_count() can be treated as a rough gauge for the
Most Frequent Sense (MFS), if no other sense annotated corpus is available.
NOTE: The lemma counts are from the Brown Corpus
"""
sense2lemmacounts = {}
for i in wn.synsets(ambiguous_word):
sense2lemmacounts[i] = sum(j.count() for j in i.lemmas())
return max(sense2lemmacounts, key=sense2lemmacounts.get)
Add pos for max_lemma_count also
|
#!/usr/bin/env python -*- coding: utf-8 -*-
#
# Python Word Sense Disambiguation (pyWSD): Baseline WSD
#
# Copyright (C) 2014-2020 alvations
# URL:
# For license information, see LICENSE.md
import random
custom_random = random.Random(0)
def random_sense(ambiguous_word, pos=None):
""" Returns a random sense. """
if pos is None:
return custom_random.choice(wn.synsets(ambiguous_word))
else:
return custom_random.choice(wn.synsets(ambiguous_word, pos))
def first_sense(ambiguous_word, pos=None):
""" Returns the first sense. """
if pos is None:
return wn.synsets(ambiguous_word)[0]
else:
return wn.synsets(ambiguous_word, pos)[0]
def max_lemma_count(ambiguous_word, pos=None):
"""
Returns the sense with the highest lemma_name count.
The max_lemma_count() can be treated as a rough gauge for the
Most Frequent Sense (MFS), if no other sense annotated corpus is available.
NOTE: The lemma counts are from the Brown Corpus
"""
sense2lemmacounts = {}
for i in wn.synsets(ambiguous_word, pos=None):
sense2lemmacounts[i] = sum(j.count() for j in i.lemmas())
return max(sense2lemmacounts, key=sense2lemmacounts.get)
|
<commit_before>#!/usr/bin/env python -*- coding: utf-8 -*-
#
# Python Word Sense Disambiguation (pyWSD): Baseline WSD
#
# Copyright (C) 2014-2020 alvations
# URL:
# For license information, see LICENSE.md
import random
custom_random = random.Random(0)
def random_sense(ambiguous_word, pos=None):
""" Returns a random sense. """
if pos is None:
return custom_random.choice(wn.synsets(ambiguous_word))
else:
return custom_random.choice(wn.synsets(ambiguous_word, pos))
def first_sense(ambiguous_word, pos=None):
""" Returns the first sense. """
if pos is None:
return wn.synsets(ambiguous_word)[0]
else:
return wn.synsets(ambiguous_word, pos)[0]
def max_lemma_count(ambiguous_word):
"""
Returns the sense with the highest lemma_name count.
The max_lemma_count() can be treated as a rough gauge for the
Most Frequent Sense (MFS), if no other sense annotated corpus is available.
NOTE: The lemma counts are from the Brown Corpus
"""
sense2lemmacounts = {}
for i in wn.synsets(ambiguous_word):
sense2lemmacounts[i] = sum(j.count() for j in i.lemmas())
return max(sense2lemmacounts, key=sense2lemmacounts.get)
<commit_msg>Add pos for max_lemma_count also<commit_after>
|
#!/usr/bin/env python -*- coding: utf-8 -*-
#
# Python Word Sense Disambiguation (pyWSD): Baseline WSD
#
# Copyright (C) 2014-2020 alvations
# URL:
# For license information, see LICENSE.md
import random
custom_random = random.Random(0)
def random_sense(ambiguous_word, pos=None):
""" Returns a random sense. """
if pos is None:
return custom_random.choice(wn.synsets(ambiguous_word))
else:
return custom_random.choice(wn.synsets(ambiguous_word, pos))
def first_sense(ambiguous_word, pos=None):
""" Returns the first sense. """
if pos is None:
return wn.synsets(ambiguous_word)[0]
else:
return wn.synsets(ambiguous_word, pos)[0]
def max_lemma_count(ambiguous_word, pos=None):
"""
Returns the sense with the highest lemma_name count.
The max_lemma_count() can be treated as a rough gauge for the
Most Frequent Sense (MFS), if no other sense annotated corpus is available.
NOTE: The lemma counts are from the Brown Corpus
"""
sense2lemmacounts = {}
for i in wn.synsets(ambiguous_word, pos=None):
sense2lemmacounts[i] = sum(j.count() for j in i.lemmas())
return max(sense2lemmacounts, key=sense2lemmacounts.get)
|
#!/usr/bin/env python -*- coding: utf-8 -*-
#
# Python Word Sense Disambiguation (pyWSD): Baseline WSD
#
# Copyright (C) 2014-2020 alvations
# URL:
# For license information, see LICENSE.md
import random
custom_random = random.Random(0)
def random_sense(ambiguous_word, pos=None):
""" Returns a random sense. """
if pos is None:
return custom_random.choice(wn.synsets(ambiguous_word))
else:
return custom_random.choice(wn.synsets(ambiguous_word, pos))
def first_sense(ambiguous_word, pos=None):
""" Returns the first sense. """
if pos is None:
return wn.synsets(ambiguous_word)[0]
else:
return wn.synsets(ambiguous_word, pos)[0]
def max_lemma_count(ambiguous_word):
"""
Returns the sense with the highest lemma_name count.
The max_lemma_count() can be treated as a rough gauge for the
Most Frequent Sense (MFS), if no other sense annotated corpus is available.
NOTE: The lemma counts are from the Brown Corpus
"""
sense2lemmacounts = {}
for i in wn.synsets(ambiguous_word):
sense2lemmacounts[i] = sum(j.count() for j in i.lemmas())
return max(sense2lemmacounts, key=sense2lemmacounts.get)
Add pos for max_lemma_count also#!/usr/bin/env python -*- coding: utf-8 -*-
#
# Python Word Sense Disambiguation (pyWSD): Baseline WSD
#
# Copyright (C) 2014-2020 alvations
# URL:
# For license information, see LICENSE.md
import random
custom_random = random.Random(0)
def random_sense(ambiguous_word, pos=None):
""" Returns a random sense. """
if pos is None:
return custom_random.choice(wn.synsets(ambiguous_word))
else:
return custom_random.choice(wn.synsets(ambiguous_word, pos))
def first_sense(ambiguous_word, pos=None):
""" Returns the first sense. """
if pos is None:
return wn.synsets(ambiguous_word)[0]
else:
return wn.synsets(ambiguous_word, pos)[0]
def max_lemma_count(ambiguous_word, pos=None):
"""
Returns the sense with the highest lemma_name count.
The max_lemma_count() can be treated as a rough gauge for the
Most Frequent Sense (MFS), if no other sense annotated corpus is available.
NOTE: The lemma counts are from the Brown Corpus
"""
sense2lemmacounts = {}
for i in wn.synsets(ambiguous_word, pos=None):
sense2lemmacounts[i] = sum(j.count() for j in i.lemmas())
return max(sense2lemmacounts, key=sense2lemmacounts.get)
|
<commit_before>#!/usr/bin/env python -*- coding: utf-8 -*-
#
# Python Word Sense Disambiguation (pyWSD): Baseline WSD
#
# Copyright (C) 2014-2020 alvations
# URL:
# For license information, see LICENSE.md
import random
custom_random = random.Random(0)
def random_sense(ambiguous_word, pos=None):
""" Returns a random sense. """
if pos is None:
return custom_random.choice(wn.synsets(ambiguous_word))
else:
return custom_random.choice(wn.synsets(ambiguous_word, pos))
def first_sense(ambiguous_word, pos=None):
""" Returns the first sense. """
if pos is None:
return wn.synsets(ambiguous_word)[0]
else:
return wn.synsets(ambiguous_word, pos)[0]
def max_lemma_count(ambiguous_word):
"""
Returns the sense with the highest lemma_name count.
The max_lemma_count() can be treated as a rough gauge for the
Most Frequent Sense (MFS), if no other sense annotated corpus is available.
NOTE: The lemma counts are from the Brown Corpus
"""
sense2lemmacounts = {}
for i in wn.synsets(ambiguous_word):
sense2lemmacounts[i] = sum(j.count() for j in i.lemmas())
return max(sense2lemmacounts, key=sense2lemmacounts.get)
<commit_msg>Add pos for max_lemma_count also<commit_after>#!/usr/bin/env python -*- coding: utf-8 -*-
#
# Python Word Sense Disambiguation (pyWSD): Baseline WSD
#
# Copyright (C) 2014-2020 alvations
# URL:
# For license information, see LICENSE.md
import random
custom_random = random.Random(0)
def random_sense(ambiguous_word, pos=None):
""" Returns a random sense. """
if pos is None:
return custom_random.choice(wn.synsets(ambiguous_word))
else:
return custom_random.choice(wn.synsets(ambiguous_word, pos))
def first_sense(ambiguous_word, pos=None):
""" Returns the first sense. """
if pos is None:
return wn.synsets(ambiguous_word)[0]
else:
return wn.synsets(ambiguous_word, pos)[0]
def max_lemma_count(ambiguous_word, pos=None):
"""
Returns the sense with the highest lemma_name count.
The max_lemma_count() can be treated as a rough gauge for the
Most Frequent Sense (MFS), if no other sense annotated corpus is available.
NOTE: The lemma counts are from the Brown Corpus
"""
sense2lemmacounts = {}
for i in wn.synsets(ambiguous_word, pos=None):
sense2lemmacounts[i] = sum(j.count() for j in i.lemmas())
return max(sense2lemmacounts, key=sense2lemmacounts.get)
|
bfedd0eb87ad5bdf937a1f5f3e143a8e538ce86f
|
rafem/__init__.py
|
rafem/__init__.py
|
"""River Avulsion Module."""
from .riverbmi import BmiRiverModule
from .rivermodule import RiverModule
__all__ = ['BmiRiverModule', 'RiverModule']
|
"""River Avulsion Module."""
from .riverbmi import BmiRiverModule
from .rivermodule import rivermodule
__all__ = ['BmiRiverModule', 'rivermodule']
|
Rename package from avulsion to rafem.
|
Rename package from avulsion to rafem.
|
Python
|
mit
|
katmratliff/avulsion-bmi,mcflugen/avulsion-bmi
|
"""River Avulsion Module."""
from .riverbmi import BmiRiverModule
from .rivermodule import RiverModule
__all__ = ['BmiRiverModule', 'RiverModule']
Rename package from avulsion to rafem.
|
"""River Avulsion Module."""
from .riverbmi import BmiRiverModule
from .rivermodule import rivermodule
__all__ = ['BmiRiverModule', 'rivermodule']
|
<commit_before>"""River Avulsion Module."""
from .riverbmi import BmiRiverModule
from .rivermodule import RiverModule
__all__ = ['BmiRiverModule', 'RiverModule']
<commit_msg>Rename package from avulsion to rafem.<commit_after>
|
"""River Avulsion Module."""
from .riverbmi import BmiRiverModule
from .rivermodule import rivermodule
__all__ = ['BmiRiverModule', 'rivermodule']
|
"""River Avulsion Module."""
from .riverbmi import BmiRiverModule
from .rivermodule import RiverModule
__all__ = ['BmiRiverModule', 'RiverModule']
Rename package from avulsion to rafem."""River Avulsion Module."""
from .riverbmi import BmiRiverModule
from .rivermodule import rivermodule
__all__ = ['BmiRiverModule', 'rivermodule']
|
<commit_before>"""River Avulsion Module."""
from .riverbmi import BmiRiverModule
from .rivermodule import RiverModule
__all__ = ['BmiRiverModule', 'RiverModule']
<commit_msg>Rename package from avulsion to rafem.<commit_after>"""River Avulsion Module."""
from .riverbmi import BmiRiverModule
from .rivermodule import rivermodule
__all__ = ['BmiRiverModule', 'rivermodule']
|
dfbe71a6d6a1e8591b1a6d7d5baeda20f2e40c47
|
indra/explanation/model_checker/__init__.py
|
indra/explanation/model_checker/__init__.py
|
from .model_checker import ModelChecker, PathResult, PathMetric, get_path_iter
from .pysb import PysbModelChecker
from .signed_graph import SignedGraphModelChecker
from .unsigned_graph import UnsignedGraphModelChecker
from .pybel import PybelModelChecker
|
from .model_checker import ModelChecker, PathResult, PathMetric, get_path_iter
from .pysb import PysbModelChecker
from .signed_graph import SignedGraphModelChecker
from .unsigned_graph import UnsignedGraphModelChecker
from .pybel import PybelModelChecker
from .model_checker import signed_edges_to_signed_nodes, prune_signed_nodes
|
Make function top level importable
|
Make function top level importable
|
Python
|
bsd-2-clause
|
bgyori/indra,johnbachman/belpy,sorgerlab/belpy,johnbachman/indra,sorgerlab/belpy,sorgerlab/indra,sorgerlab/indra,bgyori/indra,johnbachman/indra,johnbachman/belpy,johnbachman/indra,bgyori/indra,sorgerlab/indra,sorgerlab/belpy,johnbachman/belpy
|
from .model_checker import ModelChecker, PathResult, PathMetric, get_path_iter
from .pysb import PysbModelChecker
from .signed_graph import SignedGraphModelChecker
from .unsigned_graph import UnsignedGraphModelChecker
from .pybel import PybelModelChecker
Make function top level importable
|
from .model_checker import ModelChecker, PathResult, PathMetric, get_path_iter
from .pysb import PysbModelChecker
from .signed_graph import SignedGraphModelChecker
from .unsigned_graph import UnsignedGraphModelChecker
from .pybel import PybelModelChecker
from .model_checker import signed_edges_to_signed_nodes, prune_signed_nodes
|
<commit_before>from .model_checker import ModelChecker, PathResult, PathMetric, get_path_iter
from .pysb import PysbModelChecker
from .signed_graph import SignedGraphModelChecker
from .unsigned_graph import UnsignedGraphModelChecker
from .pybel import PybelModelChecker
<commit_msg>Make function top level importable<commit_after>
|
from .model_checker import ModelChecker, PathResult, PathMetric, get_path_iter
from .pysb import PysbModelChecker
from .signed_graph import SignedGraphModelChecker
from .unsigned_graph import UnsignedGraphModelChecker
from .pybel import PybelModelChecker
from .model_checker import signed_edges_to_signed_nodes, prune_signed_nodes
|
from .model_checker import ModelChecker, PathResult, PathMetric, get_path_iter
from .pysb import PysbModelChecker
from .signed_graph import SignedGraphModelChecker
from .unsigned_graph import UnsignedGraphModelChecker
from .pybel import PybelModelChecker
Make function top level importablefrom .model_checker import ModelChecker, PathResult, PathMetric, get_path_iter
from .pysb import PysbModelChecker
from .signed_graph import SignedGraphModelChecker
from .unsigned_graph import UnsignedGraphModelChecker
from .pybel import PybelModelChecker
from .model_checker import signed_edges_to_signed_nodes, prune_signed_nodes
|
<commit_before>from .model_checker import ModelChecker, PathResult, PathMetric, get_path_iter
from .pysb import PysbModelChecker
from .signed_graph import SignedGraphModelChecker
from .unsigned_graph import UnsignedGraphModelChecker
from .pybel import PybelModelChecker
<commit_msg>Make function top level importable<commit_after>from .model_checker import ModelChecker, PathResult, PathMetric, get_path_iter
from .pysb import PysbModelChecker
from .signed_graph import SignedGraphModelChecker
from .unsigned_graph import UnsignedGraphModelChecker
from .pybel import PybelModelChecker
from .model_checker import signed_edges_to_signed_nodes, prune_signed_nodes
|
d0e5c03fe37d89747e870c57312701df0e2949c0
|
ulp/urlextract.py
|
ulp/urlextract.py
|
# coding=utf-8
import re
import os
import sys
# Regex for matching URLs
# See https://mathiasbynens.be/demo/url-regex
url_regex = re.compile(r"((https?|ftp)://(-\.)?([^\s/?\.#-]+\.?)+(/[^\s]*)?)")
ansi_escape_regex = re.compile(r"(\x9B|\x1B\[)[0-?]*[ -/]*[@-~]", re.IGNORECASE)
INPUT_FILE = os.path.join(os.getenv('HOME'), '.cache', 'ulp', 'links')
def escape_ansi(text):
return ansi_escape_regex.sub("", text)
def parse_stdin():
stdin = [escape_ansi(line.strip()) for line in sys.stdin]
print(os.linesep.join(stdin).strip(), file=sys.stderr)
return parse_input(os.linesep.join(stdin))
def parse_input(text):
matches = url_regex.findall(text.strip())
return [result[0] for result in matches]
def read_inputfile():
with open(INPUT_FILE) as f:
return [l.strip() for l in f.readlines()]
def main():
#If we are not being piped, exit
if sys.stdin.isatty():
sys.exit(1)
result = parse_stdin()
for url in result:
print(url)
if __name__ == '__main__':
main()
|
# coding=utf-8
import re
import os
import sys
# Regex for matching URLs
# See https://mathiasbynens.be/demo/url-regex
url_regex = re.compile(r"((https?|ftp)://(-\.)?([^\s/?\.#-]+\.?)+(/[^\s]*)?)")
ansi_escape_regex = re.compile(r"(\x9B|\x1B\[)[0-?]*[ -/]*[@-~]", re.IGNORECASE)
INPUT_FILE = os.path.join(os.getenv('HOME'), '.cache', 'ulp', 'links')
def escape_ansi(text):
return ansi_escape_regex.sub("", text)
def parse_stdin():
lines = [line.strip() for line in sys.stdin]
print(os.linesep.join(lines).strip(), file=sys.stderr)
return parse_input(os.linesep.join(lines))
def parse_input(text):
matches = url_regex.findall(escape_ansi(text.strip()))
return [result[0] for result in matches]
def read_inputfile():
with open(INPUT_FILE) as f:
return [l.strip() for l in f.readlines()]
def main():
#If we are not being piped, exit
if sys.stdin.isatty():
sys.exit(1)
result = parse_stdin()
for url in result:
print(url)
if __name__ == '__main__':
main()
|
Move ansi_escape to generic function
|
Move ansi_escape to generic function
|
Python
|
mit
|
victal/ulp,victal/ulp
|
# coding=utf-8
import re
import os
import sys
# Regex for matching URLs
# See https://mathiasbynens.be/demo/url-regex
url_regex = re.compile(r"((https?|ftp)://(-\.)?([^\s/?\.#-]+\.?)+(/[^\s]*)?)")
ansi_escape_regex = re.compile(r"(\x9B|\x1B\[)[0-?]*[ -/]*[@-~]", re.IGNORECASE)
INPUT_FILE = os.path.join(os.getenv('HOME'), '.cache', 'ulp', 'links')
def escape_ansi(text):
return ansi_escape_regex.sub("", text)
def parse_stdin():
stdin = [escape_ansi(line.strip()) for line in sys.stdin]
print(os.linesep.join(stdin).strip(), file=sys.stderr)
return parse_input(os.linesep.join(stdin))
def parse_input(text):
matches = url_regex.findall(text.strip())
return [result[0] for result in matches]
def read_inputfile():
with open(INPUT_FILE) as f:
return [l.strip() for l in f.readlines()]
def main():
#If we are not being piped, exit
if sys.stdin.isatty():
sys.exit(1)
result = parse_stdin()
for url in result:
print(url)
if __name__ == '__main__':
main()
Move ansi_escape to generic function
|
# coding=utf-8
import re
import os
import sys
# Regex for matching URLs
# See https://mathiasbynens.be/demo/url-regex
url_regex = re.compile(r"((https?|ftp)://(-\.)?([^\s/?\.#-]+\.?)+(/[^\s]*)?)")
ansi_escape_regex = re.compile(r"(\x9B|\x1B\[)[0-?]*[ -/]*[@-~]", re.IGNORECASE)
INPUT_FILE = os.path.join(os.getenv('HOME'), '.cache', 'ulp', 'links')
def escape_ansi(text):
return ansi_escape_regex.sub("", text)
def parse_stdin():
lines = [line.strip() for line in sys.stdin]
print(os.linesep.join(lines).strip(), file=sys.stderr)
return parse_input(os.linesep.join(lines))
def parse_input(text):
matches = url_regex.findall(escape_ansi(text.strip()))
return [result[0] for result in matches]
def read_inputfile():
with open(INPUT_FILE) as f:
return [l.strip() for l in f.readlines()]
def main():
#If we are not being piped, exit
if sys.stdin.isatty():
sys.exit(1)
result = parse_stdin()
for url in result:
print(url)
if __name__ == '__main__':
main()
|
<commit_before># coding=utf-8
import re
import os
import sys
# Regex for matching URLs
# See https://mathiasbynens.be/demo/url-regex
url_regex = re.compile(r"((https?|ftp)://(-\.)?([^\s/?\.#-]+\.?)+(/[^\s]*)?)")
ansi_escape_regex = re.compile(r"(\x9B|\x1B\[)[0-?]*[ -/]*[@-~]", re.IGNORECASE)
INPUT_FILE = os.path.join(os.getenv('HOME'), '.cache', 'ulp', 'links')
def escape_ansi(text):
return ansi_escape_regex.sub("", text)
def parse_stdin():
stdin = [escape_ansi(line.strip()) for line in sys.stdin]
print(os.linesep.join(stdin).strip(), file=sys.stderr)
return parse_input(os.linesep.join(stdin))
def parse_input(text):
matches = url_regex.findall(text.strip())
return [result[0] for result in matches]
def read_inputfile():
with open(INPUT_FILE) as f:
return [l.strip() for l in f.readlines()]
def main():
#If we are not being piped, exit
if sys.stdin.isatty():
sys.exit(1)
result = parse_stdin()
for url in result:
print(url)
if __name__ == '__main__':
main()
<commit_msg>Move ansi_escape to generic function<commit_after>
|
# coding=utf-8
import re
import os
import sys
# Regex for matching URLs
# See https://mathiasbynens.be/demo/url-regex
url_regex = re.compile(r"((https?|ftp)://(-\.)?([^\s/?\.#-]+\.?)+(/[^\s]*)?)")
ansi_escape_regex = re.compile(r"(\x9B|\x1B\[)[0-?]*[ -/]*[@-~]", re.IGNORECASE)
INPUT_FILE = os.path.join(os.getenv('HOME'), '.cache', 'ulp', 'links')
def escape_ansi(text):
return ansi_escape_regex.sub("", text)
def parse_stdin():
lines = [line.strip() for line in sys.stdin]
print(os.linesep.join(lines).strip(), file=sys.stderr)
return parse_input(os.linesep.join(lines))
def parse_input(text):
matches = url_regex.findall(escape_ansi(text.strip()))
return [result[0] for result in matches]
def read_inputfile():
with open(INPUT_FILE) as f:
return [l.strip() for l in f.readlines()]
def main():
#If we are not being piped, exit
if sys.stdin.isatty():
sys.exit(1)
result = parse_stdin()
for url in result:
print(url)
if __name__ == '__main__':
main()
|
# coding=utf-8
import re
import os
import sys
# Regex for matching URLs
# See https://mathiasbynens.be/demo/url-regex
url_regex = re.compile(r"((https?|ftp)://(-\.)?([^\s/?\.#-]+\.?)+(/[^\s]*)?)")
ansi_escape_regex = re.compile(r"(\x9B|\x1B\[)[0-?]*[ -/]*[@-~]", re.IGNORECASE)
INPUT_FILE = os.path.join(os.getenv('HOME'), '.cache', 'ulp', 'links')
def escape_ansi(text):
return ansi_escape_regex.sub("", text)
def parse_stdin():
stdin = [escape_ansi(line.strip()) for line in sys.stdin]
print(os.linesep.join(stdin).strip(), file=sys.stderr)
return parse_input(os.linesep.join(stdin))
def parse_input(text):
matches = url_regex.findall(text.strip())
return [result[0] for result in matches]
def read_inputfile():
with open(INPUT_FILE) as f:
return [l.strip() for l in f.readlines()]
def main():
#If we are not being piped, exit
if sys.stdin.isatty():
sys.exit(1)
result = parse_stdin()
for url in result:
print(url)
if __name__ == '__main__':
main()
Move ansi_escape to generic function# coding=utf-8
import re
import os
import sys
# Regex for matching URLs
# See https://mathiasbynens.be/demo/url-regex
url_regex = re.compile(r"((https?|ftp)://(-\.)?([^\s/?\.#-]+\.?)+(/[^\s]*)?)")
ansi_escape_regex = re.compile(r"(\x9B|\x1B\[)[0-?]*[ -/]*[@-~]", re.IGNORECASE)
INPUT_FILE = os.path.join(os.getenv('HOME'), '.cache', 'ulp', 'links')
def escape_ansi(text):
return ansi_escape_regex.sub("", text)
def parse_stdin():
lines = [line.strip() for line in sys.stdin]
print(os.linesep.join(lines).strip(), file=sys.stderr)
return parse_input(os.linesep.join(lines))
def parse_input(text):
matches = url_regex.findall(escape_ansi(text.strip()))
return [result[0] for result in matches]
def read_inputfile():
with open(INPUT_FILE) as f:
return [l.strip() for l in f.readlines()]
def main():
#If we are not being piped, exit
if sys.stdin.isatty():
sys.exit(1)
result = parse_stdin()
for url in result:
print(url)
if __name__ == '__main__':
main()
|
<commit_before># coding=utf-8
import re
import os
import sys
# Regex for matching URLs
# See https://mathiasbynens.be/demo/url-regex
url_regex = re.compile(r"((https?|ftp)://(-\.)?([^\s/?\.#-]+\.?)+(/[^\s]*)?)")
ansi_escape_regex = re.compile(r"(\x9B|\x1B\[)[0-?]*[ -/]*[@-~]", re.IGNORECASE)
INPUT_FILE = os.path.join(os.getenv('HOME'), '.cache', 'ulp', 'links')
def escape_ansi(text):
return ansi_escape_regex.sub("", text)
def parse_stdin():
stdin = [escape_ansi(line.strip()) for line in sys.stdin]
print(os.linesep.join(stdin).strip(), file=sys.stderr)
return parse_input(os.linesep.join(stdin))
def parse_input(text):
matches = url_regex.findall(text.strip())
return [result[0] for result in matches]
def read_inputfile():
with open(INPUT_FILE) as f:
return [l.strip() for l in f.readlines()]
def main():
#If we are not being piped, exit
if sys.stdin.isatty():
sys.exit(1)
result = parse_stdin()
for url in result:
print(url)
if __name__ == '__main__':
main()
<commit_msg>Move ansi_escape to generic function<commit_after># coding=utf-8
import re
import os
import sys
# Regex for matching URLs
# See https://mathiasbynens.be/demo/url-regex
url_regex = re.compile(r"((https?|ftp)://(-\.)?([^\s/?\.#-]+\.?)+(/[^\s]*)?)")
ansi_escape_regex = re.compile(r"(\x9B|\x1B\[)[0-?]*[ -/]*[@-~]", re.IGNORECASE)
INPUT_FILE = os.path.join(os.getenv('HOME'), '.cache', 'ulp', 'links')
def escape_ansi(text):
return ansi_escape_regex.sub("", text)
def parse_stdin():
lines = [line.strip() for line in sys.stdin]
print(os.linesep.join(lines).strip(), file=sys.stderr)
return parse_input(os.linesep.join(lines))
def parse_input(text):
matches = url_regex.findall(escape_ansi(text.strip()))
return [result[0] for result in matches]
def read_inputfile():
with open(INPUT_FILE) as f:
return [l.strip() for l in f.readlines()]
def main():
#If we are not being piped, exit
if sys.stdin.isatty():
sys.exit(1)
result = parse_stdin()
for url in result:
print(url)
if __name__ == '__main__':
main()
|
b4f4e870877e4eae8e7dbf2dd9c961e5eec6980d
|
devtools/ci/push-docs-to-s3.py
|
devtools/ci/push-docs-to-s3.py
|
import os
import pip
import tempfile
import subprocess
import openpathsampling.version

# Target S3 bucket that serves the documentation site.
BUCKET_NAME = 'openpathsampling.org'

# Development builds publish under 'latest'; tagged releases under their version.
if not openpathsampling.version.release:
    PREFIX = 'latest'
else:
    PREFIX = openpathsampling.version.short_version

# s3cmd is invoked as an external command below, so fail fast if it is missing.
if not any(d.project_name == 's3cmd' for d in pip.get_installed_distributions()):
    raise ImportError('The s3cmd package is required. try $ pip install s3cmd')

# The secret key is available as a secure environment variable
# on travis-ci to push the build documentation to Amazon S3.
with tempfile.NamedTemporaryFile('w') as f:
    # Write a throwaway s3cmd config holding the AWS credentials; the temp
    # file is deleted automatically when the with-block exits.
    f.write('''[default]
access_key = {AWS_ACCESS_KEY_ID}
secret_key = {AWS_SECRET_ACCESS_KEY}
'''.format(**os.environ))
    f.flush()
    # Sync the built HTML docs into the version-specific prefix.
    template = ('s3cmd --config {config} '
                'sync docs/_build/html/ s3://{bucket}/{prefix}/')
    cmd = template.format(
        config=f.name,
        bucket=BUCKET_NAME,
        prefix=PREFIX)
    # NOTE(review): the exit status is captured but never checked -- a failed
    # sync will not fail the CI job; confirm this is intended.
    return_val = subprocess.call(cmd.split())

    # Sync index file.
    template = ('s3cmd --config {config} '
                'sync devtools/ci/index.html s3://{bucket}/')
    cmd = template.format(
        config=f.name,
        bucket=BUCKET_NAME)
    return_val = subprocess.call(cmd.split())
|
import os
import sys
import pip
import tempfile
import subprocess
import openpathsampling.version

# Target S3 bucket that serves the documentation site.
BUCKET_NAME = 'openpathsampling.org'

# Development builds publish under 'latest'; tagged releases under their version.
if not openpathsampling.version.release:
    PREFIX = 'latest'
else:
    PREFIX = openpathsampling.version.short_version

# s3cmd is invoked as an external command below, so fail fast if it is missing.
if not any(d.project_name == 's3cmd' for d in pip.get_installed_distributions()):
    raise ImportError('The s3cmd package is required. try $ pip install s3cmd')

# The secret key is available as a secure environment variable
# on travis-ci to push the build documentation to Amazon S3.
with tempfile.NamedTemporaryFile('w') as f:
    # Write a throwaway s3cmd config holding the AWS credentials; the temp
    # file is deleted automatically when the with-block exits.
    f.write('''[default]
access_key = {AWS_ACCESS_KEY_ID}
secret_key = {AWS_SECRET_ACCESS_KEY}
'''.format(**os.environ))
    f.flush()

    # Sync the built HTML docs into the version-specific prefix.
    template = ('s3cmd --config {config} '
                'sync docs/_build/html/ s3://{bucket}/{prefix}/ '
                '--no-mime-magic --guess-mime-type')
    cmd = template.format(
        config=f.name,
        bucket=BUCKET_NAME,
        prefix=PREFIX)
    return_val = subprocess.call(cmd.split())
    if return_val != 0:
        # Bug fix: the exit status was previously ignored, so a failed
        # upload still passed CI. Propagate the failure instead.
        sys.exit(return_val)

    # Sync index file.
    template = ('s3cmd --config {config} '
                'sync devtools/ci/index.html s3://{bucket}/')
    cmd = template.format(
        config=f.name,
        bucket=BUCKET_NAME)
    return_val = subprocess.call(cmd.split())
    if return_val != 0:
        sys.exit(return_val)
|
Add MIME-handling options to s3cmd
|
Add MIME-handling options to s3cmd
|
Python
|
mit
|
openpathsampling/openpathsampling,choderalab/openpathsampling,choderalab/openpathsampling,dwhswenson/openpathsampling,openpathsampling/openpathsampling,choderalab/openpathsampling,dwhswenson/openpathsampling,dwhswenson/openpathsampling,openpathsampling/openpathsampling,dwhswenson/openpathsampling,openpathsampling/openpathsampling
|
import os
import pip
import tempfile
import subprocess
import openpathsampling.version
BUCKET_NAME = 'openpathsampling.org'
if not openpathsampling.version.release:
PREFIX = 'latest'
else:
PREFIX = openpathsampling.version.short_version
if not any(d.project_name == 's3cmd' for d in pip.get_installed_distributions()):
raise ImportError('The s3cmd package is required. try $ pip install s3cmd')
# The secret key is available as a secure environment variable
# on travis-ci to push the build documentation to Amazon S3.
with tempfile.NamedTemporaryFile('w') as f:
f.write('''[default]
access_key = {AWS_ACCESS_KEY_ID}
secret_key = {AWS_SECRET_ACCESS_KEY}
'''.format(**os.environ))
f.flush()
template = ('s3cmd --config {config} '
'sync docs/_build/html/ s3://{bucket}/{prefix}/')
cmd = template.format(
config=f.name,
bucket=BUCKET_NAME,
prefix=PREFIX)
return_val = subprocess.call(cmd.split())
# Sync index file.
template = ('s3cmd --config {config} '
'sync devtools/ci/index.html s3://{bucket}/')
cmd = template.format(
config=f.name,
bucket=BUCKET_NAME)
return_val = subprocess.call(cmd.split())
Add MIME-handling options to s3cmd
|
import os
import pip
import tempfile
import subprocess
import openpathsampling.version
BUCKET_NAME = 'openpathsampling.org'
if not openpathsampling.version.release:
PREFIX = 'latest'
else:
PREFIX = openpathsampling.version.short_version
if not any(d.project_name == 's3cmd' for d in pip.get_installed_distributions()):
raise ImportError('The s3cmd package is required. try $ pip install s3cmd')
# The secret key is available as a secure environment variable
# on travis-ci to push the build documentation to Amazon S3.
with tempfile.NamedTemporaryFile('w') as f:
f.write('''[default]
access_key = {AWS_ACCESS_KEY_ID}
secret_key = {AWS_SECRET_ACCESS_KEY}
'''.format(**os.environ))
f.flush()
template = ('s3cmd --config {config} '
'sync docs/_build/html/ s3://{bucket}/{prefix}/ '
'--no-mime-magic --guess-mime-type')
cmd = template.format(
config=f.name,
bucket=BUCKET_NAME,
prefix=PREFIX)
return_val = subprocess.call(cmd.split())
# Sync index file.
template = ('s3cmd --config {config} '
'sync devtools/ci/index.html s3://{bucket}/')
cmd = template.format(
config=f.name,
bucket=BUCKET_NAME)
return_val = subprocess.call(cmd.split())
|
<commit_before>import os
import pip
import tempfile
import subprocess
import openpathsampling.version
BUCKET_NAME = 'openpathsampling.org'
if not openpathsampling.version.release:
PREFIX = 'latest'
else:
PREFIX = openpathsampling.version.short_version
if not any(d.project_name == 's3cmd' for d in pip.get_installed_distributions()):
raise ImportError('The s3cmd package is required. try $ pip install s3cmd')
# The secret key is available as a secure environment variable
# on travis-ci to push the build documentation to Amazon S3.
with tempfile.NamedTemporaryFile('w') as f:
f.write('''[default]
access_key = {AWS_ACCESS_KEY_ID}
secret_key = {AWS_SECRET_ACCESS_KEY}
'''.format(**os.environ))
f.flush()
template = ('s3cmd --config {config} '
'sync docs/_build/html/ s3://{bucket}/{prefix}/')
cmd = template.format(
config=f.name,
bucket=BUCKET_NAME,
prefix=PREFIX)
return_val = subprocess.call(cmd.split())
# Sync index file.
template = ('s3cmd --config {config} '
'sync devtools/ci/index.html s3://{bucket}/')
cmd = template.format(
config=f.name,
bucket=BUCKET_NAME)
return_val = subprocess.call(cmd.split())
<commit_msg>Add MIME-handling options to s3cmd<commit_after>
|
import os
import pip
import tempfile
import subprocess
import openpathsampling.version
BUCKET_NAME = 'openpathsampling.org'
if not openpathsampling.version.release:
PREFIX = 'latest'
else:
PREFIX = openpathsampling.version.short_version
if not any(d.project_name == 's3cmd' for d in pip.get_installed_distributions()):
raise ImportError('The s3cmd package is required. try $ pip install s3cmd')
# The secret key is available as a secure environment variable
# on travis-ci to push the build documentation to Amazon S3.
with tempfile.NamedTemporaryFile('w') as f:
f.write('''[default]
access_key = {AWS_ACCESS_KEY_ID}
secret_key = {AWS_SECRET_ACCESS_KEY}
'''.format(**os.environ))
f.flush()
template = ('s3cmd --config {config} '
'sync docs/_build/html/ s3://{bucket}/{prefix}/ '
'--no-mime-magic --guess-mime-type')
cmd = template.format(
config=f.name,
bucket=BUCKET_NAME,
prefix=PREFIX)
return_val = subprocess.call(cmd.split())
# Sync index file.
template = ('s3cmd --config {config} '
'sync devtools/ci/index.html s3://{bucket}/')
cmd = template.format(
config=f.name,
bucket=BUCKET_NAME)
return_val = subprocess.call(cmd.split())
|
import os
import pip
import tempfile
import subprocess
import openpathsampling.version
BUCKET_NAME = 'openpathsampling.org'
if not openpathsampling.version.release:
PREFIX = 'latest'
else:
PREFIX = openpathsampling.version.short_version
if not any(d.project_name == 's3cmd' for d in pip.get_installed_distributions()):
raise ImportError('The s3cmd package is required. try $ pip install s3cmd')
# The secret key is available as a secure environment variable
# on travis-ci to push the build documentation to Amazon S3.
with tempfile.NamedTemporaryFile('w') as f:
f.write('''[default]
access_key = {AWS_ACCESS_KEY_ID}
secret_key = {AWS_SECRET_ACCESS_KEY}
'''.format(**os.environ))
f.flush()
template = ('s3cmd --config {config} '
'sync docs/_build/html/ s3://{bucket}/{prefix}/')
cmd = template.format(
config=f.name,
bucket=BUCKET_NAME,
prefix=PREFIX)
return_val = subprocess.call(cmd.split())
# Sync index file.
template = ('s3cmd --config {config} '
'sync devtools/ci/index.html s3://{bucket}/')
cmd = template.format(
config=f.name,
bucket=BUCKET_NAME)
return_val = subprocess.call(cmd.split())
Add MIME-handling options to s3cmdimport os
import pip
import tempfile
import subprocess
import openpathsampling.version
BUCKET_NAME = 'openpathsampling.org'
if not openpathsampling.version.release:
PREFIX = 'latest'
else:
PREFIX = openpathsampling.version.short_version
if not any(d.project_name == 's3cmd' for d in pip.get_installed_distributions()):
raise ImportError('The s3cmd package is required. try $ pip install s3cmd')
# The secret key is available as a secure environment variable
# on travis-ci to push the build documentation to Amazon S3.
with tempfile.NamedTemporaryFile('w') as f:
f.write('''[default]
access_key = {AWS_ACCESS_KEY_ID}
secret_key = {AWS_SECRET_ACCESS_KEY}
'''.format(**os.environ))
f.flush()
template = ('s3cmd --config {config} '
'sync docs/_build/html/ s3://{bucket}/{prefix}/ '
'--no-mime-magic --guess-mime-type')
cmd = template.format(
config=f.name,
bucket=BUCKET_NAME,
prefix=PREFIX)
return_val = subprocess.call(cmd.split())
# Sync index file.
template = ('s3cmd --config {config} '
'sync devtools/ci/index.html s3://{bucket}/')
cmd = template.format(
config=f.name,
bucket=BUCKET_NAME)
return_val = subprocess.call(cmd.split())
|
<commit_before>import os
import pip
import tempfile
import subprocess
import openpathsampling.version
BUCKET_NAME = 'openpathsampling.org'
if not openpathsampling.version.release:
PREFIX = 'latest'
else:
PREFIX = openpathsampling.version.short_version
if not any(d.project_name == 's3cmd' for d in pip.get_installed_distributions()):
raise ImportError('The s3cmd package is required. try $ pip install s3cmd')
# The secret key is available as a secure environment variable
# on travis-ci to push the build documentation to Amazon S3.
with tempfile.NamedTemporaryFile('w') as f:
f.write('''[default]
access_key = {AWS_ACCESS_KEY_ID}
secret_key = {AWS_SECRET_ACCESS_KEY}
'''.format(**os.environ))
f.flush()
template = ('s3cmd --config {config} '
'sync docs/_build/html/ s3://{bucket}/{prefix}/')
cmd = template.format(
config=f.name,
bucket=BUCKET_NAME,
prefix=PREFIX)
return_val = subprocess.call(cmd.split())
# Sync index file.
template = ('s3cmd --config {config} '
'sync devtools/ci/index.html s3://{bucket}/')
cmd = template.format(
config=f.name,
bucket=BUCKET_NAME)
return_val = subprocess.call(cmd.split())
<commit_msg>Add MIME-handling options to s3cmd<commit_after>import os
import pip
import tempfile
import subprocess
import openpathsampling.version
BUCKET_NAME = 'openpathsampling.org'
if not openpathsampling.version.release:
PREFIX = 'latest'
else:
PREFIX = openpathsampling.version.short_version
if not any(d.project_name == 's3cmd' for d in pip.get_installed_distributions()):
raise ImportError('The s3cmd package is required. try $ pip install s3cmd')
# The secret key is available as a secure environment variable
# on travis-ci to push the build documentation to Amazon S3.
with tempfile.NamedTemporaryFile('w') as f:
f.write('''[default]
access_key = {AWS_ACCESS_KEY_ID}
secret_key = {AWS_SECRET_ACCESS_KEY}
'''.format(**os.environ))
f.flush()
template = ('s3cmd --config {config} '
'sync docs/_build/html/ s3://{bucket}/{prefix}/ '
'--no-mime-magic --guess-mime-type')
cmd = template.format(
config=f.name,
bucket=BUCKET_NAME,
prefix=PREFIX)
return_val = subprocess.call(cmd.split())
# Sync index file.
template = ('s3cmd --config {config} '
'sync devtools/ci/index.html s3://{bucket}/')
cmd = template.format(
config=f.name,
bucket=BUCKET_NAME)
return_val = subprocess.call(cmd.split())
|
40491b243beca358e81184857a155fb4d2d52157
|
drogher/shippers/base.py
|
drogher/shippers/base.py
|
import re
class Shipper(object):
    """Base class for parcel shippers identified by barcode patterns.

    Subclasses set ``barcode_pattern`` (a regex matched against the raw
    barcode) and ``shipper`` (a human-readable carrier name), and may
    override ``valid_checksum`` and ``tracking_number``.
    """

    barcode = None          # raw barcode string, set in __init__
    barcode_pattern = None  # regex the barcode must match; set by subclasses
    shipper = None          # display name of the carrier; set by subclasses

    def __init__(self, barcode):
        self.barcode = barcode

    @property
    def is_valid(self):
        """True when the barcode both matches the pattern and checksums."""
        # Idiom fix: return the boolean directly instead of
        # `if ...: return True / return False`.
        return bool(self.matches_barcode and self.valid_checksum)

    @property
    def matches_barcode(self):
        """True when the barcode matches this shipper's pattern.

        NOTE(review): raises TypeError if ``barcode_pattern`` is still None
        (i.e. on the base class itself) -- confirm subclasses always set it.
        """
        return bool(re.match(self.barcode_pattern, self.barcode))

    @property
    def tracking_number(self):
        """The tracking number; by default the raw barcode itself."""
        return self.barcode

    @property
    def valid_checksum(self):
        """Checksum validation hook; the base implementation always fails."""
        return False
class Unknown(Shipper):
    """Fallback shipper for barcodes that match no known carrier."""
    shipper = 'Unknown'

    @property
    def matches_barcode(self):
        """Always False: Unknown is only ever selected by exclusion."""
        return False
|
import re
class Shipper(object):
    """Barcode-driven shipper base; subclasses supply pattern and name."""

    barcode = None
    barcode_pattern = None
    shipper = None

    def __init__(self, barcode):
        self.barcode = barcode

    def __repr__(self):
        qualified = 'shippers.' + type(self).__name__
        return "%s('%s')" % (qualified, self.barcode)

    @property
    def is_valid(self):
        """A barcode is valid only if it matches the pattern and checksums."""
        if not self.matches_barcode:
            return False
        if not self.valid_checksum:
            return False
        return True

    @property
    def matches_barcode(self):
        """Whether the barcode matches this shipper's regex pattern."""
        return re.match(self.barcode_pattern, self.barcode) is not None

    @property
    def tracking_number(self):
        """By default the raw barcode is the tracking number."""
        return self.barcode

    @property
    def valid_checksum(self):
        """Checksum hook; the base class never validates."""
        return False
class Unknown(Shipper):
    """Sentinel shipper returned when no carrier pattern matches."""
    shipper = 'Unknown'

    @property
    def matches_barcode(self):
        """Unknown is chosen by exclusion, so it never positively matches."""
        return False
|
Add a more useful representation of Shipper objects
|
Add a more useful representation of Shipper objects
|
Python
|
bsd-3-clause
|
jbittel/drogher
|
import re
class Shipper(object):
barcode = None
barcode_pattern = None
shipper = None
def __init__(self, barcode):
self.barcode = barcode
@property
def is_valid(self):
if self.matches_barcode and self.valid_checksum:
return True
return False
@property
def matches_barcode(self):
return bool(re.match(self.barcode_pattern, self.barcode))
@property
def tracking_number(self):
return self.barcode
@property
def valid_checksum(self):
return False
class Unknown(Shipper):
shipper = 'Unknown'
@property
def matches_barcode(self):
return False
Add a more useful representation of Shipper objects
|
import re
class Shipper(object):
barcode = None
barcode_pattern = None
shipper = None
def __init__(self, barcode):
self.barcode = barcode
def __repr__(self):
return "%s('%s')" % ('shippers.' + self.__class__.__name__, self.barcode)
@property
def is_valid(self):
if self.matches_barcode and self.valid_checksum:
return True
return False
@property
def matches_barcode(self):
return bool(re.match(self.barcode_pattern, self.barcode))
@property
def tracking_number(self):
return self.barcode
@property
def valid_checksum(self):
return False
class Unknown(Shipper):
shipper = 'Unknown'
@property
def matches_barcode(self):
return False
|
<commit_before>import re
class Shipper(object):
barcode = None
barcode_pattern = None
shipper = None
def __init__(self, barcode):
self.barcode = barcode
@property
def is_valid(self):
if self.matches_barcode and self.valid_checksum:
return True
return False
@property
def matches_barcode(self):
return bool(re.match(self.barcode_pattern, self.barcode))
@property
def tracking_number(self):
return self.barcode
@property
def valid_checksum(self):
return False
class Unknown(Shipper):
shipper = 'Unknown'
@property
def matches_barcode(self):
return False
<commit_msg>Add a more useful representation of Shipper objects<commit_after>
|
import re
class Shipper(object):
barcode = None
barcode_pattern = None
shipper = None
def __init__(self, barcode):
self.barcode = barcode
def __repr__(self):
return "%s('%s')" % ('shippers.' + self.__class__.__name__, self.barcode)
@property
def is_valid(self):
if self.matches_barcode and self.valid_checksum:
return True
return False
@property
def matches_barcode(self):
return bool(re.match(self.barcode_pattern, self.barcode))
@property
def tracking_number(self):
return self.barcode
@property
def valid_checksum(self):
return False
class Unknown(Shipper):
shipper = 'Unknown'
@property
def matches_barcode(self):
return False
|
import re
class Shipper(object):
barcode = None
barcode_pattern = None
shipper = None
def __init__(self, barcode):
self.barcode = barcode
@property
def is_valid(self):
if self.matches_barcode and self.valid_checksum:
return True
return False
@property
def matches_barcode(self):
return bool(re.match(self.barcode_pattern, self.barcode))
@property
def tracking_number(self):
return self.barcode
@property
def valid_checksum(self):
return False
class Unknown(Shipper):
shipper = 'Unknown'
@property
def matches_barcode(self):
return False
Add a more useful representation of Shipper objectsimport re
class Shipper(object):
barcode = None
barcode_pattern = None
shipper = None
def __init__(self, barcode):
self.barcode = barcode
def __repr__(self):
return "%s('%s')" % ('shippers.' + self.__class__.__name__, self.barcode)
@property
def is_valid(self):
if self.matches_barcode and self.valid_checksum:
return True
return False
@property
def matches_barcode(self):
return bool(re.match(self.barcode_pattern, self.barcode))
@property
def tracking_number(self):
return self.barcode
@property
def valid_checksum(self):
return False
class Unknown(Shipper):
shipper = 'Unknown'
@property
def matches_barcode(self):
return False
|
<commit_before>import re
class Shipper(object):
barcode = None
barcode_pattern = None
shipper = None
def __init__(self, barcode):
self.barcode = barcode
@property
def is_valid(self):
if self.matches_barcode and self.valid_checksum:
return True
return False
@property
def matches_barcode(self):
return bool(re.match(self.barcode_pattern, self.barcode))
@property
def tracking_number(self):
return self.barcode
@property
def valid_checksum(self):
return False
class Unknown(Shipper):
shipper = 'Unknown'
@property
def matches_barcode(self):
return False
<commit_msg>Add a more useful representation of Shipper objects<commit_after>import re
class Shipper(object):
barcode = None
barcode_pattern = None
shipper = None
def __init__(self, barcode):
self.barcode = barcode
def __repr__(self):
return "%s('%s')" % ('shippers.' + self.__class__.__name__, self.barcode)
@property
def is_valid(self):
if self.matches_barcode and self.valid_checksum:
return True
return False
@property
def matches_barcode(self):
return bool(re.match(self.barcode_pattern, self.barcode))
@property
def tracking_number(self):
return self.barcode
@property
def valid_checksum(self):
return False
class Unknown(Shipper):
shipper = 'Unknown'
@property
def matches_barcode(self):
return False
|
9f363aee856b46570707af844092126dfa6ecf1e
|
instrument-classification/predict_webapp.py
|
instrument-classification/predict_webapp.py
|
from flask import Flask, redirect, render_template, request
from gevent.wsgi import WSGIServer
from predict import InstrumentClassifier
app = Flask(__name__)
app.config['MAX_CONTENT_LENGTH'] = 1 * 2**20
model = InstrumentClassifier(model_dir='data/working/single-notes-2000/model')
@app.route('/')
def hello():
return render_template('home.html')
@app.route('/api/classify/instrument', methods=['POST'])
def classify():
if 'audio_file' not in request.files:
return redirect('/')
# File-like object than can be directy passed to soundfile.read()
# without saving to disk.
audio_file = request.files['audio_file']
if audio_file.filename == '':
return redirect('/')
class_probabilities = model.predict_probabilities(audio_file)
class_probabilities = class_probabilities.round(5)
label = model.class_label_from_probabilities(
class_probabilities)
return render_template('home.html',
audio_file=audio_file.filename,
predicted_label=label,
class_probabilities=class_probabilities)
if __name__ == '__main__':
# app.run(debug=True)
app.debug = True
# needed since Flask dev mode interacts badly with TensorFlow
http_server = WSGIServer(('', 5000), app)
http_server.serve_forever()
|
from flask import Flask, redirect, render_template, request
from gevent.wsgi import WSGIServer
from predict import InstrumentClassifier
from leaderboard import LeaderBoard

app = Flask(__name__)
# Cap uploads at 1 MiB; Flask rejects larger request bodies.
app.config['MAX_CONTENT_LENGTH'] = 1 * 2**20

# Pick the best-scoring model from the leaderboard once, at startup.
model_dir = 'data/working/single-notes-2000/models'
model_id = LeaderBoard(model_dir).best_model()
model = InstrumentClassifier(model_dir + '/' + model_id)
print('Using model:', model_id)


@app.route('/')
def hello():
    """Serve the upload form."""
    return render_template('home.html')


@app.route('/api/classify/instrument', methods=['POST'])
def classify():
    """Classify an uploaded audio file and render the result page.

    Redirects back to the form when no file (or an empty filename)
    was submitted.
    """
    if 'audio_file' not in request.files:
        return redirect('/')
    # File-like object that can be directly passed to soundfile.read()
    # without saving to disk.
    audio_file = request.files['audio_file']
    if audio_file.filename == '':
        return redirect('/')
    class_probabilities = model.predict_probabilities(audio_file)
    class_probabilities = class_probabilities.round(5)
    label = model.class_label_from_probabilities(
        class_probabilities)
    return render_template('home.html',
                           audio_file=audio_file.filename,
                           predicted_label=label,
                           class_probabilities=class_probabilities)


if __name__ == '__main__':
    # app.run(debug=True)
    app.debug = True
    # needed since Flask dev mode interacts badly with TensorFlow
    http_server = WSGIServer(('', 5000), app)
    http_server.serve_forever()
|
Use the best model from the leader board in the prediction web app.
|
Use the best model from the leader board in the prediction web app.
|
Python
|
mit
|
bzamecnik/ml,bzamecnik/ml-playground,bzamecnik/ml,bzamecnik/ml-playground,bzamecnik/ml
|
from flask import Flask, redirect, render_template, request
from gevent.wsgi import WSGIServer
from predict import InstrumentClassifier
app = Flask(__name__)
app.config['MAX_CONTENT_LENGTH'] = 1 * 2**20
model = InstrumentClassifier(model_dir='data/working/single-notes-2000/model')
@app.route('/')
def hello():
return render_template('home.html')
@app.route('/api/classify/instrument', methods=['POST'])
def classify():
if 'audio_file' not in request.files:
return redirect('/')
# File-like object than can be directy passed to soundfile.read()
# without saving to disk.
audio_file = request.files['audio_file']
if audio_file.filename == '':
return redirect('/')
class_probabilities = model.predict_probabilities(audio_file)
class_probabilities = class_probabilities.round(5)
label = model.class_label_from_probabilities(
class_probabilities)
return render_template('home.html',
audio_file=audio_file.filename,
predicted_label=label,
class_probabilities=class_probabilities)
if __name__ == '__main__':
# app.run(debug=True)
app.debug = True
# needed since Flask dev mode interacts badly with TensorFlow
http_server = WSGIServer(('', 5000), app)
http_server.serve_forever()
Use the best model from the leader board in the prediction web app.
|
from flask import Flask, redirect, render_template, request
from gevent.wsgi import WSGIServer
from predict import InstrumentClassifier
from leaderboard import LeaderBoard
app = Flask(__name__)
app.config['MAX_CONTENT_LENGTH'] = 1 * 2**20
model_dir = 'data/working/single-notes-2000/models'
model_id = LeaderBoard(model_dir).best_model()
model = InstrumentClassifier(model_dir + '/' + model_id)
print('Using model:', model_id)
@app.route('/')
def hello():
return render_template('home.html')
@app.route('/api/classify/instrument', methods=['POST'])
def classify():
if 'audio_file' not in request.files:
return redirect('/')
# File-like object than can be directy passed to soundfile.read()
# without saving to disk.
audio_file = request.files['audio_file']
if audio_file.filename == '':
return redirect('/')
class_probabilities = model.predict_probabilities(audio_file)
class_probabilities = class_probabilities.round(5)
label = model.class_label_from_probabilities(
class_probabilities)
return render_template('home.html',
audio_file=audio_file.filename,
predicted_label=label,
class_probabilities=class_probabilities)
if __name__ == '__main__':
# app.run(debug=True)
app.debug = True
# needed since Flask dev mode interacts badly with TensorFlow
http_server = WSGIServer(('', 5000), app)
http_server.serve_forever()
|
<commit_before>from flask import Flask, redirect, render_template, request
from gevent.wsgi import WSGIServer
from predict import InstrumentClassifier
app = Flask(__name__)
app.config['MAX_CONTENT_LENGTH'] = 1 * 2**20
model = InstrumentClassifier(model_dir='data/working/single-notes-2000/model')
@app.route('/')
def hello():
return render_template('home.html')
@app.route('/api/classify/instrument', methods=['POST'])
def classify():
if 'audio_file' not in request.files:
return redirect('/')
# File-like object than can be directy passed to soundfile.read()
# without saving to disk.
audio_file = request.files['audio_file']
if audio_file.filename == '':
return redirect('/')
class_probabilities = model.predict_probabilities(audio_file)
class_probabilities = class_probabilities.round(5)
label = model.class_label_from_probabilities(
class_probabilities)
return render_template('home.html',
audio_file=audio_file.filename,
predicted_label=label,
class_probabilities=class_probabilities)
if __name__ == '__main__':
# app.run(debug=True)
app.debug = True
# needed since Flask dev mode interacts badly with TensorFlow
http_server = WSGIServer(('', 5000), app)
http_server.serve_forever()
<commit_msg>Use the best model from the leader board in the prediction web app.<commit_after>
|
from flask import Flask, redirect, render_template, request
from gevent.wsgi import WSGIServer
from predict import InstrumentClassifier
from leaderboard import LeaderBoard
app = Flask(__name__)
app.config['MAX_CONTENT_LENGTH'] = 1 * 2**20
model_dir = 'data/working/single-notes-2000/models'
model_id = LeaderBoard(model_dir).best_model()
model = InstrumentClassifier(model_dir + '/' + model_id)
print('Using model:', model_id)
@app.route('/')
def hello():
return render_template('home.html')
@app.route('/api/classify/instrument', methods=['POST'])
def classify():
if 'audio_file' not in request.files:
return redirect('/')
# File-like object than can be directy passed to soundfile.read()
# without saving to disk.
audio_file = request.files['audio_file']
if audio_file.filename == '':
return redirect('/')
class_probabilities = model.predict_probabilities(audio_file)
class_probabilities = class_probabilities.round(5)
label = model.class_label_from_probabilities(
class_probabilities)
return render_template('home.html',
audio_file=audio_file.filename,
predicted_label=label,
class_probabilities=class_probabilities)
if __name__ == '__main__':
# app.run(debug=True)
app.debug = True
# needed since Flask dev mode interacts badly with TensorFlow
http_server = WSGIServer(('', 5000), app)
http_server.serve_forever()
|
from flask import Flask, redirect, render_template, request
from gevent.wsgi import WSGIServer
from predict import InstrumentClassifier
app = Flask(__name__)
app.config['MAX_CONTENT_LENGTH'] = 1 * 2**20
model = InstrumentClassifier(model_dir='data/working/single-notes-2000/model')
@app.route('/')
def hello():
return render_template('home.html')
@app.route('/api/classify/instrument', methods=['POST'])
def classify():
if 'audio_file' not in request.files:
return redirect('/')
# File-like object than can be directy passed to soundfile.read()
# without saving to disk.
audio_file = request.files['audio_file']
if audio_file.filename == '':
return redirect('/')
class_probabilities = model.predict_probabilities(audio_file)
class_probabilities = class_probabilities.round(5)
label = model.class_label_from_probabilities(
class_probabilities)
return render_template('home.html',
audio_file=audio_file.filename,
predicted_label=label,
class_probabilities=class_probabilities)
if __name__ == '__main__':
# app.run(debug=True)
app.debug = True
# needed since Flask dev mode interacts badly with TensorFlow
http_server = WSGIServer(('', 5000), app)
http_server.serve_forever()
Use the best model from the leader board in the prediction web app.from flask import Flask, redirect, render_template, request
from gevent.wsgi import WSGIServer
from predict import InstrumentClassifier
from leaderboard import LeaderBoard
app = Flask(__name__)
app.config['MAX_CONTENT_LENGTH'] = 1 * 2**20
model_dir = 'data/working/single-notes-2000/models'
model_id = LeaderBoard(model_dir).best_model()
model = InstrumentClassifier(model_dir + '/' + model_id)
print('Using model:', model_id)
@app.route('/')
def hello():
return render_template('home.html')
@app.route('/api/classify/instrument', methods=['POST'])
def classify():
if 'audio_file' not in request.files:
return redirect('/')
# File-like object than can be directy passed to soundfile.read()
# without saving to disk.
audio_file = request.files['audio_file']
if audio_file.filename == '':
return redirect('/')
class_probabilities = model.predict_probabilities(audio_file)
class_probabilities = class_probabilities.round(5)
label = model.class_label_from_probabilities(
class_probabilities)
return render_template('home.html',
audio_file=audio_file.filename,
predicted_label=label,
class_probabilities=class_probabilities)
if __name__ == '__main__':
# app.run(debug=True)
app.debug = True
# needed since Flask dev mode interacts badly with TensorFlow
http_server = WSGIServer(('', 5000), app)
http_server.serve_forever()
|
<commit_before>from flask import Flask, redirect, render_template, request
from gevent.wsgi import WSGIServer
from predict import InstrumentClassifier
app = Flask(__name__)
app.config['MAX_CONTENT_LENGTH'] = 1 * 2**20
model = InstrumentClassifier(model_dir='data/working/single-notes-2000/model')
@app.route('/')
def hello():
return render_template('home.html')
@app.route('/api/classify/instrument', methods=['POST'])
def classify():
if 'audio_file' not in request.files:
return redirect('/')
# File-like object than can be directy passed to soundfile.read()
# without saving to disk.
audio_file = request.files['audio_file']
if audio_file.filename == '':
return redirect('/')
class_probabilities = model.predict_probabilities(audio_file)
class_probabilities = class_probabilities.round(5)
label = model.class_label_from_probabilities(
class_probabilities)
return render_template('home.html',
audio_file=audio_file.filename,
predicted_label=label,
class_probabilities=class_probabilities)
if __name__ == '__main__':
# app.run(debug=True)
app.debug = True
# needed since Flask dev mode interacts badly with TensorFlow
http_server = WSGIServer(('', 5000), app)
http_server.serve_forever()
<commit_msg>Use the best model from the leader board in the prediction web app.<commit_after>from flask import Flask, redirect, render_template, request
from gevent.wsgi import WSGIServer
from predict import InstrumentClassifier
from leaderboard import LeaderBoard
app = Flask(__name__)
app.config['MAX_CONTENT_LENGTH'] = 1 * 2**20
model_dir = 'data/working/single-notes-2000/models'
model_id = LeaderBoard(model_dir).best_model()
model = InstrumentClassifier(model_dir + '/' + model_id)
print('Using model:', model_id)
@app.route('/')
def hello():
return render_template('home.html')
@app.route('/api/classify/instrument', methods=['POST'])
def classify():
if 'audio_file' not in request.files:
return redirect('/')
# File-like object than can be directy passed to soundfile.read()
# without saving to disk.
audio_file = request.files['audio_file']
if audio_file.filename == '':
return redirect('/')
class_probabilities = model.predict_probabilities(audio_file)
class_probabilities = class_probabilities.round(5)
label = model.class_label_from_probabilities(
class_probabilities)
return render_template('home.html',
audio_file=audio_file.filename,
predicted_label=label,
class_probabilities=class_probabilities)
if __name__ == '__main__':
# app.run(debug=True)
app.debug = True
# needed since Flask dev mode interacts badly with TensorFlow
http_server = WSGIServer(('', 5000), app)
http_server.serve_forever()
|
25b56a4bb7c5937671a509aa92b9fd28d972bff9
|
fabfile.py
|
fabfile.py
|
from fabric.api import *

"""
Overview
========
This fabric file automates the process of pip packaging and
deploying your new pip package to our private pip repository.
Requirements
------------
- Must have fabric installed via `pip install fabric`
- Must have your setup.py working and up to date. Make sure
it works by running `python setup.py test` or do a test install
via `python setup.py install` inside a virtualenv.
Deploying
---------
Run `fab publish` for a one step pip package deploy!
"""


def prep():
    """Install the dir2pi packaging helper."""
    local("pip install dir2pi")


def package():
    """Build a source distribution (dist/<name>-<version>.tar.gz)."""
    local("python setup.py sdist")


def deploy(pip_repo):
    """Add the freshly built sdist to the pip index rooted at *pip_repo*."""
    name = local("python setup.py --name", capture=True)
    ver = local("python setup.py --version", capture=True)
    sdist_name = '{}-{}.tar.gz'.format(name, ver)
    local("dir2pi {} dist/{}".format(pip_repo, sdist_name))


def publish():
    """Run the full prep -> package -> deploy pipeline.

    NOTE(review): deploy() is called without the required pip_repo
    argument, so this task raises TypeError as written -- confirm the
    intended repository path and forward it.
    """
    prep()
    package()
    deploy()
|
from fabric.api import *
"""
Overview
========
This fabric file automates the process of pip packaging and
deploying your new pip package to our private pip repository.
Requirements
------------
- Must have fabric installed via `pip install fabric`
- Must have your setup.py working and up to date. Make sure
it works by running `python setup.py test` or do a test install
via `python setup.py install` inside a virtualenv.
Deploying
---------
Run `fab publish` for a one step pip package deploy!
"""
def prep():
local("pip install pip2pi")
def package():
local("python setup.py sdist")
def deploy(pip_repo):
name = local("python setup.py --name", capture=True)
ver = local("python setup.py --version", capture=True)
sdist_name = '{}-{}.tar.gz'.format(name, ver)
local("pip2pi {} dist/{}".format(pip_repo, sdist_name))
def publish():
prep()
package()
deploy()
|
Use pip2pi, not dir2pi for deploys
|
Use pip2pi, not dir2pi for deploys
|
Python
|
mit
|
istresearch/traptor,istresearch/traptor
|
from fabric.api import *
"""
Overview
========
This fabric file automates the process of pip packaging and
deploying your new pip package to our private pip repository.
Requirements
------------
- Must have fabric installed via `pip install fabric`
- Must have your setup.py working and up to date. Make sure
it works by running `python setup.py test` or do a test install
via `python setup.py install` inside a virtualenv.
Deploying
---------
Run `fab publish` for a one step pip package deploy!
"""
def prep():
local("pip install dir2pi")
def package():
local("python setup.py sdist")
def deploy(pip_repo):
name = local("python setup.py --name", capture=True)
ver = local("python setup.py --version", capture=True)
sdist_name = '{}-{}.tar.gz'.format(name, ver)
local("dir2pi {} dist/{}".format(pip_repo, sdist_name))
def publish():
prep()
package()
deploy()Use pip2pi, not dir2pi for deploys
|
from fabric.api import *
"""
Overview
========
This fabric file automates the process of pip packaging and
deploying your new pip package to our private pip repository.
Requirements
------------
- Must have fabric installed via `pip install fabric`
- Must have your setup.py working and up to date. Make sure
it works by running `python setup.py test` or do a test install
via `python setup.py install` inside a virtualenv.
Deploying
---------
Run `fab publish` for a one step pip package deploy!
"""
def prep():
local("pip install pip2pi")
def package():
local("python setup.py sdist")
def deploy(pip_repo):
name = local("python setup.py --name", capture=True)
ver = local("python setup.py --version", capture=True)
sdist_name = '{}-{}.tar.gz'.format(name, ver)
local("pip2pi {} dist/{}".format(pip_repo, sdist_name))
def publish():
prep()
package()
deploy()
|
<commit_before>from fabric.api import *
"""
Overview
========
This fabric file automates the process of pip packaging and
deploying your new pip package to our private pip repository.
Requirements
------------
- Must have fabric installed via `pip install fabric`
- Must have your setup.py working and up to date. Make sure
it works by running `python setup.py test` or do a test install
via `python setup.py install` inside a virtualenv.
Deploying
---------
Run `fab publish` for a one step pip package deploy!
"""
def prep():
local("pip install dir2pi")
def package():
local("python setup.py sdist")
def deploy(pip_repo):
name = local("python setup.py --name", capture=True)
ver = local("python setup.py --version", capture=True)
sdist_name = '{}-{}.tar.gz'.format(name, ver)
local("dir2pi {} dist/{}".format(pip_repo, sdist_name))
def publish():
prep()
package()
deploy()<commit_msg>Use pip2pi, not dir2pi for deploys<commit_after>
|
from fabric.api import *
"""
Overview
========
This fabric file automates the process of pip packaging and
deploying your new pip package to our private pip repository.
Requirements
------------
- Must have fabric installed via `pip install fabric`
- Must have your setup.py working and up to date. Make sure
it works by running `python setup.py test` or do a test install
via `python setup.py install` inside a virtualenv.
Deploying
---------
Run `fab publish` for a one step pip package deploy!
"""
def prep():
local("pip install pip2pi")
def package():
local("python setup.py sdist")
def deploy(pip_repo):
name = local("python setup.py --name", capture=True)
ver = local("python setup.py --version", capture=True)
sdist_name = '{}-{}.tar.gz'.format(name, ver)
local("pip2pi {} dist/{}".format(pip_repo, sdist_name))
def publish():
prep()
package()
deploy()
|
from fabric.api import *
"""
Overview
========
This fabric file automates the process of pip packaging and
deploying your new pip package to our private pip repository.
Requirements
------------
- Must have fabric installed via `pip install fabric`
- Must have your setup.py working and up to date. Make sure
it works by running `python setup.py test` or do a test install
via `python setup.py install` inside a virtualenv.
Deploying
---------
Run `fab publish` for a one step pip package deploy!
"""
def prep():
local("pip install dir2pi")
def package():
local("python setup.py sdist")
def deploy(pip_repo):
name = local("python setup.py --name", capture=True)
ver = local("python setup.py --version", capture=True)
sdist_name = '{}-{}.tar.gz'.format(name, ver)
local("dir2pi {} dist/{}".format(pip_repo, sdist_name))
def publish():
prep()
package()
deploy()Use pip2pi, not dir2pi for deploysfrom fabric.api import *
"""
Overview
========
This fabric file automates the process of pip packaging and
deploying your new pip package to our private pip repository.
Requirements
------------
- Must have fabric installed via `pip install fabric`
- Must have your setup.py working and up to date. Make sure
it works by running `python setup.py test` or do a test install
via `python setup.py install` inside a virtualenv.
Deploying
---------
Run `fab publish` for a one step pip package deploy!
"""
def prep():
local("pip install pip2pi")
def package():
local("python setup.py sdist")
def deploy(pip_repo):
name = local("python setup.py --name", capture=True)
ver = local("python setup.py --version", capture=True)
sdist_name = '{}-{}.tar.gz'.format(name, ver)
local("pip2pi {} dist/{}".format(pip_repo, sdist_name))
def publish():
prep()
package()
deploy()
|
<commit_before>from fabric.api import *
"""
Overview
========
This fabric file automates the process of pip packaging and
deploying your new pip package to our private pip repository.
Requirements
------------
- Must have fabric installed via `pip install fabric`
- Must have your setup.py working and up to date. Make sure
it works by running `python setup.py test` or do a test install
via `python setup.py install` inside a virtualenv.
Deploying
---------
Run `fab publish` for a one step pip package deploy!
"""
def prep():
local("pip install dir2pi")
def package():
local("python setup.py sdist")
def deploy(pip_repo):
name = local("python setup.py --name", capture=True)
ver = local("python setup.py --version", capture=True)
sdist_name = '{}-{}.tar.gz'.format(name, ver)
local("dir2pi {} dist/{}".format(pip_repo, sdist_name))
def publish():
prep()
package()
deploy()<commit_msg>Use pip2pi, not dir2pi for deploys<commit_after>from fabric.api import *
"""
Overview
========
This fabric file automates the process of pip packaging and
deploying your new pip package to our private pip repository.
Requirements
------------
- Must have fabric installed via `pip install fabric`
- Must have your setup.py working and up to date. Make sure
it works by running `python setup.py test` or do a test install
via `python setup.py install` inside a virtualenv.
Deploying
---------
Run `fab publish` for a one step pip package deploy!
"""
def prep():
local("pip install pip2pi")
def package():
local("python setup.py sdist")
def deploy(pip_repo):
name = local("python setup.py --name", capture=True)
ver = local("python setup.py --version", capture=True)
sdist_name = '{}-{}.tar.gz'.format(name, ver)
local("pip2pi {} dist/{}".format(pip_repo, sdist_name))
def publish():
prep()
package()
deploy()
|
5a44392b810800c4440b04aa92a4614e45c12e86
|
mla_game/apps/transcript/management/commands/fake_game_one_gameplay.py
|
mla_game/apps/transcript/management/commands/fake_game_one_gameplay.py
|
import random
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from mla_game.apps.accounts.models import Profile
from ...models import (
Transcript, TranscriptPhraseDownvote
)
class Command(BaseCommand):
help = 'Creates random votes for all phrases in a random transcript'
def handle(self, *args, **options):
users = User.objects.all()
transcript = Transcript.objects.random()
for phrase in transcript.phrases.all():
for user in users:
profile = Profile.objects.get(user=user)
profile.considered_phrases.add(phrase)
if random.choice([True, False]):
TranscriptPhraseDownvote.objects.create(
transcript_phrase=phrase,
user=user
)
|
import random
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from mla_game.apps.accounts.models import Profile
from ...models import (
Transcript, TranscriptPhraseDownvote
)
class Command(BaseCommand):
help = 'Creates random votes for all phrases in a random transcript'
def handle(self, *args, **options):
users = User.objects.all()
transcript = Transcript.objects.random_transcript()
for phrase in transcript.phrases.all():
for user in users:
profile = Profile.objects.get(user=user)
profile.considered_phrases.add(phrase)
if random.choice([True, False]):
TranscriptPhraseDownvote.objects.create(
transcript_phrase=phrase,
user=user
)
|
Fix fake game one script
|
Fix fake game one script
|
Python
|
mit
|
WGBH/FixIt,WGBH/FixIt,WGBH/FixIt
|
import random
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from mla_game.apps.accounts.models import Profile
from ...models import (
Transcript, TranscriptPhraseDownvote
)
class Command(BaseCommand):
help = 'Creates random votes for all phrases in a random transcript'
def handle(self, *args, **options):
users = User.objects.all()
transcript = Transcript.objects.random()
for phrase in transcript.phrases.all():
for user in users:
profile = Profile.objects.get(user=user)
profile.considered_phrases.add(phrase)
if random.choice([True, False]):
TranscriptPhraseDownvote.objects.create(
transcript_phrase=phrase,
user=user
)
Fix fake game one script
|
import random
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from mla_game.apps.accounts.models import Profile
from ...models import (
Transcript, TranscriptPhraseDownvote
)
class Command(BaseCommand):
help = 'Creates random votes for all phrases in a random transcript'
def handle(self, *args, **options):
users = User.objects.all()
transcript = Transcript.objects.random_transcript()
for phrase in transcript.phrases.all():
for user in users:
profile = Profile.objects.get(user=user)
profile.considered_phrases.add(phrase)
if random.choice([True, False]):
TranscriptPhraseDownvote.objects.create(
transcript_phrase=phrase,
user=user
)
|
<commit_before>import random
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from mla_game.apps.accounts.models import Profile
from ...models import (
Transcript, TranscriptPhraseDownvote
)
class Command(BaseCommand):
help = 'Creates random votes for all phrases in a random transcript'
def handle(self, *args, **options):
users = User.objects.all()
transcript = Transcript.objects.random()
for phrase in transcript.phrases.all():
for user in users:
profile = Profile.objects.get(user=user)
profile.considered_phrases.add(phrase)
if random.choice([True, False]):
TranscriptPhraseDownvote.objects.create(
transcript_phrase=phrase,
user=user
)
<commit_msg>Fix fake game one script<commit_after>
|
import random
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from mla_game.apps.accounts.models import Profile
from ...models import (
Transcript, TranscriptPhraseDownvote
)
class Command(BaseCommand):
help = 'Creates random votes for all phrases in a random transcript'
def handle(self, *args, **options):
users = User.objects.all()
transcript = Transcript.objects.random_transcript()
for phrase in transcript.phrases.all():
for user in users:
profile = Profile.objects.get(user=user)
profile.considered_phrases.add(phrase)
if random.choice([True, False]):
TranscriptPhraseDownvote.objects.create(
transcript_phrase=phrase,
user=user
)
|
import random
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from mla_game.apps.accounts.models import Profile
from ...models import (
Transcript, TranscriptPhraseDownvote
)
class Command(BaseCommand):
help = 'Creates random votes for all phrases in a random transcript'
def handle(self, *args, **options):
users = User.objects.all()
transcript = Transcript.objects.random()
for phrase in transcript.phrases.all():
for user in users:
profile = Profile.objects.get(user=user)
profile.considered_phrases.add(phrase)
if random.choice([True, False]):
TranscriptPhraseDownvote.objects.create(
transcript_phrase=phrase,
user=user
)
Fix fake game one scriptimport random
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from mla_game.apps.accounts.models import Profile
from ...models import (
Transcript, TranscriptPhraseDownvote
)
class Command(BaseCommand):
help = 'Creates random votes for all phrases in a random transcript'
def handle(self, *args, **options):
users = User.objects.all()
transcript = Transcript.objects.random_transcript()
for phrase in transcript.phrases.all():
for user in users:
profile = Profile.objects.get(user=user)
profile.considered_phrases.add(phrase)
if random.choice([True, False]):
TranscriptPhraseDownvote.objects.create(
transcript_phrase=phrase,
user=user
)
|
<commit_before>import random
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from mla_game.apps.accounts.models import Profile
from ...models import (
Transcript, TranscriptPhraseDownvote
)
class Command(BaseCommand):
help = 'Creates random votes for all phrases in a random transcript'
def handle(self, *args, **options):
users = User.objects.all()
transcript = Transcript.objects.random()
for phrase in transcript.phrases.all():
for user in users:
profile = Profile.objects.get(user=user)
profile.considered_phrases.add(phrase)
if random.choice([True, False]):
TranscriptPhraseDownvote.objects.create(
transcript_phrase=phrase,
user=user
)
<commit_msg>Fix fake game one script<commit_after>import random
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from mla_game.apps.accounts.models import Profile
from ...models import (
Transcript, TranscriptPhraseDownvote
)
class Command(BaseCommand):
help = 'Creates random votes for all phrases in a random transcript'
def handle(self, *args, **options):
users = User.objects.all()
transcript = Transcript.objects.random_transcript()
for phrase in transcript.phrases.all():
for user in users:
profile = Profile.objects.get(user=user)
profile.considered_phrases.add(phrase)
if random.choice([True, False]):
TranscriptPhraseDownvote.objects.create(
transcript_phrase=phrase,
user=user
)
|
a595e75968fa26a49b3d08661b9a0e3bb192929e
|
kokki/cookbooks/cloudera/recipes/default.py
|
kokki/cookbooks/cloudera/recipes/default.py
|
from kokki import *
apt_list_path = '/etc/apt/sources.list.d/cloudera.list'
apt = (
"deb http://archive.cloudera.com/debian {distro}-cdh3 contrib\n"
"deb-src http://archive.cloudera.com/debian {distro}-cdh3 contrib\n"
).format(distro=env.system.lsb['codename'])
Execute("apt-get update", action="nothing")
Execute("curl -s http://archive.cloudera.com/debian/archive.key | sudo apt-key add -",
not_if = "(apt-key list | grep Cloudera > /dev/null)")
File(apt_list_path,
owner = "root",
group ="root",
mode = 0644,
content = apt,
notifies = [("run", env.resources["Execute"]["apt-get update"], True)])
|
from kokki import *
env.include_recipe("java.jre")
apt_list_path = '/etc/apt/sources.list.d/cloudera.list'
apt = (
"deb http://archive.cloudera.com/debian {distro}-cdh3 contrib\n"
"deb-src http://archive.cloudera.com/debian {distro}-cdh3 contrib\n"
).format(distro=env.system.lsb['codename'])
Execute("apt-get update", action="nothing")
Execute("curl -s http://archive.cloudera.com/debian/archive.key | sudo apt-key add -",
not_if = "(apt-key list | grep Cloudera > /dev/null)")
File(apt_list_path,
owner = "root",
group ="root",
mode = 0644,
content = apt,
notifies = [("run", env.resources["Execute"]["apt-get update"], True)])
|
Install sun java when using cloudera
|
Install sun java when using cloudera
|
Python
|
bsd-3-clause
|
samuel/kokki
|
from kokki import *
apt_list_path = '/etc/apt/sources.list.d/cloudera.list'
apt = (
"deb http://archive.cloudera.com/debian {distro}-cdh3 contrib\n"
"deb-src http://archive.cloudera.com/debian {distro}-cdh3 contrib\n"
).format(distro=env.system.lsb['codename'])
Execute("apt-get update", action="nothing")
Execute("curl -s http://archive.cloudera.com/debian/archive.key | sudo apt-key add -",
not_if = "(apt-key list | grep Cloudera > /dev/null)")
File(apt_list_path,
owner = "root",
group ="root",
mode = 0644,
content = apt,
notifies = [("run", env.resources["Execute"]["apt-get update"], True)])
Install sun java when using cloudera
|
from kokki import *
env.include_recipe("java.jre")
apt_list_path = '/etc/apt/sources.list.d/cloudera.list'
apt = (
"deb http://archive.cloudera.com/debian {distro}-cdh3 contrib\n"
"deb-src http://archive.cloudera.com/debian {distro}-cdh3 contrib\n"
).format(distro=env.system.lsb['codename'])
Execute("apt-get update", action="nothing")
Execute("curl -s http://archive.cloudera.com/debian/archive.key | sudo apt-key add -",
not_if = "(apt-key list | grep Cloudera > /dev/null)")
File(apt_list_path,
owner = "root",
group ="root",
mode = 0644,
content = apt,
notifies = [("run", env.resources["Execute"]["apt-get update"], True)])
|
<commit_before>
from kokki import *
apt_list_path = '/etc/apt/sources.list.d/cloudera.list'
apt = (
"deb http://archive.cloudera.com/debian {distro}-cdh3 contrib\n"
"deb-src http://archive.cloudera.com/debian {distro}-cdh3 contrib\n"
).format(distro=env.system.lsb['codename'])
Execute("apt-get update", action="nothing")
Execute("curl -s http://archive.cloudera.com/debian/archive.key | sudo apt-key add -",
not_if = "(apt-key list | grep Cloudera > /dev/null)")
File(apt_list_path,
owner = "root",
group ="root",
mode = 0644,
content = apt,
notifies = [("run", env.resources["Execute"]["apt-get update"], True)])
<commit_msg>Install sun java when using cloudera<commit_after>
|
from kokki import *
env.include_recipe("java.jre")
apt_list_path = '/etc/apt/sources.list.d/cloudera.list'
apt = (
"deb http://archive.cloudera.com/debian {distro}-cdh3 contrib\n"
"deb-src http://archive.cloudera.com/debian {distro}-cdh3 contrib\n"
).format(distro=env.system.lsb['codename'])
Execute("apt-get update", action="nothing")
Execute("curl -s http://archive.cloudera.com/debian/archive.key | sudo apt-key add -",
not_if = "(apt-key list | grep Cloudera > /dev/null)")
File(apt_list_path,
owner = "root",
group ="root",
mode = 0644,
content = apt,
notifies = [("run", env.resources["Execute"]["apt-get update"], True)])
|
from kokki import *
apt_list_path = '/etc/apt/sources.list.d/cloudera.list'
apt = (
"deb http://archive.cloudera.com/debian {distro}-cdh3 contrib\n"
"deb-src http://archive.cloudera.com/debian {distro}-cdh3 contrib\n"
).format(distro=env.system.lsb['codename'])
Execute("apt-get update", action="nothing")
Execute("curl -s http://archive.cloudera.com/debian/archive.key | sudo apt-key add -",
not_if = "(apt-key list | grep Cloudera > /dev/null)")
File(apt_list_path,
owner = "root",
group ="root",
mode = 0644,
content = apt,
notifies = [("run", env.resources["Execute"]["apt-get update"], True)])
Install sun java when using cloudera
from kokki import *
env.include_recipe("java.jre")
apt_list_path = '/etc/apt/sources.list.d/cloudera.list'
apt = (
"deb http://archive.cloudera.com/debian {distro}-cdh3 contrib\n"
"deb-src http://archive.cloudera.com/debian {distro}-cdh3 contrib\n"
).format(distro=env.system.lsb['codename'])
Execute("apt-get update", action="nothing")
Execute("curl -s http://archive.cloudera.com/debian/archive.key | sudo apt-key add -",
not_if = "(apt-key list | grep Cloudera > /dev/null)")
File(apt_list_path,
owner = "root",
group ="root",
mode = 0644,
content = apt,
notifies = [("run", env.resources["Execute"]["apt-get update"], True)])
|
<commit_before>
from kokki import *
apt_list_path = '/etc/apt/sources.list.d/cloudera.list'
apt = (
"deb http://archive.cloudera.com/debian {distro}-cdh3 contrib\n"
"deb-src http://archive.cloudera.com/debian {distro}-cdh3 contrib\n"
).format(distro=env.system.lsb['codename'])
Execute("apt-get update", action="nothing")
Execute("curl -s http://archive.cloudera.com/debian/archive.key | sudo apt-key add -",
not_if = "(apt-key list | grep Cloudera > /dev/null)")
File(apt_list_path,
owner = "root",
group ="root",
mode = 0644,
content = apt,
notifies = [("run", env.resources["Execute"]["apt-get update"], True)])
<commit_msg>Install sun java when using cloudera<commit_after>
from kokki import *
env.include_recipe("java.jre")
apt_list_path = '/etc/apt/sources.list.d/cloudera.list'
apt = (
"deb http://archive.cloudera.com/debian {distro}-cdh3 contrib\n"
"deb-src http://archive.cloudera.com/debian {distro}-cdh3 contrib\n"
).format(distro=env.system.lsb['codename'])
Execute("apt-get update", action="nothing")
Execute("curl -s http://archive.cloudera.com/debian/archive.key | sudo apt-key add -",
not_if = "(apt-key list | grep Cloudera > /dev/null)")
File(apt_list_path,
owner = "root",
group ="root",
mode = 0644,
content = apt,
notifies = [("run", env.resources["Execute"]["apt-get update"], True)])
|
0b69f5882f251162e7898b9eadaa6874b76215d7
|
example_config.py
|
example_config.py
|
"""
File to easily switch between configurations between production and
development, etc.
"""
import os
# You must set each of these in your heroku environment with the heroku
# config:set command. See README.md for more information.
HEROKU_ENV_REQUIREMENTS = ('HEROKU', 'SECRET_KEY', 'GITHUB_CLIENT_ID',
'GITHUB_SECRET', 'DATABASE_URL',
'SQLALCHEMY_DATABASE_URI', 'REPO_OWNER',
'REPO_NAME')
class Config(object):
DEBUG = False
CSRF_ENABLED = True
GITHUB_CLIENT_ID = 'replace-me'
GITHUB_SECRET = 'replace-me'
HEROKU = False
SECRET_KEY = 'not-a-good-value'
# Details of the repo where all articles are stored. The GITHUB_CLIENT_ID
# and GITHUB_SECRET should allow full-access to this database.
REPO_OWNER = None
REPO_NAME = None
# This should automatically be set by heroku if you've added a database to
# your app.
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
class DevelopmentConfig(Config):
DEBUG = True
|
"""
File to easily switch between configurations between production and
development, etc.
"""
import os
# You must set each of these in your heroku environment with the heroku
# config:set command. See README.md for more information.
HEROKU_ENV_REQUIREMENTS = ('HEROKU', 'SECRET_KEY', 'GITHUB_CLIENT_ID',
'GITHUB_SECRET', 'DATABASE_URL',
'SQLALCHEMY_DATABASE_URI', 'REPO_OWNER',
'REPO_NAME', 'REPO_OWNER_ACCESS_TOKEN')
class Config(object):
DEBUG = False
CSRF_ENABLED = True
GITHUB_CLIENT_ID = 'replace-me'
GITHUB_SECRET = 'replace-me'
HEROKU = False
SECRET_KEY = 'not-a-good-value'
# Details of the repo where all articles are stored. The GITHUB_CLIENT_ID
# and GITHUB_SECRET should allow full-access to this database.
REPO_OWNER = None
REPO_NAME = None
REPO_OWNER_ACCESS_TOKEN = None
# This should automatically be set by heroku if you've added a database to
# your app.
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
class DevelopmentConfig(Config):
DEBUG = True
|
Add new environment variable so we can create articles with different committer and author
|
Add new environment variable so we can create articles with different committer and author
|
Python
|
agpl-3.0
|
pluralsight/guides-cms,paulocheque/guides-cms,paulocheque/guides-cms,pluralsight/guides-cms,paulocheque/guides-cms,pluralsight/guides-cms
|
"""
File to easily switch between configurations between production and
development, etc.
"""
import os
# You must set each of these in your heroku environment with the heroku
# config:set command. See README.md for more information.
HEROKU_ENV_REQUIREMENTS = ('HEROKU', 'SECRET_KEY', 'GITHUB_CLIENT_ID',
'GITHUB_SECRET', 'DATABASE_URL',
'SQLALCHEMY_DATABASE_URI', 'REPO_OWNER',
'REPO_NAME')
class Config(object):
DEBUG = False
CSRF_ENABLED = True
GITHUB_CLIENT_ID = 'replace-me'
GITHUB_SECRET = 'replace-me'
HEROKU = False
SECRET_KEY = 'not-a-good-value'
# Details of the repo where all articles are stored. The GITHUB_CLIENT_ID
# and GITHUB_SECRET should allow full-access to this database.
REPO_OWNER = None
REPO_NAME = None
# This should automatically be set by heroku if you've added a database to
# your app.
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
class DevelopmentConfig(Config):
DEBUG = True
Add new environment variable so we can create articles with different committer and author
|
"""
File to easily switch between configurations between production and
development, etc.
"""
import os
# You must set each of these in your heroku environment with the heroku
# config:set command. See README.md for more information.
HEROKU_ENV_REQUIREMENTS = ('HEROKU', 'SECRET_KEY', 'GITHUB_CLIENT_ID',
'GITHUB_SECRET', 'DATABASE_URL',
'SQLALCHEMY_DATABASE_URI', 'REPO_OWNER',
'REPO_NAME', 'REPO_OWNER_ACCESS_TOKEN')
class Config(object):
DEBUG = False
CSRF_ENABLED = True
GITHUB_CLIENT_ID = 'replace-me'
GITHUB_SECRET = 'replace-me'
HEROKU = False
SECRET_KEY = 'not-a-good-value'
# Details of the repo where all articles are stored. The GITHUB_CLIENT_ID
# and GITHUB_SECRET should allow full-access to this database.
REPO_OWNER = None
REPO_NAME = None
REPO_OWNER_ACCESS_TOKEN = None
# This should automatically be set by heroku if you've added a database to
# your app.
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
class DevelopmentConfig(Config):
DEBUG = True
|
<commit_before>"""
File to easily switch between configurations between production and
development, etc.
"""
import os
# You must set each of these in your heroku environment with the heroku
# config:set command. See README.md for more information.
HEROKU_ENV_REQUIREMENTS = ('HEROKU', 'SECRET_KEY', 'GITHUB_CLIENT_ID',
'GITHUB_SECRET', 'DATABASE_URL',
'SQLALCHEMY_DATABASE_URI', 'REPO_OWNER',
'REPO_NAME')
class Config(object):
DEBUG = False
CSRF_ENABLED = True
GITHUB_CLIENT_ID = 'replace-me'
GITHUB_SECRET = 'replace-me'
HEROKU = False
SECRET_KEY = 'not-a-good-value'
# Details of the repo where all articles are stored. The GITHUB_CLIENT_ID
# and GITHUB_SECRET should allow full-access to this database.
REPO_OWNER = None
REPO_NAME = None
# This should automatically be set by heroku if you've added a database to
# your app.
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
class DevelopmentConfig(Config):
DEBUG = True
<commit_msg>Add new environment variable so we can create articles with different committer and author<commit_after>
|
"""
File to easily switch between configurations between production and
development, etc.
"""
import os
# You must set each of these in your heroku environment with the heroku
# config:set command. See README.md for more information.
HEROKU_ENV_REQUIREMENTS = ('HEROKU', 'SECRET_KEY', 'GITHUB_CLIENT_ID',
'GITHUB_SECRET', 'DATABASE_URL',
'SQLALCHEMY_DATABASE_URI', 'REPO_OWNER',
'REPO_NAME', 'REPO_OWNER_ACCESS_TOKEN')
class Config(object):
DEBUG = False
CSRF_ENABLED = True
GITHUB_CLIENT_ID = 'replace-me'
GITHUB_SECRET = 'replace-me'
HEROKU = False
SECRET_KEY = 'not-a-good-value'
# Details of the repo where all articles are stored. The GITHUB_CLIENT_ID
# and GITHUB_SECRET should allow full-access to this database.
REPO_OWNER = None
REPO_NAME = None
REPO_OWNER_ACCESS_TOKEN = None
# This should automatically be set by heroku if you've added a database to
# your app.
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
class DevelopmentConfig(Config):
DEBUG = True
|
"""
File to easily switch between configurations between production and
development, etc.
"""
import os
# You must set each of these in your heroku environment with the heroku
# config:set command. See README.md for more information.
HEROKU_ENV_REQUIREMENTS = ('HEROKU', 'SECRET_KEY', 'GITHUB_CLIENT_ID',
'GITHUB_SECRET', 'DATABASE_URL',
'SQLALCHEMY_DATABASE_URI', 'REPO_OWNER',
'REPO_NAME')
class Config(object):
DEBUG = False
CSRF_ENABLED = True
GITHUB_CLIENT_ID = 'replace-me'
GITHUB_SECRET = 'replace-me'
HEROKU = False
SECRET_KEY = 'not-a-good-value'
# Details of the repo where all articles are stored. The GITHUB_CLIENT_ID
# and GITHUB_SECRET should allow full-access to this database.
REPO_OWNER = None
REPO_NAME = None
# This should automatically be set by heroku if you've added a database to
# your app.
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
class DevelopmentConfig(Config):
DEBUG = True
Add new environment variable so we can create articles with different committer and author"""
File to easily switch between configurations between production and
development, etc.
"""
import os
# You must set each of these in your heroku environment with the heroku
# config:set command. See README.md for more information.
HEROKU_ENV_REQUIREMENTS = ('HEROKU', 'SECRET_KEY', 'GITHUB_CLIENT_ID',
'GITHUB_SECRET', 'DATABASE_URL',
'SQLALCHEMY_DATABASE_URI', 'REPO_OWNER',
'REPO_NAME', 'REPO_OWNER_ACCESS_TOKEN')
class Config(object):
DEBUG = False
CSRF_ENABLED = True
GITHUB_CLIENT_ID = 'replace-me'
GITHUB_SECRET = 'replace-me'
HEROKU = False
SECRET_KEY = 'not-a-good-value'
# Details of the repo where all articles are stored. The GITHUB_CLIENT_ID
# and GITHUB_SECRET should allow full-access to this database.
REPO_OWNER = None
REPO_NAME = None
REPO_OWNER_ACCESS_TOKEN = None
# This should automatically be set by heroku if you've added a database to
# your app.
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
class DevelopmentConfig(Config):
DEBUG = True
|
<commit_before>"""
File to easily switch between configurations between production and
development, etc.
"""
import os
# You must set each of these in your heroku environment with the heroku
# config:set command. See README.md for more information.
HEROKU_ENV_REQUIREMENTS = ('HEROKU', 'SECRET_KEY', 'GITHUB_CLIENT_ID',
'GITHUB_SECRET', 'DATABASE_URL',
'SQLALCHEMY_DATABASE_URI', 'REPO_OWNER',
'REPO_NAME')
class Config(object):
DEBUG = False
CSRF_ENABLED = True
GITHUB_CLIENT_ID = 'replace-me'
GITHUB_SECRET = 'replace-me'
HEROKU = False
SECRET_KEY = 'not-a-good-value'
# Details of the repo where all articles are stored. The GITHUB_CLIENT_ID
# and GITHUB_SECRET should allow full-access to this database.
REPO_OWNER = None
REPO_NAME = None
# This should automatically be set by heroku if you've added a database to
# your app.
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
class DevelopmentConfig(Config):
DEBUG = True
<commit_msg>Add new environment variable so we can create articles with different committer and author<commit_after>"""
File to easily switch between configurations between production and
development, etc.
"""
import os
# You must set each of these in your heroku environment with the heroku
# config:set command. See README.md for more information.
HEROKU_ENV_REQUIREMENTS = ('HEROKU', 'SECRET_KEY', 'GITHUB_CLIENT_ID',
'GITHUB_SECRET', 'DATABASE_URL',
'SQLALCHEMY_DATABASE_URI', 'REPO_OWNER',
'REPO_NAME', 'REPO_OWNER_ACCESS_TOKEN')
class Config(object):
DEBUG = False
CSRF_ENABLED = True
GITHUB_CLIENT_ID = 'replace-me'
GITHUB_SECRET = 'replace-me'
HEROKU = False
SECRET_KEY = 'not-a-good-value'
# Details of the repo where all articles are stored. The GITHUB_CLIENT_ID
# and GITHUB_SECRET should allow full-access to this database.
REPO_OWNER = None
REPO_NAME = None
REPO_OWNER_ACCESS_TOKEN = None
# This should automatically be set by heroku if you've added a database to
# your app.
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
class DevelopmentConfig(Config):
DEBUG = True
|
700912cc3db69cfd99e33b715dcba7b6717aa225
|
apps/bluebottle_utils/models.py
|
apps/bluebottle_utils/models.py
|
from django.db import models
from django_countries import CountryField
class Address(models.Model):
"""
A postal address.
"""
address_line1 = models.CharField(max_length=100, blank=True)
address_line2 = models.CharField(max_length=100, blank=True)
city = models.CharField(max_length=100, blank=True)
state = models.CharField(max_length=100, blank=True)
country = CountryField(blank=True)
zip_code = models.CharField(max_length=20, blank=True)
def __unicode__(self):
return self.address_line1[:80]
class Meta:
abstract = True
|
from django.db import models
from django_countries import CountryField
class Address(models.Model):
"""
A postal address.
"""
line1 = models.CharField(max_length=100, blank=True)
line2 = models.CharField(max_length=100, blank=True)
city = models.CharField(max_length=100, blank=True)
state = models.CharField(max_length=100, blank=True)
country = CountryField(blank=True)
zip_code = models.CharField(max_length=20, blank=True)
def __unicode__(self):
return self.address_line1[:80]
class Meta:
abstract = True
|
Remove the address_ prefix from line1 and line2 in the Address model.
|
Remove the address_ prefix from line1 and line2 in the Address model.
The address_ prefix is redundant now that we have an Address model.
|
Python
|
bsd-3-clause
|
onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site
|
from django.db import models
from django_countries import CountryField
class Address(models.Model):
"""
A postal address.
"""
address_line1 = models.CharField(max_length=100, blank=True)
address_line2 = models.CharField(max_length=100, blank=True)
city = models.CharField(max_length=100, blank=True)
state = models.CharField(max_length=100, blank=True)
country = CountryField(blank=True)
zip_code = models.CharField(max_length=20, blank=True)
def __unicode__(self):
return self.address_line1[:80]
class Meta:
abstract = True
Remove the address_ prefix from line1 and line2 in the Address model.
The address_ prefix is redundant now that we have an Address model.
|
from django.db import models
from django_countries import CountryField
class Address(models.Model):
"""
A postal address.
"""
line1 = models.CharField(max_length=100, blank=True)
line2 = models.CharField(max_length=100, blank=True)
city = models.CharField(max_length=100, blank=True)
state = models.CharField(max_length=100, blank=True)
country = CountryField(blank=True)
zip_code = models.CharField(max_length=20, blank=True)
def __unicode__(self):
return self.address_line1[:80]
class Meta:
abstract = True
|
<commit_before>from django.db import models
from django_countries import CountryField
class Address(models.Model):
"""
A postal address.
"""
address_line1 = models.CharField(max_length=100, blank=True)
address_line2 = models.CharField(max_length=100, blank=True)
city = models.CharField(max_length=100, blank=True)
state = models.CharField(max_length=100, blank=True)
country = CountryField(blank=True)
zip_code = models.CharField(max_length=20, blank=True)
def __unicode__(self):
return self.address_line1[:80]
class Meta:
abstract = True
<commit_msg>Remove the address_ prefix from line1 and line2 in the Address model.
The address_ prefix is redundant now that we have an Address model.<commit_after>
|
from django.db import models
from django_countries import CountryField
class Address(models.Model):
"""
A postal address.
"""
line1 = models.CharField(max_length=100, blank=True)
line2 = models.CharField(max_length=100, blank=True)
city = models.CharField(max_length=100, blank=True)
state = models.CharField(max_length=100, blank=True)
country = CountryField(blank=True)
zip_code = models.CharField(max_length=20, blank=True)
def __unicode__(self):
return self.address_line1[:80]
class Meta:
abstract = True
|
from django.db import models
from django_countries import CountryField
class Address(models.Model):
"""
A postal address.
"""
address_line1 = models.CharField(max_length=100, blank=True)
address_line2 = models.CharField(max_length=100, blank=True)
city = models.CharField(max_length=100, blank=True)
state = models.CharField(max_length=100, blank=True)
country = CountryField(blank=True)
zip_code = models.CharField(max_length=20, blank=True)
def __unicode__(self):
return self.address_line1[:80]
class Meta:
abstract = True
Remove the address_ prefix from line1 and line2 in the Address model.
The address_ prefix is redundant now that we have an Address model.from django.db import models
from django_countries import CountryField
class Address(models.Model):
"""
A postal address.
"""
line1 = models.CharField(max_length=100, blank=True)
line2 = models.CharField(max_length=100, blank=True)
city = models.CharField(max_length=100, blank=True)
state = models.CharField(max_length=100, blank=True)
country = CountryField(blank=True)
zip_code = models.CharField(max_length=20, blank=True)
def __unicode__(self):
return self.address_line1[:80]
class Meta:
abstract = True
|
<commit_before>from django.db import models
from django_countries import CountryField
class Address(models.Model):
"""
A postal address.
"""
address_line1 = models.CharField(max_length=100, blank=True)
address_line2 = models.CharField(max_length=100, blank=True)
city = models.CharField(max_length=100, blank=True)
state = models.CharField(max_length=100, blank=True)
country = CountryField(blank=True)
zip_code = models.CharField(max_length=20, blank=True)
def __unicode__(self):
return self.address_line1[:80]
class Meta:
abstract = True
<commit_msg>Remove the address_ prefix from line1 and line2 in the Address model.
The address_ prefix is redundant now that we have an Address model.<commit_after>from django.db import models
from django_countries import CountryField
class Address(models.Model):
"""
A postal address.
"""
line1 = models.CharField(max_length=100, blank=True)
line2 = models.CharField(max_length=100, blank=True)
city = models.CharField(max_length=100, blank=True)
state = models.CharField(max_length=100, blank=True)
country = CountryField(blank=True)
zip_code = models.CharField(max_length=20, blank=True)
def __unicode__(self):
return self.address_line1[:80]
class Meta:
abstract = True
|
efaa172668b8961734fa8a10650dc3191b4a7348
|
website/project/metadata/authorizers/__init__.py
|
website/project/metadata/authorizers/__init__.py
|
import json
import os
import logging
logger = logging.getLogger(__name__)
HERE = os.path.dirname(os.path.realpath(__file__))
groups = json.load(open('{0}/defaults.json'.format(HERE)))
try:
fp = open('{0}/local.json'.format(HERE))
except IOError:
logger.info('No local.json found to populate lists of DraftRegistrationApproval authorizers.')
for group, members in json.load(fp).iteritems():
if group not in groups:
groups[group] = members
else:
groups[group] = set(groups[group]) | set(members)
def members_for(group):
global_members = set(groups['global'])
return global_members | set(groups.get(group, []))
|
import json
import os
import logging
logger = logging.getLogger(__name__)
HERE = os.path.dirname(os.path.realpath(__file__))
groups = json.load(open('{0}/defaults.json'.format(HERE)))
fp = None
try:
fp = open('{0}/local.json'.format(HERE))
except IOError:
logger.info('No local.json found to populate lists of DraftRegistrationApproval authorizers.')
if fp:
for group, members in json.load(fp).iteritems():
if group not in groups:
groups[group] = members
else:
groups[group] = set(groups[group]) | set(members)
def members_for(group):
global_members = set(groups['global'])
return global_members | set(groups.get(group, []))
|
Allow local.json to be missing
|
Allow local.json to be missing
|
Python
|
apache-2.0
|
kch8qx/osf.io,acshi/osf.io,binoculars/osf.io,abought/osf.io,mluo613/osf.io,cslzchen/osf.io,chrisseto/osf.io,ticklemepierce/osf.io,cslzchen/osf.io,brianjgeiger/osf.io,kwierman/osf.io,brandonPurvis/osf.io,icereval/osf.io,TomBaxter/osf.io,doublebits/osf.io,mluke93/osf.io,wearpants/osf.io,alexschiller/osf.io,billyhunt/osf.io,danielneis/osf.io,rdhyee/osf.io,leb2dg/osf.io,DanielSBrown/osf.io,Ghalko/osf.io,mattclark/osf.io,rdhyee/osf.io,kwierman/osf.io,cwisecarver/osf.io,mluke93/osf.io,Johnetordoff/osf.io,GageGaskins/osf.io,abought/osf.io,RomanZWang/osf.io,acshi/osf.io,alexschiller/osf.io,cslzchen/osf.io,GageGaskins/osf.io,SSJohns/osf.io,KAsante95/osf.io,hmoco/osf.io,saradbowman/osf.io,adlius/osf.io,zamattiac/osf.io,binoculars/osf.io,monikagrabowska/osf.io,CenterForOpenScience/osf.io,DanielSBrown/osf.io,emetsger/osf.io,SSJohns/osf.io,brianjgeiger/osf.io,brandonPurvis/osf.io,zachjanicki/osf.io,samanehsan/osf.io,KAsante95/osf.io,emetsger/osf.io,TomBaxter/osf.io,monikagrabowska/osf.io,danielneis/osf.io,samanehsan/osf.io,mfraezz/osf.io,chennan47/osf.io,kch8qx/osf.io,GageGaskins/osf.io,kwierman/osf.io,asanfilippo7/osf.io,caneruguz/osf.io,acshi/osf.io,Nesiehr/osf.io,caseyrollins/osf.io,GageGaskins/osf.io,doublebits/osf.io,sloria/osf.io,adlius/osf.io,mluke93/osf.io,samanehsan/osf.io,samchrisinger/osf.io,jnayak1/osf.io,jnayak1/osf.io,samchrisinger/osf.io,TomHeatwole/osf.io,binoculars/osf.io,samchrisinger/osf.io,billyhunt/osf.io,crcresearch/osf.io,Johnetordoff/osf.io,doublebits/osf.io,RomanZWang/osf.io,KAsante95/osf.io,icereval/osf.io,amyshi188/osf.io,aaxelb/osf.io,leb2dg/osf.io,zamattiac/osf.io,monikagrabowska/osf.io,zamattiac/osf.io,chrisseto/osf.io,alexschiller/osf.io,TomHeatwole/osf.io,brandonPurvis/osf.io,zachjanicki/osf.io,alexschiller/osf.io,mluo613/osf.io,TomBaxter/osf.io,hmoco/osf.io,chrisseto/osf.io,billyhunt/osf.io,cwisecarver/osf.io,mluo613/osf.io,emetsger/osf.io,Nesiehr/osf.io,Ghalko/osf.io,HalcyonChimera/osf.io,adlius/osf.io,KAsante95/osf.io,felliott/osf.io,aaxelb/osf
.io,caneruguz/osf.io,ticklemepierce/osf.io,leb2dg/osf.io,felliott/osf.io,caseyrollins/osf.io,danielneis/osf.io,cslzchen/osf.io,chennan47/osf.io,Nesiehr/osf.io,asanfilippo7/osf.io,crcresearch/osf.io,rdhyee/osf.io,baylee-d/osf.io,alexschiller/osf.io,samanehsan/osf.io,jnayak1/osf.io,danielneis/osf.io,amyshi188/osf.io,DanielSBrown/osf.io,HalcyonChimera/osf.io,wearpants/osf.io,mluo613/osf.io,baylee-d/osf.io,crcresearch/osf.io,mluo613/osf.io,aaxelb/osf.io,laurenrevere/osf.io,erinspace/osf.io,kch8qx/osf.io,chrisseto/osf.io,chennan47/osf.io,doublebits/osf.io,monikagrabowska/osf.io,pattisdr/osf.io,laurenrevere/osf.io,caseyrollins/osf.io,icereval/osf.io,abought/osf.io,emetsger/osf.io,TomHeatwole/osf.io,brianjgeiger/osf.io,RomanZWang/osf.io,brianjgeiger/osf.io,erinspace/osf.io,caneruguz/osf.io,amyshi188/osf.io,RomanZWang/osf.io,asanfilippo7/osf.io,acshi/osf.io,hmoco/osf.io,acshi/osf.io,CenterForOpenScience/osf.io,wearpants/osf.io,zamattiac/osf.io,billyhunt/osf.io,kwierman/osf.io,amyshi188/osf.io,rdhyee/osf.io,Johnetordoff/osf.io,erinspace/osf.io,zachjanicki/osf.io,kch8qx/osf.io,doublebits/osf.io,mfraezz/osf.io,GageGaskins/osf.io,brandonPurvis/osf.io,CenterForOpenScience/osf.io,Johnetordoff/osf.io,laurenrevere/osf.io,samchrisinger/osf.io,mfraezz/osf.io,hmoco/osf.io,RomanZWang/osf.io,wearpants/osf.io,brandonPurvis/osf.io,sloria/osf.io,CenterForOpenScience/osf.io,KAsante95/osf.io,mattclark/osf.io,leb2dg/osf.io,DanielSBrown/osf.io,pattisdr/osf.io,jnayak1/osf.io,mluke93/osf.io,asanfilippo7/osf.io,adlius/osf.io,cwisecarver/osf.io,mfraezz/osf.io,Nesiehr/osf.io,caneruguz/osf.io,zachjanicki/osf.io,SSJohns/osf.io,felliott/osf.io,TomHeatwole/osf.io,mattclark/osf.io,Ghalko/osf.io,cwisecarver/osf.io,ticklemepierce/osf.io,abought/osf.io,ticklemepierce/osf.io,sloria/osf.io,billyhunt/osf.io,felliott/osf.io,SSJohns/osf.io,Ghalko/osf.io,HalcyonChimera/osf.io,pattisdr/osf.io,baylee-d/osf.io,aaxelb/osf.io,saradbowman/osf.io,monikagrabowska/osf.io,kch8qx/osf.io,HalcyonChimera/osf.io
|
import json
import os
import logging
logger = logging.getLogger(__name__)
HERE = os.path.dirname(os.path.realpath(__file__))
groups = json.load(open('{0}/defaults.json'.format(HERE)))
try:
fp = open('{0}/local.json'.format(HERE))
except IOError:
logger.info('No local.json found to populate lists of DraftRegistrationApproval authorizers.')
for group, members in json.load(fp).iteritems():
if group not in groups:
groups[group] = members
else:
groups[group] = set(groups[group]) | set(members)
def members_for(group):
global_members = set(groups['global'])
return global_members | set(groups.get(group, []))
Allow local.json to be missing
|
import json
import os
import logging
logger = logging.getLogger(__name__)
HERE = os.path.dirname(os.path.realpath(__file__))
groups = json.load(open('{0}/defaults.json'.format(HERE)))
fp = None
try:
fp = open('{0}/local.json'.format(HERE))
except IOError:
logger.info('No local.json found to populate lists of DraftRegistrationApproval authorizers.')
if fp:
for group, members in json.load(fp).iteritems():
if group not in groups:
groups[group] = members
else:
groups[group] = set(groups[group]) | set(members)
def members_for(group):
global_members = set(groups['global'])
return global_members | set(groups.get(group, []))
|
<commit_before>import json
import os
import logging
logger = logging.getLogger(__name__)
HERE = os.path.dirname(os.path.realpath(__file__))
groups = json.load(open('{0}/defaults.json'.format(HERE)))
try:
fp = open('{0}/local.json'.format(HERE))
except IOError:
logger.info('No local.json found to populate lists of DraftRegistrationApproval authorizers.')
for group, members in json.load(fp).iteritems():
if group not in groups:
groups[group] = members
else:
groups[group] = set(groups[group]) | set(members)
def members_for(group):
global_members = set(groups['global'])
return global_members | set(groups.get(group, []))
<commit_msg>Allow local.json to be missing<commit_after>
|
import json
import os
import logging
logger = logging.getLogger(__name__)
HERE = os.path.dirname(os.path.realpath(__file__))
groups = json.load(open('{0}/defaults.json'.format(HERE)))
fp = None
try:
fp = open('{0}/local.json'.format(HERE))
except IOError:
logger.info('No local.json found to populate lists of DraftRegistrationApproval authorizers.')
if fp:
for group, members in json.load(fp).iteritems():
if group not in groups:
groups[group] = members
else:
groups[group] = set(groups[group]) | set(members)
def members_for(group):
global_members = set(groups['global'])
return global_members | set(groups.get(group, []))
|
import json
import os
import logging
logger = logging.getLogger(__name__)
HERE = os.path.dirname(os.path.realpath(__file__))
groups = json.load(open('{0}/defaults.json'.format(HERE)))
try:
fp = open('{0}/local.json'.format(HERE))
except IOError:
logger.info('No local.json found to populate lists of DraftRegistrationApproval authorizers.')
for group, members in json.load(fp).iteritems():
if group not in groups:
groups[group] = members
else:
groups[group] = set(groups[group]) | set(members)
def members_for(group):
global_members = set(groups['global'])
return global_members | set(groups.get(group, []))
Allow local.json to be missingimport json
import os
import logging
logger = logging.getLogger(__name__)
HERE = os.path.dirname(os.path.realpath(__file__))
groups = json.load(open('{0}/defaults.json'.format(HERE)))
fp = None
try:
fp = open('{0}/local.json'.format(HERE))
except IOError:
logger.info('No local.json found to populate lists of DraftRegistrationApproval authorizers.')
if fp:
for group, members in json.load(fp).iteritems():
if group not in groups:
groups[group] = members
else:
groups[group] = set(groups[group]) | set(members)
def members_for(group):
global_members = set(groups['global'])
return global_members | set(groups.get(group, []))
|
<commit_before>import json
import os
import logging
logger = logging.getLogger(__name__)
HERE = os.path.dirname(os.path.realpath(__file__))
groups = json.load(open('{0}/defaults.json'.format(HERE)))
try:
fp = open('{0}/local.json'.format(HERE))
except IOError:
logger.info('No local.json found to populate lists of DraftRegistrationApproval authorizers.')
for group, members in json.load(fp).iteritems():
if group not in groups:
groups[group] = members
else:
groups[group] = set(groups[group]) | set(members)
def members_for(group):
global_members = set(groups['global'])
return global_members | set(groups.get(group, []))
<commit_msg>Allow local.json to be missing<commit_after>import json
import os
import logging
logger = logging.getLogger(__name__)
HERE = os.path.dirname(os.path.realpath(__file__))
groups = json.load(open('{0}/defaults.json'.format(HERE)))
fp = None
try:
fp = open('{0}/local.json'.format(HERE))
except IOError:
logger.info('No local.json found to populate lists of DraftRegistrationApproval authorizers.')
if fp:
for group, members in json.load(fp).iteritems():
if group not in groups:
groups[group] = members
else:
groups[group] = set(groups[group]) | set(members)
def members_for(group):
global_members = set(groups['global'])
return global_members | set(groups.get(group, []))
|
07823ae7f7368f4bc4a4e4436129319f7215150b
|
faker/utils/distribution.py
|
faker/utils/distribution.py
|
# coding=utf-8
import bisect
from faker.generator import random
def random_sample():
return random.uniform(0.0, 1.0)
def cumsum(it):
total = 0
for x in it:
total += x
yield total
def choice_distribution(a, p):
assert len(a) == len(p)
cdf = list(cumsum(p))
normal = cdf[-1]
cdf2 = [float(i) / float(normal) for i in cdf]
uniform_sample = random_sample()
idx = bisect.bisect_right(cdf2, uniform_sample)
return a[idx]
|
# coding=utf-8
import bisect
from sys import version_info
from faker.generator import random
def random_sample():
return random.uniform(0.0, 1.0)
def cumsum(it):
total = 0
for x in it:
total += x
yield total
def choice_distribution(a, p):
assert len(a) == len(p)
if version_info.major >= 3 and version_info.minor >= 6:
from random import choices
return choices(a, weights=p)[0]
else:
cdf = list(cumsum(p))
normal = cdf[-1]
cdf2 = [float(i) / float(normal) for i in cdf]
uniform_sample = random_sample()
idx = bisect.bisect_right(cdf2, uniform_sample)
return a[idx]
|
Use random.choices when available for better performance
|
Use random.choices when available for better performance
|
Python
|
mit
|
joke2k/faker,joke2k/faker,danhuss/faker
|
# coding=utf-8
import bisect
from faker.generator import random
def random_sample():
return random.uniform(0.0, 1.0)
def cumsum(it):
total = 0
for x in it:
total += x
yield total
def choice_distribution(a, p):
assert len(a) == len(p)
cdf = list(cumsum(p))
normal = cdf[-1]
cdf2 = [float(i) / float(normal) for i in cdf]
uniform_sample = random_sample()
idx = bisect.bisect_right(cdf2, uniform_sample)
return a[idx]
Use random.choices when available for better performance
|
# coding=utf-8
import bisect
from sys import version_info
from faker.generator import random
def random_sample():
return random.uniform(0.0, 1.0)
def cumsum(it):
total = 0
for x in it:
total += x
yield total
def choice_distribution(a, p):
assert len(a) == len(p)
if version_info.major >= 3 and version_info.minor >= 6:
from random import choices
return choices(a, weights=p)[0]
else:
cdf = list(cumsum(p))
normal = cdf[-1]
cdf2 = [float(i) / float(normal) for i in cdf]
uniform_sample = random_sample()
idx = bisect.bisect_right(cdf2, uniform_sample)
return a[idx]
|
<commit_before># coding=utf-8
import bisect
from faker.generator import random
def random_sample():
return random.uniform(0.0, 1.0)
def cumsum(it):
total = 0
for x in it:
total += x
yield total
def choice_distribution(a, p):
assert len(a) == len(p)
cdf = list(cumsum(p))
normal = cdf[-1]
cdf2 = [float(i) / float(normal) for i in cdf]
uniform_sample = random_sample()
idx = bisect.bisect_right(cdf2, uniform_sample)
return a[idx]
<commit_msg>Use random.choices when available for better performance<commit_after>
|
# coding=utf-8
import bisect
from sys import version_info
from faker.generator import random
def random_sample():
return random.uniform(0.0, 1.0)
def cumsum(it):
total = 0
for x in it:
total += x
yield total
def choice_distribution(a, p):
assert len(a) == len(p)
if version_info.major >= 3 and version_info.minor >= 6:
from random import choices
return choices(a, weights=p)[0]
else:
cdf = list(cumsum(p))
normal = cdf[-1]
cdf2 = [float(i) / float(normal) for i in cdf]
uniform_sample = random_sample()
idx = bisect.bisect_right(cdf2, uniform_sample)
return a[idx]
|
# coding=utf-8
import bisect
from faker.generator import random
def random_sample():
return random.uniform(0.0, 1.0)
def cumsum(it):
total = 0
for x in it:
total += x
yield total
def choice_distribution(a, p):
assert len(a) == len(p)
cdf = list(cumsum(p))
normal = cdf[-1]
cdf2 = [float(i) / float(normal) for i in cdf]
uniform_sample = random_sample()
idx = bisect.bisect_right(cdf2, uniform_sample)
return a[idx]
Use random.choices when available for better performance# coding=utf-8
import bisect
from sys import version_info
from faker.generator import random
def random_sample():
return random.uniform(0.0, 1.0)
def cumsum(it):
total = 0
for x in it:
total += x
yield total
def choice_distribution(a, p):
assert len(a) == len(p)
if version_info.major >= 3 and version_info.minor >= 6:
from random import choices
return choices(a, weights=p)[0]
else:
cdf = list(cumsum(p))
normal = cdf[-1]
cdf2 = [float(i) / float(normal) for i in cdf]
uniform_sample = random_sample()
idx = bisect.bisect_right(cdf2, uniform_sample)
return a[idx]
|
<commit_before># coding=utf-8
import bisect
from faker.generator import random
def random_sample():
return random.uniform(0.0, 1.0)
def cumsum(it):
total = 0
for x in it:
total += x
yield total
def choice_distribution(a, p):
assert len(a) == len(p)
cdf = list(cumsum(p))
normal = cdf[-1]
cdf2 = [float(i) / float(normal) for i in cdf]
uniform_sample = random_sample()
idx = bisect.bisect_right(cdf2, uniform_sample)
return a[idx]
<commit_msg>Use random.choices when available for better performance<commit_after># coding=utf-8
import bisect
from sys import version_info
from faker.generator import random
def random_sample():
return random.uniform(0.0, 1.0)
def cumsum(it):
total = 0
for x in it:
total += x
yield total
def choice_distribution(a, p):
assert len(a) == len(p)
if version_info.major >= 3 and version_info.minor >= 6:
from random import choices
return choices(a, weights=p)[0]
else:
cdf = list(cumsum(p))
normal = cdf[-1]
cdf2 = [float(i) / float(normal) for i in cdf]
uniform_sample = random_sample()
idx = bisect.bisect_right(cdf2, uniform_sample)
return a[idx]
|
4e0db2766a719a347cbdf5b3e2fadd5a807d4a83
|
tests/ipy_test_runner.py
|
tests/ipy_test_runner.py
|
from __future__ import print_function
import os
import pytest
HERE = os.path.dirname(__file__)
if __name__ == '__main__':
# Fake Rhino modules
pytest.load_fake_module('Rhino')
pytest.load_fake_module('Rhino.Geometry', fake_types=['RTree', 'Sphere', 'Point3d'])
pytest.run(HERE, ['tests/compas/files/test_base_reader.py'])
|
from __future__ import print_function
import os
import pytest
HERE = os.path.dirname(__file__)
if __name__ == '__main__':
# Fake Rhino modules
pytest.load_fake_module('Rhino')
pytest.load_fake_module('Rhino.Geometry', fake_types=['RTree', 'Sphere', 'Point3d'])
pytest.run(HERE)
|
Remove deleted test from ignore list
|
Remove deleted test from ignore list
|
Python
|
mit
|
compas-dev/compas
|
from __future__ import print_function
import os
import pytest
HERE = os.path.dirname(__file__)
if __name__ == '__main__':
# Fake Rhino modules
pytest.load_fake_module('Rhino')
pytest.load_fake_module('Rhino.Geometry', fake_types=['RTree', 'Sphere', 'Point3d'])
pytest.run(HERE, ['tests/compas/files/test_base_reader.py'])
Remove deleted test from ignore list
|
from __future__ import print_function
import os
import pytest
HERE = os.path.dirname(__file__)
if __name__ == '__main__':
# Fake Rhino modules
pytest.load_fake_module('Rhino')
pytest.load_fake_module('Rhino.Geometry', fake_types=['RTree', 'Sphere', 'Point3d'])
pytest.run(HERE)
|
<commit_before>from __future__ import print_function
import os
import pytest
HERE = os.path.dirname(__file__)
if __name__ == '__main__':
# Fake Rhino modules
pytest.load_fake_module('Rhino')
pytest.load_fake_module('Rhino.Geometry', fake_types=['RTree', 'Sphere', 'Point3d'])
pytest.run(HERE, ['tests/compas/files/test_base_reader.py'])
<commit_msg>Remove deleted test from ignore list<commit_after>
|
from __future__ import print_function
import os
import pytest
HERE = os.path.dirname(__file__)
if __name__ == '__main__':
# Fake Rhino modules
pytest.load_fake_module('Rhino')
pytest.load_fake_module('Rhino.Geometry', fake_types=['RTree', 'Sphere', 'Point3d'])
pytest.run(HERE)
|
from __future__ import print_function
import os
import pytest
HERE = os.path.dirname(__file__)
if __name__ == '__main__':
# Fake Rhino modules
pytest.load_fake_module('Rhino')
pytest.load_fake_module('Rhino.Geometry', fake_types=['RTree', 'Sphere', 'Point3d'])
pytest.run(HERE, ['tests/compas/files/test_base_reader.py'])
Remove deleted test from ignore listfrom __future__ import print_function
import os
import pytest
HERE = os.path.dirname(__file__)
if __name__ == '__main__':
# Fake Rhino modules
pytest.load_fake_module('Rhino')
pytest.load_fake_module('Rhino.Geometry', fake_types=['RTree', 'Sphere', 'Point3d'])
pytest.run(HERE)
|
<commit_before>from __future__ import print_function
import os
import pytest
HERE = os.path.dirname(__file__)
if __name__ == '__main__':
# Fake Rhino modules
pytest.load_fake_module('Rhino')
pytest.load_fake_module('Rhino.Geometry', fake_types=['RTree', 'Sphere', 'Point3d'])
pytest.run(HERE, ['tests/compas/files/test_base_reader.py'])
<commit_msg>Remove deleted test from ignore list<commit_after>from __future__ import print_function
import os
import pytest
HERE = os.path.dirname(__file__)
if __name__ == '__main__':
# Fake Rhino modules
pytest.load_fake_module('Rhino')
pytest.load_fake_module('Rhino.Geometry', fake_types=['RTree', 'Sphere', 'Point3d'])
pytest.run(HERE)
|
6290b81234f92073262a3fa784ae4e94f16192a8
|
tests/test_autoconfig.py
|
tests/test_autoconfig.py
|
# coding: utf-8
import os
import pytest
from mock import patch
from decouple import AutoConfig
def test_autoconfig_env():
config = AutoConfig()
path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'env', 'project')
with patch.object(config, '_caller_path', return_value=path):
assert 'ENV' == config('KEY')
def test_autoconfig_ini():
config = AutoConfig()
path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'ini', 'project')
with patch.object(config, '_caller_path', return_value=path):
assert 'INI' == config('KEY')
def test_autoconfig_none():
os.environ['KeyFallback'] = 'On'
config = AutoConfig()
path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'none')
with patch('os.path.exists', return_value=False):
assert True == config('KeyFallback', cast=bool)
del os.environ['KeyFallback']
|
# coding: utf-8
import os
import pytest
from mock import patch
from decouple import AutoConfig
def test_autoconfig_env():
config = AutoConfig()
path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'env', 'project')
with patch.object(config, '_caller_path', return_value=path):
assert 'ENV' == config('KEY')
def test_autoconfig_ini():
config = AutoConfig()
path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'ini', 'project')
with patch.object(config, '_caller_path', return_value=path):
assert 'INI' == config('KEY')
def test_autoconfig_none():
os.environ['KeyFallback'] = 'On'
config = AutoConfig()
path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'none')
with patch('os.path.exists', return_value=False):
assert True == config('KeyFallback', cast=bool)
del os.environ['KeyFallback']
def test_autoconfig_exception():
os.environ['KeyFallback'] = 'On'
config = AutoConfig()
with patch('os.path.exists', side_effect=Exception('PermissionDenied')):
assert True == config('KeyFallback', cast=bool)
del os.environ['KeyFallback']
|
Test we have access to envvar when we have no file
|
Test we have access to envvar when we have no file
|
Python
|
mit
|
henriquebastos/python-decouple,flaviohenriqu/python-decouple,mrkschan/python-decouple,henriquebastos/django-decouple,liukaijv/python-decouple
|
# coding: utf-8
import os
import pytest
from mock import patch
from decouple import AutoConfig
def test_autoconfig_env():
config = AutoConfig()
path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'env', 'project')
with patch.object(config, '_caller_path', return_value=path):
assert 'ENV' == config('KEY')
def test_autoconfig_ini():
config = AutoConfig()
path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'ini', 'project')
with patch.object(config, '_caller_path', return_value=path):
assert 'INI' == config('KEY')
def test_autoconfig_none():
os.environ['KeyFallback'] = 'On'
config = AutoConfig()
path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'none')
with patch('os.path.exists', return_value=False):
assert True == config('KeyFallback', cast=bool)
del os.environ['KeyFallback']
Test we have access to envvar when we have no file
|
# coding: utf-8
import os
import pytest
from mock import patch
from decouple import AutoConfig
def test_autoconfig_env():
config = AutoConfig()
path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'env', 'project')
with patch.object(config, '_caller_path', return_value=path):
assert 'ENV' == config('KEY')
def test_autoconfig_ini():
config = AutoConfig()
path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'ini', 'project')
with patch.object(config, '_caller_path', return_value=path):
assert 'INI' == config('KEY')
def test_autoconfig_none():
os.environ['KeyFallback'] = 'On'
config = AutoConfig()
path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'none')
with patch('os.path.exists', return_value=False):
assert True == config('KeyFallback', cast=bool)
del os.environ['KeyFallback']
def test_autoconfig_exception():
os.environ['KeyFallback'] = 'On'
config = AutoConfig()
with patch('os.path.exists', side_effect=Exception('PermissionDenied')):
assert True == config('KeyFallback', cast=bool)
del os.environ['KeyFallback']
|
<commit_before># coding: utf-8
import os
import pytest
from mock import patch
from decouple import AutoConfig
def test_autoconfig_env():
config = AutoConfig()
path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'env', 'project')
with patch.object(config, '_caller_path', return_value=path):
assert 'ENV' == config('KEY')
def test_autoconfig_ini():
config = AutoConfig()
path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'ini', 'project')
with patch.object(config, '_caller_path', return_value=path):
assert 'INI' == config('KEY')
def test_autoconfig_none():
os.environ['KeyFallback'] = 'On'
config = AutoConfig()
path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'none')
with patch('os.path.exists', return_value=False):
assert True == config('KeyFallback', cast=bool)
del os.environ['KeyFallback']
<commit_msg>Test we have access to envvar when we have no file<commit_after>
|
# coding: utf-8
import os
import pytest
from mock import patch
from decouple import AutoConfig
def test_autoconfig_env():
config = AutoConfig()
path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'env', 'project')
with patch.object(config, '_caller_path', return_value=path):
assert 'ENV' == config('KEY')
def test_autoconfig_ini():
config = AutoConfig()
path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'ini', 'project')
with patch.object(config, '_caller_path', return_value=path):
assert 'INI' == config('KEY')
def test_autoconfig_none():
os.environ['KeyFallback'] = 'On'
config = AutoConfig()
path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'none')
with patch('os.path.exists', return_value=False):
assert True == config('KeyFallback', cast=bool)
del os.environ['KeyFallback']
def test_autoconfig_exception():
os.environ['KeyFallback'] = 'On'
config = AutoConfig()
with patch('os.path.exists', side_effect=Exception('PermissionDenied')):
assert True == config('KeyFallback', cast=bool)
del os.environ['KeyFallback']
|
# coding: utf-8
import os
import pytest
from mock import patch
from decouple import AutoConfig
def test_autoconfig_env():
config = AutoConfig()
path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'env', 'project')
with patch.object(config, '_caller_path', return_value=path):
assert 'ENV' == config('KEY')
def test_autoconfig_ini():
config = AutoConfig()
path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'ini', 'project')
with patch.object(config, '_caller_path', return_value=path):
assert 'INI' == config('KEY')
def test_autoconfig_none():
os.environ['KeyFallback'] = 'On'
config = AutoConfig()
path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'none')
with patch('os.path.exists', return_value=False):
assert True == config('KeyFallback', cast=bool)
del os.environ['KeyFallback']
Test we have access to envvar when we have no file# coding: utf-8
import os
import pytest
from mock import patch
from decouple import AutoConfig
def test_autoconfig_env():
config = AutoConfig()
path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'env', 'project')
with patch.object(config, '_caller_path', return_value=path):
assert 'ENV' == config('KEY')
def test_autoconfig_ini():
config = AutoConfig()
path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'ini', 'project')
with patch.object(config, '_caller_path', return_value=path):
assert 'INI' == config('KEY')
def test_autoconfig_none():
os.environ['KeyFallback'] = 'On'
config = AutoConfig()
path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'none')
with patch('os.path.exists', return_value=False):
assert True == config('KeyFallback', cast=bool)
del os.environ['KeyFallback']
def test_autoconfig_exception():
os.environ['KeyFallback'] = 'On'
config = AutoConfig()
with patch('os.path.exists', side_effect=Exception('PermissionDenied')):
assert True == config('KeyFallback', cast=bool)
del os.environ['KeyFallback']
|
<commit_before># coding: utf-8
import os
import pytest
from mock import patch
from decouple import AutoConfig
def test_autoconfig_env():
config = AutoConfig()
path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'env', 'project')
with patch.object(config, '_caller_path', return_value=path):
assert 'ENV' == config('KEY')
def test_autoconfig_ini():
config = AutoConfig()
path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'ini', 'project')
with patch.object(config, '_caller_path', return_value=path):
assert 'INI' == config('KEY')
def test_autoconfig_none():
os.environ['KeyFallback'] = 'On'
config = AutoConfig()
path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'none')
with patch('os.path.exists', return_value=False):
assert True == config('KeyFallback', cast=bool)
del os.environ['KeyFallback']
<commit_msg>Test we have access to envvar when we have no file<commit_after># coding: utf-8
import os
import pytest
from mock import patch
from decouple import AutoConfig
def test_autoconfig_env():
config = AutoConfig()
path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'env', 'project')
with patch.object(config, '_caller_path', return_value=path):
assert 'ENV' == config('KEY')
def test_autoconfig_ini():
config = AutoConfig()
path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'ini', 'project')
with patch.object(config, '_caller_path', return_value=path):
assert 'INI' == config('KEY')
def test_autoconfig_none():
os.environ['KeyFallback'] = 'On'
config = AutoConfig()
path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'none')
with patch('os.path.exists', return_value=False):
assert True == config('KeyFallback', cast=bool)
del os.environ['KeyFallback']
def test_autoconfig_exception():
os.environ['KeyFallback'] = 'On'
config = AutoConfig()
with patch('os.path.exists', side_effect=Exception('PermissionDenied')):
assert True == config('KeyFallback', cast=bool)
del os.environ['KeyFallback']
|
5251c84a8e409a279cf8dc205673d57406be0782
|
tests/test_serializer.py
|
tests/test_serializer.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
from pprint import pprint
from django.test import TestCase
from mock import Mock
from rest_framework_webdav.serializers import *
from rest_framework_webdav.resources import *
from .resources import MockResource
class TestPropfindSerializer(TestCase):
def setUp(self):
pass
def test_request(self):
pass
class TestMultiStatusSerializer(TestCase):
def setUp(self):
#FIXME use proper mock objects
class TestDirFSResource(MetaEtagMixIn, BaseFSDavResource):
root = os.path.dirname(os.path.realpath(__file__))
def __str__(self):
return "<Resource object for %s>" % self.get_abs_path()
self.resource = TestDirFSResource('/')
def test_1(self):
# TODO proper testing, currently this is used to check the output by eye
expect = 'asdasd'
ser1 = MultistatusSerializer(instance=self.resource, context={
'depth': 1,
})
rep1 = ser1.data
pprint(rep1)
print('-----------')
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
from pprint import pprint
from django.test import TestCase
from mock import Mock
from rest_framework_webdav.serializers import *
from rest_framework_webdav.resources import *
from .resources import MockResource
class TestPropfindSerializer(TestCase):
def setUp(self):
pass
def test_request(self):
pass
class TestResponseSerializers(TestCase):
def setUp(self):
#FIXME use proper mock objects
class TestDirFSResource(MetaEtagMixIn, BaseFSDavResource):
root = os.path.dirname(os.path.realpath(__file__))
def __str__(self):
return "<Resource object for %s>" % self.get_abs_path()
self.resource = TestDirFSResource('/')
ser1 = MultistatusSerializer(instance=self.resource, context={
'depth': 1,
})
self.rep1 = ser1.data
def test_multistatus(self):
self.assertIsNone(self.rep1['d:responsedescription'])
self.assertIsInstance(self.rep1['d:responses'], list)
# print actual data for humans to check
pprint(self.rep1)
print('-----------')
def test_responses(self):
self.assertEqual(self.rep1['d:responses'][0]['d:href'], '/')
self.assertIsInstance(self.rep1['d:responses'][0]['d:propstat'], dict)
def test_propstat(self):
self.assertEqual(self.rep1['d:responses'][0]['d:href'], '/')
self.assertIsInstance(self.rep1['d:responses'][0]['d:propstat'], dict)
|
Make tests a little better
|
Make tests a little better
|
Python
|
agpl-3.0
|
pellaeon/django-rest-framework-webdav
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
from pprint import pprint
from django.test import TestCase
from mock import Mock
from rest_framework_webdav.serializers import *
from rest_framework_webdav.resources import *
from .resources import MockResource
class TestPropfindSerializer(TestCase):
def setUp(self):
pass
def test_request(self):
pass
class TestMultiStatusSerializer(TestCase):
def setUp(self):
#FIXME use proper mock objects
class TestDirFSResource(MetaEtagMixIn, BaseFSDavResource):
root = os.path.dirname(os.path.realpath(__file__))
def __str__(self):
return "<Resource object for %s>" % self.get_abs_path()
self.resource = TestDirFSResource('/')
def test_1(self):
# TODO proper testing, currently this is used to check the output by eye
expect = 'asdasd'
ser1 = MultistatusSerializer(instance=self.resource, context={
'depth': 1,
})
rep1 = ser1.data
pprint(rep1)
print('-----------')
Make tests a little better
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
from pprint import pprint
from django.test import TestCase
from mock import Mock
from rest_framework_webdav.serializers import *
from rest_framework_webdav.resources import *
from .resources import MockResource
class TestPropfindSerializer(TestCase):
def setUp(self):
pass
def test_request(self):
pass
class TestResponseSerializers(TestCase):
def setUp(self):
#FIXME use proper mock objects
class TestDirFSResource(MetaEtagMixIn, BaseFSDavResource):
root = os.path.dirname(os.path.realpath(__file__))
def __str__(self):
return "<Resource object for %s>" % self.get_abs_path()
self.resource = TestDirFSResource('/')
ser1 = MultistatusSerializer(instance=self.resource, context={
'depth': 1,
})
self.rep1 = ser1.data
def test_multistatus(self):
self.assertIsNone(self.rep1['d:responsedescription'])
self.assertIsInstance(self.rep1['d:responses'], list)
# print actual data for humans to check
pprint(self.rep1)
print('-----------')
def test_responses(self):
self.assertEqual(self.rep1['d:responses'][0]['d:href'], '/')
self.assertIsInstance(self.rep1['d:responses'][0]['d:propstat'], dict)
def test_propstat(self):
self.assertEqual(self.rep1['d:responses'][0]['d:href'], '/')
self.assertIsInstance(self.rep1['d:responses'][0]['d:propstat'], dict)
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
from pprint import pprint
from django.test import TestCase
from mock import Mock
from rest_framework_webdav.serializers import *
from rest_framework_webdav.resources import *
from .resources import MockResource
class TestPropfindSerializer(TestCase):
def setUp(self):
pass
def test_request(self):
pass
class TestMultiStatusSerializer(TestCase):
def setUp(self):
#FIXME use proper mock objects
class TestDirFSResource(MetaEtagMixIn, BaseFSDavResource):
root = os.path.dirname(os.path.realpath(__file__))
def __str__(self):
return "<Resource object for %s>" % self.get_abs_path()
self.resource = TestDirFSResource('/')
def test_1(self):
# TODO proper testing, currently this is used to check the output by eye
expect = 'asdasd'
ser1 = MultistatusSerializer(instance=self.resource, context={
'depth': 1,
})
rep1 = ser1.data
pprint(rep1)
print('-----------')
<commit_msg>Make tests a little better<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
from pprint import pprint
from django.test import TestCase
from mock import Mock
from rest_framework_webdav.serializers import *
from rest_framework_webdav.resources import *
from .resources import MockResource
class TestPropfindSerializer(TestCase):
def setUp(self):
pass
def test_request(self):
pass
class TestResponseSerializers(TestCase):
def setUp(self):
#FIXME use proper mock objects
class TestDirFSResource(MetaEtagMixIn, BaseFSDavResource):
root = os.path.dirname(os.path.realpath(__file__))
def __str__(self):
return "<Resource object for %s>" % self.get_abs_path()
self.resource = TestDirFSResource('/')
ser1 = MultistatusSerializer(instance=self.resource, context={
'depth': 1,
})
self.rep1 = ser1.data
def test_multistatus(self):
self.assertIsNone(self.rep1['d:responsedescription'])
self.assertIsInstance(self.rep1['d:responses'], list)
# print actual data for humans to check
pprint(self.rep1)
print('-----------')
def test_responses(self):
self.assertEqual(self.rep1['d:responses'][0]['d:href'], '/')
self.assertIsInstance(self.rep1['d:responses'][0]['d:propstat'], dict)
def test_propstat(self):
self.assertEqual(self.rep1['d:responses'][0]['d:href'], '/')
self.assertIsInstance(self.rep1['d:responses'][0]['d:propstat'], dict)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
from pprint import pprint
from django.test import TestCase
from mock import Mock
from rest_framework_webdav.serializers import *
from rest_framework_webdav.resources import *
from .resources import MockResource
class TestPropfindSerializer(TestCase):
def setUp(self):
pass
def test_request(self):
pass
class TestMultiStatusSerializer(TestCase):
def setUp(self):
#FIXME use proper mock objects
class TestDirFSResource(MetaEtagMixIn, BaseFSDavResource):
root = os.path.dirname(os.path.realpath(__file__))
def __str__(self):
return "<Resource object for %s>" % self.get_abs_path()
self.resource = TestDirFSResource('/')
def test_1(self):
# TODO proper testing, currently this is used to check the output by eye
expect = 'asdasd'
ser1 = MultistatusSerializer(instance=self.resource, context={
'depth': 1,
})
rep1 = ser1.data
pprint(rep1)
print('-----------')
Make tests a little better# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
from pprint import pprint
from django.test import TestCase
from mock import Mock
from rest_framework_webdav.serializers import *
from rest_framework_webdav.resources import *
from .resources import MockResource
class TestPropfindSerializer(TestCase):
def setUp(self):
pass
def test_request(self):
pass
class TestResponseSerializers(TestCase):
def setUp(self):
#FIXME use proper mock objects
class TestDirFSResource(MetaEtagMixIn, BaseFSDavResource):
root = os.path.dirname(os.path.realpath(__file__))
def __str__(self):
return "<Resource object for %s>" % self.get_abs_path()
self.resource = TestDirFSResource('/')
ser1 = MultistatusSerializer(instance=self.resource, context={
'depth': 1,
})
self.rep1 = ser1.data
def test_multistatus(self):
self.assertIsNone(self.rep1['d:responsedescription'])
self.assertIsInstance(self.rep1['d:responses'], list)
# print actual data for humans to check
pprint(self.rep1)
print('-----------')
def test_responses(self):
self.assertEqual(self.rep1['d:responses'][0]['d:href'], '/')
self.assertIsInstance(self.rep1['d:responses'][0]['d:propstat'], dict)
def test_propstat(self):
self.assertEqual(self.rep1['d:responses'][0]['d:href'], '/')
self.assertIsInstance(self.rep1['d:responses'][0]['d:propstat'], dict)
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
from pprint import pprint
from django.test import TestCase
from mock import Mock
from rest_framework_webdav.serializers import *
from rest_framework_webdav.resources import *
from .resources import MockResource
class TestPropfindSerializer(TestCase):
def setUp(self):
pass
def test_request(self):
pass
class TestMultiStatusSerializer(TestCase):
def setUp(self):
#FIXME use proper mock objects
class TestDirFSResource(MetaEtagMixIn, BaseFSDavResource):
root = os.path.dirname(os.path.realpath(__file__))
def __str__(self):
return "<Resource object for %s>" % self.get_abs_path()
self.resource = TestDirFSResource('/')
def test_1(self):
# TODO proper testing, currently this is used to check the output by eye
expect = 'asdasd'
ser1 = MultistatusSerializer(instance=self.resource, context={
'depth': 1,
})
rep1 = ser1.data
pprint(rep1)
print('-----------')
<commit_msg>Make tests a little better<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
from pprint import pprint
from django.test import TestCase
from mock import Mock
from rest_framework_webdav.serializers import *
from rest_framework_webdav.resources import *
from .resources import MockResource
class TestPropfindSerializer(TestCase):
def setUp(self):
pass
def test_request(self):
pass
class TestResponseSerializers(TestCase):
def setUp(self):
#FIXME use proper mock objects
class TestDirFSResource(MetaEtagMixIn, BaseFSDavResource):
root = os.path.dirname(os.path.realpath(__file__))
def __str__(self):
return "<Resource object for %s>" % self.get_abs_path()
self.resource = TestDirFSResource('/')
ser1 = MultistatusSerializer(instance=self.resource, context={
'depth': 1,
})
self.rep1 = ser1.data
def test_multistatus(self):
self.assertIsNone(self.rep1['d:responsedescription'])
self.assertIsInstance(self.rep1['d:responses'], list)
# print actual data for humans to check
pprint(self.rep1)
print('-----------')
def test_responses(self):
self.assertEqual(self.rep1['d:responses'][0]['d:href'], '/')
self.assertIsInstance(self.rep1['d:responses'][0]['d:propstat'], dict)
def test_propstat(self):
self.assertEqual(self.rep1['d:responses'][0]['d:href'], '/')
self.assertIsInstance(self.rep1['d:responses'][0]['d:propstat'], dict)
|
d976bc3b992811911e5b28cf29df1df936ca7cc5
|
localtv/subsite/__init__.py
|
localtv/subsite/__init__.py
|
from django.conf import settings
from django.contrib.sites.models import Site
from localtv import models
def context_processor(request):
sitelocation = models.SiteLocation.objects.get(
site=Site.objects.get_current())
display_submit_button = sitelocation.display_submit_button
if display_submit_button:
if request.user.is_anonymous() and \
sitelocation.submission_requires_login:
display_submit_button = False
else:
if sitelocation.user_is_admin(request.user):
display_submit_button = True
return {
'sitelocation': sitelocation,
'request': request,
'user_is_admin': sitelocation.user_is_admin(request.user),
'display_submit_button': display_submit_button,
'settings': settings,
'VIDEO_STATUS_UNAPPROVED': models.VIDEO_STATUS_UNAPPROVED,
'VIDEO_STATUS_ACTIVE': models.VIDEO_STATUS_ACTIVE,
'VIDEO_STATUS_REJECTED': models.VIDEO_STATUS_REJECTED}
|
import urlparse
from django.conf import settings
from django.contrib.sites.models import Site
from localtv import models
class FixAJAXMiddleware:
"""
Firefox doesn't handle redirects in XMLHttpRequests correctly (it doesn't
set X-Requested-With) so we fake it with a GET argument.
"""
def process_request(self, request):
if 'from_ajax' in request.GET and not request.is_ajax():
request.META['HTTP_X_REQUESTED_WITH'] = 'XMLHttpRequest'
def process_response(self, request, response):
if 300 <= response.status_code < 400 and request.is_ajax():
parts = list(urlparse.urlparse(response['Location']))
if parts[4]: # query
parts[4] = parts[4] + '&from_ajax'
else:
parts[4] = 'from_ajax'
response['Location'] = urlparse.urlunparse(parts)
return response
def context_processor(request):
sitelocation = models.SiteLocation.objects.get(
site=Site.objects.get_current())
display_submit_button = sitelocation.display_submit_button
if display_submit_button:
if request.user.is_anonymous() and \
sitelocation.submission_requires_login:
display_submit_button = False
else:
if sitelocation.user_is_admin(request.user):
display_submit_button = True
return {
'sitelocation': sitelocation,
'request': request,
'user_is_admin': sitelocation.user_is_admin(request.user),
'display_submit_button': display_submit_button,
'settings': settings,
'VIDEO_STATUS_UNAPPROVED': models.VIDEO_STATUS_UNAPPROVED,
'VIDEO_STATUS_ACTIVE': models.VIDEO_STATUS_ACTIVE,
'VIDEO_STATUS_REJECTED': models.VIDEO_STATUS_REJECTED}
|
Add a middleware class to fix Firefox's bad AJAX redirect handling
|
Add a middleware class to fix Firefox's bad AJAX redirect handling
|
Python
|
agpl-3.0
|
pculture/mirocommunity,natea/Miro-Community,pculture/mirocommunity,natea/Miro-Community,pculture/mirocommunity,pculture/mirocommunity
|
from django.conf import settings
from django.contrib.sites.models import Site
from localtv import models
def context_processor(request):
sitelocation = models.SiteLocation.objects.get(
site=Site.objects.get_current())
display_submit_button = sitelocation.display_submit_button
if display_submit_button:
if request.user.is_anonymous() and \
sitelocation.submission_requires_login:
display_submit_button = False
else:
if sitelocation.user_is_admin(request.user):
display_submit_button = True
return {
'sitelocation': sitelocation,
'request': request,
'user_is_admin': sitelocation.user_is_admin(request.user),
'display_submit_button': display_submit_button,
'settings': settings,
'VIDEO_STATUS_UNAPPROVED': models.VIDEO_STATUS_UNAPPROVED,
'VIDEO_STATUS_ACTIVE': models.VIDEO_STATUS_ACTIVE,
'VIDEO_STATUS_REJECTED': models.VIDEO_STATUS_REJECTED}
Add a middleware class to fix Firefox's bad AJAX redirect handling
|
import urlparse
from django.conf import settings
from django.contrib.sites.models import Site
from localtv import models
class FixAJAXMiddleware:
"""
Firefox doesn't handle redirects in XMLHttpRequests correctly (it doesn't
set X-Requested-With) so we fake it with a GET argument.
"""
def process_request(self, request):
if 'from_ajax' in request.GET and not request.is_ajax():
request.META['HTTP_X_REQUESTED_WITH'] = 'XMLHttpRequest'
def process_response(self, request, response):
if 300 <= response.status_code < 400 and request.is_ajax():
parts = list(urlparse.urlparse(response['Location']))
if parts[4]: # query
parts[4] = parts[4] + '&from_ajax'
else:
parts[4] = 'from_ajax'
response['Location'] = urlparse.urlunparse(parts)
return response
def context_processor(request):
sitelocation = models.SiteLocation.objects.get(
site=Site.objects.get_current())
display_submit_button = sitelocation.display_submit_button
if display_submit_button:
if request.user.is_anonymous() and \
sitelocation.submission_requires_login:
display_submit_button = False
else:
if sitelocation.user_is_admin(request.user):
display_submit_button = True
return {
'sitelocation': sitelocation,
'request': request,
'user_is_admin': sitelocation.user_is_admin(request.user),
'display_submit_button': display_submit_button,
'settings': settings,
'VIDEO_STATUS_UNAPPROVED': models.VIDEO_STATUS_UNAPPROVED,
'VIDEO_STATUS_ACTIVE': models.VIDEO_STATUS_ACTIVE,
'VIDEO_STATUS_REJECTED': models.VIDEO_STATUS_REJECTED}
|
<commit_before>from django.conf import settings
from django.contrib.sites.models import Site
from localtv import models
def context_processor(request):
sitelocation = models.SiteLocation.objects.get(
site=Site.objects.get_current())
display_submit_button = sitelocation.display_submit_button
if display_submit_button:
if request.user.is_anonymous() and \
sitelocation.submission_requires_login:
display_submit_button = False
else:
if sitelocation.user_is_admin(request.user):
display_submit_button = True
return {
'sitelocation': sitelocation,
'request': request,
'user_is_admin': sitelocation.user_is_admin(request.user),
'display_submit_button': display_submit_button,
'settings': settings,
'VIDEO_STATUS_UNAPPROVED': models.VIDEO_STATUS_UNAPPROVED,
'VIDEO_STATUS_ACTIVE': models.VIDEO_STATUS_ACTIVE,
'VIDEO_STATUS_REJECTED': models.VIDEO_STATUS_REJECTED}
<commit_msg>Add a middleware class to fix Firefox's bad AJAX redirect handling<commit_after>
|
import urlparse
from django.conf import settings
from django.contrib.sites.models import Site
from localtv import models
class FixAJAXMiddleware:
"""
Firefox doesn't handle redirects in XMLHttpRequests correctly (it doesn't
set X-Requested-With) so we fake it with a GET argument.
"""
def process_request(self, request):
if 'from_ajax' in request.GET and not request.is_ajax():
request.META['HTTP_X_REQUESTED_WITH'] = 'XMLHttpRequest'
def process_response(self, request, response):
if 300 <= response.status_code < 400 and request.is_ajax():
parts = list(urlparse.urlparse(response['Location']))
if parts[4]: # query
parts[4] = parts[4] + '&from_ajax'
else:
parts[4] = 'from_ajax'
response['Location'] = urlparse.urlunparse(parts)
return response
def context_processor(request):
sitelocation = models.SiteLocation.objects.get(
site=Site.objects.get_current())
display_submit_button = sitelocation.display_submit_button
if display_submit_button:
if request.user.is_anonymous() and \
sitelocation.submission_requires_login:
display_submit_button = False
else:
if sitelocation.user_is_admin(request.user):
display_submit_button = True
return {
'sitelocation': sitelocation,
'request': request,
'user_is_admin': sitelocation.user_is_admin(request.user),
'display_submit_button': display_submit_button,
'settings': settings,
'VIDEO_STATUS_UNAPPROVED': models.VIDEO_STATUS_UNAPPROVED,
'VIDEO_STATUS_ACTIVE': models.VIDEO_STATUS_ACTIVE,
'VIDEO_STATUS_REJECTED': models.VIDEO_STATUS_REJECTED}
|
from django.conf import settings
from django.contrib.sites.models import Site
from localtv import models
def context_processor(request):
sitelocation = models.SiteLocation.objects.get(
site=Site.objects.get_current())
display_submit_button = sitelocation.display_submit_button
if display_submit_button:
if request.user.is_anonymous() and \
sitelocation.submission_requires_login:
display_submit_button = False
else:
if sitelocation.user_is_admin(request.user):
display_submit_button = True
return {
'sitelocation': sitelocation,
'request': request,
'user_is_admin': sitelocation.user_is_admin(request.user),
'display_submit_button': display_submit_button,
'settings': settings,
'VIDEO_STATUS_UNAPPROVED': models.VIDEO_STATUS_UNAPPROVED,
'VIDEO_STATUS_ACTIVE': models.VIDEO_STATUS_ACTIVE,
'VIDEO_STATUS_REJECTED': models.VIDEO_STATUS_REJECTED}
Add a middleware class to fix Firefox's bad AJAX redirect handlingimport urlparse
from django.conf import settings
from django.contrib.sites.models import Site
from localtv import models
class FixAJAXMiddleware:
"""
Firefox doesn't handle redirects in XMLHttpRequests correctly (it doesn't
set X-Requested-With) so we fake it with a GET argument.
"""
def process_request(self, request):
if 'from_ajax' in request.GET and not request.is_ajax():
request.META['HTTP_X_REQUESTED_WITH'] = 'XMLHttpRequest'
def process_response(self, request, response):
if 300 <= response.status_code < 400 and request.is_ajax():
parts = list(urlparse.urlparse(response['Location']))
if parts[4]: # query
parts[4] = parts[4] + '&from_ajax'
else:
parts[4] = 'from_ajax'
response['Location'] = urlparse.urlunparse(parts)
return response
def context_processor(request):
sitelocation = models.SiteLocation.objects.get(
site=Site.objects.get_current())
display_submit_button = sitelocation.display_submit_button
if display_submit_button:
if request.user.is_anonymous() and \
sitelocation.submission_requires_login:
display_submit_button = False
else:
if sitelocation.user_is_admin(request.user):
display_submit_button = True
return {
'sitelocation': sitelocation,
'request': request,
'user_is_admin': sitelocation.user_is_admin(request.user),
'display_submit_button': display_submit_button,
'settings': settings,
'VIDEO_STATUS_UNAPPROVED': models.VIDEO_STATUS_UNAPPROVED,
'VIDEO_STATUS_ACTIVE': models.VIDEO_STATUS_ACTIVE,
'VIDEO_STATUS_REJECTED': models.VIDEO_STATUS_REJECTED}
|
<commit_before>from django.conf import settings
from django.contrib.sites.models import Site
from localtv import models
def context_processor(request):
sitelocation = models.SiteLocation.objects.get(
site=Site.objects.get_current())
display_submit_button = sitelocation.display_submit_button
if display_submit_button:
if request.user.is_anonymous() and \
sitelocation.submission_requires_login:
display_submit_button = False
else:
if sitelocation.user_is_admin(request.user):
display_submit_button = True
return {
'sitelocation': sitelocation,
'request': request,
'user_is_admin': sitelocation.user_is_admin(request.user),
'display_submit_button': display_submit_button,
'settings': settings,
'VIDEO_STATUS_UNAPPROVED': models.VIDEO_STATUS_UNAPPROVED,
'VIDEO_STATUS_ACTIVE': models.VIDEO_STATUS_ACTIVE,
'VIDEO_STATUS_REJECTED': models.VIDEO_STATUS_REJECTED}
<commit_msg>Add a middleware class to fix Firefox's bad AJAX redirect handling<commit_after>import urlparse
from django.conf import settings
from django.contrib.sites.models import Site
from localtv import models
class FixAJAXMiddleware:
"""
Firefox doesn't handle redirects in XMLHttpRequests correctly (it doesn't
set X-Requested-With) so we fake it with a GET argument.
"""
def process_request(self, request):
if 'from_ajax' in request.GET and not request.is_ajax():
request.META['HTTP_X_REQUESTED_WITH'] = 'XMLHttpRequest'
def process_response(self, request, response):
if 300 <= response.status_code < 400 and request.is_ajax():
parts = list(urlparse.urlparse(response['Location']))
if parts[4]: # query
parts[4] = parts[4] + '&from_ajax'
else:
parts[4] = 'from_ajax'
response['Location'] = urlparse.urlunparse(parts)
return response
def context_processor(request):
sitelocation = models.SiteLocation.objects.get(
site=Site.objects.get_current())
display_submit_button = sitelocation.display_submit_button
if display_submit_button:
if request.user.is_anonymous() and \
sitelocation.submission_requires_login:
display_submit_button = False
else:
if sitelocation.user_is_admin(request.user):
display_submit_button = True
return {
'sitelocation': sitelocation,
'request': request,
'user_is_admin': sitelocation.user_is_admin(request.user),
'display_submit_button': display_submit_button,
'settings': settings,
'VIDEO_STATUS_UNAPPROVED': models.VIDEO_STATUS_UNAPPROVED,
'VIDEO_STATUS_ACTIVE': models.VIDEO_STATUS_ACTIVE,
'VIDEO_STATUS_REJECTED': models.VIDEO_STATUS_REJECTED}
|
8278da2e22bc1a10ada43585685aa4a0841d14c5
|
apps/bluebottle_utils/tests.py
|
apps/bluebottle_utils/tests.py
|
import uuid
from django.contrib.auth.models import User
class UserTestsMixin(object):
""" Mixin base class for tests requiring users. """
def create_user(self, username=None, password=None):
""" Create, save and return a new user. """
if not username:
# Generate a random username
username = str(uuid.uuid4())[:30]
user = User.objects.create_user(username=username)
return user
|
import uuid
from django.contrib.auth.models import User
class UserTestsMixin(object):
""" Mixin base class for tests requiring users. """
def create_user(self, username=None, password=None):
""" Create, save and return a new user. """
# If no username is set, create a random unique username
while not username or User.objects.filter(username=username).exists():
# Generate a random username
username = str(uuid.uuid4())[:30]
user = User.objects.create_user(username=username)
return user
|
Make sure generated usernames are unique.
|
Make sure generated usernames are unique.
|
Python
|
bsd-3-clause
|
onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site
|
import uuid
from django.contrib.auth.models import User
class UserTestsMixin(object):
""" Mixin base class for tests requiring users. """
def create_user(self, username=None, password=None):
""" Create, save and return a new user. """
if not username:
# Generate a random username
username = str(uuid.uuid4())[:30]
user = User.objects.create_user(username=username)
return user
Make sure generated usernames are unique.
|
import uuid
from django.contrib.auth.models import User
class UserTestsMixin(object):
""" Mixin base class for tests requiring users. """
def create_user(self, username=None, password=None):
""" Create, save and return a new user. """
# If no username is set, create a random unique username
while not username or User.objects.filter(username=username).exists():
# Generate a random username
username = str(uuid.uuid4())[:30]
user = User.objects.create_user(username=username)
return user
|
<commit_before>import uuid
from django.contrib.auth.models import User
class UserTestsMixin(object):
""" Mixin base class for tests requiring users. """
def create_user(self, username=None, password=None):
""" Create, save and return a new user. """
if not username:
# Generate a random username
username = str(uuid.uuid4())[:30]
user = User.objects.create_user(username=username)
return user
<commit_msg>Make sure generated usernames are unique.<commit_after>
|
import uuid
from django.contrib.auth.models import User
class UserTestsMixin(object):
""" Mixin base class for tests requiring users. """
def create_user(self, username=None, password=None):
""" Create, save and return a new user. """
# If no username is set, create a random unique username
while not username or User.objects.filter(username=username).exists():
# Generate a random username
username = str(uuid.uuid4())[:30]
user = User.objects.create_user(username=username)
return user
|
import uuid
from django.contrib.auth.models import User
class UserTestsMixin(object):
""" Mixin base class for tests requiring users. """
def create_user(self, username=None, password=None):
""" Create, save and return a new user. """
if not username:
# Generate a random username
username = str(uuid.uuid4())[:30]
user = User.objects.create_user(username=username)
return user
Make sure generated usernames are unique.import uuid
from django.contrib.auth.models import User
class UserTestsMixin(object):
""" Mixin base class for tests requiring users. """
def create_user(self, username=None, password=None):
""" Create, save and return a new user. """
# If no username is set, create a random unique username
while not username or User.objects.filter(username=username).exists():
# Generate a random username
username = str(uuid.uuid4())[:30]
user = User.objects.create_user(username=username)
return user
|
<commit_before>import uuid
from django.contrib.auth.models import User
class UserTestsMixin(object):
""" Mixin base class for tests requiring users. """
def create_user(self, username=None, password=None):
""" Create, save and return a new user. """
if not username:
# Generate a random username
username = str(uuid.uuid4())[:30]
user = User.objects.create_user(username=username)
return user
<commit_msg>Make sure generated usernames are unique.<commit_after>import uuid
from django.contrib.auth.models import User
class UserTestsMixin(object):
""" Mixin base class for tests requiring users. """
def create_user(self, username=None, password=None):
""" Create, save and return a new user. """
# If no username is set, create a random unique username
while not username or User.objects.filter(username=username).exists():
# Generate a random username
username = str(uuid.uuid4())[:30]
user = User.objects.create_user(username=username)
return user
|
ea250cdd086059ea7976a38c8e94cb4a39709357
|
feincms/views/decorators.py
|
feincms/views/decorators.py
|
try:
from functools import wraps
except ImportError:
from django.utils.functional import wraps
from feincms.models import Page
def add_page_to_extra_context(view_func):
def inner(request, *args, **kwargs):
kwargs.setdefault('extra_context', {})
kwargs['extra_context']['feincms_page'] = Page.objects.best_match_for_path(request.path)
return view_func(request, *args, **kwargs)
return wraps(view_func)(inner)
|
try:
from functools import wraps
except ImportError:
from django.utils.functional import wraps
from feincms.models import Page
def add_page_to_extra_context(view_func):
def inner(request, *args, **kwargs):
kwargs.setdefault('extra_context', {})
kwargs['extra_context']['feincms_page'] = Page.objects.best_match_from_request(request)
return view_func(request, *args, **kwargs)
return wraps(view_func)(inner)
|
Call the setup_request page method too in generic views replacements
|
Call the setup_request page method too in generic views replacements
|
Python
|
bsd-3-clause
|
mjl/feincms,matthiask/django-content-editor,pjdelport/feincms,hgrimelid/feincms,nickburlett/feincms,mjl/feincms,matthiask/feincms2-content,michaelkuty/feincms,joshuajonah/feincms,michaelkuty/feincms,matthiask/feincms2-content,feincms/feincms,hgrimelid/feincms,mjl/feincms,matthiask/django-content-editor,pjdelport/feincms,nickburlett/feincms,nickburlett/feincms,feincms/feincms,feincms/feincms,matthiask/django-content-editor,joshuajonah/feincms,pjdelport/feincms,matthiask/feincms2-content,nickburlett/feincms,joshuajonah/feincms,michaelkuty/feincms,michaelkuty/feincms,matthiask/django-content-editor,joshuajonah/feincms,hgrimelid/feincms
|
try:
from functools import wraps
except ImportError:
from django.utils.functional import wraps
from feincms.models import Page
def add_page_to_extra_context(view_func):
def inner(request, *args, **kwargs):
kwargs.setdefault('extra_context', {})
kwargs['extra_context']['feincms_page'] = Page.objects.best_match_for_path(request.path)
return view_func(request, *args, **kwargs)
return wraps(view_func)(inner)
Call the setup_request page method too in generic views replacements
|
try:
from functools import wraps
except ImportError:
from django.utils.functional import wraps
from feincms.models import Page
def add_page_to_extra_context(view_func):
def inner(request, *args, **kwargs):
kwargs.setdefault('extra_context', {})
kwargs['extra_context']['feincms_page'] = Page.objects.best_match_from_request(request)
return view_func(request, *args, **kwargs)
return wraps(view_func)(inner)
|
<commit_before>try:
from functools import wraps
except ImportError:
from django.utils.functional import wraps
from feincms.models import Page
def add_page_to_extra_context(view_func):
def inner(request, *args, **kwargs):
kwargs.setdefault('extra_context', {})
kwargs['extra_context']['feincms_page'] = Page.objects.best_match_for_path(request.path)
return view_func(request, *args, **kwargs)
return wraps(view_func)(inner)
<commit_msg>Call the setup_request page method too in generic views replacements<commit_after>
|
try:
from functools import wraps
except ImportError:
from django.utils.functional import wraps
from feincms.models import Page
def add_page_to_extra_context(view_func):
def inner(request, *args, **kwargs):
kwargs.setdefault('extra_context', {})
kwargs['extra_context']['feincms_page'] = Page.objects.best_match_from_request(request)
return view_func(request, *args, **kwargs)
return wraps(view_func)(inner)
|
try:
from functools import wraps
except ImportError:
from django.utils.functional import wraps
from feincms.models import Page
def add_page_to_extra_context(view_func):
def inner(request, *args, **kwargs):
kwargs.setdefault('extra_context', {})
kwargs['extra_context']['feincms_page'] = Page.objects.best_match_for_path(request.path)
return view_func(request, *args, **kwargs)
return wraps(view_func)(inner)
Call the setup_request page method too in generic views replacementstry:
from functools import wraps
except ImportError:
from django.utils.functional import wraps
from feincms.models import Page
def add_page_to_extra_context(view_func):
def inner(request, *args, **kwargs):
kwargs.setdefault('extra_context', {})
kwargs['extra_context']['feincms_page'] = Page.objects.best_match_from_request(request)
return view_func(request, *args, **kwargs)
return wraps(view_func)(inner)
|
<commit_before>try:
from functools import wraps
except ImportError:
from django.utils.functional import wraps
from feincms.models import Page
def add_page_to_extra_context(view_func):
def inner(request, *args, **kwargs):
kwargs.setdefault('extra_context', {})
kwargs['extra_context']['feincms_page'] = Page.objects.best_match_for_path(request.path)
return view_func(request, *args, **kwargs)
return wraps(view_func)(inner)
<commit_msg>Call the setup_request page method too in generic views replacements<commit_after>try:
from functools import wraps
except ImportError:
from django.utils.functional import wraps
from feincms.models import Page
def add_page_to_extra_context(view_func):
def inner(request, *args, **kwargs):
kwargs.setdefault('extra_context', {})
kwargs['extra_context']['feincms_page'] = Page.objects.best_match_from_request(request)
return view_func(request, *args, **kwargs)
return wraps(view_func)(inner)
|
d1a868ab1ac8163828479e61d1d3efcae127543b
|
fileapi/tests/test_qunit.py
|
fileapi/tests/test_qunit.py
|
import os
from django.conf import settings
from django.contrib.staticfiles import finders, storage
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.test.utils import modify_settings
from django.utils.functional import empty
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.ui import WebDriverWait
@modify_settings(STATICFILES_DIRS={
'append': os.path.join(os.path.dirname(__file__), 'static')})
class QunitTests(StaticLiveServerTestCase):
"""Iteractive tests with selenium."""
@classmethod
def setUpClass(cls):
cls.browser = webdriver.PhantomJS()
super().setUpClass()
@classmethod
def tearDownClass(cls):
cls.browser.quit()
super().tearDownClass()
def setUp(self):
# Clear the cache versions of the staticfiles finders and storage
# See https://code.djangoproject.com/ticket/24197
storage.staticfiles_storage._wrapped = empty
finders.get_finder.cache_clear()
def test_qunit(self):
"""Load the QUnit tests and check for failures."""
self.browser.get(self.live_server_url + settings.STATIC_URL + 'index.html')
results = WebDriverWait(self.browser, 5).until(
expected_conditions.visibility_of_element_located((By.ID, 'qunit-testresult')))
total = int(results.find_element_by_class_name('total').text)
failed = int(results.find_element_by_class_name('failed').text)
self.assertTrue(total and not failed, results.text)
|
import os
from django.conf import settings
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.test.utils import modify_settings
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.ui import WebDriverWait
@modify_settings(STATICFILES_DIRS={
'append': os.path.join(os.path.dirname(__file__), 'static')})
class QunitTests(StaticLiveServerTestCase):
"""Iteractive tests with selenium."""
@classmethod
def setUpClass(cls):
cls.browser = webdriver.PhantomJS()
super().setUpClass()
@classmethod
def tearDownClass(cls):
cls.browser.quit()
super().tearDownClass()
def test_qunit(self):
"""Load the QUnit tests and check for failures."""
self.browser.get(self.live_server_url + settings.STATIC_URL + 'index.html')
results = WebDriverWait(self.browser, 5).until(
expected_conditions.visibility_of_element_located((By.ID, 'qunit-testresult')))
total = int(results.find_element_by_class_name('total').text)
failed = int(results.find_element_by_class_name('failed').text)
self.assertTrue(total and not failed, results.text)
|
Remove code which worked around a Django bug which is fixed in 1.8+
|
Remove code which worked around a Django bug which is fixed in 1.8+
|
Python
|
bsd-2-clause
|
mlavin/fileapi,mlavin/fileapi,mlavin/fileapi
|
import os
from django.conf import settings
from django.contrib.staticfiles import finders, storage
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.test.utils import modify_settings
from django.utils.functional import empty
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.ui import WebDriverWait
@modify_settings(STATICFILES_DIRS={
'append': os.path.join(os.path.dirname(__file__), 'static')})
class QunitTests(StaticLiveServerTestCase):
"""Iteractive tests with selenium."""
@classmethod
def setUpClass(cls):
cls.browser = webdriver.PhantomJS()
super().setUpClass()
@classmethod
def tearDownClass(cls):
cls.browser.quit()
super().tearDownClass()
def setUp(self):
# Clear the cache versions of the staticfiles finders and storage
# See https://code.djangoproject.com/ticket/24197
storage.staticfiles_storage._wrapped = empty
finders.get_finder.cache_clear()
def test_qunit(self):
"""Load the QUnit tests and check for failures."""
self.browser.get(self.live_server_url + settings.STATIC_URL + 'index.html')
results = WebDriverWait(self.browser, 5).until(
expected_conditions.visibility_of_element_located((By.ID, 'qunit-testresult')))
total = int(results.find_element_by_class_name('total').text)
failed = int(results.find_element_by_class_name('failed').text)
self.assertTrue(total and not failed, results.text)
Remove code which worked around a Django bug which is fixed in 1.8+
|
import os
from django.conf import settings
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.test.utils import modify_settings
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.ui import WebDriverWait
@modify_settings(STATICFILES_DIRS={
'append': os.path.join(os.path.dirname(__file__), 'static')})
class QunitTests(StaticLiveServerTestCase):
"""Iteractive tests with selenium."""
@classmethod
def setUpClass(cls):
cls.browser = webdriver.PhantomJS()
super().setUpClass()
@classmethod
def tearDownClass(cls):
cls.browser.quit()
super().tearDownClass()
def test_qunit(self):
"""Load the QUnit tests and check for failures."""
self.browser.get(self.live_server_url + settings.STATIC_URL + 'index.html')
results = WebDriverWait(self.browser, 5).until(
expected_conditions.visibility_of_element_located((By.ID, 'qunit-testresult')))
total = int(results.find_element_by_class_name('total').text)
failed = int(results.find_element_by_class_name('failed').text)
self.assertTrue(total and not failed, results.text)
|
<commit_before>import os
from django.conf import settings
from django.contrib.staticfiles import finders, storage
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.test.utils import modify_settings
from django.utils.functional import empty
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.ui import WebDriverWait
@modify_settings(STATICFILES_DIRS={
'append': os.path.join(os.path.dirname(__file__), 'static')})
class QunitTests(StaticLiveServerTestCase):
"""Iteractive tests with selenium."""
@classmethod
def setUpClass(cls):
cls.browser = webdriver.PhantomJS()
super().setUpClass()
@classmethod
def tearDownClass(cls):
cls.browser.quit()
super().tearDownClass()
def setUp(self):
# Clear the cache versions of the staticfiles finders and storage
# See https://code.djangoproject.com/ticket/24197
storage.staticfiles_storage._wrapped = empty
finders.get_finder.cache_clear()
def test_qunit(self):
"""Load the QUnit tests and check for failures."""
self.browser.get(self.live_server_url + settings.STATIC_URL + 'index.html')
results = WebDriverWait(self.browser, 5).until(
expected_conditions.visibility_of_element_located((By.ID, 'qunit-testresult')))
total = int(results.find_element_by_class_name('total').text)
failed = int(results.find_element_by_class_name('failed').text)
self.assertTrue(total and not failed, results.text)
<commit_msg>Remove code which worked around a Django bug which is fixed in 1.8+<commit_after>
|
import os
from django.conf import settings
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.test.utils import modify_settings
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.ui import WebDriverWait
@modify_settings(STATICFILES_DIRS={
'append': os.path.join(os.path.dirname(__file__), 'static')})
class QunitTests(StaticLiveServerTestCase):
"""Iteractive tests with selenium."""
@classmethod
def setUpClass(cls):
cls.browser = webdriver.PhantomJS()
super().setUpClass()
@classmethod
def tearDownClass(cls):
cls.browser.quit()
super().tearDownClass()
def test_qunit(self):
"""Load the QUnit tests and check for failures."""
self.browser.get(self.live_server_url + settings.STATIC_URL + 'index.html')
results = WebDriverWait(self.browser, 5).until(
expected_conditions.visibility_of_element_located((By.ID, 'qunit-testresult')))
total = int(results.find_element_by_class_name('total').text)
failed = int(results.find_element_by_class_name('failed').text)
self.assertTrue(total and not failed, results.text)
|
import os
from django.conf import settings
from django.contrib.staticfiles import finders, storage
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.test.utils import modify_settings
from django.utils.functional import empty
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.ui import WebDriverWait
@modify_settings(STATICFILES_DIRS={
'append': os.path.join(os.path.dirname(__file__), 'static')})
class QunitTests(StaticLiveServerTestCase):
"""Iteractive tests with selenium."""
@classmethod
def setUpClass(cls):
cls.browser = webdriver.PhantomJS()
super().setUpClass()
@classmethod
def tearDownClass(cls):
cls.browser.quit()
super().tearDownClass()
def setUp(self):
# Clear the cache versions of the staticfiles finders and storage
# See https://code.djangoproject.com/ticket/24197
storage.staticfiles_storage._wrapped = empty
finders.get_finder.cache_clear()
def test_qunit(self):
"""Load the QUnit tests and check for failures."""
self.browser.get(self.live_server_url + settings.STATIC_URL + 'index.html')
results = WebDriverWait(self.browser, 5).until(
expected_conditions.visibility_of_element_located((By.ID, 'qunit-testresult')))
total = int(results.find_element_by_class_name('total').text)
failed = int(results.find_element_by_class_name('failed').text)
self.assertTrue(total and not failed, results.text)
Remove code which worked around a Django bug which is fixed in 1.8+import os
from django.conf import settings
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.test.utils import modify_settings
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.ui import WebDriverWait
@modify_settings(STATICFILES_DIRS={
'append': os.path.join(os.path.dirname(__file__), 'static')})
class QunitTests(StaticLiveServerTestCase):
"""Iteractive tests with selenium."""
@classmethod
def setUpClass(cls):
cls.browser = webdriver.PhantomJS()
super().setUpClass()
@classmethod
def tearDownClass(cls):
cls.browser.quit()
super().tearDownClass()
def test_qunit(self):
"""Load the QUnit tests and check for failures."""
self.browser.get(self.live_server_url + settings.STATIC_URL + 'index.html')
results = WebDriverWait(self.browser, 5).until(
expected_conditions.visibility_of_element_located((By.ID, 'qunit-testresult')))
total = int(results.find_element_by_class_name('total').text)
failed = int(results.find_element_by_class_name('failed').text)
self.assertTrue(total and not failed, results.text)
|
<commit_before>import os
from django.conf import settings
from django.contrib.staticfiles import finders, storage
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.test.utils import modify_settings
from django.utils.functional import empty
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.ui import WebDriverWait
@modify_settings(STATICFILES_DIRS={
'append': os.path.join(os.path.dirname(__file__), 'static')})
class QunitTests(StaticLiveServerTestCase):
"""Iteractive tests with selenium."""
@classmethod
def setUpClass(cls):
cls.browser = webdriver.PhantomJS()
super().setUpClass()
@classmethod
def tearDownClass(cls):
cls.browser.quit()
super().tearDownClass()
def setUp(self):
# Clear the cache versions of the staticfiles finders and storage
# See https://code.djangoproject.com/ticket/24197
storage.staticfiles_storage._wrapped = empty
finders.get_finder.cache_clear()
def test_qunit(self):
"""Load the QUnit tests and check for failures."""
self.browser.get(self.live_server_url + settings.STATIC_URL + 'index.html')
results = WebDriverWait(self.browser, 5).until(
expected_conditions.visibility_of_element_located((By.ID, 'qunit-testresult')))
total = int(results.find_element_by_class_name('total').text)
failed = int(results.find_element_by_class_name('failed').text)
self.assertTrue(total and not failed, results.text)
<commit_msg>Remove code which worked around a Django bug which is fixed in 1.8+<commit_after>import os
from django.conf import settings
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.test.utils import modify_settings
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.ui import WebDriverWait
@modify_settings(STATICFILES_DIRS={
'append': os.path.join(os.path.dirname(__file__), 'static')})
class QunitTests(StaticLiveServerTestCase):
"""Iteractive tests with selenium."""
@classmethod
def setUpClass(cls):
cls.browser = webdriver.PhantomJS()
super().setUpClass()
@classmethod
def tearDownClass(cls):
cls.browser.quit()
super().tearDownClass()
def test_qunit(self):
"""Load the QUnit tests and check for failures."""
self.browser.get(self.live_server_url + settings.STATIC_URL + 'index.html')
results = WebDriverWait(self.browser, 5).until(
expected_conditions.visibility_of_element_located((By.ID, 'qunit-testresult')))
total = int(results.find_element_by_class_name('total').text)
failed = int(results.find_element_by_class_name('failed').text)
self.assertTrue(total and not failed, results.text)
|
b6c44e90df31c42137a80a64f6069056b16e3239
|
plugins/auth/crypto/algo_bcrypt.py
|
plugins/auth/crypto/algo_bcrypt.py
|
# coding=utf-8
from plugins.auth.crypto.algo_base import BaseAlgorithm
import bcrypt
__author__ = 'Gareth Coles'
class BcryptAlgo(BaseAlgorithm):
def check(self, hash, value, salt=None):
return hash == self.hash(value, hash)
def hash(self, value, salt):
return bcrypt.hashpw(
value, salt=salt
)
def gen_salt(self):
return bcrypt.gensalt()
|
# coding=utf-8
import bcrypt
from kitchen.text.converters import to_bytes
from plugins.auth.crypto.algo_base import BaseAlgorithm
__author__ = 'Gareth Coles'
class BcryptAlgo(BaseAlgorithm):
def check(self, hash, value, salt=None):
return hash == self.hash(value, hash)
def hash(self, value, salt):
return bcrypt.hashpw(
to_bytes(value), salt=salt
)
def gen_salt(self):
return bcrypt.gensalt()
|
Make bcrypt work with Unicode passwords
|
[Auth] Make bcrypt work with Unicode passwords
|
Python
|
artistic-2.0
|
UltrosBot/Ultros,UltrosBot/Ultros
|
# coding=utf-8
from plugins.auth.crypto.algo_base import BaseAlgorithm
import bcrypt
__author__ = 'Gareth Coles'
class BcryptAlgo(BaseAlgorithm):
def check(self, hash, value, salt=None):
return hash == self.hash(value, hash)
def hash(self, value, salt):
return bcrypt.hashpw(
value, salt=salt
)
def gen_salt(self):
return bcrypt.gensalt()
[Auth] Make bcrypt work with Unicode passwords
|
# coding=utf-8
import bcrypt
from kitchen.text.converters import to_bytes
from plugins.auth.crypto.algo_base import BaseAlgorithm
__author__ = 'Gareth Coles'
class BcryptAlgo(BaseAlgorithm):
def check(self, hash, value, salt=None):
return hash == self.hash(value, hash)
def hash(self, value, salt):
return bcrypt.hashpw(
to_bytes(value), salt=salt
)
def gen_salt(self):
return bcrypt.gensalt()
|
<commit_before># coding=utf-8
from plugins.auth.crypto.algo_base import BaseAlgorithm
import bcrypt
__author__ = 'Gareth Coles'
class BcryptAlgo(BaseAlgorithm):
def check(self, hash, value, salt=None):
return hash == self.hash(value, hash)
def hash(self, value, salt):
return bcrypt.hashpw(
value, salt=salt
)
def gen_salt(self):
return bcrypt.gensalt()
<commit_msg>[Auth] Make bcrypt work with Unicode passwords<commit_after>
|
# coding=utf-8
import bcrypt
from kitchen.text.converters import to_bytes
from plugins.auth.crypto.algo_base import BaseAlgorithm
__author__ = 'Gareth Coles'
class BcryptAlgo(BaseAlgorithm):
def check(self, hash, value, salt=None):
return hash == self.hash(value, hash)
def hash(self, value, salt):
return bcrypt.hashpw(
to_bytes(value), salt=salt
)
def gen_salt(self):
return bcrypt.gensalt()
|
# coding=utf-8
from plugins.auth.crypto.algo_base import BaseAlgorithm
import bcrypt
__author__ = 'Gareth Coles'
class BcryptAlgo(BaseAlgorithm):
def check(self, hash, value, salt=None):
return hash == self.hash(value, hash)
def hash(self, value, salt):
return bcrypt.hashpw(
value, salt=salt
)
def gen_salt(self):
return bcrypt.gensalt()
[Auth] Make bcrypt work with Unicode passwords# coding=utf-8
import bcrypt
from kitchen.text.converters import to_bytes
from plugins.auth.crypto.algo_base import BaseAlgorithm
__author__ = 'Gareth Coles'
class BcryptAlgo(BaseAlgorithm):
def check(self, hash, value, salt=None):
return hash == self.hash(value, hash)
def hash(self, value, salt):
return bcrypt.hashpw(
to_bytes(value), salt=salt
)
def gen_salt(self):
return bcrypt.gensalt()
|
<commit_before># coding=utf-8
from plugins.auth.crypto.algo_base import BaseAlgorithm
import bcrypt
__author__ = 'Gareth Coles'
class BcryptAlgo(BaseAlgorithm):
def check(self, hash, value, salt=None):
return hash == self.hash(value, hash)
def hash(self, value, salt):
return bcrypt.hashpw(
value, salt=salt
)
def gen_salt(self):
return bcrypt.gensalt()
<commit_msg>[Auth] Make bcrypt work with Unicode passwords<commit_after># coding=utf-8
import bcrypt
from kitchen.text.converters import to_bytes
from plugins.auth.crypto.algo_base import BaseAlgorithm
__author__ = 'Gareth Coles'
class BcryptAlgo(BaseAlgorithm):
def check(self, hash, value, salt=None):
return hash == self.hash(value, hash)
def hash(self, value, salt):
return bcrypt.hashpw(
to_bytes(value), salt=salt
)
def gen_salt(self):
return bcrypt.gensalt()
|
83f606e50b2a2ba2f283434d6449a46ad405e548
|
flask_mongorest/utils.py
|
flask_mongorest/utils.py
|
import json
import decimal
import datetime
from bson.dbref import DBRef
from bson.objectid import ObjectId
from mongoengine.base import BaseDocument
isbound = lambda m: getattr(m, 'im_self', None) is not None
class MongoEncoder(json.JSONEncoder):
def default(self, value, **kwargs):
if isinstance(value, ObjectId):
return unicode(value)
elif isinstance(value, DBRef):
return value.id
if isinstance(value, datetime.datetime):
return value.isoformat()
if isinstance(value, datetime.date):
return value.strftime("%Y-%m-%d")
if isinstance(value, decimal.Decimal):
return str(value)
return json.JSONEncoder.default(value, **kwargs)
|
import json
import decimal
import datetime
from bson.dbref import DBRef
from bson.objectid import ObjectId
from mongoengine.base import BaseDocument
isbound = lambda m: getattr(m, 'im_self', None) is not None
class MongoEncoder(json.JSONEncoder):
def default(self, value, **kwargs):
if isinstance(value, ObjectId):
return unicode(value)
elif isinstance(value, DBRef):
return value.id
if isinstance(value, datetime.datetime):
return value.isoformat()
if isinstance(value, datetime.date):
return value.strftime("%Y-%m-%d")
if isinstance(value, decimal.Decimal):
return str(value)
return super(MongoEncoder, self).default(value, **kwargs)
|
Fix bad call to superclass method
|
Fix bad call to superclass method
|
Python
|
bsd-3-clause
|
elasticsales/flask-mongorest,DropD/flask-mongorest,elasticsales/flask-mongorest,DropD/flask-mongorest
|
import json
import decimal
import datetime
from bson.dbref import DBRef
from bson.objectid import ObjectId
from mongoengine.base import BaseDocument
isbound = lambda m: getattr(m, 'im_self', None) is not None
class MongoEncoder(json.JSONEncoder):
def default(self, value, **kwargs):
if isinstance(value, ObjectId):
return unicode(value)
elif isinstance(value, DBRef):
return value.id
if isinstance(value, datetime.datetime):
return value.isoformat()
if isinstance(value, datetime.date):
return value.strftime("%Y-%m-%d")
if isinstance(value, decimal.Decimal):
return str(value)
return json.JSONEncoder.default(value, **kwargs)
Fix bad call to superclass method
|
import json
import decimal
import datetime
from bson.dbref import DBRef
from bson.objectid import ObjectId
from mongoengine.base import BaseDocument
isbound = lambda m: getattr(m, 'im_self', None) is not None
class MongoEncoder(json.JSONEncoder):
def default(self, value, **kwargs):
if isinstance(value, ObjectId):
return unicode(value)
elif isinstance(value, DBRef):
return value.id
if isinstance(value, datetime.datetime):
return value.isoformat()
if isinstance(value, datetime.date):
return value.strftime("%Y-%m-%d")
if isinstance(value, decimal.Decimal):
return str(value)
return super(MongoEncoder, self).default(value, **kwargs)
|
<commit_before>import json
import decimal
import datetime
from bson.dbref import DBRef
from bson.objectid import ObjectId
from mongoengine.base import BaseDocument
isbound = lambda m: getattr(m, 'im_self', None) is not None
class MongoEncoder(json.JSONEncoder):
def default(self, value, **kwargs):
if isinstance(value, ObjectId):
return unicode(value)
elif isinstance(value, DBRef):
return value.id
if isinstance(value, datetime.datetime):
return value.isoformat()
if isinstance(value, datetime.date):
return value.strftime("%Y-%m-%d")
if isinstance(value, decimal.Decimal):
return str(value)
return json.JSONEncoder.default(value, **kwargs)
<commit_msg>Fix bad call to superclass method<commit_after>
|
import json
import decimal
import datetime
from bson.dbref import DBRef
from bson.objectid import ObjectId
from mongoengine.base import BaseDocument
isbound = lambda m: getattr(m, 'im_self', None) is not None
class MongoEncoder(json.JSONEncoder):
def default(self, value, **kwargs):
if isinstance(value, ObjectId):
return unicode(value)
elif isinstance(value, DBRef):
return value.id
if isinstance(value, datetime.datetime):
return value.isoformat()
if isinstance(value, datetime.date):
return value.strftime("%Y-%m-%d")
if isinstance(value, decimal.Decimal):
return str(value)
return super(MongoEncoder, self).default(value, **kwargs)
|
import json
import decimal
import datetime
from bson.dbref import DBRef
from bson.objectid import ObjectId
from mongoengine.base import BaseDocument
isbound = lambda m: getattr(m, 'im_self', None) is not None
class MongoEncoder(json.JSONEncoder):
def default(self, value, **kwargs):
if isinstance(value, ObjectId):
return unicode(value)
elif isinstance(value, DBRef):
return value.id
if isinstance(value, datetime.datetime):
return value.isoformat()
if isinstance(value, datetime.date):
return value.strftime("%Y-%m-%d")
if isinstance(value, decimal.Decimal):
return str(value)
return json.JSONEncoder.default(value, **kwargs)
Fix bad call to superclass methodimport json
import decimal
import datetime
from bson.dbref import DBRef
from bson.objectid import ObjectId
from mongoengine.base import BaseDocument
isbound = lambda m: getattr(m, 'im_self', None) is not None
class MongoEncoder(json.JSONEncoder):
def default(self, value, **kwargs):
if isinstance(value, ObjectId):
return unicode(value)
elif isinstance(value, DBRef):
return value.id
if isinstance(value, datetime.datetime):
return value.isoformat()
if isinstance(value, datetime.date):
return value.strftime("%Y-%m-%d")
if isinstance(value, decimal.Decimal):
return str(value)
return super(MongoEncoder, self).default(value, **kwargs)
|
<commit_before>import json
import decimal
import datetime
from bson.dbref import DBRef
from bson.objectid import ObjectId
from mongoengine.base import BaseDocument
isbound = lambda m: getattr(m, 'im_self', None) is not None
class MongoEncoder(json.JSONEncoder):
def default(self, value, **kwargs):
if isinstance(value, ObjectId):
return unicode(value)
elif isinstance(value, DBRef):
return value.id
if isinstance(value, datetime.datetime):
return value.isoformat()
if isinstance(value, datetime.date):
return value.strftime("%Y-%m-%d")
if isinstance(value, decimal.Decimal):
return str(value)
return json.JSONEncoder.default(value, **kwargs)
<commit_msg>Fix bad call to superclass method<commit_after>import json
import decimal
import datetime
from bson.dbref import DBRef
from bson.objectid import ObjectId
from mongoengine.base import BaseDocument
isbound = lambda m: getattr(m, 'im_self', None) is not None
class MongoEncoder(json.JSONEncoder):
def default(self, value, **kwargs):
if isinstance(value, ObjectId):
return unicode(value)
elif isinstance(value, DBRef):
return value.id
if isinstance(value, datetime.datetime):
return value.isoformat()
if isinstance(value, datetime.date):
return value.strftime("%Y-%m-%d")
if isinstance(value, decimal.Decimal):
return str(value)
return super(MongoEncoder, self).default(value, **kwargs)
|
9fb17d4612fa250ebce09334cd8141ac071532cc
|
utils/addressTest.py
|
utils/addressTest.py
|
#!/usr/bin/python
# Round-trip test of the sensor's Modbus address-change feature:
# write ADDRESS2 into holding register 0, talk to the sensor at the new
# address, then restore ADDRESS1 and read the register back to confirm.
import minimalmodbus
from time import sleep

ADDRESS1 = 1  # the sensor's starting (and final) slave address
ADDRESS2 = 2  # temporary address used for the round trip

minimalmodbus.CLOSE_PORT_AFTER_EACH_CALL = True

sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS1)
print("writing new address: " + str(ADDRESS2))
# Holding register 0 stores the slave address; function code 6 = write single register.
sensor.write_register(0, value=ADDRESS2, functioncode=6)
sleep(0.2)  # give the sensor time to adopt the new address
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS2)
print("reading address from holding register: ")
print(sensor.read_register(0, functioncode=3))  # function code 3 = read holding registers
print("writing new address: " + str(ADDRESS1))
sensor.write_register(0, value=ADDRESS1, functioncode=6)
sleep(0.2)
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS1)
print("reading address from holding register: ")
print(sensor.read_register(0, functioncode=3))
|
#!/usr/bin/python
"""Looks for sensor with ADDRESS1 and changes it's address to ADDRESS2 then changes it back to ADDRESS1"""
import minimalmodbus
import serial
from time import sleep

ADDRESS1 = 1  # the sensor's starting (and final) slave address
ADDRESS2 = 2  # temporary address used for the round trip

minimalmodbus.CLOSE_PORT_AFTER_EACH_CALL = True
# Serial line parameters — must match the sensor's RS-485 configuration.
minimalmodbus.PARITY=serial.PARITY_NONE
minimalmodbus.STOPBITS = 2
minimalmodbus.BAUDRATE=19200
minimalmodbus.CLOSE_PORT_AFTER_EACH_CALL = True  # NOTE(review): duplicate of the assignment above

sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS1)
sensor.debug=True  # dump raw Modbus frames to aid troubleshooting
print(("reading address: ") + str(sensor.read_register(0, functioncode=3)))
print("writing new address: " + str(ADDRESS2))
# Holding register 0 stores the slave address; function code 6 = write single register.
sensor.write_register(0, value=ADDRESS2, functioncode=6)
sleep(0.2)  # give the sensor time to adopt the new address
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS2)
print("reading address from holding register: ")
print(sensor.read_register(0, functioncode=3))  # function code 3 = read holding registers
print("writing new address: " + str(ADDRESS1))
sensor.write_register(0, value=ADDRESS1, functioncode=6)
sleep(0.2)
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS1)
print("reading address from holding register: ")
print(sensor.read_register(0, functioncode=3))
|
Add utility for address change functionality testing
|
Add utility for address change functionality testing
|
Python
|
apache-2.0
|
Miceuz/rs485-moist-sensor,Miceuz/rs485-moist-sensor
|
#!/usr/bin/python
import minimalmodbus
from time import sleep
ADDRESS1 = 1
ADDRESS2 = 2
minimalmodbus.CLOSE_PORT_AFTER_EACH_CALL = True
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS1)
print("writing new address: " + str(ADDRESS2))
sensor.write_register(0, value=ADDRESS2, functioncode=6)
sleep(0.2)
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS2)
print("reading address from holding register: ")
print(sensor.read_register(0, functioncode=3))
print("writing new address: " + str(ADDRESS1))
sensor.write_register(0, value=ADDRESS1, functioncode=6)
sleep(0.2)
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS1)
print("reading address from holding register: ")
print(sensor.read_register(0, functioncode=3))
Add utility for address change functionality testing
|
#!/usr/bin/python
"""Looks for sensor with ADDRESS1 and changes it's address to ADDRESS2 then changes it back to ADDRESS1"""
import minimalmodbus
import serial
from time import sleep
ADDRESS1 = 1
ADDRESS2 = 2
minimalmodbus.CLOSE_PORT_AFTER_EACH_CALL = True
minimalmodbus.PARITY=serial.PARITY_NONE
minimalmodbus.STOPBITS = 2
minimalmodbus.BAUDRATE=19200
minimalmodbus.CLOSE_PORT_AFTER_EACH_CALL = True
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS1)
sensor.debug=True
print(("reading address: ") + str(sensor.read_register(0, functioncode=3)))
print("writing new address: " + str(ADDRESS2))
sensor.write_register(0, value=ADDRESS2, functioncode=6)
sleep(0.2)
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS2)
print("reading address from holding register: ")
print(sensor.read_register(0, functioncode=3))
print("writing new address: " + str(ADDRESS1))
sensor.write_register(0, value=ADDRESS1, functioncode=6)
sleep(0.2)
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS1)
print("reading address from holding register: ")
print(sensor.read_register(0, functioncode=3))
|
<commit_before>#!/usr/bin/python
import minimalmodbus
from time import sleep
ADDRESS1 = 1
ADDRESS2 = 2
minimalmodbus.CLOSE_PORT_AFTER_EACH_CALL = True
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS1)
print("writing new address: " + str(ADDRESS2))
sensor.write_register(0, value=ADDRESS2, functioncode=6)
sleep(0.2)
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS2)
print("reading address from holding register: ")
print(sensor.read_register(0, functioncode=3))
print("writing new address: " + str(ADDRESS1))
sensor.write_register(0, value=ADDRESS1, functioncode=6)
sleep(0.2)
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS1)
print("reading address from holding register: ")
print(sensor.read_register(0, functioncode=3))
<commit_msg>Add utility for address change functionality testing<commit_after>
|
#!/usr/bin/python
"""Looks for sensor with ADDRESS1 and changes it's address to ADDRESS2 then changes it back to ADDRESS1"""
import minimalmodbus
import serial
from time import sleep
ADDRESS1 = 1
ADDRESS2 = 2
minimalmodbus.CLOSE_PORT_AFTER_EACH_CALL = True
minimalmodbus.PARITY=serial.PARITY_NONE
minimalmodbus.STOPBITS = 2
minimalmodbus.BAUDRATE=19200
minimalmodbus.CLOSE_PORT_AFTER_EACH_CALL = True
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS1)
sensor.debug=True
print(("reading address: ") + str(sensor.read_register(0, functioncode=3)))
print("writing new address: " + str(ADDRESS2))
sensor.write_register(0, value=ADDRESS2, functioncode=6)
sleep(0.2)
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS2)
print("reading address from holding register: ")
print(sensor.read_register(0, functioncode=3))
print("writing new address: " + str(ADDRESS1))
sensor.write_register(0, value=ADDRESS1, functioncode=6)
sleep(0.2)
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS1)
print("reading address from holding register: ")
print(sensor.read_register(0, functioncode=3))
|
#!/usr/bin/python
import minimalmodbus
from time import sleep
ADDRESS1 = 1
ADDRESS2 = 2
minimalmodbus.CLOSE_PORT_AFTER_EACH_CALL = True
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS1)
print("writing new address: " + str(ADDRESS2))
sensor.write_register(0, value=ADDRESS2, functioncode=6)
sleep(0.2)
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS2)
print("reading address from holding register: ")
print(sensor.read_register(0, functioncode=3))
print("writing new address: " + str(ADDRESS1))
sensor.write_register(0, value=ADDRESS1, functioncode=6)
sleep(0.2)
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS1)
print("reading address from holding register: ")
print(sensor.read_register(0, functioncode=3))
Add utility for address change functionality testing#!/usr/bin/python
"""Looks for sensor with ADDRESS1 and changes it's address to ADDRESS2 then changes it back to ADDRESS1"""
import minimalmodbus
import serial
from time import sleep
ADDRESS1 = 1
ADDRESS2 = 2
minimalmodbus.CLOSE_PORT_AFTER_EACH_CALL = True
minimalmodbus.PARITY=serial.PARITY_NONE
minimalmodbus.STOPBITS = 2
minimalmodbus.BAUDRATE=19200
minimalmodbus.CLOSE_PORT_AFTER_EACH_CALL = True
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS1)
sensor.debug=True
print(("reading address: ") + str(sensor.read_register(0, functioncode=3)))
print("writing new address: " + str(ADDRESS2))
sensor.write_register(0, value=ADDRESS2, functioncode=6)
sleep(0.2)
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS2)
print("reading address from holding register: ")
print(sensor.read_register(0, functioncode=3))
print("writing new address: " + str(ADDRESS1))
sensor.write_register(0, value=ADDRESS1, functioncode=6)
sleep(0.2)
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS1)
print("reading address from holding register: ")
print(sensor.read_register(0, functioncode=3))
|
<commit_before>#!/usr/bin/python
import minimalmodbus
from time import sleep
ADDRESS1 = 1
ADDRESS2 = 2
minimalmodbus.CLOSE_PORT_AFTER_EACH_CALL = True
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS1)
print("writing new address: " + str(ADDRESS2))
sensor.write_register(0, value=ADDRESS2, functioncode=6)
sleep(0.2)
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS2)
print("reading address from holding register: ")
print(sensor.read_register(0, functioncode=3))
print("writing new address: " + str(ADDRESS1))
sensor.write_register(0, value=ADDRESS1, functioncode=6)
sleep(0.2)
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS1)
print("reading address from holding register: ")
print(sensor.read_register(0, functioncode=3))
<commit_msg>Add utility for address change functionality testing<commit_after>#!/usr/bin/python
"""Looks for sensor with ADDRESS1 and changes it's address to ADDRESS2 then changes it back to ADDRESS1"""
import minimalmodbus
import serial
from time import sleep
ADDRESS1 = 1
ADDRESS2 = 2
minimalmodbus.CLOSE_PORT_AFTER_EACH_CALL = True
minimalmodbus.PARITY=serial.PARITY_NONE
minimalmodbus.STOPBITS = 2
minimalmodbus.BAUDRATE=19200
minimalmodbus.CLOSE_PORT_AFTER_EACH_CALL = True
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS1)
sensor.debug=True
print(("reading address: ") + str(sensor.read_register(0, functioncode=3)))
print("writing new address: " + str(ADDRESS2))
sensor.write_register(0, value=ADDRESS2, functioncode=6)
sleep(0.2)
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS2)
print("reading address from holding register: ")
print(sensor.read_register(0, functioncode=3))
print("writing new address: " + str(ADDRESS1))
sensor.write_register(0, value=ADDRESS1, functioncode=6)
sleep(0.2)
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS1)
print("reading address from holding register: ")
print(sensor.read_register(0, functioncode=3))
|
0e1dd74c70a2fa682b3cd3b0027162ad50ee9998
|
social/app/views/friend.py
|
social/app/views/friend.py
|
from django.http import HttpResponseRedirect
from django.urls import reverse
from django.views import generic
from social.app.models.author import Author
class FriendRequestsListView(generic.ListView):
    """Lists the logged-in author's incoming friend requests and handles
    the accept form posted from that page."""

    context_object_name = "all_friend_requests"
    template_name = "app/friend_requests_list.html"

    def get_queryset(self):
        # Only requests addressed to the current user's author profile.
        return self.request.user.profile.incoming_friend_requests.all()

    def post(self, request):
        """Accept the ticked friend requests, then redirect back to the list."""
        logged_in_author = self.request.user.profile

        # IDs of the requests the user marked as accepted in the form.
        accepted_friend_requests = request.POST.getlist('accepted_friend_requests')

        for new_friend_id in accepted_friend_requests:
            new_friend = Author.objects.get(id=new_friend_id)

            if new_friend.node.local:
                # Local author: accept directly, no remote call needed.
                logged_in_author.accept_friend_request(new_friend)
            else:
                # Remote author: notify their node before accepting locally.
                r = new_friend.node.post_friend_request(request, logged_in_author, new_friend)
                if 200 <= r.status_code < 300:
                    # Success!
                    logged_in_author.accept_friend_request(new_friend)
                else:
                    # This one didn't work. Oh well! No easy way to show an error without ruining other accepts
                    pass

        logged_in_author.save()
        return HttpResponseRedirect(reverse("app:friend-requests-list"))
|
from django.http import HttpResponseRedirect
from django.urls import reverse
from django.views import generic
from social.app.models.author import Author
class FriendRequestsListView(generic.ListView):
    """Lists the logged-in author's incoming friend requests and handles
    the accept form posted from that page."""

    context_object_name = "all_friend_requests"
    template_name = "app/friend_requests_list.html"

    def get_queryset(self):
        # Only requests addressed to the current user's author profile.
        return self.request.user.profile.incoming_friend_requests.all()

    def post(self, request):
        """Accept the ticked friend requests, then redirect back to the list."""
        logged_in_author = self.request.user.profile

        # IDs of the requests the user marked as accepted in the form.
        accepted_friend_requests = request.POST.getlist('accepted_friend_requests')

        for new_friend_id in accepted_friend_requests:
            new_friend = Author.objects.get(id=new_friend_id)

            if new_friend.node.local:
                # Local author: accept directly, no remote call needed.
                logged_in_author.accept_friend_request(new_friend)
            else:
                # Remote author: notify their node before accepting locally.
                r = new_friend.node.post_friend_request(request, logged_in_author, new_friend)
                if 200 <= r.status_code < 300:
                    # Success!
                    logged_in_author.accept_friend_request(new_friend)
                else:
                    # NOTE(review): aborts the whole batch on the first remote
                    # failure — any later accepted requests are not processed.
                    r.raise_for_status()

        logged_in_author.save()
        return HttpResponseRedirect(reverse("app:friend-requests-list"))
|
Put in a raise for status for now
|
Put in a raise for status for now
|
Python
|
apache-2.0
|
TeamAADGT/CMPUT404-project-socialdistribution,TeamAADGT/CMPUT404-project-socialdistribution,TeamAADGT/CMPUT404-project-socialdistribution
|
from django.http import HttpResponseRedirect
from django.urls import reverse
from django.views import generic
from social.app.models.author import Author
class FriendRequestsListView(generic.ListView):
context_object_name = "all_friend_requests"
template_name = "app/friend_requests_list.html"
def get_queryset(self):
return self.request.user.profile.incoming_friend_requests.all()
def post(self, request):
logged_in_author = self.request.user.profile
accepted_friend_requests = request.POST.getlist('accepted_friend_requests')
for new_friend_id in accepted_friend_requests:
new_friend = Author.objects.get(id=new_friend_id)
if new_friend.node.local:
logged_in_author.accept_friend_request(new_friend)
else:
r = new_friend.node.post_friend_request(request, logged_in_author, new_friend)
if 200 <= r.status_code < 300:
# Success!
logged_in_author.accept_friend_request(new_friend)
else:
# This one didn't work. Oh well! No easy way to show an error without ruining other accepts
pass
logged_in_author.save()
return HttpResponseRedirect(reverse("app:friend-requests-list"))
Put in a raise for status for now
|
from django.http import HttpResponseRedirect
from django.urls import reverse
from django.views import generic
from social.app.models.author import Author
class FriendRequestsListView(generic.ListView):
context_object_name = "all_friend_requests"
template_name = "app/friend_requests_list.html"
def get_queryset(self):
return self.request.user.profile.incoming_friend_requests.all()
def post(self, request):
logged_in_author = self.request.user.profile
accepted_friend_requests = request.POST.getlist('accepted_friend_requests')
for new_friend_id in accepted_friend_requests:
new_friend = Author.objects.get(id=new_friend_id)
if new_friend.node.local:
logged_in_author.accept_friend_request(new_friend)
else:
r = new_friend.node.post_friend_request(request, logged_in_author, new_friend)
if 200 <= r.status_code < 300:
# Success!
logged_in_author.accept_friend_request(new_friend)
else:
r.raise_for_status()
logged_in_author.save()
return HttpResponseRedirect(reverse("app:friend-requests-list"))
|
<commit_before>from django.http import HttpResponseRedirect
from django.urls import reverse
from django.views import generic
from social.app.models.author import Author
class FriendRequestsListView(generic.ListView):
context_object_name = "all_friend_requests"
template_name = "app/friend_requests_list.html"
def get_queryset(self):
return self.request.user.profile.incoming_friend_requests.all()
def post(self, request):
logged_in_author = self.request.user.profile
accepted_friend_requests = request.POST.getlist('accepted_friend_requests')
for new_friend_id in accepted_friend_requests:
new_friend = Author.objects.get(id=new_friend_id)
if new_friend.node.local:
logged_in_author.accept_friend_request(new_friend)
else:
r = new_friend.node.post_friend_request(request, logged_in_author, new_friend)
if 200 <= r.status_code < 300:
# Success!
logged_in_author.accept_friend_request(new_friend)
else:
# This one didn't work. Oh well! No easy way to show an error without ruining other accepts
pass
logged_in_author.save()
return HttpResponseRedirect(reverse("app:friend-requests-list"))
<commit_msg>Put in a raise for status for now<commit_after>
|
from django.http import HttpResponseRedirect
from django.urls import reverse
from django.views import generic
from social.app.models.author import Author
class FriendRequestsListView(generic.ListView):
context_object_name = "all_friend_requests"
template_name = "app/friend_requests_list.html"
def get_queryset(self):
return self.request.user.profile.incoming_friend_requests.all()
def post(self, request):
logged_in_author = self.request.user.profile
accepted_friend_requests = request.POST.getlist('accepted_friend_requests')
for new_friend_id in accepted_friend_requests:
new_friend = Author.objects.get(id=new_friend_id)
if new_friend.node.local:
logged_in_author.accept_friend_request(new_friend)
else:
r = new_friend.node.post_friend_request(request, logged_in_author, new_friend)
if 200 <= r.status_code < 300:
# Success!
logged_in_author.accept_friend_request(new_friend)
else:
r.raise_for_status()
logged_in_author.save()
return HttpResponseRedirect(reverse("app:friend-requests-list"))
|
from django.http import HttpResponseRedirect
from django.urls import reverse
from django.views import generic
from social.app.models.author import Author
class FriendRequestsListView(generic.ListView):
context_object_name = "all_friend_requests"
template_name = "app/friend_requests_list.html"
def get_queryset(self):
return self.request.user.profile.incoming_friend_requests.all()
def post(self, request):
logged_in_author = self.request.user.profile
accepted_friend_requests = request.POST.getlist('accepted_friend_requests')
for new_friend_id in accepted_friend_requests:
new_friend = Author.objects.get(id=new_friend_id)
if new_friend.node.local:
logged_in_author.accept_friend_request(new_friend)
else:
r = new_friend.node.post_friend_request(request, logged_in_author, new_friend)
if 200 <= r.status_code < 300:
# Success!
logged_in_author.accept_friend_request(new_friend)
else:
# This one didn't work. Oh well! No easy way to show an error without ruining other accepts
pass
logged_in_author.save()
return HttpResponseRedirect(reverse("app:friend-requests-list"))
Put in a raise for status for nowfrom django.http import HttpResponseRedirect
from django.urls import reverse
from django.views import generic
from social.app.models.author import Author
class FriendRequestsListView(generic.ListView):
context_object_name = "all_friend_requests"
template_name = "app/friend_requests_list.html"
def get_queryset(self):
return self.request.user.profile.incoming_friend_requests.all()
def post(self, request):
logged_in_author = self.request.user.profile
accepted_friend_requests = request.POST.getlist('accepted_friend_requests')
for new_friend_id in accepted_friend_requests:
new_friend = Author.objects.get(id=new_friend_id)
if new_friend.node.local:
logged_in_author.accept_friend_request(new_friend)
else:
r = new_friend.node.post_friend_request(request, logged_in_author, new_friend)
if 200 <= r.status_code < 300:
# Success!
logged_in_author.accept_friend_request(new_friend)
else:
r.raise_for_status()
logged_in_author.save()
return HttpResponseRedirect(reverse("app:friend-requests-list"))
|
<commit_before>from django.http import HttpResponseRedirect
from django.urls import reverse
from django.views import generic
from social.app.models.author import Author
class FriendRequestsListView(generic.ListView):
context_object_name = "all_friend_requests"
template_name = "app/friend_requests_list.html"
def get_queryset(self):
return self.request.user.profile.incoming_friend_requests.all()
def post(self, request):
logged_in_author = self.request.user.profile
accepted_friend_requests = request.POST.getlist('accepted_friend_requests')
for new_friend_id in accepted_friend_requests:
new_friend = Author.objects.get(id=new_friend_id)
if new_friend.node.local:
logged_in_author.accept_friend_request(new_friend)
else:
r = new_friend.node.post_friend_request(request, logged_in_author, new_friend)
if 200 <= r.status_code < 300:
# Success!
logged_in_author.accept_friend_request(new_friend)
else:
# This one didn't work. Oh well! No easy way to show an error without ruining other accepts
pass
logged_in_author.save()
return HttpResponseRedirect(reverse("app:friend-requests-list"))
<commit_msg>Put in a raise for status for now<commit_after>from django.http import HttpResponseRedirect
from django.urls import reverse
from django.views import generic
from social.app.models.author import Author
class FriendRequestsListView(generic.ListView):
context_object_name = "all_friend_requests"
template_name = "app/friend_requests_list.html"
def get_queryset(self):
return self.request.user.profile.incoming_friend_requests.all()
def post(self, request):
logged_in_author = self.request.user.profile
accepted_friend_requests = request.POST.getlist('accepted_friend_requests')
for new_friend_id in accepted_friend_requests:
new_friend = Author.objects.get(id=new_friend_id)
if new_friend.node.local:
logged_in_author.accept_friend_request(new_friend)
else:
r = new_friend.node.post_friend_request(request, logged_in_author, new_friend)
if 200 <= r.status_code < 300:
# Success!
logged_in_author.accept_friend_request(new_friend)
else:
r.raise_for_status()
logged_in_author.save()
return HttpResponseRedirect(reverse("app:friend-requests-list"))
|
8e87689fd0edaf36349c3a6390fd8a6d18038f41
|
fortuitus/fcore/views.py
|
fortuitus/fcore/views.py
|
from django.contrib import messages, auth
from django.shortcuts import redirect
from django.template.response import TemplateResponse
from django.views.generic.base import TemplateView
from fortuitus.fcore import forms
class Home(TemplateView):
    """ Home page. """
    template_name = 'fortuitus/fcore/home.html'


def signup(request):
    """ Registration view. """
    # Already signed in: nothing to register.
    if request.user.is_authenticated():
        return redirect('home')
    if request.method == 'POST':
        form = forms.RegistrationForm(request.POST)
        if form.is_valid():
            user = form.save()
            # Re-authenticate so the auth backend is attached to the user
            # object before login() is called.
            user = auth.authenticate(username=user.username,
                                     password=request.POST['password1'])
            auth.login(request, user)
            messages.success(request, 'Thanks for signing up.')
            return redirect('home')
    else:
        form = forms.RegistrationForm()
    # GET, or POST with an invalid form: (re)render the registration page.
    return TemplateResponse(request, 'fortuitus/fcore/registration.html',
                            {'form': form})


def demo(request):
    """ Demo mode. Automatically sign in demo user and show 'em dashboard. """
    # TODO autologin
    return redirect('feditor_project', company='demo', project='twitter')
|
from django.contrib import messages, auth
from django.contrib.auth.models import User
from django.shortcuts import redirect
from django.template.response import TemplateResponse
from django.views.generic.base import TemplateView
from fortuitus.fcore import forms
class Home(TemplateView):
    """ Home page. """
    template_name = 'fortuitus/fcore/home.html'


def signup(request):
    """ Registration view. """
    # Already signed in: nothing to register.
    if request.user.is_authenticated():
        return redirect('home')
    if request.method == 'POST':
        form = forms.RegistrationForm(request.POST)
        if form.is_valid():
            user = form.save()
            # Re-authenticate so the auth backend is attached to the user
            # object before login() is called.
            user = auth.authenticate(username=user.username,
                                     password=request.POST['password1'])
            auth.login(request, user)
            messages.success(request, 'Thanks for signing up.')
            return redirect('home')
    else:
        form = forms.RegistrationForm()
    # GET, or POST with an invalid form: (re)render the registration page.
    return TemplateResponse(request, 'fortuitus/fcore/registration.html',
                            {'form': form})


def demo(request):
    """ Demo mode. Automatically sign in demo user and show 'em dashboard. """
    if request.user.is_anonymous():
        # Shared, well-known demo credentials — created on first use.
        params = dict(username='demo', password='demo')
        # We can't use get_or_create because we have to use `create_user`.
        try:
            user = User.objects.get(username=params['username'])
        except User.DoesNotExist:
            user = User.objects.create_user(**params)
        user = auth.authenticate(**params)
        auth.login(request, user)
    return redirect('feditor_project', company='demo', project='twitter')
|
Add auto-login feature for demo view
|
Add auto-login feature for demo view
|
Python
|
mit
|
elegion/djangodash2012,elegion/djangodash2012
|
from django.contrib import messages, auth
from django.shortcuts import redirect
from django.template.response import TemplateResponse
from django.views.generic.base import TemplateView
from fortuitus.fcore import forms
class Home(TemplateView):
""" Home page. """
template_name = 'fortuitus/fcore/home.html'
def signup(request):
""" Registration view. """
if request.user.is_authenticated():
return redirect('home')
if request.method == 'POST':
form = forms.RegistrationForm(request.POST)
if form.is_valid():
user = form.save()
user = auth.authenticate(username=user.username,
password=request.POST['password1'])
auth.login(request, user)
messages.success(request, 'Thanks for signing up.')
return redirect('home')
else:
form = forms.RegistrationForm()
return TemplateResponse(request, 'fortuitus/fcore/registration.html',
{'form': form})
def demo(request):
""" Demo mode. Automatically sign in demo user and show 'em dashboard. """
# TODO autologin
return redirect('feditor_project', company='demo', project='twitter')
Add auto-login feature for demo view
|
from django.contrib import messages, auth
from django.contrib.auth.models import User
from django.shortcuts import redirect
from django.template.response import TemplateResponse
from django.views.generic.base import TemplateView
from fortuitus.fcore import forms
class Home(TemplateView):
""" Home page. """
template_name = 'fortuitus/fcore/home.html'
def signup(request):
""" Registration view. """
if request.user.is_authenticated():
return redirect('home')
if request.method == 'POST':
form = forms.RegistrationForm(request.POST)
if form.is_valid():
user = form.save()
user = auth.authenticate(username=user.username,
password=request.POST['password1'])
auth.login(request, user)
messages.success(request, 'Thanks for signing up.')
return redirect('home')
else:
form = forms.RegistrationForm()
return TemplateResponse(request, 'fortuitus/fcore/registration.html',
{'form': form})
def demo(request):
""" Demo mode. Automatically sign in demo user and show 'em dashboard. """
if request.user.is_anonymous():
params = dict(username='demo', password='demo')
# We can't use get_or_create because we have to use `create_user`.
try:
user = User.objects.get(username=params['username'])
except User.DoesNotExist:
user = User.objects.create_user(**params)
user = auth.authenticate(**params)
auth.login(request, user)
return redirect('feditor_project', company='demo', project='twitter')
|
<commit_before>from django.contrib import messages, auth
from django.shortcuts import redirect
from django.template.response import TemplateResponse
from django.views.generic.base import TemplateView
from fortuitus.fcore import forms
class Home(TemplateView):
""" Home page. """
template_name = 'fortuitus/fcore/home.html'
def signup(request):
""" Registration view. """
if request.user.is_authenticated():
return redirect('home')
if request.method == 'POST':
form = forms.RegistrationForm(request.POST)
if form.is_valid():
user = form.save()
user = auth.authenticate(username=user.username,
password=request.POST['password1'])
auth.login(request, user)
messages.success(request, 'Thanks for signing up.')
return redirect('home')
else:
form = forms.RegistrationForm()
return TemplateResponse(request, 'fortuitus/fcore/registration.html',
{'form': form})
def demo(request):
""" Demo mode. Automatically sign in demo user and show 'em dashboard. """
# TODO autologin
return redirect('feditor_project', company='demo', project='twitter')
<commit_msg>Add auto-login feature for demo view<commit_after>
|
from django.contrib import messages, auth
from django.contrib.auth.models import User
from django.shortcuts import redirect
from django.template.response import TemplateResponse
from django.views.generic.base import TemplateView
from fortuitus.fcore import forms
class Home(TemplateView):
""" Home page. """
template_name = 'fortuitus/fcore/home.html'
def signup(request):
""" Registration view. """
if request.user.is_authenticated():
return redirect('home')
if request.method == 'POST':
form = forms.RegistrationForm(request.POST)
if form.is_valid():
user = form.save()
user = auth.authenticate(username=user.username,
password=request.POST['password1'])
auth.login(request, user)
messages.success(request, 'Thanks for signing up.')
return redirect('home')
else:
form = forms.RegistrationForm()
return TemplateResponse(request, 'fortuitus/fcore/registration.html',
{'form': form})
def demo(request):
""" Demo mode. Automatically sign in demo user and show 'em dashboard. """
if request.user.is_anonymous():
params = dict(username='demo', password='demo')
# We can't use get_or_create because we have to use `create_user`.
try:
user = User.objects.get(username=params['username'])
except User.DoesNotExist:
user = User.objects.create_user(**params)
user = auth.authenticate(**params)
auth.login(request, user)
return redirect('feditor_project', company='demo', project='twitter')
|
from django.contrib import messages, auth
from django.shortcuts import redirect
from django.template.response import TemplateResponse
from django.views.generic.base import TemplateView
from fortuitus.fcore import forms
class Home(TemplateView):
""" Home page. """
template_name = 'fortuitus/fcore/home.html'
def signup(request):
""" Registration view. """
if request.user.is_authenticated():
return redirect('home')
if request.method == 'POST':
form = forms.RegistrationForm(request.POST)
if form.is_valid():
user = form.save()
user = auth.authenticate(username=user.username,
password=request.POST['password1'])
auth.login(request, user)
messages.success(request, 'Thanks for signing up.')
return redirect('home')
else:
form = forms.RegistrationForm()
return TemplateResponse(request, 'fortuitus/fcore/registration.html',
{'form': form})
def demo(request):
""" Demo mode. Automatically sign in demo user and show 'em dashboard. """
# TODO autologin
return redirect('feditor_project', company='demo', project='twitter')
Add auto-login feature for demo viewfrom django.contrib import messages, auth
from django.contrib.auth.models import User
from django.shortcuts import redirect
from django.template.response import TemplateResponse
from django.views.generic.base import TemplateView
from fortuitus.fcore import forms
class Home(TemplateView):
""" Home page. """
template_name = 'fortuitus/fcore/home.html'
def signup(request):
""" Registration view. """
if request.user.is_authenticated():
return redirect('home')
if request.method == 'POST':
form = forms.RegistrationForm(request.POST)
if form.is_valid():
user = form.save()
user = auth.authenticate(username=user.username,
password=request.POST['password1'])
auth.login(request, user)
messages.success(request, 'Thanks for signing up.')
return redirect('home')
else:
form = forms.RegistrationForm()
return TemplateResponse(request, 'fortuitus/fcore/registration.html',
{'form': form})
def demo(request):
""" Demo mode. Automatically sign in demo user and show 'em dashboard. """
if request.user.is_anonymous():
params = dict(username='demo', password='demo')
# We can't use get_or_create because we have to use `create_user`.
try:
user = User.objects.get(username=params['username'])
except User.DoesNotExist:
user = User.objects.create_user(**params)
user = auth.authenticate(**params)
auth.login(request, user)
return redirect('feditor_project', company='demo', project='twitter')
|
<commit_before>from django.contrib import messages, auth
from django.shortcuts import redirect
from django.template.response import TemplateResponse
from django.views.generic.base import TemplateView
from fortuitus.fcore import forms
class Home(TemplateView):
""" Home page. """
template_name = 'fortuitus/fcore/home.html'
def signup(request):
""" Registration view. """
if request.user.is_authenticated():
return redirect('home')
if request.method == 'POST':
form = forms.RegistrationForm(request.POST)
if form.is_valid():
user = form.save()
user = auth.authenticate(username=user.username,
password=request.POST['password1'])
auth.login(request, user)
messages.success(request, 'Thanks for signing up.')
return redirect('home')
else:
form = forms.RegistrationForm()
return TemplateResponse(request, 'fortuitus/fcore/registration.html',
{'form': form})
def demo(request):
""" Demo mode. Automatically sign in demo user and show 'em dashboard. """
# TODO autologin
return redirect('feditor_project', company='demo', project='twitter')
<commit_msg>Add auto-login feature for demo view<commit_after>from django.contrib import messages, auth
from django.contrib.auth.models import User
from django.shortcuts import redirect
from django.template.response import TemplateResponse
from django.views.generic.base import TemplateView
from fortuitus.fcore import forms
class Home(TemplateView):
""" Home page. """
template_name = 'fortuitus/fcore/home.html'
def signup(request):
""" Registration view. """
if request.user.is_authenticated():
return redirect('home')
if request.method == 'POST':
form = forms.RegistrationForm(request.POST)
if form.is_valid():
user = form.save()
user = auth.authenticate(username=user.username,
password=request.POST['password1'])
auth.login(request, user)
messages.success(request, 'Thanks for signing up.')
return redirect('home')
else:
form = forms.RegistrationForm()
return TemplateResponse(request, 'fortuitus/fcore/registration.html',
{'form': form})
def demo(request):
""" Demo mode. Automatically sign in demo user and show 'em dashboard. """
if request.user.is_anonymous():
params = dict(username='demo', password='demo')
# We can't use get_or_create because we have to use `create_user`.
try:
user = User.objects.get(username=params['username'])
except User.DoesNotExist:
user = User.objects.create_user(**params)
user = auth.authenticate(**params)
auth.login(request, user)
return redirect('feditor_project', company='demo', project='twitter')
|
cbb11e996381197d551425585fca225d630fa383
|
tests/test_simpleflow/utils/test_misc.py
|
tests/test_simpleflow/utils/test_misc.py
|
import unittest
from simpleflow.utils import format_exc
class MyTestCase(unittest.TestCase):
def test_format_final_exc_line(self):
line = None
try:
1/0
except Exception as e:
line = format_exc(e)
self.assertEqual("ZeroDivisionError: division by zero", line)
try:
{}[1]
except Exception as e:
line = format_exc(e)
self.assertEqual("KeyError: 1", line)
if __name__ == '__main__':
unittest.main()
|
import unittest
from simpleflow.utils import format_exc
class MyTestCase(unittest.TestCase):
def test_format_final_exc_line(self):
line = None
try:
{}[1]
except Exception as e:
line = format_exc(e)
self.assertEqual("KeyError: 1", line)
if __name__ == '__main__':
unittest.main()
|
Remove version-specific exception text test
|
Remove version-specific exception text test
Signed-off-by: Yves Bastide <3b1fe340dba76bf37270abad774f327f50b5e1d8@botify.com>
|
Python
|
mit
|
botify-labs/simpleflow,botify-labs/simpleflow
|
import unittest
from simpleflow.utils import format_exc
class MyTestCase(unittest.TestCase):
def test_format_final_exc_line(self):
line = None
try:
1/0
except Exception as e:
line = format_exc(e)
self.assertEqual("ZeroDivisionError: division by zero", line)
try:
{}[1]
except Exception as e:
line = format_exc(e)
self.assertEqual("KeyError: 1", line)
if __name__ == '__main__':
unittest.main()
Remove version-specific exception text test
Signed-off-by: Yves Bastide <3b1fe340dba76bf37270abad774f327f50b5e1d8@botify.com>
|
import unittest
from simpleflow.utils import format_exc
class MyTestCase(unittest.TestCase):
def test_format_final_exc_line(self):
line = None
try:
{}[1]
except Exception as e:
line = format_exc(e)
self.assertEqual("KeyError: 1", line)
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
from simpleflow.utils import format_exc
class MyTestCase(unittest.TestCase):
def test_format_final_exc_line(self):
line = None
try:
1/0
except Exception as e:
line = format_exc(e)
self.assertEqual("ZeroDivisionError: division by zero", line)
try:
{}[1]
except Exception as e:
line = format_exc(e)
self.assertEqual("KeyError: 1", line)
if __name__ == '__main__':
unittest.main()
<commit_msg>Remove version-specific exception text test
Signed-off-by: Yves Bastide <3b1fe340dba76bf37270abad774f327f50b5e1d8@botify.com><commit_after>
|
import unittest
from simpleflow.utils import format_exc
class MyTestCase(unittest.TestCase):
def test_format_final_exc_line(self):
line = None
try:
{}[1]
except Exception as e:
line = format_exc(e)
self.assertEqual("KeyError: 1", line)
if __name__ == '__main__':
unittest.main()
|
import unittest
from simpleflow.utils import format_exc
class MyTestCase(unittest.TestCase):
def test_format_final_exc_line(self):
line = None
try:
1/0
except Exception as e:
line = format_exc(e)
self.assertEqual("ZeroDivisionError: division by zero", line)
try:
{}[1]
except Exception as e:
line = format_exc(e)
self.assertEqual("KeyError: 1", line)
if __name__ == '__main__':
unittest.main()
Remove version-specific exception text test
Signed-off-by: Yves Bastide <3b1fe340dba76bf37270abad774f327f50b5e1d8@botify.com>import unittest
from simpleflow.utils import format_exc
class MyTestCase(unittest.TestCase):
def test_format_final_exc_line(self):
line = None
try:
{}[1]
except Exception as e:
line = format_exc(e)
self.assertEqual("KeyError: 1", line)
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
from simpleflow.utils import format_exc
class MyTestCase(unittest.TestCase):
def test_format_final_exc_line(self):
line = None
try:
1/0
except Exception as e:
line = format_exc(e)
self.assertEqual("ZeroDivisionError: division by zero", line)
try:
{}[1]
except Exception as e:
line = format_exc(e)
self.assertEqual("KeyError: 1", line)
if __name__ == '__main__':
unittest.main()
<commit_msg>Remove version-specific exception text test
Signed-off-by: Yves Bastide <3b1fe340dba76bf37270abad774f327f50b5e1d8@botify.com><commit_after>import unittest
from simpleflow.utils import format_exc
class MyTestCase(unittest.TestCase):
def test_format_final_exc_line(self):
line = None
try:
{}[1]
except Exception as e:
line = format_exc(e)
self.assertEqual("KeyError: 1", line)
if __name__ == '__main__':
unittest.main()
|
39314b70125d41fb57a468684209bdcfdfb8096f
|
frigg/builds/serializers.py
|
frigg/builds/serializers.py
|
from rest_framework import serializers
from frigg.projects.models import Project
from .models import Build, BuildResult
class ProjectInlineSerializer(serializers.ModelSerializer):
class Meta:
model = Project
fields = (
'id',
'owner',
'name',
'private',
'approved',
)
class BuildResultSerializer(serializers.ModelSerializer):
class Meta:
model = BuildResult
fields = (
'id',
'coverage',
'succeeded',
'tasks',
'setup_tasks',
)
class BuildInlineSerializer(serializers.ModelSerializer):
result = BuildResultSerializer(read_only=True)
class Meta:
model = Build
fields = (
'id',
'build_number',
'branch',
'sha',
'pull_request_id',
'start_time',
'end_time',
'result',
'short_message',
'message',
'color',
'pull_request_url',
'commit_url',
)
class BuildSerializer(serializers.ModelSerializer):
project = ProjectInlineSerializer(read_only=True)
result = BuildResultSerializer(read_only=True)
class Meta:
model = Build
fields = (
'id',
'project',
'result',
'build_number',
'branch',
'pull_request_id',
'sha',
'start_time',
'end_time',
'short_message',
'message',
'color',
'pull_request_url',
'commit_url',
)
|
from rest_framework import serializers
from frigg.projects.models import Project
from .models import Build, BuildResult
class ProjectInlineSerializer(serializers.ModelSerializer):
class Meta:
model = Project
fields = (
'id',
'owner',
'name',
'private',
'approved',
)
class BuildResultSerializer(serializers.ModelSerializer):
class Meta:
model = BuildResult
fields = (
'id',
'coverage',
'succeeded',
'tasks',
'setup_tasks',
'still_running',
)
class BuildInlineSerializer(serializers.ModelSerializer):
result = BuildResultSerializer(read_only=True)
class Meta:
model = Build
fields = (
'id',
'build_number',
'branch',
'sha',
'pull_request_id',
'start_time',
'end_time',
'result',
'short_message',
'message',
'color',
'pull_request_url',
'commit_url',
)
class BuildSerializer(serializers.ModelSerializer):
project = ProjectInlineSerializer(read_only=True)
result = BuildResultSerializer(read_only=True)
class Meta:
model = Build
fields = (
'id',
'project',
'result',
'build_number',
'branch',
'pull_request_id',
'sha',
'start_time',
'end_time',
'short_message',
'message',
'color',
'pull_request_url',
'commit_url',
)
|
Add still_running to build result serializer
|
Add still_running to build result serializer
|
Python
|
mit
|
frigg/frigg-hq,frigg/frigg-hq,frigg/frigg-hq
|
from rest_framework import serializers
from frigg.projects.models import Project
from .models import Build, BuildResult
class ProjectInlineSerializer(serializers.ModelSerializer):
class Meta:
model = Project
fields = (
'id',
'owner',
'name',
'private',
'approved',
)
class BuildResultSerializer(serializers.ModelSerializer):
class Meta:
model = BuildResult
fields = (
'id',
'coverage',
'succeeded',
'tasks',
'setup_tasks',
)
class BuildInlineSerializer(serializers.ModelSerializer):
result = BuildResultSerializer(read_only=True)
class Meta:
model = Build
fields = (
'id',
'build_number',
'branch',
'sha',
'pull_request_id',
'start_time',
'end_time',
'result',
'short_message',
'message',
'color',
'pull_request_url',
'commit_url',
)
class BuildSerializer(serializers.ModelSerializer):
project = ProjectInlineSerializer(read_only=True)
result = BuildResultSerializer(read_only=True)
class Meta:
model = Build
fields = (
'id',
'project',
'result',
'build_number',
'branch',
'pull_request_id',
'sha',
'start_time',
'end_time',
'short_message',
'message',
'color',
'pull_request_url',
'commit_url',
)
Add still_running to build result serializer
|
from rest_framework import serializers
from frigg.projects.models import Project
from .models import Build, BuildResult
class ProjectInlineSerializer(serializers.ModelSerializer):
class Meta:
model = Project
fields = (
'id',
'owner',
'name',
'private',
'approved',
)
class BuildResultSerializer(serializers.ModelSerializer):
class Meta:
model = BuildResult
fields = (
'id',
'coverage',
'succeeded',
'tasks',
'setup_tasks',
'still_running',
)
class BuildInlineSerializer(serializers.ModelSerializer):
result = BuildResultSerializer(read_only=True)
class Meta:
model = Build
fields = (
'id',
'build_number',
'branch',
'sha',
'pull_request_id',
'start_time',
'end_time',
'result',
'short_message',
'message',
'color',
'pull_request_url',
'commit_url',
)
class BuildSerializer(serializers.ModelSerializer):
project = ProjectInlineSerializer(read_only=True)
result = BuildResultSerializer(read_only=True)
class Meta:
model = Build
fields = (
'id',
'project',
'result',
'build_number',
'branch',
'pull_request_id',
'sha',
'start_time',
'end_time',
'short_message',
'message',
'color',
'pull_request_url',
'commit_url',
)
|
<commit_before>from rest_framework import serializers
from frigg.projects.models import Project
from .models import Build, BuildResult
class ProjectInlineSerializer(serializers.ModelSerializer):
class Meta:
model = Project
fields = (
'id',
'owner',
'name',
'private',
'approved',
)
class BuildResultSerializer(serializers.ModelSerializer):
class Meta:
model = BuildResult
fields = (
'id',
'coverage',
'succeeded',
'tasks',
'setup_tasks',
)
class BuildInlineSerializer(serializers.ModelSerializer):
result = BuildResultSerializer(read_only=True)
class Meta:
model = Build
fields = (
'id',
'build_number',
'branch',
'sha',
'pull_request_id',
'start_time',
'end_time',
'result',
'short_message',
'message',
'color',
'pull_request_url',
'commit_url',
)
class BuildSerializer(serializers.ModelSerializer):
project = ProjectInlineSerializer(read_only=True)
result = BuildResultSerializer(read_only=True)
class Meta:
model = Build
fields = (
'id',
'project',
'result',
'build_number',
'branch',
'pull_request_id',
'sha',
'start_time',
'end_time',
'short_message',
'message',
'color',
'pull_request_url',
'commit_url',
)
<commit_msg>Add still_running to build result serializer<commit_after>
|
from rest_framework import serializers
from frigg.projects.models import Project
from .models import Build, BuildResult
class ProjectInlineSerializer(serializers.ModelSerializer):
class Meta:
model = Project
fields = (
'id',
'owner',
'name',
'private',
'approved',
)
class BuildResultSerializer(serializers.ModelSerializer):
class Meta:
model = BuildResult
fields = (
'id',
'coverage',
'succeeded',
'tasks',
'setup_tasks',
'still_running',
)
class BuildInlineSerializer(serializers.ModelSerializer):
result = BuildResultSerializer(read_only=True)
class Meta:
model = Build
fields = (
'id',
'build_number',
'branch',
'sha',
'pull_request_id',
'start_time',
'end_time',
'result',
'short_message',
'message',
'color',
'pull_request_url',
'commit_url',
)
class BuildSerializer(serializers.ModelSerializer):
project = ProjectInlineSerializer(read_only=True)
result = BuildResultSerializer(read_only=True)
class Meta:
model = Build
fields = (
'id',
'project',
'result',
'build_number',
'branch',
'pull_request_id',
'sha',
'start_time',
'end_time',
'short_message',
'message',
'color',
'pull_request_url',
'commit_url',
)
|
from rest_framework import serializers
from frigg.projects.models import Project
from .models import Build, BuildResult
class ProjectInlineSerializer(serializers.ModelSerializer):
class Meta:
model = Project
fields = (
'id',
'owner',
'name',
'private',
'approved',
)
class BuildResultSerializer(serializers.ModelSerializer):
class Meta:
model = BuildResult
fields = (
'id',
'coverage',
'succeeded',
'tasks',
'setup_tasks',
)
class BuildInlineSerializer(serializers.ModelSerializer):
result = BuildResultSerializer(read_only=True)
class Meta:
model = Build
fields = (
'id',
'build_number',
'branch',
'sha',
'pull_request_id',
'start_time',
'end_time',
'result',
'short_message',
'message',
'color',
'pull_request_url',
'commit_url',
)
class BuildSerializer(serializers.ModelSerializer):
project = ProjectInlineSerializer(read_only=True)
result = BuildResultSerializer(read_only=True)
class Meta:
model = Build
fields = (
'id',
'project',
'result',
'build_number',
'branch',
'pull_request_id',
'sha',
'start_time',
'end_time',
'short_message',
'message',
'color',
'pull_request_url',
'commit_url',
)
Add still_running to build result serializerfrom rest_framework import serializers
from frigg.projects.models import Project
from .models import Build, BuildResult
class ProjectInlineSerializer(serializers.ModelSerializer):
class Meta:
model = Project
fields = (
'id',
'owner',
'name',
'private',
'approved',
)
class BuildResultSerializer(serializers.ModelSerializer):
class Meta:
model = BuildResult
fields = (
'id',
'coverage',
'succeeded',
'tasks',
'setup_tasks',
'still_running',
)
class BuildInlineSerializer(serializers.ModelSerializer):
result = BuildResultSerializer(read_only=True)
class Meta:
model = Build
fields = (
'id',
'build_number',
'branch',
'sha',
'pull_request_id',
'start_time',
'end_time',
'result',
'short_message',
'message',
'color',
'pull_request_url',
'commit_url',
)
class BuildSerializer(serializers.ModelSerializer):
project = ProjectInlineSerializer(read_only=True)
result = BuildResultSerializer(read_only=True)
class Meta:
model = Build
fields = (
'id',
'project',
'result',
'build_number',
'branch',
'pull_request_id',
'sha',
'start_time',
'end_time',
'short_message',
'message',
'color',
'pull_request_url',
'commit_url',
)
|
<commit_before>from rest_framework import serializers
from frigg.projects.models import Project
from .models import Build, BuildResult
class ProjectInlineSerializer(serializers.ModelSerializer):
class Meta:
model = Project
fields = (
'id',
'owner',
'name',
'private',
'approved',
)
class BuildResultSerializer(serializers.ModelSerializer):
class Meta:
model = BuildResult
fields = (
'id',
'coverage',
'succeeded',
'tasks',
'setup_tasks',
)
class BuildInlineSerializer(serializers.ModelSerializer):
result = BuildResultSerializer(read_only=True)
class Meta:
model = Build
fields = (
'id',
'build_number',
'branch',
'sha',
'pull_request_id',
'start_time',
'end_time',
'result',
'short_message',
'message',
'color',
'pull_request_url',
'commit_url',
)
class BuildSerializer(serializers.ModelSerializer):
project = ProjectInlineSerializer(read_only=True)
result = BuildResultSerializer(read_only=True)
class Meta:
model = Build
fields = (
'id',
'project',
'result',
'build_number',
'branch',
'pull_request_id',
'sha',
'start_time',
'end_time',
'short_message',
'message',
'color',
'pull_request_url',
'commit_url',
)
<commit_msg>Add still_running to build result serializer<commit_after>from rest_framework import serializers
from frigg.projects.models import Project
from .models import Build, BuildResult
class ProjectInlineSerializer(serializers.ModelSerializer):
class Meta:
model = Project
fields = (
'id',
'owner',
'name',
'private',
'approved',
)
class BuildResultSerializer(serializers.ModelSerializer):
class Meta:
model = BuildResult
fields = (
'id',
'coverage',
'succeeded',
'tasks',
'setup_tasks',
'still_running',
)
class BuildInlineSerializer(serializers.ModelSerializer):
result = BuildResultSerializer(read_only=True)
class Meta:
model = Build
fields = (
'id',
'build_number',
'branch',
'sha',
'pull_request_id',
'start_time',
'end_time',
'result',
'short_message',
'message',
'color',
'pull_request_url',
'commit_url',
)
class BuildSerializer(serializers.ModelSerializer):
project = ProjectInlineSerializer(read_only=True)
result = BuildResultSerializer(read_only=True)
class Meta:
model = Build
fields = (
'id',
'project',
'result',
'build_number',
'branch',
'pull_request_id',
'sha',
'start_time',
'end_time',
'short_message',
'message',
'color',
'pull_request_url',
'commit_url',
)
|
8e907ad431dfe5395741d26ea46c50c118355d69
|
src/webassets/ext/werkzeug.py
|
src/webassets/ext/werkzeug.py
|
import logging
from webassets.script import CommandLineEnvironment
__all__ = ('make_assets_action',)
def make_assets_action(environment, loaders=[]):
"""Creates a ``werkzeug.script`` action which interfaces
with the webassets command line tools.
Since Werkzeug does not provide a way to have subcommands,
we need to model the assets subcommands as options.
If ``loaders`` is given, the command will use these loaders
to add bundles to the environment. This is mainly useful if
you are defining your bundles inside templates only, and
need to find them first using something like the Jinja2Loader.
"""
log = logging.getLogger('webassets')
log.addHandler(logging.StreamHandler())
def action(rebuild=False, watch=False, clean=False, quiet=('q', False),
verbose=('v', False)):
if len(filter(bool, [rebuild, watch, clean])) != 1:
print "Error: exactly one of --rebuild, --watch or --clean must be given"
return 1
if rebuild:
command = 'rebuild'
elif watch:
command = 'watch'
elif clean:
command = 'clean'
log.setLevel(logging.DEBUG if verbose else (logging.WARNING if quiet else logging.INFO))
cmdenv = CommandLineEnvironment(environment, log)
if loaders:
log.info('Finding bundles...')
for loader in loaders:
environment.add(*[b for b in loader.load_bundles() if not b.is_container])
cmdenv.invoke(command)
return action
|
import logging
from webassets.script import CommandLineEnvironment
__all__ = ('make_assets_action',)
def make_assets_action(environment, loaders=[]):
"""Creates a ``werkzeug.script`` action which interfaces
with the webassets command line tools.
Since Werkzeug does not provide a way to have subcommands,
we need to model the assets subcommands as options.
If ``loaders`` is given, the command will use these loaders
to add bundles to the environment. This is mainly useful if
you are defining your bundles inside templates only, and
need to find them first using something like the Jinja2Loader.
"""
log = logging.getLogger('webassets')
log.addHandler(logging.StreamHandler())
def action(rebuild=False, watch=False, check=False, clean=False,
quiet=('q', False), verbose=('v', False)):
if len(filter(bool, [rebuild, watch, clean, check])) != 1:
print "Error: exactly one of --rebuild, --watch, --check or --clean must be given"
return 1
if rebuild:
command = 'rebuild'
elif watch:
command = 'watch'
elif clean:
command = 'clean'
elif check:
command = 'check'
log.setLevel(logging.DEBUG if verbose else (logging.WARNING if quiet else logging.INFO))
cmdenv = CommandLineEnvironment(environment, log)
if loaders:
log.info('Finding bundles...')
for loader in loaders:
environment.add(*[b for b in loader.load_bundles() if not b.is_container])
cmdenv.invoke(command)
return action
|
Make the "check" command available via the Werkzeug extension.
|
Make the "check" command available via the Werkzeug extension.
|
Python
|
bsd-2-clause
|
scorphus/webassets,wijerasa/webassets,JDeuce/webassets,heynemann/webassets,heynemann/webassets,heynemann/webassets,aconrad/webassets,aconrad/webassets,glorpen/webassets,glorpen/webassets,john2x/webassets,florianjacob/webassets,0x1997/webassets,JDeuce/webassets,0x1997/webassets,wijerasa/webassets,glorpen/webassets,aconrad/webassets,john2x/webassets,florianjacob/webassets,scorphus/webassets
|
import logging
from webassets.script import CommandLineEnvironment
__all__ = ('make_assets_action',)
def make_assets_action(environment, loaders=[]):
"""Creates a ``werkzeug.script`` action which interfaces
with the webassets command line tools.
Since Werkzeug does not provide a way to have subcommands,
we need to model the assets subcommands as options.
If ``loaders`` is given, the command will use these loaders
to add bundles to the environment. This is mainly useful if
you are defining your bundles inside templates only, and
need to find them first using something like the Jinja2Loader.
"""
log = logging.getLogger('webassets')
log.addHandler(logging.StreamHandler())
def action(rebuild=False, watch=False, clean=False, quiet=('q', False),
verbose=('v', False)):
if len(filter(bool, [rebuild, watch, clean])) != 1:
print "Error: exactly one of --rebuild, --watch or --clean must be given"
return 1
if rebuild:
command = 'rebuild'
elif watch:
command = 'watch'
elif clean:
command = 'clean'
log.setLevel(logging.DEBUG if verbose else (logging.WARNING if quiet else logging.INFO))
cmdenv = CommandLineEnvironment(environment, log)
if loaders:
log.info('Finding bundles...')
for loader in loaders:
environment.add(*[b for b in loader.load_bundles() if not b.is_container])
cmdenv.invoke(command)
return action
Make the "check" command available via the Werkzeug extension.
|
import logging
from webassets.script import CommandLineEnvironment
__all__ = ('make_assets_action',)
def make_assets_action(environment, loaders=[]):
"""Creates a ``werkzeug.script`` action which interfaces
with the webassets command line tools.
Since Werkzeug does not provide a way to have subcommands,
we need to model the assets subcommands as options.
If ``loaders`` is given, the command will use these loaders
to add bundles to the environment. This is mainly useful if
you are defining your bundles inside templates only, and
need to find them first using something like the Jinja2Loader.
"""
log = logging.getLogger('webassets')
log.addHandler(logging.StreamHandler())
def action(rebuild=False, watch=False, check=False, clean=False,
quiet=('q', False), verbose=('v', False)):
if len(filter(bool, [rebuild, watch, clean, check])) != 1:
print "Error: exactly one of --rebuild, --watch, --check or --clean must be given"
return 1
if rebuild:
command = 'rebuild'
elif watch:
command = 'watch'
elif clean:
command = 'clean'
elif check:
command = 'check'
log.setLevel(logging.DEBUG if verbose else (logging.WARNING if quiet else logging.INFO))
cmdenv = CommandLineEnvironment(environment, log)
if loaders:
log.info('Finding bundles...')
for loader in loaders:
environment.add(*[b for b in loader.load_bundles() if not b.is_container])
cmdenv.invoke(command)
return action
|
<commit_before>import logging
from webassets.script import CommandLineEnvironment
__all__ = ('make_assets_action',)
def make_assets_action(environment, loaders=[]):
"""Creates a ``werkzeug.script`` action which interfaces
with the webassets command line tools.
Since Werkzeug does not provide a way to have subcommands,
we need to model the assets subcommands as options.
If ``loaders`` is given, the command will use these loaders
to add bundles to the environment. This is mainly useful if
you are defining your bundles inside templates only, and
need to find them first using something like the Jinja2Loader.
"""
log = logging.getLogger('webassets')
log.addHandler(logging.StreamHandler())
def action(rebuild=False, watch=False, clean=False, quiet=('q', False),
verbose=('v', False)):
if len(filter(bool, [rebuild, watch, clean])) != 1:
print "Error: exactly one of --rebuild, --watch or --clean must be given"
return 1
if rebuild:
command = 'rebuild'
elif watch:
command = 'watch'
elif clean:
command = 'clean'
log.setLevel(logging.DEBUG if verbose else (logging.WARNING if quiet else logging.INFO))
cmdenv = CommandLineEnvironment(environment, log)
if loaders:
log.info('Finding bundles...')
for loader in loaders:
environment.add(*[b for b in loader.load_bundles() if not b.is_container])
cmdenv.invoke(command)
return action
<commit_msg>Make the "check" command available via the Werkzeug extension.<commit_after>
|
import logging
from webassets.script import CommandLineEnvironment
__all__ = ('make_assets_action',)
def make_assets_action(environment, loaders=[]):
"""Creates a ``werkzeug.script`` action which interfaces
with the webassets command line tools.
Since Werkzeug does not provide a way to have subcommands,
we need to model the assets subcommands as options.
If ``loaders`` is given, the command will use these loaders
to add bundles to the environment. This is mainly useful if
you are defining your bundles inside templates only, and
need to find them first using something like the Jinja2Loader.
"""
log = logging.getLogger('webassets')
log.addHandler(logging.StreamHandler())
def action(rebuild=False, watch=False, check=False, clean=False,
quiet=('q', False), verbose=('v', False)):
if len(filter(bool, [rebuild, watch, clean, check])) != 1:
print "Error: exactly one of --rebuild, --watch, --check or --clean must be given"
return 1
if rebuild:
command = 'rebuild'
elif watch:
command = 'watch'
elif clean:
command = 'clean'
elif check:
command = 'check'
log.setLevel(logging.DEBUG if verbose else (logging.WARNING if quiet else logging.INFO))
cmdenv = CommandLineEnvironment(environment, log)
if loaders:
log.info('Finding bundles...')
for loader in loaders:
environment.add(*[b for b in loader.load_bundles() if not b.is_container])
cmdenv.invoke(command)
return action
|
import logging
from webassets.script import CommandLineEnvironment
__all__ = ('make_assets_action',)
def make_assets_action(environment, loaders=[]):
"""Creates a ``werkzeug.script`` action which interfaces
with the webassets command line tools.
Since Werkzeug does not provide a way to have subcommands,
we need to model the assets subcommands as options.
If ``loaders`` is given, the command will use these loaders
to add bundles to the environment. This is mainly useful if
you are defining your bundles inside templates only, and
need to find them first using something like the Jinja2Loader.
"""
log = logging.getLogger('webassets')
log.addHandler(logging.StreamHandler())
def action(rebuild=False, watch=False, clean=False, quiet=('q', False),
verbose=('v', False)):
if len(filter(bool, [rebuild, watch, clean])) != 1:
print "Error: exactly one of --rebuild, --watch or --clean must be given"
return 1
if rebuild:
command = 'rebuild'
elif watch:
command = 'watch'
elif clean:
command = 'clean'
log.setLevel(logging.DEBUG if verbose else (logging.WARNING if quiet else logging.INFO))
cmdenv = CommandLineEnvironment(environment, log)
if loaders:
log.info('Finding bundles...')
for loader in loaders:
environment.add(*[b for b in loader.load_bundles() if not b.is_container])
cmdenv.invoke(command)
return action
Make the "check" command available via the Werkzeug extension.import logging
from webassets.script import CommandLineEnvironment
__all__ = ('make_assets_action',)
def make_assets_action(environment, loaders=[]):
"""Creates a ``werkzeug.script`` action which interfaces
with the webassets command line tools.
Since Werkzeug does not provide a way to have subcommands,
we need to model the assets subcommands as options.
If ``loaders`` is given, the command will use these loaders
to add bundles to the environment. This is mainly useful if
you are defining your bundles inside templates only, and
need to find them first using something like the Jinja2Loader.
"""
log = logging.getLogger('webassets')
log.addHandler(logging.StreamHandler())
def action(rebuild=False, watch=False, check=False, clean=False,
quiet=('q', False), verbose=('v', False)):
if len(filter(bool, [rebuild, watch, clean, check])) != 1:
print "Error: exactly one of --rebuild, --watch, --check or --clean must be given"
return 1
if rebuild:
command = 'rebuild'
elif watch:
command = 'watch'
elif clean:
command = 'clean'
elif check:
command = 'check'
log.setLevel(logging.DEBUG if verbose else (logging.WARNING if quiet else logging.INFO))
cmdenv = CommandLineEnvironment(environment, log)
if loaders:
log.info('Finding bundles...')
for loader in loaders:
environment.add(*[b for b in loader.load_bundles() if not b.is_container])
cmdenv.invoke(command)
return action
|
<commit_before>import logging
from webassets.script import CommandLineEnvironment
__all__ = ('make_assets_action',)
def make_assets_action(environment, loaders=[]):
"""Creates a ``werkzeug.script`` action which interfaces
with the webassets command line tools.
Since Werkzeug does not provide a way to have subcommands,
we need to model the assets subcommands as options.
If ``loaders`` is given, the command will use these loaders
to add bundles to the environment. This is mainly useful if
you are defining your bundles inside templates only, and
need to find them first using something like the Jinja2Loader.
"""
log = logging.getLogger('webassets')
log.addHandler(logging.StreamHandler())
def action(rebuild=False, watch=False, clean=False, quiet=('q', False),
verbose=('v', False)):
if len(filter(bool, [rebuild, watch, clean])) != 1:
print "Error: exactly one of --rebuild, --watch or --clean must be given"
return 1
if rebuild:
command = 'rebuild'
elif watch:
command = 'watch'
elif clean:
command = 'clean'
log.setLevel(logging.DEBUG if verbose else (logging.WARNING if quiet else logging.INFO))
cmdenv = CommandLineEnvironment(environment, log)
if loaders:
log.info('Finding bundles...')
for loader in loaders:
environment.add(*[b for b in loader.load_bundles() if not b.is_container])
cmdenv.invoke(command)
return action
<commit_msg>Make the "check" command available via the Werkzeug extension.<commit_after>import logging
from webassets.script import CommandLineEnvironment
__all__ = ('make_assets_action',)
def make_assets_action(environment, loaders=[]):
"""Creates a ``werkzeug.script`` action which interfaces
with the webassets command line tools.
Since Werkzeug does not provide a way to have subcommands,
we need to model the assets subcommands as options.
If ``loaders`` is given, the command will use these loaders
to add bundles to the environment. This is mainly useful if
you are defining your bundles inside templates only, and
need to find them first using something like the Jinja2Loader.
"""
log = logging.getLogger('webassets')
log.addHandler(logging.StreamHandler())
def action(rebuild=False, watch=False, check=False, clean=False,
quiet=('q', False), verbose=('v', False)):
if len(filter(bool, [rebuild, watch, clean, check])) != 1:
print "Error: exactly one of --rebuild, --watch, --check or --clean must be given"
return 1
if rebuild:
command = 'rebuild'
elif watch:
command = 'watch'
elif clean:
command = 'clean'
elif check:
command = 'check'
log.setLevel(logging.DEBUG if verbose else (logging.WARNING if quiet else logging.INFO))
cmdenv = CommandLineEnvironment(environment, log)
if loaders:
log.info('Finding bundles...')
for loader in loaders:
environment.add(*[b for b in loader.load_bundles() if not b.is_container])
cmdenv.invoke(command)
return action
|
2c2604527cfe0ceb3dbf052bbcaf9e2e660b9e47
|
app.py
|
app.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# ephemeral by st0le
# quick way share text between your network devices
from flask import Flask, request, render_template, redirect, url_for
db = {}
app = Flask(__name__)
@app.route('/')
def get():
ip = request.remote_addr
return render_template("index.html", text = db.get(ip, ''))
@app.route('/', methods=['POST'])
def post():
ip, content = request.remote_addr, request.form.get('text')
if len(content) == 0:
del db[ip]
else:
db[ip] = content
return redirect(url_for('get'))
if __name__ == "__main__":
app.run(host="0.0.0.0")
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# ephemeral by st0le
# quick way share text between your network devices
from flask import Flask, request, render_template, redirect, url_for
db = {}
app = Flask(__name__)
def get_client_ip(request):
# PythonAnywhere.com calls our service through a loabalancer
# the remote_addr is therefore the IP of the loaabalancer, PythonAnywhere stores Client IP in header
if request.headers['X-Real-IP']: return request.headers['X-Real-IP']
return request.remote_addr
if __name__ == "__main__":
app.run(host="0.0.0.0")
@app.route('/')
def get():
ip = get_client_ip(request)
return render_template("index.html", text = db.get(ip, ''))
@app.route('/', methods=['POST'])
def post():
ip, content = get_client_ip(request), request.form.get('text')
if len(content) == 0:
del db[ip]
else:
db[ip] = content
return redirect(url_for('get'))
if __name__ == "__main__":
app.run(host="0.0.0.0")
|
Fix for PythonAnywhere LoadBalancer IP
|
Fix for PythonAnywhere LoadBalancer IP
|
Python
|
mit
|
st0le/ephemeral,st0le/ephemeral
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# ephemeral by st0le
# quick way share text between your network devices
from flask import Flask, request, render_template, redirect, url_for
db = {}
app = Flask(__name__)
@app.route('/')
def get():
ip = request.remote_addr
return render_template("index.html", text = db.get(ip, ''))
@app.route('/', methods=['POST'])
def post():
ip, content = request.remote_addr, request.form.get('text')
if len(content) == 0:
del db[ip]
else:
db[ip] = content
return redirect(url_for('get'))
if __name__ == "__main__":
app.run(host="0.0.0.0")Fix for PythonAnywhere LoadBalancer IP
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# ephemeral by st0le
# quick way share text between your network devices
from flask import Flask, request, render_template, redirect, url_for
db = {}
app = Flask(__name__)
def get_client_ip(request):
# PythonAnywhere.com calls our service through a loabalancer
# the remote_addr is therefore the IP of the loaabalancer, PythonAnywhere stores Client IP in header
if request.headers['X-Real-IP']: return request.headers['X-Real-IP']
return request.remote_addr
if __name__ == "__main__":
app.run(host="0.0.0.0")
@app.route('/')
def get():
ip = get_client_ip(request)
return render_template("index.html", text = db.get(ip, ''))
@app.route('/', methods=['POST'])
def post():
ip, content = get_client_ip(request), request.form.get('text')
if len(content) == 0:
del db[ip]
else:
db[ip] = content
return redirect(url_for('get'))
if __name__ == "__main__":
app.run(host="0.0.0.0")
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# ephemeral by st0le
# quick way share text between your network devices
from flask import Flask, request, render_template, redirect, url_for
db = {}
app = Flask(__name__)
@app.route('/')
def get():
ip = request.remote_addr
return render_template("index.html", text = db.get(ip, ''))
@app.route('/', methods=['POST'])
def post():
ip, content = request.remote_addr, request.form.get('text')
if len(content) == 0:
del db[ip]
else:
db[ip] = content
return redirect(url_for('get'))
if __name__ == "__main__":
app.run(host="0.0.0.0")<commit_msg>Fix for PythonAnywhere LoadBalancer IP<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# ephemeral by st0le
# quick way share text between your network devices
from flask import Flask, request, render_template, redirect, url_for
db = {}
app = Flask(__name__)
def get_client_ip(request):
# PythonAnywhere.com calls our service through a loabalancer
# the remote_addr is therefore the IP of the loaabalancer, PythonAnywhere stores Client IP in header
if request.headers['X-Real-IP']: return request.headers['X-Real-IP']
return request.remote_addr
if __name__ == "__main__":
app.run(host="0.0.0.0")
@app.route('/')
def get():
ip = get_client_ip(request)
return render_template("index.html", text = db.get(ip, ''))
@app.route('/', methods=['POST'])
def post():
ip, content = get_client_ip(request), request.form.get('text')
if len(content) == 0:
del db[ip]
else:
db[ip] = content
return redirect(url_for('get'))
if __name__ == "__main__":
app.run(host="0.0.0.0")
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# ephemeral by st0le
# quick way share text between your network devices
from flask import Flask, request, render_template, redirect, url_for
db = {}
app = Flask(__name__)
@app.route('/')
def get():
ip = request.remote_addr
return render_template("index.html", text = db.get(ip, ''))
@app.route('/', methods=['POST'])
def post():
ip, content = request.remote_addr, request.form.get('text')
if len(content) == 0:
del db[ip]
else:
db[ip] = content
return redirect(url_for('get'))
if __name__ == "__main__":
app.run(host="0.0.0.0")Fix for PythonAnywhere LoadBalancer IP#!/usr/bin/env python
# -*- coding: utf-8 -*-
# ephemeral by st0le
# quick way share text between your network devices
from flask import Flask, request, render_template, redirect, url_for
db = {}
app = Flask(__name__)
def get_client_ip(request):
# PythonAnywhere.com calls our service through a loabalancer
# the remote_addr is therefore the IP of the loaabalancer, PythonAnywhere stores Client IP in header
if request.headers['X-Real-IP']: return request.headers['X-Real-IP']
return request.remote_addr
if __name__ == "__main__":
app.run(host="0.0.0.0")
@app.route('/')
def get():
ip = get_client_ip(request)
return render_template("index.html", text = db.get(ip, ''))
@app.route('/', methods=['POST'])
def post():
ip, content = get_client_ip(request), request.form.get('text')
if len(content) == 0:
del db[ip]
else:
db[ip] = content
return redirect(url_for('get'))
if __name__ == "__main__":
app.run(host="0.0.0.0")
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# ephemeral by st0le
# quick way share text between your network devices
from flask import Flask, request, render_template, redirect, url_for
db = {}
app = Flask(__name__)
@app.route('/')
def get():
ip = request.remote_addr
return render_template("index.html", text = db.get(ip, ''))
@app.route('/', methods=['POST'])
def post():
ip, content = request.remote_addr, request.form.get('text')
if len(content) == 0:
del db[ip]
else:
db[ip] = content
return redirect(url_for('get'))
if __name__ == "__main__":
app.run(host="0.0.0.0")<commit_msg>Fix for PythonAnywhere LoadBalancer IP<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# ephemeral by st0le
# quick way share text between your network devices
from flask import Flask, request, render_template, redirect, url_for
db = {}
app = Flask(__name__)
def get_client_ip(request):
# PythonAnywhere.com calls our service through a loabalancer
# the remote_addr is therefore the IP of the loaabalancer, PythonAnywhere stores Client IP in header
if request.headers['X-Real-IP']: return request.headers['X-Real-IP']
return request.remote_addr
if __name__ == "__main__":
app.run(host="0.0.0.0")
@app.route('/')
def get():
ip = get_client_ip(request)
return render_template("index.html", text = db.get(ip, ''))
@app.route('/', methods=['POST'])
def post():
ip, content = get_client_ip(request), request.form.get('text')
if len(content) == 0:
del db[ip]
else:
db[ip] = content
return redirect(url_for('get'))
if __name__ == "__main__":
app.run(host="0.0.0.0")
|
816b222dd771c84267b3f8c64fd2c1ec7dabfbc4
|
ex6.py
|
ex6.py
|
x = f"There are {10} types of people."
binary = "binary"
do_not = "don't"
y = f"Those who know {binary} and those who {do_not}."
print(x)
print(y)
print("I said: {x}.")
print("I also said: '{y}'.")
hilarious = False
joke_evaluation = "Isn't that joke so funny?! {}"
print(joke_evaluation.format(hilarious))
w = "This is the left side of..."
e = "a string with a right side."
print(w + e)
|
# left out assignment for types_of_people mentioned in intro
types_of_people = 10
# change variable from 10 to types_of_people
x = f"There are {types_of_people} types of people."
binary = "binary"
do_not = "don't"
y = f"Those who know {binary} and those who {do_not}."
print(x)
print(y)
# left out f in front of string and omit extra period
print(f"I said: {x}")
# left out f in front of string and omit extra period
print(f"I also said: '{y}'")
hilarious = False
joke_evaluation = "Isn't that joke so funny?! {}"
print(joke_evaluation.format(hilarious))
w = "This is the left side of..."
e = "a string with a right side."
print(w + e)
# change "What You Should See" snapshot to reflect changes
|
Add missing variable and correct f-strings
|
fix: Add missing variable and correct f-strings
See commented lines for changes to ex6.py:
- add types_of_people variable assigment
- change variable from 10 to types_of_people
- add letter f before f-strings
- omit unnecessary periods
|
Python
|
mit
|
zedshaw/learn-python3-thw-code,zedshaw/learn-python3-thw-code,zedshaw/learn-python3-thw-code
|
x = f"There are {10} types of people."
binary = "binary"
do_not = "don't"
y = f"Those who know {binary} and those who {do_not}."
print(x)
print(y)
print("I said: {x}.")
print("I also said: '{y}'.")
hilarious = False
joke_evaluation = "Isn't that joke so funny?! {}"
print(joke_evaluation.format(hilarious))
w = "This is the left side of..."
e = "a string with a right side."
print(w + e)
fix: Add missing variable and correct f-strings
See commented lines for changes to ex6.py:
- add types_of_people variable assigment
- change variable from 10 to types_of_people
- add letter f before f-strings
- omit unnecessary periods
|
# left out assignment for types_of_people mentioned in intro
types_of_people = 10
# change variable from 10 to types_of_people
x = f"There are {types_of_people} types of people."
binary = "binary"
do_not = "don't"
y = f"Those who know {binary} and those who {do_not}."
print(x)
print(y)
# left out f in front of string and omit extra period
print(f"I said: {x}")
# left out f in front of string and omit extra period
print(f"I also said: '{y}'")
hilarious = False
joke_evaluation = "Isn't that joke so funny?! {}"
print(joke_evaluation.format(hilarious))
w = "This is the left side of..."
e = "a string with a right side."
print(w + e)
# change "What You Should See" snapshot to reflect changes
|
<commit_before>x = f"There are {10} types of people."
binary = "binary"
do_not = "don't"
y = f"Those who know {binary} and those who {do_not}."
print(x)
print(y)
print("I said: {x}.")
print("I also said: '{y}'.")
hilarious = False
joke_evaluation = "Isn't that joke so funny?! {}"
print(joke_evaluation.format(hilarious))
w = "This is the left side of..."
e = "a string with a right side."
print(w + e)
<commit_msg>fix: Add missing variable and correct f-strings
See commented lines for changes to ex6.py:
- add types_of_people variable assigment
- change variable from 10 to types_of_people
- add letter f before f-strings
- omit unnecessary periods<commit_after>
|
# left out assignment for types_of_people mentioned in intro
types_of_people = 10
# change variable from 10 to types_of_people
x = f"There are {types_of_people} types of people."
binary = "binary"
do_not = "don't"
y = f"Those who know {binary} and those who {do_not}."
print(x)
print(y)
# left out f in front of string and omit extra period
print(f"I said: {x}")
# left out f in front of string and omit extra period
print(f"I also said: '{y}'")
hilarious = False
joke_evaluation = "Isn't that joke so funny?! {}"
print(joke_evaluation.format(hilarious))
w = "This is the left side of..."
e = "a string with a right side."
print(w + e)
# change "What You Should See" snapshot to reflect changes
|
x = f"There are {10} types of people."
binary = "binary"
do_not = "don't"
y = f"Those who know {binary} and those who {do_not}."
print(x)
print(y)
print("I said: {x}.")
print("I also said: '{y}'.")
hilarious = False
joke_evaluation = "Isn't that joke so funny?! {}"
print(joke_evaluation.format(hilarious))
w = "This is the left side of..."
e = "a string with a right side."
print(w + e)
fix: Add missing variable and correct f-strings
See commented lines for changes to ex6.py:
- add types_of_people variable assigment
- change variable from 10 to types_of_people
- add letter f before f-strings
- omit unnecessary periods# left out assignment for types_of_people mentioned in intro
types_of_people = 10
# change variable from 10 to types_of_people
x = f"There are {types_of_people} types of people."
binary = "binary"
do_not = "don't"
y = f"Those who know {binary} and those who {do_not}."
print(x)
print(y)
# left out f in front of string and omit extra period
print(f"I said: {x}")
# left out f in front of string and omit extra period
print(f"I also said: '{y}'")
hilarious = False
joke_evaluation = "Isn't that joke so funny?! {}"
print(joke_evaluation.format(hilarious))
w = "This is the left side of..."
e = "a string with a right side."
print(w + e)
# change "What You Should See" snapshot to reflect changes
|
<commit_before>x = f"There are {10} types of people."
binary = "binary"
do_not = "don't"
y = f"Those who know {binary} and those who {do_not}."
print(x)
print(y)
print("I said: {x}.")
print("I also said: '{y}'.")
hilarious = False
joke_evaluation = "Isn't that joke so funny?! {}"
print(joke_evaluation.format(hilarious))
w = "This is the left side of..."
e = "a string with a right side."
print(w + e)
<commit_msg>fix: Add missing variable and correct f-strings
See commented lines for changes to ex6.py:
- add types_of_people variable assigment
- change variable from 10 to types_of_people
- add letter f before f-strings
- omit unnecessary periods<commit_after># left out assignment for types_of_people mentioned in intro
types_of_people = 10
# change variable from 10 to types_of_people
x = f"There are {types_of_people} types of people."
binary = "binary"
do_not = "don't"
y = f"Those who know {binary} and those who {do_not}."
print(x)
print(y)
# left out f in front of string and omit extra period
print(f"I said: {x}")
# left out f in front of string and omit extra period
print(f"I also said: '{y}'")
hilarious = False
joke_evaluation = "Isn't that joke so funny?! {}"
print(joke_evaluation.format(hilarious))
w = "This is the left side of..."
e = "a string with a right side."
print(w + e)
# change "What You Should See" snapshot to reflect changes
|
0260e50ab4d1449fa95b8e712861b7e44ac21965
|
umessages/appsettings.py
|
umessages/appsettings.py
|
# Umessages settings file.
#
# Please consult the docs for more information about each setting.
from django.conf import settings
gettext = lambda s: s
"""
Boolean value that defines ifumessages should use the django messages
framework to notify the user of any changes.
"""
UMESSAGES_USE_MESSAGES = getattr(settings,
'UMESSAGES_USE_MESSAGES',
True)
|
# Umessages settings file.
#
# Please consult the docs for more information about each setting.
from django.conf import settings
gettext = lambda s: s
CRISPY_TEMPLATE_PACK = getattr(settings, 'CRISPY_TEMPLATE_PACK', 'bootstrap')
"""
Boolean value that defines ifumessages should use the django messages
framework to notify the user of any changes.
"""
UMESSAGES_USE_MESSAGES = getattr(settings,
'UMESSAGES_USE_MESSAGES',
True)
|
Use bootstrap template pack by default
|
Use bootstrap template pack by default
|
Python
|
bsd-3-clause
|
euanlau/django-umessages,euanlau/django-umessages
|
# Umessages settings file.
#
# Please consult the docs for more information about each setting.
from django.conf import settings
gettext = lambda s: s
"""
Boolean value that defines ifumessages should use the django messages
framework to notify the user of any changes.
"""
UMESSAGES_USE_MESSAGES = getattr(settings,
'UMESSAGES_USE_MESSAGES',
True)
Use bootstrap template pack by default
|
# Umessages settings file.
#
# Please consult the docs for more information about each setting.
from django.conf import settings
gettext = lambda s: s
CRISPY_TEMPLATE_PACK = getattr(settings, 'CRISPY_TEMPLATE_PACK', 'bootstrap')
"""
Boolean value that defines ifumessages should use the django messages
framework to notify the user of any changes.
"""
UMESSAGES_USE_MESSAGES = getattr(settings,
'UMESSAGES_USE_MESSAGES',
True)
|
<commit_before># Umessages settings file.
#
# Please consult the docs for more information about each setting.
from django.conf import settings
gettext = lambda s: s
"""
Boolean value that defines ifumessages should use the django messages
framework to notify the user of any changes.
"""
UMESSAGES_USE_MESSAGES = getattr(settings,
'UMESSAGES_USE_MESSAGES',
True)
<commit_msg>Use bootstrap template pack by default<commit_after>
|
# Umessages settings file.
#
# Please consult the docs for more information about each setting.
from django.conf import settings
gettext = lambda s: s
CRISPY_TEMPLATE_PACK = getattr(settings, 'CRISPY_TEMPLATE_PACK', 'bootstrap')
"""
Boolean value that defines ifumessages should use the django messages
framework to notify the user of any changes.
"""
UMESSAGES_USE_MESSAGES = getattr(settings,
'UMESSAGES_USE_MESSAGES',
True)
|
# Umessages settings file.
#
# Please consult the docs for more information about each setting.
from django.conf import settings
gettext = lambda s: s
"""
Boolean value that defines ifumessages should use the django messages
framework to notify the user of any changes.
"""
UMESSAGES_USE_MESSAGES = getattr(settings,
'UMESSAGES_USE_MESSAGES',
True)
Use bootstrap template pack by default# Umessages settings file.
#
# Please consult the docs for more information about each setting.
from django.conf import settings
gettext = lambda s: s
CRISPY_TEMPLATE_PACK = getattr(settings, 'CRISPY_TEMPLATE_PACK', 'bootstrap')
"""
Boolean value that defines ifumessages should use the django messages
framework to notify the user of any changes.
"""
UMESSAGES_USE_MESSAGES = getattr(settings,
'UMESSAGES_USE_MESSAGES',
True)
|
<commit_before># Umessages settings file.
#
# Please consult the docs for more information about each setting.
from django.conf import settings
gettext = lambda s: s
"""
Boolean value that defines ifumessages should use the django messages
framework to notify the user of any changes.
"""
UMESSAGES_USE_MESSAGES = getattr(settings,
'UMESSAGES_USE_MESSAGES',
True)
<commit_msg>Use bootstrap template pack by default<commit_after># Umessages settings file.
#
# Please consult the docs for more information about each setting.
from django.conf import settings
gettext = lambda s: s
CRISPY_TEMPLATE_PACK = getattr(settings, 'CRISPY_TEMPLATE_PACK', 'bootstrap')
"""
Boolean value that defines ifumessages should use the django messages
framework to notify the user of any changes.
"""
UMESSAGES_USE_MESSAGES = getattr(settings,
'UMESSAGES_USE_MESSAGES',
True)
|
497f3a61b7f7fb758ba7093e04310621842bbcd7
|
splunklib/__init__.py
|
splunklib/__init__.py
|
# Copyright 2011-2012 Splunk, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"): you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Python library for Splunk."""
__version_info__ = (0, 1, '0a')
__version__ = ".".join(map(str, __version_info__))
|
# Copyright 2011-2012 Splunk, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"): you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Python library for Splunk."""
__version_info__ = (0, 8, 0)
__version__ = ".".join(map(str, __version_info__))
|
Bump version number if preparation for release.
|
Bump version number if preparation for release.
|
Python
|
apache-2.0
|
sullivanmatt/splunk-sdk-python,kkirsche/splunk-sdk-python,kkirsche/splunk-sdk-python,kkirsche/splunk-sdk-python,splunk/splunk-sdk-python,ww9rivers/splunk-sdk-python,splunk/splunk-sdk-python,lowtalker/splunk-sdk-python,kkirsche/splunk-sdk-python
|
# Copyright 2011-2012 Splunk, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"): you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Python library for Splunk."""
__version_info__ = (0, 1, '0a')
__version__ = ".".join(map(str, __version_info__))
Bump version number if preparation for release.
|
# Copyright 2011-2012 Splunk, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"): you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Python library for Splunk."""
__version_info__ = (0, 8, 0)
__version__ = ".".join(map(str, __version_info__))
|
<commit_before># Copyright 2011-2012 Splunk, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"): you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Python library for Splunk."""
__version_info__ = (0, 1, '0a')
__version__ = ".".join(map(str, __version_info__))
<commit_msg>Bump version number if preparation for release.<commit_after>
|
# Copyright 2011-2012 Splunk, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"): you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Python library for Splunk."""
__version_info__ = (0, 8, 0)
__version__ = ".".join(map(str, __version_info__))
|
# Copyright 2011-2012 Splunk, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"): you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Python library for Splunk."""
__version_info__ = (0, 1, '0a')
__version__ = ".".join(map(str, __version_info__))
Bump version number if preparation for release.# Copyright 2011-2012 Splunk, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"): you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Python library for Splunk."""
__version_info__ = (0, 8, 0)
__version__ = ".".join(map(str, __version_info__))
|
<commit_before># Copyright 2011-2012 Splunk, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"): you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Python library for Splunk."""
__version_info__ = (0, 1, '0a')
__version__ = ".".join(map(str, __version_info__))
<commit_msg>Bump version number if preparation for release.<commit_after># Copyright 2011-2012 Splunk, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"): you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Python library for Splunk."""
__version_info__ = (0, 8, 0)
__version__ = ".".join(map(str, __version_info__))
|
bc9c0120523548d5a28c6a21f48831c1daa39af3
|
tests/test_data_structures.py
|
tests/test_data_structures.py
|
try:
import unittest2 as unittest
except ImportError:
import unittest
import zstd
class TestCompressionParameters(unittest.TestCase):
def test_init_bad_arg_type(self):
with self.assertRaises(TypeError):
zstd.CompressionParameters()
with self.assertRaises(TypeError):
zstd.CompressionParameters((0, 1))
def test_get_compression_parameters(self):
p = zstd.get_compression_parameters(1)
self.assertIsInstance(p, zstd.CompressionParameters)
self.assertEqual(p[0], 19)
|
try:
import unittest2 as unittest
except ImportError:
import unittest
try:
import hypothesis
import hypothesis.strategies as strategies
except ImportError:
hypothesis = None
import zstd
class TestCompressionParameters(unittest.TestCase):
def test_init_bad_arg_type(self):
with self.assertRaises(TypeError):
zstd.CompressionParameters()
with self.assertRaises(TypeError):
zstd.CompressionParameters((0, 1))
def test_get_compression_parameters(self):
p = zstd.get_compression_parameters(1)
self.assertIsInstance(p, zstd.CompressionParameters)
self.assertEqual(p[0], 19)
if hypothesis:
s_windowlog = strategies.integers(min_value=zstd.WINDOWLOG_MIN,
max_value=zstd.WINDOWLOG_MAX)
s_chainlog = strategies.integers(min_value=zstd.CHAINLOG_MIN,
max_value=zstd.CHAINLOG_MAX)
s_hashlog = strategies.integers(min_value=zstd.HASHLOG_MIN,
max_value=zstd.HASHLOG_MAX)
s_searchlog = strategies.integers(min_value=zstd.SEARCHLOG_MIN,
max_value=zstd.SEARCHLOG_MAX)
s_searchlength = strategies.integers(min_value=zstd.SEARCHLENGTH_MIN,
max_value=zstd.SEARCHLENGTH_MAX)
s_targetlength = strategies.integers(min_value=zstd.TARGETLENGTH_MIN,
max_value=zstd.TARGETLENGTH_MAX)
s_strategy = strategies.sampled_from((zstd.STRATEGY_FAST,
zstd.STRATEGY_DFAST,
zstd.STRATEGY_GREEDY,
zstd.STRATEGY_LAZY,
zstd.STRATEGY_LAZY2,
zstd.STRATEGY_BTLAZY2,
zstd.STRATEGY_BTOPT))
class TestCompressionParametersHypothesis(unittest.TestCase):
@hypothesis.given(s_windowlog, s_chainlog, s_hashlog, s_searchlog,
s_searchlength, s_targetlength, s_strategy)
def test_valid_init(self, windowlog, chainlog, hashlog, searchlog,
searchlength, targetlength, strategy):
p = zstd.CompressionParameters(windowlog, chainlog, hashlog,
searchlog, searchlength,
targetlength, strategy)
self.assertEqual(tuple(p),
(windowlog, chainlog, hashlog, searchlog,
searchlength, targetlength, strategy))
|
Add hypothesis test to randomly generate CompressionParameters
|
Add hypothesis test to randomly generate CompressionParameters
|
Python
|
bsd-3-clause
|
terrelln/python-zstandard,indygreg/python-zstandard,terrelln/python-zstandard,terrelln/python-zstandard,indygreg/python-zstandard,indygreg/python-zstandard,indygreg/python-zstandard,terrelln/python-zstandard
|
try:
import unittest2 as unittest
except ImportError:
import unittest
import zstd
class TestCompressionParameters(unittest.TestCase):
def test_init_bad_arg_type(self):
with self.assertRaises(TypeError):
zstd.CompressionParameters()
with self.assertRaises(TypeError):
zstd.CompressionParameters((0, 1))
def test_get_compression_parameters(self):
p = zstd.get_compression_parameters(1)
self.assertIsInstance(p, zstd.CompressionParameters)
self.assertEqual(p[0], 19)
Add hypothesis test to randomly generate CompressionParameters
|
try:
import unittest2 as unittest
except ImportError:
import unittest
try:
import hypothesis
import hypothesis.strategies as strategies
except ImportError:
hypothesis = None
import zstd
class TestCompressionParameters(unittest.TestCase):
def test_init_bad_arg_type(self):
with self.assertRaises(TypeError):
zstd.CompressionParameters()
with self.assertRaises(TypeError):
zstd.CompressionParameters((0, 1))
def test_get_compression_parameters(self):
p = zstd.get_compression_parameters(1)
self.assertIsInstance(p, zstd.CompressionParameters)
self.assertEqual(p[0], 19)
if hypothesis:
s_windowlog = strategies.integers(min_value=zstd.WINDOWLOG_MIN,
max_value=zstd.WINDOWLOG_MAX)
s_chainlog = strategies.integers(min_value=zstd.CHAINLOG_MIN,
max_value=zstd.CHAINLOG_MAX)
s_hashlog = strategies.integers(min_value=zstd.HASHLOG_MIN,
max_value=zstd.HASHLOG_MAX)
s_searchlog = strategies.integers(min_value=zstd.SEARCHLOG_MIN,
max_value=zstd.SEARCHLOG_MAX)
s_searchlength = strategies.integers(min_value=zstd.SEARCHLENGTH_MIN,
max_value=zstd.SEARCHLENGTH_MAX)
s_targetlength = strategies.integers(min_value=zstd.TARGETLENGTH_MIN,
max_value=zstd.TARGETLENGTH_MAX)
s_strategy = strategies.sampled_from((zstd.STRATEGY_FAST,
zstd.STRATEGY_DFAST,
zstd.STRATEGY_GREEDY,
zstd.STRATEGY_LAZY,
zstd.STRATEGY_LAZY2,
zstd.STRATEGY_BTLAZY2,
zstd.STRATEGY_BTOPT))
class TestCompressionParametersHypothesis(unittest.TestCase):
@hypothesis.given(s_windowlog, s_chainlog, s_hashlog, s_searchlog,
s_searchlength, s_targetlength, s_strategy)
def test_valid_init(self, windowlog, chainlog, hashlog, searchlog,
searchlength, targetlength, strategy):
p = zstd.CompressionParameters(windowlog, chainlog, hashlog,
searchlog, searchlength,
targetlength, strategy)
self.assertEqual(tuple(p),
(windowlog, chainlog, hashlog, searchlog,
searchlength, targetlength, strategy))
|
<commit_before>try:
import unittest2 as unittest
except ImportError:
import unittest
import zstd
class TestCompressionParameters(unittest.TestCase):
def test_init_bad_arg_type(self):
with self.assertRaises(TypeError):
zstd.CompressionParameters()
with self.assertRaises(TypeError):
zstd.CompressionParameters((0, 1))
def test_get_compression_parameters(self):
p = zstd.get_compression_parameters(1)
self.assertIsInstance(p, zstd.CompressionParameters)
self.assertEqual(p[0], 19)
<commit_msg>Add hypothesis test to randomly generate CompressionParameters<commit_after>
|
try:
import unittest2 as unittest
except ImportError:
import unittest
try:
import hypothesis
import hypothesis.strategies as strategies
except ImportError:
hypothesis = None
import zstd
class TestCompressionParameters(unittest.TestCase):
def test_init_bad_arg_type(self):
with self.assertRaises(TypeError):
zstd.CompressionParameters()
with self.assertRaises(TypeError):
zstd.CompressionParameters((0, 1))
def test_get_compression_parameters(self):
p = zstd.get_compression_parameters(1)
self.assertIsInstance(p, zstd.CompressionParameters)
self.assertEqual(p[0], 19)
if hypothesis:
s_windowlog = strategies.integers(min_value=zstd.WINDOWLOG_MIN,
max_value=zstd.WINDOWLOG_MAX)
s_chainlog = strategies.integers(min_value=zstd.CHAINLOG_MIN,
max_value=zstd.CHAINLOG_MAX)
s_hashlog = strategies.integers(min_value=zstd.HASHLOG_MIN,
max_value=zstd.HASHLOG_MAX)
s_searchlog = strategies.integers(min_value=zstd.SEARCHLOG_MIN,
max_value=zstd.SEARCHLOG_MAX)
s_searchlength = strategies.integers(min_value=zstd.SEARCHLENGTH_MIN,
max_value=zstd.SEARCHLENGTH_MAX)
s_targetlength = strategies.integers(min_value=zstd.TARGETLENGTH_MIN,
max_value=zstd.TARGETLENGTH_MAX)
s_strategy = strategies.sampled_from((zstd.STRATEGY_FAST,
zstd.STRATEGY_DFAST,
zstd.STRATEGY_GREEDY,
zstd.STRATEGY_LAZY,
zstd.STRATEGY_LAZY2,
zstd.STRATEGY_BTLAZY2,
zstd.STRATEGY_BTOPT))
class TestCompressionParametersHypothesis(unittest.TestCase):
@hypothesis.given(s_windowlog, s_chainlog, s_hashlog, s_searchlog,
s_searchlength, s_targetlength, s_strategy)
def test_valid_init(self, windowlog, chainlog, hashlog, searchlog,
searchlength, targetlength, strategy):
p = zstd.CompressionParameters(windowlog, chainlog, hashlog,
searchlog, searchlength,
targetlength, strategy)
self.assertEqual(tuple(p),
(windowlog, chainlog, hashlog, searchlog,
searchlength, targetlength, strategy))
|
try:
import unittest2 as unittest
except ImportError:
import unittest
import zstd
class TestCompressionParameters(unittest.TestCase):
def test_init_bad_arg_type(self):
with self.assertRaises(TypeError):
zstd.CompressionParameters()
with self.assertRaises(TypeError):
zstd.CompressionParameters((0, 1))
def test_get_compression_parameters(self):
p = zstd.get_compression_parameters(1)
self.assertIsInstance(p, zstd.CompressionParameters)
self.assertEqual(p[0], 19)
Add hypothesis test to randomly generate CompressionParameterstry:
import unittest2 as unittest
except ImportError:
import unittest
try:
import hypothesis
import hypothesis.strategies as strategies
except ImportError:
hypothesis = None
import zstd
class TestCompressionParameters(unittest.TestCase):
def test_init_bad_arg_type(self):
with self.assertRaises(TypeError):
zstd.CompressionParameters()
with self.assertRaises(TypeError):
zstd.CompressionParameters((0, 1))
def test_get_compression_parameters(self):
p = zstd.get_compression_parameters(1)
self.assertIsInstance(p, zstd.CompressionParameters)
self.assertEqual(p[0], 19)
if hypothesis:
s_windowlog = strategies.integers(min_value=zstd.WINDOWLOG_MIN,
max_value=zstd.WINDOWLOG_MAX)
s_chainlog = strategies.integers(min_value=zstd.CHAINLOG_MIN,
max_value=zstd.CHAINLOG_MAX)
s_hashlog = strategies.integers(min_value=zstd.HASHLOG_MIN,
max_value=zstd.HASHLOG_MAX)
s_searchlog = strategies.integers(min_value=zstd.SEARCHLOG_MIN,
max_value=zstd.SEARCHLOG_MAX)
s_searchlength = strategies.integers(min_value=zstd.SEARCHLENGTH_MIN,
max_value=zstd.SEARCHLENGTH_MAX)
s_targetlength = strategies.integers(min_value=zstd.TARGETLENGTH_MIN,
max_value=zstd.TARGETLENGTH_MAX)
s_strategy = strategies.sampled_from((zstd.STRATEGY_FAST,
zstd.STRATEGY_DFAST,
zstd.STRATEGY_GREEDY,
zstd.STRATEGY_LAZY,
zstd.STRATEGY_LAZY2,
zstd.STRATEGY_BTLAZY2,
zstd.STRATEGY_BTOPT))
class TestCompressionParametersHypothesis(unittest.TestCase):
@hypothesis.given(s_windowlog, s_chainlog, s_hashlog, s_searchlog,
s_searchlength, s_targetlength, s_strategy)
def test_valid_init(self, windowlog, chainlog, hashlog, searchlog,
searchlength, targetlength, strategy):
p = zstd.CompressionParameters(windowlog, chainlog, hashlog,
searchlog, searchlength,
targetlength, strategy)
self.assertEqual(tuple(p),
(windowlog, chainlog, hashlog, searchlog,
searchlength, targetlength, strategy))
|
<commit_before>try:
import unittest2 as unittest
except ImportError:
import unittest
import zstd
class TestCompressionParameters(unittest.TestCase):
def test_init_bad_arg_type(self):
with self.assertRaises(TypeError):
zstd.CompressionParameters()
with self.assertRaises(TypeError):
zstd.CompressionParameters((0, 1))
def test_get_compression_parameters(self):
p = zstd.get_compression_parameters(1)
self.assertIsInstance(p, zstd.CompressionParameters)
self.assertEqual(p[0], 19)
<commit_msg>Add hypothesis test to randomly generate CompressionParameters<commit_after>try:
import unittest2 as unittest
except ImportError:
import unittest
try:
import hypothesis
import hypothesis.strategies as strategies
except ImportError:
hypothesis = None
import zstd
class TestCompressionParameters(unittest.TestCase):
def test_init_bad_arg_type(self):
with self.assertRaises(TypeError):
zstd.CompressionParameters()
with self.assertRaises(TypeError):
zstd.CompressionParameters((0, 1))
def test_get_compression_parameters(self):
p = zstd.get_compression_parameters(1)
self.assertIsInstance(p, zstd.CompressionParameters)
self.assertEqual(p[0], 19)
if hypothesis:
s_windowlog = strategies.integers(min_value=zstd.WINDOWLOG_MIN,
max_value=zstd.WINDOWLOG_MAX)
s_chainlog = strategies.integers(min_value=zstd.CHAINLOG_MIN,
max_value=zstd.CHAINLOG_MAX)
s_hashlog = strategies.integers(min_value=zstd.HASHLOG_MIN,
max_value=zstd.HASHLOG_MAX)
s_searchlog = strategies.integers(min_value=zstd.SEARCHLOG_MIN,
max_value=zstd.SEARCHLOG_MAX)
s_searchlength = strategies.integers(min_value=zstd.SEARCHLENGTH_MIN,
max_value=zstd.SEARCHLENGTH_MAX)
s_targetlength = strategies.integers(min_value=zstd.TARGETLENGTH_MIN,
max_value=zstd.TARGETLENGTH_MAX)
s_strategy = strategies.sampled_from((zstd.STRATEGY_FAST,
zstd.STRATEGY_DFAST,
zstd.STRATEGY_GREEDY,
zstd.STRATEGY_LAZY,
zstd.STRATEGY_LAZY2,
zstd.STRATEGY_BTLAZY2,
zstd.STRATEGY_BTOPT))
class TestCompressionParametersHypothesis(unittest.TestCase):
@hypothesis.given(s_windowlog, s_chainlog, s_hashlog, s_searchlog,
s_searchlength, s_targetlength, s_strategy)
def test_valid_init(self, windowlog, chainlog, hashlog, searchlog,
searchlength, targetlength, strategy):
p = zstd.CompressionParameters(windowlog, chainlog, hashlog,
searchlog, searchlength,
targetlength, strategy)
self.assertEqual(tuple(p),
(windowlog, chainlog, hashlog, searchlog,
searchlength, targetlength, strategy))
|
4ca1c0bf5e950ab9710d8a76aa788a5f22641395
|
wagtail_mvc/tests.py
|
wagtail_mvc/tests.py
|
# -*- coding: utf-8 -*-
"""
wagtail_mvc tests
"""
from __future__ import unicode_literals
from django.test import TestCase
from mock import Mock
from wagtail_mvc.models import WagtailMvcViewWrapper
class WagtailMvcViewWrapperTestCase(TestCase):
"""
Tests the WagtailMvcViewWrapper
"""
def setUp(self):
super(WagtailMvcViewWrapperTestCase, self).setUp()
self.view = Mock()
self.page = Mock(methods=['get_view_restrictions'])
self.instance = WagtailMvcViewWrapper(self.view, self.page)
def test_serve_calls_view(self):
"""
The instances serve attribute should call the view
"""
self.instance.serve()
self.view.assert_called_with()
def test_get_view_restrictions(self):
"""
The method should call the get_view_restrictions method on the page
"""
self.instance.get_view_restrictions()
self.page.get_view_restrictions.assert_called_with()
|
# -*- coding: utf-8 -*-
"""
wagtail_mvc tests
"""
from __future__ import unicode_literals
from django.test import TestCase
from mock import Mock
from wagtail_mvc.models import WagtailMvcViewWrapper
class WagtailMvcViewWrapperTestCase(TestCase):
"""
Tests the WagtailMvcViewWrapper
"""
def setUp(self):
super(WagtailMvcViewWrapperTestCase, self).setUp()
self.view = Mock()
self.page = Mock(methods=['get_view_restrictions'])
self.instance = WagtailMvcViewWrapper(self.view, self.page)
def test_serve_calls_view(self):
"""
The instances serve attribute should call the view
"""
self.instance.serve()
self.view.assert_called_with()
def test_get_view_restrictions(self):
"""
The method should call the get_view_restrictions method on the page
"""
self.instance.get_view_restrictions()
self.page.get_view_restrictions.assert_called_with()
class WagtailMvcMixinTestCase(TestCase):
"""
Tests the WagtailMvcMixin
"""
def test_calls_serve_if_wagtail_url_conf_not_defined(self):
"""
The serve method should still be called if the wagtail_url_conf attribute is not defined
"""
pass
def test_resolve_view_resolves_view(self):
"""
The resolve_view method should return the correct data
"""
pass
def test_page_in_view_kwargs(self):
"""
The resolve_view method should add the page instance to the view kwargs
"""
pass
def test_resolve_view_raises_404(self):
"""
The resolve_view method should raise a Resolver404 exception
"""
pass
def test_url_config_used_to_serve_actual_page(self):
"""
The defined url config should be used to serve the page when a full url is matched
"""
pass
def test_url_config_used_to_serve_sub_page(self):
"""
The defined url config should be used to serve a sub page when a partial url is matched
"""
pass
|
Add test stubs for Model Mixin behaviour
|
Add test stubs for Model Mixin behaviour
|
Python
|
mit
|
fatboystring/Wagtail-MVC,fatboystring/Wagtail-MVC
|
# -*- coding: utf-8 -*-
"""
wagtail_mvc tests
"""
from __future__ import unicode_literals
from django.test import TestCase
from mock import Mock
from wagtail_mvc.models import WagtailMvcViewWrapper
class WagtailMvcViewWrapperTestCase(TestCase):
"""
Tests the WagtailMvcViewWrapper
"""
def setUp(self):
super(WagtailMvcViewWrapperTestCase, self).setUp()
self.view = Mock()
self.page = Mock(methods=['get_view_restrictions'])
self.instance = WagtailMvcViewWrapper(self.view, self.page)
def test_serve_calls_view(self):
"""
The instances serve attribute should call the view
"""
self.instance.serve()
self.view.assert_called_with()
def test_get_view_restrictions(self):
"""
The method should call the get_view_restrictions method on the page
"""
self.instance.get_view_restrictions()
self.page.get_view_restrictions.assert_called_with()
Add test stubs for Model Mixin behaviour
|
# -*- coding: utf-8 -*-
"""
wagtail_mvc tests
"""
from __future__ import unicode_literals
from django.test import TestCase
from mock import Mock
from wagtail_mvc.models import WagtailMvcViewWrapper
class WagtailMvcViewWrapperTestCase(TestCase):
"""
Tests the WagtailMvcViewWrapper
"""
def setUp(self):
super(WagtailMvcViewWrapperTestCase, self).setUp()
self.view = Mock()
self.page = Mock(methods=['get_view_restrictions'])
self.instance = WagtailMvcViewWrapper(self.view, self.page)
def test_serve_calls_view(self):
"""
The instances serve attribute should call the view
"""
self.instance.serve()
self.view.assert_called_with()
def test_get_view_restrictions(self):
"""
The method should call the get_view_restrictions method on the page
"""
self.instance.get_view_restrictions()
self.page.get_view_restrictions.assert_called_with()
class WagtailMvcMixinTestCase(TestCase):
"""
Tests the WagtailMvcMixin
"""
def test_calls_serve_if_wagtail_url_conf_not_defined(self):
"""
The serve method should still be called if the wagtail_url_conf attribute is not defined
"""
pass
def test_resolve_view_resolves_view(self):
"""
The resolve_view method should return the correct data
"""
pass
def test_page_in_view_kwargs(self):
"""
The resolve_view method should add the page instance to the view kwargs
"""
pass
def test_resolve_view_raises_404(self):
"""
The resolve_view method should raise a Resolver404 exception
"""
pass
def test_url_config_used_to_serve_actual_page(self):
"""
The defined url config should be used to serve the page when a full url is matched
"""
pass
def test_url_config_used_to_serve_sub_page(self):
"""
The defined url config should be used to serve a sub page when a partial url is matched
"""
pass
|
<commit_before># -*- coding: utf-8 -*-
"""
wagtail_mvc tests
"""
from __future__ import unicode_literals
from django.test import TestCase
from mock import Mock
from wagtail_mvc.models import WagtailMvcViewWrapper
class WagtailMvcViewWrapperTestCase(TestCase):
"""
Tests the WagtailMvcViewWrapper
"""
def setUp(self):
super(WagtailMvcViewWrapperTestCase, self).setUp()
self.view = Mock()
self.page = Mock(methods=['get_view_restrictions'])
self.instance = WagtailMvcViewWrapper(self.view, self.page)
def test_serve_calls_view(self):
"""
The instances serve attribute should call the view
"""
self.instance.serve()
self.view.assert_called_with()
def test_get_view_restrictions(self):
"""
The method should call the get_view_restrictions method on the page
"""
self.instance.get_view_restrictions()
self.page.get_view_restrictions.assert_called_with()
<commit_msg>Add test stubs for Model Mixin behaviour<commit_after>
|
# -*- coding: utf-8 -*-
"""
wagtail_mvc tests
"""
from __future__ import unicode_literals
from django.test import TestCase
from mock import Mock
from wagtail_mvc.models import WagtailMvcViewWrapper
class WagtailMvcViewWrapperTestCase(TestCase):
"""
Tests the WagtailMvcViewWrapper
"""
def setUp(self):
super(WagtailMvcViewWrapperTestCase, self).setUp()
self.view = Mock()
self.page = Mock(methods=['get_view_restrictions'])
self.instance = WagtailMvcViewWrapper(self.view, self.page)
def test_serve_calls_view(self):
"""
The instances serve attribute should call the view
"""
self.instance.serve()
self.view.assert_called_with()
def test_get_view_restrictions(self):
"""
The method should call the get_view_restrictions method on the page
"""
self.instance.get_view_restrictions()
self.page.get_view_restrictions.assert_called_with()
class WagtailMvcMixinTestCase(TestCase):
"""
Tests the WagtailMvcMixin
"""
def test_calls_serve_if_wagtail_url_conf_not_defined(self):
"""
The serve method should still be called if the wagtail_url_conf attribute is not defined
"""
pass
def test_resolve_view_resolves_view(self):
"""
The resolve_view method should return the correct data
"""
pass
def test_page_in_view_kwargs(self):
"""
The resolve_view method should add the page instance to the view kwargs
"""
pass
def test_resolve_view_raises_404(self):
"""
The resolve_view method should raise a Resolver404 exception
"""
pass
def test_url_config_used_to_serve_actual_page(self):
"""
The defined url config should be used to serve the page when a full url is matched
"""
pass
def test_url_config_used_to_serve_sub_page(self):
"""
The defined url config should be used to serve a sub page when a partial url is matched
"""
pass
|
# -*- coding: utf-8 -*-
"""
wagtail_mvc tests
"""
from __future__ import unicode_literals
from django.test import TestCase
from mock import Mock
from wagtail_mvc.models import WagtailMvcViewWrapper
class WagtailMvcViewWrapperTestCase(TestCase):
"""
Tests the WagtailMvcViewWrapper
"""
def setUp(self):
super(WagtailMvcViewWrapperTestCase, self).setUp()
self.view = Mock()
self.page = Mock(methods=['get_view_restrictions'])
self.instance = WagtailMvcViewWrapper(self.view, self.page)
def test_serve_calls_view(self):
"""
The instances serve attribute should call the view
"""
self.instance.serve()
self.view.assert_called_with()
def test_get_view_restrictions(self):
"""
The method should call the get_view_restrictions method on the page
"""
self.instance.get_view_restrictions()
self.page.get_view_restrictions.assert_called_with()
Add test stubs for Model Mixin behaviour# -*- coding: utf-8 -*-
"""
wagtail_mvc tests
"""
from __future__ import unicode_literals
from django.test import TestCase
from mock import Mock
from wagtail_mvc.models import WagtailMvcViewWrapper
class WagtailMvcViewWrapperTestCase(TestCase):
"""
Tests the WagtailMvcViewWrapper
"""
def setUp(self):
super(WagtailMvcViewWrapperTestCase, self).setUp()
self.view = Mock()
self.page = Mock(methods=['get_view_restrictions'])
self.instance = WagtailMvcViewWrapper(self.view, self.page)
def test_serve_calls_view(self):
"""
The instances serve attribute should call the view
"""
self.instance.serve()
self.view.assert_called_with()
def test_get_view_restrictions(self):
"""
The method should call the get_view_restrictions method on the page
"""
self.instance.get_view_restrictions()
self.page.get_view_restrictions.assert_called_with()
class WagtailMvcMixinTestCase(TestCase):
"""
Tests the WagtailMvcMixin
"""
def test_calls_serve_if_wagtail_url_conf_not_defined(self):
"""
The serve method should still be called if the wagtail_url_conf attribute is not defined
"""
pass
def test_resolve_view_resolves_view(self):
"""
The resolve_view method should return the correct data
"""
pass
def test_page_in_view_kwargs(self):
"""
The resolve_view method should add the page instance to the view kwargs
"""
pass
def test_resolve_view_raises_404(self):
"""
The resolve_view method should raise a Resolver404 exception
"""
pass
def test_url_config_used_to_serve_actual_page(self):
"""
The defined url config should be used to serve the page when a full url is matched
"""
pass
def test_url_config_used_to_serve_sub_page(self):
"""
The defined url config should be used to serve a sub page when a partial url is matched
"""
pass
|
<commit_before># -*- coding: utf-8 -*-
"""
wagtail_mvc tests
"""
from __future__ import unicode_literals
from django.test import TestCase
from mock import Mock
from wagtail_mvc.models import WagtailMvcViewWrapper
class WagtailMvcViewWrapperTestCase(TestCase):
"""
Tests the WagtailMvcViewWrapper
"""
def setUp(self):
super(WagtailMvcViewWrapperTestCase, self).setUp()
self.view = Mock()
self.page = Mock(methods=['get_view_restrictions'])
self.instance = WagtailMvcViewWrapper(self.view, self.page)
def test_serve_calls_view(self):
"""
The instances serve attribute should call the view
"""
self.instance.serve()
self.view.assert_called_with()
def test_get_view_restrictions(self):
"""
The method should call the get_view_restrictions method on the page
"""
self.instance.get_view_restrictions()
self.page.get_view_restrictions.assert_called_with()
<commit_msg>Add test stubs for Model Mixin behaviour<commit_after># -*- coding: utf-8 -*-
"""
wagtail_mvc tests
"""
from __future__ import unicode_literals
from django.test import TestCase
from mock import Mock
from wagtail_mvc.models import WagtailMvcViewWrapper
class WagtailMvcViewWrapperTestCase(TestCase):
"""
Tests the WagtailMvcViewWrapper
"""
def setUp(self):
super(WagtailMvcViewWrapperTestCase, self).setUp()
self.view = Mock()
self.page = Mock(methods=['get_view_restrictions'])
self.instance = WagtailMvcViewWrapper(self.view, self.page)
def test_serve_calls_view(self):
"""
The instances serve attribute should call the view
"""
self.instance.serve()
self.view.assert_called_with()
def test_get_view_restrictions(self):
"""
The method should call the get_view_restrictions method on the page
"""
self.instance.get_view_restrictions()
self.page.get_view_restrictions.assert_called_with()
class WagtailMvcMixinTestCase(TestCase):
"""
Tests the WagtailMvcMixin
"""
def test_calls_serve_if_wagtail_url_conf_not_defined(self):
"""
The serve method should still be called if the wagtail_url_conf attribute is not defined
"""
pass
def test_resolve_view_resolves_view(self):
"""
The resolve_view method should return the correct data
"""
pass
def test_page_in_view_kwargs(self):
"""
The resolve_view method should add the page instance to the view kwargs
"""
pass
def test_resolve_view_raises_404(self):
"""
The resolve_view method should raise a Resolver404 exception
"""
pass
def test_url_config_used_to_serve_actual_page(self):
"""
The defined url config should be used to serve the page when a full url is matched
"""
pass
def test_url_config_used_to_serve_sub_page(self):
"""
The defined url config should be used to serve a sub page when a partial url is matched
"""
pass
|
9c596afebfe5fb6746ec2a157d71bb315b02c0cf
|
tests/unit/test_exceptions.py
|
tests/unit/test_exceptions.py
|
#!/usr/bin/env
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from nose.tools import assert_equals
from botocore import exceptions
def test_client_error_can_handle_missing_code_or_message():
response = {'Error': {}}
expect = 'An error occurred (Unknown) when calling the blackhole operation: Unknown'
assert_equals(str(exceptions.ClientError(response, 'blackhole')), expect)
def test_client_error_has_operation_name_set():
response = {'Error': {}}
exception = exceptions.ClientError(response, 'blackhole')
assert_equals(exception.operation_name, 'blackhole')
|
#!/usr/bin/env
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from nose.tools import assert_equals
from botocore import exceptions
def test_client_error_can_handle_missing_code_or_message():
response = {'Error': {}}
expect = 'An error occurred (Unknown) when calling the blackhole operation: Unknown'
assert_equals(str(exceptions.ClientError(response, 'blackhole')), expect)
def test_client_error_has_operation_name_set():
response = {'Error': {}}
exception = exceptions.ClientError(response, 'blackhole')
assert(hasattr(exception, 'operation_name'))
def test_client_error_set_correct_operation_name():
response = {'Error': {}}
exception = exceptions.ClientError(response, 'blackhole')
assert_equals(exception.operation_name, 'blackhole')
|
Add test to check excplicitly if the attribute is set
|
Add test to check excplicitly if the attribute is set
|
Python
|
apache-2.0
|
boto/botocore,pplu/botocore
|
#!/usr/bin/env
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from nose.tools import assert_equals
from botocore import exceptions
def test_client_error_can_handle_missing_code_or_message():
response = {'Error': {}}
expect = 'An error occurred (Unknown) when calling the blackhole operation: Unknown'
assert_equals(str(exceptions.ClientError(response, 'blackhole')), expect)
def test_client_error_has_operation_name_set():
response = {'Error': {}}
exception = exceptions.ClientError(response, 'blackhole')
assert_equals(exception.operation_name, 'blackhole')
Add test to check excplicitly if the attribute is set
|
#!/usr/bin/env
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from nose.tools import assert_equals
from botocore import exceptions
def test_client_error_can_handle_missing_code_or_message():
response = {'Error': {}}
expect = 'An error occurred (Unknown) when calling the blackhole operation: Unknown'
assert_equals(str(exceptions.ClientError(response, 'blackhole')), expect)
def test_client_error_has_operation_name_set():
response = {'Error': {}}
exception = exceptions.ClientError(response, 'blackhole')
assert(hasattr(exception, 'operation_name'))
def test_client_error_set_correct_operation_name():
response = {'Error': {}}
exception = exceptions.ClientError(response, 'blackhole')
assert_equals(exception.operation_name, 'blackhole')
|
<commit_before>#!/usr/bin/env
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from nose.tools import assert_equals
from botocore import exceptions
def test_client_error_can_handle_missing_code_or_message():
response = {'Error': {}}
expect = 'An error occurred (Unknown) when calling the blackhole operation: Unknown'
assert_equals(str(exceptions.ClientError(response, 'blackhole')), expect)
def test_client_error_has_operation_name_set():
response = {'Error': {}}
exception = exceptions.ClientError(response, 'blackhole')
assert_equals(exception.operation_name, 'blackhole')
<commit_msg>Add test to check excplicitly if the attribute is set<commit_after>
|
#!/usr/bin/env
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from nose.tools import assert_equals
from botocore import exceptions
def test_client_error_can_handle_missing_code_or_message():
response = {'Error': {}}
expect = 'An error occurred (Unknown) when calling the blackhole operation: Unknown'
assert_equals(str(exceptions.ClientError(response, 'blackhole')), expect)
def test_client_error_has_operation_name_set():
response = {'Error': {}}
exception = exceptions.ClientError(response, 'blackhole')
assert(hasattr(exception, 'operation_name'))
def test_client_error_set_correct_operation_name():
response = {'Error': {}}
exception = exceptions.ClientError(response, 'blackhole')
assert_equals(exception.operation_name, 'blackhole')
|
#!/usr/bin/env
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from nose.tools import assert_equals
from botocore import exceptions
def test_client_error_can_handle_missing_code_or_message():
response = {'Error': {}}
expect = 'An error occurred (Unknown) when calling the blackhole operation: Unknown'
assert_equals(str(exceptions.ClientError(response, 'blackhole')), expect)
def test_client_error_has_operation_name_set():
response = {'Error': {}}
exception = exceptions.ClientError(response, 'blackhole')
assert_equals(exception.operation_name, 'blackhole')
Add test to check excplicitly if the attribute is set#!/usr/bin/env
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from nose.tools import assert_equals
from botocore import exceptions
def test_client_error_can_handle_missing_code_or_message():
response = {'Error': {}}
expect = 'An error occurred (Unknown) when calling the blackhole operation: Unknown'
assert_equals(str(exceptions.ClientError(response, 'blackhole')), expect)
def test_client_error_has_operation_name_set():
response = {'Error': {}}
exception = exceptions.ClientError(response, 'blackhole')
assert(hasattr(exception, 'operation_name'))
def test_client_error_set_correct_operation_name():
response = {'Error': {}}
exception = exceptions.ClientError(response, 'blackhole')
assert_equals(exception.operation_name, 'blackhole')
|
<commit_before>#!/usr/bin/env
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from nose.tools import assert_equals
from botocore import exceptions
def test_client_error_can_handle_missing_code_or_message():
response = {'Error': {}}
expect = 'An error occurred (Unknown) when calling the blackhole operation: Unknown'
assert_equals(str(exceptions.ClientError(response, 'blackhole')), expect)
def test_client_error_has_operation_name_set():
response = {'Error': {}}
exception = exceptions.ClientError(response, 'blackhole')
assert_equals(exception.operation_name, 'blackhole')
<commit_msg>Add test to check excplicitly if the attribute is set<commit_after>#!/usr/bin/env
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from nose.tools import assert_equals
from botocore import exceptions
def test_client_error_can_handle_missing_code_or_message():
response = {'Error': {}}
expect = 'An error occurred (Unknown) when calling the blackhole operation: Unknown'
assert_equals(str(exceptions.ClientError(response, 'blackhole')), expect)
def test_client_error_has_operation_name_set():
response = {'Error': {}}
exception = exceptions.ClientError(response, 'blackhole')
assert(hasattr(exception, 'operation_name'))
def test_client_error_set_correct_operation_name():
response = {'Error': {}}
exception = exceptions.ClientError(response, 'blackhole')
assert_equals(exception.operation_name, 'blackhole')
|
e4580a598e7d930ad90f5480751804fc1fa89826
|
pronto/__init__.py
|
pronto/__init__.py
|
import pkg_resources
__author__ = "Martin Larralde <martin.larralde@embl.de>"
__license__ = "MIT"
__version__ = pkg_resources.resource_string(__name__, "_version.txt").decode('utf-8').strip()
from .ontology import Ontology # noqa: F401
from .term import Term # noqa: F401
from .definition import Definition # noqa: F401
from .relationship import Relationship # noqa: F401
from .synonym import Synonym, SynonymType # noqa: F401
from .xref import Xref # noqa: F401
|
import pkg_resources
__author__ = "Martin Larralde <martin.larralde@embl.de>"
__license__ = "MIT"
__version__ = (
__import__('pkg_resources')
.resource_string(__name__, "_version.txt")
.decode('utf-8')
.strip()
)
from .ontology import Ontology # noqa: F401
from .term import Term # noqa: F401
from .definition import Definition # noqa: F401
from .relationship import Relationship # noqa: F401
from .synonym import Synonym, SynonymType # noqa: F401
from .xref import Xref # noqa: F401
|
Remove `pkg_resources` from the top-level package
|
Remove `pkg_resources` from the top-level package
|
Python
|
mit
|
althonos/pronto
|
import pkg_resources
__author__ = "Martin Larralde <martin.larralde@embl.de>"
__license__ = "MIT"
__version__ = pkg_resources.resource_string(__name__, "_version.txt").decode('utf-8').strip()
from .ontology import Ontology # noqa: F401
from .term import Term # noqa: F401
from .definition import Definition # noqa: F401
from .relationship import Relationship # noqa: F401
from .synonym import Synonym, SynonymType # noqa: F401
from .xref import Xref # noqa: F401
Remove `pkg_resources` from the top-level package
|
import pkg_resources
__author__ = "Martin Larralde <martin.larralde@embl.de>"
__license__ = "MIT"
__version__ = (
__import__('pkg_resources')
.resource_string(__name__, "_version.txt")
.decode('utf-8')
.strip()
)
from .ontology import Ontology # noqa: F401
from .term import Term # noqa: F401
from .definition import Definition # noqa: F401
from .relationship import Relationship # noqa: F401
from .synonym import Synonym, SynonymType # noqa: F401
from .xref import Xref # noqa: F401
|
<commit_before>import pkg_resources
__author__ = "Martin Larralde <martin.larralde@embl.de>"
__license__ = "MIT"
__version__ = pkg_resources.resource_string(__name__, "_version.txt").decode('utf-8').strip()
from .ontology import Ontology # noqa: F401
from .term import Term # noqa: F401
from .definition import Definition # noqa: F401
from .relationship import Relationship # noqa: F401
from .synonym import Synonym, SynonymType # noqa: F401
from .xref import Xref # noqa: F401
<commit_msg>Remove `pkg_resources` from the top-level package<commit_after>
|
import pkg_resources
__author__ = "Martin Larralde <martin.larralde@embl.de>"
__license__ = "MIT"
__version__ = (
__import__('pkg_resources')
.resource_string(__name__, "_version.txt")
.decode('utf-8')
.strip()
)
from .ontology import Ontology # noqa: F401
from .term import Term # noqa: F401
from .definition import Definition # noqa: F401
from .relationship import Relationship # noqa: F401
from .synonym import Synonym, SynonymType # noqa: F401
from .xref import Xref # noqa: F401
|
import pkg_resources
__author__ = "Martin Larralde <martin.larralde@embl.de>"
__license__ = "MIT"
__version__ = pkg_resources.resource_string(__name__, "_version.txt").decode('utf-8').strip()
from .ontology import Ontology # noqa: F401
from .term import Term # noqa: F401
from .definition import Definition # noqa: F401
from .relationship import Relationship # noqa: F401
from .synonym import Synonym, SynonymType # noqa: F401
from .xref import Xref # noqa: F401
Remove `pkg_resources` from the top-level packageimport pkg_resources
__author__ = "Martin Larralde <martin.larralde@embl.de>"
__license__ = "MIT"
__version__ = (
__import__('pkg_resources')
.resource_string(__name__, "_version.txt")
.decode('utf-8')
.strip()
)
from .ontology import Ontology # noqa: F401
from .term import Term # noqa: F401
from .definition import Definition # noqa: F401
from .relationship import Relationship # noqa: F401
from .synonym import Synonym, SynonymType # noqa: F401
from .xref import Xref # noqa: F401
|
<commit_before>import pkg_resources
__author__ = "Martin Larralde <martin.larralde@embl.de>"
__license__ = "MIT"
__version__ = pkg_resources.resource_string(__name__, "_version.txt").decode('utf-8').strip()
from .ontology import Ontology # noqa: F401
from .term import Term # noqa: F401
from .definition import Definition # noqa: F401
from .relationship import Relationship # noqa: F401
from .synonym import Synonym, SynonymType # noqa: F401
from .xref import Xref # noqa: F401
<commit_msg>Remove `pkg_resources` from the top-level package<commit_after>import pkg_resources
__author__ = "Martin Larralde <martin.larralde@embl.de>"
__license__ = "MIT"
__version__ = (
__import__('pkg_resources')
.resource_string(__name__, "_version.txt")
.decode('utf-8')
.strip()
)
from .ontology import Ontology # noqa: F401
from .term import Term # noqa: F401
from .definition import Definition # noqa: F401
from .relationship import Relationship # noqa: F401
from .synonym import Synonym, SynonymType # noqa: F401
from .xref import Xref # noqa: F401
|
1c65f476c34345267cadb4851fb4fb2ca21333c4
|
axelrod/strategies/__init__.py
|
axelrod/strategies/__init__.py
|
from cooperator import *
from defector import *
from grudger import *
from rand import *
from titfortat import *
from gobymajority import *
from alternator import *
from averagecopier import *
from grumpy import *
strategies = [
Defector,
Cooperator,
TitForTat,
Grudger,
GoByMajority,
Random,
Alternator,
AverageCopier,
Grumpy,
]
|
from cooperator import *
from defector import *
from grudger import *
from rand import *
from titfortat import *
from gobymajority import *
from alternator import *
from averagecopier import *
from grumpy import *
from inverse import *
strategies = [
Defector,
Cooperator,
TitForTat,
Grudger,
GoByMajority,
Random,
Alternator,
AverageCopier,
Grumpy,
Inverse
]
|
Change init to add inverse strategy
|
Change init to add inverse strategy
|
Python
|
mit
|
bootandy/Axelrod,emmagordon/Axelrod,mojones/Axelrod,drvinceknight/Axelrod,emmagordon/Axelrod,kathryncrouch/Axelrod,bootandy/Axelrod,risicle/Axelrod,uglyfruitcake/Axelrod,kathryncrouch/Axelrod,mojones/Axelrod,uglyfruitcake/Axelrod,risicle/Axelrod
|
from cooperator import *
from defector import *
from grudger import *
from rand import *
from titfortat import *
from gobymajority import *
from alternator import *
from averagecopier import *
from grumpy import *
strategies = [
Defector,
Cooperator,
TitForTat,
Grudger,
GoByMajority,
Random,
Alternator,
AverageCopier,
Grumpy,
]
Change init to add inverse strategy
|
from cooperator import *
from defector import *
from grudger import *
from rand import *
from titfortat import *
from gobymajority import *
from alternator import *
from averagecopier import *
from grumpy import *
from inverse import *
strategies = [
Defector,
Cooperator,
TitForTat,
Grudger,
GoByMajority,
Random,
Alternator,
AverageCopier,
Grumpy,
Inverse
]
|
<commit_before>from cooperator import *
from defector import *
from grudger import *
from rand import *
from titfortat import *
from gobymajority import *
from alternator import *
from averagecopier import *
from grumpy import *
strategies = [
Defector,
Cooperator,
TitForTat,
Grudger,
GoByMajority,
Random,
Alternator,
AverageCopier,
Grumpy,
]
<commit_msg>Change init to add inverse strategy<commit_after>
|
from cooperator import *
from defector import *
from grudger import *
from rand import *
from titfortat import *
from gobymajority import *
from alternator import *
from averagecopier import *
from grumpy import *
from inverse import *
strategies = [
Defector,
Cooperator,
TitForTat,
Grudger,
GoByMajority,
Random,
Alternator,
AverageCopier,
Grumpy,
Inverse
]
|
from cooperator import *
from defector import *
from grudger import *
from rand import *
from titfortat import *
from gobymajority import *
from alternator import *
from averagecopier import *
from grumpy import *
strategies = [
Defector,
Cooperator,
TitForTat,
Grudger,
GoByMajority,
Random,
Alternator,
AverageCopier,
Grumpy,
]
Change init to add inverse strategyfrom cooperator import *
from defector import *
from grudger import *
from rand import *
from titfortat import *
from gobymajority import *
from alternator import *
from averagecopier import *
from grumpy import *
from inverse import *
strategies = [
Defector,
Cooperator,
TitForTat,
Grudger,
GoByMajority,
Random,
Alternator,
AverageCopier,
Grumpy,
Inverse
]
|
<commit_before>from cooperator import *
from defector import *
from grudger import *
from rand import *
from titfortat import *
from gobymajority import *
from alternator import *
from averagecopier import *
from grumpy import *
strategies = [
Defector,
Cooperator,
TitForTat,
Grudger,
GoByMajority,
Random,
Alternator,
AverageCopier,
Grumpy,
]
<commit_msg>Change init to add inverse strategy<commit_after>from cooperator import *
from defector import *
from grudger import *
from rand import *
from titfortat import *
from gobymajority import *
from alternator import *
from averagecopier import *
from grumpy import *
from inverse import *
strategies = [
Defector,
Cooperator,
TitForTat,
Grudger,
GoByMajority,
Random,
Alternator,
AverageCopier,
Grumpy,
Inverse
]
|
074c6fb8bf3f7092920ccae04de26a1a822c38a9
|
tohu/v3/derived_generators.py
|
tohu/v3/derived_generators.py
|
from .base import TohuBaseGenerator
DERIVED_GENERATORS = ['Apply']
__all__ = DERIVED_GENERATORS + ['DERIVED_GENERATORS']
class Apply(TohuBaseGenerator):
def __init__(self, func, *arg_gens, **kwarg_gens):
self.func = func
self.orig_arg_gens = arg_gens
self.orig_kwarg_gens = kwarg_gens
self.arg_gens = [g.clone() for g in arg_gens]
self.kwarg_gens = {name: g.clone() for name, g in kwarg_gens.items()}
def __next__(self):
next_args = (next(g) for g in self.arg_gens)
next_kwargs = {name: next(g) for name, g in self.kwarg_gens.items()}
return self.func(*next_args, **next_kwargs)
def reset(self, seed=None):
pass
|
from .base import TohuBaseGenerator
DERIVED_GENERATORS = ['Apply']
__all__ = DERIVED_GENERATORS + ['DERIVED_GENERATORS']
class Apply(TohuBaseGenerator):
def __init__(self, func, *arg_gens, **kwarg_gens):
super().__init__()
self.func = func
self.orig_arg_gens = arg_gens
self.orig_kwarg_gens = kwarg_gens
self.arg_gens = [g.clone() for g in arg_gens]
self.kwarg_gens = {name: g.clone() for name, g in kwarg_gens.items()}
def __next__(self):
next_args = (next(g) for g in self.arg_gens)
next_kwargs = {name: next(g) for name, g in self.kwarg_gens.items()}
return self.func(*next_args, **next_kwargs)
def reset(self, seed=None):
super().reset(seed)
def spawn(self):
return Apply(self.func, *self.orig_arg_gens, **self.orig_kwarg_gens)
|
Add spawn method to Apply; initialise clones by calling super().__init__()
|
Add spawn method to Apply; initialise clones by calling super().__init__()
|
Python
|
mit
|
maxalbert/tohu
|
from .base import TohuBaseGenerator
DERIVED_GENERATORS = ['Apply']
__all__ = DERIVED_GENERATORS + ['DERIVED_GENERATORS']
class Apply(TohuBaseGenerator):
def __init__(self, func, *arg_gens, **kwarg_gens):
self.func = func
self.orig_arg_gens = arg_gens
self.orig_kwarg_gens = kwarg_gens
self.arg_gens = [g.clone() for g in arg_gens]
self.kwarg_gens = {name: g.clone() for name, g in kwarg_gens.items()}
def __next__(self):
next_args = (next(g) for g in self.arg_gens)
next_kwargs = {name: next(g) for name, g in self.kwarg_gens.items()}
return self.func(*next_args, **next_kwargs)
def reset(self, seed=None):
passAdd spawn method to Apply; initialise clones by calling super().__init__()
|
from .base import TohuBaseGenerator
DERIVED_GENERATORS = ['Apply']
__all__ = DERIVED_GENERATORS + ['DERIVED_GENERATORS']
class Apply(TohuBaseGenerator):
def __init__(self, func, *arg_gens, **kwarg_gens):
super().__init__()
self.func = func
self.orig_arg_gens = arg_gens
self.orig_kwarg_gens = kwarg_gens
self.arg_gens = [g.clone() for g in arg_gens]
self.kwarg_gens = {name: g.clone() for name, g in kwarg_gens.items()}
def __next__(self):
next_args = (next(g) for g in self.arg_gens)
next_kwargs = {name: next(g) for name, g in self.kwarg_gens.items()}
return self.func(*next_args, **next_kwargs)
def reset(self, seed=None):
super().reset(seed)
def spawn(self):
return Apply(self.func, *self.orig_arg_gens, **self.orig_kwarg_gens)
|
<commit_before>from .base import TohuBaseGenerator
DERIVED_GENERATORS = ['Apply']
__all__ = DERIVED_GENERATORS + ['DERIVED_GENERATORS']
class Apply(TohuBaseGenerator):
def __init__(self, func, *arg_gens, **kwarg_gens):
self.func = func
self.orig_arg_gens = arg_gens
self.orig_kwarg_gens = kwarg_gens
self.arg_gens = [g.clone() for g in arg_gens]
self.kwarg_gens = {name: g.clone() for name, g in kwarg_gens.items()}
def __next__(self):
next_args = (next(g) for g in self.arg_gens)
next_kwargs = {name: next(g) for name, g in self.kwarg_gens.items()}
return self.func(*next_args, **next_kwargs)
def reset(self, seed=None):
pass<commit_msg>Add spawn method to Apply; initialise clones by calling super().__init__()<commit_after>
|
from .base import TohuBaseGenerator
DERIVED_GENERATORS = ['Apply']
__all__ = DERIVED_GENERATORS + ['DERIVED_GENERATORS']
class Apply(TohuBaseGenerator):
def __init__(self, func, *arg_gens, **kwarg_gens):
super().__init__()
self.func = func
self.orig_arg_gens = arg_gens
self.orig_kwarg_gens = kwarg_gens
self.arg_gens = [g.clone() for g in arg_gens]
self.kwarg_gens = {name: g.clone() for name, g in kwarg_gens.items()}
def __next__(self):
next_args = (next(g) for g in self.arg_gens)
next_kwargs = {name: next(g) for name, g in self.kwarg_gens.items()}
return self.func(*next_args, **next_kwargs)
def reset(self, seed=None):
super().reset(seed)
def spawn(self):
return Apply(self.func, *self.orig_arg_gens, **self.orig_kwarg_gens)
|
from .base import TohuBaseGenerator
DERIVED_GENERATORS = ['Apply']
__all__ = DERIVED_GENERATORS + ['DERIVED_GENERATORS']
class Apply(TohuBaseGenerator):
def __init__(self, func, *arg_gens, **kwarg_gens):
self.func = func
self.orig_arg_gens = arg_gens
self.orig_kwarg_gens = kwarg_gens
self.arg_gens = [g.clone() for g in arg_gens]
self.kwarg_gens = {name: g.clone() for name, g in kwarg_gens.items()}
def __next__(self):
next_args = (next(g) for g in self.arg_gens)
next_kwargs = {name: next(g) for name, g in self.kwarg_gens.items()}
return self.func(*next_args, **next_kwargs)
def reset(self, seed=None):
passAdd spawn method to Apply; initialise clones by calling super().__init__()from .base import TohuBaseGenerator
DERIVED_GENERATORS = ['Apply']
__all__ = DERIVED_GENERATORS + ['DERIVED_GENERATORS']
class Apply(TohuBaseGenerator):
def __init__(self, func, *arg_gens, **kwarg_gens):
super().__init__()
self.func = func
self.orig_arg_gens = arg_gens
self.orig_kwarg_gens = kwarg_gens
self.arg_gens = [g.clone() for g in arg_gens]
self.kwarg_gens = {name: g.clone() for name, g in kwarg_gens.items()}
def __next__(self):
next_args = (next(g) for g in self.arg_gens)
next_kwargs = {name: next(g) for name, g in self.kwarg_gens.items()}
return self.func(*next_args, **next_kwargs)
def reset(self, seed=None):
super().reset(seed)
def spawn(self):
return Apply(self.func, *self.orig_arg_gens, **self.orig_kwarg_gens)
|
<commit_before>from .base import TohuBaseGenerator
DERIVED_GENERATORS = ['Apply']
__all__ = DERIVED_GENERATORS + ['DERIVED_GENERATORS']
class Apply(TohuBaseGenerator):
def __init__(self, func, *arg_gens, **kwarg_gens):
self.func = func
self.orig_arg_gens = arg_gens
self.orig_kwarg_gens = kwarg_gens
self.arg_gens = [g.clone() for g in arg_gens]
self.kwarg_gens = {name: g.clone() for name, g in kwarg_gens.items()}
def __next__(self):
next_args = (next(g) for g in self.arg_gens)
next_kwargs = {name: next(g) for name, g in self.kwarg_gens.items()}
return self.func(*next_args, **next_kwargs)
def reset(self, seed=None):
pass<commit_msg>Add spawn method to Apply; initialise clones by calling super().__init__()<commit_after>from .base import TohuBaseGenerator
DERIVED_GENERATORS = ['Apply']
__all__ = DERIVED_GENERATORS + ['DERIVED_GENERATORS']
class Apply(TohuBaseGenerator):
def __init__(self, func, *arg_gens, **kwarg_gens):
super().__init__()
self.func = func
self.orig_arg_gens = arg_gens
self.orig_kwarg_gens = kwarg_gens
self.arg_gens = [g.clone() for g in arg_gens]
self.kwarg_gens = {name: g.clone() for name, g in kwarg_gens.items()}
def __next__(self):
next_args = (next(g) for g in self.arg_gens)
next_kwargs = {name: next(g) for name, g in self.kwarg_gens.items()}
return self.func(*next_args, **next_kwargs)
def reset(self, seed=None):
super().reset(seed)
def spawn(self):
return Apply(self.func, *self.orig_arg_gens, **self.orig_kwarg_gens)
|
e2fc339b20f013d561ed7365a20d0b39c24dcb46
|
scikits/talkbox/__init__.py
|
scikits/talkbox/__init__.py
|
from lpc import *
import lpc
__all__ = lpc.__all__
|
from lpc import *
import lpc
__all__ = lpc.__all__
from tools import *
import tools
__all__ += tools.__all__
|
Add tools general imports to talkbox namespace.
|
Add tools general imports to talkbox namespace.
|
Python
|
mit
|
cournape/talkbox,cournape/talkbox
|
from lpc import *
import lpc
__all__ = lpc.__all__
Add tools general imports to talkbox namespace.
|
from lpc import *
import lpc
__all__ = lpc.__all__
from tools import *
import tools
__all__ += tools.__all__
|
<commit_before>from lpc import *
import lpc
__all__ = lpc.__all__
<commit_msg>Add tools general imports to talkbox namespace.<commit_after>
|
from lpc import *
import lpc
__all__ = lpc.__all__
from tools import *
import tools
__all__ += tools.__all__
|
from lpc import *
import lpc
__all__ = lpc.__all__
Add tools general imports to talkbox namespace.from lpc import *
import lpc
__all__ = lpc.__all__
from tools import *
import tools
__all__ += tools.__all__
|
<commit_before>from lpc import *
import lpc
__all__ = lpc.__all__
<commit_msg>Add tools general imports to talkbox namespace.<commit_after>from lpc import *
import lpc
__all__ = lpc.__all__
from tools import *
import tools
__all__ += tools.__all__
|
e9fd097aac951e6d38246fc4fb01db0e0b6513eb
|
scikits/talkbox/__init__.py
|
scikits/talkbox/__init__.py
|
from linpred import *
import linpred
__all__ = linpred.__all__
from tools import *
import tools
__all__ += tools.__all__
|
from linpred import *
import linpred
__all__ = linpred.__all__
from tools import *
import tools
__all__ += tools.__all__
from numpy.testing import Tester
test = Tester().test
bench = Tester().bench
|
Add module-wide bench and test functions.
|
Add module-wide bench and test functions.
|
Python
|
mit
|
cournape/talkbox,cournape/talkbox
|
from linpred import *
import linpred
__all__ = linpred.__all__
from tools import *
import tools
__all__ += tools.__all__
Add module-wide bench and test functions.
|
from linpred import *
import linpred
__all__ = linpred.__all__
from tools import *
import tools
__all__ += tools.__all__
from numpy.testing import Tester
test = Tester().test
bench = Tester().bench
|
<commit_before>from linpred import *
import linpred
__all__ = linpred.__all__
from tools import *
import tools
__all__ += tools.__all__
<commit_msg>Add module-wide bench and test functions.<commit_after>
|
from linpred import *
import linpred
__all__ = linpred.__all__
from tools import *
import tools
__all__ += tools.__all__
from numpy.testing import Tester
test = Tester().test
bench = Tester().bench
|
from linpred import *
import linpred
__all__ = linpred.__all__
from tools import *
import tools
__all__ += tools.__all__
Add module-wide bench and test functions.from linpred import *
import linpred
__all__ = linpred.__all__
from tools import *
import tools
__all__ += tools.__all__
from numpy.testing import Tester
test = Tester().test
bench = Tester().bench
|
<commit_before>from linpred import *
import linpred
__all__ = linpred.__all__
from tools import *
import tools
__all__ += tools.__all__
<commit_msg>Add module-wide bench and test functions.<commit_after>from linpred import *
import linpred
__all__ = linpred.__all__
from tools import *
import tools
__all__ += tools.__all__
from numpy.testing import Tester
test = Tester().test
bench = Tester().bench
|
5252e86a9613545cbd6db2f0867276abac994282
|
run.py
|
run.py
|
from flask import Flask, request, redirect
import twilio.twiml
app = Flask(__name__)
@app.route("/", methods=['GET', 'POST'])
def hello_monkey():
"""Respond to incoming requests"""
resp = twilio.twiml.Response()
with resp.gather(numDigits=1, action="/handle-key", method="POST") as g:
g.say("press 1 or something")
return str(resp)
@app.route("/handle-key", methods=['GET', 'POST'])
def handle_key():
digit_pressed = request.values.get('Digits', None)
if digit_pressed == "1":
resp = twilio.twiml.Response()
resp.redirect("http://b9ff5a36.ngrok.io/twl/modified.xml")
return str(resp)
else:
return redirect("/")
if __name__ == "__main__":
app.run(debug=True)
|
from flask import Flask, request, redirect
import twilio.twiml
app = Flask(__name__)
@app.route("/", methods=['GET', 'POST'])
def hello_monkey():
"""Respond to incoming requests"""
resp = twilio.twiml.Response()
with resp.gather(numDigits=1, action="/handle-key", method="POST") as g:
g.say("press 1 or something")
return str(resp)
@app.route("/handle-key", methods=['GET', 'POST'])
def handle_key():
digit_pressed = request.values.get('Digits', None)
if digit_pressed == "1":
resp = twilio.twiml.Response()
resp.play("http://demo.twilio.com/hellomonkey/monkey.mp3")
return str(resp)
else:
return redirect("/")
if __name__ == "__main__":
app.run(debug=True)
|
Add ability to programatically play sound after button press
|
Add ability to programatically play sound after button press
|
Python
|
mit
|
ColdSauce/tw-1,zachlatta/tw-1,christophert/tw-1
|
from flask import Flask, request, redirect
import twilio.twiml
app = Flask(__name__)
@app.route("/", methods=['GET', 'POST'])
def hello_monkey():
"""Respond to incoming requests"""
resp = twilio.twiml.Response()
with resp.gather(numDigits=1, action="/handle-key", method="POST") as g:
g.say("press 1 or something")
return str(resp)
@app.route("/handle-key", methods=['GET', 'POST'])
def handle_key():
digit_pressed = request.values.get('Digits', None)
if digit_pressed == "1":
resp = twilio.twiml.Response()
resp.redirect("http://b9ff5a36.ngrok.io/twl/modified.xml")
return str(resp)
else:
return redirect("/")
if __name__ == "__main__":
app.run(debug=True)
Add ability to programatically play sound after button press
|
from flask import Flask, request, redirect
import twilio.twiml
app = Flask(__name__)
@app.route("/", methods=['GET', 'POST'])
def hello_monkey():
"""Respond to incoming requests"""
resp = twilio.twiml.Response()
with resp.gather(numDigits=1, action="/handle-key", method="POST") as g:
g.say("press 1 or something")
return str(resp)
@app.route("/handle-key", methods=['GET', 'POST'])
def handle_key():
digit_pressed = request.values.get('Digits', None)
if digit_pressed == "1":
resp = twilio.twiml.Response()
resp.play("http://demo.twilio.com/hellomonkey/monkey.mp3")
return str(resp)
else:
return redirect("/")
if __name__ == "__main__":
app.run(debug=True)
|
<commit_before>from flask import Flask, request, redirect
import twilio.twiml
app = Flask(__name__)
@app.route("/", methods=['GET', 'POST'])
def hello_monkey():
"""Respond to incoming requests"""
resp = twilio.twiml.Response()
with resp.gather(numDigits=1, action="/handle-key", method="POST") as g:
g.say("press 1 or something")
return str(resp)
@app.route("/handle-key", methods=['GET', 'POST'])
def handle_key():
digit_pressed = request.values.get('Digits', None)
if digit_pressed == "1":
resp = twilio.twiml.Response()
resp.redirect("http://b9ff5a36.ngrok.io/twl/modified.xml")
return str(resp)
else:
return redirect("/")
if __name__ == "__main__":
app.run(debug=True)
<commit_msg>Add ability to programatically play sound after button press<commit_after>
|
from flask import Flask, request, redirect
import twilio.twiml
app = Flask(__name__)
@app.route("/", methods=['GET', 'POST'])
def hello_monkey():
"""Respond to incoming requests"""
resp = twilio.twiml.Response()
with resp.gather(numDigits=1, action="/handle-key", method="POST") as g:
g.say("press 1 or something")
return str(resp)
@app.route("/handle-key", methods=['GET', 'POST'])
def handle_key():
digit_pressed = request.values.get('Digits', None)
if digit_pressed == "1":
resp = twilio.twiml.Response()
resp.play("http://demo.twilio.com/hellomonkey/monkey.mp3")
return str(resp)
else:
return redirect("/")
if __name__ == "__main__":
app.run(debug=True)
|
from flask import Flask, request, redirect
import twilio.twiml
app = Flask(__name__)
@app.route("/", methods=['GET', 'POST'])
def hello_monkey():
"""Respond to incoming requests"""
resp = twilio.twiml.Response()
with resp.gather(numDigits=1, action="/handle-key", method="POST") as g:
g.say("press 1 or something")
return str(resp)
@app.route("/handle-key", methods=['GET', 'POST'])
def handle_key():
digit_pressed = request.values.get('Digits', None)
if digit_pressed == "1":
resp = twilio.twiml.Response()
resp.redirect("http://b9ff5a36.ngrok.io/twl/modified.xml")
return str(resp)
else:
return redirect("/")
if __name__ == "__main__":
app.run(debug=True)
Add ability to programatically play sound after button pressfrom flask import Flask, request, redirect
import twilio.twiml
app = Flask(__name__)
@app.route("/", methods=['GET', 'POST'])
def hello_monkey():
"""Respond to incoming requests"""
resp = twilio.twiml.Response()
with resp.gather(numDigits=1, action="/handle-key", method="POST") as g:
g.say("press 1 or something")
return str(resp)
@app.route("/handle-key", methods=['GET', 'POST'])
def handle_key():
digit_pressed = request.values.get('Digits', None)
if digit_pressed == "1":
resp = twilio.twiml.Response()
resp.play("http://demo.twilio.com/hellomonkey/monkey.mp3")
return str(resp)
else:
return redirect("/")
if __name__ == "__main__":
app.run(debug=True)
|
<commit_before>from flask import Flask, request, redirect
import twilio.twiml
app = Flask(__name__)
@app.route("/", methods=['GET', 'POST'])
def hello_monkey():
"""Respond to incoming requests"""
resp = twilio.twiml.Response()
with resp.gather(numDigits=1, action="/handle-key", method="POST") as g:
g.say("press 1 or something")
return str(resp)
@app.route("/handle-key", methods=['GET', 'POST'])
def handle_key():
digit_pressed = request.values.get('Digits', None)
if digit_pressed == "1":
resp = twilio.twiml.Response()
resp.redirect("http://b9ff5a36.ngrok.io/twl/modified.xml")
return str(resp)
else:
return redirect("/")
if __name__ == "__main__":
app.run(debug=True)
<commit_msg>Add ability to programatically play sound after button press<commit_after>from flask import Flask, request, redirect
import twilio.twiml
app = Flask(__name__)
@app.route("/", methods=['GET', 'POST'])
def hello_monkey():
"""Respond to incoming requests"""
resp = twilio.twiml.Response()
with resp.gather(numDigits=1, action="/handle-key", method="POST") as g:
g.say("press 1 or something")
return str(resp)
@app.route("/handle-key", methods=['GET', 'POST'])
def handle_key():
digit_pressed = request.values.get('Digits', None)
if digit_pressed == "1":
resp = twilio.twiml.Response()
resp.play("http://demo.twilio.com/hellomonkey/monkey.mp3")
return str(resp)
else:
return redirect("/")
if __name__ == "__main__":
app.run(debug=True)
|
2cd0412ab14b92b7607d283d51e1650d008b6ad4
|
scipy/spatial/setupscons.py
|
scipy/spatial/setupscons.py
|
#!/usr/bin/env python
from os.path import join
def configuration(parent_package = '', top_path = None):
from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
config = Configuration('cluster', parent_package, top_path)
config.add_data_dir('tests')
#config.add_extension('_vq',
# sources=[join('src', 'vq_module.c'), join('src', 'vq.c')],
# include_dirs = [get_numpy_include_dirs()])
config.add_sconscript('SConstruct')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer = "SciPy Developers",
author = "Eric Jones",
maintainer_email = "scipy-dev@scipy.org",
description = "Clustering Algorithms (Information Theory)",
url = "http://www.scipy.org",
license = "SciPy License (BSD Style)",
**configuration(top_path='').todict()
)
|
#!/usr/bin/env python
from os.path import join
def configuration(parent_package = '', top_path = None):
from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
config = Configuration('spatial', parent_package, top_path)
config.add_data_dir('tests')
#config.add_extension('_vq',
# sources=[join('src', 'vq_module.c'), join('src', 'vq.c')],
# include_dirs = [get_numpy_include_dirs()])
config.add_sconscript('SConstruct')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer = "SciPy Developers",
author = "Eric Jones",
maintainer_email = "scipy-dev@scipy.org",
description = "Clustering Algorithms (Information Theory)",
url = "http://www.scipy.org",
license = "SciPy License (BSD Style)",
**configuration(top_path='').todict()
)
|
Update setup.py file for numscons build.
|
Update setup.py file for numscons build.
git-svn-id: 003f22d385e25de9cff933a5ea4efd77cb5e7b28@4958 d6536bca-fef9-0310-8506-e4c0a848fbcf
|
Python
|
bsd-3-clause
|
scipy/scipy-svn,scipy/scipy-svn,scipy/scipy-svn,jasonmccampbell/scipy-refactor,jasonmccampbell/scipy-refactor,jasonmccampbell/scipy-refactor,lesserwhirls/scipy-cwt,lesserwhirls/scipy-cwt,scipy/scipy-svn,lesserwhirls/scipy-cwt,lesserwhirls/scipy-cwt,jasonmccampbell/scipy-refactor
|
#!/usr/bin/env python
from os.path import join
def configuration(parent_package = '', top_path = None):
from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
config = Configuration('cluster', parent_package, top_path)
config.add_data_dir('tests')
#config.add_extension('_vq',
# sources=[join('src', 'vq_module.c'), join('src', 'vq.c')],
# include_dirs = [get_numpy_include_dirs()])
config.add_sconscript('SConstruct')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer = "SciPy Developers",
author = "Eric Jones",
maintainer_email = "scipy-dev@scipy.org",
description = "Clustering Algorithms (Information Theory)",
url = "http://www.scipy.org",
license = "SciPy License (BSD Style)",
**configuration(top_path='').todict()
)
Update setup.py file for numscons build.
git-svn-id: 003f22d385e25de9cff933a5ea4efd77cb5e7b28@4958 d6536bca-fef9-0310-8506-e4c0a848fbcf
|
#!/usr/bin/env python
from os.path import join
def configuration(parent_package = '', top_path = None):
from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
config = Configuration('spatial', parent_package, top_path)
config.add_data_dir('tests')
#config.add_extension('_vq',
# sources=[join('src', 'vq_module.c'), join('src', 'vq.c')],
# include_dirs = [get_numpy_include_dirs()])
config.add_sconscript('SConstruct')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer = "SciPy Developers",
author = "Eric Jones",
maintainer_email = "scipy-dev@scipy.org",
description = "Clustering Algorithms (Information Theory)",
url = "http://www.scipy.org",
license = "SciPy License (BSD Style)",
**configuration(top_path='').todict()
)
|
<commit_before>#!/usr/bin/env python
from os.path import join
def configuration(parent_package = '', top_path = None):
from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
config = Configuration('cluster', parent_package, top_path)
config.add_data_dir('tests')
#config.add_extension('_vq',
# sources=[join('src', 'vq_module.c'), join('src', 'vq.c')],
# include_dirs = [get_numpy_include_dirs()])
config.add_sconscript('SConstruct')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer = "SciPy Developers",
author = "Eric Jones",
maintainer_email = "scipy-dev@scipy.org",
description = "Clustering Algorithms (Information Theory)",
url = "http://www.scipy.org",
license = "SciPy License (BSD Style)",
**configuration(top_path='').todict()
)
<commit_msg>Update setup.py file for numscons build.
git-svn-id: 003f22d385e25de9cff933a5ea4efd77cb5e7b28@4958 d6536bca-fef9-0310-8506-e4c0a848fbcf<commit_after>
|
#!/usr/bin/env python
from os.path import join
def configuration(parent_package = '', top_path = None):
from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
config = Configuration('spatial', parent_package, top_path)
config.add_data_dir('tests')
#config.add_extension('_vq',
# sources=[join('src', 'vq_module.c'), join('src', 'vq.c')],
# include_dirs = [get_numpy_include_dirs()])
config.add_sconscript('SConstruct')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer = "SciPy Developers",
author = "Eric Jones",
maintainer_email = "scipy-dev@scipy.org",
description = "Clustering Algorithms (Information Theory)",
url = "http://www.scipy.org",
license = "SciPy License (BSD Style)",
**configuration(top_path='').todict()
)
|
#!/usr/bin/env python
from os.path import join
def configuration(parent_package = '', top_path = None):
from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
config = Configuration('cluster', parent_package, top_path)
config.add_data_dir('tests')
#config.add_extension('_vq',
# sources=[join('src', 'vq_module.c'), join('src', 'vq.c')],
# include_dirs = [get_numpy_include_dirs()])
config.add_sconscript('SConstruct')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer = "SciPy Developers",
author = "Eric Jones",
maintainer_email = "scipy-dev@scipy.org",
description = "Clustering Algorithms (Information Theory)",
url = "http://www.scipy.org",
license = "SciPy License (BSD Style)",
**configuration(top_path='').todict()
)
Update setup.py file for numscons build.
git-svn-id: 003f22d385e25de9cff933a5ea4efd77cb5e7b28@4958 d6536bca-fef9-0310-8506-e4c0a848fbcf#!/usr/bin/env python
from os.path import join
def configuration(parent_package = '', top_path = None):
from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
config = Configuration('spatial', parent_package, top_path)
config.add_data_dir('tests')
#config.add_extension('_vq',
# sources=[join('src', 'vq_module.c'), join('src', 'vq.c')],
# include_dirs = [get_numpy_include_dirs()])
config.add_sconscript('SConstruct')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer = "SciPy Developers",
author = "Eric Jones",
maintainer_email = "scipy-dev@scipy.org",
description = "Clustering Algorithms (Information Theory)",
url = "http://www.scipy.org",
license = "SciPy License (BSD Style)",
**configuration(top_path='').todict()
)
|
<commit_before>#!/usr/bin/env python
from os.path import join
def configuration(parent_package = '', top_path = None):
from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
config = Configuration('cluster', parent_package, top_path)
config.add_data_dir('tests')
#config.add_extension('_vq',
# sources=[join('src', 'vq_module.c'), join('src', 'vq.c')],
# include_dirs = [get_numpy_include_dirs()])
config.add_sconscript('SConstruct')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer = "SciPy Developers",
author = "Eric Jones",
maintainer_email = "scipy-dev@scipy.org",
description = "Clustering Algorithms (Information Theory)",
url = "http://www.scipy.org",
license = "SciPy License (BSD Style)",
**configuration(top_path='').todict()
)
<commit_msg>Update setup.py file for numscons build.
git-svn-id: 003f22d385e25de9cff933a5ea4efd77cb5e7b28@4958 d6536bca-fef9-0310-8506-e4c0a848fbcf<commit_after>#!/usr/bin/env python
from os.path import join
def configuration(parent_package = '', top_path = None):
from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
config = Configuration('spatial', parent_package, top_path)
config.add_data_dir('tests')
#config.add_extension('_vq',
# sources=[join('src', 'vq_module.c'), join('src', 'vq.c')],
# include_dirs = [get_numpy_include_dirs()])
config.add_sconscript('SConstruct')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer = "SciPy Developers",
author = "Eric Jones",
maintainer_email = "scipy-dev@scipy.org",
description = "Clustering Algorithms (Information Theory)",
url = "http://www.scipy.org",
license = "SciPy License (BSD Style)",
**configuration(top_path='').todict()
)
|
a9bdf9ec691f0e688af41be1216977b9ce9c8976
|
helpers.py
|
helpers.py
|
""" A bunch of helper functions that, when fixed up, will return the things we
need to make this website work! These functions use the weather and twitter APIs!!!
"""
## Import python libraries we need up here.
###############################################
### Problem One! ###
###############################################
def get_city_coordinates():
"""Find the GPS coordinates for Charlottesville,
and fill in the information below
"""
lattitude = ???
longitude = ???
return lattitude, longitude
###############################################
### Problem Two! ###
###############################################
def get_icon_size():
""" Modify this function to return a number of instagram photos
you want to appear on the site at a time! Because of how the instagram API works,
it won't return more than 20 photos at once.
"""
size = ???
return size
###############################################
### Problem Three! ###
###############################################
def choose_number_of_tweets():
""" Modify this function to return the max number of tweets
you want to appear on the site at a time!
"""
return number
###############################################
### Problem Four! ###
###############################################
def choose_hashtag():
""" Modify this function to use return the hashtah #sparkhackathon
"""
return hashtag
|
""" A bunch of helper functions that, when fixed up, will return the things we
need to make this website work! These functions use the weather and twitter APIs!!!
"""
## Import python libraries we need up here.
###############################################
### Problem One! ###
###############################################
def get_city_coordinates():
"""Find the GPS coordinates for Charlottesville,
and fill in the information below
"""
lattitude = ???
longitude = ???
return lattitude, longitude
###############################################
### Problem Two! ###
###############################################
def get_icon_size():
""" Modify this function to return a number of instagram photos
you want to appear on the site at a time! Because of how the instagram API works,
it won't return more than 20 photos at once.
"""
size = ???
return size
###############################################
### Problem Three! ###
###############################################
def choose_number_of_tweets():
""" Modify this function to return the max number of tweets
you want to appear on the site at a time!
"""
number = 30
return number
###############################################
### Problem Four! ###
###############################################
def choose_hashtag():
""" Modify this function to use return the hashtah #sparkhackathon
"""
return hashtag
|
Set tweet limit to 30 tweets
|
Set tweet limit to 30 tweets
|
Python
|
apache-2.0
|
samanehsan/spark_github,samanehsan/spark_github,samanehsan/learn-git,samanehsan/learn-git
|
""" A bunch of helper functions that, when fixed up, will return the things we
need to make this website work! These functions use the weather and twitter APIs!!!
"""
## Import python libraries we need up here.
###############################################
### Problem One! ###
###############################################
def get_city_coordinates():
"""Find the GPS coordinates for Charlottesville,
and fill in the information below
"""
lattitude = ???
longitude = ???
return lattitude, longitude
###############################################
### Problem Two! ###
###############################################
def get_icon_size():
""" Modify this function to return a number of instagram photos
you want to appear on the site at a time! Because of how the instagram API works,
it won't return more than 20 photos at once.
"""
size = ???
return size
###############################################
### Problem Three! ###
###############################################
def choose_number_of_tweets():
""" Modify this function to return the max number of tweets
you want to appear on the site at a time!
"""
return number
###############################################
### Problem Four! ###
###############################################
def choose_hashtag():
""" Modify this function to use return the hashtah #sparkhackathon
"""
return hashtag
Set tweet limit to 30 tweets
|
""" A bunch of helper functions that, when fixed up, will return the things we
need to make this website work! These functions use the weather and twitter APIs!!!
"""
## Import python libraries we need up here.
###############################################
### Problem One! ###
###############################################
def get_city_coordinates():
"""Find the GPS coordinates for Charlottesville,
and fill in the information below
"""
lattitude = ???
longitude = ???
return lattitude, longitude
###############################################
### Problem Two! ###
###############################################
def get_icon_size():
""" Modify this function to return a number of instagram photos
you want to appear on the site at a time! Because of how the instagram API works,
it won't return more than 20 photos at once.
"""
size = ???
return size
###############################################
### Problem Three! ###
###############################################
def choose_number_of_tweets():
""" Modify this function to return the max number of tweets
you want to appear on the site at a time!
"""
number = 30
return number
###############################################
### Problem Four! ###
###############################################
def choose_hashtag():
""" Modify this function to use return the hashtah #sparkhackathon
"""
return hashtag
|
<commit_before>""" A bunch of helper functions that, when fixed up, will return the things we
need to make this website work! These functions use the weather and twitter APIs!!!
"""
## Import python libraries we need up here.
###############################################
### Problem One! ###
###############################################
def get_city_coordinates():
"""Find the GPS coordinates for Charlottesville,
and fill in the information below
"""
lattitude = ???
longitude = ???
return lattitude, longitude
###############################################
### Problem Two! ###
###############################################
def get_icon_size():
""" Modify this function to return a number of instagram photos
you want to appear on the site at a time! Because of how the instagram API works,
it won't return more than 20 photos at once.
"""
size = ???
return size
###############################################
### Problem Three! ###
###############################################
def choose_number_of_tweets():
""" Modify this function to return the max number of tweets
you want to appear on the site at a time!
"""
return number
###############################################
### Problem Four! ###
###############################################
def choose_hashtag():
""" Modify this function to use return the hashtah #sparkhackathon
"""
return hashtag
<commit_msg>Set tweet limit to 30 tweets<commit_after>
|
""" A bunch of helper functions that, when fixed up, will return the things we
need to make this website work! These functions use the weather and twitter APIs!!!
"""
## Import python libraries we need up here.
###############################################
### Problem One! ###
###############################################
def get_city_coordinates():
"""Find the GPS coordinates for Charlottesville,
and fill in the information below
"""
lattitude = ???
longitude = ???
return lattitude, longitude
###############################################
### Problem Two! ###
###############################################
def get_icon_size():
""" Modify this function to return a number of instagram photos
you want to appear on the site at a time! Because of how the instagram API works,
it won't return more than 20 photos at once.
"""
size = ???
return size
###############################################
### Problem Three! ###
###############################################
def choose_number_of_tweets():
""" Modify this function to return the max number of tweets
you want to appear on the site at a time!
"""
number = 30
return number
###############################################
### Problem Four! ###
###############################################
def choose_hashtag():
""" Modify this function to use return the hashtah #sparkhackathon
"""
return hashtag
|
""" A bunch of helper functions that, when fixed up, will return the things we
need to make this website work! These functions use the weather and twitter APIs!!!
"""
## Import python libraries we need up here.
###############################################
### Problem One! ###
###############################################
def get_city_coordinates():
"""Find the GPS coordinates for Charlottesville,
and fill in the information below
"""
lattitude = ???
longitude = ???
return lattitude, longitude
###############################################
### Problem Two! ###
###############################################
def get_icon_size():
""" Modify this function to return a number of instagram photos
you want to appear on the site at a time! Because of how the instagram API works,
it won't return more than 20 photos at once.
"""
size = ???
return size
###############################################
### Problem Three! ###
###############################################
def choose_number_of_tweets():
""" Modify this function to return the max number of tweets
you want to appear on the site at a time!
"""
return number
###############################################
### Problem Four! ###
###############################################
def choose_hashtag():
""" Modify this function to use return the hashtah #sparkhackathon
"""
return hashtag
Set tweet limit to 30 tweets""" A bunch of helper functions that, when fixed up, will return the things we
need to make this website work! These functions use the weather and twitter APIs!!!
"""
## Import python libraries we need up here.
###############################################
### Problem One! ###
###############################################
def get_city_coordinates():
"""Find the GPS coordinates for Charlottesville,
and fill in the information below
"""
lattitude = ???
longitude = ???
return lattitude, longitude
###############################################
### Problem Two! ###
###############################################
def get_icon_size():
""" Modify this function to return a number of instagram photos
you want to appear on the site at a time! Because of how the instagram API works,
it won't return more than 20 photos at once.
"""
size = ???
return size
###############################################
### Problem Three! ###
###############################################
def choose_number_of_tweets():
""" Modify this function to return the max number of tweets
you want to appear on the site at a time!
"""
number = 30
return number
###############################################
### Problem Four! ###
###############################################
def choose_hashtag():
""" Modify this function to use return the hashtah #sparkhackathon
"""
return hashtag
|
<commit_before>""" A bunch of helper functions that, when fixed up, will return the things we
need to make this website work! These functions use the weather and twitter APIs!!!
"""
## Import python libraries we need up here.
###############################################
### Problem One! ###
###############################################
def get_city_coordinates():
"""Find the GPS coordinates for Charlottesville,
and fill in the information below
"""
lattitude = ???
longitude = ???
return lattitude, longitude
###############################################
### Problem Two! ###
###############################################
def get_icon_size():
""" Modify this function to return a number of instagram photos
you want to appear on the site at a time! Because of how the instagram API works,
it won't return more than 20 photos at once.
"""
size = ???
return size
###############################################
### Problem Three! ###
###############################################
def choose_number_of_tweets():
""" Modify this function to return the max number of tweets
you want to appear on the site at a time!
"""
return number
###############################################
### Problem Four! ###
###############################################
def choose_hashtag():
""" Modify this function to use return the hashtah #sparkhackathon
"""
return hashtag
<commit_msg>Set tweet limit to 30 tweets<commit_after>""" A bunch of helper functions that, when fixed up, will return the things we
need to make this website work! These functions use the weather and twitter APIs!!!
"""
## Import python libraries we need up here.
###############################################
### Problem One! ###
###############################################
def get_city_coordinates():
"""Find the GPS coordinates for Charlottesville,
and fill in the information below
"""
lattitude = ???
longitude = ???
return lattitude, longitude
###############################################
### Problem Two! ###
###############################################
def get_icon_size():
""" Modify this function to return a number of instagram photos
you want to appear on the site at a time! Because of how the instagram API works,
it won't return more than 20 photos at once.
"""
size = ???
return size
###############################################
### Problem Three! ###
###############################################
def choose_number_of_tweets():
""" Modify this function to return the max number of tweets
you want to appear on the site at a time!
"""
number = 30
return number
###############################################
### Problem Four! ###
###############################################
def choose_hashtag():
""" Modify this function to use return the hashtah #sparkhackathon
"""
return hashtag
|
93bd76fc99ef6f399393761aef11c0840e587b2d
|
update-zips.py
|
update-zips.py
|
#!/usr/bin/env python3
"""Remake the ziptestdata.zip file.
Run this to rebuild the importlib_resources/tests/data/ziptestdata.zip file,
e.g. if you want to add a new file to the zip.
This will replace the file with the new build, but it won't commit anything to
git.
"""
import contextlib
import os
import pathlib
from zipfile import ZipFile
def main():
suffixes = '01', '02'
tuple(map(generate, suffixes))
def generate(suffix):
basepath = pathlib.Path('ziptestdata')
base = pathlib.Path('importlib_resources/tests')
zfpath = base / f'zipdata{suffix}/ziptestdata.zip'
with ZipFile(zfpath, 'w') as zf:
relpath = base / f'data{suffix}'
for dirpath, dirnames, filenames in os.walk(relpath):
with contextlib.suppress(KeyError):
dirnames.remove('__pycache__')
for filename in filenames:
src = os.path.join(dirpath, filename)
if src == zfpath:
continue
commonpath = os.path.commonpath((relpath, dirpath))
dst = basepath / dirpath[len(commonpath) + 1 :] / filename
print(src, '->', dst)
zf.write(src, dst)
__name__ == '__main__' and main()
|
#!/usr/bin/env python3
"""Remake the ziptestdata.zip file.
Run this to rebuild the importlib_resources/tests/data/ziptestdata.zip file,
e.g. if you want to add a new file to the zip.
This will replace the file with the new build, but it won't commit anything to
git.
"""
import contextlib
import os
import pathlib
from zipfile import ZipFile
def main():
suffixes = '01', '02'
tuple(map(generate, suffixes))
def generate(suffix):
basepath = pathlib.Path('ziptestdata')
base = pathlib.Path('importlib_resources/tests')
zfpath = base / f'zipdata{suffix}/ziptestdata.zip'
with ZipFile(zfpath, 'w') as zf:
datapath = base / f'data{suffix}'
for dirpath, dirnames, filenames in os.walk(datapath):
with contextlib.suppress(KeyError):
dirnames.remove('__pycache__')
loc = pathlib.Path(dirpath).relative_to(datapath)
for filename in filenames:
src = os.path.join(dirpath, filename)
if src == zfpath:
continue
dst = basepath / loc / filename
print(src, '->', dst)
zf.write(src, dst)
__name__ == '__main__' and main()
|
Use relative_to instead of string manipulation
|
Use relative_to instead of string manipulation
|
Python
|
apache-2.0
|
python/importlib_resources
|
#!/usr/bin/env python3
"""Remake the ziptestdata.zip file.
Run this to rebuild the importlib_resources/tests/data/ziptestdata.zip file,
e.g. if you want to add a new file to the zip.
This will replace the file with the new build, but it won't commit anything to
git.
"""
import contextlib
import os
import pathlib
from zipfile import ZipFile
def main():
suffixes = '01', '02'
tuple(map(generate, suffixes))
def generate(suffix):
basepath = pathlib.Path('ziptestdata')
base = pathlib.Path('importlib_resources/tests')
zfpath = base / f'zipdata{suffix}/ziptestdata.zip'
with ZipFile(zfpath, 'w') as zf:
relpath = base / f'data{suffix}'
for dirpath, dirnames, filenames in os.walk(relpath):
with contextlib.suppress(KeyError):
dirnames.remove('__pycache__')
for filename in filenames:
src = os.path.join(dirpath, filename)
if src == zfpath:
continue
commonpath = os.path.commonpath((relpath, dirpath))
dst = basepath / dirpath[len(commonpath) + 1 :] / filename
print(src, '->', dst)
zf.write(src, dst)
__name__ == '__main__' and main()
Use relative_to instead of string manipulation
|
#!/usr/bin/env python3
"""Remake the ziptestdata.zip file.
Run this to rebuild the importlib_resources/tests/data/ziptestdata.zip file,
e.g. if you want to add a new file to the zip.
This will replace the file with the new build, but it won't commit anything to
git.
"""
import contextlib
import os
import pathlib
from zipfile import ZipFile
def main():
suffixes = '01', '02'
tuple(map(generate, suffixes))
def generate(suffix):
basepath = pathlib.Path('ziptestdata')
base = pathlib.Path('importlib_resources/tests')
zfpath = base / f'zipdata{suffix}/ziptestdata.zip'
with ZipFile(zfpath, 'w') as zf:
datapath = base / f'data{suffix}'
for dirpath, dirnames, filenames in os.walk(datapath):
with contextlib.suppress(KeyError):
dirnames.remove('__pycache__')
loc = pathlib.Path(dirpath).relative_to(datapath)
for filename in filenames:
src = os.path.join(dirpath, filename)
if src == zfpath:
continue
dst = basepath / loc / filename
print(src, '->', dst)
zf.write(src, dst)
__name__ == '__main__' and main()
|
<commit_before>#!/usr/bin/env python3
"""Remake the ziptestdata.zip file.
Run this to rebuild the importlib_resources/tests/data/ziptestdata.zip file,
e.g. if you want to add a new file to the zip.
This will replace the file with the new build, but it won't commit anything to
git.
"""
import contextlib
import os
import pathlib
from zipfile import ZipFile
def main():
suffixes = '01', '02'
tuple(map(generate, suffixes))
def generate(suffix):
basepath = pathlib.Path('ziptestdata')
base = pathlib.Path('importlib_resources/tests')
zfpath = base / f'zipdata{suffix}/ziptestdata.zip'
with ZipFile(zfpath, 'w') as zf:
relpath = base / f'data{suffix}'
for dirpath, dirnames, filenames in os.walk(relpath):
with contextlib.suppress(KeyError):
dirnames.remove('__pycache__')
for filename in filenames:
src = os.path.join(dirpath, filename)
if src == zfpath:
continue
commonpath = os.path.commonpath((relpath, dirpath))
dst = basepath / dirpath[len(commonpath) + 1 :] / filename
print(src, '->', dst)
zf.write(src, dst)
__name__ == '__main__' and main()
<commit_msg>Use relative_to instead of string manipulation<commit_after>
|
#!/usr/bin/env python3
"""Remake the ziptestdata.zip file.
Run this to rebuild the importlib_resources/tests/data/ziptestdata.zip file,
e.g. if you want to add a new file to the zip.
This will replace the file with the new build, but it won't commit anything to
git.
"""
import contextlib
import os
import pathlib
from zipfile import ZipFile
def main():
suffixes = '01', '02'
tuple(map(generate, suffixes))
def generate(suffix):
basepath = pathlib.Path('ziptestdata')
base = pathlib.Path('importlib_resources/tests')
zfpath = base / f'zipdata{suffix}/ziptestdata.zip'
with ZipFile(zfpath, 'w') as zf:
datapath = base / f'data{suffix}'
for dirpath, dirnames, filenames in os.walk(datapath):
with contextlib.suppress(KeyError):
dirnames.remove('__pycache__')
loc = pathlib.Path(dirpath).relative_to(datapath)
for filename in filenames:
src = os.path.join(dirpath, filename)
if src == zfpath:
continue
dst = basepath / loc / filename
print(src, '->', dst)
zf.write(src, dst)
__name__ == '__main__' and main()
|
#!/usr/bin/env python3
"""Remake the ziptestdata.zip file.
Run this to rebuild the importlib_resources/tests/data/ziptestdata.zip file,
e.g. if you want to add a new file to the zip.
This will replace the file with the new build, but it won't commit anything to
git.
"""
import contextlib
import os
import pathlib
from zipfile import ZipFile
def main():
suffixes = '01', '02'
tuple(map(generate, suffixes))
def generate(suffix):
basepath = pathlib.Path('ziptestdata')
base = pathlib.Path('importlib_resources/tests')
zfpath = base / f'zipdata{suffix}/ziptestdata.zip'
with ZipFile(zfpath, 'w') as zf:
relpath = base / f'data{suffix}'
for dirpath, dirnames, filenames in os.walk(relpath):
with contextlib.suppress(KeyError):
dirnames.remove('__pycache__')
for filename in filenames:
src = os.path.join(dirpath, filename)
if src == zfpath:
continue
commonpath = os.path.commonpath((relpath, dirpath))
dst = basepath / dirpath[len(commonpath) + 1 :] / filename
print(src, '->', dst)
zf.write(src, dst)
__name__ == '__main__' and main()
Use relative_to instead of string manipulation#!/usr/bin/env python3
"""Remake the ziptestdata.zip file.
Run this to rebuild the importlib_resources/tests/data/ziptestdata.zip file,
e.g. if you want to add a new file to the zip.
This will replace the file with the new build, but it won't commit anything to
git.
"""
import contextlib
import os
import pathlib
from zipfile import ZipFile
def main():
suffixes = '01', '02'
tuple(map(generate, suffixes))
def generate(suffix):
basepath = pathlib.Path('ziptestdata')
base = pathlib.Path('importlib_resources/tests')
zfpath = base / f'zipdata{suffix}/ziptestdata.zip'
with ZipFile(zfpath, 'w') as zf:
datapath = base / f'data{suffix}'
for dirpath, dirnames, filenames in os.walk(datapath):
with contextlib.suppress(KeyError):
dirnames.remove('__pycache__')
loc = pathlib.Path(dirpath).relative_to(datapath)
for filename in filenames:
src = os.path.join(dirpath, filename)
if src == zfpath:
continue
dst = basepath / loc / filename
print(src, '->', dst)
zf.write(src, dst)
__name__ == '__main__' and main()
|
<commit_before>#!/usr/bin/env python3
"""Remake the ziptestdata.zip file.
Run this to rebuild the importlib_resources/tests/data/ziptestdata.zip file,
e.g. if you want to add a new file to the zip.
This will replace the file with the new build, but it won't commit anything to
git.
"""
import contextlib
import os
import pathlib
from zipfile import ZipFile
def main():
suffixes = '01', '02'
tuple(map(generate, suffixes))
def generate(suffix):
basepath = pathlib.Path('ziptestdata')
base = pathlib.Path('importlib_resources/tests')
zfpath = base / f'zipdata{suffix}/ziptestdata.zip'
with ZipFile(zfpath, 'w') as zf:
relpath = base / f'data{suffix}'
for dirpath, dirnames, filenames in os.walk(relpath):
with contextlib.suppress(KeyError):
dirnames.remove('__pycache__')
for filename in filenames:
src = os.path.join(dirpath, filename)
if src == zfpath:
continue
commonpath = os.path.commonpath((relpath, dirpath))
dst = basepath / dirpath[len(commonpath) + 1 :] / filename
print(src, '->', dst)
zf.write(src, dst)
__name__ == '__main__' and main()
<commit_msg>Use relative_to instead of string manipulation<commit_after>#!/usr/bin/env python3
"""Remake the ziptestdata.zip file.
Run this to rebuild the importlib_resources/tests/data/ziptestdata.zip file,
e.g. if you want to add a new file to the zip.
This will replace the file with the new build, but it won't commit anything to
git.
"""
import contextlib
import os
import pathlib
from zipfile import ZipFile
def main():
suffixes = '01', '02'
tuple(map(generate, suffixes))
def generate(suffix):
basepath = pathlib.Path('ziptestdata')
base = pathlib.Path('importlib_resources/tests')
zfpath = base / f'zipdata{suffix}/ziptestdata.zip'
with ZipFile(zfpath, 'w') as zf:
datapath = base / f'data{suffix}'
for dirpath, dirnames, filenames in os.walk(datapath):
with contextlib.suppress(KeyError):
dirnames.remove('__pycache__')
loc = pathlib.Path(dirpath).relative_to(datapath)
for filename in filenames:
src = os.path.join(dirpath, filename)
if src == zfpath:
continue
dst = basepath / loc / filename
print(src, '->', dst)
zf.write(src, dst)
__name__ == '__main__' and main()
|
c6716b20a43bafc0fbcec0d1c159fe55e87b22cc
|
totalimpactwebapp/__init__.py
|
totalimpactwebapp/__init__.py
|
import os, logging, sys
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.login import LoginManager
# set up logging
# see http://wiki.pylonshq.com/display/pylonscookbook/Alternative+logging+configuration
logging.basicConfig(
stream=sys.stdout,
level=logging.DEBUG,
format='[%(process)d] %(levelname)8s %(threadName)30s %(name)s - %(message)s'
)
logger = logging.getLogger("tiwebapp")
# set up application
app = Flask(__name__)
db = SQLAlchemy(app)
login_manager = LoginManager()
login_manager.setup_app(app)
# set up configs
# Setting ASSETS_DEBUG=True makes debugging easier by NOT minimizing the assets.
# Production should have ASSETS_DEBUG=False
# ASSETS_DEBUG=True is the default
app.config["ASSETS_DEBUG"] = (os.getenv("ASSETS_DEBUG", "True") == "True")
app.config["SQLALCHEMY_DATABASE_URI"] = os.getenv("DATABASE_URL")
app.config["SECRET_KEY"] = os.getenv("SECRET_KEY")
# set up views
from totalimpactwebapp import views
|
import os, logging, sys
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.login import LoginManager
# set up logging
# see http://wiki.pylonshq.com/display/pylonscookbook/Alternative+logging+configuration
logging.basicConfig(
stream=sys.stdout,
level=logging.DEBUG,
format='[%(process)d] %(levelname)8s %(threadName)30s %(name)s - %(message)s'
)
logger = logging.getLogger("tiwebapp")
# set up application
app = Flask(__name__)
# allow slashes and end of URLs even when they're not part of views:
# http://flask.pocoo.org/mailinglist/archive/2011/2/27/re-automatic-removal-of-trailing-slashes/#043b1a0b6e841ab8e7d38bd7374cbb58
app.url_map.strict_slashes = False
db = SQLAlchemy(app)
login_manager = LoginManager()
login_manager.setup_app(app)
# set up configs
# Setting ASSETS_DEBUG=True makes debugging easier by NOT minimizing the assets.
# Production should have ASSETS_DEBUG=False
# ASSETS_DEBUG=True is the default
app.config["ASSETS_DEBUG"] = (os.getenv("ASSETS_DEBUG", "True") == "True")
app.config["SQLALCHEMY_DATABASE_URI"] = os.getenv("DATABASE_URL")
app.config["SECRET_KEY"] = os.getenv("SECRET_KEY")
# set up views
from totalimpactwebapp import views
|
Allow URLs to end with slashes even if they're not defined that way in views.
|
Allow URLs to end with slashes even if they're not defined that way in views.
Closes #35
See http://flask.pocoo.org/mailinglist/archive/2011/2/27/re-automatic-removal-of-trailing-slashes/#043b1a0b6e841ab8e7d38bd7374cbb58
|
Python
|
mit
|
total-impact/total-impact-webapp,Impactstory/total-impact-webapp,total-impact/total-impact-webapp,Impactstory/total-impact-webapp,Impactstory/total-impact-webapp,total-impact/total-impact-webapp,Impactstory/total-impact-webapp,total-impact/total-impact-webapp
|
import os, logging, sys
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.login import LoginManager
# set up logging
# see http://wiki.pylonshq.com/display/pylonscookbook/Alternative+logging+configuration
logging.basicConfig(
stream=sys.stdout,
level=logging.DEBUG,
format='[%(process)d] %(levelname)8s %(threadName)30s %(name)s - %(message)s'
)
logger = logging.getLogger("tiwebapp")
# set up application
app = Flask(__name__)
db = SQLAlchemy(app)
login_manager = LoginManager()
login_manager.setup_app(app)
# set up configs
# Setting ASSETS_DEBUG=True makes debugging easier by NOT minimizing the assets.
# Production should have ASSETS_DEBUG=False
# ASSETS_DEBUG=True is the default
app.config["ASSETS_DEBUG"] = (os.getenv("ASSETS_DEBUG", "True") == "True")
app.config["SQLALCHEMY_DATABASE_URI"] = os.getenv("DATABASE_URL")
app.config["SECRET_KEY"] = os.getenv("SECRET_KEY")
# set up views
from totalimpactwebapp import views
Allow URLs to end with slashes even if they're not defined that way in views.
Closes #35
See http://flask.pocoo.org/mailinglist/archive/2011/2/27/re-automatic-removal-of-trailing-slashes/#043b1a0b6e841ab8e7d38bd7374cbb58
|
import os, logging, sys
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.login import LoginManager
# set up logging
# see http://wiki.pylonshq.com/display/pylonscookbook/Alternative+logging+configuration
logging.basicConfig(
stream=sys.stdout,
level=logging.DEBUG,
format='[%(process)d] %(levelname)8s %(threadName)30s %(name)s - %(message)s'
)
logger = logging.getLogger("tiwebapp")
# set up application
app = Flask(__name__)
# allow slashes at end of URLs even when they're not part of views:
# http://flask.pocoo.org/mailinglist/archive/2011/2/27/re-automatic-removal-of-trailing-slashes/#043b1a0b6e841ab8e7d38bd7374cbb58
app.url_map.strict_slashes = False
db = SQLAlchemy(app)
login_manager = LoginManager()
login_manager.setup_app(app)
# set up configs
# Setting ASSETS_DEBUG=True makes debugging easier by NOT minimizing the assets.
# Production should have ASSETS_DEBUG=False
# ASSETS_DEBUG=True is the default
app.config["ASSETS_DEBUG"] = (os.getenv("ASSETS_DEBUG", "True") == "True")
app.config["SQLALCHEMY_DATABASE_URI"] = os.getenv("DATABASE_URL")
app.config["SECRET_KEY"] = os.getenv("SECRET_KEY")
# set up views
from totalimpactwebapp import views
|
<commit_before>import os, logging, sys
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.login import LoginManager
# set up logging
# see http://wiki.pylonshq.com/display/pylonscookbook/Alternative+logging+configuration
logging.basicConfig(
stream=sys.stdout,
level=logging.DEBUG,
format='[%(process)d] %(levelname)8s %(threadName)30s %(name)s - %(message)s'
)
logger = logging.getLogger("tiwebapp")
# set up application
app = Flask(__name__)
db = SQLAlchemy(app)
login_manager = LoginManager()
login_manager.setup_app(app)
# set up configs
# Setting ASSETS_DEBUG=True makes debugging easier by NOT minimizing the assets.
# Production should have ASSETS_DEBUG=False
# ASSETS_DEBUG=True is the default
app.config["ASSETS_DEBUG"] = (os.getenv("ASSETS_DEBUG", "True") == "True")
app.config["SQLALCHEMY_DATABASE_URI"] = os.getenv("DATABASE_URL")
app.config["SECRET_KEY"] = os.getenv("SECRET_KEY")
# set up views
from totalimpactwebapp import views
<commit_msg>Allow URLs to end with slashes even if they're not defined that way in views.
Closes #35
See http://flask.pocoo.org/mailinglist/archive/2011/2/27/re-automatic-removal-of-trailing-slashes/#043b1a0b6e841ab8e7d38bd7374cbb58<commit_after>
|
import os, logging, sys
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.login import LoginManager
# set up logging
# see http://wiki.pylonshq.com/display/pylonscookbook/Alternative+logging+configuration
logging.basicConfig(
stream=sys.stdout,
level=logging.DEBUG,
format='[%(process)d] %(levelname)8s %(threadName)30s %(name)s - %(message)s'
)
logger = logging.getLogger("tiwebapp")
# set up application
app = Flask(__name__)
# allow slashes at end of URLs even when they're not part of views:
# http://flask.pocoo.org/mailinglist/archive/2011/2/27/re-automatic-removal-of-trailing-slashes/#043b1a0b6e841ab8e7d38bd7374cbb58
app.url_map.strict_slashes = False
db = SQLAlchemy(app)
login_manager = LoginManager()
login_manager.setup_app(app)
# set up configs
# Setting ASSETS_DEBUG=True makes debugging easier by NOT minimizing the assets.
# Production should have ASSETS_DEBUG=False
# ASSETS_DEBUG=True is the default
app.config["ASSETS_DEBUG"] = (os.getenv("ASSETS_DEBUG", "True") == "True")
app.config["SQLALCHEMY_DATABASE_URI"] = os.getenv("DATABASE_URL")
app.config["SECRET_KEY"] = os.getenv("SECRET_KEY")
# set up views
from totalimpactwebapp import views
|
import os, logging, sys
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.login import LoginManager
# set up logging
# see http://wiki.pylonshq.com/display/pylonscookbook/Alternative+logging+configuration
logging.basicConfig(
stream=sys.stdout,
level=logging.DEBUG,
format='[%(process)d] %(levelname)8s %(threadName)30s %(name)s - %(message)s'
)
logger = logging.getLogger("tiwebapp")
# set up application
app = Flask(__name__)
db = SQLAlchemy(app)
login_manager = LoginManager()
login_manager.setup_app(app)
# set up configs
# Setting ASSETS_DEBUG=True makes debugging easier by NOT minimizing the assets.
# Production should have ASSETS_DEBUG=False
# ASSETS_DEBUG=True is the default
app.config["ASSETS_DEBUG"] = (os.getenv("ASSETS_DEBUG", "True") == "True")
app.config["SQLALCHEMY_DATABASE_URI"] = os.getenv("DATABASE_URL")
app.config["SECRET_KEY"] = os.getenv("SECRET_KEY")
# set up views
from totalimpactwebapp import views
Allow URLs to end with slashes even if they're not defined that way in views.
Closes #35
See http://flask.pocoo.org/mailinglist/archive/2011/2/27/re-automatic-removal-of-trailing-slashes/#043b1a0b6e841ab8e7d38bd7374cbb58import os, logging, sys
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.login import LoginManager
# set up logging
# see http://wiki.pylonshq.com/display/pylonscookbook/Alternative+logging+configuration
logging.basicConfig(
stream=sys.stdout,
level=logging.DEBUG,
format='[%(process)d] %(levelname)8s %(threadName)30s %(name)s - %(message)s'
)
logger = logging.getLogger("tiwebapp")
# set up application
app = Flask(__name__)
# allow slashes at end of URLs even when they're not part of views:
# http://flask.pocoo.org/mailinglist/archive/2011/2/27/re-automatic-removal-of-trailing-slashes/#043b1a0b6e841ab8e7d38bd7374cbb58
app.url_map.strict_slashes = False
db = SQLAlchemy(app)
login_manager = LoginManager()
login_manager.setup_app(app)
# set up configs
# Setting ASSETS_DEBUG=True makes debugging easier by NOT minimizing the assets.
# Production should have ASSETS_DEBUG=False
# ASSETS_DEBUG=True is the default
app.config["ASSETS_DEBUG"] = (os.getenv("ASSETS_DEBUG", "True") == "True")
app.config["SQLALCHEMY_DATABASE_URI"] = os.getenv("DATABASE_URL")
app.config["SECRET_KEY"] = os.getenv("SECRET_KEY")
# set up views
from totalimpactwebapp import views
|
<commit_before>import os, logging, sys
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.login import LoginManager
# set up logging
# see http://wiki.pylonshq.com/display/pylonscookbook/Alternative+logging+configuration
logging.basicConfig(
stream=sys.stdout,
level=logging.DEBUG,
format='[%(process)d] %(levelname)8s %(threadName)30s %(name)s - %(message)s'
)
logger = logging.getLogger("tiwebapp")
# set up application
app = Flask(__name__)
db = SQLAlchemy(app)
login_manager = LoginManager()
login_manager.setup_app(app)
# set up configs
# Setting ASSETS_DEBUG=True makes debugging easier by NOT minimizing the assets.
# Production should have ASSETS_DEBUG=False
# ASSETS_DEBUG=True is the default
app.config["ASSETS_DEBUG"] = (os.getenv("ASSETS_DEBUG", "True") == "True")
app.config["SQLALCHEMY_DATABASE_URI"] = os.getenv("DATABASE_URL")
app.config["SECRET_KEY"] = os.getenv("SECRET_KEY")
# set up views
from totalimpactwebapp import views
<commit_msg>Allow URLs to end with slashes even if they're not defined that way in views.
Closes #35
See http://flask.pocoo.org/mailinglist/archive/2011/2/27/re-automatic-removal-of-trailing-slashes/#043b1a0b6e841ab8e7d38bd7374cbb58<commit_after>import os, logging, sys
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.login import LoginManager
# set up logging
# see http://wiki.pylonshq.com/display/pylonscookbook/Alternative+logging+configuration
logging.basicConfig(
stream=sys.stdout,
level=logging.DEBUG,
format='[%(process)d] %(levelname)8s %(threadName)30s %(name)s - %(message)s'
)
logger = logging.getLogger("tiwebapp")
# set up application
app = Flask(__name__)
# allow slashes at end of URLs even when they're not part of views:
# http://flask.pocoo.org/mailinglist/archive/2011/2/27/re-automatic-removal-of-trailing-slashes/#043b1a0b6e841ab8e7d38bd7374cbb58
app.url_map.strict_slashes = False
db = SQLAlchemy(app)
login_manager = LoginManager()
login_manager.setup_app(app)
# set up configs
# Setting ASSETS_DEBUG=True makes debugging easier by NOT minimizing the assets.
# Production should have ASSETS_DEBUG=False
# ASSETS_DEBUG=True is the default
app.config["ASSETS_DEBUG"] = (os.getenv("ASSETS_DEBUG", "True") == "True")
app.config["SQLALCHEMY_DATABASE_URI"] = os.getenv("DATABASE_URL")
app.config["SECRET_KEY"] = os.getenv("SECRET_KEY")
# set up views
from totalimpactwebapp import views
|
97940ed6ddd7d50feb47a932be096be5b223b1f0
|
assassins/assassins/views.py
|
assassins/assassins/views.py
|
from django.shortcuts import render, redirect
from django.contrib.auth import views as auth_views
# Create your views here.
def index(request):
pass
def login(request, **kwargs):
if request.user.is_authenticated():
return redirect('index')
else:
return auth_views.login(request)
|
from django.shortcuts import render, redirect
from django.contrib.auth import views as auth_views
from django.views.decorators.http import require_POST
# Create your views here.
def index(request):
pass
@require_POST
def login(request, **kwargs):
if request.user.is_authenticated():
return redirect('index')
else:
return auth_views.login(request)
|
Modify login view to be a post endpoint
|
Modify login view to be a post endpoint
|
Python
|
mit
|
Squa256/assassins,bobandbetty/assassins,bobandbetty/assassins,bobandbetty/assassins,Squa256/assassins,Squa256/assassins
|
from django.shortcuts import render, redirect
from django.contrib.auth import views as auth_views
# Create your views here.
def index(request):
pass
def login(request, **kwargs):
if request.user.is_authenticated():
return redirect('index')
else:
return auth_views.login(request)
Modify login view to be a post endpoint
|
from django.shortcuts import render, redirect
from django.contrib.auth import views as auth_views
from django.views.decorators.http import require_POST
# Create your views here.
def index(request):
    # Placeholder view -- the landing page has not been implemented yet,
    # so this returns None (Django will error if it is actually routed to).
    pass
@require_POST
def login(request, **kwargs):
    """Handle a POST-only login attempt.

    Users who are already authenticated are bounced straight to the
    index page; everyone else is handed off to Django's stock auth
    login view.
    """
    if not request.user.is_authenticated():
        return auth_views.login(request)
    return redirect('index')
|
<commit_before>from django.shortcuts import render, redirect
from django.contrib.auth import views as auth_views
# Create your views here.
def index(request):
pass
def login(request, **kwargs):
if request.user.is_authenticated():
return redirect('index')
else:
return auth_views.login(request)
<commit_msg>Modify login view to be a post endpoint<commit_after>
|
from django.shortcuts import render, redirect
from django.contrib.auth import views as auth_views
from django.views.decorators.http import require_POST
# Create your views here.
def index(request):
pass
@require_POST
def login(request, **kwargs):
if request.user.is_authenticated():
return redirect('index')
else:
return auth_views.login(request)
|
from django.shortcuts import render, redirect
from django.contrib.auth import views as auth_views
# Create your views here.
def index(request):
pass
def login(request, **kwargs):
if request.user.is_authenticated():
return redirect('index')
else:
return auth_views.login(request)
Modify login view to be a post endpointfrom django.shortcuts import render, redirect
from django.contrib.auth import views as auth_views
from django.views.decorators.http import require_POST
# Create your views here.
def index(request):
pass
@require_POST
def login(request, **kwargs):
if request.user.is_authenticated():
return redirect('index')
else:
return auth_views.login(request)
|
<commit_before>from django.shortcuts import render, redirect
from django.contrib.auth import views as auth_views
# Create your views here.
def index(request):
pass
def login(request, **kwargs):
if request.user.is_authenticated():
return redirect('index')
else:
return auth_views.login(request)
<commit_msg>Modify login view to be a post endpoint<commit_after>from django.shortcuts import render, redirect
from django.contrib.auth import views as auth_views
from django.views.decorators.http import require_POST
# Create your views here.
def index(request):
pass
@require_POST
def login(request, **kwargs):
if request.user.is_authenticated():
return redirect('index')
else:
return auth_views.login(request)
|
da990bff61c0088f239defac486da1303f97c08a
|
app/admin/routes.py
|
app/admin/routes.py
|
from flask import render_template, redirect, url_for, flash, request
from flask.ext.login import login_required, current_user
from . import admin
from .forms import ProfileForm
from .. import db
from ..models import User
@admin.route('/')
@login_required
def index():
return render_template('admin/user.html', user=current_user)
@admin.route('/edit_user', methods=['GET', 'POST'])
@login_required
def edit_user():
form = ProfileForm()
if form.validate_on_submit():
current_user.name = form.name.data
current_user.location = form.location.data
current_user.bio = form.bio.data
db.session.add(current_user._get_current_object())
db.session.commit()
flash("Síðan hefur verið uppfærð")
return redirect(url_for('admin.index'))
form.name.data = current_user.name
form.location.data = current_user.location
form.bio.data = current_user.bio
return render_template('admin/edit_user.html', form=form)
|
from flask import render_template, redirect, url_for, flash, request
from flask.ext.login import login_required, current_user
from . import admin
from .forms import ProfileForm
from .. import db
from ..models import User
@admin.route('/')
@login_required
def index():
    """Render the admin landing page for the signed-in user."""
    context = {'user': current_user}
    return render_template('admin/user.html', **context)
@admin.route('/edit_user', methods=['GET', 'POST'])
@login_required
def edit_user():
    """Show and process the profile-editing form for the signed-in user.

    GET renders the form pre-filled with the user's current profile;
    a valid POST persists the changes and redirects back to the admin
    index with a (Icelandic) confirmation flash message.
    """
    form = ProfileForm()
    if form.validate_on_submit():
        current_user.name = form.name.data
        current_user.location = form.location.data
        current_user.bio = form.bio.data
        # current_user is a proxy; unwrap it before handing it to the
        # SQLAlchemy session.
        db.session.add(current_user._get_current_object())
        db.session.commit()
        flash("Síðan hefur verið uppfærð")
        return redirect(url_for('admin.index'))
    if request.method == 'GET':
        # Pre-fill only on GET: the original unconditional pre-fill also
        # ran after a *failed* POST validation, silently overwriting the
        # user's submitted input before re-rendering the form.
        form.name.data = current_user.name
        form.location.data = current_user.location
        form.bio.data = current_user.bio
    return render_template('admin/edit_user.html', form=form)
@admin.route('/news')
@login_required
def news():
    """Render the admin news page (template only, no extra context)."""
    return render_template('admin/news.html')
|
Add a route to admin/news
|
Add a route to admin/news
|
Python
|
mit
|
finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is
|
from flask import render_template, redirect, url_for, flash, request
from flask.ext.login import login_required, current_user
from . import admin
from .forms import ProfileForm
from .. import db
from ..models import User
@admin.route('/')
@login_required
def index():
return render_template('admin/user.html', user=current_user)
@admin.route('/edit_user', methods=['GET', 'POST'])
@login_required
def edit_user():
form = ProfileForm()
if form.validate_on_submit():
current_user.name = form.name.data
current_user.location = form.location.data
current_user.bio = form.bio.data
db.session.add(current_user._get_current_object())
db.session.commit()
flash("Síðan hefur verið uppfærð")
return redirect(url_for('admin.index'))
form.name.data = current_user.name
form.location.data = current_user.location
form.bio.data = current_user.bio
return render_template('admin/edit_user.html', form=form)
Add a route to admin/news
|
from flask import render_template, redirect, url_for, flash, request
from flask.ext.login import login_required, current_user
from . import admin
from .forms import ProfileForm
from .. import db
from ..models import User
@admin.route('/')
@login_required
def index():
return render_template('admin/user.html', user=current_user)
@admin.route('/edit_user', methods=['GET', 'POST'])
@login_required
def edit_user():
form = ProfileForm()
if form.validate_on_submit():
current_user.name = form.name.data
current_user.location = form.location.data
current_user.bio = form.bio.data
db.session.add(current_user._get_current_object())
db.session.commit()
flash("Síðan hefur verið uppfærð")
return redirect(url_for('admin.index'))
form.name.data = current_user.name
form.location.data = current_user.location
form.bio.data = current_user.bio
return render_template('admin/edit_user.html', form=form)
@admin.route('/news')
@login_required
def news():
return render_template('admin/news.html')
|
<commit_before>from flask import render_template, redirect, url_for, flash, request
from flask.ext.login import login_required, current_user
from . import admin
from .forms import ProfileForm
from .. import db
from ..models import User
@admin.route('/')
@login_required
def index():
return render_template('admin/user.html', user=current_user)
@admin.route('/edit_user', methods=['GET', 'POST'])
@login_required
def edit_user():
form = ProfileForm()
if form.validate_on_submit():
current_user.name = form.name.data
current_user.location = form.location.data
current_user.bio = form.bio.data
db.session.add(current_user._get_current_object())
db.session.commit()
flash("Síðan hefur verið uppfærð")
return redirect(url_for('admin.index'))
form.name.data = current_user.name
form.location.data = current_user.location
form.bio.data = current_user.bio
return render_template('admin/edit_user.html', form=form)
<commit_msg>Add a route to admin/news<commit_after>
|
from flask import render_template, redirect, url_for, flash, request
from flask.ext.login import login_required, current_user
from . import admin
from .forms import ProfileForm
from .. import db
from ..models import User
@admin.route('/')
@login_required
def index():
return render_template('admin/user.html', user=current_user)
@admin.route('/edit_user', methods=['GET', 'POST'])
@login_required
def edit_user():
form = ProfileForm()
if form.validate_on_submit():
current_user.name = form.name.data
current_user.location = form.location.data
current_user.bio = form.bio.data
db.session.add(current_user._get_current_object())
db.session.commit()
flash("Síðan hefur verið uppfærð")
return redirect(url_for('admin.index'))
form.name.data = current_user.name
form.location.data = current_user.location
form.bio.data = current_user.bio
return render_template('admin/edit_user.html', form=form)
@admin.route('/news')
@login_required
def news():
return render_template('admin/news.html')
|
from flask import render_template, redirect, url_for, flash, request
from flask.ext.login import login_required, current_user
from . import admin
from .forms import ProfileForm
from .. import db
from ..models import User
@admin.route('/')
@login_required
def index():
return render_template('admin/user.html', user=current_user)
@admin.route('/edit_user', methods=['GET', 'POST'])
@login_required
def edit_user():
form = ProfileForm()
if form.validate_on_submit():
current_user.name = form.name.data
current_user.location = form.location.data
current_user.bio = form.bio.data
db.session.add(current_user._get_current_object())
db.session.commit()
flash("Síðan hefur verið uppfærð")
return redirect(url_for('admin.index'))
form.name.data = current_user.name
form.location.data = current_user.location
form.bio.data = current_user.bio
return render_template('admin/edit_user.html', form=form)
Add a route to admin/newsfrom flask import render_template, redirect, url_for, flash, request
from flask.ext.login import login_required, current_user
from . import admin
from .forms import ProfileForm
from .. import db
from ..models import User
@admin.route('/')
@login_required
def index():
return render_template('admin/user.html', user=current_user)
@admin.route('/edit_user', methods=['GET', 'POST'])
@login_required
def edit_user():
form = ProfileForm()
if form.validate_on_submit():
current_user.name = form.name.data
current_user.location = form.location.data
current_user.bio = form.bio.data
db.session.add(current_user._get_current_object())
db.session.commit()
flash("Síðan hefur verið uppfærð")
return redirect(url_for('admin.index'))
form.name.data = current_user.name
form.location.data = current_user.location
form.bio.data = current_user.bio
return render_template('admin/edit_user.html', form=form)
@admin.route('/news')
@login_required
def news():
return render_template('admin/news.html')
|
<commit_before>from flask import render_template, redirect, url_for, flash, request
from flask.ext.login import login_required, current_user
from . import admin
from .forms import ProfileForm
from .. import db
from ..models import User
@admin.route('/')
@login_required
def index():
return render_template('admin/user.html', user=current_user)
@admin.route('/edit_user', methods=['GET', 'POST'])
@login_required
def edit_user():
form = ProfileForm()
if form.validate_on_submit():
current_user.name = form.name.data
current_user.location = form.location.data
current_user.bio = form.bio.data
db.session.add(current_user._get_current_object())
db.session.commit()
flash("Síðan hefur verið uppfærð")
return redirect(url_for('admin.index'))
form.name.data = current_user.name
form.location.data = current_user.location
form.bio.data = current_user.bio
return render_template('admin/edit_user.html', form=form)
<commit_msg>Add a route to admin/news<commit_after>from flask import render_template, redirect, url_for, flash, request
from flask.ext.login import login_required, current_user
from . import admin
from .forms import ProfileForm
from .. import db
from ..models import User
@admin.route('/')
@login_required
def index():
return render_template('admin/user.html', user=current_user)
@admin.route('/edit_user', methods=['GET', 'POST'])
@login_required
def edit_user():
form = ProfileForm()
if form.validate_on_submit():
current_user.name = form.name.data
current_user.location = form.location.data
current_user.bio = form.bio.data
db.session.add(current_user._get_current_object())
db.session.commit()
flash("Síðan hefur verið uppfærð")
return redirect(url_for('admin.index'))
form.name.data = current_user.name
form.location.data = current_user.location
form.bio.data = current_user.bio
return render_template('admin/edit_user.html', form=form)
@admin.route('/news')
@login_required
def news():
return render_template('admin/news.html')
|
8c2b5fd2813c6d89fb8ac71c08760a8d799c31a2
|
dev_config.py
|
dev_config.py
|
DEBUG = True
BOOTSTRAP_USE_MINIFIED = False
BOOTSTRAP_USE_CDN = False
BOOTSTRAP_FONTAWESOME = True
SECRET_KEY = "\xdb\xf1\xf6\x14\x88\xd4i\xda\xbc/E'4\x7f`iz\x98r\xb9s\x1c\xca\xcd"
SQLALCHEMY_DATABASE_URI = 'sqlite:///datamart.db'
|
DEBUG = True
BOOTSTRAP_USE_MINIFIED = False
BOOTSTRAP_USE_CDN = False
BOOTSTRAP_FONTAWESOME = True
SECRET_KEY = "\xdb\xf1\xf6\x14\x88\xd4i\xda\xbc/E'4\x7f`iz\x98r\xb9s\x1c\xca\xcd"
SQLALCHEMY_DATABASE_URI = 'postgresql+psycopg2://datamart:datamart@localhost/datamart'
|
Correct DB URI for postgres and hstore.
|
Correct DB URI for postgres and hstore.
|
Python
|
mit
|
msscully/datamart,msscully/datamart,msscully/datamart
|
DEBUG = True
BOOTSTRAP_USE_MINIFIED = False
BOOTSTRAP_USE_CDN = False
BOOTSTRAP_FONTAWESOME = True
SECRET_KEY = "\xdb\xf1\xf6\x14\x88\xd4i\xda\xbc/E'4\x7f`iz\x98r\xb9s\x1c\xca\xcd"
SQLALCHEMY_DATABASE_URI = 'sqlite:///datamart.db'
Correct DB URI for postgres and hstore.
|
DEBUG = True
BOOTSTRAP_USE_MINIFIED = False
BOOTSTRAP_USE_CDN = False
BOOTSTRAP_FONTAWESOME = True
SECRET_KEY = "\xdb\xf1\xf6\x14\x88\xd4i\xda\xbc/E'4\x7f`iz\x98r\xb9s\x1c\xca\xcd"
SQLALCHEMY_DATABASE_URI = 'postgresql+psycopg2://datamart:datamart@localhost/datamart'
|
<commit_before>DEBUG = True
BOOTSTRAP_USE_MINIFIED = False
BOOTSTRAP_USE_CDN = False
BOOTSTRAP_FONTAWESOME = True
SECRET_KEY = "\xdb\xf1\xf6\x14\x88\xd4i\xda\xbc/E'4\x7f`iz\x98r\xb9s\x1c\xca\xcd"
SQLALCHEMY_DATABASE_URI = 'sqlite:///datamart.db'
<commit_msg>Correct DB URI for postgres and hstore.<commit_after>
|
DEBUG = True
BOOTSTRAP_USE_MINIFIED = False
BOOTSTRAP_USE_CDN = False
BOOTSTRAP_FONTAWESOME = True
SECRET_KEY = "\xdb\xf1\xf6\x14\x88\xd4i\xda\xbc/E'4\x7f`iz\x98r\xb9s\x1c\xca\xcd"
SQLALCHEMY_DATABASE_URI = 'postgresql+psycopg2://datamart:datamart@localhost/datamart'
|
DEBUG = True
BOOTSTRAP_USE_MINIFIED = False
BOOTSTRAP_USE_CDN = False
BOOTSTRAP_FONTAWESOME = True
SECRET_KEY = "\xdb\xf1\xf6\x14\x88\xd4i\xda\xbc/E'4\x7f`iz\x98r\xb9s\x1c\xca\xcd"
SQLALCHEMY_DATABASE_URI = 'sqlite:///datamart.db'
Correct DB URI for postgres and hstore.DEBUG = True
BOOTSTRAP_USE_MINIFIED = False
BOOTSTRAP_USE_CDN = False
BOOTSTRAP_FONTAWESOME = True
SECRET_KEY = "\xdb\xf1\xf6\x14\x88\xd4i\xda\xbc/E'4\x7f`iz\x98r\xb9s\x1c\xca\xcd"
SQLALCHEMY_DATABASE_URI = 'postgresql+psycopg2://datamart:datamart@localhost/datamart'
|
<commit_before>DEBUG = True
BOOTSTRAP_USE_MINIFIED = False
BOOTSTRAP_USE_CDN = False
BOOTSTRAP_FONTAWESOME = True
SECRET_KEY = "\xdb\xf1\xf6\x14\x88\xd4i\xda\xbc/E'4\x7f`iz\x98r\xb9s\x1c\xca\xcd"
SQLALCHEMY_DATABASE_URI = 'sqlite:///datamart.db'
<commit_msg>Correct DB URI for postgres and hstore.<commit_after>DEBUG = True
BOOTSTRAP_USE_MINIFIED = False
BOOTSTRAP_USE_CDN = False
BOOTSTRAP_FONTAWESOME = True
SECRET_KEY = "\xdb\xf1\xf6\x14\x88\xd4i\xda\xbc/E'4\x7f`iz\x98r\xb9s\x1c\xca\xcd"
SQLALCHEMY_DATABASE_URI = 'postgresql+psycopg2://datamart:datamart@localhost/datamart'
|
992525f8b371582598fa915128eccd3528e427a6
|
main.py
|
main.py
|
# coding: utf-8
from flask import Flask, abort, request, redirect, render_template, url_for
from log import log
import util
app = Flask(__name__)
app.config.from_pyfile('config.cfg', silent=True)
@app.route('/')
def home():
log.info('Fetching demo gist.')
gist_id = '5123482'
gist = util.get_gist_by_id(gist_id)
source = util.get_slides_source_from_gist(gist)
return render_template('index.html', gist_id=gist_id, source=source)
@app.route('/s/', methods=['GET'])
@app.route('/s/<gist_id>/', methods=['GET'])
def play_gist(gist_id=None):
# Fix url to a restful style.
if gist_id is None:
if 'gist_id' in request.args:
return redirect(url_for('play_gist', gist_id=request.args['gist_id']))
else:
abort(404)
else:
log.info('Creating slides from gist: %s' % gist_id)
gist = util.get_gist_by_id(gist_id)
if gist is None:
abort(404)
title = gist.get('description', 'Remarks')
source = util.get_slides_source_from_gist(gist)
return render_template('slideshow.html', title=title, source=source)
|
# coding: utf-8
from flask import Flask, abort, request, redirect, render_template, url_for
from log import log
import util
import os
app = Flask(__name__)
app.config.from_pyfile(os.path.join(os.path.dirname(__file__), 'config.cfg'), silent=True)
@app.route('/')
def home():
    """Serve the landing page, seeded with the demo gist's slides."""
    log.info('Fetching demo gist.')
    demo_id = '5123482'
    demo_gist = util.get_gist_by_id(demo_id)
    slide_source = util.get_slides_source_from_gist(demo_gist)
    return render_template('index.html', gist_id=demo_id, source=slide_source)
@app.route('/s/', methods=['GET'])
@app.route('/s/<gist_id>/', methods=['GET'])
def play_gist(gist_id=None):
    """Render a slideshow for the given gist.

    A query-string request (/s/?gist_id=X) is redirected to the RESTful
    /s/X/ form; a missing or unknown gist yields a 404.
    """
    if gist_id is None:
        # Fix url to a restful style.
        if 'gist_id' not in request.args:
            abort(404)
        return redirect(url_for('play_gist', gist_id=request.args['gist_id']))
    log.info('Creating slides from gist: %s' % gist_id)
    gist = util.get_gist_by_id(gist_id)
    if gist is None:
        abort(404)
    title = gist.get('description', 'Remarks')
    source = util.get_slides_source_from_gist(gist)
    return render_template('slideshow.html', title=title, source=source)
|
Fix config file path error on the server
|
Fix config file path error on the server
|
Python
|
mit
|
moreati/remarks,greatghoul/remarks,greatghoul/remarks,moreati/remarks,greatghoul/remarks,moreati/remarks
|
# coding: utf-8
from flask import Flask, abort, request, redirect, render_template, url_for
from log import log
import util
app = Flask(__name__)
app.config.from_pyfile('config.cfg', silent=True)
@app.route('/')
def home():
log.info('Fetching demo gist.')
gist_id = '5123482'
gist = util.get_gist_by_id(gist_id)
source = util.get_slides_source_from_gist(gist)
return render_template('index.html', gist_id=gist_id, source=source)
@app.route('/s/', methods=['GET'])
@app.route('/s/<gist_id>/', methods=['GET'])
def play_gist(gist_id=None):
# Fix url to a restful style.
if gist_id is None:
if 'gist_id' in request.args:
return redirect(url_for('play_gist', gist_id=request.args['gist_id']))
else:
abort(404)
else:
log.info('Creating slides from gist: %s' % gist_id)
gist = util.get_gist_by_id(gist_id)
if gist is None:
abort(404)
title = gist.get('description', 'Remarks')
source = util.get_slides_source_from_gist(gist)
return render_template('slideshow.html', title=title, source=source)
Fix config file path error on the server
|
# coding: utf-8
from flask import Flask, abort, request, redirect, render_template, url_for
from log import log
import util
import os
app = Flask(__name__)
app.config.from_pyfile(os.path.join(os.path.dirname(__file__), 'config.cfg'), silent=True)
@app.route('/')
def home():
log.info('Fetching demo gist.')
gist_id = '5123482'
gist = util.get_gist_by_id(gist_id)
source = util.get_slides_source_from_gist(gist)
return render_template('index.html', gist_id=gist_id, source=source)
@app.route('/s/', methods=['GET'])
@app.route('/s/<gist_id>/', methods=['GET'])
def play_gist(gist_id=None):
# Fix url to a restful style.
if gist_id is None:
if 'gist_id' in request.args:
return redirect(url_for('play_gist', gist_id=request.args['gist_id']))
else:
abort(404)
else:
log.info('Creating slides from gist: %s' % gist_id)
gist = util.get_gist_by_id(gist_id)
if gist is None:
abort(404)
title = gist.get('description', 'Remarks')
source = util.get_slides_source_from_gist(gist)
return render_template('slideshow.html', title=title, source=source)
|
<commit_before># coding: utf-8
from flask import Flask, abort, request, redirect, render_template, url_for
from log import log
import util
app = Flask(__name__)
app.config.from_pyfile('config.cfg', silent=True)
@app.route('/')
def home():
log.info('Fetching demo gist.')
gist_id = '5123482'
gist = util.get_gist_by_id(gist_id)
source = util.get_slides_source_from_gist(gist)
return render_template('index.html', gist_id=gist_id, source=source)
@app.route('/s/', methods=['GET'])
@app.route('/s/<gist_id>/', methods=['GET'])
def play_gist(gist_id=None):
# Fix url to a restful style.
if gist_id is None:
if 'gist_id' in request.args:
return redirect(url_for('play_gist', gist_id=request.args['gist_id']))
else:
abort(404)
else:
log.info('Creating slides from gist: %s' % gist_id)
gist = util.get_gist_by_id(gist_id)
if gist is None:
abort(404)
title = gist.get('description', 'Remarks')
source = util.get_slides_source_from_gist(gist)
return render_template('slideshow.html', title=title, source=source)
<commit_msg>Fix config file path error on the server<commit_after>
|
# coding: utf-8
from flask import Flask, abort, request, redirect, render_template, url_for
from log import log
import util
import os
app = Flask(__name__)
app.config.from_pyfile(os.path.join(os.path.dirname(__file__), 'config.cfg'), silent=True)
@app.route('/')
def home():
log.info('Fetching demo gist.')
gist_id = '5123482'
gist = util.get_gist_by_id(gist_id)
source = util.get_slides_source_from_gist(gist)
return render_template('index.html', gist_id=gist_id, source=source)
@app.route('/s/', methods=['GET'])
@app.route('/s/<gist_id>/', methods=['GET'])
def play_gist(gist_id=None):
# Fix url to a restful style.
if gist_id is None:
if 'gist_id' in request.args:
return redirect(url_for('play_gist', gist_id=request.args['gist_id']))
else:
abort(404)
else:
log.info('Creating slides from gist: %s' % gist_id)
gist = util.get_gist_by_id(gist_id)
if gist is None:
abort(404)
title = gist.get('description', 'Remarks')
source = util.get_slides_source_from_gist(gist)
return render_template('slideshow.html', title=title, source=source)
|
# coding: utf-8
from flask import Flask, abort, request, redirect, render_template, url_for
from log import log
import util
app = Flask(__name__)
app.config.from_pyfile('config.cfg', silent=True)
@app.route('/')
def home():
log.info('Fetching demo gist.')
gist_id = '5123482'
gist = util.get_gist_by_id(gist_id)
source = util.get_slides_source_from_gist(gist)
return render_template('index.html', gist_id=gist_id, source=source)
@app.route('/s/', methods=['GET'])
@app.route('/s/<gist_id>/', methods=['GET'])
def play_gist(gist_id=None):
# Fix url to a restful style.
if gist_id is None:
if 'gist_id' in request.args:
return redirect(url_for('play_gist', gist_id=request.args['gist_id']))
else:
abort(404)
else:
log.info('Creating slides from gist: %s' % gist_id)
gist = util.get_gist_by_id(gist_id)
if gist is None:
abort(404)
title = gist.get('description', 'Remarks')
source = util.get_slides_source_from_gist(gist)
return render_template('slideshow.html', title=title, source=source)
Fix config file path error on the server# coding: utf-8
from flask import Flask, abort, request, redirect, render_template, url_for
from log import log
import util
import os
app = Flask(__name__)
app.config.from_pyfile(os.path.join(os.path.dirname(__file__), 'config.cfg'), silent=True)
@app.route('/')
def home():
log.info('Fetching demo gist.')
gist_id = '5123482'
gist = util.get_gist_by_id(gist_id)
source = util.get_slides_source_from_gist(gist)
return render_template('index.html', gist_id=gist_id, source=source)
@app.route('/s/', methods=['GET'])
@app.route('/s/<gist_id>/', methods=['GET'])
def play_gist(gist_id=None):
# Fix url to a restful style.
if gist_id is None:
if 'gist_id' in request.args:
return redirect(url_for('play_gist', gist_id=request.args['gist_id']))
else:
abort(404)
else:
log.info('Creating slides from gist: %s' % gist_id)
gist = util.get_gist_by_id(gist_id)
if gist is None:
abort(404)
title = gist.get('description', 'Remarks')
source = util.get_slides_source_from_gist(gist)
return render_template('slideshow.html', title=title, source=source)
|
<commit_before># coding: utf-8
from flask import Flask, abort, request, redirect, render_template, url_for
from log import log
import util
app = Flask(__name__)
app.config.from_pyfile('config.cfg', silent=True)
@app.route('/')
def home():
log.info('Fetching demo gist.')
gist_id = '5123482'
gist = util.get_gist_by_id(gist_id)
source = util.get_slides_source_from_gist(gist)
return render_template('index.html', gist_id=gist_id, source=source)
@app.route('/s/', methods=['GET'])
@app.route('/s/<gist_id>/', methods=['GET'])
def play_gist(gist_id=None):
# Fix url to a restful style.
if gist_id is None:
if 'gist_id' in request.args:
return redirect(url_for('play_gist', gist_id=request.args['gist_id']))
else:
abort(404)
else:
log.info('Creating slides from gist: %s' % gist_id)
gist = util.get_gist_by_id(gist_id)
if gist is None:
abort(404)
title = gist.get('description', 'Remarks')
source = util.get_slides_source_from_gist(gist)
return render_template('slideshow.html', title=title, source=source)
<commit_msg>Fix config file path error on the server<commit_after># coding: utf-8
from flask import Flask, abort, request, redirect, render_template, url_for
from log import log
import util
import os
app = Flask(__name__)
app.config.from_pyfile(os.path.join(os.path.dirname(__file__), 'config.cfg'), silent=True)
@app.route('/')
def home():
log.info('Fetching demo gist.')
gist_id = '5123482'
gist = util.get_gist_by_id(gist_id)
source = util.get_slides_source_from_gist(gist)
return render_template('index.html', gist_id=gist_id, source=source)
@app.route('/s/', methods=['GET'])
@app.route('/s/<gist_id>/', methods=['GET'])
def play_gist(gist_id=None):
# Fix url to a restful style.
if gist_id is None:
if 'gist_id' in request.args:
return redirect(url_for('play_gist', gist_id=request.args['gist_id']))
else:
abort(404)
else:
log.info('Creating slides from gist: %s' % gist_id)
gist = util.get_gist_by_id(gist_id)
if gist is None:
abort(404)
title = gist.get('description', 'Remarks')
source = util.get_slides_source_from_gist(gist)
return render_template('slideshow.html', title=title, source=source)
|
2d8b7253445193131d027bd12d3389bbc03858e5
|
massa/__init__.py
|
massa/__init__.py
|
# -*- coding: utf-8 -*-
from flask import Flask, render_template, g
from .container import build
from .web import bp as web
from .api import bp as api
from .middleware import HTTPMethodOverrideMiddleware
def create_app(config=None):
app = Flask('massa')
app.config.from_object(config or 'massa.config.Production')
app.config.from_envvar('MASSA_CONFIG', silent=True)
sl = build(app)
app.register_blueprint(web)
app.register_blueprint(api, url_prefix='/api')
@app.before_request
def globals():
g.sl = sl
app.wsgi_app = HTTPMethodOverrideMiddleware(app.wsgi_app)
return app
|
# -*- coding: utf-8 -*-
from flask import Flask, g
from .container import build
from .web import bp as web
from .api import bp as api
from .middleware import HTTPMethodOverrideMiddleware
def create_app(config=None):
app = Flask('massa')
app.config.from_object(config or 'massa.config.Production')
app.config.from_envvar('MASSA_CONFIG', silent=True)
sl = build(app)
app.register_blueprint(web)
app.register_blueprint(api, url_prefix='/api')
@app.before_request
def globals():
g.sl = sl
app.wsgi_app = HTTPMethodOverrideMiddleware(app.wsgi_app)
return app
|
Remove unused render_template from import statement.
|
Remove unused render_template from import statement.
|
Python
|
mit
|
jaapverloop/massa
|
# -*- coding: utf-8 -*-
from flask import Flask, render_template, g
from .container import build
from .web import bp as web
from .api import bp as api
from .middleware import HTTPMethodOverrideMiddleware
def create_app(config=None):
app = Flask('massa')
app.config.from_object(config or 'massa.config.Production')
app.config.from_envvar('MASSA_CONFIG', silent=True)
sl = build(app)
app.register_blueprint(web)
app.register_blueprint(api, url_prefix='/api')
@app.before_request
def globals():
g.sl = sl
app.wsgi_app = HTTPMethodOverrideMiddleware(app.wsgi_app)
return app
Remove unused render_template from import statement.
|
# -*- coding: utf-8 -*-
from flask import Flask, g
from .container import build
from .web import bp as web
from .api import bp as api
from .middleware import HTTPMethodOverrideMiddleware
def create_app(config=None):
app = Flask('massa')
app.config.from_object(config or 'massa.config.Production')
app.config.from_envvar('MASSA_CONFIG', silent=True)
sl = build(app)
app.register_blueprint(web)
app.register_blueprint(api, url_prefix='/api')
@app.before_request
def globals():
g.sl = sl
app.wsgi_app = HTTPMethodOverrideMiddleware(app.wsgi_app)
return app
|
<commit_before># -*- coding: utf-8 -*-
from flask import Flask, render_template, g
from .container import build
from .web import bp as web
from .api import bp as api
from .middleware import HTTPMethodOverrideMiddleware
def create_app(config=None):
app = Flask('massa')
app.config.from_object(config or 'massa.config.Production')
app.config.from_envvar('MASSA_CONFIG', silent=True)
sl = build(app)
app.register_blueprint(web)
app.register_blueprint(api, url_prefix='/api')
@app.before_request
def globals():
g.sl = sl
app.wsgi_app = HTTPMethodOverrideMiddleware(app.wsgi_app)
return app
<commit_msg>Remove unused render_template from import statement.<commit_after>
|
# -*- coding: utf-8 -*-
from flask import Flask, g
from .container import build
from .web import bp as web
from .api import bp as api
from .middleware import HTTPMethodOverrideMiddleware
def create_app(config=None):
app = Flask('massa')
app.config.from_object(config or 'massa.config.Production')
app.config.from_envvar('MASSA_CONFIG', silent=True)
sl = build(app)
app.register_blueprint(web)
app.register_blueprint(api, url_prefix='/api')
@app.before_request
def globals():
g.sl = sl
app.wsgi_app = HTTPMethodOverrideMiddleware(app.wsgi_app)
return app
|
# -*- coding: utf-8 -*-
from flask import Flask, render_template, g
from .container import build
from .web import bp as web
from .api import bp as api
from .middleware import HTTPMethodOverrideMiddleware
def create_app(config=None):
app = Flask('massa')
app.config.from_object(config or 'massa.config.Production')
app.config.from_envvar('MASSA_CONFIG', silent=True)
sl = build(app)
app.register_blueprint(web)
app.register_blueprint(api, url_prefix='/api')
@app.before_request
def globals():
g.sl = sl
app.wsgi_app = HTTPMethodOverrideMiddleware(app.wsgi_app)
return app
Remove unused render_template from import statement.# -*- coding: utf-8 -*-
from flask import Flask, g
from .container import build
from .web import bp as web
from .api import bp as api
from .middleware import HTTPMethodOverrideMiddleware
def create_app(config=None):
app = Flask('massa')
app.config.from_object(config or 'massa.config.Production')
app.config.from_envvar('MASSA_CONFIG', silent=True)
sl = build(app)
app.register_blueprint(web)
app.register_blueprint(api, url_prefix='/api')
@app.before_request
def globals():
g.sl = sl
app.wsgi_app = HTTPMethodOverrideMiddleware(app.wsgi_app)
return app
|
<commit_before># -*- coding: utf-8 -*-
from flask import Flask, render_template, g
from .container import build
from .web import bp as web
from .api import bp as api
from .middleware import HTTPMethodOverrideMiddleware
def create_app(config=None):
app = Flask('massa')
app.config.from_object(config or 'massa.config.Production')
app.config.from_envvar('MASSA_CONFIG', silent=True)
sl = build(app)
app.register_blueprint(web)
app.register_blueprint(api, url_prefix='/api')
@app.before_request
def globals():
g.sl = sl
app.wsgi_app = HTTPMethodOverrideMiddleware(app.wsgi_app)
return app
<commit_msg>Remove unused render_template from import statement.<commit_after># -*- coding: utf-8 -*-
from flask import Flask, g
from .container import build
from .web import bp as web
from .api import bp as api
from .middleware import HTTPMethodOverrideMiddleware
def create_app(config=None):
app = Flask('massa')
app.config.from_object(config or 'massa.config.Production')
app.config.from_envvar('MASSA_CONFIG', silent=True)
sl = build(app)
app.register_blueprint(web)
app.register_blueprint(api, url_prefix='/api')
@app.before_request
def globals():
g.sl = sl
app.wsgi_app = HTTPMethodOverrideMiddleware(app.wsgi_app)
return app
|
4c258d04f8859632a1d7728a143b6a60e37199cf
|
plasmapy/utils/tests/test_pytest_helpers.py
|
plasmapy/utils/tests/test_pytest_helpers.py
|
import pytest
from ..pytest_helpers import (
_function_call_string,
run_test_of_function,
)
def f(*args, **kwargs):
return None
# f, args, kwargs, expected
call_string_table = [
(f, (), {}, "f()"),
(f, (1), {}, "f(1)"),
(f, ('x'), {}, "f('x')"),
(f, (1, 'b', {}), {}, "f(1, 'b', {})"),
(f, (), {'kw': 1}, "f(kw=1)"),
(f, (), {'x': 'c'}, "f(x='c')"),
(f, (1, 'b'), {'b': 42, 'R2': 'D2'}, "f(1, 'b', b=42, R2='D2')"),
]
@pytest.mark.parametrize("f,args,kwargs,expected", call_string_table)
def test__function_call_string(f, args, kwargs, expected):
"""Tests that _function_call_string returns a string that is
equivalent to the function call."""
assert expected == _function_call_string(f, args, kwargs)
|
Add tests for function to reproduce a call string
|
Add tests for function to reproduce a call string
|
Python
|
bsd-3-clause
|
StanczakDominik/PlasmaPy
|
Add tests for function to reproduce a call string
|
import pytest
from ..pytest_helpers import (
_function_call_string,
run_test_of_function,
)
def f(*args, **kwargs):
return None
# f, args, kwargs, expected
call_string_table = [
(f, (), {}, "f()"),
(f, (1), {}, "f(1)"),
(f, ('x'), {}, "f('x')"),
(f, (1, 'b', {}), {}, "f(1, 'b', {})"),
(f, (), {'kw': 1}, "f(kw=1)"),
(f, (), {'x': 'c'}, "f(x='c')"),
(f, (1, 'b'), {'b': 42, 'R2': 'D2'}, "f(1, 'b', b=42, R2='D2')"),
]
@pytest.mark.parametrize("f,args,kwargs,expected", call_string_table)
def test__function_call_string(f, args, kwargs, expected):
"""Tests that _function_call_string returns a string that is
equivalent to the function call."""
assert expected == _function_call_string(f, args, kwargs)
|
<commit_before><commit_msg>Add tests for function to reproduce a call string<commit_after>
|
import pytest
from ..pytest_helpers import (
_function_call_string,
run_test_of_function,
)
def f(*args, **kwargs):
return None
# f, args, kwargs, expected
call_string_table = [
(f, (), {}, "f()"),
(f, (1), {}, "f(1)"),
(f, ('x'), {}, "f('x')"),
(f, (1, 'b', {}), {}, "f(1, 'b', {})"),
(f, (), {'kw': 1}, "f(kw=1)"),
(f, (), {'x': 'c'}, "f(x='c')"),
(f, (1, 'b'), {'b': 42, 'R2': 'D2'}, "f(1, 'b', b=42, R2='D2')"),
]
@pytest.mark.parametrize("f,args,kwargs,expected", call_string_table)
def test__function_call_string(f, args, kwargs, expected):
"""Tests that _function_call_string returns a string that is
equivalent to the function call."""
assert expected == _function_call_string(f, args, kwargs)
|
Add tests for function to reproduce a call stringimport pytest
from ..pytest_helpers import (
_function_call_string,
run_test_of_function,
)
def f(*args, **kwargs):
return None
# f, args, kwargs, expected
call_string_table = [
(f, (), {}, "f()"),
(f, (1), {}, "f(1)"),
(f, ('x'), {}, "f('x')"),
(f, (1, 'b', {}), {}, "f(1, 'b', {})"),
(f, (), {'kw': 1}, "f(kw=1)"),
(f, (), {'x': 'c'}, "f(x='c')"),
(f, (1, 'b'), {'b': 42, 'R2': 'D2'}, "f(1, 'b', b=42, R2='D2')"),
]
@pytest.mark.parametrize("f,args,kwargs,expected", call_string_table)
def test__function_call_string(f, args, kwargs, expected):
"""Tests that _function_call_string returns a string that is
equivalent to the function call."""
assert expected == _function_call_string(f, args, kwargs)
|
<commit_before><commit_msg>Add tests for function to reproduce a call string<commit_after>import pytest
from ..pytest_helpers import (
_function_call_string,
run_test_of_function,
)
def f(*args, **kwargs):
return None
# f, args, kwargs, expected
call_string_table = [
(f, (), {}, "f()"),
(f, (1), {}, "f(1)"),
(f, ('x'), {}, "f('x')"),
(f, (1, 'b', {}), {}, "f(1, 'b', {})"),
(f, (), {'kw': 1}, "f(kw=1)"),
(f, (), {'x': 'c'}, "f(x='c')"),
(f, (1, 'b'), {'b': 42, 'R2': 'D2'}, "f(1, 'b', b=42, R2='D2')"),
]
@pytest.mark.parametrize("f,args,kwargs,expected", call_string_table)
def test__function_call_string(f, args, kwargs, expected):
"""Tests that _function_call_string returns a string that is
equivalent to the function call."""
assert expected == _function_call_string(f, args, kwargs)
|
|
a1e1340285e190f5b0cc3cce2c4155cb313df6a7
|
wafer/schedule/serializers.py
|
wafer/schedule/serializers.py
|
from rest_framework import serializers
from wafer.talks.models import Talk
from wafer.pages.models import Page
from wafer.schedule.models import ScheduleItem, Venue, Slot
class ScheduleItemSerializer(serializers.HyperlinkedModelSerializer):
page = serializers.PrimaryKeyRelatedField(
allow_null=True, queryset=Page.objects.all())
talk = serializers.PrimaryKeyRelatedField(
allow_null=True, queryset=Talk.objects.all())
venue = serializers.PrimaryKeyRelatedField(
allow_null=True, queryset=Venue.objects.all())
slots = serializers.PrimaryKeyRelatedField(
allow_null=True, many=True, queryset=Slot.objects.all())
class Meta:
model = ScheduleItem
fields = ('id', 'talk', 'page', 'venue', 'slots')
def create(self, validated_data):
venue_id = validated_data['venue']
slots = validated_data['slots']
talk = validated_data.get('talk')
page = validated_data.get('page')
try:
existing_schedule_item = ScheduleItem.objects.get(
venue_id=venue_id, slots__in=slots)
except ScheduleItem.DoesNotExist:
pass
else:
existing_schedule_item.talk = talk
existing_schedule_item.page = page
existing_schedule_item.slots = slots
existing_schedule_item.save()
return existing_schedule_item
return super(ScheduleItemSerializer, self).create(validated_data)
|
from rest_framework import serializers
from wafer.talks.models import Talk
from wafer.pages.models import Page
from wafer.schedule.models import ScheduleItem, Venue, Slot
class ScheduleItemSerializer(serializers.HyperlinkedModelSerializer):
page = serializers.PrimaryKeyRelatedField(
allow_null=True, queryset=Page.objects.all())
talk = serializers.PrimaryKeyRelatedField(
allow_null=True, queryset=Talk.objects.all())
venue = serializers.PrimaryKeyRelatedField(
allow_null=True, queryset=Venue.objects.all())
slots = serializers.PrimaryKeyRelatedField(
allow_null=True, many=True, queryset=Slot.objects.all())
class Meta:
model = ScheduleItem
fields = ('id', 'talk', 'page', 'venue', 'slots')
def create(self, validated_data):
venue_id = validated_data['venue']
slots = validated_data['slots']
talk = validated_data.get('talk')
page = validated_data.get('page')
try:
existing_schedule_item = ScheduleItem.objects.get(
venue_id=venue_id, slots__in=slots)
except ScheduleItem.DoesNotExist:
pass
else:
existing_schedule_item.talk = talk
existing_schedule_item.page = page
existing_schedule_item.slots = slots
# Clear any existing details that aren't editable by the
# schedule edit view
existing_schedule_item.details = ''
existing_schedule_item.notes = ''
existing_schedule_item.css_class = ''
existing_schedule_item.expand = False
existing_schedule_item.save()
return existing_schedule_item
return super(ScheduleItemSerializer, self).create(validated_data)
|
Clear extra fields when changing items through the schedule view
|
Clear extra fields when changing items through the schedule view
|
Python
|
isc
|
CTPUG/wafer,CTPUG/wafer,CTPUG/wafer,CTPUG/wafer
|
from rest_framework import serializers
from wafer.talks.models import Talk
from wafer.pages.models import Page
from wafer.schedule.models import ScheduleItem, Venue, Slot
class ScheduleItemSerializer(serializers.HyperlinkedModelSerializer):
page = serializers.PrimaryKeyRelatedField(
allow_null=True, queryset=Page.objects.all())
talk = serializers.PrimaryKeyRelatedField(
allow_null=True, queryset=Talk.objects.all())
venue = serializers.PrimaryKeyRelatedField(
allow_null=True, queryset=Venue.objects.all())
slots = serializers.PrimaryKeyRelatedField(
allow_null=True, many=True, queryset=Slot.objects.all())
class Meta:
model = ScheduleItem
fields = ('id', 'talk', 'page', 'venue', 'slots')
def create(self, validated_data):
venue_id = validated_data['venue']
slots = validated_data['slots']
talk = validated_data.get('talk')
page = validated_data.get('page')
try:
existing_schedule_item = ScheduleItem.objects.get(
venue_id=venue_id, slots__in=slots)
except ScheduleItem.DoesNotExist:
pass
else:
existing_schedule_item.talk = talk
existing_schedule_item.page = page
existing_schedule_item.slots = slots
existing_schedule_item.save()
return existing_schedule_item
return super(ScheduleItemSerializer, self).create(validated_data)
Clear extra fields when changing items through the schedule view
|
from rest_framework import serializers
from wafer.talks.models import Talk
from wafer.pages.models import Page
from wafer.schedule.models import ScheduleItem, Venue, Slot
class ScheduleItemSerializer(serializers.HyperlinkedModelSerializer):
page = serializers.PrimaryKeyRelatedField(
allow_null=True, queryset=Page.objects.all())
talk = serializers.PrimaryKeyRelatedField(
allow_null=True, queryset=Talk.objects.all())
venue = serializers.PrimaryKeyRelatedField(
allow_null=True, queryset=Venue.objects.all())
slots = serializers.PrimaryKeyRelatedField(
allow_null=True, many=True, queryset=Slot.objects.all())
class Meta:
model = ScheduleItem
fields = ('id', 'talk', 'page', 'venue', 'slots')
def create(self, validated_data):
venue_id = validated_data['venue']
slots = validated_data['slots']
talk = validated_data.get('talk')
page = validated_data.get('page')
try:
existing_schedule_item = ScheduleItem.objects.get(
venue_id=venue_id, slots__in=slots)
except ScheduleItem.DoesNotExist:
pass
else:
existing_schedule_item.talk = talk
existing_schedule_item.page = page
existing_schedule_item.slots = slots
# Clear any existing details that aren't editable by the
# schedule edit view
existing_schedule_item.details = ''
existing_schedule_item.notes = ''
existing_schedule_item.css_class = ''
existing_schedule_item.expand = False
existing_schedule_item.save()
return existing_schedule_item
return super(ScheduleItemSerializer, self).create(validated_data)
|
<commit_before>from rest_framework import serializers
from wafer.talks.models import Talk
from wafer.pages.models import Page
from wafer.schedule.models import ScheduleItem, Venue, Slot
class ScheduleItemSerializer(serializers.HyperlinkedModelSerializer):
page = serializers.PrimaryKeyRelatedField(
allow_null=True, queryset=Page.objects.all())
talk = serializers.PrimaryKeyRelatedField(
allow_null=True, queryset=Talk.objects.all())
venue = serializers.PrimaryKeyRelatedField(
allow_null=True, queryset=Venue.objects.all())
slots = serializers.PrimaryKeyRelatedField(
allow_null=True, many=True, queryset=Slot.objects.all())
class Meta:
model = ScheduleItem
fields = ('id', 'talk', 'page', 'venue', 'slots')
def create(self, validated_data):
venue_id = validated_data['venue']
slots = validated_data['slots']
talk = validated_data.get('talk')
page = validated_data.get('page')
try:
existing_schedule_item = ScheduleItem.objects.get(
venue_id=venue_id, slots__in=slots)
except ScheduleItem.DoesNotExist:
pass
else:
existing_schedule_item.talk = talk
existing_schedule_item.page = page
existing_schedule_item.slots = slots
existing_schedule_item.save()
return existing_schedule_item
return super(ScheduleItemSerializer, self).create(validated_data)
<commit_msg>Clear extra fields when changing items through the schedule view<commit_after>
|
from rest_framework import serializers
from wafer.talks.models import Talk
from wafer.pages.models import Page
from wafer.schedule.models import ScheduleItem, Venue, Slot
class ScheduleItemSerializer(serializers.HyperlinkedModelSerializer):
page = serializers.PrimaryKeyRelatedField(
allow_null=True, queryset=Page.objects.all())
talk = serializers.PrimaryKeyRelatedField(
allow_null=True, queryset=Talk.objects.all())
venue = serializers.PrimaryKeyRelatedField(
allow_null=True, queryset=Venue.objects.all())
slots = serializers.PrimaryKeyRelatedField(
allow_null=True, many=True, queryset=Slot.objects.all())
class Meta:
model = ScheduleItem
fields = ('id', 'talk', 'page', 'venue', 'slots')
def create(self, validated_data):
venue_id = validated_data['venue']
slots = validated_data['slots']
talk = validated_data.get('talk')
page = validated_data.get('page')
try:
existing_schedule_item = ScheduleItem.objects.get(
venue_id=venue_id, slots__in=slots)
except ScheduleItem.DoesNotExist:
pass
else:
existing_schedule_item.talk = talk
existing_schedule_item.page = page
existing_schedule_item.slots = slots
# Clear any existing details that aren't editable by the
# schedule edit view
existing_schedule_item.details = ''
existing_schedule_item.notes = ''
existing_schedule_item.css_class = ''
existing_schedule_item.expand = False
existing_schedule_item.save()
return existing_schedule_item
return super(ScheduleItemSerializer, self).create(validated_data)
|
from rest_framework import serializers
from wafer.talks.models import Talk
from wafer.pages.models import Page
from wafer.schedule.models import ScheduleItem, Venue, Slot
class ScheduleItemSerializer(serializers.HyperlinkedModelSerializer):
page = serializers.PrimaryKeyRelatedField(
allow_null=True, queryset=Page.objects.all())
talk = serializers.PrimaryKeyRelatedField(
allow_null=True, queryset=Talk.objects.all())
venue = serializers.PrimaryKeyRelatedField(
allow_null=True, queryset=Venue.objects.all())
slots = serializers.PrimaryKeyRelatedField(
allow_null=True, many=True, queryset=Slot.objects.all())
class Meta:
model = ScheduleItem
fields = ('id', 'talk', 'page', 'venue', 'slots')
def create(self, validated_data):
venue_id = validated_data['venue']
slots = validated_data['slots']
talk = validated_data.get('talk')
page = validated_data.get('page')
try:
existing_schedule_item = ScheduleItem.objects.get(
venue_id=venue_id, slots__in=slots)
except ScheduleItem.DoesNotExist:
pass
else:
existing_schedule_item.talk = talk
existing_schedule_item.page = page
existing_schedule_item.slots = slots
existing_schedule_item.save()
return existing_schedule_item
return super(ScheduleItemSerializer, self).create(validated_data)
Clear extra fields when changing items through the schedule viewfrom rest_framework import serializers
from wafer.talks.models import Talk
from wafer.pages.models import Page
from wafer.schedule.models import ScheduleItem, Venue, Slot
class ScheduleItemSerializer(serializers.HyperlinkedModelSerializer):
page = serializers.PrimaryKeyRelatedField(
allow_null=True, queryset=Page.objects.all())
talk = serializers.PrimaryKeyRelatedField(
allow_null=True, queryset=Talk.objects.all())
venue = serializers.PrimaryKeyRelatedField(
allow_null=True, queryset=Venue.objects.all())
slots = serializers.PrimaryKeyRelatedField(
allow_null=True, many=True, queryset=Slot.objects.all())
class Meta:
model = ScheduleItem
fields = ('id', 'talk', 'page', 'venue', 'slots')
def create(self, validated_data):
venue_id = validated_data['venue']
slots = validated_data['slots']
talk = validated_data.get('talk')
page = validated_data.get('page')
try:
existing_schedule_item = ScheduleItem.objects.get(
venue_id=venue_id, slots__in=slots)
except ScheduleItem.DoesNotExist:
pass
else:
existing_schedule_item.talk = talk
existing_schedule_item.page = page
existing_schedule_item.slots = slots
# Clear any existing details that aren't editable by the
# schedule edit view
existing_schedule_item.details = ''
existing_schedule_item.notes = ''
existing_schedule_item.css_class = ''
existing_schedule_item.expand = False
existing_schedule_item.save()
return existing_schedule_item
return super(ScheduleItemSerializer, self).create(validated_data)
|
<commit_before>from rest_framework import serializers
from wafer.talks.models import Talk
from wafer.pages.models import Page
from wafer.schedule.models import ScheduleItem, Venue, Slot
class ScheduleItemSerializer(serializers.HyperlinkedModelSerializer):
page = serializers.PrimaryKeyRelatedField(
allow_null=True, queryset=Page.objects.all())
talk = serializers.PrimaryKeyRelatedField(
allow_null=True, queryset=Talk.objects.all())
venue = serializers.PrimaryKeyRelatedField(
allow_null=True, queryset=Venue.objects.all())
slots = serializers.PrimaryKeyRelatedField(
allow_null=True, many=True, queryset=Slot.objects.all())
class Meta:
model = ScheduleItem
fields = ('id', 'talk', 'page', 'venue', 'slots')
def create(self, validated_data):
venue_id = validated_data['venue']
slots = validated_data['slots']
talk = validated_data.get('talk')
page = validated_data.get('page')
try:
existing_schedule_item = ScheduleItem.objects.get(
venue_id=venue_id, slots__in=slots)
except ScheduleItem.DoesNotExist:
pass
else:
existing_schedule_item.talk = talk
existing_schedule_item.page = page
existing_schedule_item.slots = slots
existing_schedule_item.save()
return existing_schedule_item
return super(ScheduleItemSerializer, self).create(validated_data)
<commit_msg>Clear extra fields when changing items through the schedule view<commit_after>from rest_framework import serializers
from wafer.talks.models import Talk
from wafer.pages.models import Page
from wafer.schedule.models import ScheduleItem, Venue, Slot
class ScheduleItemSerializer(serializers.HyperlinkedModelSerializer):
page = serializers.PrimaryKeyRelatedField(
allow_null=True, queryset=Page.objects.all())
talk = serializers.PrimaryKeyRelatedField(
allow_null=True, queryset=Talk.objects.all())
venue = serializers.PrimaryKeyRelatedField(
allow_null=True, queryset=Venue.objects.all())
slots = serializers.PrimaryKeyRelatedField(
allow_null=True, many=True, queryset=Slot.objects.all())
class Meta:
model = ScheduleItem
fields = ('id', 'talk', 'page', 'venue', 'slots')
def create(self, validated_data):
venue_id = validated_data['venue']
slots = validated_data['slots']
talk = validated_data.get('talk')
page = validated_data.get('page')
try:
existing_schedule_item = ScheduleItem.objects.get(
venue_id=venue_id, slots__in=slots)
except ScheduleItem.DoesNotExist:
pass
else:
existing_schedule_item.talk = talk
existing_schedule_item.page = page
existing_schedule_item.slots = slots
# Clear any existing details that aren't editable by the
# schedule edit view
existing_schedule_item.details = ''
existing_schedule_item.notes = ''
existing_schedule_item.css_class = ''
existing_schedule_item.expand = False
existing_schedule_item.save()
return existing_schedule_item
return super(ScheduleItemSerializer, self).create(validated_data)
|
8684376106d2b6763823573662ffde574d075d1b
|
workers/subscriptions.py
|
workers/subscriptions.py
|
import os
import telegram
from leonard import Leonard
if __name__ == '__main__':
os.chdir('../')
telegram_client = telegram.Bot(os.environ['BOT_TOKEN'])
bot = Leonard(telegram_client)
i = 0
while True:
if i % 10 == 0:
bot.collect_plugins()
for name, check, send in bot.subscriptions:
send(bot, check(bot))
i += 1
|
import os
import time
import telegram
from leonard import Leonard
if __name__ == '__main__':
os.chdir('../')
telegram_client = telegram.Bot(os.environ['BOT_TOKEN'])
bot = Leonard(telegram_client)
bot.collect_plugins()
while True:
for name, check, send in bot.subscriptions:
send(bot, check(bot))
time.sleep(60)
|
Remove collecting plugins every second
|
Remove collecting plugins every second
|
Python
|
mit
|
sevazhidkov/leonard
|
import os
import telegram
from leonard import Leonard
if __name__ == '__main__':
os.chdir('../')
telegram_client = telegram.Bot(os.environ['BOT_TOKEN'])
bot = Leonard(telegram_client)
i = 0
while True:
if i % 10 == 0:
bot.collect_plugins()
for name, check, send in bot.subscriptions:
send(bot, check(bot))
i += 1
Remove collecting plugins every second
|
import os
import time
import telegram
from leonard import Leonard
if __name__ == '__main__':
os.chdir('../')
telegram_client = telegram.Bot(os.environ['BOT_TOKEN'])
bot = Leonard(telegram_client)
bot.collect_plugins()
while True:
for name, check, send in bot.subscriptions:
send(bot, check(bot))
time.sleep(60)
|
<commit_before>import os
import telegram
from leonard import Leonard
if __name__ == '__main__':
os.chdir('../')
telegram_client = telegram.Bot(os.environ['BOT_TOKEN'])
bot = Leonard(telegram_client)
i = 0
while True:
if i % 10 == 0:
bot.collect_plugins()
for name, check, send in bot.subscriptions:
send(bot, check(bot))
i += 1
<commit_msg>Remove collecting plugins every second<commit_after>
|
import os
import time
import telegram
from leonard import Leonard
if __name__ == '__main__':
os.chdir('../')
telegram_client = telegram.Bot(os.environ['BOT_TOKEN'])
bot = Leonard(telegram_client)
bot.collect_plugins()
while True:
for name, check, send in bot.subscriptions:
send(bot, check(bot))
time.sleep(60)
|
import os
import telegram
from leonard import Leonard
if __name__ == '__main__':
os.chdir('../')
telegram_client = telegram.Bot(os.environ['BOT_TOKEN'])
bot = Leonard(telegram_client)
i = 0
while True:
if i % 10 == 0:
bot.collect_plugins()
for name, check, send in bot.subscriptions:
send(bot, check(bot))
i += 1
Remove collecting plugins every secondimport os
import time
import telegram
from leonard import Leonard
if __name__ == '__main__':
os.chdir('../')
telegram_client = telegram.Bot(os.environ['BOT_TOKEN'])
bot = Leonard(telegram_client)
bot.collect_plugins()
while True:
for name, check, send in bot.subscriptions:
send(bot, check(bot))
time.sleep(60)
|
<commit_before>import os
import telegram
from leonard import Leonard
if __name__ == '__main__':
os.chdir('../')
telegram_client = telegram.Bot(os.environ['BOT_TOKEN'])
bot = Leonard(telegram_client)
i = 0
while True:
if i % 10 == 0:
bot.collect_plugins()
for name, check, send in bot.subscriptions:
send(bot, check(bot))
i += 1
<commit_msg>Remove collecting plugins every second<commit_after>import os
import time
import telegram
from leonard import Leonard
if __name__ == '__main__':
os.chdir('../')
telegram_client = telegram.Bot(os.environ['BOT_TOKEN'])
bot = Leonard(telegram_client)
bot.collect_plugins()
while True:
for name, check, send in bot.subscriptions:
send(bot, check(bot))
time.sleep(60)
|
854b0968afc41894d8cf79d712175b497df9828e
|
bolt/spark/utils.py
|
bolt/spark/utils.py
|
def get_kv_shape(shape, key_axes):
func = lambda axis: shape[axis]
return _get_kv_func(func, shape, key_axes)
def get_kv_axes(shape, key_axes):
func = lambda axis: axis
return _get_kv_func(func, shape, key_axes)
def _get_kv_func(func, shape, key_axes):
key_res = [func(axis) for axis in key_axes]
value_res = [func(axis) for axis in range(len(shape)) if axis not in key_axes]
return key_res, value_res
def zip_with_index(rdd):
"""
Alternate version of Spark's zipWithIndex that eagerly returns count.
"""
starts = [0]
count = None
if rdd.getNumPartitions() > 1:
nums = rdd.mapPartitions(lambda it: [sum(1 for _ in it)]).collect()
count = sum(nums)
for i in range(len(nums) - 1):
starts.append(starts[-1] + nums[i])
def func(k, it):
for i, v in enumerate(it, starts[k]):
yield v, i
return count, rdd.mapPartitionsWithIndex(func)
|
def get_kv_shape(shape, key_axes):
func = lambda axis: shape[axis]
return _get_kv_func(func, shape, key_axes)
def get_kv_axes(shape, key_axes):
func = lambda axis: axis
return _get_kv_func(func, shape, key_axes)
def _get_kv_func(func, shape, key_axes):
key_res = [func(axis) for axis in key_axes]
value_res = [func(axis) for axis in range(len(shape)) if axis not in key_axes]
return key_res, value_res
def zip_with_index(rdd):
"""
Alternate version of Spark's zipWithIndex that eagerly returns count.
"""
starts = [0]
if rdd.getNumPartitions() > 1:
nums = rdd.mapPartitions(lambda it: [sum(1 for _ in it)]).collect()
count = sum(nums)
for i in range(len(nums) - 1):
starts.append(starts[-1] + nums[i])
else:
count = rdd.count()
def func(k, it):
for i, v in enumerate(it, starts[k]):
yield v, i
return count, rdd.mapPartitionsWithIndex(func)
|
Fix for count with one partition
|
Fix for count with one partition
|
Python
|
apache-2.0
|
bolt-project/bolt,andrewosh/bolt,jwittenbach/bolt
|
def get_kv_shape(shape, key_axes):
func = lambda axis: shape[axis]
return _get_kv_func(func, shape, key_axes)
def get_kv_axes(shape, key_axes):
func = lambda axis: axis
return _get_kv_func(func, shape, key_axes)
def _get_kv_func(func, shape, key_axes):
key_res = [func(axis) for axis in key_axes]
value_res = [func(axis) for axis in range(len(shape)) if axis not in key_axes]
return key_res, value_res
def zip_with_index(rdd):
"""
Alternate version of Spark's zipWithIndex that eagerly returns count.
"""
starts = [0]
count = None
if rdd.getNumPartitions() > 1:
nums = rdd.mapPartitions(lambda it: [sum(1 for _ in it)]).collect()
count = sum(nums)
for i in range(len(nums) - 1):
starts.append(starts[-1] + nums[i])
def func(k, it):
for i, v in enumerate(it, starts[k]):
yield v, i
return count, rdd.mapPartitionsWithIndex(func)
Fix for count with one partition
|
def get_kv_shape(shape, key_axes):
func = lambda axis: shape[axis]
return _get_kv_func(func, shape, key_axes)
def get_kv_axes(shape, key_axes):
func = lambda axis: axis
return _get_kv_func(func, shape, key_axes)
def _get_kv_func(func, shape, key_axes):
key_res = [func(axis) for axis in key_axes]
value_res = [func(axis) for axis in range(len(shape)) if axis not in key_axes]
return key_res, value_res
def zip_with_index(rdd):
"""
Alternate version of Spark's zipWithIndex that eagerly returns count.
"""
starts = [0]
if rdd.getNumPartitions() > 1:
nums = rdd.mapPartitions(lambda it: [sum(1 for _ in it)]).collect()
count = sum(nums)
for i in range(len(nums) - 1):
starts.append(starts[-1] + nums[i])
else:
count = rdd.count()
def func(k, it):
for i, v in enumerate(it, starts[k]):
yield v, i
return count, rdd.mapPartitionsWithIndex(func)
|
<commit_before>def get_kv_shape(shape, key_axes):
func = lambda axis: shape[axis]
return _get_kv_func(func, shape, key_axes)
def get_kv_axes(shape, key_axes):
func = lambda axis: axis
return _get_kv_func(func, shape, key_axes)
def _get_kv_func(func, shape, key_axes):
key_res = [func(axis) for axis in key_axes]
value_res = [func(axis) for axis in range(len(shape)) if axis not in key_axes]
return key_res, value_res
def zip_with_index(rdd):
"""
Alternate version of Spark's zipWithIndex that eagerly returns count.
"""
starts = [0]
count = None
if rdd.getNumPartitions() > 1:
nums = rdd.mapPartitions(lambda it: [sum(1 for _ in it)]).collect()
count = sum(nums)
for i in range(len(nums) - 1):
starts.append(starts[-1] + nums[i])
def func(k, it):
for i, v in enumerate(it, starts[k]):
yield v, i
return count, rdd.mapPartitionsWithIndex(func)
<commit_msg>Fix for count with one partition<commit_after>
|
def get_kv_shape(shape, key_axes):
func = lambda axis: shape[axis]
return _get_kv_func(func, shape, key_axes)
def get_kv_axes(shape, key_axes):
func = lambda axis: axis
return _get_kv_func(func, shape, key_axes)
def _get_kv_func(func, shape, key_axes):
key_res = [func(axis) for axis in key_axes]
value_res = [func(axis) for axis in range(len(shape)) if axis not in key_axes]
return key_res, value_res
def zip_with_index(rdd):
"""
Alternate version of Spark's zipWithIndex that eagerly returns count.
"""
starts = [0]
if rdd.getNumPartitions() > 1:
nums = rdd.mapPartitions(lambda it: [sum(1 for _ in it)]).collect()
count = sum(nums)
for i in range(len(nums) - 1):
starts.append(starts[-1] + nums[i])
else:
count = rdd.count()
def func(k, it):
for i, v in enumerate(it, starts[k]):
yield v, i
return count, rdd.mapPartitionsWithIndex(func)
|
def get_kv_shape(shape, key_axes):
func = lambda axis: shape[axis]
return _get_kv_func(func, shape, key_axes)
def get_kv_axes(shape, key_axes):
func = lambda axis: axis
return _get_kv_func(func, shape, key_axes)
def _get_kv_func(func, shape, key_axes):
key_res = [func(axis) for axis in key_axes]
value_res = [func(axis) for axis in range(len(shape)) if axis not in key_axes]
return key_res, value_res
def zip_with_index(rdd):
"""
Alternate version of Spark's zipWithIndex that eagerly returns count.
"""
starts = [0]
count = None
if rdd.getNumPartitions() > 1:
nums = rdd.mapPartitions(lambda it: [sum(1 for _ in it)]).collect()
count = sum(nums)
for i in range(len(nums) - 1):
starts.append(starts[-1] + nums[i])
def func(k, it):
for i, v in enumerate(it, starts[k]):
yield v, i
return count, rdd.mapPartitionsWithIndex(func)
Fix for count with one partitiondef get_kv_shape(shape, key_axes):
func = lambda axis: shape[axis]
return _get_kv_func(func, shape, key_axes)
def get_kv_axes(shape, key_axes):
func = lambda axis: axis
return _get_kv_func(func, shape, key_axes)
def _get_kv_func(func, shape, key_axes):
key_res = [func(axis) for axis in key_axes]
value_res = [func(axis) for axis in range(len(shape)) if axis not in key_axes]
return key_res, value_res
def zip_with_index(rdd):
"""
Alternate version of Spark's zipWithIndex that eagerly returns count.
"""
starts = [0]
if rdd.getNumPartitions() > 1:
nums = rdd.mapPartitions(lambda it: [sum(1 for _ in it)]).collect()
count = sum(nums)
for i in range(len(nums) - 1):
starts.append(starts[-1] + nums[i])
else:
count = rdd.count()
def func(k, it):
for i, v in enumerate(it, starts[k]):
yield v, i
return count, rdd.mapPartitionsWithIndex(func)
|
<commit_before>def get_kv_shape(shape, key_axes):
func = lambda axis: shape[axis]
return _get_kv_func(func, shape, key_axes)
def get_kv_axes(shape, key_axes):
func = lambda axis: axis
return _get_kv_func(func, shape, key_axes)
def _get_kv_func(func, shape, key_axes):
key_res = [func(axis) for axis in key_axes]
value_res = [func(axis) for axis in range(len(shape)) if axis not in key_axes]
return key_res, value_res
def zip_with_index(rdd):
"""
Alternate version of Spark's zipWithIndex that eagerly returns count.
"""
starts = [0]
count = None
if rdd.getNumPartitions() > 1:
nums = rdd.mapPartitions(lambda it: [sum(1 for _ in it)]).collect()
count = sum(nums)
for i in range(len(nums) - 1):
starts.append(starts[-1] + nums[i])
def func(k, it):
for i, v in enumerate(it, starts[k]):
yield v, i
return count, rdd.mapPartitionsWithIndex(func)
<commit_msg>Fix for count with one partition<commit_after>def get_kv_shape(shape, key_axes):
func = lambda axis: shape[axis]
return _get_kv_func(func, shape, key_axes)
def get_kv_axes(shape, key_axes):
func = lambda axis: axis
return _get_kv_func(func, shape, key_axes)
def _get_kv_func(func, shape, key_axes):
key_res = [func(axis) for axis in key_axes]
value_res = [func(axis) for axis in range(len(shape)) if axis not in key_axes]
return key_res, value_res
def zip_with_index(rdd):
"""
Alternate version of Spark's zipWithIndex that eagerly returns count.
"""
starts = [0]
if rdd.getNumPartitions() > 1:
nums = rdd.mapPartitions(lambda it: [sum(1 for _ in it)]).collect()
count = sum(nums)
for i in range(len(nums) - 1):
starts.append(starts[-1] + nums[i])
else:
count = rdd.count()
def func(k, it):
for i, v in enumerate(it, starts[k]):
yield v, i
return count, rdd.mapPartitionsWithIndex(func)
|
d53ff6a32f9de757c7eef841d35d110a389419ae
|
cattle/plugins/docker/agent.py
|
cattle/plugins/docker/agent.py
|
from cattle import Config
from cattle.plugins.docker.util import add_to_env
from urlparse import urlparse
def setup_cattle_config_url(instance, create_config):
if instance.get('agentId') is None:
return
if 'labels' not in create_config:
create_config['labels'] = {}
create_config['labels']['io.rancher.container.agent_id'] = \
str(instance.get('agentId'))
url = Config.config_url()
if url is not None:
parsed = urlparse(url)
if 'localhost' == parsed.hostname:
port = Config.api_proxy_listen_port()
add_to_env(create_config,
CATTLE_AGENT_INSTANCE='true',
CATTLE_CONFIG_URL_SCHEME=parsed.scheme,
CATTLE_CONFIG_URL_PATH=parsed.path,
CATTLE_CONFIG_URL_PORT=port)
else:
add_to_env(create_config, CATTLE_CONFIG_URL=url)
add_to_env(create_config, CATTLE_URL=url)
|
from cattle import Config
from cattle.plugins.docker.util import add_to_env
from urlparse import urlparse
def _has_label(instance):
try:
return instance.labels['io.rancher.container.cattle_url'] == 'true'
except:
pass
return False
def setup_cattle_config_url(instance, create_config):
if instance.get('agentId') is None and not _has_label(instance):
return
if 'labels' not in create_config:
create_config['labels'] = {}
create_config['labels']['io.rancher.container.agent_id'] = \
str(instance.get('agentId'))
url = Config.config_url()
if url is not None:
parsed = urlparse(url)
if 'localhost' == parsed.hostname:
port = Config.api_proxy_listen_port()
add_to_env(create_config,
CATTLE_AGENT_INSTANCE='true',
CATTLE_CONFIG_URL_SCHEME=parsed.scheme,
CATTLE_CONFIG_URL_PATH=parsed.path,
CATTLE_CONFIG_URL_PORT=port)
else:
add_to_env(create_config, CATTLE_CONFIG_URL=url)
add_to_env(create_config, CATTLE_URL=url)
|
Add label io.rancher.container.cattle_url=true to get CATTLE_URL env var
|
Add label io.rancher.container.cattle_url=true to get CATTLE_URL env var
|
Python
|
apache-2.0
|
rancherio/python-agent,rancherio/python-agent,rancher/python-agent,rancher/python-agent
|
from cattle import Config
from cattle.plugins.docker.util import add_to_env
from urlparse import urlparse
def setup_cattle_config_url(instance, create_config):
if instance.get('agentId') is None:
return
if 'labels' not in create_config:
create_config['labels'] = {}
create_config['labels']['io.rancher.container.agent_id'] = \
str(instance.get('agentId'))
url = Config.config_url()
if url is not None:
parsed = urlparse(url)
if 'localhost' == parsed.hostname:
port = Config.api_proxy_listen_port()
add_to_env(create_config,
CATTLE_AGENT_INSTANCE='true',
CATTLE_CONFIG_URL_SCHEME=parsed.scheme,
CATTLE_CONFIG_URL_PATH=parsed.path,
CATTLE_CONFIG_URL_PORT=port)
else:
add_to_env(create_config, CATTLE_CONFIG_URL=url)
add_to_env(create_config, CATTLE_URL=url)
Add label io.rancher.container.cattle_url=true to get CATTLE_URL env var
|
from cattle import Config
from cattle.plugins.docker.util import add_to_env
from urlparse import urlparse
def _has_label(instance):
try:
return instance.labels['io.rancher.container.cattle_url'] == 'true'
except:
pass
return False
def setup_cattle_config_url(instance, create_config):
if instance.get('agentId') is None and not _has_label(instance):
return
if 'labels' not in create_config:
create_config['labels'] = {}
create_config['labels']['io.rancher.container.agent_id'] = \
str(instance.get('agentId'))
url = Config.config_url()
if url is not None:
parsed = urlparse(url)
if 'localhost' == parsed.hostname:
port = Config.api_proxy_listen_port()
add_to_env(create_config,
CATTLE_AGENT_INSTANCE='true',
CATTLE_CONFIG_URL_SCHEME=parsed.scheme,
CATTLE_CONFIG_URL_PATH=parsed.path,
CATTLE_CONFIG_URL_PORT=port)
else:
add_to_env(create_config, CATTLE_CONFIG_URL=url)
add_to_env(create_config, CATTLE_URL=url)
|
<commit_before>from cattle import Config
from cattle.plugins.docker.util import add_to_env
from urlparse import urlparse
def setup_cattle_config_url(instance, create_config):
if instance.get('agentId') is None:
return
if 'labels' not in create_config:
create_config['labels'] = {}
create_config['labels']['io.rancher.container.agent_id'] = \
str(instance.get('agentId'))
url = Config.config_url()
if url is not None:
parsed = urlparse(url)
if 'localhost' == parsed.hostname:
port = Config.api_proxy_listen_port()
add_to_env(create_config,
CATTLE_AGENT_INSTANCE='true',
CATTLE_CONFIG_URL_SCHEME=parsed.scheme,
CATTLE_CONFIG_URL_PATH=parsed.path,
CATTLE_CONFIG_URL_PORT=port)
else:
add_to_env(create_config, CATTLE_CONFIG_URL=url)
add_to_env(create_config, CATTLE_URL=url)
<commit_msg>Add label io.rancher.container.cattle_url=true to get CATTLE_URL env var<commit_after>
|
from cattle import Config
from cattle.plugins.docker.util import add_to_env
from urlparse import urlparse
def _has_label(instance):
try:
return instance.labels['io.rancher.container.cattle_url'] == 'true'
except:
pass
return False
def setup_cattle_config_url(instance, create_config):
if instance.get('agentId') is None and not _has_label(instance):
return
if 'labels' not in create_config:
create_config['labels'] = {}
create_config['labels']['io.rancher.container.agent_id'] = \
str(instance.get('agentId'))
url = Config.config_url()
if url is not None:
parsed = urlparse(url)
if 'localhost' == parsed.hostname:
port = Config.api_proxy_listen_port()
add_to_env(create_config,
CATTLE_AGENT_INSTANCE='true',
CATTLE_CONFIG_URL_SCHEME=parsed.scheme,
CATTLE_CONFIG_URL_PATH=parsed.path,
CATTLE_CONFIG_URL_PORT=port)
else:
add_to_env(create_config, CATTLE_CONFIG_URL=url)
add_to_env(create_config, CATTLE_URL=url)
|
from cattle import Config
from cattle.plugins.docker.util import add_to_env
from urlparse import urlparse
def setup_cattle_config_url(instance, create_config):
if instance.get('agentId') is None:
return
if 'labels' not in create_config:
create_config['labels'] = {}
create_config['labels']['io.rancher.container.agent_id'] = \
str(instance.get('agentId'))
url = Config.config_url()
if url is not None:
parsed = urlparse(url)
if 'localhost' == parsed.hostname:
port = Config.api_proxy_listen_port()
add_to_env(create_config,
CATTLE_AGENT_INSTANCE='true',
CATTLE_CONFIG_URL_SCHEME=parsed.scheme,
CATTLE_CONFIG_URL_PATH=parsed.path,
CATTLE_CONFIG_URL_PORT=port)
else:
add_to_env(create_config, CATTLE_CONFIG_URL=url)
add_to_env(create_config, CATTLE_URL=url)
Add label io.rancher.container.cattle_url=true to get CATTLE_URL env varfrom cattle import Config
from cattle.plugins.docker.util import add_to_env
from urlparse import urlparse
def _has_label(instance):
try:
return instance.labels['io.rancher.container.cattle_url'] == 'true'
except:
pass
return False
def setup_cattle_config_url(instance, create_config):
if instance.get('agentId') is None and not _has_label(instance):
return
if 'labels' not in create_config:
create_config['labels'] = {}
create_config['labels']['io.rancher.container.agent_id'] = \
str(instance.get('agentId'))
url = Config.config_url()
if url is not None:
parsed = urlparse(url)
if 'localhost' == parsed.hostname:
port = Config.api_proxy_listen_port()
add_to_env(create_config,
CATTLE_AGENT_INSTANCE='true',
CATTLE_CONFIG_URL_SCHEME=parsed.scheme,
CATTLE_CONFIG_URL_PATH=parsed.path,
CATTLE_CONFIG_URL_PORT=port)
else:
add_to_env(create_config, CATTLE_CONFIG_URL=url)
add_to_env(create_config, CATTLE_URL=url)
|
<commit_before>from cattle import Config
from cattle.plugins.docker.util import add_to_env
from urlparse import urlparse
def setup_cattle_config_url(instance, create_config):
if instance.get('agentId') is None:
return
if 'labels' not in create_config:
create_config['labels'] = {}
create_config['labels']['io.rancher.container.agent_id'] = \
str(instance.get('agentId'))
url = Config.config_url()
if url is not None:
parsed = urlparse(url)
if 'localhost' == parsed.hostname:
port = Config.api_proxy_listen_port()
add_to_env(create_config,
CATTLE_AGENT_INSTANCE='true',
CATTLE_CONFIG_URL_SCHEME=parsed.scheme,
CATTLE_CONFIG_URL_PATH=parsed.path,
CATTLE_CONFIG_URL_PORT=port)
else:
add_to_env(create_config, CATTLE_CONFIG_URL=url)
add_to_env(create_config, CATTLE_URL=url)
<commit_msg>Add label io.rancher.container.cattle_url=true to get CATTLE_URL env var<commit_after>from cattle import Config
from cattle.plugins.docker.util import add_to_env
from urlparse import urlparse
def _has_label(instance):
try:
return instance.labels['io.rancher.container.cattle_url'] == 'true'
except:
pass
return False
def setup_cattle_config_url(instance, create_config):
if instance.get('agentId') is None and not _has_label(instance):
return
if 'labels' not in create_config:
create_config['labels'] = {}
create_config['labels']['io.rancher.container.agent_id'] = \
str(instance.get('agentId'))
url = Config.config_url()
if url is not None:
parsed = urlparse(url)
if 'localhost' == parsed.hostname:
port = Config.api_proxy_listen_port()
add_to_env(create_config,
CATTLE_AGENT_INSTANCE='true',
CATTLE_CONFIG_URL_SCHEME=parsed.scheme,
CATTLE_CONFIG_URL_PATH=parsed.path,
CATTLE_CONFIG_URL_PORT=port)
else:
add_to_env(create_config, CATTLE_CONFIG_URL=url)
add_to_env(create_config, CATTLE_URL=url)
|
9a251ec185d53ad0bc11d492443ac15e45b95d5e
|
cbagent/collectors/__init__.py
|
cbagent/collectors/__init__.py
|
from collector import Collector
from active_tasks import ActiveTasks
from atop import Atop
from iostat import IO
from latency import Latency
from observe import ObserveLatency
from net import Net
from ns_server import NSServer
from secondary_stats import SecondaryStats
from n1ql_stats import N1QLStats
from ps import PS
from typeperf import TypePerf
from spring_latency import SpringLatency,SpringSubdocLatency, SpringQueryLatency, \
SpringN1QLQueryLatency
from sync_gateway import SyncGateway
from xdcr_lag import XdcrLag
|
from collector import Collector
from active_tasks import ActiveTasks
from atop import Atop
from iostat import IO
from latency import Latency
from observe import ObserveLatency
from net import Net
from ns_server import NSServer
from secondary_stats import SecondaryStats
from secondary_debugstats import SecondaryDebugStats
from secondary_latency import SecondaryLatencyStats
from n1ql_stats import N1QLStats
from ps import PS
from typeperf import TypePerf
from spring_latency import (SpringLatency, SpringQueryLatency,
SpringSubdocLatency, SpringSpatialQueryLatency,
SpringN1QLQueryLatency)
from sync_gateway import SyncGateway
from xdcr_lag import XdcrLag
|
Revert "CBD: 1686 cbagent changes for cbagent"
|
Revert "CBD: 1686 cbagent changes for cbagent"
This reverts commit 9fb8d1f0f9548a4a5fb5438b0e04996b9828f202.
Change-Id: Ifcc2e797ba28dcc838c7dae7541e0bc325d954be
Reviewed-on: http://review.couchbase.org/59921
Reviewed-by: sandip nandi <7cd9c95c004724be086626cd470f9c018506c6b8@couchbase.com>
Tested-by: sandip nandi <7cd9c95c004724be086626cd470f9c018506c6b8@couchbase.com>
|
Python
|
apache-2.0
|
couchbase/cbagent
|
from collector import Collector
from active_tasks import ActiveTasks
from atop import Atop
from iostat import IO
from latency import Latency
from observe import ObserveLatency
from net import Net
from ns_server import NSServer
from secondary_stats import SecondaryStats
from n1ql_stats import N1QLStats
from ps import PS
from typeperf import TypePerf
from spring_latency import SpringLatency,SpringSubdocLatency, SpringQueryLatency, \
SpringN1QLQueryLatency
from sync_gateway import SyncGateway
from xdcr_lag import XdcrLag
Revert "CBD: 1686 cbagent changes for cbagent"
This reverts commit 9fb8d1f0f9548a4a5fb5438b0e04996b9828f202.
Change-Id: Ifcc2e797ba28dcc838c7dae7541e0bc325d954be
Reviewed-on: http://review.couchbase.org/59921
Reviewed-by: sandip nandi <7cd9c95c004724be086626cd470f9c018506c6b8@couchbase.com>
Tested-by: sandip nandi <7cd9c95c004724be086626cd470f9c018506c6b8@couchbase.com>
|
from collector import Collector
from active_tasks import ActiveTasks
from atop import Atop
from iostat import IO
from latency import Latency
from observe import ObserveLatency
from net import Net
from ns_server import NSServer
from secondary_stats import SecondaryStats
from secondary_debugstats import SecondaryDebugStats
from secondary_latency import SecondaryLatencyStats
from n1ql_stats import N1QLStats
from ps import PS
from typeperf import TypePerf
from spring_latency import (SpringLatency, SpringQueryLatency,
SpringSubdocLatency, SpringSpatialQueryLatency,
SpringN1QLQueryLatency)
from sync_gateway import SyncGateway
from xdcr_lag import XdcrLag
|
<commit_before>from collector import Collector
from active_tasks import ActiveTasks
from atop import Atop
from iostat import IO
from latency import Latency
from observe import ObserveLatency
from net import Net
from ns_server import NSServer
from secondary_stats import SecondaryStats
from n1ql_stats import N1QLStats
from ps import PS
from typeperf import TypePerf
from spring_latency import SpringLatency,SpringSubdocLatency, SpringQueryLatency, \
SpringN1QLQueryLatency
from sync_gateway import SyncGateway
from xdcr_lag import XdcrLag
<commit_msg>Revert "CBD: 1686 cbagent changes for cbagent"
This reverts commit 9fb8d1f0f9548a4a5fb5438b0e04996b9828f202.
Change-Id: Ifcc2e797ba28dcc838c7dae7541e0bc325d954be
Reviewed-on: http://review.couchbase.org/59921
Reviewed-by: sandip nandi <7cd9c95c004724be086626cd470f9c018506c6b8@couchbase.com>
Tested-by: sandip nandi <7cd9c95c004724be086626cd470f9c018506c6b8@couchbase.com><commit_after>
|
from collector import Collector
from active_tasks import ActiveTasks
from atop import Atop
from iostat import IO
from latency import Latency
from observe import ObserveLatency
from net import Net
from ns_server import NSServer
from secondary_stats import SecondaryStats
from secondary_debugstats import SecondaryDebugStats
from secondary_latency import SecondaryLatencyStats
from n1ql_stats import N1QLStats
from ps import PS
from typeperf import TypePerf
from spring_latency import (SpringLatency, SpringQueryLatency,
SpringSubdocLatency, SpringSpatialQueryLatency,
SpringN1QLQueryLatency)
from sync_gateway import SyncGateway
from xdcr_lag import XdcrLag
|
from collector import Collector
from active_tasks import ActiveTasks
from atop import Atop
from iostat import IO
from latency import Latency
from observe import ObserveLatency
from net import Net
from ns_server import NSServer
from secondary_stats import SecondaryStats
from n1ql_stats import N1QLStats
from ps import PS
from typeperf import TypePerf
from spring_latency import SpringLatency,SpringSubdocLatency, SpringQueryLatency, \
SpringN1QLQueryLatency
from sync_gateway import SyncGateway
from xdcr_lag import XdcrLag
Revert "CBD: 1686 cbagent changes for cbagent"
This reverts commit 9fb8d1f0f9548a4a5fb5438b0e04996b9828f202.
Change-Id: Ifcc2e797ba28dcc838c7dae7541e0bc325d954be
Reviewed-on: http://review.couchbase.org/59921
Reviewed-by: sandip nandi <7cd9c95c004724be086626cd470f9c018506c6b8@couchbase.com>
Tested-by: sandip nandi <7cd9c95c004724be086626cd470f9c018506c6b8@couchbase.com>from collector import Collector
from active_tasks import ActiveTasks
from atop import Atop
from iostat import IO
from latency import Latency
from observe import ObserveLatency
from net import Net
from ns_server import NSServer
from secondary_stats import SecondaryStats
from secondary_debugstats import SecondaryDebugStats
from secondary_latency import SecondaryLatencyStats
from n1ql_stats import N1QLStats
from ps import PS
from typeperf import TypePerf
from spring_latency import (SpringLatency, SpringQueryLatency,
SpringSubdocLatency, SpringSpatialQueryLatency,
SpringN1QLQueryLatency)
from sync_gateway import SyncGateway
from xdcr_lag import XdcrLag
|
<commit_before>from collector import Collector
from active_tasks import ActiveTasks
from atop import Atop
from iostat import IO
from latency import Latency
from observe import ObserveLatency
from net import Net
from ns_server import NSServer
from secondary_stats import SecondaryStats
from n1ql_stats import N1QLStats
from ps import PS
from typeperf import TypePerf
from spring_latency import SpringLatency,SpringSubdocLatency, SpringQueryLatency, \
SpringN1QLQueryLatency
from sync_gateway import SyncGateway
from xdcr_lag import XdcrLag
<commit_msg>Revert "CBD: 1686 cbagent changes for cbagent"
This reverts commit 9fb8d1f0f9548a4a5fb5438b0e04996b9828f202.
Change-Id: Ifcc2e797ba28dcc838c7dae7541e0bc325d954be
Reviewed-on: http://review.couchbase.org/59921
Reviewed-by: sandip nandi <7cd9c95c004724be086626cd470f9c018506c6b8@couchbase.com>
Tested-by: sandip nandi <7cd9c95c004724be086626cd470f9c018506c6b8@couchbase.com><commit_after>from collector import Collector
from active_tasks import ActiveTasks
from atop import Atop
from iostat import IO
from latency import Latency
from observe import ObserveLatency
from net import Net
from ns_server import NSServer
from secondary_stats import SecondaryStats
from secondary_debugstats import SecondaryDebugStats
from secondary_latency import SecondaryLatencyStats
from n1ql_stats import N1QLStats
from ps import PS
from typeperf import TypePerf
from spring_latency import (SpringLatency, SpringQueryLatency,
SpringSubdocLatency, SpringSpatialQueryLatency,
SpringN1QLQueryLatency)
from sync_gateway import SyncGateway
from xdcr_lag import XdcrLag
|
44d437e7c7daf3255c3ab9b0dbaa9bdd700008a4
|
foliant/gdrive.py
|
foliant/gdrive.py
|
import os.path
import webbrowser
import pydrive.auth, pydrive.drive
def upload(document):
"""Upload .docx file to Google Drive and return a web view link to it."""
auth = pydrive.auth.GoogleAuth()
auth.LocalWebserverAuth()
gdrive = pydrive.drive.GoogleDrive(auth)
gdoc = gdrive.CreateFile({
"title": os.path.splitext(os.path.basename(document))[0]
})
gdoc.SetContentFile(document)
gdoc.Upload({"convert": True})
webbrowser.open(gdoc["alternateLink"])
return gdoc["alternateLink"]
|
import os.path
import webbrowser
import pydrive.auth, pydrive.drive
def upload(document):
"""Upload .docx file to Google Drive and return a web view link to it."""
auth = pydrive.auth.GoogleAuth()
auth.CommandLineAuth()
gdrive = pydrive.drive.GoogleDrive(auth)
gdoc = gdrive.CreateFile({
"title": os.path.splitext(os.path.basename(document))[0]
})
gdoc.SetContentFile(document)
gdoc.Upload({"convert": True})
webbrowser.open(gdoc["alternateLink"])
return gdoc["alternateLink"]
|
Switch from local server to command line auth to fix upload in Docker.
|
GDrive: Switch from local server to command line auth to fix upload in Docker.
|
Python
|
mit
|
foliant-docs/foliant
|
import os.path
import webbrowser
import pydrive.auth, pydrive.drive
def upload(document):
"""Upload .docx file to Google Drive and return a web view link to it."""
auth = pydrive.auth.GoogleAuth()
auth.LocalWebserverAuth()
gdrive = pydrive.drive.GoogleDrive(auth)
gdoc = gdrive.CreateFile({
"title": os.path.splitext(os.path.basename(document))[0]
})
gdoc.SetContentFile(document)
gdoc.Upload({"convert": True})
webbrowser.open(gdoc["alternateLink"])
return gdoc["alternateLink"]
GDrive: Switch from local server to command line auth to fix upload in Docker.
|
import os.path
import webbrowser
import pydrive.auth, pydrive.drive
def upload(document):
"""Upload .docx file to Google Drive and return a web view link to it."""
auth = pydrive.auth.GoogleAuth()
auth.CommandLineAuth()
gdrive = pydrive.drive.GoogleDrive(auth)
gdoc = gdrive.CreateFile({
"title": os.path.splitext(os.path.basename(document))[0]
})
gdoc.SetContentFile(document)
gdoc.Upload({"convert": True})
webbrowser.open(gdoc["alternateLink"])
return gdoc["alternateLink"]
|
<commit_before>import os.path
import webbrowser
import pydrive.auth, pydrive.drive
def upload(document):
"""Upload .docx file to Google Drive and return a web view link to it."""
auth = pydrive.auth.GoogleAuth()
auth.LocalWebserverAuth()
gdrive = pydrive.drive.GoogleDrive(auth)
gdoc = gdrive.CreateFile({
"title": os.path.splitext(os.path.basename(document))[0]
})
gdoc.SetContentFile(document)
gdoc.Upload({"convert": True})
webbrowser.open(gdoc["alternateLink"])
return gdoc["alternateLink"]
<commit_msg>GDrive: Switch from local server to command line auth to fix upload in Docker.<commit_after>
|
import os.path
import webbrowser
import pydrive.auth, pydrive.drive
def upload(document):
"""Upload .docx file to Google Drive and return a web view link to it."""
auth = pydrive.auth.GoogleAuth()
auth.CommandLineAuth()
gdrive = pydrive.drive.GoogleDrive(auth)
gdoc = gdrive.CreateFile({
"title": os.path.splitext(os.path.basename(document))[0]
})
gdoc.SetContentFile(document)
gdoc.Upload({"convert": True})
webbrowser.open(gdoc["alternateLink"])
return gdoc["alternateLink"]
|
import os.path
import webbrowser
import pydrive.auth, pydrive.drive
def upload(document):
"""Upload .docx file to Google Drive and return a web view link to it."""
auth = pydrive.auth.GoogleAuth()
auth.LocalWebserverAuth()
gdrive = pydrive.drive.GoogleDrive(auth)
gdoc = gdrive.CreateFile({
"title": os.path.splitext(os.path.basename(document))[0]
})
gdoc.SetContentFile(document)
gdoc.Upload({"convert": True})
webbrowser.open(gdoc["alternateLink"])
return gdoc["alternateLink"]
GDrive: Switch from local server to command line auth to fix upload in Docker.import os.path
import webbrowser
import pydrive.auth, pydrive.drive
def upload(document):
"""Upload .docx file to Google Drive and return a web view link to it."""
auth = pydrive.auth.GoogleAuth()
auth.CommandLineAuth()
gdrive = pydrive.drive.GoogleDrive(auth)
gdoc = gdrive.CreateFile({
"title": os.path.splitext(os.path.basename(document))[0]
})
gdoc.SetContentFile(document)
gdoc.Upload({"convert": True})
webbrowser.open(gdoc["alternateLink"])
return gdoc["alternateLink"]
|
<commit_before>import os.path
import webbrowser
import pydrive.auth, pydrive.drive
def upload(document):
"""Upload .docx file to Google Drive and return a web view link to it."""
auth = pydrive.auth.GoogleAuth()
auth.LocalWebserverAuth()
gdrive = pydrive.drive.GoogleDrive(auth)
gdoc = gdrive.CreateFile({
"title": os.path.splitext(os.path.basename(document))[0]
})
gdoc.SetContentFile(document)
gdoc.Upload({"convert": True})
webbrowser.open(gdoc["alternateLink"])
return gdoc["alternateLink"]
<commit_msg>GDrive: Switch from local server to command line auth to fix upload in Docker.<commit_after>import os.path
import webbrowser
import pydrive.auth, pydrive.drive
def upload(document):
"""Upload .docx file to Google Drive and return a web view link to it."""
auth = pydrive.auth.GoogleAuth()
auth.CommandLineAuth()
gdrive = pydrive.drive.GoogleDrive(auth)
gdoc = gdrive.CreateFile({
"title": os.path.splitext(os.path.basename(document))[0]
})
gdoc.SetContentFile(document)
gdoc.Upload({"convert": True})
webbrowser.open(gdoc["alternateLink"])
return gdoc["alternateLink"]
|
45f56adc0e9c935f5377791f3735e692b6e57c74
|
pinax_theme_bootstrap/templatetags/pinax_theme_bootstrap_tags.py
|
pinax_theme_bootstrap/templatetags/pinax_theme_bootstrap_tags.py
|
from django import template
from django.contrib.messages.utils import get_level_tags
from django.utils.encoding import force_text
LEVEL_TAGS = get_level_tags()
register = template.Library()
@register.simple_tag()
def get_message_tags(message):
"""
Returns the message's level_tag prefixed with Bootstrap's "alert-" prefix
along with any tags included in message.extra_tags
Messages in Django >= 1.7 have a message.level_tag attr
"""
level_tag = force_text(LEVEL_TAGS.get(message.level, ''), strings_only=True)
if level_tag == u"error":
level_tag = u"danger"
alert_level_tag = u"alert-{tag}".format(tag=level_tag)
tags = [alert_level_tag]
extra_tags = force_text(message.extra_tags, strings_only=True)
if extra_tags:
tags.append(extra_tags)
return u" ".join(tags)
|
from django import template
from django.contrib.messages.utils import get_level_tags
from django.utils.encoding import force_text
LEVEL_TAGS = get_level_tags()
register = template.Library()
@register.simple_tag()
def get_message_tags(message):
"""
Returns the message's level_tag prefixed with Bootstrap's "alert-" prefix
along with any tags included in message.extra_tags
Messages in Django >= 1.7 have a message.level_tag attr
"""
level_tag = force_text(LEVEL_TAGS.get(message.level, ''), strings_only=True)
if level_tag == u"error":
level_tag = u"danger"
if level_tag:
alert_level_tag = u"alert-{tag}".format(tag=level_tag)
else:
alert_level_tag = None
extra_tags = force_text(message.extra_tags, strings_only=True)
if extra_tags and alert_level_tag:
return u' '.join([extra_tags, alert_level_tag])
elif extra_tags:
return extra_tags
elif alert_level_tag:
return alert_level_tag
return u''
|
Handle instances where level_tag is undefined
|
Handle instances where level_tag is undefined
Better mimics the implementation of message.tags in Django 1.7
|
Python
|
mit
|
foraliving/foraliving,druss16/danslist,grahamu/pinax-theme-bootstrap,foraliving/foraliving,jacobwegner/pinax-theme-bootstrap,druss16/danslist,jacobwegner/pinax-theme-bootstrap,druss16/danslist,foraliving/foraliving,grahamu/pinax-theme-bootstrap,grahamu/pinax-theme-bootstrap,jacobwegner/pinax-theme-bootstrap
|
from django import template
from django.contrib.messages.utils import get_level_tags
from django.utils.encoding import force_text
LEVEL_TAGS = get_level_tags()
register = template.Library()
@register.simple_tag()
def get_message_tags(message):
"""
Returns the message's level_tag prefixed with Bootstrap's "alert-" prefix
along with any tags included in message.extra_tags
Messages in Django >= 1.7 have a message.level_tag attr
"""
level_tag = force_text(LEVEL_TAGS.get(message.level, ''), strings_only=True)
if level_tag == u"error":
level_tag = u"danger"
alert_level_tag = u"alert-{tag}".format(tag=level_tag)
tags = [alert_level_tag]
extra_tags = force_text(message.extra_tags, strings_only=True)
if extra_tags:
tags.append(extra_tags)
return u" ".join(tags)
Handle instances where level_tag is undefined
Better mimics the implementation of message.tags in Django 1.7
|
from django import template
from django.contrib.messages.utils import get_level_tags
from django.utils.encoding import force_text
LEVEL_TAGS = get_level_tags()
register = template.Library()
@register.simple_tag()
def get_message_tags(message):
"""
Returns the message's level_tag prefixed with Bootstrap's "alert-" prefix
along with any tags included in message.extra_tags
Messages in Django >= 1.7 have a message.level_tag attr
"""
level_tag = force_text(LEVEL_TAGS.get(message.level, ''), strings_only=True)
if level_tag == u"error":
level_tag = u"danger"
if level_tag:
alert_level_tag = u"alert-{tag}".format(tag=level_tag)
else:
alert_level_tag = None
extra_tags = force_text(message.extra_tags, strings_only=True)
if extra_tags and alert_level_tag:
return u' '.join([extra_tags, alert_level_tag])
elif extra_tags:
return extra_tags
elif alert_level_tag:
return alert_level_tag
return u''
|
<commit_before>from django import template
from django.contrib.messages.utils import get_level_tags
from django.utils.encoding import force_text
LEVEL_TAGS = get_level_tags()
register = template.Library()
@register.simple_tag()
def get_message_tags(message):
"""
Returns the message's level_tag prefixed with Bootstrap's "alert-" prefix
along with any tags included in message.extra_tags
Messages in Django >= 1.7 have a message.level_tag attr
"""
level_tag = force_text(LEVEL_TAGS.get(message.level, ''), strings_only=True)
if level_tag == u"error":
level_tag = u"danger"
alert_level_tag = u"alert-{tag}".format(tag=level_tag)
tags = [alert_level_tag]
extra_tags = force_text(message.extra_tags, strings_only=True)
if extra_tags:
tags.append(extra_tags)
return u" ".join(tags)
<commit_msg>Handle instances where level_tag is undefined
Better mimics the implementation of message.tags in Django 1.7<commit_after>
|
from django import template
from django.contrib.messages.utils import get_level_tags
from django.utils.encoding import force_text
LEVEL_TAGS = get_level_tags()
register = template.Library()
@register.simple_tag()
def get_message_tags(message):
"""
Returns the message's level_tag prefixed with Bootstrap's "alert-" prefix
along with any tags included in message.extra_tags
Messages in Django >= 1.7 have a message.level_tag attr
"""
level_tag = force_text(LEVEL_TAGS.get(message.level, ''), strings_only=True)
if level_tag == u"error":
level_tag = u"danger"
if level_tag:
alert_level_tag = u"alert-{tag}".format(tag=level_tag)
else:
alert_level_tag = None
extra_tags = force_text(message.extra_tags, strings_only=True)
if extra_tags and alert_level_tag:
return u' '.join([extra_tags, alert_level_tag])
elif extra_tags:
return extra_tags
elif alert_level_tag:
return alert_level_tag
return u''
|
from django import template
from django.contrib.messages.utils import get_level_tags
from django.utils.encoding import force_text
LEVEL_TAGS = get_level_tags()
register = template.Library()
@register.simple_tag()
def get_message_tags(message):
"""
Returns the message's level_tag prefixed with Bootstrap's "alert-" prefix
along with any tags included in message.extra_tags
Messages in Django >= 1.7 have a message.level_tag attr
"""
level_tag = force_text(LEVEL_TAGS.get(message.level, ''), strings_only=True)
if level_tag == u"error":
level_tag = u"danger"
alert_level_tag = u"alert-{tag}".format(tag=level_tag)
tags = [alert_level_tag]
extra_tags = force_text(message.extra_tags, strings_only=True)
if extra_tags:
tags.append(extra_tags)
return u" ".join(tags)
Handle instances where level_tag is undefined
Better mimics the implementation of message.tags in Django 1.7from django import template
from django.contrib.messages.utils import get_level_tags
from django.utils.encoding import force_text
LEVEL_TAGS = get_level_tags()
register = template.Library()
@register.simple_tag()
def get_message_tags(message):
"""
Returns the message's level_tag prefixed with Bootstrap's "alert-" prefix
along with any tags included in message.extra_tags
Messages in Django >= 1.7 have a message.level_tag attr
"""
level_tag = force_text(LEVEL_TAGS.get(message.level, ''), strings_only=True)
if level_tag == u"error":
level_tag = u"danger"
if level_tag:
alert_level_tag = u"alert-{tag}".format(tag=level_tag)
else:
alert_level_tag = None
extra_tags = force_text(message.extra_tags, strings_only=True)
if extra_tags and alert_level_tag:
return u' '.join([extra_tags, alert_level_tag])
elif extra_tags:
return extra_tags
elif alert_level_tag:
return alert_level_tag
return u''
|
<commit_before>from django import template
from django.contrib.messages.utils import get_level_tags
from django.utils.encoding import force_text
LEVEL_TAGS = get_level_tags()
register = template.Library()
@register.simple_tag()
def get_message_tags(message):
"""
Returns the message's level_tag prefixed with Bootstrap's "alert-" prefix
along with any tags included in message.extra_tags
Messages in Django >= 1.7 have a message.level_tag attr
"""
level_tag = force_text(LEVEL_TAGS.get(message.level, ''), strings_only=True)
if level_tag == u"error":
level_tag = u"danger"
alert_level_tag = u"alert-{tag}".format(tag=level_tag)
tags = [alert_level_tag]
extra_tags = force_text(message.extra_tags, strings_only=True)
if extra_tags:
tags.append(extra_tags)
return u" ".join(tags)
<commit_msg>Handle instances where level_tag is undefined
Better mimics the implementation of message.tags in Django 1.7<commit_after>from django import template
from django.contrib.messages.utils import get_level_tags
from django.utils.encoding import force_text
LEVEL_TAGS = get_level_tags()
register = template.Library()
@register.simple_tag()
def get_message_tags(message):
"""
Returns the message's level_tag prefixed with Bootstrap's "alert-" prefix
along with any tags included in message.extra_tags
Messages in Django >= 1.7 have a message.level_tag attr
"""
level_tag = force_text(LEVEL_TAGS.get(message.level, ''), strings_only=True)
if level_tag == u"error":
level_tag = u"danger"
if level_tag:
alert_level_tag = u"alert-{tag}".format(tag=level_tag)
else:
alert_level_tag = None
extra_tags = force_text(message.extra_tags, strings_only=True)
if extra_tags and alert_level_tag:
return u' '.join([extra_tags, alert_level_tag])
elif extra_tags:
return extra_tags
elif alert_level_tag:
return alert_level_tag
return u''
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.